diff --git a/.DS_Store b/.DS_Store new file mode 100644 index 0000000..04f40f9 Binary files /dev/null and b/.DS_Store differ diff --git a/.gitignore b/.gitignore deleted file mode 100644 index 39fb081..0000000 --- a/.gitignore +++ /dev/null @@ -1,9 +0,0 @@ -*.iml -.gradle -/local.properties -/.idea/workspace.xml -/.idea/libraries -.DS_Store -/build -/captures -.externalNativeBuild diff --git a/.idea/caches/build_file_checksums.ser b/.idea/caches/build_file_checksums.ser deleted file mode 100644 index 91e5575..0000000 Binary files a/.idea/caches/build_file_checksums.ser and /dev/null differ diff --git a/.idea/codeStyles/Project.xml b/.idea/codeStyles/Project.xml deleted file mode 100644 index 30aa626..0000000 --- a/.idea/codeStyles/Project.xml +++ /dev/null @@ -1,29 +0,0 @@ - - - - - - - - - - - - - - \ No newline at end of file diff --git a/.idea/gradle.xml b/.idea/gradle.xml deleted file mode 100644 index 7ac24c7..0000000 --- a/.idea/gradle.xml +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - \ No newline at end of file diff --git a/.idea/markdown-navigator/profiles_settings.xml b/.idea/markdown-navigator/profiles_settings.xml deleted file mode 100644 index 57927c5..0000000 --- a/.idea/markdown-navigator/profiles_settings.xml +++ /dev/null @@ -1,3 +0,0 @@ - - - \ No newline at end of file diff --git a/.idea/misc.xml b/.idea/misc.xml deleted file mode 100644 index 2cebb57..0000000 --- a/.idea/misc.xml +++ /dev/null @@ -1,118 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 1.8 - - - - - - - - \ No newline at end of file diff --git a/.idea/modules.xml b/.idea/modules.xml deleted file mode 100644 index e0cc4a4..0000000 --- a/.idea/modules.xml +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - - - - \ No newline at end of file diff --git a/.idea/runConfigurations.xml b/.idea/runConfigurations.xml deleted file mode 100644 index 7f68460..0000000 --- a/.idea/runConfigurations.xml +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - \ No 
newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml deleted file mode 100644 index 94a25f7..0000000 --- a/.idea/vcs.xml +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - \ No newline at end of file diff --git a/README.md b/README.md index 395104c..2c6cb9b 100644 --- a/README.md +++ b/README.md @@ -1,15 +1,75 @@ # 作者简介 -Mr. Zhu ,英文名aserbao! 从事Android开发多年,技术不高,用来工作刚刚好。对视频音视频处理,硬编码这一块有一定的研究。之前北漂,现在深漂。同名微信公众号aserbao的维护者,喜欢看书,摄影,交友,目前生活这样子。 -![](https://github.com/aserbao/AserbaosAndroid/blob/master/app/src/main/assets/images/weixin.jpg) +Mr. Zhu ,英文名aserbao! 从事Android开发多年,技术不高,用来工作刚刚好。对视频音视频处理,硬编码这一块有一定的研究。之前北漂,现在深漂。同名微信公众号aserbao的维护者,喜欢看书,摄影,交友,目前生活这样子。欢迎大家关注我的公众号和微信一起学习交流。 +时间宝贵,咨询技术问题有偿哦。 + +If you have any question about this repo,please prepare money,no free. + +# 学习资料 +- [Android 零基础开发相机](https://gitbook.cn/gitchat/activity/5aeb03e3af08a333483d71c1) +- [Android openGl开发详解(一) - 简单图形的基本绘制](https://www.jianshu.com/p/92d02ac80611) +- [Android openGl开发详解(二) - 通过SurfaceView,TextureView,GlSurfaceView显示相机预览(附演示)](https://www.jianshu.com/p/db8ecba6037a) +- [Android 自定义相机开发(三) —— 了解下EGL](https://www.jianshu.com/p/1e82021b10b4) + +# 欢迎关注公众号一起学习交流Android技术 +|公众号(aserbao)| +|--| +|![](https://github.com/aserbao/AserbaosAndroid/blob/master/app/src/main/assets/images/weixin.jpg) # 项目介绍 -项目是有空闲时间就完善,目前所有功能没有全做完,先留个坑! -有什么问题,可以私聊我! 
+项目目前功能有: +- 分段录制 RecorderActivity +- 多段视频合成 RecorderActivity +- 倒计时录制 RecorderActivity +- 删除回滚 RecorderActivity +- 添加滤镜 RecorderActivity +- 视频裁剪 LocalVideoActivity +- 视频方向横竖屏切换 LocalVideoActivity +- 视频旋转 LocalVideoActivity +- 视频帧处理 SelCoverTimeActivity +- 添加水印 VideoEditActivity +- 添加动态贴纸 VideoEditActivity +- 添加动态字幕 VideoEditActivity +- 文字转视频 PrimaryMediaCodecActivity + +## 整体功能点效果图: +![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/录制.gif)![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/摄像头切换.gif) +![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/倒计时.gif)![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/回删功能.gif) +![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/本地编辑.gif)![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/编辑界面.gif) +![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/选封面.gif)![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/添加贴纸.gif) +![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/添加字幕.gif)![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/保存到相册.gif) + + + +## 单个功能点: +单个点主要是包括MediaCodec,AudioRecord,MediaExtractor,MediaMuxer的使用,界面效果如下: + +![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/Mediacodec的基本用法.gif)![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/EncodeDecode.gif) +![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/ExtractDecode.gif)![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/DecodeEditEncode.gif) +![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/Mediacodec录制随音乐改变.gif)![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/MediaExtractor.gif) 
+![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/MediaMuxer.gif) + +## 打赏 +开源不易,若有帮助就打赏一下呗,打赏请备注ID。 + +|微信打赏|支付宝打赏| +|--|--| +|![](https://github.com/aserbao/AserbaosAndroid/blob/master/app/src/main/assets/images/%E5%BE%AE%E4%BF%A1%E6%89%93%E8%B5%8F.jpeg)|![](https://github.com/aserbao/AserbaosAndroid/blob/master/app/src/main/assets/images/%E6%94%AF%E4%BB%98%E5%AE%9D%E6%89%93%E8%B5%8F.jpeg)| + +### 打赏记录 +|备注|金额| +|--|--| +|贺利军|66.66¥| +|小个子|200¥| +|小Qiao|66.66¥| +|一路狂奔|20¥| +|Passerby「路人‘」不欢|66¥| +|有点小激动|20¥| +|Orange| 30¥| +|卡霾哈霾哈|66¥| +> 说明:大家挣钱不易,如果项目对你有所帮助再打赏。如果有遇到小问题有时间也会免费帮忙解决。 -![](https://github.com/aserbao/AndroidCamera/blob/master/app/src/main/assets/images/1.gif) -![](https://github.com/aserbao/AndroidCamera/blob/master/app/src/main/assets/images/%E9%80%89%E5%B0%81%E9%9D%A22.gif) -![](https://github.com/aserbao/AndroidCamera/blob/master/app/src/main/assets/images/%E7%BC%96%E8%BE%913.gif) -![](https://github.com/aserbao/AndroidCamera/blob/master/app/src/main/assets/images/%E6%9C%AC%E5%9C%B0%E7%BC%96%E8%BE%91.gif) +### Star History +[![Star History Chart](https://api.star-history.com/svg?repos=aserbao/AndroidCamera&type=Date)](https://star-history.com/#aserbao/AndroidCamera&Date) diff --git a/README_en.md b/README_en.md new file mode 100644 index 0000000..24214bd --- /dev/null +++ b/README_en.md @@ -0,0 +1,71 @@ + +# About the author +Mr. Zhu, English name aserbao! Engaged in Android development for many years, the technology is not high, and it is just right for work. There is a certain amount of research on video audio & video processing, hardcoding. Before Beipiao, now Shenpiao. The maintainer of the WeChat official account aserbao of the same name, likes reading, photography, making friends, and currently lives like this. Welcome to follow my official account and WeChat to learn and communicate together. + +Time is precious, and consulting technical issues is paid. 
+ +If you have any question about this repo,please prepare money,no free. + +# Learning materials +- [Android Zero Base Development Camera](https://gitbook.cn/gitchat/activity/5aeb03e3af08a333483d71c1) +- [Android openGl development details (a) - simple graphics basic drawing](https://www.jianshu.com/p/92d02ac80611) +- [Android openGl development details (2) - through SurfaceView, TextureView, GlSurfaceView display camera preview (with demo) ](https://www.jianshu.com/p/db8ecba6037a) +- [Android Custom Camera Development (3) - Learn about EGL](https://www.jianshu.com/p/1e82021b10b4) + +# Welcome to pay attention to the official account to learn and communicate Android technology together +|weChat official account(aserbao) +|--| +|![](https://github.com/aserbao/AserbaosAndroid/blob/master/app/src/main/assets/images/weixin.jpg)| + +# Project Functions Introduction +The current functions of the project are:: +- Segmented Record RecorderActivity +- Multi-video synthesis Record RecorderActivity +- Countdown Record RecorderActivity +- Remove Rollback Video RecorderActivity +- Add Filter RecorderActivity +- Video cutting LocalVideoActivity +- Video orientation Change LocalVideoActivity +- Video Rotation LocalVideoActivity +- Handle Video Frame SelCoverTimeActivity +- Add watermark for Video VideoEditActivity +- Add dynamic stickers for Video VideoEditActivity +- Add dynamic subtitles for Video VideoEditActivity +- Text to Video PrimaryMediaCodecActivity + +## Overall Functional Button Effect Diagram : +![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/录制.gif)![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/摄像头切换.gif) +![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/倒计时.gif)![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/回删功能.gif) 
+![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/本地编辑.gif)![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/编辑界面.gif) +![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/选封面.gif)![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/添加贴纸.gif) +![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/添加字幕.gif)![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/保存到相册.gif) + + + +## Single Functional Button: +The single point mainly includes the use of MediaCodec, AudioRecord, MediaExtractor, and MediaMuxer. The interface effect is as follows: + +![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/Mediacodec的基本用法.gif)![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/EncodeDecode.gif) +![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/ExtractDecode.gif)![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/DecodeEditEncode.gif) +![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/Mediacodec录制随音乐改变.gif)![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/MediaExtractor.gif) +![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/MediaMuxer.gif) + +## Tip +Open source is not easy, if there is help to reward it, reward please note ID. 
+ +|WeChat Tipping | Alipay Tipping| +|--|--| +|![](https://github.com/aserbao/AserbaosAndroid/blob/master/app/src/main/assets/images/%E5%BE%AE%E4%BF%A1%E6%89%93%E8%B5%8F.jpeg)|![](https://github.com/aserbao/AserbaosAndroid/blob/master/app/src/main/assets/images/%E6%94%AF%E4%BB%98%E5%AE%9D%E6%89%93%E8%B5%8F.jpeg)| + +### Tipping Record +| Remarks | Amount | +|--|--| +|贺利军|66.66¥| +|小个子|200¥| +|小Qiao|66.66¥| +|一路狂奔|20¥| +|Passerby「路人‘」不欢|66¥| +|有点小激动|20¥| +|Orange| 30¥| +|卡霾哈霾哈|66¥| +> Description: It is not easy for everyone to make money. If the project helps you, you will be rewarded. diff --git a/app/.DS_Store b/app/.DS_Store new file mode 100644 index 0000000..78dcfbd Binary files /dev/null and b/app/.DS_Store differ diff --git a/app/.gitignore b/app/.gitignore deleted file mode 100644 index 796b96d..0000000 --- a/app/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/build diff --git a/app/CMakeLists.txt b/app/CMakeLists.txt new file mode 100644 index 0000000..7df5dbe --- /dev/null +++ b/app/CMakeLists.txt @@ -0,0 +1,61 @@ +# For more information about using CMake with Android Studio, read the +# documentation: https://d.android.com/studio/projects/add-native-code.html + +# Sets the minimum version of CMake required to build the native library. + +cmake_minimum_required(VERSION 3.4.1) + +# Creates and names a library, sets it as either STATIC +# or SHARED, and provides the relative paths to its source code. +# You can define multiple libraries, and CMake builds them for you. +# Gradle automatically packages shared libraries with your APK. + + +# Searches for a specified prebuilt library and stores the path as a +# variable. Because CMake includes system libraries in the search path by +# default, you only need to specify the name of the public NDK library +# you want to add. CMake verifies that the library exists before +# completing its build. + +find_library( # Sets the name of the path variable. 
+ log-lib + + # Specifies the name of the NDK library that + # you want CMake to locate. + log ) + +# Specifies libraries CMake should link to your target library. You +# can link multiple libraries, such as libraries you define in this +# build script, prebuilt third-party libraries, or system libraries. + +set(distribution_DIR ${CMAKE_SOURCE_DIR}/libs) + +add_library( fmod + SHARED + IMPORTED ) + +set_target_properties( fmod + PROPERTIES IMPORTED_LOCATION + ${distribution_DIR}/${ANDROID_ABI}/libfmod.so ) + +add_library( fmodL + SHARED + IMPORTED ) + +set_target_properties( fmodL + PROPERTIES IMPORTED_LOCATION + ${distribution_DIR}/${ANDROID_ABI}/libfmodL.so ) + +add_library( # Sets the name of the library. + sound + + # Sets the library as a shared library. + SHARED + + # Provides a relative path to your source file(s). + src/main/cpp/sound.cpp) + +include_directories(src/main/cpp/inc) + +target_link_libraries( sound fmod fmodL + ${log-lib} ) diff --git a/app/build.gradle b/app/build.gradle index 0ff87b4..a156792 100644 --- a/app/build.gradle +++ b/app/build.gradle @@ -1,4 +1,6 @@ apply plugin: 'com.android.application' +apply plugin: 'kotlin-android-extensions' +apply plugin: 'kotlin-android' android { compileSdkVersion 27 buildToolsVersion '27.0.3' @@ -9,6 +11,13 @@ android { versionCode 1 versionName "1.0" testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner" + + externalNativeBuild { + cmake { + cppFlags " " + abiFilters 'armeabi-v7a' + } + } } buildTypes { release { @@ -16,16 +25,52 @@ android { proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' } } + + sourceSets.main { + jniLibs.srcDirs = ['libs'] + jni.srcDirs = [] + } + + externalNativeBuild { + cmake { + path "CMakeLists.txt" + } + } + + testOptions { + unitTests.all { + // All the usual Gradle options. 
+ jvmArgs '-XX:MaxPermSize=256m' + } + + unitTests.returnDefaultValues = true + } } dependencies { - implementation fileTree(dir: 'libs', include: ['*.jar']) + implementation fileTree(include: ['*.jar'], dir: 'libs') implementation 'com.android.support:appcompat-v7:27.1.1' implementation 'com.android.support.constraint:constraint-layout:1.0.2' testImplementation 'junit:junit:4.12' //=====================================UI implementation 'com.android.support:recyclerview-v7:27.1.1' + implementation 'com.android.support:cardview-v7:27.1.1' //======================================ButterKnife annotationProcessor 'com.jakewharton:butterknife-compiler:8.8.1' implementation 'com.jakewharton:butterknife:8.8.1' + implementation 'com.github.yangjie10930:EpMedia:v0.9.5' + implementation 'pub.devrel:easypermissions:0.3.0' + implementation 'com.github.bumptech.glide:glide:3.7.0' + implementation 'com.danikula:videocache:2.7.0' + //==================mp4parser==================== + implementation 'com.googlecode.mp4parser:isoparser:1.1.21' + implementation files('libs/fmod.jar') + implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version" + + + testCompile 'junit:junit:4.12' + testCompile "org.mockito:mockito-core:1.9.5" +} +repositories { + mavenCentral() } diff --git a/app/libs/armeabi-v7a/libfmod.so b/app/libs/armeabi-v7a/libfmod.so new file mode 100755 index 0000000..5c96ff1 Binary files /dev/null and b/app/libs/armeabi-v7a/libfmod.so differ diff --git a/app/libs/armeabi-v7a/libfmodL.so b/app/libs/armeabi-v7a/libfmodL.so new file mode 100755 index 0000000..280fd24 Binary files /dev/null and b/app/libs/armeabi-v7a/libfmodL.so differ diff --git a/app/libs/fmod.jar b/app/libs/fmod.jar new file mode 100755 index 0000000..30eb0e3 Binary files /dev/null and b/app/libs/fmod.jar differ diff --git a/app/src/androidTest/java/com/aserbao/androidcustomcamera/ExampleInstrumentedTest.java b/app/src/androidTest/java/com/aserbao/androidcustomcamera/ExampleInstrumentedTest.java 
deleted file mode 100644 index e02b4a3..0000000 --- a/app/src/androidTest/java/com/aserbao/androidcustomcamera/ExampleInstrumentedTest.java +++ /dev/null @@ -1,26 +0,0 @@ -package com.aserbao.androidcustomcamera; - -import android.content.Context; -import android.support.test.InstrumentationRegistry; -import android.support.test.runner.AndroidJUnit4; - -import org.junit.Test; -import org.junit.runner.RunWith; - -import static org.junit.Assert.*; - -/** - * Instrumented test, which will execute on an Android device. - * - * @see Testing documentation - */ -@RunWith(AndroidJUnit4.class) -public class ExampleInstrumentedTest { - @Test - public void useAppContext() throws Exception { - // Context of the app under test. - Context appContext = InstrumentationRegistry.getTargetContext(); - - assertEquals("com.aserbao.androidcustomcamera", appContext.getPackageName()); - } -} diff --git a/app/src/main/AndroidManifest.xml b/app/src/main/AndroidManifest.xml index 783c9b4..cd50b9f 100644 --- a/app/src/main/AndroidManifest.xml +++ b/app/src/main/AndroidManifest.xml @@ -1,45 +1,78 @@ + package="com.aserbao.androidcustomcamera"> - - - + + + + - + - + - + - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + - - - - + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/app/src/main/assets/five.mp3 b/app/src/main/assets/five.mp3 new file mode 100644 index 0000000..1535ff2 Binary files /dev/null and b/app/src/main/assets/five.mp3 differ diff --git a/app/src/main/assets/images/1.gif b/app/src/main/assets/images/1.gif deleted file mode 100644 index ce43717..0000000 Binary files a/app/src/main/assets/images/1.gif and /dev/null differ diff --git "a/app/src/main/assets/images/\346\234\254\345\234\260\347\274\226\350\276\221.gif" "b/app/src/main/assets/images/\346\234\254\345\234\260\347\274\226\350\276\221.gif" deleted file mode 100644 index ae2b999..0000000 Binary files 
"a/app/src/main/assets/images/\346\234\254\345\234\260\347\274\226\350\276\221.gif" and /dev/null differ diff --git "a/app/src/main/assets/images/\347\274\226\350\276\2213.gif" "b/app/src/main/assets/images/\347\274\226\350\276\2213.gif" deleted file mode 100644 index 6105fe1..0000000 Binary files "a/app/src/main/assets/images/\347\274\226\350\276\2213.gif" and /dev/null differ diff --git "a/app/src/main/assets/images/\351\200\211\345\260\201\351\235\2422.gif" "b/app/src/main/assets/images/\351\200\211\345\260\201\351\235\2422.gif" deleted file mode 100644 index 5601e62..0000000 Binary files "a/app/src/main/assets/images/\351\200\211\345\260\201\351\235\2422.gif" and /dev/null differ diff --git a/app/src/main/assets/league_legends.mp3 b/app/src/main/assets/league_legends.mp3 new file mode 100644 index 0000000..96c6f4f Binary files /dev/null and b/app/src/main/assets/league_legends.mp3 differ diff --git a/app/src/main/cpp/com_aserbao_androidcustomcamera_utils_VoiceUtils.h b/app/src/main/cpp/com_aserbao_androidcustomcamera_utils_VoiceUtils.h new file mode 100644 index 0000000..de9b207 --- /dev/null +++ b/app/src/main/cpp/com_aserbao_androidcustomcamera_utils_VoiceUtils.h @@ -0,0 +1,34 @@ +/* DO NOT EDIT THIS FILE - it is machine generated */ +#include +/* Header for class com_aserbao_androidcustomcamera_utils_VoiceUtils */ + +#ifndef _Included_com_aserbao_androidcustomcamera_utils_VoiceUtils +#define _Included_com_aserbao_androidcustomcamera_utils_VoiceUtils +#ifdef __cplusplus +extern "C" { +#endif +#undef com_aserbao_androidcustomcamera_utils_VoiceUtils_MODE_NORMAL +#define com_aserbao_androidcustomcamera_utils_VoiceUtils_MODE_NORMAL 0L +#undef com_aserbao_androidcustomcamera_utils_VoiceUtils_MODE_LUOLI +#define com_aserbao_androidcustomcamera_utils_VoiceUtils_MODE_LUOLI 1L +#undef com_aserbao_androidcustomcamera_utils_VoiceUtils_MODE_DASHU +#define com_aserbao_androidcustomcamera_utils_VoiceUtils_MODE_DASHU 2L +#undef 
com_aserbao_androidcustomcamera_utils_VoiceUtils_MODE_JINGSONG +#define com_aserbao_androidcustomcamera_utils_VoiceUtils_MODE_JINGSONG 3L +#undef com_aserbao_androidcustomcamera_utils_VoiceUtils_MODE_GAOGUAI +#define com_aserbao_androidcustomcamera_utils_VoiceUtils_MODE_GAOGUAI 4L +#undef com_aserbao_androidcustomcamera_utils_VoiceUtils_MODE_KONGLING +#define com_aserbao_androidcustomcamera_utils_VoiceUtils_MODE_KONGLING 5L + +/* + * Class: com_aserbao_androidcustomcamera_utils_VoiceUtils + * Method: fix + * Signature: (Ljava/lang/String;I)V + */ +JNIEXPORT void JNICALL Java_com_aserbao_androidcustomcamera_utils_VoiceUtils_fix + (JNIEnv *, jclass, jstring, jint); + +#ifdef __cplusplus +} +#endif +#endif diff --git a/app/src/main/cpp/inc/fmod.h b/app/src/main/cpp/inc/fmod.h new file mode 100644 index 0000000..f2d5c53 --- /dev/null +++ b/app/src/main/cpp/inc/fmod.h @@ -0,0 +1,718 @@ +/*$ preserve start $*/ + +/* ======================================================================================== */ +/* FMOD Studio Low Level API - C header file. */ +/* Copyright (c), Firelight Technologies Pty, Ltd. 2012-2017. */ +/* */ +/* Use this header in conjunction with fmod_common.h (which contains all the constants / */ +/* callbacks) to develop using C interface. */ +/* ======================================================================================== */ + +#ifndef _FMOD_H +#define _FMOD_H + +#include "fmod_common.h" + +/* ========================================================================================== */ +/* FUNCTION PROTOTYPES */ +/* ========================================================================================== */ + +#ifdef __cplusplus +extern "C" +{ +#endif + +/* + FMOD global system functions (optional). 
+*/ + +FMOD_RESULT F_API FMOD_Memory_Initialize (void *poolmem, int poollen, FMOD_MEMORY_ALLOC_CALLBACK useralloc, FMOD_MEMORY_REALLOC_CALLBACK userrealloc, FMOD_MEMORY_FREE_CALLBACK userfree, FMOD_MEMORY_TYPE memtypeflags); +FMOD_RESULT F_API FMOD_Memory_GetStats (int *currentalloced, int *maxalloced, FMOD_BOOL blocking); +FMOD_RESULT F_API FMOD_Debug_Initialize (FMOD_DEBUG_FLAGS flags, FMOD_DEBUG_MODE mode, FMOD_DEBUG_CALLBACK callback, const char *filename); +FMOD_RESULT F_API FMOD_File_SetDiskBusy (int busy); +FMOD_RESULT F_API FMOD_File_GetDiskBusy (int *busy); + +/* + FMOD System factory functions. Use this to create an FMOD System Instance. below you will see FMOD_System_Init/Close to get started. +*/ + +FMOD_RESULT F_API FMOD_System_Create (FMOD_SYSTEM **system); +FMOD_RESULT F_API FMOD_System_Release (FMOD_SYSTEM *system); + +/*$ preserve end $*/ + +/* + 'System' API +*/ + +/* + Setup functions. +*/ + +FMOD_RESULT F_API FMOD_System_SetOutput (FMOD_SYSTEM *system, FMOD_OUTPUTTYPE output); +FMOD_RESULT F_API FMOD_System_GetOutput (FMOD_SYSTEM *system, FMOD_OUTPUTTYPE *output); +FMOD_RESULT F_API FMOD_System_GetNumDrivers (FMOD_SYSTEM *system, int *numdrivers); +FMOD_RESULT F_API FMOD_System_GetDriverInfo (FMOD_SYSTEM *system, int id, char *name, int namelen, FMOD_GUID *guid, int *systemrate, FMOD_SPEAKERMODE *speakermode, int *speakermodechannels); +FMOD_RESULT F_API FMOD_System_SetDriver (FMOD_SYSTEM *system, int driver); +FMOD_RESULT F_API FMOD_System_GetDriver (FMOD_SYSTEM *system, int *driver); +FMOD_RESULT F_API FMOD_System_SetSoftwareChannels (FMOD_SYSTEM *system, int numsoftwarechannels); +FMOD_RESULT F_API FMOD_System_GetSoftwareChannels (FMOD_SYSTEM *system, int *numsoftwarechannels); +FMOD_RESULT F_API FMOD_System_SetSoftwareFormat (FMOD_SYSTEM *system, int samplerate, FMOD_SPEAKERMODE speakermode, int numrawspeakers); +FMOD_RESULT F_API FMOD_System_GetSoftwareFormat (FMOD_SYSTEM *system, int *samplerate, FMOD_SPEAKERMODE *speakermode, int 
*numrawspeakers); +FMOD_RESULT F_API FMOD_System_SetDSPBufferSize (FMOD_SYSTEM *system, unsigned int bufferlength, int numbuffers); +FMOD_RESULT F_API FMOD_System_GetDSPBufferSize (FMOD_SYSTEM *system, unsigned int *bufferlength, int *numbuffers); +FMOD_RESULT F_API FMOD_System_SetFileSystem (FMOD_SYSTEM *system, FMOD_FILE_OPEN_CALLBACK useropen, FMOD_FILE_CLOSE_CALLBACK userclose, FMOD_FILE_READ_CALLBACK userread, FMOD_FILE_SEEK_CALLBACK userseek, FMOD_FILE_ASYNCREAD_CALLBACK userasyncread, FMOD_FILE_ASYNCCANCEL_CALLBACK userasynccancel, int blockalign); +FMOD_RESULT F_API FMOD_System_AttachFileSystem (FMOD_SYSTEM *system, FMOD_FILE_OPEN_CALLBACK useropen, FMOD_FILE_CLOSE_CALLBACK userclose, FMOD_FILE_READ_CALLBACK userread, FMOD_FILE_SEEK_CALLBACK userseek); +FMOD_RESULT F_API FMOD_System_SetAdvancedSettings (FMOD_SYSTEM *system, FMOD_ADVANCEDSETTINGS *settings); +FMOD_RESULT F_API FMOD_System_GetAdvancedSettings (FMOD_SYSTEM *system, FMOD_ADVANCEDSETTINGS *settings); +FMOD_RESULT F_API FMOD_System_SetCallback (FMOD_SYSTEM *system, FMOD_SYSTEM_CALLBACK callback, FMOD_SYSTEM_CALLBACK_TYPE callbackmask); + +/* + Plug-in support. 
+*/ + +FMOD_RESULT F_API FMOD_System_SetPluginPath (FMOD_SYSTEM *system, const char *path); +FMOD_RESULT F_API FMOD_System_LoadPlugin (FMOD_SYSTEM *system, const char *filename, unsigned int *handle, unsigned int priority); +FMOD_RESULT F_API FMOD_System_UnloadPlugin (FMOD_SYSTEM *system, unsigned int handle); +FMOD_RESULT F_API FMOD_System_GetNumNestedPlugins (FMOD_SYSTEM *system, unsigned int handle, int *count); +FMOD_RESULT F_API FMOD_System_GetNestedPlugin (FMOD_SYSTEM *system, unsigned int handle, int index, unsigned int *nestedhandle); +FMOD_RESULT F_API FMOD_System_GetNumPlugins (FMOD_SYSTEM *system, FMOD_PLUGINTYPE plugintype, int *numplugins); +FMOD_RESULT F_API FMOD_System_GetPluginHandle (FMOD_SYSTEM *system, FMOD_PLUGINTYPE plugintype, int index, unsigned int *handle); +FMOD_RESULT F_API FMOD_System_GetPluginInfo (FMOD_SYSTEM *system, unsigned int handle, FMOD_PLUGINTYPE *plugintype, char *name, int namelen, unsigned int *version); +FMOD_RESULT F_API FMOD_System_SetOutputByPlugin (FMOD_SYSTEM *system, unsigned int handle); +FMOD_RESULT F_API FMOD_System_GetOutputByPlugin (FMOD_SYSTEM *system, unsigned int *handle); +FMOD_RESULT F_API FMOD_System_CreateDSPByPlugin (FMOD_SYSTEM *system, unsigned int handle, FMOD_DSP **dsp); +FMOD_RESULT F_API FMOD_System_GetDSPInfoByPlugin (FMOD_SYSTEM *system, unsigned int handle, const FMOD_DSP_DESCRIPTION **description); +FMOD_RESULT F_API FMOD_System_RegisterCodec (FMOD_SYSTEM *system, FMOD_CODEC_DESCRIPTION *description, unsigned int *handle, unsigned int priority); +FMOD_RESULT F_API FMOD_System_RegisterDSP (FMOD_SYSTEM *system, const FMOD_DSP_DESCRIPTION *description, unsigned int *handle); +FMOD_RESULT F_API FMOD_System_RegisterOutput (FMOD_SYSTEM *system, const FMOD_OUTPUT_DESCRIPTION *description, unsigned int *handle); + +/* + Init/Close. 
+*/ + +FMOD_RESULT F_API FMOD_System_Init (FMOD_SYSTEM *system, int maxchannels, FMOD_INITFLAGS flags, void *extradriverdata); +FMOD_RESULT F_API FMOD_System_Close (FMOD_SYSTEM *system); + +/* + General post-init system functions. +*/ + +FMOD_RESULT F_API FMOD_System_Update (FMOD_SYSTEM *system); + +FMOD_RESULT F_API FMOD_System_SetSpeakerPosition (FMOD_SYSTEM *system, FMOD_SPEAKER speaker, float x, float y, FMOD_BOOL active); +FMOD_RESULT F_API FMOD_System_GetSpeakerPosition (FMOD_SYSTEM *system, FMOD_SPEAKER speaker, float *x, float *y, FMOD_BOOL *active); +FMOD_RESULT F_API FMOD_System_SetStreamBufferSize (FMOD_SYSTEM *system, unsigned int filebuffersize, FMOD_TIMEUNIT filebuffersizetype); +FMOD_RESULT F_API FMOD_System_GetStreamBufferSize (FMOD_SYSTEM *system, unsigned int *filebuffersize, FMOD_TIMEUNIT *filebuffersizetype); +FMOD_RESULT F_API FMOD_System_Set3DSettings (FMOD_SYSTEM *system, float dopplerscale, float distancefactor, float rolloffscale); +FMOD_RESULT F_API FMOD_System_Get3DSettings (FMOD_SYSTEM *system, float *dopplerscale, float *distancefactor, float *rolloffscale); +FMOD_RESULT F_API FMOD_System_Set3DNumListeners (FMOD_SYSTEM *system, int numlisteners); +FMOD_RESULT F_API FMOD_System_Get3DNumListeners (FMOD_SYSTEM *system, int *numlisteners); +FMOD_RESULT F_API FMOD_System_Set3DListenerAttributes (FMOD_SYSTEM *system, int listener, const FMOD_VECTOR *pos, const FMOD_VECTOR *vel, const FMOD_VECTOR *forward, const FMOD_VECTOR *up); +FMOD_RESULT F_API FMOD_System_Get3DListenerAttributes (FMOD_SYSTEM *system, int listener, FMOD_VECTOR *pos, FMOD_VECTOR *vel, FMOD_VECTOR *forward, FMOD_VECTOR *up); +FMOD_RESULT F_API FMOD_System_Set3DRolloffCallback (FMOD_SYSTEM *system, FMOD_3D_ROLLOFF_CALLBACK callback); +FMOD_RESULT F_API FMOD_System_MixerSuspend (FMOD_SYSTEM *system); +FMOD_RESULT F_API FMOD_System_MixerResume (FMOD_SYSTEM *system); +FMOD_RESULT F_API FMOD_System_GetDefaultMixMatrix (FMOD_SYSTEM *system, FMOD_SPEAKERMODE sourcespeakermode, 
FMOD_SPEAKERMODE targetspeakermode, float *matrix, int matrixhop); +FMOD_RESULT F_API FMOD_System_GetSpeakerModeChannels (FMOD_SYSTEM *system, FMOD_SPEAKERMODE mode, int *channels); + +/* + System information functions. +*/ + +FMOD_RESULT F_API FMOD_System_GetVersion (FMOD_SYSTEM *system, unsigned int *version); +FMOD_RESULT F_API FMOD_System_GetOutputHandle (FMOD_SYSTEM *system, void **handle); +FMOD_RESULT F_API FMOD_System_GetChannelsPlaying (FMOD_SYSTEM *system, int *channels, int *realchannels); +FMOD_RESULT F_API FMOD_System_GetCPUUsage (FMOD_SYSTEM *system, float *dsp, float *stream, float *geometry, float *update, float *total); +FMOD_RESULT F_API FMOD_System_GetFileUsage (FMOD_SYSTEM *system, long long *sampleBytesRead, long long *streamBytesRead, long long *otherBytesRead); +FMOD_RESULT F_API FMOD_System_GetSoundRAM (FMOD_SYSTEM *system, int *currentalloced, int *maxalloced, int *total); + +/* + Sound/DSP/Channel/FX creation and retrieval. +*/ + +FMOD_RESULT F_API FMOD_System_CreateSound (FMOD_SYSTEM *system, const char *name_or_data, FMOD_MODE mode, FMOD_CREATESOUNDEXINFO *exinfo, FMOD_SOUND **sound); +FMOD_RESULT F_API FMOD_System_CreateStream (FMOD_SYSTEM *system, const char *name_or_data, FMOD_MODE mode, FMOD_CREATESOUNDEXINFO *exinfo, FMOD_SOUND **sound); +FMOD_RESULT F_API FMOD_System_CreateDSP (FMOD_SYSTEM *system, const FMOD_DSP_DESCRIPTION *description, FMOD_DSP **dsp); +FMOD_RESULT F_API FMOD_System_CreateDSPByType (FMOD_SYSTEM *system, FMOD_DSP_TYPE type, FMOD_DSP **dsp); +FMOD_RESULT F_API FMOD_System_CreateChannelGroup (FMOD_SYSTEM *system, const char *name, FMOD_CHANNELGROUP **channelgroup); +FMOD_RESULT F_API FMOD_System_CreateSoundGroup (FMOD_SYSTEM *system, const char *name, FMOD_SOUNDGROUP **soundgroup); +FMOD_RESULT F_API FMOD_System_CreateReverb3D (FMOD_SYSTEM *system, FMOD_REVERB3D **reverb); + +FMOD_RESULT F_API FMOD_System_PlaySound (FMOD_SYSTEM *system, FMOD_SOUND *sound, FMOD_CHANNELGROUP *channelgroup, FMOD_BOOL paused, 
FMOD_CHANNEL **channel); +FMOD_RESULT F_API FMOD_System_PlayDSP (FMOD_SYSTEM *system, FMOD_DSP *dsp, FMOD_CHANNELGROUP *channelgroup, FMOD_BOOL paused, FMOD_CHANNEL **channel); +FMOD_RESULT F_API FMOD_System_GetChannel (FMOD_SYSTEM *system, int channelid, FMOD_CHANNEL **channel); +FMOD_RESULT F_API FMOD_System_GetMasterChannelGroup (FMOD_SYSTEM *system, FMOD_CHANNELGROUP **channelgroup); +FMOD_RESULT F_API FMOD_System_GetMasterSoundGroup (FMOD_SYSTEM *system, FMOD_SOUNDGROUP **soundgroup); + +/* + Routing to ports. +*/ + +FMOD_RESULT F_API FMOD_System_AttachChannelGroupToPort (FMOD_SYSTEM *system, FMOD_PORT_TYPE portType, FMOD_PORT_INDEX portIndex, FMOD_CHANNELGROUP *channelgroup, FMOD_BOOL passThru); +FMOD_RESULT F_API FMOD_System_DetachChannelGroupFromPort(FMOD_SYSTEM *system, FMOD_CHANNELGROUP *channelgroup); + +/* + Reverb API. +*/ + +FMOD_RESULT F_API FMOD_System_SetReverbProperties (FMOD_SYSTEM *system, int instance, const FMOD_REVERB_PROPERTIES *prop); +FMOD_RESULT F_API FMOD_System_GetReverbProperties (FMOD_SYSTEM *system, int instance, FMOD_REVERB_PROPERTIES *prop); + +/* + System level DSP functionality. +*/ + +FMOD_RESULT F_API FMOD_System_LockDSP (FMOD_SYSTEM *system); +FMOD_RESULT F_API FMOD_System_UnlockDSP (FMOD_SYSTEM *system); + +/* + Recording API. 
+*/ + +FMOD_RESULT F_API FMOD_System_GetRecordNumDrivers (FMOD_SYSTEM *system, int *numdrivers, int *numconnected); +FMOD_RESULT F_API FMOD_System_GetRecordDriverInfo (FMOD_SYSTEM *system, int id, char *name, int namelen, FMOD_GUID *guid, int *systemrate, FMOD_SPEAKERMODE *speakermode, int *speakermodechannels, FMOD_DRIVER_STATE *state); +FMOD_RESULT F_API FMOD_System_GetRecordPosition (FMOD_SYSTEM *system, int id, unsigned int *position); +FMOD_RESULT F_API FMOD_System_RecordStart (FMOD_SYSTEM *system, int id, FMOD_SOUND *sound, FMOD_BOOL loop); +FMOD_RESULT F_API FMOD_System_RecordStop (FMOD_SYSTEM *system, int id); +FMOD_RESULT F_API FMOD_System_IsRecording (FMOD_SYSTEM *system, int id, FMOD_BOOL *recording); + +/* + Geometry API. +*/ + +FMOD_RESULT F_API FMOD_System_CreateGeometry (FMOD_SYSTEM *system, int maxpolygons, int maxvertices, FMOD_GEOMETRY **geometry); +FMOD_RESULT F_API FMOD_System_SetGeometrySettings (FMOD_SYSTEM *system, float maxworldsize); +FMOD_RESULT F_API FMOD_System_GetGeometrySettings (FMOD_SYSTEM *system, float *maxworldsize); +FMOD_RESULT F_API FMOD_System_LoadGeometry (FMOD_SYSTEM *system, const void *data, int datasize, FMOD_GEOMETRY **geometry); +FMOD_RESULT F_API FMOD_System_GetGeometryOcclusion (FMOD_SYSTEM *system, const FMOD_VECTOR *listener, const FMOD_VECTOR *source, float *direct, float *reverb); + +/* + Network functions. +*/ + +FMOD_RESULT F_API FMOD_System_SetNetworkProxy (FMOD_SYSTEM *system, const char *proxy); +FMOD_RESULT F_API FMOD_System_GetNetworkProxy (FMOD_SYSTEM *system, char *proxy, int proxylen); +FMOD_RESULT F_API FMOD_System_SetNetworkTimeout (FMOD_SYSTEM *system, int timeout); +FMOD_RESULT F_API FMOD_System_GetNetworkTimeout (FMOD_SYSTEM *system, int *timeout); + +/* + Userdata set/get. 
+*/ + +FMOD_RESULT F_API FMOD_System_SetUserData (FMOD_SYSTEM *system, void *userdata); +FMOD_RESULT F_API FMOD_System_GetUserData (FMOD_SYSTEM *system, void **userdata); + +/* + 'Sound' API +*/ + +FMOD_RESULT F_API FMOD_Sound_Release (FMOD_SOUND *sound); +FMOD_RESULT F_API FMOD_Sound_GetSystemObject (FMOD_SOUND *sound, FMOD_SYSTEM **system); + +/* + Standard sound manipulation functions. +*/ + +FMOD_RESULT F_API FMOD_Sound_Lock (FMOD_SOUND *sound, unsigned int offset, unsigned int length, void **ptr1, void **ptr2, unsigned int *len1, unsigned int *len2); +FMOD_RESULT F_API FMOD_Sound_Unlock (FMOD_SOUND *sound, void *ptr1, void *ptr2, unsigned int len1, unsigned int len2); +FMOD_RESULT F_API FMOD_Sound_SetDefaults (FMOD_SOUND *sound, float frequency, int priority); +FMOD_RESULT F_API FMOD_Sound_GetDefaults (FMOD_SOUND *sound, float *frequency, int *priority); +FMOD_RESULT F_API FMOD_Sound_Set3DMinMaxDistance (FMOD_SOUND *sound, float min, float max); +FMOD_RESULT F_API FMOD_Sound_Get3DMinMaxDistance (FMOD_SOUND *sound, float *min, float *max); +FMOD_RESULT F_API FMOD_Sound_Set3DConeSettings (FMOD_SOUND *sound, float insideconeangle, float outsideconeangle, float outsidevolume); +FMOD_RESULT F_API FMOD_Sound_Get3DConeSettings (FMOD_SOUND *sound, float *insideconeangle, float *outsideconeangle, float *outsidevolume); +FMOD_RESULT F_API FMOD_Sound_Set3DCustomRolloff (FMOD_SOUND *sound, FMOD_VECTOR *points, int numpoints); +FMOD_RESULT F_API FMOD_Sound_Get3DCustomRolloff (FMOD_SOUND *sound, FMOD_VECTOR **points, int *numpoints); +FMOD_RESULT F_API FMOD_Sound_GetSubSound (FMOD_SOUND *sound, int index, FMOD_SOUND **subsound); +FMOD_RESULT F_API FMOD_Sound_GetSubSoundParent (FMOD_SOUND *sound, FMOD_SOUND **parentsound); +FMOD_RESULT F_API FMOD_Sound_GetName (FMOD_SOUND *sound, char *name, int namelen); +FMOD_RESULT F_API FMOD_Sound_GetLength (FMOD_SOUND *sound, unsigned int *length, FMOD_TIMEUNIT lengthtype); +FMOD_RESULT F_API FMOD_Sound_GetFormat (FMOD_SOUND *sound, 
FMOD_SOUND_TYPE *type, FMOD_SOUND_FORMAT *format, int *channels, int *bits); +FMOD_RESULT F_API FMOD_Sound_GetNumSubSounds (FMOD_SOUND *sound, int *numsubsounds); +FMOD_RESULT F_API FMOD_Sound_GetNumTags (FMOD_SOUND *sound, int *numtags, int *numtagsupdated); +FMOD_RESULT F_API FMOD_Sound_GetTag (FMOD_SOUND *sound, const char *name, int index, FMOD_TAG *tag); +FMOD_RESULT F_API FMOD_Sound_GetOpenState (FMOD_SOUND *sound, FMOD_OPENSTATE *openstate, unsigned int *percentbuffered, FMOD_BOOL *starving, FMOD_BOOL *diskbusy); +FMOD_RESULT F_API FMOD_Sound_ReadData (FMOD_SOUND *sound, void *buffer, unsigned int length, unsigned int *read); +FMOD_RESULT F_API FMOD_Sound_SeekData (FMOD_SOUND *sound, unsigned int pcm); + +FMOD_RESULT F_API FMOD_Sound_SetSoundGroup (FMOD_SOUND *sound, FMOD_SOUNDGROUP *soundgroup); +FMOD_RESULT F_API FMOD_Sound_GetSoundGroup (FMOD_SOUND *sound, FMOD_SOUNDGROUP **soundgroup); + +/* + Synchronization point API. These points can come from markers embedded in wav files, and can also generate channel callbacks. +*/ + +FMOD_RESULT F_API FMOD_Sound_GetNumSyncPoints (FMOD_SOUND *sound, int *numsyncpoints); +FMOD_RESULT F_API FMOD_Sound_GetSyncPoint (FMOD_SOUND *sound, int index, FMOD_SYNCPOINT **point); +FMOD_RESULT F_API FMOD_Sound_GetSyncPointInfo (FMOD_SOUND *sound, FMOD_SYNCPOINT *point, char *name, int namelen, unsigned int *offset, FMOD_TIMEUNIT offsettype); +FMOD_RESULT F_API FMOD_Sound_AddSyncPoint (FMOD_SOUND *sound, unsigned int offset, FMOD_TIMEUNIT offsettype, const char *name, FMOD_SYNCPOINT **point); +FMOD_RESULT F_API FMOD_Sound_DeleteSyncPoint (FMOD_SOUND *sound, FMOD_SYNCPOINT *point); + +/* + Functions also in Channel class but here they are the 'default' to save having to change it in Channel all the time. 
+*/ + +FMOD_RESULT F_API FMOD_Sound_SetMode (FMOD_SOUND *sound, FMOD_MODE mode); +FMOD_RESULT F_API FMOD_Sound_GetMode (FMOD_SOUND *sound, FMOD_MODE *mode); +FMOD_RESULT F_API FMOD_Sound_SetLoopCount (FMOD_SOUND *sound, int loopcount); +FMOD_RESULT F_API FMOD_Sound_GetLoopCount (FMOD_SOUND *sound, int *loopcount); +FMOD_RESULT F_API FMOD_Sound_SetLoopPoints (FMOD_SOUND *sound, unsigned int loopstart, FMOD_TIMEUNIT loopstarttype, unsigned int loopend, FMOD_TIMEUNIT loopendtype); +FMOD_RESULT F_API FMOD_Sound_GetLoopPoints (FMOD_SOUND *sound, unsigned int *loopstart, FMOD_TIMEUNIT loopstarttype, unsigned int *loopend, FMOD_TIMEUNIT loopendtype); + +/* + For MOD/S3M/XM/IT/MID sequenced formats only. +*/ + +FMOD_RESULT F_API FMOD_Sound_GetMusicNumChannels (FMOD_SOUND *sound, int *numchannels); +FMOD_RESULT F_API FMOD_Sound_SetMusicChannelVolume (FMOD_SOUND *sound, int channel, float volume); +FMOD_RESULT F_API FMOD_Sound_GetMusicChannelVolume (FMOD_SOUND *sound, int channel, float *volume); +FMOD_RESULT F_API FMOD_Sound_SetMusicSpeed (FMOD_SOUND *sound, float speed); +FMOD_RESULT F_API FMOD_Sound_GetMusicSpeed (FMOD_SOUND *sound, float *speed); + +/* + Userdata set/get. +*/ + +FMOD_RESULT F_API FMOD_Sound_SetUserData (FMOD_SOUND *sound, void *userdata); +FMOD_RESULT F_API FMOD_Sound_GetUserData (FMOD_SOUND *sound, void **userdata); + +/* + 'Channel' API +*/ + +FMOD_RESULT F_API FMOD_Channel_GetSystemObject (FMOD_CHANNEL *channel, FMOD_SYSTEM **system); + +/* + General control functionality for Channels and ChannelGroups. 
+*/ + +FMOD_RESULT F_API FMOD_Channel_Stop (FMOD_CHANNEL *channel); +FMOD_RESULT F_API FMOD_Channel_SetPaused (FMOD_CHANNEL *channel, FMOD_BOOL paused); +FMOD_RESULT F_API FMOD_Channel_GetPaused (FMOD_CHANNEL *channel, FMOD_BOOL *paused); +FMOD_RESULT F_API FMOD_Channel_SetVolume (FMOD_CHANNEL *channel, float volume); +FMOD_RESULT F_API FMOD_Channel_GetVolume (FMOD_CHANNEL *channel, float *volume); +FMOD_RESULT F_API FMOD_Channel_SetVolumeRamp (FMOD_CHANNEL *channel, FMOD_BOOL ramp); +FMOD_RESULT F_API FMOD_Channel_GetVolumeRamp (FMOD_CHANNEL *channel, FMOD_BOOL *ramp); +FMOD_RESULT F_API FMOD_Channel_GetAudibility (FMOD_CHANNEL *channel, float *audibility); +FMOD_RESULT F_API FMOD_Channel_SetPitch (FMOD_CHANNEL *channel, float pitch); +FMOD_RESULT F_API FMOD_Channel_GetPitch (FMOD_CHANNEL *channel, float *pitch); +FMOD_RESULT F_API FMOD_Channel_SetMute (FMOD_CHANNEL *channel, FMOD_BOOL mute); +FMOD_RESULT F_API FMOD_Channel_GetMute (FMOD_CHANNEL *channel, FMOD_BOOL *mute); +FMOD_RESULT F_API FMOD_Channel_SetReverbProperties (FMOD_CHANNEL *channel, int instance, float wet); +FMOD_RESULT F_API FMOD_Channel_GetReverbProperties (FMOD_CHANNEL *channel, int instance, float *wet); +FMOD_RESULT F_API FMOD_Channel_SetLowPassGain (FMOD_CHANNEL *channel, float gain); +FMOD_RESULT F_API FMOD_Channel_GetLowPassGain (FMOD_CHANNEL *channel, float *gain); +FMOD_RESULT F_API FMOD_Channel_SetMode (FMOD_CHANNEL *channel, FMOD_MODE mode); +FMOD_RESULT F_API FMOD_Channel_GetMode (FMOD_CHANNEL *channel, FMOD_MODE *mode); +FMOD_RESULT F_API FMOD_Channel_SetCallback (FMOD_CHANNEL *channel, FMOD_CHANNELCONTROL_CALLBACK callback); +FMOD_RESULT F_API FMOD_Channel_IsPlaying (FMOD_CHANNEL *channel, FMOD_BOOL *isplaying); + +/* + Note all 'set' functions alter a final matrix, this is why the only get function is getMixMatrix, to avoid other get functions returning incorrect/obsolete values. 
+*/ + +FMOD_RESULT F_API FMOD_Channel_SetPan (FMOD_CHANNEL *channel, float pan); +FMOD_RESULT F_API FMOD_Channel_SetMixLevelsOutput (FMOD_CHANNEL *channel, float frontleft, float frontright, float center, float lfe, float surroundleft, float surroundright, float backleft, float backright); +FMOD_RESULT F_API FMOD_Channel_SetMixLevelsInput (FMOD_CHANNEL *channel, float *levels, int numlevels); +FMOD_RESULT F_API FMOD_Channel_SetMixMatrix (FMOD_CHANNEL *channel, float *matrix, int outchannels, int inchannels, int inchannel_hop); +FMOD_RESULT F_API FMOD_Channel_GetMixMatrix (FMOD_CHANNEL *channel, float *matrix, int *outchannels, int *inchannels, int inchannel_hop); + +/* + Clock based functionality. +*/ + +FMOD_RESULT F_API FMOD_Channel_GetDSPClock (FMOD_CHANNEL *channel, unsigned long long *dspclock, unsigned long long *parentclock); +FMOD_RESULT F_API FMOD_Channel_SetDelay (FMOD_CHANNEL *channel, unsigned long long dspclock_start, unsigned long long dspclock_end, FMOD_BOOL stopchannels); +FMOD_RESULT F_API FMOD_Channel_GetDelay (FMOD_CHANNEL *channel, unsigned long long *dspclock_start, unsigned long long *dspclock_end, FMOD_BOOL *stopchannels); +FMOD_RESULT F_API FMOD_Channel_AddFadePoint (FMOD_CHANNEL *channel, unsigned long long dspclock, float volume); +FMOD_RESULT F_API FMOD_Channel_SetFadePointRamp (FMOD_CHANNEL *channel, unsigned long long dspclock, float volume); +FMOD_RESULT F_API FMOD_Channel_RemoveFadePoints (FMOD_CHANNEL *channel, unsigned long long dspclock_start, unsigned long long dspclock_end); +FMOD_RESULT F_API FMOD_Channel_GetFadePoints (FMOD_CHANNEL *channel, unsigned int *numpoints, unsigned long long *point_dspclock, float *point_volume); + +/* + DSP effects. 
+*/ + +FMOD_RESULT F_API FMOD_Channel_GetDSP (FMOD_CHANNEL *channel, int index, FMOD_DSP **dsp); +FMOD_RESULT F_API FMOD_Channel_AddDSP (FMOD_CHANNEL *channel, int index, FMOD_DSP *dsp); +FMOD_RESULT F_API FMOD_Channel_RemoveDSP (FMOD_CHANNEL *channel, FMOD_DSP *dsp); +FMOD_RESULT F_API FMOD_Channel_GetNumDSPs (FMOD_CHANNEL *channel, int *numdsps); +FMOD_RESULT F_API FMOD_Channel_SetDSPIndex (FMOD_CHANNEL *channel, FMOD_DSP *dsp, int index); +FMOD_RESULT F_API FMOD_Channel_GetDSPIndex (FMOD_CHANNEL *channel, FMOD_DSP *dsp, int *index); + +/* + 3D functionality. +*/ + +FMOD_RESULT F_API FMOD_Channel_Set3DAttributes (FMOD_CHANNEL *channel, const FMOD_VECTOR *pos, const FMOD_VECTOR *vel, const FMOD_VECTOR *alt_pan_pos); +FMOD_RESULT F_API FMOD_Channel_Get3DAttributes (FMOD_CHANNEL *channel, FMOD_VECTOR *pos, FMOD_VECTOR *vel, FMOD_VECTOR *alt_pan_pos); +FMOD_RESULT F_API FMOD_Channel_Set3DMinMaxDistance (FMOD_CHANNEL *channel, float mindistance, float maxdistance); +FMOD_RESULT F_API FMOD_Channel_Get3DMinMaxDistance (FMOD_CHANNEL *channel, float *mindistance, float *maxdistance); +FMOD_RESULT F_API FMOD_Channel_Set3DConeSettings (FMOD_CHANNEL *channel, float insideconeangle, float outsideconeangle, float outsidevolume); +FMOD_RESULT F_API FMOD_Channel_Get3DConeSettings (FMOD_CHANNEL *channel, float *insideconeangle, float *outsideconeangle, float *outsidevolume); +FMOD_RESULT F_API FMOD_Channel_Set3DConeOrientation (FMOD_CHANNEL *channel, FMOD_VECTOR *orientation); +FMOD_RESULT F_API FMOD_Channel_Get3DConeOrientation (FMOD_CHANNEL *channel, FMOD_VECTOR *orientation); +FMOD_RESULT F_API FMOD_Channel_Set3DCustomRolloff (FMOD_CHANNEL *channel, FMOD_VECTOR *points, int numpoints); +FMOD_RESULT F_API FMOD_Channel_Get3DCustomRolloff (FMOD_CHANNEL *channel, FMOD_VECTOR **points, int *numpoints); +FMOD_RESULT F_API FMOD_Channel_Set3DOcclusion (FMOD_CHANNEL *channel, float directocclusion, float reverbocclusion); +FMOD_RESULT F_API FMOD_Channel_Get3DOcclusion (FMOD_CHANNEL 
*channel, float *directocclusion, float *reverbocclusion); +FMOD_RESULT F_API FMOD_Channel_Set3DSpread (FMOD_CHANNEL *channel, float angle); +FMOD_RESULT F_API FMOD_Channel_Get3DSpread (FMOD_CHANNEL *channel, float *angle); +FMOD_RESULT F_API FMOD_Channel_Set3DLevel (FMOD_CHANNEL *channel, float level); +FMOD_RESULT F_API FMOD_Channel_Get3DLevel (FMOD_CHANNEL *channel, float *level); +FMOD_RESULT F_API FMOD_Channel_Set3DDopplerLevel (FMOD_CHANNEL *channel, float level); +FMOD_RESULT F_API FMOD_Channel_Get3DDopplerLevel (FMOD_CHANNEL *channel, float *level); +FMOD_RESULT F_API FMOD_Channel_Set3DDistanceFilter (FMOD_CHANNEL *channel, FMOD_BOOL custom, float customLevel, float centerFreq); +FMOD_RESULT F_API FMOD_Channel_Get3DDistanceFilter (FMOD_CHANNEL *channel, FMOD_BOOL *custom, float *customLevel, float *centerFreq); + +/* + Userdata set/get. +*/ + +FMOD_RESULT F_API FMOD_Channel_SetUserData (FMOD_CHANNEL *channel, void *userdata); +FMOD_RESULT F_API FMOD_Channel_GetUserData (FMOD_CHANNEL *channel, void **userdata); + +/* + Channel specific control functionality. 
+*/ + +FMOD_RESULT F_API FMOD_Channel_SetFrequency (FMOD_CHANNEL *channel, float frequency); +FMOD_RESULT F_API FMOD_Channel_GetFrequency (FMOD_CHANNEL *channel, float *frequency); +FMOD_RESULT F_API FMOD_Channel_SetPriority (FMOD_CHANNEL *channel, int priority); +FMOD_RESULT F_API FMOD_Channel_GetPriority (FMOD_CHANNEL *channel, int *priority); +FMOD_RESULT F_API FMOD_Channel_SetPosition (FMOD_CHANNEL *channel, unsigned int position, FMOD_TIMEUNIT postype); +FMOD_RESULT F_API FMOD_Channel_GetPosition (FMOD_CHANNEL *channel, unsigned int *position, FMOD_TIMEUNIT postype); +FMOD_RESULT F_API FMOD_Channel_SetChannelGroup (FMOD_CHANNEL *channel, FMOD_CHANNELGROUP *channelgroup); +FMOD_RESULT F_API FMOD_Channel_GetChannelGroup (FMOD_CHANNEL *channel, FMOD_CHANNELGROUP **channelgroup); +FMOD_RESULT F_API FMOD_Channel_SetLoopCount (FMOD_CHANNEL *channel, int loopcount); +FMOD_RESULT F_API FMOD_Channel_GetLoopCount (FMOD_CHANNEL *channel, int *loopcount); +FMOD_RESULT F_API FMOD_Channel_SetLoopPoints (FMOD_CHANNEL *channel, unsigned int loopstart, FMOD_TIMEUNIT loopstarttype, unsigned int loopend, FMOD_TIMEUNIT loopendtype); +FMOD_RESULT F_API FMOD_Channel_GetLoopPoints (FMOD_CHANNEL *channel, unsigned int *loopstart, FMOD_TIMEUNIT loopstarttype, unsigned int *loopend, FMOD_TIMEUNIT loopendtype); + +/* + Information only functions. +*/ + +FMOD_RESULT F_API FMOD_Channel_IsVirtual (FMOD_CHANNEL *channel, FMOD_BOOL *isvirtual); +FMOD_RESULT F_API FMOD_Channel_GetCurrentSound (FMOD_CHANNEL *channel, FMOD_SOUND **sound); +FMOD_RESULT F_API FMOD_Channel_GetIndex (FMOD_CHANNEL *channel, int *index); + +/* + 'ChannelGroup' API +*/ + +FMOD_RESULT F_API FMOD_ChannelGroup_GetSystemObject (FMOD_CHANNELGROUP *channelgroup, FMOD_SYSTEM **system); + +/* + General control functionality for Channels and ChannelGroups. 
+*/ + +FMOD_RESULT F_API FMOD_ChannelGroup_Stop (FMOD_CHANNELGROUP *channelgroup); +FMOD_RESULT F_API FMOD_ChannelGroup_SetPaused (FMOD_CHANNELGROUP *channelgroup, FMOD_BOOL paused); +FMOD_RESULT F_API FMOD_ChannelGroup_GetPaused (FMOD_CHANNELGROUP *channelgroup, FMOD_BOOL *paused); +FMOD_RESULT F_API FMOD_ChannelGroup_SetVolume (FMOD_CHANNELGROUP *channelgroup, float volume); +FMOD_RESULT F_API FMOD_ChannelGroup_GetVolume (FMOD_CHANNELGROUP *channelgroup, float *volume); +FMOD_RESULT F_API FMOD_ChannelGroup_SetVolumeRamp (FMOD_CHANNELGROUP *channelgroup, FMOD_BOOL ramp); +FMOD_RESULT F_API FMOD_ChannelGroup_GetVolumeRamp (FMOD_CHANNELGROUP *channelgroup, FMOD_BOOL *ramp); +FMOD_RESULT F_API FMOD_ChannelGroup_GetAudibility (FMOD_CHANNELGROUP *channelgroup, float *audibility); +FMOD_RESULT F_API FMOD_ChannelGroup_SetPitch (FMOD_CHANNELGROUP *channelgroup, float pitch); +FMOD_RESULT F_API FMOD_ChannelGroup_GetPitch (FMOD_CHANNELGROUP *channelgroup, float *pitch); +FMOD_RESULT F_API FMOD_ChannelGroup_SetMute (FMOD_CHANNELGROUP *channelgroup, FMOD_BOOL mute); +FMOD_RESULT F_API FMOD_ChannelGroup_GetMute (FMOD_CHANNELGROUP *channelgroup, FMOD_BOOL *mute); +FMOD_RESULT F_API FMOD_ChannelGroup_SetReverbProperties (FMOD_CHANNELGROUP *channelgroup, int instance, float wet); +FMOD_RESULT F_API FMOD_ChannelGroup_GetReverbProperties (FMOD_CHANNELGROUP *channelgroup, int instance, float *wet); +FMOD_RESULT F_API FMOD_ChannelGroup_SetLowPassGain (FMOD_CHANNELGROUP *channelgroup, float gain); +FMOD_RESULT F_API FMOD_ChannelGroup_GetLowPassGain (FMOD_CHANNELGROUP *channelgroup, float *gain); +FMOD_RESULT F_API FMOD_ChannelGroup_SetMode (FMOD_CHANNELGROUP *channelgroup, FMOD_MODE mode); +FMOD_RESULT F_API FMOD_ChannelGroup_GetMode (FMOD_CHANNELGROUP *channelgroup, FMOD_MODE *mode); +FMOD_RESULT F_API FMOD_ChannelGroup_SetCallback (FMOD_CHANNELGROUP *channelgroup, FMOD_CHANNELCONTROL_CALLBACK callback); +FMOD_RESULT F_API FMOD_ChannelGroup_IsPlaying (FMOD_CHANNELGROUP *channelgroup, 
FMOD_BOOL *isplaying); + +/* + Note all 'set' functions alter a final matrix, this is why the only get function is getMixMatrix, to avoid other get functions returning incorrect/obsolete values. +*/ + +FMOD_RESULT F_API FMOD_ChannelGroup_SetPan (FMOD_CHANNELGROUP *channelgroup, float pan); +FMOD_RESULT F_API FMOD_ChannelGroup_SetMixLevelsOutput (FMOD_CHANNELGROUP *channelgroup, float frontleft, float frontright, float center, float lfe, float surroundleft, float surroundright, float backleft, float backright); +FMOD_RESULT F_API FMOD_ChannelGroup_SetMixLevelsInput (FMOD_CHANNELGROUP *channelgroup, float *levels, int numlevels); +FMOD_RESULT F_API FMOD_ChannelGroup_SetMixMatrix (FMOD_CHANNELGROUP *channelgroup, float *matrix, int outchannels, int inchannels, int inchannel_hop); +FMOD_RESULT F_API FMOD_ChannelGroup_GetMixMatrix (FMOD_CHANNELGROUP *channelgroup, float *matrix, int *outchannels, int *inchannels, int inchannel_hop); + +/* + Clock based functionality. +*/ + +FMOD_RESULT F_API FMOD_ChannelGroup_GetDSPClock (FMOD_CHANNELGROUP *channelgroup, unsigned long long *dspclock, unsigned long long *parentclock); +FMOD_RESULT F_API FMOD_ChannelGroup_SetDelay (FMOD_CHANNELGROUP *channelgroup, unsigned long long dspclock_start, unsigned long long dspclock_end, FMOD_BOOL stopchannels); +FMOD_RESULT F_API FMOD_ChannelGroup_GetDelay (FMOD_CHANNELGROUP *channelgroup, unsigned long long *dspclock_start, unsigned long long *dspclock_end, FMOD_BOOL *stopchannels); +FMOD_RESULT F_API FMOD_ChannelGroup_AddFadePoint (FMOD_CHANNELGROUP *channelgroup, unsigned long long dspclock, float volume); +FMOD_RESULT F_API FMOD_ChannelGroup_SetFadePointRamp (FMOD_CHANNELGROUP *channelgroup, unsigned long long dspclock, float volume); +FMOD_RESULT F_API FMOD_ChannelGroup_RemoveFadePoints (FMOD_CHANNELGROUP *channelgroup, unsigned long long dspclock_start, unsigned long long dspclock_end); +FMOD_RESULT F_API FMOD_ChannelGroup_GetFadePoints (FMOD_CHANNELGROUP *channelgroup, unsigned int 
*numpoints, unsigned long long *point_dspclock, float *point_volume); + +/* + DSP effects. +*/ + +FMOD_RESULT F_API FMOD_ChannelGroup_GetDSP (FMOD_CHANNELGROUP *channelgroup, int index, FMOD_DSP **dsp); +FMOD_RESULT F_API FMOD_ChannelGroup_AddDSP (FMOD_CHANNELGROUP *channelgroup, int index, FMOD_DSP *dsp); +FMOD_RESULT F_API FMOD_ChannelGroup_RemoveDSP (FMOD_CHANNELGROUP *channelgroup, FMOD_DSP *dsp); +FMOD_RESULT F_API FMOD_ChannelGroup_GetNumDSPs (FMOD_CHANNELGROUP *channelgroup, int *numdsps); +FMOD_RESULT F_API FMOD_ChannelGroup_SetDSPIndex (FMOD_CHANNELGROUP *channelgroup, FMOD_DSP *dsp, int index); +FMOD_RESULT F_API FMOD_ChannelGroup_GetDSPIndex (FMOD_CHANNELGROUP *channelgroup, FMOD_DSP *dsp, int *index); + +/* + 3D functionality. +*/ + +FMOD_RESULT F_API FMOD_ChannelGroup_Set3DAttributes (FMOD_CHANNELGROUP *channelgroup, const FMOD_VECTOR *pos, const FMOD_VECTOR *vel, const FMOD_VECTOR *alt_pan_pos); +FMOD_RESULT F_API FMOD_ChannelGroup_Get3DAttributes (FMOD_CHANNELGROUP *channelgroup, FMOD_VECTOR *pos, FMOD_VECTOR *vel, FMOD_VECTOR *alt_pan_pos); +FMOD_RESULT F_API FMOD_ChannelGroup_Set3DMinMaxDistance (FMOD_CHANNELGROUP *channelgroup, float mindistance, float maxdistance); +FMOD_RESULT F_API FMOD_ChannelGroup_Get3DMinMaxDistance (FMOD_CHANNELGROUP *channelgroup, float *mindistance, float *maxdistance); +FMOD_RESULT F_API FMOD_ChannelGroup_Set3DConeSettings (FMOD_CHANNELGROUP *channelgroup, float insideconeangle, float outsideconeangle, float outsidevolume); +FMOD_RESULT F_API FMOD_ChannelGroup_Get3DConeSettings (FMOD_CHANNELGROUP *channelgroup, float *insideconeangle, float *outsideconeangle, float *outsidevolume); +FMOD_RESULT F_API FMOD_ChannelGroup_Set3DConeOrientation(FMOD_CHANNELGROUP *channelgroup, FMOD_VECTOR *orientation); +FMOD_RESULT F_API FMOD_ChannelGroup_Get3DConeOrientation(FMOD_CHANNELGROUP *channelgroup, FMOD_VECTOR *orientation); +FMOD_RESULT F_API FMOD_ChannelGroup_Set3DCustomRolloff (FMOD_CHANNELGROUP *channelgroup, FMOD_VECTOR 
*points, int numpoints); +FMOD_RESULT F_API FMOD_ChannelGroup_Get3DCustomRolloff (FMOD_CHANNELGROUP *channelgroup, FMOD_VECTOR **points, int *numpoints); +FMOD_RESULT F_API FMOD_ChannelGroup_Set3DOcclusion (FMOD_CHANNELGROUP *channelgroup, float directocclusion, float reverbocclusion); +FMOD_RESULT F_API FMOD_ChannelGroup_Get3DOcclusion (FMOD_CHANNELGROUP *channelgroup, float *directocclusion, float *reverbocclusion); +FMOD_RESULT F_API FMOD_ChannelGroup_Set3DSpread (FMOD_CHANNELGROUP *channelgroup, float angle); +FMOD_RESULT F_API FMOD_ChannelGroup_Get3DSpread (FMOD_CHANNELGROUP *channelgroup, float *angle); +FMOD_RESULT F_API FMOD_ChannelGroup_Set3DLevel (FMOD_CHANNELGROUP *channelgroup, float level); +FMOD_RESULT F_API FMOD_ChannelGroup_Get3DLevel (FMOD_CHANNELGROUP *channelgroup, float *level); +FMOD_RESULT F_API FMOD_ChannelGroup_Set3DDopplerLevel (FMOD_CHANNELGROUP *channelgroup, float level); +FMOD_RESULT F_API FMOD_ChannelGroup_Get3DDopplerLevel (FMOD_CHANNELGROUP *channelgroup, float *level); +FMOD_RESULT F_API FMOD_ChannelGroup_Set3DDistanceFilter (FMOD_CHANNELGROUP *channelgroup, FMOD_BOOL custom, float customLevel, float centerFreq); +FMOD_RESULT F_API FMOD_ChannelGroup_Get3DDistanceFilter (FMOD_CHANNELGROUP *channelgroup, FMOD_BOOL *custom, float *customLevel, float *centerFreq); + +/* + Userdata set/get. +*/ + +FMOD_RESULT F_API FMOD_ChannelGroup_SetUserData (FMOD_CHANNELGROUP *channelgroup, void *userdata); +FMOD_RESULT F_API FMOD_ChannelGroup_GetUserData (FMOD_CHANNELGROUP *channelgroup, void **userdata); + +FMOD_RESULT F_API FMOD_ChannelGroup_Release (FMOD_CHANNELGROUP *channelgroup); + +/* + Nested channel groups. 
+*/ + +FMOD_RESULT F_API FMOD_ChannelGroup_AddGroup (FMOD_CHANNELGROUP *channelgroup, FMOD_CHANNELGROUP *group, FMOD_BOOL propagatedspclock, FMOD_DSPCONNECTION **connection); +FMOD_RESULT F_API FMOD_ChannelGroup_GetNumGroups (FMOD_CHANNELGROUP *channelgroup, int *numgroups); +FMOD_RESULT F_API FMOD_ChannelGroup_GetGroup (FMOD_CHANNELGROUP *channelgroup, int index, FMOD_CHANNELGROUP **group); +FMOD_RESULT F_API FMOD_ChannelGroup_GetParentGroup (FMOD_CHANNELGROUP *channelgroup, FMOD_CHANNELGROUP **group); + +/* + Information only functions. +*/ + +FMOD_RESULT F_API FMOD_ChannelGroup_GetName (FMOD_CHANNELGROUP *channelgroup, char *name, int namelen); +FMOD_RESULT F_API FMOD_ChannelGroup_GetNumChannels (FMOD_CHANNELGROUP *channelgroup, int *numchannels); +FMOD_RESULT F_API FMOD_ChannelGroup_GetChannel (FMOD_CHANNELGROUP *channelgroup, int index, FMOD_CHANNEL **channel); + +/* + 'SoundGroup' API +*/ + +FMOD_RESULT F_API FMOD_SoundGroup_Release (FMOD_SOUNDGROUP *soundgroup); +FMOD_RESULT F_API FMOD_SoundGroup_GetSystemObject (FMOD_SOUNDGROUP *soundgroup, FMOD_SYSTEM **system); + +/* + SoundGroup control functions. 
+*/ + +FMOD_RESULT F_API FMOD_SoundGroup_SetMaxAudible (FMOD_SOUNDGROUP *soundgroup, int maxaudible); +FMOD_RESULT F_API FMOD_SoundGroup_GetMaxAudible (FMOD_SOUNDGROUP *soundgroup, int *maxaudible); +FMOD_RESULT F_API FMOD_SoundGroup_SetMaxAudibleBehavior (FMOD_SOUNDGROUP *soundgroup, FMOD_SOUNDGROUP_BEHAVIOR behavior); +FMOD_RESULT F_API FMOD_SoundGroup_GetMaxAudibleBehavior (FMOD_SOUNDGROUP *soundgroup, FMOD_SOUNDGROUP_BEHAVIOR *behavior); +FMOD_RESULT F_API FMOD_SoundGroup_SetMuteFadeSpeed (FMOD_SOUNDGROUP *soundgroup, float speed); +FMOD_RESULT F_API FMOD_SoundGroup_GetMuteFadeSpeed (FMOD_SOUNDGROUP *soundgroup, float *speed); +FMOD_RESULT F_API FMOD_SoundGroup_SetVolume (FMOD_SOUNDGROUP *soundgroup, float volume); +FMOD_RESULT F_API FMOD_SoundGroup_GetVolume (FMOD_SOUNDGROUP *soundgroup, float *volume); +FMOD_RESULT F_API FMOD_SoundGroup_Stop (FMOD_SOUNDGROUP *soundgroup); + +/* + Information only functions. +*/ + +FMOD_RESULT F_API FMOD_SoundGroup_GetName (FMOD_SOUNDGROUP *soundgroup, char *name, int namelen); +FMOD_RESULT F_API FMOD_SoundGroup_GetNumSounds (FMOD_SOUNDGROUP *soundgroup, int *numsounds); +FMOD_RESULT F_API FMOD_SoundGroup_GetSound (FMOD_SOUNDGROUP *soundgroup, int index, FMOD_SOUND **sound); +FMOD_RESULT F_API FMOD_SoundGroup_GetNumPlaying (FMOD_SOUNDGROUP *soundgroup, int *numplaying); + +/* + Userdata set/get. +*/ + +FMOD_RESULT F_API FMOD_SoundGroup_SetUserData (FMOD_SOUNDGROUP *soundgroup, void *userdata); +FMOD_RESULT F_API FMOD_SoundGroup_GetUserData (FMOD_SOUNDGROUP *soundgroup, void **userdata); + +/* + 'DSP' API +*/ + +FMOD_RESULT F_API FMOD_DSP_Release (FMOD_DSP *dsp); +FMOD_RESULT F_API FMOD_DSP_GetSystemObject (FMOD_DSP *dsp, FMOD_SYSTEM **system); + +/* + Connection / disconnection / input and output enumeration. 
+*/ + +FMOD_RESULT F_API FMOD_DSP_AddInput (FMOD_DSP *dsp, FMOD_DSP *input, FMOD_DSPCONNECTION **connection, FMOD_DSPCONNECTION_TYPE type); +FMOD_RESULT F_API FMOD_DSP_DisconnectFrom (FMOD_DSP *dsp, FMOD_DSP *target, FMOD_DSPCONNECTION *connection); +FMOD_RESULT F_API FMOD_DSP_DisconnectAll (FMOD_DSP *dsp, FMOD_BOOL inputs, FMOD_BOOL outputs); +FMOD_RESULT F_API FMOD_DSP_GetNumInputs (FMOD_DSP *dsp, int *numinputs); +FMOD_RESULT F_API FMOD_DSP_GetNumOutputs (FMOD_DSP *dsp, int *numoutputs); +FMOD_RESULT F_API FMOD_DSP_GetInput (FMOD_DSP *dsp, int index, FMOD_DSP **input, FMOD_DSPCONNECTION **inputconnection); +FMOD_RESULT F_API FMOD_DSP_GetOutput (FMOD_DSP *dsp, int index, FMOD_DSP **output, FMOD_DSPCONNECTION **outputconnection); + +/* + DSP unit control. +*/ + +FMOD_RESULT F_API FMOD_DSP_SetActive (FMOD_DSP *dsp, FMOD_BOOL active); +FMOD_RESULT F_API FMOD_DSP_GetActive (FMOD_DSP *dsp, FMOD_BOOL *active); +FMOD_RESULT F_API FMOD_DSP_SetBypass (FMOD_DSP *dsp, FMOD_BOOL bypass); +FMOD_RESULT F_API FMOD_DSP_GetBypass (FMOD_DSP *dsp, FMOD_BOOL *bypass); +FMOD_RESULT F_API FMOD_DSP_SetWetDryMix (FMOD_DSP *dsp, float prewet, float postwet, float dry); +FMOD_RESULT F_API FMOD_DSP_GetWetDryMix (FMOD_DSP *dsp, float *prewet, float *postwet, float *dry); +FMOD_RESULT F_API FMOD_DSP_SetChannelFormat (FMOD_DSP *dsp, FMOD_CHANNELMASK channelmask, int numchannels, FMOD_SPEAKERMODE source_speakermode); +FMOD_RESULT F_API FMOD_DSP_GetChannelFormat (FMOD_DSP *dsp, FMOD_CHANNELMASK *channelmask, int *numchannels, FMOD_SPEAKERMODE *source_speakermode); +FMOD_RESULT F_API FMOD_DSP_GetOutputChannelFormat (FMOD_DSP *dsp, FMOD_CHANNELMASK inmask, int inchannels, FMOD_SPEAKERMODE inspeakermode, FMOD_CHANNELMASK *outmask, int *outchannels, FMOD_SPEAKERMODE *outspeakermode); +FMOD_RESULT F_API FMOD_DSP_Reset (FMOD_DSP *dsp); + +/* + DSP parameter control. 
+*/ + +FMOD_RESULT F_API FMOD_DSP_SetParameterFloat (FMOD_DSP *dsp, int index, float value); +FMOD_RESULT F_API FMOD_DSP_SetParameterInt (FMOD_DSP *dsp, int index, int value); +FMOD_RESULT F_API FMOD_DSP_SetParameterBool (FMOD_DSP *dsp, int index, FMOD_BOOL value); +FMOD_RESULT F_API FMOD_DSP_SetParameterData (FMOD_DSP *dsp, int index, void *data, unsigned int length); +FMOD_RESULT F_API FMOD_DSP_GetParameterFloat (FMOD_DSP *dsp, int index, float *value, char *valuestr, int valuestrlen); +FMOD_RESULT F_API FMOD_DSP_GetParameterInt (FMOD_DSP *dsp, int index, int *value, char *valuestr, int valuestrlen); +FMOD_RESULT F_API FMOD_DSP_GetParameterBool (FMOD_DSP *dsp, int index, FMOD_BOOL *value, char *valuestr, int valuestrlen); +FMOD_RESULT F_API FMOD_DSP_GetParameterData (FMOD_DSP *dsp, int index, void **data, unsigned int *length, char *valuestr, int valuestrlen); +FMOD_RESULT F_API FMOD_DSP_GetNumParameters (FMOD_DSP *dsp, int *numparams); +FMOD_RESULT F_API FMOD_DSP_GetParameterInfo (FMOD_DSP *dsp, int index, FMOD_DSP_PARAMETER_DESC **desc); +FMOD_RESULT F_API FMOD_DSP_GetDataParameterIndex (FMOD_DSP *dsp, int datatype, int *index); +FMOD_RESULT F_API FMOD_DSP_ShowConfigDialog (FMOD_DSP *dsp, void *hwnd, FMOD_BOOL show); + +/* + DSP attributes. +*/ + +FMOD_RESULT F_API FMOD_DSP_GetInfo (FMOD_DSP *dsp, char *name, unsigned int *version, int *channels, int *configwidth, int *configheight); +FMOD_RESULT F_API FMOD_DSP_GetType (FMOD_DSP *dsp, FMOD_DSP_TYPE *type); +FMOD_RESULT F_API FMOD_DSP_GetIdle (FMOD_DSP *dsp, FMOD_BOOL *idle); + +/* + Userdata set/get. +*/ + +FMOD_RESULT F_API FMOD_DSP_SetUserData (FMOD_DSP *dsp, void *userdata); +FMOD_RESULT F_API FMOD_DSP_GetUserData (FMOD_DSP *dsp, void **userdata); + +/* + Metering. 
+*/ + +FMOD_RESULT F_API FMOD_DSP_SetMeteringEnabled (FMOD_DSP *dsp, FMOD_BOOL inputEnabled, FMOD_BOOL outputEnabled); +FMOD_RESULT F_API FMOD_DSP_GetMeteringEnabled (FMOD_DSP *dsp, FMOD_BOOL *inputEnabled, FMOD_BOOL *outputEnabled); +FMOD_RESULT F_API FMOD_DSP_GetMeteringInfo (FMOD_DSP *dsp, FMOD_DSP_METERING_INFO *inputInfo, FMOD_DSP_METERING_INFO *outputInfo); + +/* + 'DSPConnection' API +*/ + +FMOD_RESULT F_API FMOD_DSPConnection_GetInput (FMOD_DSPCONNECTION *dspconnection, FMOD_DSP **input); +FMOD_RESULT F_API FMOD_DSPConnection_GetOutput (FMOD_DSPCONNECTION *dspconnection, FMOD_DSP **output); +FMOD_RESULT F_API FMOD_DSPConnection_SetMix (FMOD_DSPCONNECTION *dspconnection, float volume); +FMOD_RESULT F_API FMOD_DSPConnection_GetMix (FMOD_DSPCONNECTION *dspconnection, float *volume); +FMOD_RESULT F_API FMOD_DSPConnection_SetMixMatrix (FMOD_DSPCONNECTION *dspconnection, float *matrix, int outchannels, int inchannels, int inchannel_hop); +FMOD_RESULT F_API FMOD_DSPConnection_GetMixMatrix (FMOD_DSPCONNECTION *dspconnection, float *matrix, int *outchannels, int *inchannels, int inchannel_hop); +FMOD_RESULT F_API FMOD_DSPConnection_GetType (FMOD_DSPCONNECTION *dspconnection, FMOD_DSPCONNECTION_TYPE *type); + +/* + Userdata set/get. +*/ + +FMOD_RESULT F_API FMOD_DSPConnection_SetUserData (FMOD_DSPCONNECTION *dspconnection, void *userdata); +FMOD_RESULT F_API FMOD_DSPConnection_GetUserData (FMOD_DSPCONNECTION *dspconnection, void **userdata); + +/* + 'Geometry' API +*/ + +FMOD_RESULT F_API FMOD_Geometry_Release (FMOD_GEOMETRY *geometry); + +/* + Polygon manipulation. 
+*/ + +FMOD_RESULT F_API FMOD_Geometry_AddPolygon (FMOD_GEOMETRY *geometry, float directocclusion, float reverbocclusion, FMOD_BOOL doublesided, int numvertices, const FMOD_VECTOR *vertices, int *polygonindex); +FMOD_RESULT F_API FMOD_Geometry_GetNumPolygons (FMOD_GEOMETRY *geometry, int *numpolygons); +FMOD_RESULT F_API FMOD_Geometry_GetMaxPolygons (FMOD_GEOMETRY *geometry, int *maxpolygons, int *maxvertices); +FMOD_RESULT F_API FMOD_Geometry_GetPolygonNumVertices (FMOD_GEOMETRY *geometry, int index, int *numvertices); +FMOD_RESULT F_API FMOD_Geometry_SetPolygonVertex (FMOD_GEOMETRY *geometry, int index, int vertexindex, const FMOD_VECTOR *vertex); +FMOD_RESULT F_API FMOD_Geometry_GetPolygonVertex (FMOD_GEOMETRY *geometry, int index, int vertexindex, FMOD_VECTOR *vertex); +FMOD_RESULT F_API FMOD_Geometry_SetPolygonAttributes (FMOD_GEOMETRY *geometry, int index, float directocclusion, float reverbocclusion, FMOD_BOOL doublesided); +FMOD_RESULT F_API FMOD_Geometry_GetPolygonAttributes (FMOD_GEOMETRY *geometry, int index, float *directocclusion, float *reverbocclusion, FMOD_BOOL *doublesided); + +/* + Object manipulation. 
+*/ + +FMOD_RESULT F_API FMOD_Geometry_SetActive (FMOD_GEOMETRY *geometry, FMOD_BOOL active); +FMOD_RESULT F_API FMOD_Geometry_GetActive (FMOD_GEOMETRY *geometry, FMOD_BOOL *active); +FMOD_RESULT F_API FMOD_Geometry_SetRotation (FMOD_GEOMETRY *geometry, const FMOD_VECTOR *forward, const FMOD_VECTOR *up); +FMOD_RESULT F_API FMOD_Geometry_GetRotation (FMOD_GEOMETRY *geometry, FMOD_VECTOR *forward, FMOD_VECTOR *up); +FMOD_RESULT F_API FMOD_Geometry_SetPosition (FMOD_GEOMETRY *geometry, const FMOD_VECTOR *position); +FMOD_RESULT F_API FMOD_Geometry_GetPosition (FMOD_GEOMETRY *geometry, FMOD_VECTOR *position); +FMOD_RESULT F_API FMOD_Geometry_SetScale (FMOD_GEOMETRY *geometry, const FMOD_VECTOR *scale); +FMOD_RESULT F_API FMOD_Geometry_GetScale (FMOD_GEOMETRY *geometry, FMOD_VECTOR *scale); +FMOD_RESULT F_API FMOD_Geometry_Save (FMOD_GEOMETRY *geometry, void *data, int *datasize); + +/* + Userdata set/get. +*/ + +FMOD_RESULT F_API FMOD_Geometry_SetUserData (FMOD_GEOMETRY *geometry, void *userdata); +FMOD_RESULT F_API FMOD_Geometry_GetUserData (FMOD_GEOMETRY *geometry, void **userdata); + +/* + 'Reverb3D' API +*/ + +FMOD_RESULT F_API FMOD_Reverb3D_Release (FMOD_REVERB3D *reverb3d); + +/* + Reverb manipulation. +*/ + +FMOD_RESULT F_API FMOD_Reverb3D_Set3DAttributes (FMOD_REVERB3D *reverb3d, const FMOD_VECTOR *position, float mindistance, float maxdistance); +FMOD_RESULT F_API FMOD_Reverb3D_Get3DAttributes (FMOD_REVERB3D *reverb3d, FMOD_VECTOR *position, float *mindistance, float *maxdistance); +FMOD_RESULT F_API FMOD_Reverb3D_SetProperties (FMOD_REVERB3D *reverb3d, const FMOD_REVERB_PROPERTIES *properties); +FMOD_RESULT F_API FMOD_Reverb3D_GetProperties (FMOD_REVERB3D *reverb3d, FMOD_REVERB_PROPERTIES *properties); +FMOD_RESULT F_API FMOD_Reverb3D_SetActive (FMOD_REVERB3D *reverb3d, FMOD_BOOL active); +FMOD_RESULT F_API FMOD_Reverb3D_GetActive (FMOD_REVERB3D *reverb3d, FMOD_BOOL *active); + +/* + Userdata set/get. 
+*/ + +FMOD_RESULT F_API FMOD_Reverb3D_SetUserData (FMOD_REVERB3D *reverb3d, void *userdata); +FMOD_RESULT F_API FMOD_Reverb3D_GetUserData (FMOD_REVERB3D *reverb3d, void **userdata); + +/*$ preserve start $*/ + +#ifdef __cplusplus +} +#endif + +#endif /* _FMOD_H */ + +/*$ preserve end $*/ diff --git a/app/src/main/cpp/inc/fmod.hpp b/app/src/main/cpp/inc/fmod.hpp new file mode 100644 index 0000000..4134507 --- /dev/null +++ b/app/src/main/cpp/inc/fmod.hpp @@ -0,0 +1,605 @@ +/* ========================================================================================== */ +/* FMOD Studio - C++ header file. Copyright (c), Firelight Technologies Pty, Ltd. 2004-2017. */ +/* */ +/* Use this header in conjunction with fmod_common.h (which contains all the constants / */ +/* callbacks) to develop using C++ classes. */ +/* ========================================================================================== */ + +#ifndef _FMOD_HPP +#define _FMOD_HPP + +#include "fmod_common.h" +#include "fmod.h" + +/* + Constant and defines +*/ + +/* + FMOD Namespace +*/ +namespace FMOD +{ + class System; + class Sound; + class ChannelControl; + class Channel; + class ChannelGroup; + class SoundGroup; + class DSP; + class DSPConnection; + class Geometry; + class Reverb3D; + + /* + FMOD global system functions (optional). 
+ */ + inline FMOD_RESULT Memory_Initialize (void *poolmem, int poollen, FMOD_MEMORY_ALLOC_CALLBACK useralloc, FMOD_MEMORY_REALLOC_CALLBACK userrealloc, FMOD_MEMORY_FREE_CALLBACK userfree, FMOD_MEMORY_TYPE memtypeflags = FMOD_MEMORY_ALL) { return FMOD_Memory_Initialize(poolmem, poollen, useralloc, userrealloc, userfree, memtypeflags); } + inline FMOD_RESULT Memory_GetStats (int *currentalloced, int *maxalloced, bool blocking = true) { return FMOD_Memory_GetStats(currentalloced, maxalloced, blocking); } + inline FMOD_RESULT Debug_Initialize (FMOD_DEBUG_FLAGS flags, FMOD_DEBUG_MODE mode = FMOD_DEBUG_MODE_TTY, FMOD_DEBUG_CALLBACK callback = 0, const char *filename = 0) { return FMOD_Debug_Initialize(flags, mode, callback, filename); } + inline FMOD_RESULT File_SetDiskBusy (int busy) { return FMOD_File_SetDiskBusy(busy); } + inline FMOD_RESULT File_GetDiskBusy (int *busy) { return FMOD_File_GetDiskBusy(busy); } + + /* + FMOD System factory functions. + */ + inline FMOD_RESULT System_Create (System **system) { return FMOD_System_Create((FMOD_SYSTEM **)system); } + + /* + 'System' API + */ + class System + { + private: + + // Constructor made private so user cannot statically instance a System class. System_Create must be used. + System(); + System(const System &); + + public: + + FMOD_RESULT F_API release (); + + // Setup functions. 
+ FMOD_RESULT F_API setOutput (FMOD_OUTPUTTYPE output); + FMOD_RESULT F_API getOutput (FMOD_OUTPUTTYPE *output); + FMOD_RESULT F_API getNumDrivers (int *numdrivers); + FMOD_RESULT F_API getDriverInfo (int id, char *name, int namelen, FMOD_GUID *guid, int *systemrate, FMOD_SPEAKERMODE *speakermode, int *speakermodechannels); + FMOD_RESULT F_API setDriver (int driver); + FMOD_RESULT F_API getDriver (int *driver); + FMOD_RESULT F_API setSoftwareChannels (int numsoftwarechannels); + FMOD_RESULT F_API getSoftwareChannels (int *numsoftwarechannels); + FMOD_RESULT F_API setSoftwareFormat (int samplerate, FMOD_SPEAKERMODE speakermode, int numrawspeakers); + FMOD_RESULT F_API getSoftwareFormat (int *samplerate, FMOD_SPEAKERMODE *speakermode, int *numrawspeakers); + FMOD_RESULT F_API setDSPBufferSize (unsigned int bufferlength, int numbuffers); + FMOD_RESULT F_API getDSPBufferSize (unsigned int *bufferlength, int *numbuffers); + FMOD_RESULT F_API setFileSystem (FMOD_FILE_OPEN_CALLBACK useropen, FMOD_FILE_CLOSE_CALLBACK userclose, FMOD_FILE_READ_CALLBACK userread, FMOD_FILE_SEEK_CALLBACK userseek, FMOD_FILE_ASYNCREAD_CALLBACK userasyncread, FMOD_FILE_ASYNCCANCEL_CALLBACK userasynccancel, int blockalign); + FMOD_RESULT F_API attachFileSystem (FMOD_FILE_OPEN_CALLBACK useropen, FMOD_FILE_CLOSE_CALLBACK userclose, FMOD_FILE_READ_CALLBACK userread, FMOD_FILE_SEEK_CALLBACK userseek); + FMOD_RESULT F_API setAdvancedSettings (FMOD_ADVANCEDSETTINGS *settings); + FMOD_RESULT F_API getAdvancedSettings (FMOD_ADVANCEDSETTINGS *settings); + FMOD_RESULT F_API setCallback (FMOD_SYSTEM_CALLBACK callback, FMOD_SYSTEM_CALLBACK_TYPE callbackmask = FMOD_SYSTEM_CALLBACK_ALL); + + // Plug-in support. 
+ FMOD_RESULT F_API setPluginPath (const char *path); + FMOD_RESULT F_API loadPlugin (const char *filename, unsigned int *handle, unsigned int priority = 0); + FMOD_RESULT F_API unloadPlugin (unsigned int handle); + FMOD_RESULT F_API getNumNestedPlugins (unsigned int handle, int *count); + FMOD_RESULT F_API getNestedPlugin (unsigned int handle, int index, unsigned int *nestedhandle); + FMOD_RESULT F_API getNumPlugins (FMOD_PLUGINTYPE plugintype, int *numplugins); + FMOD_RESULT F_API getPluginHandle (FMOD_PLUGINTYPE plugintype, int index, unsigned int *handle); + FMOD_RESULT F_API getPluginInfo (unsigned int handle, FMOD_PLUGINTYPE *plugintype, char *name, int namelen, unsigned int *version); + FMOD_RESULT F_API setOutputByPlugin (unsigned int handle); + FMOD_RESULT F_API getOutputByPlugin (unsigned int *handle); + FMOD_RESULT F_API createDSPByPlugin (unsigned int handle, DSP **dsp); + FMOD_RESULT F_API getDSPInfoByPlugin (unsigned int handle, const FMOD_DSP_DESCRIPTION **description); + FMOD_RESULT F_API registerCodec (FMOD_CODEC_DESCRIPTION *description, unsigned int *handle, unsigned int priority = 0); + FMOD_RESULT F_API registerDSP (const FMOD_DSP_DESCRIPTION *description, unsigned int *handle); + FMOD_RESULT F_API registerOutput (const FMOD_OUTPUT_DESCRIPTION *description, unsigned int *handle); + + // Init/Close. + FMOD_RESULT F_API init (int maxchannels, FMOD_INITFLAGS flags, void *extradriverdata); + FMOD_RESULT F_API close (); + + // General post-init system functions. + FMOD_RESULT F_API update (); /* IMPORTANT! CALL THIS ONCE PER FRAME! 
*/ + + FMOD_RESULT F_API setSpeakerPosition (FMOD_SPEAKER speaker, float x, float y, bool active); + FMOD_RESULT F_API getSpeakerPosition (FMOD_SPEAKER speaker, float *x, float *y, bool *active); + FMOD_RESULT F_API setStreamBufferSize (unsigned int filebuffersize, FMOD_TIMEUNIT filebuffersizetype); + FMOD_RESULT F_API getStreamBufferSize (unsigned int *filebuffersize, FMOD_TIMEUNIT *filebuffersizetype); + FMOD_RESULT F_API set3DSettings (float dopplerscale, float distancefactor, float rolloffscale); + FMOD_RESULT F_API get3DSettings (float *dopplerscale, float *distancefactor, float *rolloffscale); + FMOD_RESULT F_API set3DNumListeners (int numlisteners); + FMOD_RESULT F_API get3DNumListeners (int *numlisteners); + FMOD_RESULT F_API set3DListenerAttributes (int listener, const FMOD_VECTOR *pos, const FMOD_VECTOR *vel, const FMOD_VECTOR *forward, const FMOD_VECTOR *up); + FMOD_RESULT F_API get3DListenerAttributes (int listener, FMOD_VECTOR *pos, FMOD_VECTOR *vel, FMOD_VECTOR *forward, FMOD_VECTOR *up); + FMOD_RESULT F_API set3DRolloffCallback (FMOD_3D_ROLLOFF_CALLBACK callback); + FMOD_RESULT F_API mixerSuspend (); + FMOD_RESULT F_API mixerResume (); + FMOD_RESULT F_API getDefaultMixMatrix (FMOD_SPEAKERMODE sourcespeakermode, FMOD_SPEAKERMODE targetspeakermode, float *matrix, int matrixhop); + FMOD_RESULT F_API getSpeakerModeChannels (FMOD_SPEAKERMODE mode, int *channels); + + // System information functions. + FMOD_RESULT F_API getVersion (unsigned int *version); + FMOD_RESULT F_API getOutputHandle (void **handle); + FMOD_RESULT F_API getChannelsPlaying (int *channels, int *realchannels = 0); + FMOD_RESULT F_API getCPUUsage (float *dsp, float *stream, float *geometry, float *update, float *total); + FMOD_RESULT F_API getFileUsage (long long *sampleBytesRead, long long *streamBytesRead, long long *otherBytesRead); + FMOD_RESULT F_API getSoundRAM (int *currentalloced, int *maxalloced, int *total); + + // Sound/DSP/Channel/FX creation and retrieval. 
+ FMOD_RESULT F_API createSound (const char *name_or_data, FMOD_MODE mode, FMOD_CREATESOUNDEXINFO *exinfo, Sound **sound); + FMOD_RESULT F_API createStream (const char *name_or_data, FMOD_MODE mode, FMOD_CREATESOUNDEXINFO *exinfo, Sound **sound); + FMOD_RESULT F_API createDSP (const FMOD_DSP_DESCRIPTION *description, DSP **dsp); + FMOD_RESULT F_API createDSPByType (FMOD_DSP_TYPE type, DSP **dsp); + FMOD_RESULT F_API createChannelGroup (const char *name, ChannelGroup **channelgroup); + FMOD_RESULT F_API createSoundGroup (const char *name, SoundGroup **soundgroup); + FMOD_RESULT F_API createReverb3D (Reverb3D **reverb); + + FMOD_RESULT F_API playSound (Sound *sound, ChannelGroup *channelgroup, bool paused, Channel **channel); + FMOD_RESULT F_API playDSP (DSP *dsp, ChannelGroup *channelgroup, bool paused, Channel **channel); + FMOD_RESULT F_API getChannel (int channelid, Channel **channel); + FMOD_RESULT F_API getMasterChannelGroup (ChannelGroup **channelgroup); + FMOD_RESULT F_API getMasterSoundGroup (SoundGroup **soundgroup); + + // Routing to ports. + FMOD_RESULT F_API attachChannelGroupToPort (FMOD_PORT_TYPE portType, FMOD_PORT_INDEX portIndex, ChannelGroup *channelgroup, bool passThru = false); + FMOD_RESULT F_API detachChannelGroupFromPort (ChannelGroup *channelgroup); + + // Reverb API. + FMOD_RESULT F_API setReverbProperties (int instance, const FMOD_REVERB_PROPERTIES *prop); + FMOD_RESULT F_API getReverbProperties (int instance, FMOD_REVERB_PROPERTIES *prop); + + // System level DSP functionality. + FMOD_RESULT F_API lockDSP (); + FMOD_RESULT F_API unlockDSP (); + + // Recording API. 
+ FMOD_RESULT F_API getRecordNumDrivers (int *numdrivers, int *numconnected); + FMOD_RESULT F_API getRecordDriverInfo (int id, char *name, int namelen, FMOD_GUID *guid, int *systemrate, FMOD_SPEAKERMODE *speakermode, int *speakermodechannels, FMOD_DRIVER_STATE *state); + FMOD_RESULT F_API getRecordPosition (int id, unsigned int *position); + FMOD_RESULT F_API recordStart (int id, Sound *sound, bool loop); + FMOD_RESULT F_API recordStop (int id); + FMOD_RESULT F_API isRecording (int id, bool *recording); + + // Geometry API. + FMOD_RESULT F_API createGeometry (int maxpolygons, int maxvertices, Geometry **geometry); + FMOD_RESULT F_API setGeometrySettings (float maxworldsize); + FMOD_RESULT F_API getGeometrySettings (float *maxworldsize); + FMOD_RESULT F_API loadGeometry (const void *data, int datasize, Geometry **geometry); + FMOD_RESULT F_API getGeometryOcclusion (const FMOD_VECTOR *listener, const FMOD_VECTOR *source, float *direct, float *reverb); + + // Network functions. + FMOD_RESULT F_API setNetworkProxy (const char *proxy); + FMOD_RESULT F_API getNetworkProxy (char *proxy, int proxylen); + FMOD_RESULT F_API setNetworkTimeout (int timeout); + FMOD_RESULT F_API getNetworkTimeout (int *timeout); + + // Userdata set/get. + FMOD_RESULT F_API setUserData (void *userdata); + FMOD_RESULT F_API getUserData (void **userdata); + }; + + /* + 'Sound' API + */ + class Sound + { + private: + + // Constructor made private so user cannot statically instance a Sound class. Appropriate Sound creation or retrieval function must be used. + Sound(); + Sound(const Sound &); + + public: + + FMOD_RESULT F_API release (); + FMOD_RESULT F_API getSystemObject (System **system); + + // Standard sound manipulation functions. 
+ FMOD_RESULT F_API lock (unsigned int offset, unsigned int length, void **ptr1, void **ptr2, unsigned int *len1, unsigned int *len2); + FMOD_RESULT F_API unlock (void *ptr1, void *ptr2, unsigned int len1, unsigned int len2); + FMOD_RESULT F_API setDefaults (float frequency, int priority); + FMOD_RESULT F_API getDefaults (float *frequency, int *priority); + FMOD_RESULT F_API set3DMinMaxDistance (float min, float max); + FMOD_RESULT F_API get3DMinMaxDistance (float *min, float *max); + FMOD_RESULT F_API set3DConeSettings (float insideconeangle, float outsideconeangle, float outsidevolume); + FMOD_RESULT F_API get3DConeSettings (float *insideconeangle, float *outsideconeangle, float *outsidevolume); + FMOD_RESULT F_API set3DCustomRolloff (FMOD_VECTOR *points, int numpoints); + FMOD_RESULT F_API get3DCustomRolloff (FMOD_VECTOR **points, int *numpoints); + FMOD_RESULT F_API getSubSound (int index, Sound **subsound); + FMOD_RESULT F_API getSubSoundParent (Sound **parentsound); + FMOD_RESULT F_API getName (char *name, int namelen); + FMOD_RESULT F_API getLength (unsigned int *length, FMOD_TIMEUNIT lengthtype); + FMOD_RESULT F_API getFormat (FMOD_SOUND_TYPE *type, FMOD_SOUND_FORMAT *format, int *channels, int *bits); + FMOD_RESULT F_API getNumSubSounds (int *numsubsounds); + FMOD_RESULT F_API getNumTags (int *numtags, int *numtagsupdated); + FMOD_RESULT F_API getTag (const char *name, int index, FMOD_TAG *tag); + FMOD_RESULT F_API getOpenState (FMOD_OPENSTATE *openstate, unsigned int *percentbuffered, bool *starving, bool *diskbusy); + FMOD_RESULT F_API readData (void *buffer, unsigned int length, unsigned int *read); + FMOD_RESULT F_API seekData (unsigned int pcm); + + FMOD_RESULT F_API setSoundGroup (SoundGroup *soundgroup); + FMOD_RESULT F_API getSoundGroup (SoundGroup **soundgroup); + + // Synchronization point API. These points can come from markers embedded in wav files, and can also generate channel callbacks. 
+ FMOD_RESULT F_API getNumSyncPoints (int *numsyncpoints); + FMOD_RESULT F_API getSyncPoint (int index, FMOD_SYNCPOINT **point); + FMOD_RESULT F_API getSyncPointInfo (FMOD_SYNCPOINT *point, char *name, int namelen, unsigned int *offset, FMOD_TIMEUNIT offsettype); + FMOD_RESULT F_API addSyncPoint (unsigned int offset, FMOD_TIMEUNIT offsettype, const char *name, FMOD_SYNCPOINT **point); + FMOD_RESULT F_API deleteSyncPoint (FMOD_SYNCPOINT *point); + + // Functions also in Channel class but here they are the 'default' to save having to change it in Channel all the time. + FMOD_RESULT F_API setMode (FMOD_MODE mode); + FMOD_RESULT F_API getMode (FMOD_MODE *mode); + FMOD_RESULT F_API setLoopCount (int loopcount); + FMOD_RESULT F_API getLoopCount (int *loopcount); + FMOD_RESULT F_API setLoopPoints (unsigned int loopstart, FMOD_TIMEUNIT loopstarttype, unsigned int loopend, FMOD_TIMEUNIT loopendtype); + FMOD_RESULT F_API getLoopPoints (unsigned int *loopstart, FMOD_TIMEUNIT loopstarttype, unsigned int *loopend, FMOD_TIMEUNIT loopendtype); + + // For MOD/S3M/XM/IT/MID sequenced formats only. + FMOD_RESULT F_API getMusicNumChannels (int *numchannels); + FMOD_RESULT F_API setMusicChannelVolume (int channel, float volume); + FMOD_RESULT F_API getMusicChannelVolume (int channel, float *volume); + FMOD_RESULT F_API setMusicSpeed (float speed); + FMOD_RESULT F_API getMusicSpeed (float *speed); + + // Userdata set/get. + FMOD_RESULT F_API setUserData (void *userdata); + FMOD_RESULT F_API getUserData (void **userdata); + }; + + + /* + 'ChannelControl API'. This is a base class for Channel and ChannelGroup so they can share the same functionality. This cannot be used or instansiated explicitly. + */ + class ChannelControl + { + private: + + // Constructor made private so user cannot statically instance a Control class. 
+ ChannelControl(); + ChannelControl(const ChannelControl &); + + public: + + FMOD_RESULT F_API getSystemObject (System **system); + + // General control functionality for Channels and ChannelGroups. + FMOD_RESULT F_API stop (); + FMOD_RESULT F_API setPaused (bool paused); + FMOD_RESULT F_API getPaused (bool *paused); + FMOD_RESULT F_API setVolume (float volume); + FMOD_RESULT F_API getVolume (float *volume); + FMOD_RESULT F_API setVolumeRamp (bool ramp); + FMOD_RESULT F_API getVolumeRamp (bool *ramp); + FMOD_RESULT F_API getAudibility (float *audibility); + FMOD_RESULT F_API setPitch (float pitch); + FMOD_RESULT F_API getPitch (float *pitch); + FMOD_RESULT F_API setMute (bool mute); + FMOD_RESULT F_API getMute (bool *mute); + FMOD_RESULT F_API setReverbProperties (int instance, float wet); + FMOD_RESULT F_API getReverbProperties (int instance, float *wet); + FMOD_RESULT F_API setLowPassGain (float gain); + FMOD_RESULT F_API getLowPassGain (float *gain); + FMOD_RESULT F_API setMode (FMOD_MODE mode); + FMOD_RESULT F_API getMode (FMOD_MODE *mode); + FMOD_RESULT F_API setCallback (FMOD_CHANNELCONTROL_CALLBACK callback); + FMOD_RESULT F_API isPlaying (bool *isplaying); + + // Panning and level adjustment. + // Note all 'set' functions alter a final matrix, this is why the only get function is getMixMatrix, to avoid other get functions returning incorrect/obsolete values. + FMOD_RESULT F_API setPan (float pan); + FMOD_RESULT F_API setMixLevelsOutput (float frontleft, float frontright, float center, float lfe, float surroundleft, float surroundright, float backleft, float backright); + FMOD_RESULT F_API setMixLevelsInput (float *levels, int numlevels); + FMOD_RESULT F_API setMixMatrix (float *matrix, int outchannels, int inchannels, int inchannel_hop = 0); + FMOD_RESULT F_API getMixMatrix (float *matrix, int *outchannels, int *inchannels, int inchannel_hop = 0); + + // Clock based functionality. 
+ FMOD_RESULT F_API getDSPClock (unsigned long long *dspclock, unsigned long long *parentclock); + FMOD_RESULT F_API setDelay (unsigned long long dspclock_start, unsigned long long dspclock_end, bool stopchannels = true); + FMOD_RESULT F_API getDelay (unsigned long long *dspclock_start, unsigned long long *dspclock_end, bool *stopchannels = 0); + FMOD_RESULT F_API addFadePoint (unsigned long long dspclock, float volume); + FMOD_RESULT F_API setFadePointRamp (unsigned long long dspclock, float volume); + FMOD_RESULT F_API removeFadePoints (unsigned long long dspclock_start, unsigned long long dspclock_end); + FMOD_RESULT F_API getFadePoints (unsigned int *numpoints, unsigned long long *point_dspclock, float *point_volume); + + // DSP effects. + FMOD_RESULT F_API getDSP (int index, DSP **dsp); + FMOD_RESULT F_API addDSP (int index, DSP *dsp); + FMOD_RESULT F_API removeDSP (DSP *dsp); + FMOD_RESULT F_API getNumDSPs (int *numdsps); + FMOD_RESULT F_API setDSPIndex (DSP *dsp, int index); + FMOD_RESULT F_API getDSPIndex (DSP *dsp, int *index); + + // 3D functionality. 
+ FMOD_RESULT F_API set3DAttributes (const FMOD_VECTOR *pos, const FMOD_VECTOR *vel, const FMOD_VECTOR *alt_pan_pos = 0); + FMOD_RESULT F_API get3DAttributes (FMOD_VECTOR *pos, FMOD_VECTOR *vel, FMOD_VECTOR *alt_pan_pos = 0); + FMOD_RESULT F_API set3DMinMaxDistance (float mindistance, float maxdistance); + FMOD_RESULT F_API get3DMinMaxDistance (float *mindistance, float *maxdistance); + FMOD_RESULT F_API set3DConeSettings (float insideconeangle, float outsideconeangle, float outsidevolume); + FMOD_RESULT F_API get3DConeSettings (float *insideconeangle, float *outsideconeangle, float *outsidevolume); + FMOD_RESULT F_API set3DConeOrientation (FMOD_VECTOR *orientation); + FMOD_RESULT F_API get3DConeOrientation (FMOD_VECTOR *orientation); + FMOD_RESULT F_API set3DCustomRolloff (FMOD_VECTOR *points, int numpoints); + FMOD_RESULT F_API get3DCustomRolloff (FMOD_VECTOR **points, int *numpoints); + FMOD_RESULT F_API set3DOcclusion (float directocclusion, float reverbocclusion); + FMOD_RESULT F_API get3DOcclusion (float *directocclusion, float *reverbocclusion); + FMOD_RESULT F_API set3DSpread (float angle); + FMOD_RESULT F_API get3DSpread (float *angle); + FMOD_RESULT F_API set3DLevel (float level); + FMOD_RESULT F_API get3DLevel (float *level); + FMOD_RESULT F_API set3DDopplerLevel (float level); + FMOD_RESULT F_API get3DDopplerLevel (float *level); + FMOD_RESULT F_API set3DDistanceFilter (bool custom, float customLevel, float centerFreq); + FMOD_RESULT F_API get3DDistanceFilter (bool *custom, float *customLevel, float *centerFreq); + + // Userdata set/get. + FMOD_RESULT F_API setUserData (void *userdata); + FMOD_RESULT F_API getUserData (void **userdata); + }; + + /* + 'Channel' API. + */ + class Channel : public ChannelControl + { + private: + + // Constructor made private so user cannot statically instance a Channel class. Appropriate Channel creation or retrieval function must be used. 
+ Channel(); + Channel(const Channel &); + + public: + + // Channel specific control functionality. + FMOD_RESULT F_API setFrequency (float frequency); + FMOD_RESULT F_API getFrequency (float *frequency); + FMOD_RESULT F_API setPriority (int priority); + FMOD_RESULT F_API getPriority (int *priority); + FMOD_RESULT F_API setPosition (unsigned int position, FMOD_TIMEUNIT postype); + FMOD_RESULT F_API getPosition (unsigned int *position, FMOD_TIMEUNIT postype); + FMOD_RESULT F_API setChannelGroup (ChannelGroup *channelgroup); + FMOD_RESULT F_API getChannelGroup (ChannelGroup **channelgroup); + FMOD_RESULT F_API setLoopCount (int loopcount); + FMOD_RESULT F_API getLoopCount (int *loopcount); + FMOD_RESULT F_API setLoopPoints (unsigned int loopstart, FMOD_TIMEUNIT loopstarttype, unsigned int loopend, FMOD_TIMEUNIT loopendtype); + FMOD_RESULT F_API getLoopPoints (unsigned int *loopstart, FMOD_TIMEUNIT loopstarttype, unsigned int *loopend, FMOD_TIMEUNIT loopendtype); + + // Information only functions. + FMOD_RESULT F_API isVirtual (bool *isvirtual); + FMOD_RESULT F_API getCurrentSound (Sound **sound); + FMOD_RESULT F_API getIndex (int *index); + }; + + /* + 'ChannelGroup' API + */ + class ChannelGroup : public ChannelControl + { + private: + + // Constructor made private so user cannot statically instance a ChannelGroup class. Appropriate ChannelGroup creation or retrieval function must be used. + ChannelGroup(); + ChannelGroup(const ChannelGroup &); + + public: + + FMOD_RESULT F_API release (); + + // Nested channel groups. + FMOD_RESULT F_API addGroup (ChannelGroup *group, bool propagatedspclock = true, DSPConnection **connection = 0); + FMOD_RESULT F_API getNumGroups (int *numgroups); + FMOD_RESULT F_API getGroup (int index, ChannelGroup **group); + FMOD_RESULT F_API getParentGroup (ChannelGroup **group); + + // Information only functions. 
+ FMOD_RESULT F_API getName (char *name, int namelen); + FMOD_RESULT F_API getNumChannels (int *numchannels); + FMOD_RESULT F_API getChannel (int index, Channel **channel); + }; + + /* + 'SoundGroup' API + */ + class SoundGroup + { + private: + + // Constructor made private so user cannot statically instance a SoundGroup class. Appropriate SoundGroup creation or retrieval function must be used. + SoundGroup(); + SoundGroup(const SoundGroup &); + + public: + + FMOD_RESULT F_API release (); + FMOD_RESULT F_API getSystemObject (System **system); + + // SoundGroup control functions. + FMOD_RESULT F_API setMaxAudible (int maxaudible); + FMOD_RESULT F_API getMaxAudible (int *maxaudible); + FMOD_RESULT F_API setMaxAudibleBehavior (FMOD_SOUNDGROUP_BEHAVIOR behavior); + FMOD_RESULT F_API getMaxAudibleBehavior (FMOD_SOUNDGROUP_BEHAVIOR *behavior); + FMOD_RESULT F_API setMuteFadeSpeed (float speed); + FMOD_RESULT F_API getMuteFadeSpeed (float *speed); + FMOD_RESULT F_API setVolume (float volume); + FMOD_RESULT F_API getVolume (float *volume); + FMOD_RESULT F_API stop (); + + // Information only functions. + FMOD_RESULT F_API getName (char *name, int namelen); + FMOD_RESULT F_API getNumSounds (int *numsounds); + FMOD_RESULT F_API getSound (int index, Sound **sound); + FMOD_RESULT F_API getNumPlaying (int *numplaying); + + // Userdata set/get. + FMOD_RESULT F_API setUserData (void *userdata); + FMOD_RESULT F_API getUserData (void **userdata); + }; + + /* + 'DSP' API + */ + class DSP + { + private: + + // Constructor made private so user cannot statically instance a DSP class. Appropriate DSP creation or retrieval function must be used. + DSP(); + DSP(const DSP &); + + public: + + FMOD_RESULT F_API release (); + FMOD_RESULT F_API getSystemObject (System **system); + + // Connection / disconnection / input and output enumeration. 
+ FMOD_RESULT F_API addInput (DSP *input, DSPConnection **connection = 0, FMOD_DSPCONNECTION_TYPE type = FMOD_DSPCONNECTION_TYPE_STANDARD); + FMOD_RESULT F_API disconnectFrom (DSP *target, DSPConnection *connection = 0); + FMOD_RESULT F_API disconnectAll (bool inputs, bool outputs); + FMOD_RESULT F_API getNumInputs (int *numinputs); + FMOD_RESULT F_API getNumOutputs (int *numoutputs); + FMOD_RESULT F_API getInput (int index, DSP **input, DSPConnection **inputconnection); + FMOD_RESULT F_API getOutput (int index, DSP **output, DSPConnection **outputconnection); + + // DSP unit control. + FMOD_RESULT F_API setActive (bool active); + FMOD_RESULT F_API getActive (bool *active); + FMOD_RESULT F_API setBypass (bool bypass); + FMOD_RESULT F_API getBypass (bool *bypass); + FMOD_RESULT F_API setWetDryMix (float prewet, float postwet, float dry); + FMOD_RESULT F_API getWetDryMix (float *prewet, float *postwet, float *dry); + FMOD_RESULT F_API setChannelFormat (FMOD_CHANNELMASK channelmask, int numchannels, FMOD_SPEAKERMODE source_speakermode); + FMOD_RESULT F_API getChannelFormat (FMOD_CHANNELMASK *channelmask, int *numchannels, FMOD_SPEAKERMODE *source_speakermode); + FMOD_RESULT F_API getOutputChannelFormat (FMOD_CHANNELMASK inmask, int inchannels, FMOD_SPEAKERMODE inspeakermode, FMOD_CHANNELMASK *outmask, int *outchannels, FMOD_SPEAKERMODE *outspeakermode); + FMOD_RESULT F_API reset (); + + // DSP parameter control. 
+ FMOD_RESULT F_API setParameterFloat (int index, float value); + FMOD_RESULT F_API setParameterInt (int index, int value); + FMOD_RESULT F_API setParameterBool (int index, bool value); + FMOD_RESULT F_API setParameterData (int index, void *data, unsigned int length); + FMOD_RESULT F_API getParameterFloat (int index, float *value, char *valuestr, int valuestrlen); + FMOD_RESULT F_API getParameterInt (int index, int *value, char *valuestr, int valuestrlen); + FMOD_RESULT F_API getParameterBool (int index, bool *value, char *valuestr, int valuestrlen); + FMOD_RESULT F_API getParameterData (int index, void **data, unsigned int *length, char *valuestr, int valuestrlen); + FMOD_RESULT F_API getNumParameters (int *numparams); + FMOD_RESULT F_API getParameterInfo (int index, FMOD_DSP_PARAMETER_DESC **desc); + FMOD_RESULT F_API getDataParameterIndex (int datatype, int *index); + FMOD_RESULT F_API showConfigDialog (void *hwnd, bool show); + + // DSP attributes. + FMOD_RESULT F_API getInfo (char *name, unsigned int *version, int *channels, int *configwidth, int *configheight); + FMOD_RESULT F_API getType (FMOD_DSP_TYPE *type); + FMOD_RESULT F_API getIdle (bool *idle); + + // Userdata set/get. + FMOD_RESULT F_API setUserData (void *userdata); + FMOD_RESULT F_API getUserData (void **userdata); + + // Metering. + FMOD_RESULT F_API setMeteringEnabled (bool inputEnabled, bool outputEnabled); + FMOD_RESULT F_API getMeteringEnabled (bool *inputEnabled, bool *outputEnabled); + FMOD_RESULT F_API getMeteringInfo (FMOD_DSP_METERING_INFO *inputInfo, FMOD_DSP_METERING_INFO *outputInfo); + }; + + + /* + 'DSPConnection' API + */ + class DSPConnection + { + private: + + // Constructor made private so user cannot statically instance a DSPConnection class. Appropriate DSPConnection creation or retrieval function must be used. 
+ DSPConnection(); + DSPConnection(const DSPConnection &); + + public: + + FMOD_RESULT F_API getInput (DSP **input); + FMOD_RESULT F_API getOutput (DSP **output); + FMOD_RESULT F_API setMix (float volume); + FMOD_RESULT F_API getMix (float *volume); + FMOD_RESULT F_API setMixMatrix (float *matrix, int outchannels, int inchannels, int inchannel_hop = 0); + FMOD_RESULT F_API getMixMatrix (float *matrix, int *outchannels, int *inchannels, int inchannel_hop = 0); + FMOD_RESULT F_API getType (FMOD_DSPCONNECTION_TYPE *type); + + // Userdata set/get. + FMOD_RESULT F_API setUserData (void *userdata); + FMOD_RESULT F_API getUserData (void **userdata); + }; + + + /* + 'Geometry' API + */ + class Geometry + { + private: + + // Constructor made private so user cannot statically instance a Geometry class. Appropriate Geometry creation or retrieval function must be used. + Geometry(); + Geometry(const Geometry &); + + public: + + FMOD_RESULT F_API release (); + + // Polygon manipulation. + FMOD_RESULT F_API addPolygon (float directocclusion, float reverbocclusion, bool doublesided, int numvertices, const FMOD_VECTOR *vertices, int *polygonindex); + FMOD_RESULT F_API getNumPolygons (int *numpolygons); + FMOD_RESULT F_API getMaxPolygons (int *maxpolygons, int *maxvertices); + FMOD_RESULT F_API getPolygonNumVertices (int index, int *numvertices); + FMOD_RESULT F_API setPolygonVertex (int index, int vertexindex, const FMOD_VECTOR *vertex); + FMOD_RESULT F_API getPolygonVertex (int index, int vertexindex, FMOD_VECTOR *vertex); + FMOD_RESULT F_API setPolygonAttributes (int index, float directocclusion, float reverbocclusion, bool doublesided); + FMOD_RESULT F_API getPolygonAttributes (int index, float *directocclusion, float *reverbocclusion, bool *doublesided); + + // Object manipulation. 
+ FMOD_RESULT F_API setActive (bool active); + FMOD_RESULT F_API getActive (bool *active); + FMOD_RESULT F_API setRotation (const FMOD_VECTOR *forward, const FMOD_VECTOR *up); + FMOD_RESULT F_API getRotation (FMOD_VECTOR *forward, FMOD_VECTOR *up); + FMOD_RESULT F_API setPosition (const FMOD_VECTOR *position); + FMOD_RESULT F_API getPosition (FMOD_VECTOR *position); + FMOD_RESULT F_API setScale (const FMOD_VECTOR *scale); + FMOD_RESULT F_API getScale (FMOD_VECTOR *scale); + FMOD_RESULT F_API save (void *data, int *datasize); + + // Userdata set/get. + FMOD_RESULT F_API setUserData (void *userdata); + FMOD_RESULT F_API getUserData (void **userdata); + }; + + + /* + 'Reverb' API + */ + class Reverb3D + { + private: + + // Constructor made private so user cannot statically instance a Reverb3D class. Appropriate Reverb creation or retrieval function must be used. + Reverb3D(); + Reverb3D(const Reverb3D &); + + public: + + FMOD_RESULT F_API release (); + + // Reverb manipulation. + FMOD_RESULT F_API set3DAttributes (const FMOD_VECTOR *position, float mindistance, float maxdistance); + FMOD_RESULT F_API get3DAttributes (FMOD_VECTOR *position, float *mindistance,float *maxdistance); + FMOD_RESULT F_API setProperties (const FMOD_REVERB_PROPERTIES *properties); + FMOD_RESULT F_API getProperties (FMOD_REVERB_PROPERTIES *properties); + FMOD_RESULT F_API setActive (bool active); + FMOD_RESULT F_API getActive (bool *active); + + // Userdata set/get. + FMOD_RESULT F_API setUserData (void *userdata); + FMOD_RESULT F_API getUserData (void **userdata); + }; +} + +#endif diff --git a/app/src/main/cpp/inc/fmod_codec.h b/app/src/main/cpp/inc/fmod_codec.h new file mode 100644 index 0000000..80c9695 --- /dev/null +++ b/app/src/main/cpp/inc/fmod_codec.h @@ -0,0 +1,178 @@ +/* ======================================================================================================== */ +/* FMOD Studio - codec development header file. Copyright (c), Firelight Technologies Pty, Ltd. 2004-2017. 
*/ +/* */ +/* Use this header if you are wanting to develop your own file format plugin to use with */ +/* FMOD's codec system. With this header you can make your own fileformat plugin that FMOD */ +/* can register and use. See the documentation and examples on how to make a working plugin. */ +/* */ +/* ======================================================================================================== */ + +#ifndef _FMOD_CODEC_H +#define _FMOD_CODEC_H + +typedef struct FMOD_CODEC_STATE FMOD_CODEC_STATE; +typedef struct FMOD_CODEC_WAVEFORMAT FMOD_CODEC_WAVEFORMAT; + +/* + Codec callbacks +*/ +typedef FMOD_RESULT (F_CALLBACK *FMOD_CODEC_OPEN_CALLBACK) (FMOD_CODEC_STATE *codec_state, FMOD_MODE usermode, FMOD_CREATESOUNDEXINFO *userexinfo); +typedef FMOD_RESULT (F_CALLBACK *FMOD_CODEC_CLOSE_CALLBACK) (FMOD_CODEC_STATE *codec_state); +typedef FMOD_RESULT (F_CALLBACK *FMOD_CODEC_READ_CALLBACK) (FMOD_CODEC_STATE *codec_state, void *buffer, unsigned int samples_in, unsigned int *samples_out); +typedef FMOD_RESULT (F_CALLBACK *FMOD_CODEC_GETLENGTH_CALLBACK) (FMOD_CODEC_STATE *codec_state, unsigned int *length, FMOD_TIMEUNIT lengthtype); +typedef FMOD_RESULT (F_CALLBACK *FMOD_CODEC_SETPOSITION_CALLBACK) (FMOD_CODEC_STATE *codec_state, int subsound, unsigned int position, FMOD_TIMEUNIT postype); +typedef FMOD_RESULT (F_CALLBACK *FMOD_CODEC_GETPOSITION_CALLBACK) (FMOD_CODEC_STATE *codec_state, unsigned int *position, FMOD_TIMEUNIT postype); +typedef FMOD_RESULT (F_CALLBACK *FMOD_CODEC_SOUNDCREATE_CALLBACK) (FMOD_CODEC_STATE *codec_state, int subsound, FMOD_SOUND *sound); +typedef FMOD_RESULT (F_CALLBACK *FMOD_CODEC_METADATA_CALLBACK) (FMOD_CODEC_STATE *codec_state, FMOD_TAGTYPE tagtype, char *name, void *data, unsigned int datalen, FMOD_TAGDATATYPE datatype, int unique); +typedef FMOD_RESULT (F_CALLBACK *FMOD_CODEC_GETWAVEFORMAT_CALLBACK)(FMOD_CODEC_STATE *codec_state, int index, FMOD_CODEC_WAVEFORMAT *waveformat); + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + When creating 
a codec, declare one of these and provide the relevant callbacks and name for FMOD to use when it opens and reads a file. + + [REMARKS] + Members marked with [r] mean the variable is modified by FMOD and is for reading purposes only. Do not change this value.
+ Members marked with [w] mean the variable can be written to. The user can set the value.
+ + [SEE_ALSO] + FMOD_CODEC_STATE + FMOD_CODEC_WAVEFORMAT +] +*/ +typedef struct FMOD_CODEC_DESCRIPTION +{ + const char *name; /* [w] Name of the codec. */ + unsigned int version; /* [w] Plugin writer's version number. */ + int defaultasstream; /* [w] Tells FMOD to open the file as a stream when calling System::createSound, and not a static sample. Should normally be 0 (FALSE), because generally the user wants to decode the file into memory when using System::createSound. Mainly used for formats that decode for a very long time, or could use large amounts of memory when decoded. Usually sequenced formats such as mod/s3m/xm/it/midi fall into this category. It is mainly to stop users that don't know what they're doing from getting FMOD_ERR_MEMORY returned from createSound when they should have in fact called System::createStream or used FMOD_CREATESTREAM in System::createSound. */ + FMOD_TIMEUNIT timeunits; /* [w] When setposition codec is called, only these time formats will be passed to the codec. Use bitwise OR to accumulate different types. */ + FMOD_CODEC_OPEN_CALLBACK open; /* [w] Open callback for the codec for when FMOD tries to open a sound using this codec. */ + FMOD_CODEC_CLOSE_CALLBACK close; /* [w] Close callback for the codec for when FMOD tries to close a sound using this codec. */ + FMOD_CODEC_READ_CALLBACK read; /* [w] Read callback for the codec for when FMOD tries to read some data from the file to the destination format (specified in the open callback). */ + FMOD_CODEC_GETLENGTH_CALLBACK getlength; /* [w] Callback to return the length of the song in whatever format required when Sound::getLength is called. */ + FMOD_CODEC_SETPOSITION_CALLBACK setposition; /* [w] Seek callback for the codec for when FMOD tries to seek within the file with Channel::setPosition. */ + FMOD_CODEC_GETPOSITION_CALLBACK getposition; /* [w] Tell callback for the codec for when FMOD tries to get the current position within the with Channel::getPosition. 
*/ + FMOD_CODEC_SOUNDCREATE_CALLBACK soundcreate; /* [w] Sound creation callback for the codec when FMOD finishes creating the sound. (So the codec can set more parameters for the related created sound, ie loop points/mode or 3D attributes etc). */ + FMOD_CODEC_GETWAVEFORMAT_CALLBACK getwaveformat; /* [w] Callback to tell FMOD about the waveformat of a particular subsound. This is to save memory, rather than saving 1000 FMOD_CODEC_WAVEFORMAT structures in the codec, the codec might have a more optimal way of storing this information. */ +} FMOD_CODEC_DESCRIPTION; + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Set these values marked to tell fmod what sort of sound to create when the codec open callback is called.
+ The format, channels, frequency and lengthpcm tell FMOD what sort of sound buffer to create when you initialize your code.
+ If you wrote an MP3 codec that decoded to stereo 16bit integer PCM for a 44khz sound, you would specify FMOD_SOUND_FORMAT_PCM16, and channels would be equal to 2, and frequency would be 44100.
+ + [REMARKS] + Members marked with [r] mean the variable is modified by FMOD and is for reading purposes only. Do not change this value.
+ Members marked with [w] mean the variable can be written to. The user can set the value.
+
+ 1.07 Note. 'blockalign' member which was in bytes has been removed. 'pcmblocksize' is now the replacement, and is measured in PCM samples only, not bytes. This is purely to support buffering + internal to FMOD for codecs that are not sample accurate. +
+ Note: When registering a codec, format, channels, frequency and lengthpcm must be supplied, otherwise there will be an error.
+ This structure is optional if FMOD_CODEC_GETWAVEFORMAT_CALLBACK is specified.
+ An array of these structures may be needed if FMOD_CODEC_STATE::numsubsounds is larger than 1. + + + [SEE_ALSO] + FMOD_CODEC_STATE + FMOD_SOUND_FORMAT + FMOD_MODE + FMOD_CHANNELMASK + FMOD_CHANNELORDER + FMOD_CODEC_WAVEFORMAT_VERSION +] +*/ +struct FMOD_CODEC_WAVEFORMAT +{ + const char* name; /* [w] Name of sound. Optional. If used, the codec must own the lifetime of the string memory until the codec is destroyed. */ + FMOD_SOUND_FORMAT format; /* [w] Format for (decompressed) codec output, ie FMOD_SOUND_FORMAT_PCM8, FMOD_SOUND_FORMAT_PCM16. Mandantory - Must be supplied. */ + int channels; /* [w] Number of channels used by codec, ie mono = 1, stereo = 2. Mandantory - Must be supplied. */ + int frequency; /* [w] Default frequency in hz of the codec, ie 44100. Mandantory - Must be supplied. */ + unsigned int lengthbytes; /* [w] Length in bytes of the source data. Used for FMOD_TIMEUNIT_RAWBYTES. Optional. Default = 0. */ + unsigned int lengthpcm; /* [w] Length in decompressed, PCM samples of the file, ie length in seconds * frequency. Used for Sound::getLength and for memory allocation of static decompressed sample data. Mandantory - Must be supplied. */ + unsigned int pcmblocksize; /* [w] Minimum, optimal number of decompressed PCM samples codec can handle. 0 or 1 = no buffering. Anything higher means FMOD will allocate a PCM buffer of this size to read in chunks. The codec read callback will be called in multiples of this value. Optional. */ + int loopstart; /* [w] Loopstart in decompressed, PCM samples of file. Optional. Default = 0. */ + int loopend; /* [w] Loopend in decompressed, PCM samples of file. Optional. Default = 0. */ + FMOD_MODE mode; /* [w] Mode to determine whether the sound should by default load as looping, non looping, 2d or 3d. Optional. Default = FMOD_DEFAULT. */ + FMOD_CHANNELMASK channelmask; /* [w] Defined channel bitmask to describe which speakers the channels in the codec map to, in order of channel count. See fmod_common.h. Optional. 
Leave at 0 to map to the speaker layout defined in FMOD_SPEAKER. */ + FMOD_CHANNELORDER channelorder; /* [w] Defined channel order type, to describe where each sound channel should pan for the number of channels specified. See fmod_common.h. Optional. Leave at 0 to play in default speaker order. */ + float peakvolume; /* [w] Peak volume of sound. Optional. Default = 0 if not used. */ +}; + + +/* +[DEFINE] +[ + [NAME] + FMOD_CODEC_WAVEFORMAT_VERSION + + [DESCRIPTION] + Version number of FMOD_CODEC_WAVEFORMAT structure. Should be set into FMOD_CODEC_STATE in the FMOD_CODEC_OPEN_CALLBACK. + + [REMARKS] + Use this for binary compatibility and for future expansion. + + [SEE_ALSO] + FMOD_CODEC_STATE + FMOD_CODEC_DESCRIPTION + FMOD_CODEC_OPEN_CALLBACK +] +*/ +#define FMOD_CODEC_WAVEFORMAT_VERSION 3 +/* [DEFINE_END] */ + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Codec plugin structure that is passed into each callback.
+
+ Optionally set the numsubsounds and waveformat members when called in FMOD_CODEC_OPEN_CALLBACK to tell fmod what sort of sound to create.
+ + [REMARKS] + Members marked with [r] mean the variable is modified by FMOD and is for reading purposes only. Do not change this value.
+ Members marked with [w] mean the variable can be written to. The user can set the value.
+
+ 'numsubsounds' should be 0 if the file is a normal single sound stream or sound. Examples of this would be .WAV, .WMA, .MP3, .AIFF.
+ 'numsubsounds' should be 1+ if the file is a container format, and does not contain wav data itself. Examples of these types would be FSB (contains multiple sounds), DLS (contains instruments).
+ The waveformat value should point to an array of information based on how many subsounds are in the format. If the number of subsounds is 0 then it should point to 1 waveformat, the same as if the number of subsounds was 1. If subsounds was 100 for example, there should be a pointer to an array of 100 waveformat structures.
+
+ The waveformat pointer is optional and could be 0, if using FMOD_CODEC_GETWAVEFORMAT_CALLBACK is preferred.
+
+ When a sound has 1 or more subsounds, the caller must play the individual sounds specified by first obtaining the subsound with Sound::getSubSound. + + [SEE_ALSO] + FMOD_CODEC_WAVEFORMAT + FMOD_FILE_READ_CALLBACK + FMOD_FILE_SEEK_CALLBACK + FMOD_CODEC_METADATA_CALLBACK + Sound::getSubSound + Sound::getNumSubSounds + FMOD_CODEC_WAVEFORMAT_VERSION +] +*/ +struct FMOD_CODEC_STATE +{ + int numsubsounds; /* [w] Number of 'subsounds' in this sound. Anything other than 0 makes it a 'container' format (ie DLS/FSB etc which contain 1 or more subsounds). For most normal, single sound codec such as WAV/AIFF/MP3, this should be 0 as they are not a container for subsounds, they are the sound by itself. */ + FMOD_CODEC_WAVEFORMAT *waveformat; /* [w] Pointer to an array of format structures containing information about each sample. Can be 0 or NULL if FMOD_CODEC_GETWAVEFORMAT_CALLBACK callback is preferred. The number of entries here must equal the number of subsounds defined in the subsound parameter. If numsubsounds = 0 then there should be 1 instance of this structure. */ + void *plugindata; /* [w] Plugin writer created data the codec author wants to attach to this object. */ + + void *filehandle; /* [r] This will return an internal FMOD file handle to use with the callbacks provided. */ + unsigned int filesize; /* [r] This will contain the size of the file in bytes. */ + FMOD_FILE_READ_CALLBACK fileread; /* [r] This will return a callable FMOD file function to use from codec. */ + FMOD_FILE_SEEK_CALLBACK fileseek; /* [r] This will return a callable FMOD file function to use from codec. */ + FMOD_CODEC_METADATA_CALLBACK metadata; /* [r] This will return a callable FMOD metadata function to use from codec. */ + + int waveformatversion; /* [w] Must be set to FMOD_CODEC_WAVEFORMAT_VERSION in the FMOD_CODEC_OPEN_CALLBACK. 
*/ +}; + +#endif + + diff --git a/app/src/main/cpp/inc/fmod_common.h b/app/src/main/cpp/inc/fmod_common.h new file mode 100644 index 0000000..9ba0bf1 --- /dev/null +++ b/app/src/main/cpp/inc/fmod_common.h @@ -0,0 +1,1725 @@ +/*$ preserve start $*/ + +/* ================================================================================================== */ +/* FMOD Studio - Common C/C++ header file. Copyright (c), Firelight Technologies Pty, Ltd. 2004-2017. */ +/* */ +/* This header is included by fmod.hpp (C++ interface) and fmod.h (C interface) therefore is the */ +/* base header for all FMOD headers. */ +/* ================================================================================================== */ + +#ifndef _FMOD_COMMON_H +#define _FMOD_COMMON_H + +/* + FMOD version number. Check this against FMOD::System::getVersion. + 0xaaaabbcc -> aaaa = major version number. bb = minor version number. cc = development version number. +*/ + +#define FMOD_VERSION 0x00010906 + +/* + Compiler specific settings. +*/ + +#if defined(_WIN32) || defined(__CYGWIN__) + #define F_CALL __stdcall +#elif defined(__ANDROID__) && defined(__arm__) && !defined(__LP64__) && !defined(__clang__) + #define F_CALL __attribute__((pcs("aapcs"))) +#else + #define F_CALL +#endif + +#if defined(_WIN32) || defined(__CYGWIN__) || defined(__ORBIS__) || defined(__psp2__) + #define F_EXPORT __declspec(dllexport) +#elif defined(__APPLE__) || defined(__ANDROID__) || defined(__linux__) + #define F_EXPORT __attribute__((visibility("default"))) +#else + #define F_EXPORT +#endif + +#ifdef DLL_EXPORTS + #define F_API F_EXPORT F_CALL +#else + #define F_API F_CALL +#endif + +#define F_CALLBACK F_CALL + +/* + FMOD types. 
+*/ + +typedef int FMOD_BOOL; +typedef struct FMOD_SYSTEM FMOD_SYSTEM; +typedef struct FMOD_SOUND FMOD_SOUND; +typedef struct FMOD_CHANNELCONTROL FMOD_CHANNELCONTROL; +typedef struct FMOD_CHANNEL FMOD_CHANNEL; +typedef struct FMOD_CHANNELGROUP FMOD_CHANNELGROUP; +typedef struct FMOD_SOUNDGROUP FMOD_SOUNDGROUP; +typedef struct FMOD_REVERB3D FMOD_REVERB3D; +typedef struct FMOD_DSP FMOD_DSP; +typedef struct FMOD_DSPCONNECTION FMOD_DSPCONNECTION; +typedef struct FMOD_POLYGON FMOD_POLYGON; +typedef struct FMOD_GEOMETRY FMOD_GEOMETRY; +typedef struct FMOD_SYNCPOINT FMOD_SYNCPOINT; +typedef struct FMOD_ASYNCREADINFO FMOD_ASYNCREADINFO; +typedef unsigned int FMOD_MODE; +typedef unsigned int FMOD_TIMEUNIT; +typedef unsigned int FMOD_INITFLAGS; +typedef unsigned int FMOD_DEBUG_FLAGS; +typedef unsigned int FMOD_MEMORY_TYPE; +typedef unsigned int FMOD_SYSTEM_CALLBACK_TYPE; +typedef unsigned int FMOD_CHANNELMASK; +typedef unsigned int FMOD_DRIVER_STATE; +typedef unsigned int FMOD_PORT_TYPE; +typedef unsigned long long FMOD_PORT_INDEX; + +/*$ fmod result start $*/ +/* +[ENUM] +[ + [DESCRIPTION] + error codes. Returned from every function. + + [REMARKS] + + [SEE_ALSO] +] +*/ +typedef enum +{ + FMOD_OK, /* No errors. */ + FMOD_ERR_BADCOMMAND, /* Tried to call a function on a data type that does not allow this type of functionality (ie calling Sound::lock on a streaming sound). */ + FMOD_ERR_CHANNEL_ALLOC, /* Error trying to allocate a channel. */ + FMOD_ERR_CHANNEL_STOLEN, /* The specified channel has been reused to play another sound. */ + FMOD_ERR_DMA, /* DMA Failure. See debug output for more information. */ + FMOD_ERR_DSP_CONNECTION, /* DSP connection error. Connection possibly caused a cyclic dependency or connected dsps with incompatible buffer counts. */ + FMOD_ERR_DSP_DONTPROCESS, /* DSP return code from a DSP process query callback. Tells mixer not to call the process callback and therefore not consume CPU. Use this to optimize the DSP graph. 
*/ + FMOD_ERR_DSP_FORMAT, /* DSP Format error. A DSP unit may have attempted to connect to this network with the wrong format, or a matrix may have been set with the wrong size if the target unit has a specified channel map. */ + FMOD_ERR_DSP_INUSE, /* DSP is already in the mixer's DSP network. It must be removed before being reinserted or released. */ + FMOD_ERR_DSP_NOTFOUND, /* DSP connection error. Couldn't find the DSP unit specified. */ + FMOD_ERR_DSP_RESERVED, /* DSP operation error. Cannot perform operation on this DSP as it is reserved by the system. */ + FMOD_ERR_DSP_SILENCE, /* DSP return code from a DSP process query callback. Tells mixer silence would be produced from read, so go idle and not consume CPU. Use this to optimize the DSP graph. */ + FMOD_ERR_DSP_TYPE, /* DSP operation cannot be performed on a DSP of this type. */ + FMOD_ERR_FILE_BAD, /* Error loading file. */ + FMOD_ERR_FILE_COULDNOTSEEK, /* Couldn't perform seek operation. This is a limitation of the medium (ie netstreams) or the file format. */ + FMOD_ERR_FILE_DISKEJECTED, /* Media was ejected while reading. */ + FMOD_ERR_FILE_EOF, /* End of file unexpectedly reached while trying to read essential data (truncated?). */ + FMOD_ERR_FILE_ENDOFDATA, /* End of current chunk reached while trying to read data. */ + FMOD_ERR_FILE_NOTFOUND, /* File not found. */ + FMOD_ERR_FORMAT, /* Unsupported file or audio format. */ + FMOD_ERR_HEADER_MISMATCH, /* There is a version mismatch between the FMOD header and either the FMOD Studio library or the FMOD Low Level library. */ + FMOD_ERR_HTTP, /* A HTTP error occurred. This is a catch-all for HTTP errors not listed elsewhere. */ + FMOD_ERR_HTTP_ACCESS, /* The specified resource requires authentication or is forbidden. */ + FMOD_ERR_HTTP_PROXY_AUTH, /* Proxy authentication is required to access the specified resource. */ + FMOD_ERR_HTTP_SERVER_ERROR, /* A HTTP server error occurred. */ + FMOD_ERR_HTTP_TIMEOUT, /* The HTTP request timed out. 
*/ + FMOD_ERR_INITIALIZATION, /* FMOD was not initialized correctly to support this function. */ + FMOD_ERR_INITIALIZED, /* Cannot call this command after System::init. */ + FMOD_ERR_INTERNAL, /* An error occurred that wasn't supposed to. Contact support. */ + FMOD_ERR_INVALID_FLOAT, /* Value passed in was a NaN, Inf or denormalized float. */ + FMOD_ERR_INVALID_HANDLE, /* An invalid object handle was used. */ + FMOD_ERR_INVALID_PARAM, /* An invalid parameter was passed to this function. */ + FMOD_ERR_INVALID_POSITION, /* An invalid seek position was passed to this function. */ + FMOD_ERR_INVALID_SPEAKER, /* An invalid speaker was passed to this function based on the current speaker mode. */ + FMOD_ERR_INVALID_SYNCPOINT, /* The syncpoint did not come from this sound handle. */ + FMOD_ERR_INVALID_THREAD, /* Tried to call a function on a thread that is not supported. */ + FMOD_ERR_INVALID_VECTOR, /* The vectors passed in are not unit length, or perpendicular. */ + FMOD_ERR_MAXAUDIBLE, /* Reached maximum audible playback count for this sound's soundgroup. */ + FMOD_ERR_MEMORY, /* Not enough memory or resources. */ + FMOD_ERR_MEMORY_CANTPOINT, /* Can't use FMOD_OPENMEMORY_POINT on non PCM source data, or non mp3/xma/adpcm data if FMOD_CREATECOMPRESSEDSAMPLE was used. */ + FMOD_ERR_NEEDS3D, /* Tried to call a command on a 2d sound when the command was meant for 3d sound. */ + FMOD_ERR_NEEDSHARDWARE, /* Tried to use a feature that requires hardware support. */ + FMOD_ERR_NET_CONNECT, /* Couldn't connect to the specified host. */ + FMOD_ERR_NET_SOCKET_ERROR, /* A socket error occurred. This is a catch-all for socket-related errors not listed elsewhere. */ + FMOD_ERR_NET_URL, /* The specified URL couldn't be resolved. */ + FMOD_ERR_NET_WOULD_BLOCK, /* Operation on a non-blocking socket could not complete immediately. */ + FMOD_ERR_NOTREADY, /* Operation could not be performed because specified sound/DSP connection is not ready. 
*/ + FMOD_ERR_OUTPUT_ALLOCATED, /* Error initializing output device, but more specifically, the output device is already in use and cannot be reused. */ + FMOD_ERR_OUTPUT_CREATEBUFFER, /* Error creating hardware sound buffer. */ + FMOD_ERR_OUTPUT_DRIVERCALL, /* A call to a standard soundcard driver failed, which could possibly mean a bug in the driver or resources were missing or exhausted. */ + FMOD_ERR_OUTPUT_FORMAT, /* Soundcard does not support the specified format. */ + FMOD_ERR_OUTPUT_INIT, /* Error initializing output device. */ + FMOD_ERR_OUTPUT_NODRIVERS, /* The output device has no drivers installed. If pre-init, FMOD_OUTPUT_NOSOUND is selected as the output mode. If post-init, the function just fails. */ + FMOD_ERR_PLUGIN, /* An unspecified error has been returned from a plugin. */ + FMOD_ERR_PLUGIN_MISSING, /* A requested output, dsp unit type or codec was not available. */ + FMOD_ERR_PLUGIN_RESOURCE, /* A resource that the plugin requires cannot be found. (ie the DLS file for MIDI playback) */ + FMOD_ERR_PLUGIN_VERSION, /* A plugin was built with an unsupported SDK version. */ + FMOD_ERR_RECORD, /* An error occurred trying to initialize the recording device. */ + FMOD_ERR_REVERB_CHANNELGROUP, /* Reverb properties cannot be set on this channel because a parent channelgroup owns the reverb connection. */ + FMOD_ERR_REVERB_INSTANCE, /* Specified instance in FMOD_REVERB_PROPERTIES couldn't be set. Most likely because it is an invalid instance number or the reverb doesn't exist. */ + FMOD_ERR_SUBSOUNDS, /* The error occurred because the sound referenced contains subsounds when it shouldn't have, or it doesn't contain subsounds when it should have. The operation may also not be able to be performed on a parent sound. */ + FMOD_ERR_SUBSOUND_ALLOCATED, /* This subsound is already being used by another sound, you cannot have more than one parent to a sound. Null out the other parent's entry first. 
*/ + FMOD_ERR_SUBSOUND_CANTMOVE, /* Shared subsounds cannot be replaced or moved from their parent stream, such as when the parent stream is an FSB file. */ + FMOD_ERR_TAGNOTFOUND, /* The specified tag could not be found or there are no tags. */ + FMOD_ERR_TOOMANYCHANNELS, /* The sound created exceeds the allowable input channel count. This can be increased using the 'maxinputchannels' parameter in System::setSoftwareFormat. */ + FMOD_ERR_TRUNCATED, /* The retrieved string is too long to fit in the supplied buffer and has been truncated. */ + FMOD_ERR_UNIMPLEMENTED, /* Something in FMOD hasn't been implemented when it should be! contact support! */ + FMOD_ERR_UNINITIALIZED, /* This command failed because System::init or System::setDriver was not called. */ + FMOD_ERR_UNSUPPORTED, /* A command issued was not supported by this object. Possibly a plugin without certain callbacks specified. */ + FMOD_ERR_VERSION, /* The version number of this file format is not supported. */ + FMOD_ERR_EVENT_ALREADY_LOADED, /* The specified bank has already been loaded. */ + FMOD_ERR_EVENT_LIVEUPDATE_BUSY, /* The live update connection failed due to the game already being connected. */ + FMOD_ERR_EVENT_LIVEUPDATE_MISMATCH, /* The live update connection failed due to the game data being out of sync with the tool. */ + FMOD_ERR_EVENT_LIVEUPDATE_TIMEOUT, /* The live update connection timed out. */ + FMOD_ERR_EVENT_NOTFOUND, /* The requested event, bus or vca could not be found. */ + FMOD_ERR_STUDIO_UNINITIALIZED, /* The Studio::System object is not yet initialized. */ + FMOD_ERR_STUDIO_NOT_LOADED, /* The specified resource is not loaded, so it can't be unloaded. */ + FMOD_ERR_INVALID_STRING, /* An invalid string was passed to this function. */ + FMOD_ERR_ALREADY_LOCKED, /* The specified resource is already locked. */ + FMOD_ERR_NOT_LOCKED, /* The specified resource is not locked, so it can't be unlocked. 
*/ + FMOD_ERR_RECORD_DISCONNECTED, /* The specified recording driver has been disconnected. */ + FMOD_ERR_TOOMANYSAMPLES, /* The length provided exceeds the allowable limit. */ + + FMOD_RESULT_FORCEINT = 65536 /* Makes sure this enum is signed 32bit. */ +} FMOD_RESULT; +/*$ fmod result end $*/ + + +/* +[ENUM] +[ + [DESCRIPTION] + Used to distinguish if a FMOD_CHANNELCONTROL parameter is actually a channel or a channelgroup. + + [REMARKS] + Cast the FMOD_CHANNELCONTROL to an FMOD_CHANNEL/FMOD::Channel, or FMOD_CHANNELGROUP/FMOD::ChannelGroup if specific functionality is needed for either class. + Otherwise use as FMOD_CHANNELCONTROL/FMOD::ChannelControl and use that API. + + [SEE_ALSO] + Channel::setCallback + ChannelGroup::setCallback +] +*/ +typedef enum +{ + FMOD_CHANNELCONTROL_CHANNEL, + FMOD_CHANNELCONTROL_CHANNELGROUP, + + FMOD_CHANNELCONTROL_FORCEINT = 65536 /* Makes sure this enum is signed 32bit. */ +} FMOD_CHANNELCONTROL_TYPE; + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Structure describing a point in 3D space. + + [REMARKS] + FMOD uses a left handed co-ordinate system by default.
+ To use a right handed co-ordinate system specify FMOD_INIT_3D_RIGHTHANDED from FMOD_INITFLAGS in System::init. + + [SEE_ALSO] + System::set3DListenerAttributes + System::get3DListenerAttributes + Channel::set3DAttributes + Channel::get3DAttributes + Channel::set3DCustomRolloff + Channel::get3DCustomRolloff + Sound::set3DCustomRolloff + Sound::get3DCustomRolloff + Geometry::addPolygon + Geometry::setPolygonVertex + Geometry::getPolygonVertex + Geometry::setRotation + Geometry::getRotation + Geometry::setPosition + Geometry::getPosition + Geometry::setScale + Geometry::getScale + FMOD_INITFLAGS +] +*/ +typedef struct +{ + float x; /* X co-ordinate in 3D space. */ + float y; /* Y co-ordinate in 3D space. */ + float z; /* Z co-ordinate in 3D space. */ +} FMOD_VECTOR; + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Structure describing a position, velocity and orientation. + + [REMARKS] + + [SEE_ALSO] + FMOD_VECTOR + FMOD_DSP_PARAMETER_3DATTRIBUTES +] +*/ +typedef struct FMOD_3D_ATTRIBUTES +{ + FMOD_VECTOR position; /* The position of the object in world space, measured in distance units. */ + FMOD_VECTOR velocity; /* The velocity of the object measured in distance units **per second**. */ + FMOD_VECTOR forward; /* The forwards orientation of the object. This vector must be of unit length (1.0) and perpendicular to the up vector. */ + FMOD_VECTOR up; /* The upwards orientation of the object. This vector must be of unit length (1.0) and perpendicular to the forward vector. */ +} FMOD_3D_ATTRIBUTES; + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Structure describing a globally unique identifier. + + [REMARKS] + + [SEE_ALSO] + System::getDriverInfo +] +*/ +typedef struct FMOD_GUID +{ + unsigned int Data1; /* Specifies the first 8 hexadecimal digits of the GUID */ + unsigned short Data2; /* Specifies the first group of 4 hexadecimal digits. */ + unsigned short Data3; /* Specifies the second group of 4 hexadecimal digits. */ + unsigned char Data4[8]; /* Array of 8 bytes. 
The first 2 bytes contain the third group of 4 hexadecimal digits. The remaining 6 bytes contain the final 12 hexadecimal digits. */ +} FMOD_GUID; + +typedef void (F_CALLBACK *FMOD_FILE_ASYNCDONE_FUNC) (FMOD_ASYNCREADINFO *info, FMOD_RESULT result); + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Structure that is passed into FMOD_FILE_ASYNCREAD_CALLBACK. Use the information in this structure to perform + + [REMARKS] + Members marked with [r] mean the variable is modified by FMOD and is for reading purposes only. Do not change this value.
+ Members marked with [w] mean the variable can be written to. The user can set the value.
+
+ Instructions: write to 'buffer', and 'bytesread' BEFORE calling 'done'.
+ As soon as done is called, FMOD will asynchronously continue internally using the data provided in this structure.
+
+ Set result in the 'done' function pointer to the result expected from a normal file read callback.
+ If the read was successful, set it to FMOD_OK.
+ If it read some data but hit the end of the file, set it to FMOD_ERR_FILE_EOF.
+ If a bad error occurred, return FMOD_ERR_FILE_BAD.
+ If a disk was ejected, return FMOD_ERR_FILE_DISKEJECTED.
+ + [SEE_ALSO] + FMOD_FILE_ASYNCREAD_CALLBACK + FMOD_FILE_ASYNCCANCEL_CALLBACK + FMOD_FILE_ASYNCDONE_FUNC +] +*/ +struct FMOD_ASYNCREADINFO +{ + void *handle; /* [r] The file handle that was filled out in the open callback. */ + unsigned int offset; /* [r] Seek position, make sure you read from this file offset. */ + unsigned int sizebytes; /* [r] how many bytes requested for read. */ + int priority; /* [r] 0 = low importance. 100 = extremely important (ie 'must read now or stuttering may occur') */ + + void *userdata; /* [r/w] User data pointer specific to this request. Initially 0, can be ignored or set by the user. Not related to the file's main userdata member. */ + + void *buffer; /* [w] Buffer to read file data into. */ + unsigned int bytesread; /* [w] Fill this in before setting result code to tell FMOD how many bytes were read. */ + + FMOD_FILE_ASYNCDONE_FUNC done; /* [r] FMOD file system wake up function. Call this when user file read is finished. Pass result of file read as a parameter. */ +}; + + +/* +[ENUM] +[ + [DESCRIPTION] + These output types are used with System::setOutput / System::getOutput, to choose which output method to use. + + [REMARKS] + To pass information to the driver when initializing fmod use the *extradriverdata* parameter in System::init for the following reasons. + + - FMOD_OUTPUTTYPE_WAVWRITER - extradriverdata is a pointer to a char * file name that the wav writer will output to. + - FMOD_OUTPUTTYPE_WAVWRITER_NRT - extradriverdata is a pointer to a char * file name that the wav writer will output to. + - FMOD_OUTPUTTYPE_DSOUND - extradriverdata is cast to a HWND type, so that FMOD can set the focus on the audio for a particular window. + - FMOD_OUTPUTTYPE_PS3 - extradriverdata is a pointer to a FMOD_PS3_EXTRADRIVERDATA struct. This can be found in fmodps3.h. + - FMOD_OUTPUTTYPE_XAUDIO - (Xbox360) extradriverdata is a pointer to a FMOD_360_EXTRADRIVERDATA struct. This can be found in fmodxbox360.h. 
+ + Currently these are the only FMOD drivers that take extra information. Other unknown plugins may have different requirements. + + Note! If FMOD_OUTPUTTYPE_WAVWRITER_NRT or FMOD_OUTPUTTYPE_NOSOUND_NRT are used, and if the System::update function is being called + very quickly (ie for a non realtime decode) it may be being called too quickly for the FMOD streamer thread to respond to. + The result will be a skipping/stuttering output in the captured audio. + + To remedy this, disable the FMOD streamer thread, and use FMOD_INIT_STREAM_FROM_UPDATE to avoid skipping in the output stream, + as it will lock the mixer and the streamer together in the same thread. + + [SEE_ALSO] + System::setOutput + System::getOutput + System::init + System::update +] +*/ +typedef enum +{ + FMOD_OUTPUTTYPE_AUTODETECT, /* Picks the best output mode for the platform. This is the default. */ + + FMOD_OUTPUTTYPE_UNKNOWN, /* All - 3rd party plugin, unknown. This is for use with System::getOutput only. */ + FMOD_OUTPUTTYPE_NOSOUND, /* All - Perform all mixing but discard the final output. */ + FMOD_OUTPUTTYPE_WAVWRITER, /* All - Writes output to a .wav file. */ + FMOD_OUTPUTTYPE_NOSOUND_NRT, /* All - Non-realtime version of FMOD_OUTPUTTYPE_NOSOUND. User can drive mixer with System::update at whatever rate they want. */ + FMOD_OUTPUTTYPE_WAVWRITER_NRT, /* All - Non-realtime version of FMOD_OUTPUTTYPE_WAVWRITER. User can drive mixer with System::update at whatever rate they want. */ + + FMOD_OUTPUTTYPE_DSOUND, /* Win - Direct Sound. (Default on Windows XP and below) */ + FMOD_OUTPUTTYPE_WINMM, /* Win - Windows Multimedia. */ + FMOD_OUTPUTTYPE_WASAPI, /* Win/WinStore/XboxOne - Windows Audio Session API. (Default on Windows Vista and above, Xbox One and Windows Store Applications) */ + FMOD_OUTPUTTYPE_ASIO, /* Win - Low latency ASIO 2.0. */ + FMOD_OUTPUTTYPE_PULSEAUDIO, /* Linux - Pulse Audio. 
(Default on Linux if available) */ + FMOD_OUTPUTTYPE_ALSA, /* Linux - Advanced Linux Sound Architecture. (Default on Linux if PulseAudio isn't available) */ + FMOD_OUTPUTTYPE_COREAUDIO, /* Mac/iOS - Core Audio. (Default on Mac and iOS) */ + FMOD_OUTPUTTYPE_XAUDIO, /* Xbox 360 - XAudio. (Default on Xbox 360) */ + FMOD_OUTPUTTYPE_PS3, /* PS3 - Audio Out. (Default on PS3) */ + FMOD_OUTPUTTYPE_AUDIOTRACK, /* Android - Java Audio Track. (Default on Android 2.2 and below) */ + FMOD_OUTPUTTYPE_OPENSL, /* Android - OpenSL ES. (Default on Android 2.3 and above) */ + FMOD_OUTPUTTYPE_WIIU, /* Wii U - AX. (Default on Wii U) */ + FMOD_OUTPUTTYPE_AUDIOOUT, /* PS4/PSVita - Audio Out. (Default on PS4 and PS Vita) */ + FMOD_OUTPUTTYPE_AUDIO3D, /* PS4 - Audio3D. */ + FMOD_OUTPUTTYPE_ATMOS, /* Win - Dolby Atmos (WASAPI). */ + FMOD_OUTPUTTYPE_WEBAUDIO, /* Web Browser - JavaScript webaudio output. (Default on JavaScript) */ + FMOD_OUTPUTTYPE_NNAUDIO, /* NX - NX nn::audio. (Default on NX)*/ + + FMOD_OUTPUTTYPE_MAX, /* Maximum number of output types supported. */ + FMOD_OUTPUTTYPE_FORCEINT = 65536 /* Makes sure this enum is signed 32bit. */ +} FMOD_OUTPUTTYPE; + + +/* +[ENUM] +[ + [DESCRIPTION] + Specify the destination of log output when using the logging version of FMOD. + + [REMARKS] + TTY destination can vary depending on platform, common examples include the + Visual Studio / Xcode output window, stderr and LogCat. + + [SEE_ALSO] + FMOD_Debug_Initialize +] +*/ +typedef enum +{ + FMOD_DEBUG_MODE_TTY, /* Default log location per platform, i.e. Visual Studio output window, stderr, LogCat, etc */ + FMOD_DEBUG_MODE_FILE, /* Write log to specified file path */ + FMOD_DEBUG_MODE_CALLBACK, /* Call specified callback with log information */ + + FMOD_DEBUG_MODE_FORCEINT = 65536 /* Makes sure this enum is signed 32bit. 
*/ +} FMOD_DEBUG_MODE; + + +/* +[DEFINE] +[ + [NAME] + FMOD_DEBUG_FLAGS + + [DESCRIPTION] + Specify the requested information to be output when using the logging version of FMOD. + + [REMARKS] + + [SEE_ALSO] + FMOD_Debug_Initialize +] +*/ +#define FMOD_DEBUG_LEVEL_NONE 0x00000000 /* Disable all messages */ +#define FMOD_DEBUG_LEVEL_ERROR 0x00000001 /* Enable only error messages. */ +#define FMOD_DEBUG_LEVEL_WARNING 0x00000002 /* Enable warning and error messages. */ +#define FMOD_DEBUG_LEVEL_LOG 0x00000004 /* Enable informational, warning and error messages (default). */ +#define FMOD_DEBUG_TYPE_MEMORY 0x00000100 /* Verbose logging for memory operations, only use this if you are debugging a memory related issue. */ +#define FMOD_DEBUG_TYPE_FILE 0x00000200 /* Verbose logging for file access, only use this if you are debugging a file related issue. */ +#define FMOD_DEBUG_TYPE_CODEC 0x00000400 /* Verbose logging for codec initialization, only use this if you are debugging a codec related issue. */ +#define FMOD_DEBUG_TYPE_TRACE 0x00000800 /* Verbose logging for internal errors, use this for tracking the origin of error codes. */ +#define FMOD_DEBUG_DISPLAY_TIMESTAMPS 0x00010000 /* Display the time stamp of the log message in milliseconds. */ +#define FMOD_DEBUG_DISPLAY_LINENUMBERS 0x00020000 /* Display the source code file and line number for where the message originated. */ +#define FMOD_DEBUG_DISPLAY_THREAD 0x00040000 /* Display the thread ID of the calling function that generated the message. */ +/* [DEFINE_END] */ + + +/* +[DEFINE] +[ + [NAME] + FMOD_MEMORY_TYPE + + [DESCRIPTION] + Bit fields for memory allocation type being passed into FMOD memory callbacks. + + [REMARKS] + Remember this is a bitfield. You may get more than 1 bit set (ie physical + persistent) so do not simply switch on the types! You must check each bit individually or clear out the bits that you do not want within the callback.
+ Bits can be excluded if you want during Memory_Initialize so that you never get them. + + [SEE_ALSO] + FMOD_MEMORY_ALLOC_CALLBACK + FMOD_MEMORY_REALLOC_CALLBACK + FMOD_MEMORY_FREE_CALLBACK + Memory_Initialize +] +*/ +#define FMOD_MEMORY_NORMAL 0x00000000 /* Standard memory. */ +#define FMOD_MEMORY_STREAM_FILE 0x00000001 /* Stream file buffer, size controllable with System::setStreamBufferSize. */ +#define FMOD_MEMORY_STREAM_DECODE 0x00000002 /* Stream decode buffer, size controllable with FMOD_CREATESOUNDEXINFO::decodebuffersize. */ +#define FMOD_MEMORY_SAMPLEDATA 0x00000004 /* Sample data buffer. Raw audio data, usually PCM/MPEG/ADPCM/XMA data. */ +#define FMOD_MEMORY_DSP_BUFFER 0x00000008 /* DSP memory block allocated when more than 1 output exists on a DSP node. */ +#define FMOD_MEMORY_PLUGIN 0x00000010 /* Memory allocated by a third party plugin. */ +#define FMOD_MEMORY_XBOX360_PHYSICAL 0x00100000 /* Requires XPhysicalAlloc / XPhysicalFree. */ +#define FMOD_MEMORY_PERSISTENT 0x00200000 /* Persistent memory. Memory will be freed when System::release is called. */ +#define FMOD_MEMORY_SECONDARY 0x00400000 /* Secondary memory. Allocation should be in secondary memory. For example RSX on the PS3. */ +#define FMOD_MEMORY_ALL 0xFFFFFFFF +/* [DEFINE_END] */ + + +/* +[ENUM] +[ + [DESCRIPTION] + These are speaker types defined for use with the System::setSoftwareFormat command. + + [REMARKS] + Note below the phrase 'sound channels' is used. These are the subchannels inside a sound, they are not related and + have nothing to do with the FMOD class "Channel".
+    For example a mono sound has 1 sound channel, a stereo sound has 2 sound channels, and an AC3 or 6 channel wav file has 6 "sound channels".
+
+ FMOD_SPEAKERMODE_RAW
+ ---------------------
+ This mode is for output devices that are not specifically mono/stereo/quad/surround/5.1 or 7.1, but are multichannel.
+ Use System::setSoftwareFormat to specify the number of speakers you want to address, otherwise it will default to 2 (stereo).
+ Sound channels map to speakers sequentially, so a mono sound maps to output speaker 0, stereo sound maps to output speaker 0 & 1.
+ The user assumes knowledge of the speaker order. FMOD_SPEAKER enumerations may not apply, so raw channel indices should be used.
+ Multichannel sounds map input channels to output channels 1:1.
+ Channel::setPan and Channel::setPanLevels do not work.
+ Speaker levels must be manually set with Channel::setPanMatrix.
+
+ FMOD_SPEAKERMODE_MONO
+ ---------------------
+ This mode is for a 1 speaker arrangement.
+ Panning does not work in this speaker mode.
+ Mono, stereo and multichannel sounds have each sound channel played on the one speaker unity.
+ Mix behavior for multichannel sounds can be set with Channel::setPanMatrix.
+ Channel::setPanLevels does not work.
+
+ FMOD_SPEAKERMODE_STEREO
+ -----------------------
+ This mode is for 2 speaker arrangements that have a left and right speaker.
+
+    Mono sounds default to an even distribution between left and right.  They can be panned with Channel::setPan.
+    Stereo sounds default to the middle, or full left in the left speaker and full right in the right speaker.
+    They can be cross faded with Channel::setPan.
+    Multichannel sounds have each sound channel played on each speaker at unity.
+    Mix behavior for multichannel sounds can be set with Channel::setPanMatrix.
+    Channel::setPanLevels works but only front left and right parameters are used, the rest are ignored.
+
+    FMOD_SPEAKERMODE_QUAD
+    ------------------------
+    This mode is for 4 speaker arrangements that have a front left, front right, surround left and a surround right speaker.
+
+    Mono sounds default to an even distribution between front left and front right.  They can be panned with Channel::setPan.
+    Stereo sounds default to the left sound channel played on the front left, and the right sound channel played on the front right.
+    They can be cross faded with Channel::setPan.
+    Multichannel sounds default to all of their sound channels being played on each speaker in order of input.
+    Mix behavior for multichannel sounds can be set with Channel::setPanMatrix.
+    Channel::setPanLevels works but rear left, rear right, center and lfe are ignored.
+
+    FMOD_SPEAKERMODE_SURROUND
+    ------------------------
+    This mode is for 5 speaker arrangements that have a left/right/center/surround left/surround right.
+
+    Mono sounds default to the center speaker.  They can be panned with Channel::setPan.
+    Stereo sounds default to the left sound channel played on the front left, and the right sound channel played on the front right.
+    They can be cross faded with Channel::setPan.
+    Multichannel sounds default to all of their sound channels being played on each speaker in order of input.
+    Mix behavior for multichannel sounds can be set with Channel::setPanMatrix.
+    Channel::setPanLevels works but rear left / rear right are ignored.
+
+    FMOD_SPEAKERMODE_5POINT1
+    ---------------------------------------------------------
+    This mode is for 5.1 speaker arrangements that have a left/right/center/surround left/surround right and a subwoofer speaker.
+
+    Mono sounds default to the center speaker.  They can be panned with Channel::setPan.
+    Stereo sounds default to the left sound channel played on the front left, and the right sound channel played on the front right.
+    They can be cross faded with Channel::setPan.
+    Multichannel sounds default to all of their sound channels being played on each speaker in order of input.
+    Mix behavior for multichannel sounds can be set with Channel::setPanMatrix.
+    Channel::setPanLevels works but rear left / rear right are ignored.
+
+    FMOD_SPEAKERMODE_7POINT1
+    ------------------------
+    This mode is for 7.1 speaker arrangements that have a left/right/center/surround left/surround right/rear left/rear right
+    and a subwoofer speaker.
+
+    Mono sounds default to the center speaker.  They can be panned with Channel::setPan.
+    Stereo sounds default to the left sound channel played on the front left, and the right sound channel played on the front right.
+    They can be cross faded with Channel::setPan.
+    Multichannel sounds default to all of their sound channels being played on each speaker in order of input.
+    Mix behavior for multichannel sounds can be set with Channel::setPanMatrix.
+    Channel::setPanLevels works and every parameter is used to set the balance of a sound in any speaker.
+
    + + [SEE_ALSO] + System::setSoftwareFormat + System::getSoftwareFormat + DSP::setChannelFormat +] +*/ +typedef enum +{ + FMOD_SPEAKERMODE_DEFAULT, /* Default speaker mode based on operating system/output mode. Windows = control panel setting, Xbox = 5.1, PS3 = 7.1 etc. */ + FMOD_SPEAKERMODE_RAW, /* There is no specific speakermode. Sound channels are mapped in order of input to output. Use System::setSoftwareFormat to specify speaker count. See remarks for more information. */ + FMOD_SPEAKERMODE_MONO, /* The speakers are monaural. */ + FMOD_SPEAKERMODE_STEREO, /* The speakers are stereo. */ + FMOD_SPEAKERMODE_QUAD, /* 4 speaker setup. This includes front left, front right, surround left, surround right. */ + FMOD_SPEAKERMODE_SURROUND, /* 5 speaker setup. This includes front left, front right, center, surround left, surround right. */ + FMOD_SPEAKERMODE_5POINT1, /* 5.1 speaker setup. This includes front left, front right, center, surround left, surround right and an LFE speaker. */ + FMOD_SPEAKERMODE_7POINT1, /* 7.1 speaker setup. This includes front left, front right, center, surround left, surround right, back left, back right and an LFE speaker. */ + + FMOD_SPEAKERMODE_MAX, /* Maximum number of speaker modes supported. */ + FMOD_SPEAKERMODE_FORCEINT = 65536 /* Makes sure this enum is signed 32bit. */ +} FMOD_SPEAKERMODE; + + +/* +[DEFINE] +[ + [NAME] + FMOD_MAX_CHANNEL_WIDTH + + [DESCRIPTION] + The maximum number of channels per frame of audio supported by audio files, buffers, connections and DSPs.
    + + [REMARKS] + + [SEE_ALSO] + FMOD_CHANNELORDER + FMOD_CREATESOUNDEXINFO + System::setSoftwareFormat + System::getDefaultMixMatrix + ChannelControl::setMixMatrix + ChannelControl::getMixMatrix + FMOD::DSP::setChannelFormat +] +*/ +#define FMOD_MAX_CHANNEL_WIDTH 32 +/* [DEFINE_END] */ + +/* +[DEFINE] +[ + [NAME] + FMOD_MAX_LISTENERS + + [DESCRIPTION] + The maximum number of listeners supported. + + [REMARKS] + + [SEE_ALSO] + System::set3DNumListeners + System::set3DListenerAttributes + System::get3DListenerAttributes +] +*/ +#define FMOD_MAX_LISTENERS 8 +/* [DEFINE_END] */ + + +/* +[ENUM] +[ + [DESCRIPTION] + Assigns an enumeration for a speaker index. + + [REMARKS] + + [SEE_ALSO] + System::setSpeakerPosition + System::getSpeakerPosition +] +*/ +typedef enum +{ + FMOD_SPEAKER_FRONT_LEFT, /* The front left speaker */ + FMOD_SPEAKER_FRONT_RIGHT, /* The front right speaker */ + FMOD_SPEAKER_FRONT_CENTER, /* The front center speaker */ + FMOD_SPEAKER_LOW_FREQUENCY, /* The LFE or 'subwoofer' speaker */ + FMOD_SPEAKER_SURROUND_LEFT, /* The surround left (usually to the side) speaker */ + FMOD_SPEAKER_SURROUND_RIGHT, /* The surround right (usually to the side) speaker */ + FMOD_SPEAKER_BACK_LEFT, /* The back left speaker */ + FMOD_SPEAKER_BACK_RIGHT, /* The back right speaker */ + + FMOD_SPEAKER_MAX, /* Maximum number of speaker types supported. */ + FMOD_SPEAKER_FORCEINT = 65536 /* Makes sure this enum is signed 32bit. */ +} FMOD_SPEAKER; + + +/* +[DEFINE] +[ + [NAME] + FMOD_CHANNELMASK + + [DESCRIPTION] + These are bitfields to describe for a certain number of channels in a signal, which channels are being represented.
    + For example, a signal could be 1 channel, but contain the LFE channel only.
    + + [REMARKS] + FMOD_CHANNELMASK_BACK_CENTER is not represented as an output speaker in fmod - but it is encountered in input formats and is down or upmixed appropriately to the nearest speakers.
    + + [SEE_ALSO] + DSP::setChannelFormat + DSP::getChannelFormat + FMOD_SPEAKERMODE +] +*/ +#define FMOD_CHANNELMASK_FRONT_LEFT 0x00000001 +#define FMOD_CHANNELMASK_FRONT_RIGHT 0x00000002 +#define FMOD_CHANNELMASK_FRONT_CENTER 0x00000004 +#define FMOD_CHANNELMASK_LOW_FREQUENCY 0x00000008 +#define FMOD_CHANNELMASK_SURROUND_LEFT 0x00000010 +#define FMOD_CHANNELMASK_SURROUND_RIGHT 0x00000020 +#define FMOD_CHANNELMASK_BACK_LEFT 0x00000040 +#define FMOD_CHANNELMASK_BACK_RIGHT 0x00000080 +#define FMOD_CHANNELMASK_BACK_CENTER 0x00000100 + +#define FMOD_CHANNELMASK_MONO (FMOD_CHANNELMASK_FRONT_LEFT) +#define FMOD_CHANNELMASK_STEREO (FMOD_CHANNELMASK_FRONT_LEFT | FMOD_CHANNELMASK_FRONT_RIGHT) +#define FMOD_CHANNELMASK_LRC (FMOD_CHANNELMASK_FRONT_LEFT | FMOD_CHANNELMASK_FRONT_RIGHT | FMOD_CHANNELMASK_FRONT_CENTER) +#define FMOD_CHANNELMASK_QUAD (FMOD_CHANNELMASK_FRONT_LEFT | FMOD_CHANNELMASK_FRONT_RIGHT | FMOD_CHANNELMASK_SURROUND_LEFT | FMOD_CHANNELMASK_SURROUND_RIGHT) +#define FMOD_CHANNELMASK_SURROUND (FMOD_CHANNELMASK_FRONT_LEFT | FMOD_CHANNELMASK_FRONT_RIGHT | FMOD_CHANNELMASK_FRONT_CENTER | FMOD_CHANNELMASK_SURROUND_LEFT | FMOD_CHANNELMASK_SURROUND_RIGHT) +#define FMOD_CHANNELMASK_5POINT1 (FMOD_CHANNELMASK_FRONT_LEFT | FMOD_CHANNELMASK_FRONT_RIGHT | FMOD_CHANNELMASK_FRONT_CENTER | FMOD_CHANNELMASK_LOW_FREQUENCY | FMOD_CHANNELMASK_SURROUND_LEFT | FMOD_CHANNELMASK_SURROUND_RIGHT) +#define FMOD_CHANNELMASK_5POINT1_REARS (FMOD_CHANNELMASK_FRONT_LEFT | FMOD_CHANNELMASK_FRONT_RIGHT | FMOD_CHANNELMASK_FRONT_CENTER | FMOD_CHANNELMASK_LOW_FREQUENCY | FMOD_CHANNELMASK_BACK_LEFT | FMOD_CHANNELMASK_BACK_RIGHT) +#define FMOD_CHANNELMASK_7POINT0 (FMOD_CHANNELMASK_FRONT_LEFT | FMOD_CHANNELMASK_FRONT_RIGHT | FMOD_CHANNELMASK_FRONT_CENTER | FMOD_CHANNELMASK_SURROUND_LEFT | FMOD_CHANNELMASK_SURROUND_RIGHT | FMOD_CHANNELMASK_BACK_LEFT | FMOD_CHANNELMASK_BACK_RIGHT) +#define FMOD_CHANNELMASK_7POINT1 (FMOD_CHANNELMASK_FRONT_LEFT | FMOD_CHANNELMASK_FRONT_RIGHT | 
FMOD_CHANNELMASK_FRONT_CENTER | FMOD_CHANNELMASK_LOW_FREQUENCY | FMOD_CHANNELMASK_SURROUND_LEFT | FMOD_CHANNELMASK_SURROUND_RIGHT | FMOD_CHANNELMASK_BACK_LEFT | FMOD_CHANNELMASK_BACK_RIGHT) +/* [DEFINE_END] */ + +/* +[ENUM] +[ + [DESCRIPTION] + When creating a multichannel sound, FMOD will pan them to their default speaker locations, for example a 6 channel sound will default to one channel per 5.1 output speaker.
    + Another example is a stereo sound. It will default to left = front left, right = front right.
    +
    + This is for sounds that are not 'default'. For example you might have a sound that is 6 channels but actually made up of 3 stereo pairs, that should all be located in front left, front right only. + + [REMARKS] + + [SEE_ALSO] + FMOD_CREATESOUNDEXINFO + FMOD_MAX_CHANNEL_WIDTH +] +*/ +typedef enum FMOD_CHANNELORDER +{ + FMOD_CHANNELORDER_DEFAULT, /* Left, Right, Center, LFE, Surround Left, Surround Right, Back Left, Back Right (see FMOD_SPEAKER enumeration) */ + FMOD_CHANNELORDER_WAVEFORMAT, /* Left, Right, Center, LFE, Back Left, Back Right, Surround Left, Surround Right (as per Microsoft .wav WAVEFORMAT structure master order) */ + FMOD_CHANNELORDER_PROTOOLS, /* Left, Center, Right, Surround Left, Surround Right, LFE */ + FMOD_CHANNELORDER_ALLMONO, /* Mono, Mono, Mono, Mono, Mono, Mono, ... (each channel all the way up to FMOD_MAX_CHANNEL_WIDTH channels are treated as if they were mono) */ + FMOD_CHANNELORDER_ALLSTEREO, /* Left, Right, Left, Right, Left, Right, ... (each pair of channels is treated as stereo all the way up to FMOD_MAX_CHANNEL_WIDTH channels) */ + FMOD_CHANNELORDER_ALSA, /* Left, Right, Surround Left, Surround Right, Center, LFE (as per Linux ALSA channel order) */ + + FMOD_CHANNELORDER_MAX, /* Maximum number of channel orderings supported. */ + FMOD_CHANNELORDER_FORCEINT = 65536 /* Makes sure this enum is signed 32bit. */ +} FMOD_CHANNELORDER; + + +/* +[ENUM] +[ + [DESCRIPTION] + These are plugin types defined for use with the System::getNumPlugins, + System::getPluginInfo and System::unloadPlugin functions. + + [REMARKS] + + [SEE_ALSO] + System::getNumPlugins + System::getPluginInfo + System::unloadPlugin +] +*/ +typedef enum +{ + FMOD_PLUGINTYPE_OUTPUT, /* The plugin type is an output module. FMOD mixed audio will play through one of these devices */ + FMOD_PLUGINTYPE_CODEC, /* The plugin type is a file format codec. FMOD will use these codecs to load file formats for playback. */ + FMOD_PLUGINTYPE_DSP, /* The plugin type is a DSP unit. 
FMOD will use these plugins as part of its DSP network to apply effects to output or generate sound in realtime. */ + + FMOD_PLUGINTYPE_MAX, /* Maximum number of plugin types supported. */ + FMOD_PLUGINTYPE_FORCEINT = 65536 /* Makes sure this enum is signed 32bit. */ +} FMOD_PLUGINTYPE; + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Used to support lists of plugins within the one file. + + [REMARKS] + The description field is either a pointer to FMOD_DSP_DESCRIPTION, FMOD_OUTPUT_DESCRIPTION, FMOD_CODEC_DESCRIPTION. + + This structure is returned from a plugin as a pointer to a list where the last entry has FMOD_PLUGINTYPE_MAX and + a null description pointer. + + [SEE_ALSO] + System::getNumNestedPlugins + System::getNestedPlugin +] +*/ +typedef struct FMOD_PLUGINLIST +{ + FMOD_PLUGINTYPE type; /* The plugin type */ + void* description; /* One of FMOD_DSP_DESCRIPTION, FMOD_OUTPUT_DESCRIPTION, FMOD_CODEC_DESCRIPTION */ +} FMOD_PLUGINLIST; + + +/* +[DEFINE] +[ + [NAME] + FMOD_INITFLAGS + + [DESCRIPTION] + Initialization flags. Use them with System::init in the *flags* parameter to change various behavior. + + [REMARKS] + Use System::setAdvancedSettings to adjust settings for some of the features that are enabled by these flags. + + [SEE_ALSO] + System::init + System::update + System::setAdvancedSettings + Channel::set3DOcclusion +] +*/ +#define FMOD_INIT_NORMAL 0x00000000 /* Initialize normally */ +#define FMOD_INIT_STREAM_FROM_UPDATE 0x00000001 /* No stream thread is created internally. Streams are driven from System::update. Mainly used with non-realtime outputs. */ +#define FMOD_INIT_MIX_FROM_UPDATE 0x00000002 /* No mixer thread is created internally. Mixing is driven from System::update. Only applies to polling based output modes such as FMOD_OUTPUTTYPE_NOSOUND, FMOD_OUTPUTTYPE_WAVWRITER, FMOD_OUTPUTTYPE_DSOUND, FMOD_OUTPUTTYPE_WINMM,FMOD_OUTPUTTYPE_XAUDIO. 
*/ +#define FMOD_INIT_3D_RIGHTHANDED 0x00000004 /* FMOD will treat +X as right, +Y as up and +Z as backwards (towards you). */ +#define FMOD_INIT_CHANNEL_LOWPASS 0x00000100 /* All FMOD_3D based voices will add a software lowpass filter effect into the DSP chain which is automatically used when Channel::set3DOcclusion is used or the geometry API. This also causes sounds to sound duller when the sound goes behind the listener, as a fake HRTF style effect. Use System::setAdvancedSettings to disable or adjust cutoff frequency for this feature. */ +#define FMOD_INIT_CHANNEL_DISTANCEFILTER 0x00000200 /* All FMOD_3D based voices will add a software lowpass and highpass filter effect into the DSP chain which will act as a distance-automated bandpass filter. Use System::setAdvancedSettings to adjust the center frequency. */ +#define FMOD_INIT_PROFILE_ENABLE 0x00010000 /* Enable TCP/IP based host which allows FMOD Designer or FMOD Profiler to connect to it, and view memory, CPU and the DSP network graph in real-time. */ +#define FMOD_INIT_VOL0_BECOMES_VIRTUAL 0x00020000 /* Any sounds that are 0 volume will go virtual and not be processed except for having their positions updated virtually. Use System::setAdvancedSettings to adjust what volume besides zero to switch to virtual at. */ +#define FMOD_INIT_GEOMETRY_USECLOSEST 0x00040000 /* With the geometry engine, only process the closest polygon rather than accumulating all polygons the sound to listener line intersects. */ +#define FMOD_INIT_PREFER_DOLBY_DOWNMIX 0x00080000 /* When using FMOD_SPEAKERMODE_5POINT1 with a stereo output device, use the Dolby Pro Logic II downmix algorithm instead of the SRS Circle Surround algorithm. */ +#define FMOD_INIT_THREAD_UNSAFE 0x00100000 /* Disables thread safety for API calls. Only use this if FMOD low level is being called from a single thread, and if Studio API is not being used! 
*/ +#define FMOD_INIT_PROFILE_METER_ALL 0x00200000 /* Slower, but adds level metering for every single DSP unit in the graph. Use DSP::setMeteringEnabled to turn meters off individually. */ +#define FMOD_INIT_DISABLE_SRS_HIGHPASSFILTER 0x00400000 /* Using FMOD_SPEAKERMODE_5POINT1 with a stereo output device will enable the SRS Circle Surround downmixer. By default the SRS downmixer applies a high pass filter with a cutoff frequency of 80Hz. Use this flag to diable the high pass fitler, or use FMOD_INIT_PREFER_DOLBY_DOWNMIX to use the Dolby Pro Logic II downmix algorithm instead. */ +/* [DEFINE_END] */ + + +/* +[ENUM] +[ + [DESCRIPTION] + These definitions describe the type of song being played. + + [REMARKS] + + [SEE_ALSO] + Sound::getFormat +] +*/ +typedef enum +{ + FMOD_SOUND_TYPE_UNKNOWN, /* 3rd party / unknown plugin format. */ + FMOD_SOUND_TYPE_AIFF, /* AIFF. */ + FMOD_SOUND_TYPE_ASF, /* Microsoft Advanced Systems Format (ie WMA/ASF/WMV). */ + FMOD_SOUND_TYPE_DLS, /* Sound font / downloadable sound bank. */ + FMOD_SOUND_TYPE_FLAC, /* FLAC lossless codec. */ + FMOD_SOUND_TYPE_FSB, /* FMOD Sample Bank. */ + FMOD_SOUND_TYPE_IT, /* Impulse Tracker. */ + FMOD_SOUND_TYPE_MIDI, /* MIDI. */ + FMOD_SOUND_TYPE_MOD, /* Protracker / Fasttracker MOD. */ + FMOD_SOUND_TYPE_MPEG, /* MP2/MP3 MPEG. */ + FMOD_SOUND_TYPE_OGGVORBIS, /* Ogg vorbis. */ + FMOD_SOUND_TYPE_PLAYLIST, /* Information only from ASX/PLS/M3U/WAX playlists */ + FMOD_SOUND_TYPE_RAW, /* Raw PCM data. */ + FMOD_SOUND_TYPE_S3M, /* ScreamTracker 3. */ + FMOD_SOUND_TYPE_USER, /* User created sound. */ + FMOD_SOUND_TYPE_WAV, /* Microsoft WAV. */ + FMOD_SOUND_TYPE_XM, /* FastTracker 2 XM. */ + FMOD_SOUND_TYPE_XMA, /* Xbox360 XMA */ + FMOD_SOUND_TYPE_AUDIOQUEUE, /* iPhone hardware decoder, supports AAC, ALAC and MP3. 
*/ + FMOD_SOUND_TYPE_AT9, /* PS4 / PSVita ATRAC 9 format */ + FMOD_SOUND_TYPE_VORBIS, /* Vorbis */ + FMOD_SOUND_TYPE_MEDIA_FOUNDATION,/* Windows Store Application built in system codecs */ + FMOD_SOUND_TYPE_MEDIACODEC, /* Android MediaCodec */ + FMOD_SOUND_TYPE_FADPCM, /* FMOD Adaptive Differential Pulse Code Modulation */ + + FMOD_SOUND_TYPE_MAX, /* Maximum number of sound types supported. */ + FMOD_SOUND_TYPE_FORCEINT = 65536 /* Makes sure this enum is signed 32bit. */ +} FMOD_SOUND_TYPE; + + +/* +[ENUM] +[ + [DESCRIPTION] + These definitions describe the native format of the hardware or software buffer that will be used. + + [REMARKS] + This is the format the native hardware or software buffer will be or is created in. + + [SEE_ALSO] + System::createSound + Sound::getFormat +] +*/ +typedef enum +{ + FMOD_SOUND_FORMAT_NONE, /* Unitialized / unknown. */ + FMOD_SOUND_FORMAT_PCM8, /* 8bit integer PCM data. */ + FMOD_SOUND_FORMAT_PCM16, /* 16bit integer PCM data. */ + FMOD_SOUND_FORMAT_PCM24, /* 24bit integer PCM data. */ + FMOD_SOUND_FORMAT_PCM32, /* 32bit integer PCM data. */ + FMOD_SOUND_FORMAT_PCMFLOAT, /* 32bit floating point PCM data. */ + FMOD_SOUND_FORMAT_BITSTREAM, /* Sound data is in its native compressed format. */ + + FMOD_SOUND_FORMAT_MAX, /* Maximum number of sound formats supported. */ + FMOD_SOUND_FORMAT_FORCEINT = 65536 /* Makes sure this enum is signed 32bit. */ +} FMOD_SOUND_FORMAT; + + +/* +[DEFINE] +[ + [NAME] + FMOD_MODE + + [DESCRIPTION] + Sound description bitfields, bitwise OR them together for loading and describing sounds. + + [REMARKS] + By default a sound will open as a static sound that is decompressed fully into memory to PCM. (ie equivalent of FMOD_CREATESAMPLE)
    + To have a sound stream instead, use FMOD_CREATESTREAM, or use the wrapper function System::createStream.
    + Some opening modes (ie FMOD_OPENUSER, FMOD_OPENMEMORY, FMOD_OPENMEMORY_POINT, FMOD_OPENRAW) will need extra information.
    + This can be provided using the FMOD_CREATESOUNDEXINFO structure. +
    + Specifying FMOD_OPENMEMORY_POINT will POINT to your memory rather allocating its own sound buffers and duplicating it internally.
    + This means you cannot free the memory while FMOD is using it, until after Sound::release is called. + With FMOD_OPENMEMORY_POINT, for PCM formats, only WAV, FSB, and RAW are supported. For compressed formats, only those formats supported by FMOD_CREATECOMPRESSEDSAMPLE are supported.
    + With FMOD_OPENMEMORY_POINT and FMOD_OPENRAW or PCM, if using them together, note that you must pad the data on each side by 16 bytes. This is so fmod can modify the ends of the data for looping/interpolation/mixing purposes. If a wav file, you will need to insert silence, and then reset loop points to stop the playback from playing that silence.
    +
    + Xbox 360 memory On Xbox 360 Specifying FMOD_OPENMEMORY_POINT to a virtual memory address will cause FMOD_ERR_INVALID_ADDRESS + to be returned. Use physical memory only for this functionality.
    +
    + FMOD_LOWMEM is used on a sound if you want to minimize the memory overhead, by having FMOD not allocate memory for certain + features that are not likely to be used in a game environment. These are :
    + 1. Sound::getName functionality is removed. 256 bytes per sound is saved.
    + + [SEE_ALSO] + System::createSound + System::createStream + Sound::setMode + Sound::getMode + Channel::setMode + Channel::getMode + Sound::set3DCustomRolloff + Channel::set3DCustomRolloff + Sound::getOpenState +] +*/ +#define FMOD_DEFAULT 0x00000000 /* Default for all modes listed below. FMOD_LOOP_OFF, FMOD_2D, FMOD_3D_WORLDRELATIVE, FMOD_3D_INVERSEROLLOFF */ +#define FMOD_LOOP_OFF 0x00000001 /* For non looping sounds. (DEFAULT). Overrides FMOD_LOOP_NORMAL / FMOD_LOOP_BIDI. */ +#define FMOD_LOOP_NORMAL 0x00000002 /* For forward looping sounds. */ +#define FMOD_LOOP_BIDI 0x00000004 /* For bidirectional looping sounds. (only works on software mixed static sounds). */ +#define FMOD_2D 0x00000008 /* Ignores any 3d processing. (DEFAULT). */ +#define FMOD_3D 0x00000010 /* Makes the sound positionable in 3D. Overrides FMOD_2D. */ +#define FMOD_CREATESTREAM 0x00000080 /* Decompress at runtime, streaming from the source provided (ie from disk). Overrides FMOD_CREATESAMPLE and FMOD_CREATECOMPRESSEDSAMPLE. Note a stream can only be played once at a time due to a stream only having 1 stream buffer and file handle. Open multiple streams to have them play concurrently. */ +#define FMOD_CREATESAMPLE 0x00000100 /* Decompress at loadtime, decompressing or decoding whole file into memory as the target sample format (ie PCM). Fastest for playback and most flexible. */ +#define FMOD_CREATECOMPRESSEDSAMPLE 0x00000200 /* Load MP2/MP3/FADPCM/IMAADPCM/Vorbis/AT9 or XMA into memory and leave it compressed. Vorbis/AT9/FADPCM encoding only supported in the .FSB container format. During playback the FMOD software mixer will decode it in realtime as a 'compressed sample'. Overrides FMOD_CREATESAMPLE. If the sound data is not one of the supported formats, it will behave as if it was created with FMOD_CREATESAMPLE and decode the sound into PCM. */ +#define FMOD_OPENUSER 0x00000400 /* Opens a user created static sample or stream. 
Use FMOD_CREATESOUNDEXINFO to specify format and/or read callbacks. If a user created 'sample' is created with no read callback, the sample will be empty. Use Sound::lock and Sound::unlock to place sound data into the sound if this is the case. */ +#define FMOD_OPENMEMORY 0x00000800 /* "name_or_data" will be interpreted as a pointer to memory instead of filename for creating sounds. Use FMOD_CREATESOUNDEXINFO to specify length. If used with FMOD_CREATESAMPLE or FMOD_CREATECOMPRESSEDSAMPLE, FMOD duplicates the memory into its own buffers. Your own buffer can be freed after open. If used with FMOD_CREATESTREAM, FMOD will stream out of the buffer whose pointer you passed in. In this case, your own buffer should not be freed until you have finished with and released the stream.*/ +#define FMOD_OPENMEMORY_POINT 0x10000000 /* "name_or_data" will be interpreted as a pointer to memory instead of filename for creating sounds. Use FMOD_CREATESOUNDEXINFO to specify length. This differs to FMOD_OPENMEMORY in that it uses the memory as is, without duplicating the memory into its own buffers. Cannot be freed after open, only after Sound::release. Will not work if the data is compressed and FMOD_CREATECOMPRESSEDSAMPLE is not used. */ +#define FMOD_OPENRAW 0x00001000 /* Will ignore file format and treat as raw pcm. Use FMOD_CREATESOUNDEXINFO to specify format. Requires at least defaultfrequency, numchannels and format to be specified before it will open. Must be little endian data. */ +#define FMOD_OPENONLY 0x00002000 /* Just open the file, dont prebuffer or read. Good for fast opens for info, or when sound::readData is to be used. */ +#define FMOD_ACCURATETIME 0x00004000 /* For System::createSound - for accurate Sound::getLength/Channel::setPosition on VBR MP3, and MOD/S3M/XM/IT/MIDI files. Scans file first, so takes longer to open. FMOD_OPENONLY does not affect this. */ +#define FMOD_MPEGSEARCH 0x00008000 /* For corrupted / bad MP3 files. 
This will search all the way through the file until it hits a valid MPEG header. Normally only searches for 4k. */ +#define FMOD_NONBLOCKING 0x00010000 /* For opening sounds and getting streamed subsounds (seeking) asyncronously. Use Sound::getOpenState to poll the state of the sound as it opens or retrieves the subsound in the background. */ +#define FMOD_UNIQUE 0x00020000 /* Unique sound, can only be played one at a time */ +#define FMOD_3D_HEADRELATIVE 0x00040000 /* Make the sound's position, velocity and orientation relative to the listener. */ +#define FMOD_3D_WORLDRELATIVE 0x00080000 /* Make the sound's position, velocity and orientation absolute (relative to the world). (DEFAULT) */ +#define FMOD_3D_INVERSEROLLOFF 0x00100000 /* This sound will follow the inverse rolloff model where mindistance = full volume, maxdistance = where sound stops attenuating, and rolloff is fixed according to the global rolloff factor. (DEFAULT) */ +#define FMOD_3D_LINEARROLLOFF 0x00200000 /* This sound will follow a linear rolloff model where mindistance = full volume, maxdistance = silence. */ +#define FMOD_3D_LINEARSQUAREROLLOFF 0x00400000 /* This sound will follow a linear-square rolloff model where mindistance = full volume, maxdistance = silence. */ +#define FMOD_3D_INVERSETAPEREDROLLOFF 0x00800000 /* This sound will follow the inverse rolloff model at distances close to mindistance and a linear-square rolloff close to maxdistance. */ +#define FMOD_3D_CUSTOMROLLOFF 0x04000000 /* This sound will follow a rolloff model defined by Sound::set3DCustomRolloff / Channel::set3DCustomRolloff. */ +#define FMOD_3D_IGNOREGEOMETRY 0x40000000 /* Is not affect by geometry occlusion. If not specified in Sound::setMode, or Channel::setMode, the flag is cleared and it is affected by geometry again. 
*/ +/* Unused 0x01000000 Used to be FMOD_UNICODE */ +#define FMOD_IGNORETAGS 0x02000000 /* Skips id3v2/asf/etc tag checks when opening a sound, to reduce seek/read overhead when opening files (helps with CD performance). */ +#define FMOD_LOWMEM 0x08000000 /* Removes some features from samples to give a lower memory overhead, like Sound::getName. See remarks. */ +#define FMOD_LOADSECONDARYRAM 0x20000000 /* Load sound into the secondary RAM of supported platform. On PS3, sounds will be loaded into RSX/VRAM. */ +#define FMOD_VIRTUAL_PLAYFROMSTART 0x80000000 /* For sounds that start virtual (due to being quiet or low importance), instead of swapping back to audible, and playing at the correct offset according to time, this flag makes the sound play from the start. */ +/* [DEFINE_END] */ + + +/* +[ENUM] +[ + [DESCRIPTION] + These values describe what state a sound is in after FMOD_NONBLOCKING has been used to open it. + + [REMARKS] + With streams, if you are using FMOD_NONBLOCKING, note that if the user calls Sound::getSubSound, a stream will go into FMOD_OPENSTATE_SEEKING state and sound related commands will return FMOD_ERR_NOTREADY.
    + With streams, if you are using FMOD_NONBLOCKING, note that if the user calls Channel::getPosition, a stream will go into FMOD_OPENSTATE_SETPOSITION state and sound related commands will return FMOD_ERR_NOTREADY.
    + + [SEE_ALSO] + Sound::getOpenState + FMOD_MODE +] +*/ +typedef enum +{ + FMOD_OPENSTATE_READY = 0, /* Opened and ready to play. */ + FMOD_OPENSTATE_LOADING, /* Initial load in progress. */ + FMOD_OPENSTATE_ERROR, /* Failed to open - file not found, out of memory etc. See return value of Sound::getOpenState for what happened. */ + FMOD_OPENSTATE_CONNECTING, /* Connecting to remote host (internet sounds only). */ + FMOD_OPENSTATE_BUFFERING, /* Buffering data. */ + FMOD_OPENSTATE_SEEKING, /* Seeking to subsound and re-flushing stream buffer. */ + FMOD_OPENSTATE_PLAYING, /* Ready and playing, but not possible to release at this time without stalling the main thread. */ + FMOD_OPENSTATE_SETPOSITION, /* Seeking within a stream to a different position. */ + + FMOD_OPENSTATE_MAX, /* Maximum number of open state types. */ + FMOD_OPENSTATE_FORCEINT = 65536 /* Makes sure this enum is signed 32bit. */ +} FMOD_OPENSTATE; + + +/* +[ENUM] +[ + [DESCRIPTION] + These values are used with SoundGroup::setMaxAudibleBehavior to determine what happens when more sounds + are played than are specified with SoundGroup::setMaxAudible. + + [REMARKS] + When using FMOD_SOUNDGROUP_BEHAVIOR_MUTE, SoundGroup::setMuteFadeSpeed can be used to stop a sudden transition. + Instead, the time specified will be used to cross fade between the sounds that go silent and the ones that become audible. + + [SEE_ALSO] + SoundGroup::setMaxAudibleBehavior + SoundGroup::getMaxAudibleBehavior + SoundGroup::setMaxAudible + SoundGroup::getMaxAudible + SoundGroup::setMuteFadeSpeed + SoundGroup::getMuteFadeSpeed +] +*/ +typedef enum +{ + FMOD_SOUNDGROUP_BEHAVIOR_FAIL, /* Any sound played that puts the sound count over the SoundGroup::setMaxAudible setting, will simply fail during System::playSound. 
*/ + FMOD_SOUNDGROUP_BEHAVIOR_MUTE, /* Any sound played that puts the sound count over the SoundGroup::setMaxAudible setting, will be silent, then if another sound in the group stops the sound that was silent before becomes audible again. */ + FMOD_SOUNDGROUP_BEHAVIOR_STEALLOWEST, /* Any sound played that puts the sound count over the SoundGroup::setMaxAudible setting, will steal the quietest / least important sound playing in the group. */ + + FMOD_SOUNDGROUP_BEHAVIOR_MAX, /* Maximum number of sound group behaviors. */ + FMOD_SOUNDGROUP_BEHAVIOR_FORCEINT = 65536 /* Makes sure this enum is signed 32bit. */ +} FMOD_SOUNDGROUP_BEHAVIOR; + + +/* +[ENUM] +[ + [DESCRIPTION] + These callback types are used with Channel::setCallback. + + [REMARKS] + Each callback has commanddata parameters passed as int unique to the type of callback.
    + See reference to FMOD_CHANNELCONTROL_CALLBACK to determine what they might mean for each type of callback.
    +
    + Note! Currently the user must call System::update for these callbacks to trigger! + + [SEE_ALSO] + Channel::setCallback + ChannelGroup::setCallback + FMOD_CHANNELCONTROL_CALLBACK + System::update +] +*/ +typedef enum +{ + FMOD_CHANNELCONTROL_CALLBACK_END, /* Called when a sound ends. */ + FMOD_CHANNELCONTROL_CALLBACK_VIRTUALVOICE, /* Called when a voice is swapped out or swapped in. */ + FMOD_CHANNELCONTROL_CALLBACK_SYNCPOINT, /* Called when a syncpoint is encountered. Can be from wav file markers. */ + FMOD_CHANNELCONTROL_CALLBACK_OCCLUSION, /* Called when the channel has its geometry occlusion value calculated. Can be used to clamp or change the value. */ + + FMOD_CHANNELCONTROL_CALLBACK_MAX, /* Maximum number of callback types supported. */ + FMOD_CHANNELCONTROL_CALLBACK_FORCEINT = 65536 /* Makes sure this enum is signed 32bit. */ +} FMOD_CHANNELCONTROL_CALLBACK_TYPE; + + +/* +[ENUM] +[ + [DESCRIPTION] + These enums denote special types of node within a DSP chain. + + [REMARKS] + By default there is 1 fader for a ChannelGroup or Channel, and it is the head. + + [SEE_ALSO] + Channel::getDSP + ChannelGroup::getDSP + ChannelControl::getNumDSPs + ChannelControl::setDSPIndex +] +*/ +typedef enum +{ + FMOD_CHANNELCONTROL_DSP_HEAD = -1, /* Head of the DSP chain. Equivalent of index 0. */ + FMOD_CHANNELCONTROL_DSP_FADER = -2, /* Built in fader DSP. */ + FMOD_CHANNELCONTROL_DSP_TAIL = -3, /* Tail of the DSP chain. Equivalent of the number of dsps minus 1. */ + + FMOD_CHANNELCONTROL_DSP_FORCEINT = 65536 /* Makes sure this enum is signed 32bit. */ +} FMOD_CHANNELCONTROL_DSP_INDEX; + +/* +[ENUM] +[ + [DESCRIPTION] + Used to distinguish the instance type passed into FMOD_ERROR_CALLBACK. + + [REMARKS] + Cast the instance of FMOD_ERROR_CALLBACK to the appropriate class indicated by this enum. 
+ + [SEE_ALSO] +] +*/ +typedef enum +{ + FMOD_ERRORCALLBACK_INSTANCETYPE_NONE, + FMOD_ERRORCALLBACK_INSTANCETYPE_SYSTEM, + FMOD_ERRORCALLBACK_INSTANCETYPE_CHANNEL, + FMOD_ERRORCALLBACK_INSTANCETYPE_CHANNELGROUP, + FMOD_ERRORCALLBACK_INSTANCETYPE_CHANNELCONTROL, + FMOD_ERRORCALLBACK_INSTANCETYPE_SOUND, + FMOD_ERRORCALLBACK_INSTANCETYPE_SOUNDGROUP, + FMOD_ERRORCALLBACK_INSTANCETYPE_DSP, + FMOD_ERRORCALLBACK_INSTANCETYPE_DSPCONNECTION, + FMOD_ERRORCALLBACK_INSTANCETYPE_GEOMETRY, + FMOD_ERRORCALLBACK_INSTANCETYPE_REVERB3D, + FMOD_ERRORCALLBACK_INSTANCETYPE_STUDIO_SYSTEM, + FMOD_ERRORCALLBACK_INSTANCETYPE_STUDIO_EVENTDESCRIPTION, + FMOD_ERRORCALLBACK_INSTANCETYPE_STUDIO_EVENTINSTANCE, + FMOD_ERRORCALLBACK_INSTANCETYPE_STUDIO_PARAMETERINSTANCE, + FMOD_ERRORCALLBACK_INSTANCETYPE_STUDIO_BUS, + FMOD_ERRORCALLBACK_INSTANCETYPE_STUDIO_VCA, + FMOD_ERRORCALLBACK_INSTANCETYPE_STUDIO_BANK, + FMOD_ERRORCALLBACK_INSTANCETYPE_STUDIO_COMMANDREPLAY, + + FMOD_ERRORCALLBACK_INSTANCETYPE_FORCEINT = 65536 /* Makes sure this enum is signed 32bit. */ +} FMOD_ERRORCALLBACK_INSTANCETYPE; + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Structure that is passed into FMOD_SYSTEM_CALLBACK for the FMOD_SYSTEM_CALLBACK_ERROR callback type. + + [REMARKS] + The instance pointer will be a type corresponding to the instanceType enum. + + [SEE_ALSO] + FMOD_ERRORCALLBACK_INSTANCETYPE +] +*/ +typedef struct +{ + FMOD_RESULT result; /* Error code result */ + FMOD_ERRORCALLBACK_INSTANCETYPE instancetype; /* Type of instance the error occurred on */ + void *instance; /* Instance pointer */ + const char *functionname; /* Function that the error occurred on */ + const char *functionparams; /* Function parameters that the error ocurred on */ +} FMOD_ERRORCALLBACK_INFO; + +/* +[DEFINE] +[ + [NAME] + FMOD_SYSTEM_CALLBACK_TYPE + + [DESCRIPTION] + These callback types are used with System::setCallback. + + [REMARKS] + Each callback has commanddata parameters passed as void* unique to the type of callback.
    + See reference to FMOD_SYSTEM_CALLBACK to determine what they might mean for each type of callback.
    +
    + Note! Using FMOD_SYSTEM_CALLBACK_DEVICELISTCHANGED (Windows only) will disable any automated device ejection/insertion handling by FMOD. Use this callback to control the behaviour yourself.
    +
    + Note! Using FMOD_SYSTEM_CALLBACK_DEVICELISTCHANGED (on Mac only) requires the application to be running an event loop which will allow external changes to device list to be detected by FMOD.
    +
    + Note! The 'system' object pointer will be null for FMOD_SYSTEM_CALLBACK_MEMORYALLOCATIONFAILED callback. + + [SEE_ALSO] + System::setCallback + System::update + DSP::addInput +] +*/ +#define FMOD_SYSTEM_CALLBACK_DEVICELISTCHANGED 0x00000001 /* Called from System::update when the enumerated list of devices has changed. */ +#define FMOD_SYSTEM_CALLBACK_DEVICELOST 0x00000002 /* Called from System::update when an output device has been lost due to control panel parameter changes and FMOD cannot automatically recover. */ +#define FMOD_SYSTEM_CALLBACK_MEMORYALLOCATIONFAILED 0x00000004 /* Called directly when a memory allocation fails somewhere in FMOD. (NOTE - 'system' will be NULL in this callback type.)*/ +#define FMOD_SYSTEM_CALLBACK_THREADCREATED 0x00000008 /* Called directly when a thread is created. */ +#define FMOD_SYSTEM_CALLBACK_BADDSPCONNECTION 0x00000010 /* Called when a bad connection was made with DSP::addInput. Usually called from mixer thread because that is where the connections are made. */ +#define FMOD_SYSTEM_CALLBACK_PREMIX 0x00000020 /* Called each tick before a mix update happens. */ +#define FMOD_SYSTEM_CALLBACK_POSTMIX 0x00000040 /* Called each tick after a mix update happens. */ +#define FMOD_SYSTEM_CALLBACK_ERROR 0x00000080 /* Called when each API function returns an error code, including delayed async functions. */ +#define FMOD_SYSTEM_CALLBACK_MIDMIX 0x00000100 /* Called each tick in mix update after clocks have been updated before the main mix occurs. */ +#define FMOD_SYSTEM_CALLBACK_THREADDESTROYED 0x00000200 /* Called directly when a thread is destroyed. */ +#define FMOD_SYSTEM_CALLBACK_PREUPDATE 0x00000400 /* Called at start of System::update function. */ +#define FMOD_SYSTEM_CALLBACK_POSTUPDATE 0x00000800 /* Called at end of System::update function. */ +#define FMOD_SYSTEM_CALLBACK_RECORDLISTCHANGED 0x00001000 /* Called from System::update when the enumerated list of recording devices has changed. 
*/ +#define FMOD_SYSTEM_CALLBACK_ALL 0xFFFFFFFF /* Pass this mask to System::setCallback to receive all callback types. */ + +/* [DEFINE_END] */ + + +/* + FMOD Callbacks +*/ +typedef FMOD_RESULT (F_CALLBACK *FMOD_DEBUG_CALLBACK) (FMOD_DEBUG_FLAGS flags, const char *file, int line, const char *func, const char *message); + +typedef FMOD_RESULT (F_CALLBACK *FMOD_SYSTEM_CALLBACK) (FMOD_SYSTEM *system, FMOD_SYSTEM_CALLBACK_TYPE type, void *commanddata1, void *commanddata2, void *userdata); + +typedef FMOD_RESULT (F_CALLBACK *FMOD_CHANNELCONTROL_CALLBACK) (FMOD_CHANNELCONTROL *channelcontrol, FMOD_CHANNELCONTROL_TYPE controltype, FMOD_CHANNELCONTROL_CALLBACK_TYPE callbacktype, void *commanddata1, void *commanddata2); + +typedef FMOD_RESULT (F_CALLBACK *FMOD_SOUND_NONBLOCK_CALLBACK) (FMOD_SOUND *sound, FMOD_RESULT result); +typedef FMOD_RESULT (F_CALLBACK *FMOD_SOUND_PCMREAD_CALLBACK) (FMOD_SOUND *sound, void *data, unsigned int datalen); +typedef FMOD_RESULT (F_CALLBACK *FMOD_SOUND_PCMSETPOS_CALLBACK) (FMOD_SOUND *sound, int subsound, unsigned int position, FMOD_TIMEUNIT postype); + +typedef FMOD_RESULT (F_CALLBACK *FMOD_FILE_OPEN_CALLBACK) (const char *name, unsigned int *filesize, void **handle, void *userdata); +typedef FMOD_RESULT (F_CALLBACK *FMOD_FILE_CLOSE_CALLBACK) (void *handle, void *userdata); +typedef FMOD_RESULT (F_CALLBACK *FMOD_FILE_READ_CALLBACK) (void *handle, void *buffer, unsigned int sizebytes, unsigned int *bytesread, void *userdata); +typedef FMOD_RESULT (F_CALLBACK *FMOD_FILE_SEEK_CALLBACK) (void *handle, unsigned int pos, void *userdata); +typedef FMOD_RESULT (F_CALLBACK *FMOD_FILE_ASYNCREAD_CALLBACK) (FMOD_ASYNCREADINFO *info, void *userdata); +typedef FMOD_RESULT (F_CALLBACK *FMOD_FILE_ASYNCCANCEL_CALLBACK)(FMOD_ASYNCREADINFO *info, void *userdata); + +typedef void * (F_CALLBACK *FMOD_MEMORY_ALLOC_CALLBACK) (unsigned int size, FMOD_MEMORY_TYPE type, const char *sourcestr); +typedef void * (F_CALLBACK *FMOD_MEMORY_REALLOC_CALLBACK) (void *ptr, 
unsigned int size, FMOD_MEMORY_TYPE type, const char *sourcestr); +typedef void (F_CALLBACK *FMOD_MEMORY_FREE_CALLBACK) (void *ptr, FMOD_MEMORY_TYPE type, const char *sourcestr); + +typedef float (F_CALLBACK *FMOD_3D_ROLLOFF_CALLBACK) (FMOD_CHANNELCONTROL *channelcontrol, float distance); + + + + +/* +[ENUM] +[ + [DESCRIPTION] + List of interpolation types that the FMOD Studio software mixer supports. + + [REMARKS] + The default resampler type is FMOD_DSP_RESAMPLER_LINEAR.
    + Use System::setAdvancedSettings and the resamplerMethod member to tell FMOD the resampling quality you require for sample rate conversion during sound playback. + + [SEE_ALSO] + System::setAdvancedSettings + System::setAdvancedSettings + FMOD_ADVANCEDSETINGS +] +*/ +typedef enum +{ + FMOD_DSP_RESAMPLER_DEFAULT, /* Default interpolation method. Currently equal to FMOD_DSP_RESAMPLER_LINEAR. */ + FMOD_DSP_RESAMPLER_NOINTERP, /* No interpolation. High frequency aliasing hiss will be audible depending on the sample rate of the sound. */ + FMOD_DSP_RESAMPLER_LINEAR, /* Linear interpolation (default method). Fast and good quality, causes very slight lowpass effect on low frequency sounds. */ + FMOD_DSP_RESAMPLER_CUBIC, /* Cubic interpolation. Slower than linear interpolation but better quality. */ + FMOD_DSP_RESAMPLER_SPLINE, /* 5 point spline interpolation. Slowest resampling method but best quality. */ + + FMOD_DSP_RESAMPLER_MAX, /* Maximum number of resample methods supported. */ + FMOD_DSP_RESAMPLER_FORCEINT = 65536 /* Makes sure this enum is signed 32bit. */ +} FMOD_DSP_RESAMPLER; + + +/* +[ENUM] +[ + [DESCRIPTION] + List of connection types between 2 DSP nodes. + + [REMARKS] + FMOD_DSP_CONNECTION_TYPE_STANDARD
    + ----------------------------------
    + Default DSPConnection type. Audio is mixed from the input to the output DSP's audible buffer, meaning it will be part of the audible signal. A standard connection will execute its input DSP if it has not been executed before.
    +
    + FMOD_DSP_CONNECTION_TYPE_SIDECHAIN
    + ----------------------------------
    + Sidechain DSPConnection type. Audio is mixed from the input to the output DSP's sidechain buffer, meaning it will NOT be part of the audible signal. A sidechain connection will execute its input DSP if it has not been executed before.
    + The purpose of the seperate sidechain buffer in a DSP, is so that the DSP effect can privately access for analysis purposes. An example of use in this case, could be a compressor which analyzes the signal, to control its own effect parameters (ie a compression level or gain).
    +
    + For the effect developer, to accept sidechain data, the sidechain data will appear in the FMOD_DSP_STATE struct which is passed into the read callback of a DSP unit.
    + FMOD_DSP_STATE::sidechaindata and FMOD_DSP::sidechainchannels will hold the mixed result of any sidechain data flowing into it.
    +
    + FMOD_DSP_CONNECTION_TYPE_SEND
    + -----------------------------
    + Send DSPConnection type. Audio is mixed from the input to the output DSP's audible buffer, meaning it will be part of the audible signal. A send connection will NOT execute its input DSP if it has not been executed before.
    + A send connection will only read what exists at the input's buffer at the time of executing the output DSP unit (which can be considered the 'return')
    +
    + FMOD_DSP_CONNECTION_TYPE_SEND_SIDECHAIN
    + ---------------------------------------
    + Send sidechain DSPConnection type. Audio is mixed from the input to the output DSP's sidechain buffer, meaning it will NOT be part of the audible signal. A send sidechain connection will NOT execute its input DSP if it has not been executed before.
    + A send sidechain connection will only read what exists at the input's buffer at the time of executing the output DSP unit (which can be considered the 'sidechain return'). +
    + For the effect developer, to accept sidechain data, the sidechain data will appear in the FMOD_DSP_STATE struct which is passed into the read callback of a DSP unit.
    + FMOD_DSP_STATE::sidechaindata and FMOD_DSP::sidechainchannels will hold the mixed result of any sidechain data flowing into it. + + [SEE_ALSO] + DSP::addInput + DSPConnection::getType +] +*/ +typedef enum +{ + FMOD_DSPCONNECTION_TYPE_STANDARD, /* Default connection type. Audio is mixed from the input to the output DSP's audible buffer. */ + FMOD_DSPCONNECTION_TYPE_SIDECHAIN, /* Sidechain connection type. Audio is mixed from the input to the output DSP's sidechain buffer. */ + FMOD_DSPCONNECTION_TYPE_SEND, /* Send connection type. Audio is mixed from the input to the output DSP's audible buffer, but the input is NOT executed, only copied from. A standard connection or sidechain needs to make an input execute to generate data. */ + FMOD_DSPCONNECTION_TYPE_SEND_SIDECHAIN, /* Send sidechain connection type. Audio is mixed from the input to the output DSP's sidechain buffer, but the input is NOT executed, only copied from. A standard connection or sidechain needs to make an input execute to generate data. */ + + FMOD_DSPCONNECTION_TYPE_MAX, /* Maximum number of DSP connection types supported. */ + FMOD_DSPCONNECTION_TYPE_FORCEINT = 65536 /* Makes sure this enum is signed 32bit. */ +} FMOD_DSPCONNECTION_TYPE; + + +/* +[ENUM] +[ + [DESCRIPTION] + List of tag types that could be stored within a sound. These include id3 tags, metadata from netstreams and vorbis/asf data. + + [REMARKS] + + [SEE_ALSO] + Sound::getTag +] +*/ +typedef enum +{ + FMOD_TAGTYPE_UNKNOWN = 0, + FMOD_TAGTYPE_ID3V1, + FMOD_TAGTYPE_ID3V2, + FMOD_TAGTYPE_VORBISCOMMENT, + FMOD_TAGTYPE_SHOUTCAST, + FMOD_TAGTYPE_ICECAST, + FMOD_TAGTYPE_ASF, + FMOD_TAGTYPE_MIDI, + FMOD_TAGTYPE_PLAYLIST, + FMOD_TAGTYPE_FMOD, + FMOD_TAGTYPE_USER, + + FMOD_TAGTYPE_MAX, /* Maximum number of tag types supported. */ + FMOD_TAGTYPE_FORCEINT = 65536 /* Makes sure this enum is signed 32bit. 
*/ +} FMOD_TAGTYPE; + + +/* +[ENUM] +[ + [DESCRIPTION] + List of data types that can be returned by Sound::getTag + + [REMARKS] + + [SEE_ALSO] + Sound::getTag +] +*/ +typedef enum +{ + FMOD_TAGDATATYPE_BINARY = 0, + FMOD_TAGDATATYPE_INT, + FMOD_TAGDATATYPE_FLOAT, + FMOD_TAGDATATYPE_STRING, + FMOD_TAGDATATYPE_STRING_UTF16, + FMOD_TAGDATATYPE_STRING_UTF16BE, + FMOD_TAGDATATYPE_STRING_UTF8, + FMOD_TAGDATATYPE_CDTOC, + + FMOD_TAGDATATYPE_MAX, /* Maximum number of tag datatypes supported. */ + FMOD_TAGDATATYPE_FORCEINT = 65536 /* Makes sure this enum is signed 32bit. */ +} FMOD_TAGDATATYPE; + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Structure describing a piece of tag data. + + [REMARKS] + Members marked with [r] mean the variable is modified by FMOD and is for reading purposes only. Do not change this value.
    + Members marked with [w] mean the variable can be written to. The user can set the value.
    + + [SEE_ALSO] + Sound::getTag + FMOD_TAGTYPE + FMOD_TAGDATATYPE +] +*/ +typedef struct FMOD_TAG +{ + FMOD_TAGTYPE type; /* [r] The type of this tag. */ + FMOD_TAGDATATYPE datatype; /* [r] The type of data that this tag contains */ + char *name; /* [r] The name of this tag i.e. "TITLE", "ARTIST" etc. */ + void *data; /* [r] Pointer to the tag data - its format is determined by the datatype member */ + unsigned int datalen; /* [r] Length of the data contained in this tag */ + FMOD_BOOL updated; /* [r] True if this tag has been updated since last being accessed with Sound::getTag */ +} FMOD_TAG; + + +/* +[DEFINE] +[ + [NAME] + FMOD_TIMEUNIT + + [DESCRIPTION] + List of time types that can be returned by Sound::getLength and used with Channel::setPosition or Channel::getPosition. + + [REMARKS] + Do not combine flags except FMOD_TIMEUNIT_BUFFERED. + + [SEE_ALSO] + Sound::getLength + Channel::setPosition + Channel::getPosition +] +*/ +#define FMOD_TIMEUNIT_MS 0x00000001 /* Milliseconds. */ +#define FMOD_TIMEUNIT_PCM 0x00000002 /* PCM samples, related to milliseconds * samplerate / 1000. */ +#define FMOD_TIMEUNIT_PCMBYTES 0x00000004 /* Bytes, related to PCM samples * channels * datawidth (ie 16bit = 2 bytes). */ +#define FMOD_TIMEUNIT_RAWBYTES 0x00000008 /* Raw file bytes of (compressed) sound data (does not include headers). Only used by Sound::getLength and Channel::getPosition. */ +#define FMOD_TIMEUNIT_PCMFRACTION 0x00000010 /* Fractions of 1 PCM sample. Unsigned int range 0 to 0xFFFFFFFF. Used for sub-sample granularity for DSP purposes. */ +#define FMOD_TIMEUNIT_MODORDER 0x00000100 /* MOD/S3M/XM/IT. Order in a sequenced module format. Use Sound::getFormat to determine the PCM format being decoded to. */ +#define FMOD_TIMEUNIT_MODROW 0x00000200 /* MOD/S3M/XM/IT. Current row in a sequenced module format. Sound::getLength will return the number of rows in the currently playing or seeked to pattern. */ +#define FMOD_TIMEUNIT_MODPATTERN 0x00000400 /* MOD/S3M/XM/IT. 
Current pattern in a sequenced module format. Sound::getLength will return the number of patterns in the song and Channel::getPosition will return the currently playing pattern. */ +#define FMOD_TIMEUNIT_BUFFERED 0x10000000 /* Time value as seen by buffered stream. This is always ahead of audible time, and is only used for processing. */ +/* [DEFINE_END] */ + +/* +[DEFINE] +[ + [NAME] + FMOD_PORT_INDEX + + [DESCRIPTION] + + [REMARKS] + + [SEE_ALSO] + System::AttachChannelGroupToPort +] +*/ +#define FMOD_PORT_INDEX_NONE 0xFFFFFFFFFFFFFFFF /* Use when a port index is not required */ +/* [DEFINE_END] */ + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Use this structure with System::createSound when more control is needed over loading. + The possible reasons to use this with System::createSound are: + + - Loading a file from memory. + - Loading a file from within another larger (possibly wad/pak) file, by giving the loader an offset and length. + - To create a user created / non file based sound. + - To specify a starting subsound to seek to within a multi-sample sounds (ie FSB/DLS) when created as a stream. + - To specify which subsounds to load for multi-sample sounds (ie FSB/DLS) so that memory is saved and only a subset is actually loaded/read from disk. + - To specify 'piggyback' read and seek callbacks for capture of sound data as fmod reads and decodes it. Useful for ripping decoded PCM data from sounds as they are loaded / played. + - To specify a MIDI DLS sample set file to load when opening a MIDI file. + + See below on what members to fill for each of the above types of sound you want to create. + + [REMARKS] + This structure is optional! Specify 0 or NULL in System::createSound if you don't need it! + + Loading a file from memory. + + - Create the sound using the FMOD_OPENMEMORY flag. + - Mandatory. Specify 'length' for the size of the memory block in bytes. + - Other flags are optional. 
+ + Loading a file from within another larger (possibly wad/pak) file, by giving the loader an offset and length. + + - Mandatory. Specify 'fileoffset' and 'length'. + - Other flags are optional. + + To create a user created / non file based sound. + + - Create the sound using the FMOD_OPENUSER flag. + - Mandatory. Specify 'defaultfrequency, 'numchannels' and 'format'. + - Other flags are optional. + + To specify a starting subsound to seek to and flush with, within a multi-sample stream (ie FSB/DLS). + + - Mandatory. Specify 'initialsubsound'. + + To specify which subsounds to load for multi-sample sounds (ie FSB/DLS) so that memory is saved and only a subset is actually loaded/read from disk. + + - Mandatory. Specify 'inclusionlist' and 'inclusionlistnum'. + + To specify 'piggyback' read and seek callbacks for capture of sound data as fmod reads and decodes it. Useful for ripping decoded PCM data from sounds as they are loaded / played. + + - Mandatory. Specify 'pcmreadcallback' and 'pcmseekcallback'. + + To specify a MIDI DLS sample set file to load when opening a MIDI file. + + - Mandatory. Specify 'dlsname'. + + Setting the 'decodebuffersize' is for cpu intensive codecs that may be causing stuttering, not file intensive codecs (ie those from CD or netstreams) which are normally + altered with System::setStreamBufferSize. As an example of cpu intensive codecs, an mp3 file will take more cpu to decode than a PCM wav file. + + If you have a stuttering effect, then it is using more cpu than the decode buffer playback rate can keep up with. Increasing the decode buffersize will most likely solve this problem. + + FSB codec. If inclusionlist and numsubsounds are used together, this will trigger a special mode where subsounds are shuffled down to save memory. (useful for large FSB + files where you only want to load 1 sound). There will be no gaps, ie no null subsounds. 
As an example, if there are 10,000 subsounds and there is an inclusionlist with only 1 entry, + and numsubsounds = 1, then subsound 0 will be that entry, and there will only be the memory allocated for 1 subsound. Previously there would still be 10,000 subsound pointers and other + associated codec entries allocated along with it multiplied by 10,000. + + Members marked with [r] mean the variable is modified by FMOD and is for reading purposes only. Do not change this value.
    + Members marked with [w] mean the variable can be written to. The user can set the value. + + [SEE_ALSO] + System::createSound + System::setStreamBufferSize + FMOD_MODE + FMOD_SOUND_FORMAT + FMOD_SOUND_TYPE + FMOD_CHANNELMASK + FMOD_CHANNELORDER + FMOD_MAX_CHANNEL_WIDTH +] +*/ +typedef struct FMOD_CREATESOUNDEXINFO +{ + int cbsize; /* [w] Size of this structure. This is used so the structure can be expanded in the future and still work on older versions of FMOD Studio. */ + unsigned int length; /* [w] Optional. Specify 0 to ignore. Number of bytes to load starting at 'fileoffset', or size of sound to create (if FMOD_OPENUSER is used). Required if loading from memory. If 0 is specified, then it will use the size of the file (unless loading from memory then an error will be returned). */ + unsigned int fileoffset; /* [w] Optional. Specify 0 to ignore. Offset from start of the file to start loading from. This is useful for loading files from inside big data files. */ + int numchannels; /* [w] Optional. Specify 0 to ignore. Number of channels in a sound mandatory if FMOD_OPENUSER or FMOD_OPENRAW is used. Can be specified up to FMOD_MAX_CHANNEL_WIDTH. */ + int defaultfrequency; /* [w] Optional. Specify 0 to ignore. Default frequency of sound in Hz, mandatory if FMOD_OPENUSER or FMOD_OPENRAW is used. Other formats use the frequency determined by the file format. */ + FMOD_SOUND_FORMAT format; /* [w] Optional. Specify 0 or FMOD_SOUND_FORMAT_NONE to ignore. Format of the sound, mandatory if FMOD_OPENUSER or FMOD_OPENRAW is used. Other formats use the format determined by the file format. */ + unsigned int decodebuffersize; /* [w] Optional. Specify 0 to ignore. For streams. This determines the size of the double buffer (in PCM samples) that a stream uses. Use this for user created streams if you want to determine the size of the callback buffer passed to you. Specify 0 to use FMOD's default size which is currently equivalent to 400ms of the sound format created/loaded. 
*/ + int initialsubsound; /* [w] Optional. Specify 0 to ignore. In a multi-sample file format such as .FSB/.DLS, specify the initial subsound to seek to, only if FMOD_CREATESTREAM is used. */ + int numsubsounds; /* [w] Optional. Specify 0 to ignore or have no subsounds. In a sound created with FMOD_OPENUSER, specify the number of subsounds that are accessable with Sound::getSubSound. If not created with FMOD_OPENUSER, this will limit the number of subsounds loaded within a multi-subsound file. If using FSB, then if FMOD_CREATESOUNDEXINFO::inclusionlist is used, this will shuffle subsounds down so that there are not any gaps. It will mean that the indices of the sounds will be different. */ + int *inclusionlist; /* [w] Optional. Specify 0 to ignore. In a multi-sample format such as .FSB/.DLS it may be desirable to specify only a subset of sounds to be loaded out of the whole file. This is an array of subsound indices to load into memory when created. */ + int inclusionlistnum; /* [w] Optional. Specify 0 to ignore. This is the number of integers contained within the inclusionlist array. */ + FMOD_SOUND_PCMREAD_CALLBACK pcmreadcallback; /* [w] Optional. Specify 0 to ignore. Callback to 'piggyback' on FMOD's read functions and accept or even write PCM data while FMOD is opening the sound. Used for user sounds created with FMOD_OPENUSER or for capturing decoded data as FMOD reads it. */ + FMOD_SOUND_PCMSETPOS_CALLBACK pcmsetposcallback; /* [w] Optional. Specify 0 to ignore. Callback for when the user calls a seeking function such as Channel::setTime or Channel::setPosition within a multi-sample sound, and for when it is opened.*/ + FMOD_SOUND_NONBLOCK_CALLBACK nonblockcallback; /* [w] Optional. Specify 0 to ignore. Callback for successful completion, or error while loading a sound that used the FMOD_NONBLOCKING flag. Also called duing seeking, when setPosition is called or a stream is restarted. */ + const char *dlsname; /* [w] Optional. Specify 0 to ignore. 
Filename for a DLS sample set when loading a MIDI file. If not specified, on Windows it will attempt to open /windows/system32/drivers/gm.dls or /windows/system32/drivers/etc/gm.dls, on Mac it will attempt to load /System/Library/Components/CoreAudio.component/Contents/Resources/gs_instruments.dls, otherwise the MIDI will fail to open. Current DLS support is for level 1 of the specification. */ + const char *encryptionkey; /* [w] Optional. Specify 0 to ignore. Key for encrypted FSB file. Without this key an encrypted FSB file will not load. */ + int maxpolyphony; /* [w] Optional. Specify 0 to ignore. For sequenced formats with dynamic channel allocation such as .MID and .IT, this specifies the maximum voice count allowed while playing. .IT defaults to 64. .MID defaults to 32. */ + void *userdata; /* [w] Optional. Specify 0 to ignore. This is user data to be attached to the sound during creation. Access via Sound::getUserData. Note: This is not passed to FMOD_FILE_OPEN_CALLBACK - use fileuserdata for that. */ + FMOD_SOUND_TYPE suggestedsoundtype; /* [w] Optional. Specify 0 or FMOD_SOUND_TYPE_UNKNOWN to ignore. Instead of scanning all codec types, use this to speed up loading by making it jump straight to this codec. */ + FMOD_FILE_OPEN_CALLBACK fileuseropen; /* [w] Optional. Specify 0 to ignore. Callback for opening this file. */ + FMOD_FILE_CLOSE_CALLBACK fileuserclose; /* [w] Optional. Specify 0 to ignore. Callback for closing this file. */ + FMOD_FILE_READ_CALLBACK fileuserread; /* [w] Optional. Specify 0 to ignore. Callback for reading from this file. */ + FMOD_FILE_SEEK_CALLBACK fileuserseek; /* [w] Optional. Specify 0 to ignore. Callback for seeking within this file. */ + FMOD_FILE_ASYNCREAD_CALLBACK fileuserasyncread; /* [w] Optional. Specify 0 to ignore. Callback for seeking within this file. */ + FMOD_FILE_ASYNCCANCEL_CALLBACK fileuserasynccancel;/* [w] Optional. Specify 0 to ignore. Callback for seeking within this file. 
*/ + void *fileuserdata; /* [w] Optional. Specify 0 to ignore. User data to be passed into the file callbacks. */ + int filebuffersize; /* [w] Optional. Specify 0 to ignore. Buffer size for reading the file, -1 to disable buffering, or 0 for system default. */ + FMOD_CHANNELORDER channelorder; /* [w] Optional. Specify 0 to ignore. Use this to differ the way fmod maps multichannel sounds to speakers. See FMOD_CHANNELORDER for more. */ + FMOD_CHANNELMASK channelmask; /* [w] Optional. Specify 0 to ignore. Use this to specify which channels map to which speakers. See FMOD_CHANNELMASK for more. */ + FMOD_SOUNDGROUP *initialsoundgroup; /* [w] Optional. Specify 0 to ignore. Specify a sound group if required, to put sound in as it is created. */ + unsigned int initialseekposition;/* [w] Optional. Specify 0 to ignore. For streams. Specify an initial position to seek the stream to. */ + FMOD_TIMEUNIT initialseekpostype; /* [w] Optional. Specify 0 to ignore. For streams. Specify the time unit for the position set in initialseekposition. */ + int ignoresetfilesystem;/* [w] Optional. Specify 0 to ignore. Set to 1 to use fmod's built in file system. Ignores setFileSystem callbacks and also FMOD_CREATESOUNEXINFO file callbacks. Useful for specific cases where you don't want to use your own file system but want to use fmod's file system (ie net streaming). */ + unsigned int audioqueuepolicy; /* [w] Optional. Specify 0 or FMOD_AUDIOQUEUE_CODECPOLICY_DEFAULT to ignore. Policy used to determine whether hardware or software is used for decoding, see FMOD_AUDIOQUEUE_CODECPOLICY for options (iOS >= 3.0 required, otherwise only hardware is available) */ + unsigned int minmidigranularity; /* [w] Optional. Specify 0 to ignore. Allows you to set a minimum desired MIDI mixer granularity. Values smaller than 512 give greater than default accuracy at the cost of more CPU and vice versa. Specify 0 for default (512 samples). */ + int nonblockthreadid; /* [w] Optional. Specify 0 to ignore. 
Specifies a thread index to execute non blocking load on. Allows for up to 5 threads to be used for loading at once. This is to avoid one load blocking another. Maximum value = 4. */ + FMOD_GUID *fsbguid; /* [r/w] Optional. Specify 0 to ignore. Allows you to provide the GUID lookup for cached FSB header info. Once loaded the GUID will be written back to the pointer. This is to avoid seeking and reading the FSB header. */ +} FMOD_CREATESOUNDEXINFO; + + +/* +[DEFINE] +[ + [NAME] + FMOD_REVERB_MAXINSTANCES + + [DESCRIPTION] + The maximum number of global/physical reverb instances. + + [REMARKS] + Each instance of a physical reverb is an instance of a FMOD_DSP_SFXREVERB dsp in the mix graph. + This is unrelated to the number of possible Reverb3D objects, which is unlimited. + + [SEE_ALSO] + ChannelControl::setReverbProperties + ChannelControl::setReverbProperties + System::setReverbProperties + System::getReverbProperties +] +*/ +#define FMOD_REVERB_MAXINSTANCES 4 +/* [DEFINE_END] */ + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Structure defining a reverb environment. + + [REMARKS] + Note the default reverb properties are the same as the FMOD_PRESET_GENERIC preset. + + All members are read/write [r/w], written to by FMOD when queried with System::getReverbProperties and read by FMOD when set with System::setReverbProperties. 
+ + [SEE_ALSO] + System::setReverbProperties + System::getReverbProperties + FMOD_REVERB_PRESETS +] +*/ +typedef struct FMOD_REVERB_PROPERTIES +{ /* MIN MAX DEFAULT DESCRIPTION */ + float DecayTime; /* [r/w] 0.0 20000.0 1500.0 Reverberation decay time (ms) */ + float EarlyDelay; /* [r/w] 0.0 300.0 7.0 Initial reflection delay time (ms) */ + float LateDelay; /* [r/w] 0.0 100 11.0 Late reverberation delay time relative to initial reflection (ms) */ + float HFReference; /* [r/w] 20.0 20000.0 5000 Reference high frequency (Hz) */ + float HFDecayRatio; /* [r/w] 10.0 100.0 50.0 High-frequency to mid-frequency decay time ratio (%) */ + float Diffusion; /* [r/w] 0.0 100.0 100.0 Value that controls the echo density in the late reverberation decay (%) */ + float Density; /* [r/w] 0.0 100.0 100.0 Value that controls the modal density in the late reverberation decay (%) */ + float LowShelfFrequency; /* [r/w] 20.0 1000.0 250.0 Reference low frequency (Hz) */ + float LowShelfGain; /* [r/w] -36.0 12.0 0.0 Relative room effect level at low frequencies (dB) */ + float HighCut; /* [r/w] 20.0 20000.0 20000.0 Relative room effect level at high frequencies (Hz) */ + float EarlyLateMix; /* [r/w] 0.0 100.0 50.0 Early reflections level relative to room effect (%) */ + float WetLevel; /* [r/w] -80.0 20.0 -6.0 Room effect level at mid frequencies (dB) */ +} FMOD_REVERB_PROPERTIES; + + +/* +[DEFINE] +[ + [NAME] + FMOD_REVERB_PRESETS + + [DESCRIPTION] + Sets of predefined reverb properties used to initialize an FMOD_REVERB_PROPERTIES structure statically. + + i.e. 
FMOD_REVERB_PROPERTIES prop = FMOD_PRESET_GENERIC; + + [REMARKS] + + [SEE_ALSO] + System::setReverbProperties + System::getReverbProperties +] +*/ +/* Decay LateDly HFDecay Densty LoGain E/L-Mix + EarlyDly HFRef Diffus LoFreq HiCut WetLvl */ +#define FMOD_PRESET_OFF { 1000, 7, 11, 5000, 100, 100, 100, 250, 0, 20, 96, -80.0f } /* Off / disabled */ +#define FMOD_PRESET_GENERIC { 1500, 7, 11, 5000, 83, 100, 100, 250, 0, 14500, 96, -8.0f } /* Generic / default */ +#define FMOD_PRESET_PADDEDCELL { 170, 1, 2, 5000, 10, 100, 100, 250, 0, 160, 84, -7.8f } /* Padded cell */ +#define FMOD_PRESET_ROOM { 400, 2, 3, 5000, 83, 100, 100, 250, 0, 6050, 88, -9.4f } /* Room */ +#define FMOD_PRESET_BATHROOM { 1500, 7, 11, 5000, 54, 100, 60, 250, 0, 2900, 83, 0.5f } /* Bathroom */ +#define FMOD_PRESET_LIVINGROOM { 500, 3, 4, 5000, 10, 100, 100, 250, 0, 160, 58, -19.0f } /* Living room */ +#define FMOD_PRESET_STONEROOM { 2300, 12, 17, 5000, 64, 100, 100, 250, 0, 7800, 71, -8.5f } /* Stone room */ +#define FMOD_PRESET_AUDITORIUM { 4300, 20, 30, 5000, 59, 100, 100, 250, 0, 5850, 64, -11.7f } /* Auditorium */ +#define FMOD_PRESET_CONCERTHALL { 3900, 20, 29, 5000, 70, 100, 100, 250, 0, 5650, 80, -9.8f } /* Convert hall */ +#define FMOD_PRESET_CAVE { 2900, 15, 22, 5000, 100, 100, 100, 250, 0, 20000, 59, -11.3f } /* Cave */ +#define FMOD_PRESET_ARENA { 7200, 20, 30, 5000, 33, 100, 100, 250, 0, 4500, 80, -9.6f } /* Arena */ +#define FMOD_PRESET_HANGAR { 10000, 20, 30, 5000, 23, 100, 100, 250, 0, 3400, 72, -7.4f } /* Hangar */ +#define FMOD_PRESET_CARPETTEDHALLWAY { 300, 2, 30, 5000, 10, 100, 100, 250, 0, 500, 56, -24.0f } /* Carpeted hallway */ +#define FMOD_PRESET_HALLWAY { 1500, 7, 11, 5000, 59, 100, 100, 250, 0, 7800, 87, -5.5f } /* Hallway */ +#define FMOD_PRESET_STONECORRIDOR { 270, 13, 20, 5000, 79, 100, 100, 250, 0, 9000, 86, -6.0f } /* Stone corridor */ +#define FMOD_PRESET_ALLEY { 1500, 7, 11, 5000, 86, 100, 100, 250, 0, 8300, 80, -9.8f } /* Alley */ +#define FMOD_PRESET_FOREST { 
1500, 162, 88, 5000, 54, 79, 100, 250, 0, 760, 94, -12.3f } /* Forest */ +#define FMOD_PRESET_CITY { 1500, 7, 11, 5000, 67, 50, 100, 250, 0, 4050, 66, -26.0f } /* City */ +#define FMOD_PRESET_MOUNTAINS { 1500, 300, 100, 5000, 21, 27, 100, 250, 0, 1220, 82, -24.0f } /* Mountains */ +#define FMOD_PRESET_QUARRY { 1500, 61, 25, 5000, 83, 100, 100, 250, 0, 3400, 100, -5.0f } /* Quarry */ +#define FMOD_PRESET_PLAIN { 1500, 179, 100, 5000, 50, 21, 100, 250, 0, 1670, 65, -28.0f } /* Plain */ +#define FMOD_PRESET_PARKINGLOT { 1700, 8, 12, 5000, 100, 100, 100, 250, 0, 20000, 56, -19.5f } /* Parking lot */ +#define FMOD_PRESET_SEWERPIPE { 2800, 14, 21, 5000, 14, 80, 60, 250, 0, 3400, 66, 1.2f } /* Sewer pipe */ +#define FMOD_PRESET_UNDERWATER { 1500, 7, 11, 5000, 10, 100, 100, 250, 0, 500, 92, 7.0f } /* Underwater */ +/* [DEFINE_END] */ + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Settings for advanced features like configuring memory and cpu usage for the FMOD_CREATECOMPRESSEDSAMPLE feature. + + [REMARKS] + maxMPEGCodecs / maxADPCMCodecs / maxXMACodecs will determine the maximum cpu usage of playing realtime samples. Use this to lower potential excess cpu usage and also control memory usage.
    +
    + maxPCMCodecs is for use with PS3 only. It will determine the maximum number of PCM voices that can be played at once. This includes streams of any format and all sounds created + *without* the FMOD_CREATECOMPRESSEDSAMPLE flag. +
    + Memory will be allocated for codecs 'up front' (during System::init) if these values are specified as non zero. If any are zero, it allocates memory for the codec whenever a file of the type in question is loaded. So if maxMPEGCodecs is 0 for example, it will allocate memory for the mpeg codecs the first time an mp3 is loaded or an mp3 based .FSB file is loaded.
    +
    + Due to inefficient encoding techniques on certain .wav based ADPCM files, FMOD can can need an extra 29720 bytes per codec. This means for lowest memory consumption. Use FSB as it uses an optimal/small ADPCM block size.
    +
    + Members marked with [r] mean the variable is modified by FMOD and is for reading purposes only. Do not change this value.
    + Members marked with [w] mean the variable can be written to. The user can set the value.
    + Members marked with [r/w] are either read or write depending on if you are using System::setAdvancedSettings (w) or System::getAdvancedSettings (r). + + [SEE_ALSO] + System::setAdvancedSettings + System::getAdvancedSettings + System::init + FMOD_MODE +] +*/ +typedef struct FMOD_ADVANCEDSETTINGS +{ + int cbSize; /* [w] Size of this structure. Use sizeof(FMOD_ADVANCEDSETTINGS) NOTE: This must be set before calling System::getAdvancedSettings or System::setAdvancedSettings! */ + int maxMPEGCodecs; /* [r/w] Optional. Specify 0 to ignore. For use with FMOD_CREATECOMPRESSEDSAMPLE only. MPEG codecs consume 22,216 bytes per instance and this number will determine how many MPEG channels can be played simultaneously. Default = 32. */ + int maxADPCMCodecs; /* [r/w] Optional. Specify 0 to ignore. For use with FMOD_CREATECOMPRESSEDSAMPLE only. ADPCM codecs consume 2,480 bytes per instance and this number will determine how many ADPCM channels can be played simultaneously. Default = 32. */ + int maxXMACodecs; /* [r/w] Optional. Specify 0 to ignore. For use with FMOD_CREATECOMPRESSEDSAMPLE only. XMA codecs consume 6,263 bytes per instance and this number will determine how many XMA channels can be played simultaneously. Default = 32. */ + int maxVorbisCodecs; /* [r/w] Optional. Specify 0 to ignore. For use with FMOD_CREATECOMPRESSEDSAMPLE only. Vorbis codecs consume 16,512 bytes per instance and this number will determine how many Vorbis channels can be played simultaneously. Default = 32. */ + int maxAT9Codecs; /* [r/w] Optional. Specify 0 to ignore. For use with FMOD_CREATECOMPRESSEDSAMPLE only. AT9 codecs consume 20,664 bytes per instance and this number will determine how many AT9 channels can be played simultaneously. Default = 32. */ + int maxFADPCMCodecs; /* [r/w] Optional. Specify 0 to ignore. For use with FMOD_CREATECOMPRESSEDSAMPLE only. 
FADPCM codecs consume 2,232 bytes per instance and this number will determine how many FADPCM channels can be played simultaneously. Default = 32. */ + int maxPCMCodecs; /* [r/w] Optional. Specify 0 to ignore. For use with PS3 only. PCM codecs consume 2,536 bytes per instance and this number will determine how many streams and PCM voices can be played simultaneously. Default = 32. */ + int ASIONumChannels; /* [r/w] Optional. Specify 0 to ignore. Number of channels available on the ASIO device. */ + char **ASIOChannelList; /* [r/w] Optional. Specify 0 to ignore. Pointer to an array of strings (number of entries defined by ASIONumChannels) with ASIO channel names. */ + FMOD_SPEAKER *ASIOSpeakerList; /* [r/w] Optional. Specify 0 to ignore. Pointer to a list of speakers that the ASIO channels map to. This can be called after System::init to remap ASIO output. */ + float HRTFMinAngle; /* [r/w] Optional. For use with FMOD_INIT_HRTF_LOWPASS. The angle range (0-360) of a 3D sound in relation to the listener, at which the HRTF function begins to have an effect. 0 = in front of the listener. 180 = from 90 degrees to the left of the listener to 90 degrees to the right. 360 = behind the listener. Default = 180.0. */ + float HRTFMaxAngle; /* [r/w] Optional. For use with FMOD_INIT_HRTF_LOWPASS. The angle range (0-360) of a 3D sound in relation to the listener, at which the HRTF function has maximum effect. 0 = front of the listener. 180 = from 90 degrees to the left of the listener to 90 degrees to the right. 360 = behind the listener. Default = 360.0. */ + float HRTFFreq; /* [r/w] Optional. Specify 0 to ignore. For use with FMOD_INIT_HRTF_LOWPASS. The cutoff frequency of the HRTF's lowpass filter function when at maximum effect. (i.e. at HRTFMaxAngle). Default = 4000.0. */ + float vol0virtualvol; /* [r/w] Optional. Specify 0 to ignore. For use with FMOD_INIT_VOL0_BECOMES_VIRTUAL. If this flag is used, and the volume is below this, then the sound will become virtual. 
Use this value to raise the threshold to a different point where a sound goes virtual. */ + unsigned int defaultDecodeBufferSize; /* [r/w] Optional. Specify 0 to ignore. For streams. This determines the default size of the double buffer (in milliseconds) that a stream uses. Default = 400ms */ + unsigned short profilePort; /* [r/w] Optional. Specify 0 to ignore. For use with FMOD_INIT_PROFILE_ENABLE. Specify the port to listen on for connections by the profiler application. */ + unsigned int geometryMaxFadeTime; /* [r/w] Optional. Specify 0 to ignore. The maximum time in miliseconds it takes for a channel to fade to the new level when its occlusion changes. */ + float distanceFilterCenterFreq; /* [r/w] Optional. Specify 0 to ignore. For use with FMOD_INIT_DISTANCE_FILTERING. The default center frequency in Hz for the distance filtering effect. Default = 1500.0. */ + int reverb3Dinstance; /* [r/w] Optional. Specify 0 to ignore. Out of 0 to 3, 3d reverb spheres will create a phyical reverb unit on this instance slot. See FMOD_REVERB_PROPERTIES. */ + int DSPBufferPoolSize; /* [r/w] Optional. Specify 0 to ignore. Number of buffers in DSP buffer pool. Each buffer will be DSPBlockSize * sizeof(float) * SpeakerModeChannelCount. ie 7.1 @ 1024 DSP block size = 8 * 1024 * 4 = 32kb. Default = 8. */ + unsigned int stackSizeStream; /* [r/w] Optional. Specify 0 to ignore. Specify the stack size for the FMOD Stream thread in bytes. Useful for custom codecs that use excess stack. Default 49,152 (48kb) */ + unsigned int stackSizeNonBlocking; /* [r/w] Optional. Specify 0 to ignore. Specify the stack size for the FMOD_NONBLOCKING loading thread. Useful for custom codecs that use excess stack. Default 65,536 (64kb) */ + unsigned int stackSizeMixer; /* [r/w] Optional. Specify 0 to ignore. Specify the stack size for the FMOD mixer thread. Useful for custom dsps that use excess stack. Default 49,152 (48kb) */ + FMOD_DSP_RESAMPLER resamplerMethod; /* [r/w] Optional. Specify 0 to ignore. 
Resampling method used with fmod's software mixer. See FMOD_DSP_RESAMPLER for details on methods. */ + unsigned int commandQueueSize; /* [r/w] Optional. Specify 0 to ignore. Specify the command queue size for thread safe processing. Default 2048 (2kb) */ + unsigned int randomSeed; /* [r/w] Optional. Specify 0 to ignore. Seed value that FMOD will use to initialize its internal random number generators. */ +} FMOD_ADVANCEDSETTINGS; + + +/* +[DEFINE] +[ + [NAME] + FMOD_DRIVER_STATE + + [DESCRIPTION] + Flags that provide additional information about a particular driver. + + [REMARKS] + + [SEE_ALSO] + System::getRecordDriverInfo +] +*/ +#define FMOD_DRIVER_STATE_CONNECTED 0x00000001 /* Device is currently plugged in. */ +#define FMOD_DRIVER_STATE_DEFAULT 0x00000002 /* Device is the users preferred choice. */ +/* [DEFINE_END] */ + + +/*$ preserve start $*/ + +#include "fmod_codec.h" +#include "fmod_dsp.h" +#include "fmod_output.h" + +#endif + +/*$ preserve end $*/ diff --git a/app/src/main/cpp/inc/fmod_dsp.h b/app/src/main/cpp/inc/fmod_dsp.h new file mode 100644 index 0000000..c324075 --- /dev/null +++ b/app/src/main/cpp/inc/fmod_dsp.h @@ -0,0 +1,916 @@ +/* ========================================================================================== */ +/* FMOD Studio - DSP header file. Copyright (c), Firelight Technologies Pty, Ltd. 2004-2017. */ +/* */ +/* Use this header if you are interested in delving deeper into the FMOD software mixing / */ +/* DSP engine. */ +/* Also use this header if you are wanting to develop your own DSP plugin to use with FMOD's */ +/* dsp system. With this header you can make your own DSP plugin that FMOD can */ +/* register and use. See the documentation and examples on how to make a working plugin. 
*/ +/* */ +/* ========================================================================================== */ + +#ifndef _FMOD_DSP_H +#define _FMOD_DSP_H + +#include "fmod_dsp_effects.h" + +typedef struct FMOD_DSP_STATE FMOD_DSP_STATE; + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Structure for FMOD_DSP_PROCESS_CALLBACK input and output buffers. + + [REMARKS] + Members marked with [r] mean the variable is modified by FMOD and is for reading purposes only. Do not change this value.
    + Members marked with [w] mean the variable can be written to. The user can set the value.
    + + [SEE_ALSO] + FMOD_DSP_DESCRIPTION +] +*/ +typedef struct FMOD_DSP_BUFFER_ARRAY +{ + int numbuffers; /* [r/w] number of buffers */ + int *buffernumchannels; /* [r/w] array of number of channels for each buffer */ + FMOD_CHANNELMASK *bufferchannelmask; /* [r/w] array of channel masks for each buffer */ + float **buffers; /* [r/w] array of buffers */ + FMOD_SPEAKERMODE speakermode; /* [r/w] speaker mode for all buffers in the array */ +} FMOD_DSP_BUFFER_ARRAY; + + +/* +[ENUM] +[ + [DESCRIPTION] + Operation type for FMOD_DSP_PROCESS_CALLBACK. + + [REMARKS] + A process callback will be called twice per mix for a DSP unit. Once with the FMOD_DSP_PROCESS_QUERY command, then conditionally, FMOD_DSP_PROCESS_PERFORM.
    + FMOD_DSP_PROCESS_QUERY is to be handled only by filling out the outputarray information, and returning a relevant return code.
    + It should not really do any logic besides checking and returning one of the following codes:
    + - FMOD_OK - Meaning yes, it should execute the dsp process function with FMOD_DSP_PROCESS_PERFORM
    + - FMOD_ERR_DSP_DONTPROCESS - Meaning no, it should skip the process function and not call it with FMOD_DSP_PROCESS_PERFORM.
    + - FMOD_ERR_DSP_SILENCE - Meaning no, it should skip the process function and not call it with FMOD_DSP_PROCESS_PERFORM, AND, tell the signal chain to follow that it is now idle, so that no more processing happens down the chain.
    + If audio is to be processed, 'outbufferarray' must be filled with the expected output format, channel count and mask. Mask can be 0.
    +
    + FMOD_DSP_PROCESS_PROCESS is to be handled by reading the data from the input, processing it, and writing it to the output. Always write to the output buffer and fill it fully to avoid unpredictable audio output.
    + Always return FMOD_OK, the return value is ignored from the process stage. + + [SEE_ALSO] + FMOD_DSP_DESCRIPTION +] +*/ +typedef enum +{ + FMOD_DSP_PROCESS_PERFORM, /* Process the incoming audio in 'inbufferarray' and output to 'outbufferarray'. */ + FMOD_DSP_PROCESS_QUERY /* The DSP is being queried for the expected output format and whether it needs to process audio or should be bypassed. The function should return FMOD_OK, or FMOD_ERR_DSP_DONTPROCESS or FMOD_ERR_DSP_SILENCE if audio can pass through unprocessed. See remarks for more. If audio is to be processed, 'outbufferarray' must be filled with the expected output format, channel count and mask. */ +} FMOD_DSP_PROCESS_OPERATION; + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Complex number structure used for holding FFT frequency domain-data for FMOD_FFTREAL and FMOD_IFFTREAL DSP functions. + + [REMARKS] + + [SEE_ALSO] + FMOD_DSP_STATE_FUNCTIONS + FMOD_DSP_STATE_DFT_FUNCTIONS +] +*/ +typedef struct FMOD_COMPLEX +{ + float real; /* Real component */ + float imag; /* Imaginary component */ +} FMOD_COMPLEX; + + +/* +[ENUM] +[ + [DESCRIPTION] + Flags for the FMOD_DSP_PAN_SUMSURROUNDMATRIX_FUNC function. + + [REMARKS] + This functionality is experimental, please contact support@fmod.org for more information. + + [SEE_ALSO] + FMOD_DSP_STATE_PAN_FUNCTIONS +] +*/ +typedef enum FMOD_DSP_PAN_SURROUND_FLAGS +{ + FMOD_DSP_PAN_SURROUND_DEFAULT = 0, + FMOD_DSP_PAN_SURROUND_ROTATION_NOT_BIASED = 1, + + FMOD_DSP_PAN_SURROUND_FLAGS_FORCEINT = 65536 /* Makes sure this enum is signed 32bit. 
*/ +} FMOD_DSP_PAN_SURROUND_FLAGS; + + +/* + FMOD_DSP_DESCRIPTION callbacks +*/ +typedef FMOD_RESULT (F_CALL *FMOD_DSP_CREATE_CALLBACK) (FMOD_DSP_STATE *dsp_state); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_RELEASE_CALLBACK) (FMOD_DSP_STATE *dsp_state); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_RESET_CALLBACK) (FMOD_DSP_STATE *dsp_state); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_READ_CALLBACK) (FMOD_DSP_STATE *dsp_state, float *inbuffer, float *outbuffer, unsigned int length, int inchannels, int *outchannels); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_PROCESS_CALLBACK) (FMOD_DSP_STATE *dsp_state, unsigned int length, const FMOD_DSP_BUFFER_ARRAY *inbufferarray, FMOD_DSP_BUFFER_ARRAY *outbufferarray, FMOD_BOOL inputsidle, FMOD_DSP_PROCESS_OPERATION op); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_SETPOSITION_CALLBACK) (FMOD_DSP_STATE *dsp_state, unsigned int pos); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_SHOULDIPROCESS_CALLBACK) (FMOD_DSP_STATE *dsp_state, FMOD_BOOL inputsidle, unsigned int length, FMOD_CHANNELMASK inmask, int inchannels, FMOD_SPEAKERMODE speakermode); + +typedef FMOD_RESULT (F_CALL *FMOD_DSP_SETPARAM_FLOAT_CALLBACK) (FMOD_DSP_STATE *dsp_state, int index, float value); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_SETPARAM_INT_CALLBACK) (FMOD_DSP_STATE *dsp_state, int index, int value); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_SETPARAM_BOOL_CALLBACK) (FMOD_DSP_STATE *dsp_state, int index, FMOD_BOOL value); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_SETPARAM_DATA_CALLBACK) (FMOD_DSP_STATE *dsp_state, int index, void *data, unsigned int length); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_GETPARAM_FLOAT_CALLBACK) (FMOD_DSP_STATE *dsp_state, int index, float *value, char *valuestr); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_GETPARAM_INT_CALLBACK) (FMOD_DSP_STATE *dsp_state, int index, int *value, char *valuestr); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_GETPARAM_BOOL_CALLBACK) (FMOD_DSP_STATE *dsp_state, int index, FMOD_BOOL *value, char *valuestr); +typedef FMOD_RESULT (F_CALL 
*FMOD_DSP_GETPARAM_DATA_CALLBACK) (FMOD_DSP_STATE *dsp_state, int index, void **data, unsigned int *length, char *valuestr); + +typedef FMOD_RESULT (F_CALL *FMOD_DSP_SYSTEM_REGISTER_CALLBACK) (FMOD_DSP_STATE *dsp_state); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_SYSTEM_DEREGISTER_CALLBACK) (FMOD_DSP_STATE *dsp_state); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_SYSTEM_MIX_CALLBACK) (FMOD_DSP_STATE *dsp_state, int stage); + + +/* + FMOD_DSP_STATE functions +*/ +typedef void * (F_CALL *FMOD_DSP_ALLOC_FUNC) (unsigned int size, FMOD_MEMORY_TYPE type, const char *sourcestr); +typedef void * (F_CALL *FMOD_DSP_REALLOC_FUNC) (void *ptr, unsigned int size, FMOD_MEMORY_TYPE type, const char *sourcestr); +typedef void (F_CALL *FMOD_DSP_FREE_FUNC) (void *ptr, FMOD_MEMORY_TYPE type, const char *sourcestr); +typedef void (F_CALL *FMOD_DSP_LOG_FUNC) (FMOD_DEBUG_FLAGS level, const char *file, int line, const char *function, const char *string, ...); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_GETSAMPLERATE_FUNC) (FMOD_DSP_STATE *dsp_state, int *rate); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_GETBLOCKSIZE_FUNC) (FMOD_DSP_STATE *dsp_state, unsigned int *blocksize); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_GETSPEAKERMODE_FUNC) (FMOD_DSP_STATE *dsp_state, FMOD_SPEAKERMODE *speakermode_mixer, FMOD_SPEAKERMODE *speakermode_output); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_GETCLOCK_FUNC) (FMOD_DSP_STATE *dsp_state, unsigned long long *clock, unsigned int *offset, unsigned int *length); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_GETLISTENERATTRIBUTES_FUNC) (FMOD_DSP_STATE *dsp_state, int *numlisteners, FMOD_3D_ATTRIBUTES *attributes); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_GETUSERDATA_FUNC) (FMOD_DSP_STATE *dsp_state, void **userdata); + +typedef FMOD_RESULT (F_CALL *FMOD_DSP_DFT_FFTREAL_FUNC) (FMOD_DSP_STATE *dsp_state, int size, const float *signal, FMOD_COMPLEX* dft, const float *window, int signalhop); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_DFT_IFFTREAL_FUNC) (FMOD_DSP_STATE *dsp_state, int size, const 
FMOD_COMPLEX *dft, float* signal, const float *window, int signalhop); + +typedef FMOD_RESULT (F_CALL *FMOD_DSP_PAN_SUMMONOMATRIX_FUNC) (FMOD_DSP_STATE *dsp_state, int sourceSpeakerMode, float lowFrequencyGain, float overallGain, float *matrix); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_PAN_SUMSTEREOMATRIX_FUNC) (FMOD_DSP_STATE *dsp_state, int sourceSpeakerMode, float pan, float lowFrequencyGain, float overallGain, int matrixHop, float *matrix); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_PAN_SUMSURROUNDMATRIX_FUNC) (FMOD_DSP_STATE *dsp_state, int sourceSpeakerMode, int targetSpeakerMode, float direction, float extent, float rotation, float lowFrequencyGain, float overallGain, int matrixHop, float *matrix, FMOD_DSP_PAN_SURROUND_FLAGS flags); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_PAN_SUMMONOTOSURROUNDMATRIX_FUNC) (FMOD_DSP_STATE *dsp_state, int targetSpeakerMode, float direction, float extent, float lowFrequencyGain, float overallGain, int matrixHop, float *matrix); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_PAN_SUMSTEREOTOSURROUNDMATRIX_FUNC) (FMOD_DSP_STATE *dsp_state, int targetSpeakerMode, float direction, float extent, float rotation, float lowFrequencyGain, float overallGain, int matrixHop, float *matrix); +typedef FMOD_RESULT (F_CALL *FMOD_DSP_PAN_GETROLLOFFGAIN_FUNC) (FMOD_DSP_STATE *dsp_state, FMOD_DSP_PAN_3D_ROLLOFF_TYPE rolloff, float distance, float mindistance, float maxdistance, float *gain); + + +/* +[DEFINE] +[ + [NAME] + FMOD_DSP_GETPARAM_VALUESTR_LENGTH + + [DESCRIPTION] + Length in bytes of the buffer pointed to by the valuestr argument of FMOD_DSP_GETPARAM_XXXX_CALLBACK functions. + + [REMARKS] + DSP plugins should not copy more than this number of bytes into the buffer or memory corruption will occur. 
+ + [SEE_ALSO] + FMOD_DSP_GETPARAM_FLOAT_CALLBACK + FMOD_DSP_GETPARAM_INT_CALLBACK + FMOD_DSP_GETPARAM_BOOL_CALLBACK + FMOD_DSP_GETPARAM_DATA_CALLBACK +] +*/ +#define FMOD_DSP_GETPARAM_VALUESTR_LENGTH 32 +/* [DEFINE_END] */ + +/* +[ENUM] +[ + [DESCRIPTION] + DSP parameter types. + + [REMARKS] + + [SEE_ALSO] + FMOD_DSP_PARAMETER_DESC +] +*/ +typedef enum +{ + FMOD_DSP_PARAMETER_TYPE_FLOAT, /* FMOD_DSP_PARAMETER_DESC will use the FMOD_DSP_PARAMETER_DESC_FLOAT. */ + FMOD_DSP_PARAMETER_TYPE_INT, /* FMOD_DSP_PARAMETER_DESC will use the FMOD_DSP_PARAMETER_DESC_INT. */ + FMOD_DSP_PARAMETER_TYPE_BOOL, /* FMOD_DSP_PARAMETER_DESC will use the FMOD_DSP_PARAMETER_DESC_BOOL. */ + FMOD_DSP_PARAMETER_TYPE_DATA, /* FMOD_DSP_PARAMETER_DESC will use the FMOD_DSP_PARAMETER_DESC_DATA. */ + + FMOD_DSP_PARAMETER_TYPE_MAX, /* Maximum number of DSP parameter types. */ + FMOD_DSP_PARAMETER_TYPE_FORCEINT = 65536 /* Makes sure this enum is signed 32bit. */ +} FMOD_DSP_PARAMETER_TYPE; + + +/* +[ENUM] +[ + [DESCRIPTION] + DSP float parameter mappings. These determine how values are mapped across dials and automation curves. + + [REMARKS] + FMOD_DSP_PARAMETER_FLOAT_MAPPING_TYPE_AUTO generates a mapping based on range and units. For example, if the units are in Hertz and the range is with-in the audio spectrum, a Bark scale will be chosen. Logarithmic scales may also be generated for ranges above zero spanning several orders of magnitude. + + [SEE_ALSO] + FMOD_DSP_PARAMETER_FLOAT_MAPPING +] +*/ +typedef enum +{ + FMOD_DSP_PARAMETER_FLOAT_MAPPING_TYPE_LINEAR, /* Values mapped linearly across range. */ + FMOD_DSP_PARAMETER_FLOAT_MAPPING_TYPE_AUTO, /* A mapping is automatically chosen based on range and units. See remarks. */ + FMOD_DSP_PARAMETER_FLOAT_MAPPING_TYPE_PIECEWISE_LINEAR, /* Values mapped in a piecewise linear fashion defined by FMOD_DSP_PARAMETER_FLOAT_MAPPING_PIECEWISE_LINEAR. */ + + FMOD_DSP_PARAMETER_FLOAT_MAPPING_TYPE_FORCEINT = 65536 /* Makes sure this enum is signed 32bit. 
*/ +} FMOD_DSP_PARAMETER_FLOAT_MAPPING_TYPE; + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Structure to define a piecewise linear mapping. + + [REMARKS] + Members marked with [r] mean the variable is modified by FMOD and is for reading purposes only. Do not change this value.
    + Members marked with [w] mean the variable can be written to. The user can set the value.
    + + [SEE_ALSO] + FMOD_DSP_PARAMETER_FLOAT_MAPPING_TYPE + FMOD_DSP_PARAMETER_FLOAT_MAPPING +] +*/ +typedef struct FMOD_DSP_PARAMETER_FLOAT_MAPPING_PIECEWISE_LINEAR +{ + int numpoints; /* [w] The number of pairs in the piecewise mapping (at least 2). */ + float *pointparamvalues; /* [w] The values in the parameter's units for each point */ + float *pointpositions; /* [w] The positions along the control's scale (e.g. dial angle) corresponding to each parameter value. The range of this scale is arbitrary and all positions will be relative to the minimum and maximum values (e.g. [0,1,3] is equivalent to [1,2,4] and [2,4,8]). If this array is zero, pointparamvalues will be distributed with equal spacing. */ +} FMOD_DSP_PARAMETER_FLOAT_MAPPING_PIECEWISE_LINEAR; + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Structure to define a mapping for a DSP unit's float parameter. + + [REMARKS] + Members marked with [r] mean the variable is modified by FMOD and is for reading purposes only. Do not change this value.
    + Members marked with [w] mean the variable can be written to. The user can set the value.
    + + [SEE_ALSO] + FMOD_DSP_PARAMETER_FLOAT_MAPPING_TYPE + FMOD_DSP_PARAMETER_FLOAT_MAPPING_PIECEWISE_LINEAR + FMOD_DSP_PARAMETER_DESC_FLOAT +] +*/ +typedef struct FMOD_DSP_PARAMETER_FLOAT_MAPPING +{ + FMOD_DSP_PARAMETER_FLOAT_MAPPING_TYPE type; + FMOD_DSP_PARAMETER_FLOAT_MAPPING_PIECEWISE_LINEAR piecewiselinearmapping; /* [w] Only required for FMOD_DSP_PARAMETER_FLOAT_MAPPING_TYPE_PIECEWISE_LINEAR type mapping. */ +} FMOD_DSP_PARAMETER_FLOAT_MAPPING; + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Structure to define a float parameter for a DSP unit. + + [REMARKS] + Members marked with [r] mean the variable is modified by FMOD and is for reading purposes only. Do not change this value.
    + Members marked with [w] mean the variable can be written to. The user can set the value.
    + + [SEE_ALSO] + System::createDSP + DSP::setParameterFloat + DSP::getParameterFloat + FMOD_DSP_PARAMETER_DESC + FMOD_DSP_PARAMETER_FLOAT_MAPPING +] +*/ +typedef struct FMOD_DSP_PARAMETER_DESC_FLOAT +{ + float min; /* [w] Minimum parameter value. */ + float max; /* [w] Maximum parameter value. */ + float defaultval; /* [w] Default parameter value. */ + FMOD_DSP_PARAMETER_FLOAT_MAPPING mapping; /* [w] How the values are distributed across dials and automation curves (e.g. linearly, exponentially etc). */ +} FMOD_DSP_PARAMETER_DESC_FLOAT; + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Structure to define a int parameter for a DSP unit. + + [REMARKS] + Members marked with [r] mean the variable is modified by FMOD and is for reading purposes only. Do not change this value.
    + Members marked with [w] mean the variable can be written to. The user can set the value.
    + + [SEE_ALSO] + System::createDSP + DSP::setParameterInt + DSP::getParameterInt + FMOD_DSP_PARAMETER_DESC +] +*/ +typedef struct FMOD_DSP_PARAMETER_DESC_INT +{ + int min; /* [w] Minimum parameter value. */ + int max; /* [w] Maximum parameter value. */ + int defaultval; /* [w] Default parameter value. */ + FMOD_BOOL goestoinf; /* [w] Whether the last value represents infiniy. */ + const char* const* valuenames; /* [w] Names for each value. There should be as many strings as there are possible values (max - min + 1). Optional. */ +} FMOD_DSP_PARAMETER_DESC_INT; + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Structure to define a boolean parameter for a DSP unit. + + [REMARKS] + Members marked with [r] mean the variable is modified by FMOD and is for reading purposes only. Do not change this value.
    + Members marked with [w] mean the variable can be written to. The user can set the value.
    + + [SEE_ALSO] + System::createDSP + DSP::setParameterBool + DSP::getParameterBool + FMOD_DSP_PARAMETER_DESC +] +*/ +typedef struct FMOD_DSP_PARAMETER_DESC_BOOL +{ + FMOD_BOOL defaultval; /* [w] Default parameter value. */ + const char* const* valuenames; /* [w] Names for false and true, respectively. There should be two strings. Optional. */ +} FMOD_DSP_PARAMETER_DESC_BOOL; + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Structure to define a data parameter for a DSP unit. Use 0 or above for custom types. This parameter will be treated specially by the system if set to one of the FMOD_DSP_PARAMETER_DATA_TYPE values. + + [REMARKS] + Members marked with [r] mean the variable is modified by FMOD and is for reading purposes only. Do not change this value.
    + Members marked with [w] mean the variable can be written to. The user can set the value.
    + + [SEE_ALSO] + System::createDSP + DSP::setParameterData + DSP::getParameterData + FMOD_DSP_PARAMETER_DATA_TYPE + FMOD_DSP_PARAMETER_DESC +] +*/ +typedef struct FMOD_DSP_PARAMETER_DESC_DATA +{ + int datatype; /* [w] The type of data for this parameter. Use 0 or above for custom types or set to one of the FMOD_DSP_PARAMETER_DATA_TYPE values. */ +} FMOD_DSP_PARAMETER_DESC_DATA; + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Base Structure for DSP parameter descriptions. + + [REMARKS] + Members marked with [r] mean the variable is modified by FMOD and is for reading purposes only. Do not change this value.
    + Members marked with [w] mean the variable can be written to. The user can set the value.
    + + [SEE_ALSO] + System::createDSP + DSP::setParameterFloat + DSP::getParameterFloat + DSP::setParameterInt + DSP::getParameterInt + DSP::setParameterBool + DSP::getParameterBool + DSP::setParameterData + DSP::getParameterData + FMOD_DSP_PARAMETER_DESC_FLOAT + FMOD_DSP_PARAMETER_DESC_INT + FMOD_DSP_PARAMETER_DESC_BOOL + FMOD_DSP_PARAMETER_DESC_DATA +] +*/ +typedef struct FMOD_DSP_PARAMETER_DESC +{ + FMOD_DSP_PARAMETER_TYPE type; /* [w] Type of this parameter. */ + char name[16]; /* [w] Name of the parameter to be displayed (ie "Cutoff frequency"). */ + char label[16]; /* [w] Short string to be put next to value to denote the unit type (ie "hz"). */ + const char *description; /* [w] Description of the parameter to be displayed as a help item / tooltip for this parameter. */ + + union + { + FMOD_DSP_PARAMETER_DESC_FLOAT floatdesc; /* [w] Struct containing information about the parameter in floating point format. Use when type is FMOD_DSP_PARAMETER_TYPE_FLOAT. */ + FMOD_DSP_PARAMETER_DESC_INT intdesc; /* [w] Struct containing information about the parameter in integer format. Use when type is FMOD_DSP_PARAMETER_TYPE_INT. */ + FMOD_DSP_PARAMETER_DESC_BOOL booldesc; /* [w] Struct containing information about the parameter in boolean format. Use when type is FMOD_DSP_PARAMETER_TYPE_BOOL. */ + FMOD_DSP_PARAMETER_DESC_DATA datadesc; /* [w] Struct containing information about the parameter in data format. Use when type is FMOD_DSP_PARAMETER_TYPE_DATA. */ + }; +} FMOD_DSP_PARAMETER_DESC; + + +/* +[ENUM] +[ + [DESCRIPTION] + Built-in types for the 'datatype' member of FMOD_DSP_PARAMETER_DESC_DATA. Data parameters of type other than FMOD_DSP_PARAMETER_DATA_TYPE_USER will be treated specially by the system. 
+ + [REMARKS] + + [SEE_ALSO] + FMOD_DSP_PARAMETER_DESC_DATA + FMOD_DSP_PARAMETER_OVERALLGAIN + FMOD_DSP_PARAMETER_3DATTRIBUTES + FMOD_DSP_PARAMETER_3DATTRIBUTES_MULTI + FMOD_DSP_PARAMETER_SIDECHAIN +] +*/ +typedef enum +{ + FMOD_DSP_PARAMETER_DATA_TYPE_USER = 0, /* The default data type. All user data types should be 0 or above. */ + FMOD_DSP_PARAMETER_DATA_TYPE_OVERALLGAIN = -1, /* The data type for FMOD_DSP_PARAMETER_OVERALLGAIN parameters. There should a maximum of one per DSP. */ + FMOD_DSP_PARAMETER_DATA_TYPE_3DATTRIBUTES = -2, /* The data type for FMOD_DSP_PARAMETER_3DATTRIBUTES parameters. There should a maximum of one per DSP. */ + FMOD_DSP_PARAMETER_DATA_TYPE_SIDECHAIN = -3, /* The data type for FMOD_DSP_PARAMETER_SIDECHAIN parameters. There should a maximum of one per DSP. */ + FMOD_DSP_PARAMETER_DATA_TYPE_FFT = -4, /* The data type for FMOD_DSP_PARAMETER_FFT parameters. There should a maximum of one per DSP. */ + FMOD_DSP_PARAMETER_DATA_TYPE_3DATTRIBUTES_MULTI = -5, /* The data type for FMOD_DSP_PARAMETER_3DATTRIBUTES_MULTI parameters. There should a maximum of one per DSP. */ +} FMOD_DSP_PARAMETER_DATA_TYPE; + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Structure for data parameters of type FMOD_DSP_PARAMETER_DATA_TYPE_OVERALLGAIN. + A parameter of this type is used in effects that affect the overgain of the signal in a predictable way. + This parameter is read by the system to determine the effect's gain for voice virtualization. + + [REMARKS] + Members marked with [r] mean the variable is modified by FMOD and is for reading purposes only. Do not change this value.
    + Members marked with [w] mean the variable can be written to. The user can set the value.
    + + [SEE_ALSO] + FMOD_DSP_PARAMETER_DATA_TYPE + FMOD_DSP_PARAMETER_DESC +] +*/ +typedef struct FMOD_DSP_PARAMETER_OVERALLGAIN +{ + float linear_gain; /* [r] The overall linear gain of the effect on the direct signal path */ + float linear_gain_additive; /* [r] Additive gain, for parallel signal paths */ +} FMOD_DSP_PARAMETER_OVERALLGAIN; + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Structure for data parameters of type FMOD_DSP_PARAMETER_DATA_TYPE_3DATTRIBUTES. + + A parameter of this type is used in effects that respond to a 3D position. + + [REMARKS] + The FMOD::Studio::System will set this parameter automatically if an FMOD::Studio::EventInstance position + changes, however if using the low level FMOD::System you must set this DSP parameter explicitly. + + FMOD will convert passed in co-ordinates to left-handed for the plugin if the System was initialized with the FMOD_INIT_3D_RIGHTHANDED flag. + + Members marked with [r] mean the variable is modified by FMOD and is for reading purposes only. Do not change this value.
    + Members marked with [w] mean the variable can be written to. The user can set the value.
    + + [SEE_ALSO] + FMOD_DSP_PARAMETER_DATA_TYPE + FMOD_DSP_PARAMETER_DESC +] +*/ +typedef struct FMOD_DSP_PARAMETER_3DATTRIBUTES +{ + FMOD_3D_ATTRIBUTES relative; /* [w] The position of the sound relative to the listener. */ + FMOD_3D_ATTRIBUTES absolute; /* [w] The position of the sound in world coordinates. */ +} FMOD_DSP_PARAMETER_3DATTRIBUTES; + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Structure for data parameters of type FMOD_DSP_PARAMETER_DATA_TYPE_3DATTRIBUTES_MULTI. + + A parameter of this type is used in effects that respond to a 3D position and support multiple listeners. + + [REMARKS] + The FMOD::Studio::System will set this parameter automatically if an FMOD::Studio::EventInstance position + changes, however if using the low level FMOD::System you must set this DSP parameter explicitly. + + FMOD will convert passed in co-ordinates to left-handed for the plugin if the System was initialized with the FMOD_INIT_3D_RIGHTHANDED flag. + + Members marked with [r] mean the variable is modified by FMOD and is for reading purposes only. Do not change this value.
    + Members marked with [w] mean the variable can be written to. The user can set the value.
    + + [SEE_ALSO] + FMOD_DSP_PARAMETER_DATA_TYPE + FMOD_DSP_PARAMETER_DESC +] +*/ +typedef struct FMOD_DSP_PARAMETER_3DATTRIBUTES_MULTI +{ + int numlisteners; /* [w] The number of listeners. */ + FMOD_3D_ATTRIBUTES relative[FMOD_MAX_LISTENERS]; /* [w] The position of the sound relative to the listeners. */ + float weight[FMOD_MAX_LISTENERS]; /* [w] The weighting of the listeners where 0 means listener has no contribution and 1 means full contribution. */ + FMOD_3D_ATTRIBUTES absolute; /* [w] The position of the sound in world coordinates. */ +} FMOD_DSP_PARAMETER_3DATTRIBUTES_MULTI; + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Structure for data parameters of type FMOD_DSP_PARAMETER_DATA_TYPE_SIDECHAIN. + A parameter of this type is declared for effects which support sidechaining. + + [REMARKS] + Members marked with [r] mean the variable is modified by FMOD and is for reading purposes only. Do not change this value.
    + Members marked with [w] mean the variable can be written to. The user can set the value.
    + + [SEE_ALSO] + FMOD_DSP_PARAMETER_DATA_TYPE + FMOD_DSP_PARAMETER_DESC +] +*/ +typedef struct FMOD_DSP_PARAMETER_SIDECHAIN +{ + FMOD_BOOL sidechainenable; /* [r/w] Whether sidechains are enabled. */ +} FMOD_DSP_PARAMETER_SIDECHAIN; + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Structure for data parameters of type FMOD_DSP_PARAMETER_DATA_TYPE_FFT. + A parameter of this type is declared for the FMOD_DSP_TYPE_FFT effect. + + [REMARKS] + Members marked with [r] mean the variable is modified by FMOD and is for reading purposes only. Do not change this value.
    + Members marked with [w] mean the variable can be written to. The user can set the value.
    +
    + Notes on the spectrum data member. Values inside the float buffer are typically between 0 and 1.0.
    + Each top level array represents one PCM channel of data.
    + Address data as spectrum[channel][bin]. A bin is one FFT window entry.
    + Typically only read/display the first half of the buffer for analysis, as the second half is usually the same data reversed due to the nature of the FFT.
    + + [SEE_ALSO] + FMOD_DSP_PARAMETER_DATA_TYPE + FMOD_DSP_PARAMETER_DESC + FMOD_DSP_PARAMETER_DATA_TYPE_FFT + FMOD_DSP_TYPE + FMOD_DSP_FFT +] +*/ +typedef struct FMOD_DSP_PARAMETER_FFT +{ + int length; /* [r] Number of entries in this spectrum window. Divide this by the output rate to get the hz per entry. */ + int numchannels; /* [r] Number of channels in spectrum. */ + float *spectrum[32]; /* [r] Per channel spectrum arrays. See remarks for more. */ +} FMOD_DSP_PARAMETER_FFT; + + +/* + Helpers for declaring parameters in custom DSPSs +*/ +#define FMOD_DSP_INIT_PARAMDESC_FLOAT(_paramstruct, _name, _label, _description, _min, _max, _defaultval) \ + memset(&(_paramstruct), 0, sizeof(_paramstruct)); \ + (_paramstruct).type = FMOD_DSP_PARAMETER_TYPE_FLOAT; \ + strncpy((_paramstruct).name, _name, 15); \ + strncpy((_paramstruct).label, _label, 15); \ + (_paramstruct).description = _description; \ + (_paramstruct).floatdesc.min = _min; \ + (_paramstruct).floatdesc.max = _max; \ + (_paramstruct).floatdesc.defaultval = _defaultval; \ + (_paramstruct).floatdesc.mapping.type = FMOD_DSP_PARAMETER_FLOAT_MAPPING_TYPE_AUTO; + +#define FMOD_DSP_INIT_PARAMDESC_FLOAT_WITH_MAPPING(_paramstruct, _name, _label, _description, _defaultval, _values, _positions); \ + memset(&(_paramstruct), 0, sizeof(_paramstruct)); \ + (_paramstruct).type = FMOD_DSP_PARAMETER_TYPE_FLOAT; \ + strncpy((_paramstruct).name, _name , 15); \ + strncpy((_paramstruct).label, _label, 15); \ + (_paramstruct).description = _description; \ + (_paramstruct).floatdesc.min = _values[0]; \ + (_paramstruct).floatdesc.max = _values[sizeof(_values) / sizeof(float) - 1]; \ + (_paramstruct).floatdesc.defaultval = _defaultval; \ + (_paramstruct).floatdesc.mapping.type = FMOD_DSP_PARAMETER_FLOAT_MAPPING_TYPE_PIECEWISE_LINEAR; \ + (_paramstruct).floatdesc.mapping.piecewiselinearmapping.numpoints = sizeof(_values) / sizeof(float); \ + (_paramstruct).floatdesc.mapping.piecewiselinearmapping.pointparamvalues = _values; \ + 
(_paramstruct).floatdesc.mapping.piecewiselinearmapping.pointpositions = _positions; + +#define FMOD_DSP_INIT_PARAMDESC_INT(_paramstruct, _name, _label, _description, _min, _max, _defaultval, _goestoinf, _valuenames) \ + memset(&(_paramstruct), 0, sizeof(_paramstruct)); \ + (_paramstruct).type = FMOD_DSP_PARAMETER_TYPE_INT; \ + strncpy((_paramstruct).name, _name , 15); \ + strncpy((_paramstruct).label, _label, 15); \ + (_paramstruct).description = _description; \ + (_paramstruct).intdesc.min = _min; \ + (_paramstruct).intdesc.max = _max; \ + (_paramstruct).intdesc.defaultval = _defaultval; \ + (_paramstruct).intdesc.goestoinf = _goestoinf; \ + (_paramstruct).intdesc.valuenames = _valuenames; + +#define FMOD_DSP_INIT_PARAMDESC_INT_ENUMERATED(_paramstruct, _name, _label, _description, _defaultval, _valuenames) \ + memset(&(_paramstruct), 0, sizeof(_paramstruct)); \ + (_paramstruct).type = FMOD_DSP_PARAMETER_TYPE_INT; \ + strncpy((_paramstruct).name, _name , 15); \ + strncpy((_paramstruct).label, _label, 15); \ + (_paramstruct).description = _description; \ + (_paramstruct).intdesc.min = 0; \ + (_paramstruct).intdesc.max = sizeof(_valuenames) / sizeof(char*) - 1; \ + (_paramstruct).intdesc.defaultval = _defaultval; \ + (_paramstruct).intdesc.goestoinf = false; \ + (_paramstruct).intdesc.valuenames = _valuenames; + +#define FMOD_DSP_INIT_PARAMDESC_BOOL(_paramstruct, _name, _label, _description, _defaultval, _valuenames) \ + memset(&(_paramstruct), 0, sizeof(_paramstruct)); \ + (_paramstruct).type = FMOD_DSP_PARAMETER_TYPE_BOOL; \ + strncpy((_paramstruct).name, _name , 15); \ + strncpy((_paramstruct).label, _label, 15); \ + (_paramstruct).description = _description; \ + (_paramstruct).booldesc.defaultval = _defaultval; \ + (_paramstruct).booldesc.valuenames = _valuenames; + +#define FMOD_DSP_INIT_PARAMDESC_DATA(_paramstruct, _name, _label, _description, _datatype) \ + memset(&(_paramstruct), 0, sizeof(_paramstruct)); \ + (_paramstruct).type = 
FMOD_DSP_PARAMETER_TYPE_DATA; \ + strncpy((_paramstruct).name, _name , 15); \ + strncpy((_paramstruct).label, _label, 15); \ + (_paramstruct).description = _description; \ + (_paramstruct).datadesc.datatype = _datatype; + +#define FMOD_PLUGIN_SDK_VERSION 110 + +/* +[STRUCTURE] +[ + [DESCRIPTION] + When creating a DSP unit, declare one of these and provide the relevant callbacks and name for FMOD to use when it creates and uses a DSP unit of this type. + + [REMARKS] + Members marked with [r] mean the variable is modified by FMOD and is for reading purposes only. Do not change this value.
    + Members marked with [w] mean the variable can be written to. The user can set the value.
    +
    + There are 2 different ways to change a parameter in this architecture.
    + One is to use DSP::setParameterFloat / DSP::setParameterInt / DSP::setParameterBool / DSP::setParameterData. This is platform independent and is dynamic, so new unknown plugins can have their parameters enumerated and used.
    + The other is to use DSP::showConfigDialog. This is platform specific and requires a GUI, and will display a dialog box to configure the plugin.
    + + [SEE_ALSO] + System::createDSP + DSP::setParameterFloat + DSP::setParameterInt + DSP::setParameterBool + DSP::setParameterData + FMOD_DSP_STATE + FMOD_DSP_CREATE_CALLBACK + FMOD_DSP_RELEASE_CALLBACK + FMOD_DSP_RESET_CALLBACK + FMOD_DSP_READ_CALLBACK + FMOD_DSP_PROCESS_CALLBACK + FMOD_DSP_SETPOSITION_CALLBACK + FMOD_DSP_PARAMETER_DESC + FMOD_DSP_SETPARAM_FLOAT_CALLBACK + FMOD_DSP_SETPARAM_INT_CALLBACK + FMOD_DSP_SETPARAM_BOOL_CALLBACK + FMOD_DSP_SETPARAM_DATA_CALLBACK + FMOD_DSP_GETPARAM_FLOAT_CALLBACK + FMOD_DSP_GETPARAM_INT_CALLBACK + FMOD_DSP_GETPARAM_BOOL_CALLBACK + FMOD_DSP_GETPARAM_DATA_CALLBACK + FMOD_DSP_SHOULDIPROCESS_CALLBACK + FMOD_DSP_SYSTEM_REGISTER_CALLBACK + FMOD_DSP_SYSTEM_DEREGISTER_CALLBACK + FMOD_DSP_SYSTEM_MIX_CALLBACK +] +*/ +typedef struct FMOD_DSP_DESCRIPTION +{ + unsigned int pluginsdkversion; /* [w] The plugin SDK version this plugin is built for. Set to this to FMOD_PLUGIN_SDK_VERSION defined above. */ + char name[32]; /* [w] The identifier of the DSP. This will also be used as the name of DSP and shouldn't change between versions. */ + unsigned int version; /* [w] Plugin writer's version number. */ + int numinputbuffers; /* [w] Number of input buffers to process. Use 0 for DSPs that only generate sound and 1 for effects that process incoming sound. */ + int numoutputbuffers; /* [w] Number of audio output buffers. Only one output buffer is currently supported. */ + FMOD_DSP_CREATE_CALLBACK create; /* [w] Create callback. This is called when DSP unit is created. Can be null. */ + FMOD_DSP_RELEASE_CALLBACK release; /* [w] Release callback. This is called just before the unit is freed so the user can do any cleanup needed for the unit. Can be null. */ + FMOD_DSP_RESET_CALLBACK reset; /* [w] Reset callback. This is called by the user to reset any history buffers that may need resetting for a filter, when it is to be used or re-used for the first time to its initial clean state. Use to avoid clicks or artifacts. 
*/ + FMOD_DSP_READ_CALLBACK read; /* [w] Read callback. Processing is done here. Can be null. */ + FMOD_DSP_PROCESS_CALLBACK process; /* [w] Process callback. Can be specified instead of the read callback if any channel format changes occur between input and output. This also replaces shouldiprocess and should return an error if the effect is to be bypassed. Can be null. */ + FMOD_DSP_SETPOSITION_CALLBACK setposition; /* [w] Set position callback. This is called if the unit wants to update its position info but not process data, or reset a cursor position internally if it is reading data from a certain source. Can be null. */ + + int numparameters; /* [w] Number of parameters used in this filter. The user finds this with DSP::getNumParameters */ + FMOD_DSP_PARAMETER_DESC **paramdesc; /* [w] Variable number of parameter structures. */ + FMOD_DSP_SETPARAM_FLOAT_CALLBACK setparameterfloat; /* [w] This is called when the user calls DSP::setParameterFloat. Can be null. */ + FMOD_DSP_SETPARAM_INT_CALLBACK setparameterint; /* [w] This is called when the user calls DSP::setParameterInt. Can be null. */ + FMOD_DSP_SETPARAM_BOOL_CALLBACK setparameterbool; /* [w] This is called when the user calls DSP::setParameterBool. Can be null. */ + FMOD_DSP_SETPARAM_DATA_CALLBACK setparameterdata; /* [w] This is called when the user calls DSP::setParameterData. Can be null. */ + FMOD_DSP_GETPARAM_FLOAT_CALLBACK getparameterfloat; /* [w] This is called when the user calls DSP::getParameterFloat. Can be null. */ + FMOD_DSP_GETPARAM_INT_CALLBACK getparameterint; /* [w] This is called when the user calls DSP::getParameterInt. Can be null. */ + FMOD_DSP_GETPARAM_BOOL_CALLBACK getparameterbool; /* [w] This is called when the user calls DSP::getParameterBool. Can be null. */ + FMOD_DSP_GETPARAM_DATA_CALLBACK getparameterdata; /* [w] This is called when the user calls DSP::getParameterData. Can be null. 
*/ + FMOD_DSP_SHOULDIPROCESS_CALLBACK shouldiprocess; /* [w] This is called before processing. You can detect if inputs are idle and return FMOD_OK to process, or any other error code to avoid processing the effect. Use a count down timer to allow effect tails to process before idling! */ + void *userdata; /* [w] Optional. Specify 0 to ignore. This is user data to be attached to the DSP unit during creation. Access via FMOD_DSP_STATE_FUNCTIONS::getuserdata. */ + + FMOD_DSP_SYSTEM_REGISTER_CALLBACK sys_register; /* [w] Register callback. This is called when DSP unit is loaded/registered. Useful for 'global'/per system object init for plugin. Can be null. */ + FMOD_DSP_SYSTEM_DEREGISTER_CALLBACK sys_deregister; /* [w] Deregister callback. This is called when DSP unit is unloaded/deregistered. Useful as 'global'/per system object shutdown for plugin. Can be null. */ + FMOD_DSP_SYSTEM_MIX_CALLBACK sys_mix; /* [w] System mix stage callback. This is called when the mixer starts to execute or is just finishing executing. Useful for 'global'/per system object once a mix update calls for a plugin. Can be null. */ + +} FMOD_DSP_DESCRIPTION; + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Struct containing DFT functions to enable a plugin to perform optimized time-frequency domain conversion. + + [REMARKS] + Members marked with [r] mean read only for the developer, read/write for the FMOD system. + + Members marked with [w] mean read/write for the developer, read only for the FMOD system. + + [SEE_ALSO] + FMOD_DSP_STATE_FUNCTIONS +] +*/ +typedef struct FMOD_DSP_STATE_DFT_FUNCTIONS +{ + FMOD_DSP_DFT_FFTREAL_FUNC fftreal; /* [r] Function for performing an FFT on a real signal. */ + FMOD_DSP_DFT_IFFTREAL_FUNC inversefftreal; /* [r] Function for performing an inverse FFT to get a real signal. */ +} FMOD_DSP_STATE_DFT_FUNCTIONS; + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Struct containing panning helper functions for spatialization plugins. 
+ + [REMARKS] + These are experimental, please contact support@fmod.org for more information. + + Members marked with [r] mean read only for the developer, read/write for the FMOD system. + + Members marked with [w] mean read/write for the developer, read only for the FMOD system. + + [SEE_ALSO] + FMOD_DSP_STATE_FUNCTIONS + FMOD_DSP_PAN_SURROUND_FLAGS +] +*/ +typedef struct FMOD_DSP_STATE_PAN_FUNCTIONS +{ + FMOD_DSP_PAN_SUMMONOMATRIX_FUNC summonomatrix; /* [r] TBD. */ + FMOD_DSP_PAN_SUMSTEREOMATRIX_FUNC sumstereomatrix; /* [r] TBD. */ + FMOD_DSP_PAN_SUMSURROUNDMATRIX_FUNC sumsurroundmatrix; /* [r] TBD. */ + FMOD_DSP_PAN_SUMMONOTOSURROUNDMATRIX_FUNC summonotosurroundmatrix; /* [r] TBD. */ + FMOD_DSP_PAN_SUMSTEREOTOSURROUNDMATRIX_FUNC sumstereotosurroundmatrix; /* [r] TBD. */ + FMOD_DSP_PAN_GETROLLOFFGAIN_FUNC getrolloffgain; /* [r] TBD. */ +} FMOD_DSP_STATE_PAN_FUNCTIONS; + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Struct containing functions to give plugin developers the ability to query system state, access system level functionality and helpers. + + [REMARKS] + Members marked with [r] mean read only for the developer, read/write for the FMOD system. + + Members marked with [w] mean read/write for the developer, read only for the FMOD system. + + [SEE_ALSO] + FMOD_DSP_STATE + FMOD_DSP_STATE_DFT_FUNCTIONS + FMOD_DSP_STATE_PAN_FUNCTIONS +] +*/ +typedef struct FMOD_DSP_STATE_FUNCTIONS +{ + FMOD_DSP_ALLOC_FUNC alloc; /* [r] Function to allocate memory using the FMOD memory system. */ + FMOD_DSP_REALLOC_FUNC realloc; /* [r] Function to reallocate memory using the FMOD memory system. */ + FMOD_DSP_FREE_FUNC free; /* [r] Function to free memory allocated with FMOD_DSP_ALLOC_FUNC. */ + FMOD_DSP_GETSAMPLERATE_FUNC getsamplerate; /* [r] Function to query the system sample rate. */ + FMOD_DSP_GETBLOCKSIZE_FUNC getblocksize; /* [r] Function to query the system block size, DSPs will be requested to process blocks of varying length up to this size. 
*/ + FMOD_DSP_STATE_DFT_FUNCTIONS *dft; /* [r] Struct containing DFT functions to enable a plugin to perform optimized time-frequency domain conversion. */ + FMOD_DSP_STATE_PAN_FUNCTIONS *pan; /* [r] Struct containing panning helper functions for spatialization plugins. */ + FMOD_DSP_GETSPEAKERMODE_FUNC getspeakermode; /* [r] Function to query the system speaker modes. One is the mixer's default speaker mode, the other is the output mode the system is downmixing or upmixing to.*/ + FMOD_DSP_GETCLOCK_FUNC getclock; /* [r] Function to get the clock of the current DSP, as well as the subset of the input buffer that contains the signal. */ + FMOD_DSP_GETLISTENERATTRIBUTES_FUNC getlistenerattributes; /* [r] Callback for getting the absolute listener attributes set via the API (returned as left-handed co-ordinates). */ + FMOD_DSP_LOG_FUNC log; /* [r] Function to write to the FMOD logging system. */ + FMOD_DSP_GETUSERDATA_FUNC getuserdata; /* [r] Function to get the user data attached to this DSP. See FMOD_DSP_DESCRIPTION::userdata. */ +} FMOD_DSP_STATE_FUNCTIONS; + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + DSP plugin structure that is passed into each callback. + + [REMARKS] + Members marked with [r] mean the variable is modified by FMOD and is for reading purposes only. Do not change this value.
    + Members marked with [w] mean the variable can be written to. The user can set the value.
    +
    + 'systemobject' is an integer that relates to the System object that created the DSP or registered the DSP plugin. If only 1 System object is created then it should be 0. A second object would be 1 and so on. + FMOD_DSP_STATE_FUNCTIONS::getsamplerate/getblocksize/getspeakermode could return different results so it could be relevant to plugin developers to monitor which object is being used. + + [SEE_ALSO] + FMOD_DSP_DESCRIPTION + FMOD_DSP_STATE_FUNCTIONS +] +*/ +struct FMOD_DSP_STATE +{ + FMOD_DSP *instance; /* [r] Handle to the FMOD_DSP object the callback is associated with. Not to be modified. C++ users cast to FMOD::DSP to use. */ + void *plugindata; /* [w] Plugin writer created data the output author wants to attach to this object. */ + FMOD_CHANNELMASK channelmask; /* [r] Specifies which speakers the DSP effect is active on */ + FMOD_SPEAKERMODE source_speakermode; /* [r] Specifies which speaker mode the signal originated for information purposes, ie in case panning needs to be done differently. */ + float *sidechaindata; /* [r] The mixed result of all incoming sidechains is stored at this pointer address. */ + int sidechainchannels; /* [r] The number of channels of pcm data stored within the sidechain buffer. */ + FMOD_DSP_STATE_FUNCTIONS *functions; /* [r] Struct containing functions to give plugin developers the ability to query system state, access system level functionality and helpers. */ + int systemobject; /* [r] FMOD::System object index, relating to the System object that created this DSP. 
*/ +}; + + +/* + Macro helpers for accessing FMOD_DSP_STATE_FUNCTIONS +*/ +#define FMOD_DSP_ALLOC(_state, _size) \ + (_state)->functions->alloc(_size, FMOD_MEMORY_NORMAL, __FILE__) +#define FMOD_DSP_REALLOC(_state, _ptr, _size) \ + (_state)->functions->realloc(_ptr, _size, FMOD_MEMORY_NORMAL, __FILE__) +#define FMOD_DSP_FREE(_state, _ptr) \ + (_state)->functions->free(_ptr, FMOD_MEMORY_NORMAL, __FILE__) +#define FMOD_DSP_LOG(_state, _level, _location, _format, ...) \ + (_state)->functions->log(_level, __FILE__, __LINE__, _location, _format, __VA_ARGS__) +#define FMOD_DSP_GETSAMPLERATE(_state, _rate) \ + (_state)->functions->getsamplerate(_state, _rate) +#define FMOD_DSP_GETBLOCKSIZE(_state, _blocksize) \ + (_state)->functions->getblocksize(_state, _blocksize) +#define FMOD_DSP_GETSPEAKERMODE(_state, _speakermodemix, _speakermodeout) \ + (_state)->functions->getspeakermode(_state, _speakermodemix, _speakermodeout) +#define FMOD_DSP_GETCLOCK(_state, _clock, _offset, _length) \ + (_state)->functions->getclock(_state, _clock, _offset, _length) +#define FMOD_DSP_GETLISTENERATTRIBUTES(_state, _numlisteners, _attributes) \ + (_state)->functions->getlistenerattributes(_state, _numlisteners, _attributes) +#define FMOD_DSP_GETUSERDATA(_state, _userdata) \ + (_state)->functions->getuserdata(_state, _userdata) +#define FMOD_DSP_DFT_FFTREAL(_state, _size, _signal, _dft, _window, _signalhop) \ + (_state)->functions->dft->fftreal(_state, _size, _signal, _dft, _window, _signalhop) +#define FMOD_DSP_DFT_IFFTREAL(_state, _size, _dft, _signal, _window, _signalhop) \ + (_state)->functions->dft->inversefftreal(_state, _size, _dft, _signal, _window, _signalhop) +#define FMOD_DSP_PAN_SUMMONOMATRIX(_state, _sourcespeakermode, _lowfrequencygain, _overallgain, _matrix) \ + (_state)->functions->pan->summonomatrix(_state, _sourcespeakermode, _lowfrequencygain, _overallgain, _matrix) +#define FMOD_DSP_PAN_SUMSTEREOMATRIX(_state, _sourcespeakermode, _pan, _lowfrequencygain, _overallgain, 
_matrixhop, _matrix) \ + (_state)->functions->pan->sumstereomatrix(_state, _sourcespeakermode, _pan, _lowfrequencygain, _overallgain, _matrixhop, _matrix) +#define FMOD_DSP_PAN_SUMSURROUNDMATRIX(_state, _sourcespeakermode, _targetspeakermode, _direction, _extent, _rotation, _lowfrequencygain, _overallgain, _matrixhop, _matrix, _flags) \ + (_state)->functions->pan->sumsurroundmatrix(_state, _sourcespeakermode, _targetspeakermode, _direction, _extent, _rotation, _lowfrequencygain, _overallgain, _matrixhop, _matrix, _flags) +#define FMOD_DSP_PAN_SUMMONOTOSURROUNDMATRIX(_state, _targetspeakermode, _direction, _extent, _lowfrequencygain, _overallgain, _matrixhop, _matrix) \ + (_state)->functions->pan->summonotosurroundmatrix(_state, _targetspeakermode, _direction, _extent, _lowfrequencygain, _overallgain, _matrixhop, _matrix) +#define FMOD_DSP_PAN_SUMSTEREOTOSURROUNDMATRIX(_state, _targetspeakermode, _direction, _extent, _rotation, _lowfrequencygain, _overallgain, matrixhop, _matrix) \ + (_state)->functions->pan->sumstereotosurroundmatrix(_state, _targetspeakermode, _direction, _extent, _rotation, _lowfrequencygain, _overallgain, matrixhop, _matrix) +#define FMOD_DSP_PAN_GETROLLOFFGAIN(_state, _rolloff, _distance, _mindistance, _maxdistance, _gain) \ + (_state)->functions->pan->getrolloffgain(_state, _rolloff, _distance, _mindistance, _maxdistance, _gain) + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + DSP metering info used for retrieving metering info with DSP::getMeteringInfo + + [REMARKS] + Members marked with [r] mean the variable is modified by FMOD and is for reading purposes only. Do not change this value.
    + Members marked with [w] mean the variable can be written to. The user can set the value.
    + + [SEE_ALSO] + FMOD_SPEAKER + DSP::getMeteringInfo +] +*/ +typedef struct FMOD_DSP_METERING_INFO +{ + int numsamples; /* [r] The number of samples considered for this metering info. */ + float peaklevel[32]; /* [r] The peak level per channel. */ + float rmslevel[32]; /* [r] The rms level per channel. */ + short numchannels; /* [r] Number of channels. */ +} FMOD_DSP_METERING_INFO; + +#endif + diff --git a/app/src/main/cpp/inc/fmod_dsp_effects.h b/app/src/main/cpp/inc/fmod_dsp_effects.h new file mode 100644 index 0000000..9b30800 --- /dev/null +++ b/app/src/main/cpp/inc/fmod_dsp_effects.h @@ -0,0 +1,1273 @@ +/* ========================================================================================== */ +/* FMOD Studio - Built-in effects header file. */ +/* Copyright (c), Firelight Technologies Pty, Ltd. 2004-2017. */ +/* */ +/* In this header you can find parameter structures for FMOD system registered DSP effects */ +/* and generators. */ +/* */ +/* ========================================================================================== */ + +#ifndef _FMOD_DSP_EFFECTS_H +#define _FMOD_DSP_EFFECTS_H + +/* +[ENUM] +[ + [DESCRIPTION] + These definitions can be used for creating FMOD defined special effects or DSP units. + + [REMARKS] + To get them to be active, first create the unit, then add it somewhere into the DSP network, + either at the front of the network near the soundcard unit to affect the global output + (by using System::getDSPHead), or on a single channel (using Channel::getDSPHead). + + [SEE_ALSO] + System::createDSPByType +] +*/ +typedef enum +{ + FMOD_DSP_TYPE_UNKNOWN, /* This unit was created via a non FMOD plugin so has an unknown purpose. */ + FMOD_DSP_TYPE_MIXER, /* This unit does nothing but take inputs and mix them together then feed the result to the soundcard unit. */ + FMOD_DSP_TYPE_OSCILLATOR, /* This unit generates sine/square/saw/triangle or noise tones. 
*/ + FMOD_DSP_TYPE_LOWPASS, /* This unit filters sound using a high quality, resonant lowpass filter algorithm but consumes more CPU time. Deprecated and will be removed in a future release (see FMOD_DSP_LOWPASS remarks for alternatives). */ + FMOD_DSP_TYPE_ITLOWPASS, /* This unit filters sound using a resonant lowpass filter algorithm that is used in Impulse Tracker, but with limited cutoff range (0 to 8060hz). */ + FMOD_DSP_TYPE_HIGHPASS, /* This unit filters sound using a resonant highpass filter algorithm. Deprecated and will be removed in a future release (see FMOD_DSP_HIGHPASS remarks for alternatives). */ + FMOD_DSP_TYPE_ECHO, /* This unit produces an echo on the sound and fades out at the desired rate. */ + FMOD_DSP_TYPE_FADER, /* This unit pans and scales the volume of a unit. */ + FMOD_DSP_TYPE_FLANGE, /* This unit produces a flange effect on the sound. */ + FMOD_DSP_TYPE_DISTORTION, /* This unit distorts the sound. */ + FMOD_DSP_TYPE_NORMALIZE, /* This unit normalizes or amplifies the sound to a certain level. */ + FMOD_DSP_TYPE_LIMITER, /* This unit limits the sound to a certain level. */ + FMOD_DSP_TYPE_PARAMEQ, /* This unit attenuates or amplifies a selected frequency range. Deprecated and will be removed in a future release (see FMOD_DSP_PARAMEQ remarks for alternatives). */ + FMOD_DSP_TYPE_PITCHSHIFT, /* This unit bends the pitch of a sound without changing the speed of playback. */ + FMOD_DSP_TYPE_CHORUS, /* This unit produces a chorus effect on the sound. */ + FMOD_DSP_TYPE_VSTPLUGIN, /* This unit allows the use of Steinberg VST plugins */ + FMOD_DSP_TYPE_WINAMPPLUGIN, /* This unit allows the use of Nullsoft Winamp plugins */ + FMOD_DSP_TYPE_ITECHO, /* This unit produces an echo on the sound and fades out at the desired rate as is used in Impulse Tracker. 
*/ + FMOD_DSP_TYPE_COMPRESSOR, /* This unit implements dynamic compression (linked/unlinked multichannel, wideband) */ + FMOD_DSP_TYPE_SFXREVERB, /* This unit implements SFX reverb */ + FMOD_DSP_TYPE_LOWPASS_SIMPLE, /* This unit filters sound using a simple lowpass with no resonance, but has flexible cutoff and is fast. Deprecated and will be removed in a future release (see FMOD_DSP_LOWPASS_SIMPLE remarks for alternatives). */ + FMOD_DSP_TYPE_DELAY, /* This unit produces different delays on individual channels of the sound. */ + FMOD_DSP_TYPE_TREMOLO, /* This unit produces a tremolo / chopper effect on the sound. */ + FMOD_DSP_TYPE_LADSPAPLUGIN, /* Unsupported / Deprecated. */ + FMOD_DSP_TYPE_SEND, /* This unit sends a copy of the signal to a return DSP anywhere in the DSP tree. */ + FMOD_DSP_TYPE_RETURN, /* This unit receives signals from a number of send DSPs. */ + FMOD_DSP_TYPE_HIGHPASS_SIMPLE, /* This unit filters sound using a simple highpass with no resonance, but has flexible cutoff and is fast. Deprecated and will be removed in a future release (see FMOD_DSP_HIGHPASS_SIMPLE remarks for alternatives). */ + FMOD_DSP_TYPE_PAN, /* This unit pans the signal, possibly upmixing or downmixing as well. */ + FMOD_DSP_TYPE_THREE_EQ, /* This unit is a three-band equalizer. */ + FMOD_DSP_TYPE_FFT, /* This unit simply analyzes the signal and provides spectrum information back through getParameter. */ + FMOD_DSP_TYPE_LOUDNESS_METER, /* This unit analyzes the loudness and true peak of the signal. */ + FMOD_DSP_TYPE_ENVELOPEFOLLOWER, /* This unit tracks the envelope of the input/sidechain signal. Format to be publicly disclosed soon. */ + FMOD_DSP_TYPE_CONVOLUTIONREVERB, /* This unit implements convolution reverb. */ + FMOD_DSP_TYPE_CHANNELMIX, /* This unit provides per signal channel gain, and output channel mapping to allow 1 multichannel signal made up of many groups of signals to map to a single output signal. 
*/ + FMOD_DSP_TYPE_TRANSCEIVER, /* This unit 'sends' and 'receives' from a selection of up to 32 different slots. It is like a send/return but it uses global slots rather than returns as the destination. It also has other features. Multiple transceivers can receive from a single channel, or multiple transceivers can send to a single channel, or a combination of both. */ + FMOD_DSP_TYPE_OBJECTPAN, /* This unit sends the signal to a 3d object encoder like Dolby Atmos. Supports a subset of the FMOD_DSP_TYPE_PAN parameters. */ + FMOD_DSP_TYPE_MULTIBAND_EQ, /* This unit is a flexible five band parametric equalizer. */ + + FMOD_DSP_TYPE_MAX, /* Maximum number of pre-defined DSP types. */ + FMOD_DSP_TYPE_FORCEINT = 65536 /* Makes sure this enum is signed 32bit. */ +} FMOD_DSP_TYPE; + +/* + =================================================================================================== + + FMOD built in effect parameters. + Use DSP::setParameter with these enums for the 'index' parameter. + + =================================================================================================== +*/ + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_OSCILLATOR filter. + + [REMARKS] + + [SEE_ALSO] + DSP::setParameterFloat + DSP::setParameterInt + DSP::getParameterFloat + DSP::getParameterInt + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_OSCILLATOR_TYPE, /* (Type:int) - Waveform type. 0 = sine. 1 = square. 2 = sawup. 3 = sawdown. 4 = triangle. 5 = noise. */ + FMOD_DSP_OSCILLATOR_RATE /* (Type:float) - Frequency of the sinewave in hz. 1.0 to 22000.0. Default = 220.0. */ +} FMOD_DSP_OSCILLATOR; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_LOWPASS filter. + + [REMARKS] + Deprecated and will be removed in a future release, to emulate with FMOD_DSP_TYPE_MULTIBAND_EQ: + + // Configure a single band (band A) as a lowpass (all other bands default to off). + // 24dB rolloff to approximate the old effect curve. 
+ // Cutoff frequency can be used the same as with the old effect. + // Resonance can be applied by setting the 'Q' value of the new effect. + FMOD_DSP_SetParameterInt(multiband, FMOD_DSP_MULTIBAND_EQ_A_FILTER, FMOD_DSP_MULTIBAND_EQ_FILTER_LOWPASS_24DB); + FMOD_DSP_SetParameterFloat(multiband, FMOD_DSP_MULTIBAND_EQ_A_FREQUENCY, frequency); + FMOD_DSP_SetParameterFloat(multiband, FMOD_DSP_MULTIBAND_EQ_A_Q, resonance); + + [SEE_ALSO] + DSP::setParameterFloat + DSP::getParameterFloat + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_LOWPASS_CUTOFF, /* (Type:float) - Lowpass cutoff frequency in hz. 10.0 to 22000.0. Default = 5000.0. */ + FMOD_DSP_LOWPASS_RESONANCE /* (Type:float) - Lowpass resonance Q value. 1.0 to 10.0. Default = 1.0. */ +} FMOD_DSP_LOWPASS; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_ITLOWPASS filter.
    + This is different to the default FMOD_DSP_TYPE_ITLOWPASS filter in that it uses a different quality algorithm and is + the filter used to produce the correct sounding playback in .IT files.
    + FMOD Studio's .IT playback uses this filter.
    + + [REMARKS] + Note! This filter actually has a limited cutoff frequency below the specified maximum, due to its limited design, + so for a more open range filter use FMOD_DSP_LOWPASS or if you don't mind not having resonance, + FMOD_DSP_LOWPASS_SIMPLE.
    + The effective maximum cutoff is about 8060hz. + + [SEE_ALSO] + DSP::setParameterFloat + DSP::getParameterFloat + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_ITLOWPASS_CUTOFF, /* (Type:float) - Lowpass cutoff frequency in hz. 1.0 to 22000.0. Default = 5000.0/ */ + FMOD_DSP_ITLOWPASS_RESONANCE /* (Type:float) - Lowpass resonance Q value. 0.0 to 127.0. Default = 1.0. */ +} FMOD_DSP_ITLOWPASS; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_HIGHPASS filter. + + [REMARKS] + Deprecated and will be removed in a future release, to emulate with FMOD_DSP_TYPE_MULTIBAND_EQ: + + // Configure a single band (band A) as a highpass (all other bands default to off). + // 12dB rolloff to approximate the old effect curve. + // Cutoff frequency can be used the same as with the old effect. + // Resonance can be applied by setting the 'Q' value of the new effect. + FMOD_DSP_SetParameterInt(multiband, FMOD_DSP_MULTIBAND_EQ_A_FILTER, FMOD_DSP_MULTIBAND_EQ_FILTER_HIGHPASS_12DB); + FMOD_DSP_SetParameterFloat(multiband, FMOD_DSP_MULTIBAND_EQ_A_FREQUENCY, frequency); + FMOD_DSP_SetParameterFloat(multiband, FMOD_DSP_MULTIBAND_EQ_A_Q, resonance); + + [SEE_ALSO] + DSP::setParameterFloat + DSP::getParameterFloat + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_HIGHPASS_CUTOFF, /* (Type:float) - Highpass cutoff frequency in hz. 1.0 to output 22000.0. Default = 5000.0. */ + FMOD_DSP_HIGHPASS_RESONANCE /* (Type:float) - Highpass resonance Q value. 1.0 to 10.0. Default = 1.0. */ +} FMOD_DSP_HIGHPASS; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_ECHO filter. + + [REMARKS] + Note. Every time the delay is changed, the plugin re-allocates the echo buffer. This means the echo will dissapear at that time while it refills its new buffer.
    + Larger echo delays result in larger amounts of memory allocated.
    + + [SEE_ALSO] + DSP::setParameterFloat + DSP::getParameterFloat + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_ECHO_DELAY, /* (Type:float) - Echo delay in ms. 10 to 5000. Default = 500. */ + FMOD_DSP_ECHO_FEEDBACK, /* (Type:float) - Echo decay per delay. 0 to 100. 100.0 = No decay, 0.0 = total decay (ie simple 1 line delay). Default = 50.0. */ + FMOD_DSP_ECHO_DRYLEVEL, /* (Type:float) - Original sound volume in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_ECHO_WETLEVEL /* (Type:float) - Volume of echo signal to pass to output in dB. -80.0 to 10.0. Default = 0. */ +} FMOD_DSP_ECHO; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_FLANGE filter. + + [REMARKS] + Flange is an effect where the signal is played twice at the same time, and one copy slides back and forth creating a whooshing or flanging effect.
    + As there are 2 copies of the same signal, by default each signal is given 50% mix, so that the total is not louder than the original unaffected signal.
    +
    + Flange depth is a percentage of a 10ms shift from the original signal. Anything above 10ms is not considered flange because to the ear it begins to 'echo' so 10ms is the highest value possible.
    + + [SEE_ALSO] + DSP::setParameterFloat + DSP::getParameterFloat + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_FLANGE_MIX, /* (Type:float) - Percentage of wet signal in mix. 0 to 100. Default = 50. */ + FMOD_DSP_FLANGE_DEPTH, /* (Type:float) - Flange depth (percentage of 40ms delay). 0.01 to 1.0. Default = 1.0. */ + FMOD_DSP_FLANGE_RATE /* (Type:float) - Flange speed in hz. 0.0 to 20.0. Default = 0.1. */ +} FMOD_DSP_FLANGE; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_DISTORTION filter. + + [REMARKS] + + [SEE_ALSO] + DSP::setParameterFloat + DSP::getParameterFloat + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_DISTORTION_LEVEL /* (Type:float) - Distortion value. 0.0 to 1.0. Default = 0.5. */ +} FMOD_DSP_DISTORTION; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_NORMALIZE filter. + + [REMARKS] + Normalize amplifies the sound based on the maximum peaks within the signal.
    + For example if the maximum peaks in the signal were 50% of the bandwidth, it would scale the whole sound by 2.
    + The lower threshold value makes the normalizer ignores peaks below a certain point, to avoid over-amplification if a loud signal suddenly came in, and also to avoid amplifying to maximum things like background hiss.
    +
    + Because FMOD is a realtime audio processor, it doesn't have the luxury of knowing the peak for the whole sound (ie it can't see into the future), so it has to process data as it comes in.
    + To avoid very sudden changes in volume level based on small samples of new data, fmod fades towards the desired amplification which makes for smooth gain control. The fadetime parameter can control this.
    + + [SEE_ALSO] + DSP::setParameterFloat + DSP::getParameterFloat + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_NORMALIZE_FADETIME, /* (Type:float) - Time to ramp the silence to full in ms. 0.0 to 20000.0. Default = 5000.0. */ + FMOD_DSP_NORMALIZE_THRESHHOLD, /* (Type:float) - Lower volume range threshold to ignore. 0.0 to 1.0. Default = 0.1. Raise higher to stop amplification of very quiet signals. */ + FMOD_DSP_NORMALIZE_MAXAMP /* (Type:float) - Maximum amplification allowed. 1.0 to 100000.0. Default = 20.0. 1.0 = no amplifaction, higher values allow more boost. */ +} FMOD_DSP_NORMALIZE; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_LIMITER filter. + + [REMARKS] + + [SEE_ALSO] + DSP::setParameterFloat + DSP::getParameterFloat + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_LIMITER_RELEASETIME, /* (Type:float) - Time to ramp the silence to full in ms. 1.0 to 1000.0. Default = 10.0. */ + FMOD_DSP_LIMITER_CEILING, /* (Type:float) - Maximum level of the output signal in dB. -12.0 to 0.0. Default = 0.0. */ + FMOD_DSP_LIMITER_MAXIMIZERGAIN, /* (Type:float) - Maximum amplification allowed in dB. 0.0 to 12.0. Default = 0.0. 0.0 = no amplifaction, higher values allow more boost. */ + FMOD_DSP_LIMITER_MODE, /* (Type:float) - Channel processing mode. 0 or 1. Default = 0. 0 = Independent (limiter per channel), 1 = Linked. */ +} FMOD_DSP_LIMITER; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_PARAMEQ filter. + + [REMARKS] + Deprecated and will be removed in a future release, to emulate with FMOD_DSP_TYPE_MULTIBAND_EQ: + + // Configure a single band (band A) as a peaking EQ (all other bands default to off). + // Center frequency can be used as with the old effect. + // Bandwidth can be applied by setting the 'Q' value of the new effect. + // Gain at the center frequency can be used the same as with the old effect. 
+ FMOD_DSP_SetParameterInt(multiband, FMOD_DSP_MULTIBAND_EQ_A_FILTER, FMOD_DSP_MULTIBAND_EQ_FILTER_PEAKING); + FMOD_DSP_SetParameterFloat(multiband, FMOD_DSP_MULTIBAND_EQ_A_FREQUENCY, center); + FMOD_DSP_SetParameterFloat(multiband, FMOD_DSP_MULTIBAND_EQ_A_Q, bandwidth); + FMOD_DSP_SetParameterFloat(multiband, FMOD_DSP_MULTIBAND_EQ_A_GAIN, gain); + + Parametric EQ is a single band peaking EQ filter that attenuates or amplifies a selected frequency and its neighbouring frequencies. + + When a frequency has its gain set to 1.0, the sound will be unaffected and represents the original signal exactly. + + [SEE_ALSO] + DSP::setParameterFloat + DSP::getParameterFloat + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_PARAMEQ_CENTER, /* (Type:float) - Frequency center. 20.0 to 22000.0. Default = 8000.0. */ + FMOD_DSP_PARAMEQ_BANDWIDTH, /* (Type:float) - Octave range around the center frequency to filter. 0.2 to 5.0. Default = 1.0. */ + FMOD_DSP_PARAMEQ_GAIN /* (Type:float) - Frequency Gain in dB. -30 to 30. Default = 0. */ +} FMOD_DSP_PARAMEQ; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_MULTIBAND_EQ filter. + + [REMARKS] + Flexible five band parametric equalizer. + + [SEE_ALSO] + DSP::setParameterInt + DSP::getParameterInt + DSP::setParameterFloat + DSP::getParameterFloat + FMOD_DSP_TYPE +] +*/ +typedef enum FMOD_DSP_MULTIBAND_EQ +{ + FMOD_DSP_MULTIBAND_EQ_A_FILTER, /* (Type:int) - Band A: FMOD_DSP_MULTIBAND_EQ_FILTER_TYPE used to interpret the behavior of the remaining parameters. Default = FMOD_DSP_MULTIBAND_EQ_FILTER_LOWPASS_12DB */ + FMOD_DSP_MULTIBAND_EQ_A_FREQUENCY, /* (Type:float) - Band A: Significant frequency in Hz, cutoff [low/high pass, low/high shelf], center [notch, peaking, band-pass], phase transition point [all-pass]. 20 to 22000. Default = 8000. 
*/ + FMOD_DSP_MULTIBAND_EQ_A_Q, /* (Type:float) - Band A: Quality factor, resonance [low/high pass], bandwidth [notch, peaking, band-pass], phase transition sharpness [all-pass], unused [low/high shelf]. 0.1 to 10.0. Default = 0.707. */ + FMOD_DSP_MULTIBAND_EQ_A_GAIN, /* (Type:float) - Band A: Boost or attenuation in dB [peaking, high/low shelf only]. -30 to 30. Default = 0. */ + FMOD_DSP_MULTIBAND_EQ_B_FILTER, /* (Type:int) - Band B: See Band A. Default = FMOD_DSP_MULTIBAND_EQ_FILTER_DISABLED */ + FMOD_DSP_MULTIBAND_EQ_B_FREQUENCY, /* (Type:float) - Band B: See Band A */ + FMOD_DSP_MULTIBAND_EQ_B_Q, /* (Type:float) - Band B: See Band A */ + FMOD_DSP_MULTIBAND_EQ_B_GAIN, /* (Type:float) - Band B: See Band A */ + FMOD_DSP_MULTIBAND_EQ_C_FILTER, /* (Type:int) - Band C: See Band A. Default = FMOD_DSP_MULTIBAND_EQ_FILTER_DISABLED */ + FMOD_DSP_MULTIBAND_EQ_C_FREQUENCY, /* (Type:float) - Band C: See Band A. */ + FMOD_DSP_MULTIBAND_EQ_C_Q, /* (Type:float) - Band C: See Band A. */ + FMOD_DSP_MULTIBAND_EQ_C_GAIN, /* (Type:float) - Band C: See Band A. */ + FMOD_DSP_MULTIBAND_EQ_D_FILTER, /* (Type:int) - Band D: See Band A. Default = FMOD_DSP_MULTIBAND_EQ_FILTER_DISABLED */ + FMOD_DSP_MULTIBAND_EQ_D_FREQUENCY, /* (Type:float) - Band D: See Band A. */ + FMOD_DSP_MULTIBAND_EQ_D_Q, /* (Type:float) - Band D: See Band A. */ + FMOD_DSP_MULTIBAND_EQ_D_GAIN, /* (Type:float) - Band D: See Band A. */ + FMOD_DSP_MULTIBAND_EQ_E_FILTER, /* (Type:int) - Band E: See Band A. Default = FMOD_DSP_MULTIBAND_EQ_FILTER_DISABLED */ + FMOD_DSP_MULTIBAND_EQ_E_FREQUENCY, /* (Type:float) - Band E: See Band A. */ + FMOD_DSP_MULTIBAND_EQ_E_Q, /* (Type:float) - Band E: See Band A. */ + FMOD_DSP_MULTIBAND_EQ_E_GAIN, /* (Type:float) - Band E: See Band A. */ +} FMOD_DSP_MULTIBAND_EQ; + + +/* +[ENUM] +[ + [DESCRIPTION] + Filter types for FMOD_DSP_MULTIBAND_EQ. 
+ + [REMARKS] + + [SEE_ALSO] + FMOD_DSP_MULTIBAND_EQ +] +*/ +typedef enum FMOD_DSP_MULTIBAND_EQ_FILTER_TYPE +{ + FMOD_DSP_MULTIBAND_EQ_FILTER_DISABLED, /* Disabled filter, no processing. */ + FMOD_DSP_MULTIBAND_EQ_FILTER_LOWPASS_12DB, /* Resonant low-pass filter, attenuates frequencies (12dB per octave) above a given point (with specificed resonance) while allowing the rest to pass. */ + FMOD_DSP_MULTIBAND_EQ_FILTER_LOWPASS_24DB, /* Resonant low-pass filter, attenuates frequencies (24dB per octave) above a given point (with specificed resonance) while allowing the rest to pass. */ + FMOD_DSP_MULTIBAND_EQ_FILTER_LOWPASS_48DB, /* Resonant low-pass filter, attenuates frequencies (48dB per octave) above a given point (with specificed resonance) while allowing the rest to pass. */ + FMOD_DSP_MULTIBAND_EQ_FILTER_HIGHPASS_12DB, /* Resonant low-pass filter, attenuates frequencies (12dB per octave) below a given point (with specificed resonance) while allowing the rest to pass. */ + FMOD_DSP_MULTIBAND_EQ_FILTER_HIGHPASS_24DB, /* Resonant low-pass filter, attenuates frequencies (24dB per octave) below a given point (with specificed resonance) while allowing the rest to pass. */ + FMOD_DSP_MULTIBAND_EQ_FILTER_HIGHPASS_48DB, /* Resonant low-pass filter, attenuates frequencies (48dB per octave) below a given point (with specificed resonance) while allowing the rest to pass. */ + FMOD_DSP_MULTIBAND_EQ_FILTER_LOWSHELF, /* Low-shelf filter, boosts or attenuates frequencies (with specified gain) below a given point while allowing the rest to pass. */ + FMOD_DSP_MULTIBAND_EQ_FILTER_HIGHSHELF, /* High-shelf filter, boosts or attenuates frequencies (with specified gain) above a given point while allowing the rest to pass. */ + FMOD_DSP_MULTIBAND_EQ_FILTER_PEAKING, /* Peaking filter, boosts or attenuates frequencies (with specified gain) at a given point (with specificed bandwidth) while allowing the rest to pass. 
*/ + FMOD_DSP_MULTIBAND_EQ_FILTER_BANDPASS, /* Band-pass filter, allows frequencies at a given point (with specificed bandwidth) to pass while attenuating frequencies outside this range. */ + FMOD_DSP_MULTIBAND_EQ_FILTER_NOTCH, /* Notch or band-reject filter, attenuates frequencies at a given point (with specificed bandwidth) while allowing frequencies outside this range to pass. */ + FMOD_DSP_MULTIBAND_EQ_FILTER_ALLPASS, /* All-pass filter, allows all frequencies to pass, but changes the phase response at a given point (with specified sharpness). */ +} FMOD_DSP_MULTIBAND_EQ_FILTER_TYPE; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_PITCHSHIFT filter. + + [REMARKS] + This pitch shifting unit can be used to change the pitch of a sound without speeding it up or slowing it down.
    + It can also be used for time stretching or scaling, for example if the pitch was doubled, and the frequency of the sound was halved, the pitch of the sound would sound correct but it would be twice as slow.
    +
    + Warning! This filter is very computationally expensive! Similar to a vocoder, it requires several overlapping FFT and IFFT's to produce smooth output, and can require around 440mhz for 1 stereo 48khz signal using the default settings.
    + Reducing the signal to mono will half the cpu usage.
    + Reducing this will lower audio quality, but what settings to use are largely dependant on the sound being played. A noisy polyphonic signal will need higher fft size compared to a speaking voice for example.
    +
    + This pitch shifter is based on the pitch shifter code at http://www.dspdimension.com, written by Stephan M. Bernsee.
    + The original code is COPYRIGHT 1999-2003 Stephan M. Bernsee .
    +
    + 'maxchannels' dictates the amount of memory allocated. By default, the maxchannels value is 0. If FMOD is set to stereo, the pitch shift unit will allocate enough memory for 2 channels. If it is 5.1, it will allocate enough memory for a 6 channel pitch shift, etc.
    + If the pitch shift effect is only ever applied to the global mix (ie it was added with ChannelGroup::addDSP), then 0 is the value to set as it will be enough to handle all speaker modes.
    + When the pitch shift is added to a channel (ie Channel::addDSP) then the channel count that comes in could be anything from 1 to 8 possibly. It is only in this case where you might want to increase the channel count above the output's channel count.
    + If a channel pitch shift is set to a lower number than the sound's channel count that is coming in, it will not pitch shift the sound.
    +
    + NOTE! Not supported on PlayStation 3.
    + + [SEE_ALSO] + DSP::setParameterFloat + DSP::getParameterFloat + ChannelGroup::addDSP + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_PITCHSHIFT_PITCH, /* (Type:float) - Pitch value. 0.5 to 2.0. Default = 1.0. 0.5 = one octave down, 2.0 = one octave up. 1.0 does not change the pitch. */ + FMOD_DSP_PITCHSHIFT_FFTSIZE, /* (Type:float) - FFT window size. 256, 512, 1024, 2048, 4096. Default = 1024. Increase this to reduce 'smearing'. This effect is a warbling sound similar to when an mp3 is encoded at very low bitrates. */ + FMOD_DSP_PITCHSHIFT_OVERLAP, /* (Type:float) - Removed. Do not use. FMOD now uses 4 overlaps and cannot be changed. */ + FMOD_DSP_PITCHSHIFT_MAXCHANNELS /* (Type:float) - Maximum channels supported. 0 to 16. 0 = same as fmod's default output polyphony, 1 = mono, 2 = stereo etc. See remarks for more. Default = 0. It is suggested to leave at 0! */ +} FMOD_DSP_PITCHSHIFT; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_CHORUS filter. + + [REMARKS] + Chorous is an effect where the sound is more 'spacious' due to 1 to 3 versions of the sound being played along side the original signal but with the pitch of each copy modulating on a sine wave.
    + + [SEE_ALSO] + DSP::setParameterFloat + DSP::getParameterFloat + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_CHORUS_MIX, /* (Type:float) - Volume of original signal to pass to output. 0.0 to 100.0. Default = 50.0. */ + FMOD_DSP_CHORUS_RATE, /* (Type:float) - Chorus modulation rate in Hz. 0.0 to 20.0. Default = 0.8 Hz. */ + FMOD_DSP_CHORUS_DEPTH, /* (Type:float) - Chorus modulation depth. 0.0 to 100.0. Default = 3.0. */ +} FMOD_DSP_CHORUS; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_ITECHO filter.
    + This is effectively a software based echo filter that emulates the DirectX DMO echo effect. Impulse tracker files can support this, and FMOD will produce the effect on ANY platform, not just those that support DirectX effects!
    + + [REMARKS] + Note. Every time the delay is changed, the plugin re-allocates the echo buffer. This means the echo will dissapear at that time while it refills its new buffer.
    + Larger echo delays result in larger amounts of memory allocated.
    +
    + As this is a stereo filter made mainly for IT playback, it is targeted for stereo signals.
    + With mono signals only the FMOD_DSP_ITECHO_LEFTDELAY is used.
    + For multichannel signals (>2) there will be no echo on those channels.
    + + [SEE_ALSO] + DSP::setParameterFloat + DSP::getParameterFloat + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_ITECHO_WETDRYMIX, /* (Type:float) - Ratio of wet (processed) signal to dry (unprocessed) signal. Must be in the range from 0.0 through 100.0 (all wet). Default = 50. */ + FMOD_DSP_ITECHO_FEEDBACK, /* (Type:float) - Percentage of output fed back into input, in the range from 0.0 through 100.0. Default = 50. */ + FMOD_DSP_ITECHO_LEFTDELAY, /* (Type:float) - Delay for left channel, in milliseconds, in the range from 1.0 through 2000.0. Default = 500 ms. */ + FMOD_DSP_ITECHO_RIGHTDELAY, /* (Type:float) - Delay for right channel, in milliseconds, in the range from 1.0 through 2000.0. Default = 500 ms. */ + FMOD_DSP_ITECHO_PANDELAY /* (Type:float) - Value that specifies whether to swap left and right delays with each successive echo. Ranges from 0.0 (equivalent to FALSE) to 1.0 (equivalent to TRUE), meaning no swap. Default = 0. CURRENTLY NOT SUPPORTED. */ +} FMOD_DSP_ITECHO; + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_COMPRESSOR unit. + This is a multichannel software limiter that is uniform across the whole spectrum. + + [REMARKS] + The limiter is not guaranteed to catch every peak above the threshold level, + because it cannot apply gain reduction instantaneously - the time delay is + determined by the attack time. However setting the attack time too short will + distort the sound, so it is a compromise. High level peaks can be avoided by + using a short attack time - but not too short, and setting the threshold a few + decibels below the critical level. +
    + + [SEE_ALSO] + DSP::setParameterFloat + DSP::getParameterFloat + DSP::setParameterBool + DSP::getParameterBool + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_COMPRESSOR_THRESHOLD, /* (Type:float) - Threshold level (dB) in the range from -80 through 0. Default = 0. */ + FMOD_DSP_COMPRESSOR_RATIO, /* (Type:float) - Compression Ratio (dB/dB) in the range from 1 to 50. Default = 2.5. */ + FMOD_DSP_COMPRESSOR_ATTACK, /* (Type:float) - Attack time (milliseconds), in the range from 0.1 through 1000. Default value is 20. */ + FMOD_DSP_COMPRESSOR_RELEASE, /* (Type:float) - Release time (milliseconds), in the range from 10 through 5000. Default value is 100 */ + FMOD_DSP_COMPRESSOR_GAINMAKEUP, /* (Type:float) - Make-up gain (dB) applied after limiting, in the range from 0 through 30. Default = 0. */ + FMOD_DSP_COMPRESSOR_USESIDECHAIN, /* (Type:data) - Data of type FMOD_DSP_PARAMETER_SIDECHAIN. Whether to analyse the sidechain signal instead of the input signal. Default is { false } */ + FMOD_DSP_COMPRESSOR_LINKED /* (Type:bool) - FALSE = Independent (compressor per channel), TRUE = Linked. Default = TRUE. */ +} FMOD_DSP_COMPRESSOR; + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_SFXREVERB unit.
    + + [REMARKS] + This is a high quality I3DL2 based reverb.
    + On top of the I3DL2 property set, "Dry Level" is also included to allow the dry mix to be changed.
    +
    + These properties can be set with presets in FMOD_REVERB_PRESETS. + + [SEE_ALSO] + DSP::setParameterFloat + DSP::getParameterFloat + FMOD_DSP_TYPE + FMOD_REVERB_PRESETS +] +*/ +typedef enum +{ + FMOD_DSP_SFXREVERB_DECAYTIME, /* (Type:float) - Decay Time : Reverberation decay time at low-frequencies in milliseconds. Ranges from 100.0 to 20000.0. Default is 1500. */ + FMOD_DSP_SFXREVERB_EARLYDELAY, /* (Type:float) - Early Delay : Delay time of first reflection in milliseconds. Ranges from 0.0 to 300.0. Default is 20. */ + FMOD_DSP_SFXREVERB_LATEDELAY, /* (Type:float) - Reverb Delay : Late reverberation delay time relative to first reflection in milliseconds. Ranges from 0.0 to 100.0. Default is 40. */ + FMOD_DSP_SFXREVERB_HFREFERENCE, /* (Type:float) - HF Reference : Reference frequency for high-frequency decay in Hz. Ranges from 20.0 to 20000.0. Default is 5000. */ + FMOD_DSP_SFXREVERB_HFDECAYRATIO, /* (Type:float) - Decay HF Ratio : High-frequency decay time relative to decay time in percent. Ranges from 10.0 to 100.0. Default is 50. */ + FMOD_DSP_SFXREVERB_DIFFUSION, /* (Type:float) - Diffusion : Reverberation diffusion (echo density) in percent. Ranges from 0.0 to 100.0. Default is 100. */ + FMOD_DSP_SFXREVERB_DENSITY, /* (Type:float) - Density : Reverberation density (modal density) in percent. Ranges from 0.0 to 100.0. Default is 100. */ + FMOD_DSP_SFXREVERB_LOWSHELFFREQUENCY, /* (Type:float) - Low Shelf Frequency : Transition frequency of low-shelf filter in Hz. Ranges from 20.0 to 1000.0. Default is 250. */ + FMOD_DSP_SFXREVERB_LOWSHELFGAIN, /* (Type:float) - Low Shelf Gain : Gain of low-shelf filter in dB. Ranges from -36.0 to 12.0. Default is 0. */ + FMOD_DSP_SFXREVERB_HIGHCUT, /* (Type:float) - High Cut : Cutoff frequency of low-pass filter in Hz. Ranges from 20.0 to 20000.0. Default is 20000. */ + FMOD_DSP_SFXREVERB_EARLYLATEMIX, /* (Type:float) - Early/Late Mix : Blend ratio of late reverb to early reflections in percent. Ranges from 0.0 to 100.0. 
Default is 50. */ + FMOD_DSP_SFXREVERB_WETLEVEL, /* (Type:float) - Wet Level : Reverb signal level in dB. Ranges from -80.0 to 20.0. Default is -6. */ + FMOD_DSP_SFXREVERB_DRYLEVEL /* (Type:float) - Dry Level : Dry signal level in dB. Ranges from -80.0 to 20.0. Default is 0. */ +} FMOD_DSP_SFXREVERB; + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_LOWPASS_SIMPLE filter. + + [REMARKS] + Deprecated and will be removed in a future release, to emulate with FMOD_DSP_TYPE_MULTIBAND_EQ: + + // Configure a single band (band A) as a lowpass (all other bands default to off). + // 12dB rolloff to approximate the old effect curve. + // Cutoff frequency can be used the same as with the old effect. + // Resonance / 'Q' should remain at default 0.707. + FMOD_DSP_SetParameterInt(multiband, FMOD_DSP_MULTIBAND_EQ_A_FILTER, FMOD_DSP_MULTIBAND_EQ_FILTER_LOWPASS_12DB); + FMOD_DSP_SetParameterFloat(multiband, FMOD_DSP_MULTIBAND_EQ_A_FREQUENCY, frequency); + + This is a very simple low pass filter, based on two single-pole RC time-constant modules. + + The emphasis is on speed rather than accuracy, so this should not be used for task requiring critical filtering. + + [SEE_ALSO] + DSP::setParameterFloat + DSP::getParameterFloat + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_LOWPASS_SIMPLE_CUTOFF /* (Type:float) - Lowpass cutoff frequency in hz. 10.0 to 22000.0. Default = 5000.0 */ +} FMOD_DSP_LOWPASS_SIMPLE; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_DELAY filter. + + [REMARKS] + Note. Every time MaxDelay is changed, the plugin re-allocates the delay buffer. This means the delay will dissapear at that time while it refills its new buffer.
    + A larger MaxDelay results in larger amounts of memory allocated.
    + Channel delays above MaxDelay will be clipped to MaxDelay and the delay buffer will not be resized.
    +
    + NOTE! Not supported on PlayStation 3. + + [SEE_ALSO] + DSP::setParameterFloat + DSP::getParameterFloat + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_DELAY_CH0, /* (Type:float) - Channel #0 Delay in ms. 0 to 10000. Default = 0. */ + FMOD_DSP_DELAY_CH1, /* (Type:float) - Channel #1 Delay in ms. 0 to 10000. Default = 0. */ + FMOD_DSP_DELAY_CH2, /* (Type:float) - Channel #2 Delay in ms. 0 to 10000. Default = 0. */ + FMOD_DSP_DELAY_CH3, /* (Type:float) - Channel #3 Delay in ms. 0 to 10000. Default = 0. */ + FMOD_DSP_DELAY_CH4, /* (Type:float) - Channel #4 Delay in ms. 0 to 10000. Default = 0. */ + FMOD_DSP_DELAY_CH5, /* (Type:float) - Channel #5 Delay in ms. 0 to 10000. Default = 0. */ + FMOD_DSP_DELAY_CH6, /* (Type:float) - Channel #6 Delay in ms. 0 to 10000. Default = 0. */ + FMOD_DSP_DELAY_CH7, /* (Type:float) - Channel #7 Delay in ms. 0 to 10000. Default = 0. */ + FMOD_DSP_DELAY_CH8, /* (Type:float) - Channel #8 Delay in ms. 0 to 10000. Default = 0. */ + FMOD_DSP_DELAY_CH9, /* (Type:float) - Channel #9 Delay in ms. 0 to 10000. Default = 0. */ + FMOD_DSP_DELAY_CH10, /* (Type:float) - Channel #10 Delay in ms. 0 to 10000. Default = 0. */ + FMOD_DSP_DELAY_CH11, /* (Type:float) - Channel #11 Delay in ms. 0 to 10000. Default = 0. */ + FMOD_DSP_DELAY_CH12, /* (Type:float) - Channel #12 Delay in ms. 0 to 10000. Default = 0. */ + FMOD_DSP_DELAY_CH13, /* (Type:float) - Channel #13 Delay in ms. 0 to 10000. Default = 0. */ + FMOD_DSP_DELAY_CH14, /* (Type:float) - Channel #14 Delay in ms. 0 to 10000. Default = 0. */ + FMOD_DSP_DELAY_CH15, /* (Type:float) - Channel #15 Delay in ms. 0 to 10000. Default = 0. */ + FMOD_DSP_DELAY_MAXDELAY /* (Type:float) - Maximum delay in ms. 0 to 10000. Default = 10. */ +} FMOD_DSP_DELAY; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_TREMOLO filter. + + [REMARKS] + The tremolo effect varies the amplitude of a sound. Depending on the settings, this unit can produce a tremolo, chopper or auto-pan effect.
    +
    + The shape of the LFO (low freq. oscillator) can morphed between sine, triangle and sawtooth waves using the FMOD_DSP_TREMOLO_SHAPE and FMOD_DSP_TREMOLO_SKEW parameters.
    + FMOD_DSP_TREMOLO_DUTY and FMOD_DSP_TREMOLO_SQUARE are useful for a chopper-type effect where the first controls the on-time duration and second controls the flatness of the envelope.
    + FMOD_DSP_TREMOLO_SPREAD varies the LFO phase between channels to get an auto-pan effect. This works best with a sine shape LFO.
    + The LFO can be synchronized using the FMOD_DSP_TREMOLO_PHASE parameter which sets its instantaneous phase.
    + + [SEE_ALSO] + DSP::setParameterFloat + DSP::getParameterFloat + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_TREMOLO_FREQUENCY, /* (Type:float) - LFO frequency in Hz. 0.1 to 20. Default = 5. */ + FMOD_DSP_TREMOLO_DEPTH, /* (Type:float) - Tremolo depth. 0 to 1. Default = 1. */ + FMOD_DSP_TREMOLO_SHAPE, /* (Type:float) - LFO shape morph between triangle and sine. 0 to 1. Default = 0. */ + FMOD_DSP_TREMOLO_SKEW, /* (Type:float) - Time-skewing of LFO cycle. -1 to 1. Default = 0. */ + FMOD_DSP_TREMOLO_DUTY, /* (Type:float) - LFO on-time. 0 to 1. Default = 0.5. */ + FMOD_DSP_TREMOLO_SQUARE, /* (Type:float) - Flatness of the LFO shape. 0 to 1. Default = 0. */ + FMOD_DSP_TREMOLO_PHASE, /* (Type:float) - Instantaneous LFO phase. 0 to 1. Default = 0. */ + FMOD_DSP_TREMOLO_SPREAD /* (Type:float) - Rotation / auto-pan effect. -1 to 1. Default = 0. */ +} FMOD_DSP_TREMOLO; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_SEND DSP. + + [REMARKS] + + [SEE_ALSO] + DSP::setParameterInt + DSP::getParameterInt + DSP::setParameterFloat + DSP::getParameterFloat + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_SEND_RETURNID, /* (Type:int) - ID of the Return DSP this send is connected to (integer values only). -1 indicates no connected Return DSP. Default = -1. */ + FMOD_DSP_SEND_LEVEL, /* (Type:float) - Send level. 0.0 to 1.0. Default = 1.0 */ +} FMOD_DSP_SEND; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_RETURN DSP. + + [REMARKS] + + [SEE_ALSO] + DSP::setParameterInt + DSP::getParameterInt + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_RETURN_ID, /* (Type:int) - [r] ID of this Return DSP. Read-only. Default = -1. */ + FMOD_DSP_RETURN_INPUT_SPEAKER_MODE /* (Type:int) - [r/w] Input speaker mode of this return. Default = FMOD_SPEAKERMODE_DEFAULT. */ +} FMOD_DSP_RETURN; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_HIGHPASS_SIMPLE filter. 
+ + [REMARKS] + Deprecated and will be removed in a future release, to emulate with FMOD_DSP_TYPE_MULTIBAND_EQ: + + // Configure a single band (band A) as a highpass (all other bands default to off). + // 12dB rolloff to approximate the old effect curve. + // Cutoff frequency can be used the same as with the old effect. + // Resonance / 'Q' should remain at default 0.707. + FMOD_DSP_SetParameterInt(multiband, FMOD_DSP_MULTIBAND_EQ_A_FILTER, FMOD_DSP_MULTIBAND_EQ_FILTER_HIGHPASS_12DB); + FMOD_DSP_SetParameterFloat(multiband, FMOD_DSP_MULTIBAND_EQ_A_FREQUENCY, frequency); + + This is a very simple single-order high pass filter. + + The emphasis is on speed rather than accuracy, so this should not be used for task requiring critical filtering. + + [SEE_ALSO] + DSP::setParameterFloat + DSP::getParameterFloat + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_HIGHPASS_SIMPLE_CUTOFF /* (Type:float) - Highpass cutoff frequency in hz. 10.0 to 22000.0. Default = 1000.0 */ +} FMOD_DSP_HIGHPASS_SIMPLE; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter values for the FMOD_DSP_PAN_2D_STEREO_MODE parameter of the FMOD_DSP_TYPE_PAN DSP. + + [REMARKS] + + [SEE_ALSO] + FMOD_DSP_PAN +] +*/ +typedef enum +{ + FMOD_DSP_PAN_2D_STEREO_MODE_DISTRIBUTED, /* The parts of a stereo sound are spread around desination speakers based on FMOD_DSP_PAN_2D_EXTENT / FMOD_DSP_PAN_2D_DIRECTION */ + FMOD_DSP_PAN_2D_STEREO_MODE_DISCRETE /* The L/R parts of a stereo sound are rotated around a circle based on FMOD_DSP_PAN_2D_STEREO_AXIS / FMOD_DSP_PAN_2D_STEREO_SEPARATION. */ +} FMOD_DSP_PAN_2D_STEREO_MODE_TYPE; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter values for the FMOD_DSP_PAN_MODE parameter of the FMOD_DSP_TYPE_PAN DSP. 
+ + [REMARKS] + + [SEE_ALSO] + FMOD_DSP_PAN +] +*/ +typedef enum +{ + FMOD_DSP_PAN_MODE_MONO, + FMOD_DSP_PAN_MODE_STEREO, + FMOD_DSP_PAN_MODE_SURROUND +} FMOD_DSP_PAN_MODE_TYPE; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter values for the FMOD_DSP_PAN_3D_ROLLOFF parameter of the FMOD_DSP_TYPE_PAN DSP. + + [REMARKS] + + [SEE_ALSO] + FMOD_DSP_PAN +] +*/ +typedef enum +{ + FMOD_DSP_PAN_3D_ROLLOFF_LINEARSQUARED, + FMOD_DSP_PAN_3D_ROLLOFF_LINEAR, + FMOD_DSP_PAN_3D_ROLLOFF_INVERSE, + FMOD_DSP_PAN_3D_ROLLOFF_INVERSETAPERED, + FMOD_DSP_PAN_3D_ROLLOFF_CUSTOM +} FMOD_DSP_PAN_3D_ROLLOFF_TYPE; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter values for the FMOD_DSP_PAN_3D_EXTENT_MODE parameter of the FMOD_DSP_TYPE_PAN DSP. + + [REMARKS] + + [SEE_ALSO] + FMOD_DSP_PAN +] +*/ +typedef enum +{ + FMOD_DSP_PAN_3D_EXTENT_MODE_AUTO, + FMOD_DSP_PAN_3D_EXTENT_MODE_USER, + FMOD_DSP_PAN_3D_EXTENT_MODE_OFF +} FMOD_DSP_PAN_3D_EXTENT_MODE_TYPE; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_PAN DSP. + + [REMARKS] + FMOD_DSP_PAN_3D_PAN_BLEND controls the percentage of the effect supplied by FMOD_DSP_PAN_2D_DIRECTION and FMOD_DSP_PAN_2D_EXTENT. + + For FMOD_DSP_PAN_3D_POSITION, the following members in the FMOD_DSP_PARAMETER_3DATTRIBUTES_MULTI struct should be non zero. + - numlisteners - This is typically 1, can be up to 8. Typically more than 1 is only used for split screen purposes. The FMOD Panner will average angles and produce the best compromise for panning and attenuation. + - relative[listenernum].position - This is the delta between the listener position and the sound position. Typically the listener position is subtracted from the sound position. + - relative[listenernum].forward - This is the sound's forward vector. Optional, set to 0,0,1 if not needed. This is only relevant for more than mono sounds in 3D, that are spread amongst the destination speakers at the time of panning. 
+ If the sound rotates then the L/R part of a stereo sound will rotate amongst its destination speakers. + If the sound has moved and pinpointed into a single speaker, rotation of the sound will have no effect as at that point the channels are collapsed into a single point. + + For FMOD_DSP_PAN_2D_STEREO_MODE, when it is set to FMOD_DSP_PAN_2D_STEREO_MODE_DISCRETE, only FMOD_DSP_PAN_2D_STEREO_SEPARATION and FMOD_DSP_PAN_2D_STEREO_AXIS are used. + When it is set to FMOD_DSP_PAN_2D_STEREO_MODE_DISTRIBUTED, then standard FMOD_DSP_PAN_2D_DIRECTION/FMOD_DSP_PAN_2D_EXTENT parameters are used. + + [SEE_ALSO] + DSP::setParameterFloat + DSP::getParameterFloat + DSP::setParameterInt + DSP::getParameterInt + DSP::setParameterData + DSP::getParameterData + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_PAN_MODE, /* (Type:int) - Panner mode. FMOD_DSP_PAN_MODE_MONO for mono down-mix, FMOD_DSP_PAN_MODE_STEREO for stereo panning or FMOD_DSP_PAN_MODE_SURROUND for surround panning. Default = FMOD_DSP_PAN_MODE_SURROUND */ + FMOD_DSP_PAN_2D_STEREO_POSITION, /* (Type:float) - 2D Stereo pan position. -100.0 to 100.0. Default = 0.0. */ + FMOD_DSP_PAN_2D_DIRECTION, /* (Type:float) - 2D Surround pan direction. Direction from center point of panning circle. -180.0 (degrees) to 180.0 (degrees). 0 = front center, -180 or +180 = rear speakers center point. Default = 0.0. */ + FMOD_DSP_PAN_2D_EXTENT, /* (Type:float) - 2D Surround pan extent. Distance from center point of panning circle. 0.0 (degrees) to 360.0 (degrees). Default = 360.0. */ + FMOD_DSP_PAN_2D_ROTATION, /* (Type:float) - 2D Surround pan rotation. -180.0 (degrees) to 180.0 (degrees). Default = 0.0. */ + FMOD_DSP_PAN_2D_LFE_LEVEL, /* (Type:float) - 2D Surround pan LFE level. 2D LFE level in dB. -80.0 (db) to 20.0 (db). Default = 0.0. */ + FMOD_DSP_PAN_2D_STEREO_MODE, /* (Type:int) - Stereo-To-Surround Mode. FMOD_DSP_PAN_2D_STEREO_MODE_DISTRIBUTED to FMOD_DSP_PAN_2D_STEREO_MODE_DISCRETE. 
Default = FMOD_DSP_PAN_2D_STEREO_MODE_DISCRETE.*/ + FMOD_DSP_PAN_2D_STEREO_SEPARATION, /* (Type:float) - Stereo-To-Surround Stereo For FMOD_DSP_PAN_2D_STEREO_MODE_DISCRETE mode. Separation/width of L/R parts of stereo sound. -180.0 (degrees) to +180.0 (degrees). Default = 60.0. */ + FMOD_DSP_PAN_2D_STEREO_AXIS, /* (Type:float) - Stereo-To-Surround Stereo For FMOD_DSP_PAN_2D_STEREO_MODE_DISCRETE mode. Axis/rotation of L/R parts of stereo sound. -180.0 (degrees) to +180.0 (degrees). Default = 0.0. */ + FMOD_DSP_PAN_ENABLED_SPEAKERS, /* (Type:int) - Speakers Enabled. Bitmask for each speaker from 0 to 32 to be considered by panner. Use to disable speakers from being panned to. 0 to 0xFFF. Default = 0xFFF (All on). */ + FMOD_DSP_PAN_3D_POSITION, /* (Type:data) - 3D Position. Data of type FMOD_DSP_PARAMETER_3DATTRIBUTES_MULTI. See remarks on what to fill out. */ + FMOD_DSP_PAN_3D_ROLLOFF, /* (Type:int) - 3D Rolloff. FMOD_DSP_PAN_3D_ROLLOFF_LINEARSQUARED to FMOD_DSP_PAN_3D_ROLLOFF_CUSTOM. Default = FMOD_DSP_PAN_3D_ROLLOFF_LINEARSQUARED. */ + FMOD_DSP_PAN_3D_MIN_DISTANCE, /* (Type:float) - 3D Min Distance. 0.0 to 1e+18f. Default = 1.0. */ + FMOD_DSP_PAN_3D_MAX_DISTANCE, /* (Type:float) - 3D Max Distance. 0.0 to 1e+18f. Default = 20.0. */ + FMOD_DSP_PAN_3D_EXTENT_MODE, /* (Type:int) - 3D Extent Mode. FMOD_DSP_PAN_3D_EXTENT_MODE_AUTO to FMOD_DSP_PAN_3D_EXTENT_MODE_OFF. Default = FMOD_DSP_PAN_3D_EXTENT_MODE_AUTO. */ + FMOD_DSP_PAN_3D_SOUND_SIZE, /* (Type:float) - 3D Sound Size. 0.0 to 1e+18f. Default = 0.0. */ + FMOD_DSP_PAN_3D_MIN_EXTENT, /* (Type:float) - 3D Min Extent. 0.0 (degrees) to 360.0 (degrees). Default = 0.0. */ + FMOD_DSP_PAN_3D_PAN_BLEND, /* (Type:float) - 3D Pan Blend. 0.0 (fully 2D) to 1.0 (fully 3D). Default = 0.0. */ + FMOD_DSP_PAN_LFE_UPMIX_ENABLED, /* (Type:int) - LFE Upmix Enabled. Determines whether non-LFE source channels should mix to the LFE or leave it alone. 0 (off) to 1 (on). Default = 0 (off). 
*/ + FMOD_DSP_PAN_OVERALL_GAIN, /* (Type:data) - Overall gain. For information only, not set by user. Data of type FMOD_DSP_PARAMETER_DATA_TYPE_OVERALLGAIN to provide to FMOD, to allow FMOD to know the DSP is scaling the signal for virtualization purposes. */ + FMOD_DSP_PAN_SURROUND_SPEAKER_MODE /* (Type:int) - Surround speaker mode. Target speaker mode for surround panning. Default = FMOD_SPEAKERMODE_DEFAULT. */ +} FMOD_DSP_PAN; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter values for the FMOD_DSP_THREE_EQ_CROSSOVERSLOPE parameter of the FMOD_DSP_TYPE_THREE_EQ DSP. + + [REMARKS] + + [SEE_ALSO] + FMOD_DSP_THREE_EQ +] +*/ +typedef enum +{ + FMOD_DSP_THREE_EQ_CROSSOVERSLOPE_12DB, + FMOD_DSP_THREE_EQ_CROSSOVERSLOPE_24DB, + FMOD_DSP_THREE_EQ_CROSSOVERSLOPE_48DB +} FMOD_DSP_THREE_EQ_CROSSOVERSLOPE_TYPE; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_THREE_EQ filter. + + [REMARKS] + + [SEE_ALSO] + DSP::setParameterFloat + DSP::getParameterFloat + DSP::setParameterInt + DSP::getParameterInt + FMOD_DSP_TYPE + FMOD_DSP_THREE_EQ_CROSSOVERSLOPE_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_THREE_EQ_LOWGAIN, /* (Type:float) - Low frequency gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_THREE_EQ_MIDGAIN, /* (Type:float) - Mid frequency gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_THREE_EQ_HIGHGAIN, /* (Type:float) - High frequency gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_THREE_EQ_LOWCROSSOVER, /* (Type:float) - Low-to-mid crossover frequency in Hz. 10.0 to 22000.0. Default = 400.0. */ + FMOD_DSP_THREE_EQ_HIGHCROSSOVER, /* (Type:float) - Mid-to-high crossover frequency in Hz. 10.0 to 22000.0. Default = 4000.0. */ + FMOD_DSP_THREE_EQ_CROSSOVERSLOPE /* (Type:int) - Crossover Slope. 0 = 12dB/Octave, 1 = 24dB/Octave, 2 = 48dB/Octave. Default = 1 (24dB/Octave). */ +} FMOD_DSP_THREE_EQ; + + +/* +[ENUM] +[ + [DESCRIPTION] + List of windowing methods for the FMOD_DSP_TYPE_FFT unit. 
Used in spectrum analysis to reduce leakage / transient signals interfering with the analysis.
    + This is a problem with analysis of continuous signals that only have a small portion of the signal sample (the fft window size).
    + Windowing the signal with a curve or triangle tapers the sides of the fft window to help alleviate this problem. + + [REMARKS] + Cyclic signals such as a sine wave that repeat their cycle in a multiple of the window size do not need windowing.
    + I.e. If the sine wave repeats every 1024, 512, 256 etc samples and the FMOD fft window is 1024, then the signal would not need windowing.
    + Not windowing is the same as FMOD_DSP_FFT_WINDOW_RECT, which is the default.
    + If the cycle of the signal (ie the sine wave) is not a multiple of the window size, it will cause frequency abnormalities, so a different windowing method is needed.
    + +
    + FMOD_DSP_FFT_WINDOW_RECT.
    +
    +
    + FMOD_DSP_FFT_WINDOW_TRIANGLE.
    +
    +
    + FMOD_DSP_FFT_WINDOW_HAMMING.
    +
    +
    + FMOD_DSP_FFT_WINDOW_HANNING.
    +
    +
    + FMOD_DSP_FFT_WINDOW_BLACKMAN.
    +
    +
    + FMOD_DSP_FFT_WINDOW_BLACKMANHARRIS.
    + +
    + + [SEE_ALSO] + FMOD_DSP_FFT +] +*/ +typedef enum +{ + FMOD_DSP_FFT_WINDOW_RECT, /* w[n] = 1.0 */ + FMOD_DSP_FFT_WINDOW_TRIANGLE, /* w[n] = TRI(2n/N) */ + FMOD_DSP_FFT_WINDOW_HAMMING, /* w[n] = 0.54 - (0.46 * COS(n/N) ) */ + FMOD_DSP_FFT_WINDOW_HANNING, /* w[n] = 0.5 * (1.0 - COS(n/N) ) */ + FMOD_DSP_FFT_WINDOW_BLACKMAN, /* w[n] = 0.42 - (0.5 * COS(n/N) ) + (0.08 * COS(2.0 * n/N) ) */ + FMOD_DSP_FFT_WINDOW_BLACKMANHARRIS /* w[n] = 0.35875 - (0.48829 * COS(1.0 * n/N)) + (0.14128 * COS(2.0 * n/N)) - (0.01168 * COS(3.0 * n/N)) */ +} FMOD_DSP_FFT_WINDOW; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_FFT dsp effect. + + [REMARKS] + Set the attributes for the spectrum analysis with FMOD_DSP_FFT_WINDOWSIZE and FMOD_DSP_FFT_WINDOWTYPE, and retrieve the results with FMOD_DSP_FFT_SPECTRUM and FMOD_DSP_FFT_DOMINANT_FREQ. + FMOD_DSP_FFT_SPECTRUM stores its data in the FMOD_DSP_PARAMETER_DATA_TYPE_FFT. You will need to cast to this structure to get the right data. + + [SEE_ALSO] + DSP::setParameterFloat + DSP::getParameterFloat + DSP::setParameterInt + DSP::getParameterInt + DSP::setParameterData + DSP::getParameterData + FMOD_DSP_TYPE + FMOD_DSP_FFT_WINDOW +] +*/ +typedef enum +{ + FMOD_DSP_FFT_WINDOWSIZE, /* (Type:int) - [r/w] Must be a power of 2 between 128 and 16384. 128, 256, 512, 1024, 2048, 4096, 8192, 16384 are accepted. Default = 2048. */ + FMOD_DSP_FFT_WINDOWTYPE, /* (Type:int) - [r/w] Refer to FMOD_DSP_FFT_WINDOW enumeration. Default = FMOD_DSP_FFT_WINDOW_HAMMING. */ + FMOD_DSP_FFT_SPECTRUMDATA, /* (Type:data) - [r] Returns the current spectrum values between 0 and 1 for each 'fft bin'. Cast data to FMOD_DSP_PARAMETER_DATA_TYPE_FFT. Divide the niquist rate by the window size to get the hz value per entry. */ + FMOD_DSP_FFT_DOMINANT_FREQ /* (Type:float) - [r] Returns the dominant frequencies for each channel. */ +} FMOD_DSP_FFT; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_ENVELOPEFOLLOWER unit. 
+ This is a simple envelope follower for tracking the signal level.
    + + [REMARKS] + This unit does not affect the incoming signal +
    + + [SEE_ALSO] + DSP::setParameterFloat + DSP::getParameterFloat + DSP::setParameterData + DSP::getParameterData + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_ENVELOPEFOLLOWER_ATTACK, /* (Type:float) [r/w] - Attack time (milliseconds), in the range from 0.1 through 1000. Default = 20. */ + FMOD_DSP_ENVELOPEFOLLOWER_RELEASE, /* (Type:float) [r/w] - Release time (milliseconds), in the range from 10 through 5000. Default = 100 */ + FMOD_DSP_ENVELOPEFOLLOWER_ENVELOPE, /* (Type:float) [r] - Current value of the envelope, in the range 0 to 1. Read-only. */ + FMOD_DSP_ENVELOPEFOLLOWER_USESIDECHAIN /* (Type:data) [r/w] - Data of type FMOD_DSP_PARAMETER_SIDECHAIN. Whether to analyse the sidechain signal instead of the input signal. Default is { false } */ +} FMOD_DSP_ENVELOPEFOLLOWER; + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_CONVOLUTIONREVERB filter. + + [REMARKS] + Convolution Reverb reverb IR.
    + + [SEE_ALSO] + DSP::setParameterFloat + DSP::getParameterFloat + DSP::setParameterData + DSP::getParameterData + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_CONVOLUTION_REVERB_PARAM_IR, /* (Type:data) - [w] 16-bit reverb IR (short*) with an extra sample prepended to the start which specifies the number of channels. */ + FMOD_DSP_CONVOLUTION_REVERB_PARAM_WET, /* (Type:float) - [r/w] Volume of echo signal to pass to output in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CONVOLUTION_REVERB_PARAM_DRY, /* (Type:float) - [r/w] Original sound volume in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CONVOLUTION_REVERB_PARAM_LINKED /* (Type:bool) - [r/w] Linked - channels are mixed together before processing through the reverb. Default = TRUE. */ +} FMOD_DSP_CONVOLUTION_REVERB; + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_CHANNELMIX_OUTPUTGROUPING parameter for FMOD_DSP_TYPE_CHANNELMIX effect. + + [REMARKS] + + [SEE_ALSO] + DSP::setParameterInt + DSP::getParameterInt + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_CHANNELMIX_OUTPUT_DEFAULT, /* Output channel count = input channel count. Mapping: See FMOD_SPEAKER enumeration. */ + FMOD_DSP_CHANNELMIX_OUTPUT_ALLMONO, /* Output channel count = 1. Mapping: Mono, Mono, Mono, Mono, Mono, Mono, ... (each channel all the way up to FMOD_MAX_CHANNEL_WIDTH channels are treated as if they were mono) */ + FMOD_DSP_CHANNELMIX_OUTPUT_ALLSTEREO, /* Output channel count = 2. Mapping: Left, Right, Left, Right, Left, Right, ... (each pair of channels is treated as stereo all the way up to FMOD_MAX_CHANNEL_WIDTH channels) */ + FMOD_DSP_CHANNELMIX_OUTPUT_ALLQUAD, /* Output channel count = 4. Mapping: Repeating pattern of Front Left, Front Right, Surround Left, Surround Right. */ + FMOD_DSP_CHANNELMIX_OUTPUT_ALL5POINT1, /* Output channel count = 6. Mapping: Repeating pattern of Front Left, Front Right, Center, LFE, Surround Left, Surround Right. 
*/ + FMOD_DSP_CHANNELMIX_OUTPUT_ALL7POINT1, /* Output channel count = 8. Mapping: Repeating pattern of Front Left, Front Right, Center, LFE, Surround Left, Surround Right, Back Left, Back Right. */ + FMOD_DSP_CHANNELMIX_OUTPUT_ALLLFE /* Output channel count = 6. Mapping: Repeating pattern of LFE in a 5.1 output signal. */ +} FMOD_DSP_CHANNELMIX_OUTPUT; + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_CHANNELMIX filter. + + [REMARKS] + For FMOD_DSP_CHANNELMIX_OUTPUTGROUPING, this value will set the output speaker format for the DSP, and also map the incoming channels to the + outgoing channels in a round-robin fashion. Use this for example play a 32 channel input signal as if it were a repeating group of output signals. + Ie. + FMOD_DSP_CHANNELMIX_OUTPUT_ALLMONO = all incoming channels are mixed to a mono output. + FMOD_DSP_CHANNELMIX_OUTPUT_ALLSTEREO = all incoming channels are mixed to a stereo output, ie even incoming channels 0,2,4,6,etc are mixed to left, and odd incoming channels 1,3,5,7,etc are mixed to right. + FMOD_DSP_CHANNELMIX_OUTPUT_ALL5POINT1 = all incoming channels are mixed to a 5.1 output. If there are less than 6 coming in, it will just fill the first n channels in the 6 output channels. + If there are more, then it will repeat the input pattern to the output like it did with the stereo case, ie 12 incoming channels are mapped as 0-5 mixed to the + 5.1 output and 6 to 11 mapped to the 5.1 output. + FMOD_DSP_CHANNELMIX_OUTPUT_ALLLFE = all incoming channels are mixed to a 5.1 output but via the LFE channel only. + + [SEE_ALSO] + DSP::setParameterInt + DSP::getParameterInt + DSP::setParameterFloat + DSP::getParameterFloat + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_CHANNELMIX_OUTPUTGROUPING, /* (Type:int) - Refer to FMOD_DSP_CHANNELMIX_OUTPUT enumeration. Default = FMOD_DSP_CHANNELMIX_OUTPUT_DEFAULT. See remarks. */ + FMOD_DSP_CHANNELMIX_GAIN_CH0, /* (Type:float) - Channel #0 gain in dB. -80.0 to 10.0. Default = 0. 
*/ + FMOD_DSP_CHANNELMIX_GAIN_CH1, /* (Type:float) - Channel #1 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH2, /* (Type:float) - Channel #2 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH3, /* (Type:float) - Channel #3 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH4, /* (Type:float) - Channel #4 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH5, /* (Type:float) - Channel #5 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH6, /* (Type:float) - Channel #6 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH7, /* (Type:float) - Channel #7 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH8, /* (Type:float) - Channel #8 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH9, /* (Type:float) - Channel #9 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH10, /* (Type:float) - Channel #10 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH11, /* (Type:float) - Channel #11 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH12, /* (Type:float) - Channel #12 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH13, /* (Type:float) - Channel #13 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH14, /* (Type:float) - Channel #14 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH15, /* (Type:float) - Channel #15 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH16, /* (Type:float) - Channel #16 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH17, /* (Type:float) - Channel #17 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH18, /* (Type:float) - Channel #18 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH19, /* (Type:float) - Channel #19 gain in dB. -80.0 to 10.0. Default = 0. 
*/ + FMOD_DSP_CHANNELMIX_GAIN_CH20, /* (Type:float) - Channel #20 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH21, /* (Type:float) - Channel #21 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH22, /* (Type:float) - Channel #22 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH23, /* (Type:float) - Channel #23 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH24, /* (Type:float) - Channel #24 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH25, /* (Type:float) - Channel #25 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH26, /* (Type:float) - Channel #26 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH27, /* (Type:float) - Channel #27 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH28, /* (Type:float) - Channel #28 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH29, /* (Type:float) - Channel #29 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH30, /* (Type:float) - Channel #30 gain in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_CHANNELMIX_GAIN_CH31 /* (Type:float) - Channel #31 gain in dB. -80.0 to 10.0. Default = 0. */ +} FMOD_DSP_CHANNELMIX; + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TRANSCEIVER_SPEAKERMODE parameter for FMOD_DSP_TYPE_TRANSCEIVER effect. + + [REMARKS] + The speaker mode of a transceiver buffer (of which there are up to 32 of) is determined automatically depending on the signal flowing through the transceiver effect, or it can be forced. + Use a smaller fixed speaker mode buffer to save memory. + + Only relevant for transmitter dsps, as they control the format of the transceiver channel's buffer. + + If multiple transceivers transmit to a single buffer in different speaker modes, it will allocate memory for each speaker mode. This uses more memory than a single speaker mode. 
+ If there are multiple receivers reading from a channel with multiple speaker modes, it will read them all and mix them together. + + If the system's speaker mode is stereo or mono, it will not create a 3rd buffer, it will just use the mono/stereo speaker mode buffer. + + [SEE_ALSO] + DSP::setParameterInt + DSP::getParameterInt + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_TRANSCEIVER_SPEAKERMODE_AUTO = -1, /* A transmitter will use whatever signal channel count coming in to the transmitter, to determine which speaker mode is allocated for the transceiver channel. */ + FMOD_DSP_TRANSCEIVER_SPEAKERMODE_MONO = 0, /* A transmitter will always downmix to a mono channel buffer. */ + FMOD_DSP_TRANSCEIVER_SPEAKERMODE_STEREO, /* A transmitter will always upmix or downmix to a stereo channel buffer. */ + FMOD_DSP_TRANSCEIVER_SPEAKERMODE_SURROUND, /* A transmitter will always upmix or downmix to a surround channel buffer. Surround is the speaker mode of the system above stereo, so could be quad/surround/5.1/7.1. */ +} FMOD_DSP_TRANSCEIVER_SPEAKERMODE; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_TRANSCEIVER filter. + + [REMARKS] + The transceiver only transmits and receives to a global array of 32 channels. The transceiver can be set to receiver mode (like a return) and can receive the signal at a variable gain (FMOD_DSP_TRANSCEIVER_GAIN). + The transceiver can also be set to transmit to a chnnel (like a send) and can transmit the signal with a variable gain (FMOD_DSP_TRANSCEIVER_GAIN). + + The FMOD_DSP_TRANSCEIVER_TRANSMITSPEAKERMODE is only applicable to the transmission format, not the receive format. This means this parameter is ignored in 'receive mode'. This allows receivers to receive at + the speaker mode of the user's choice. Receiving from a mono channel, is cheaper than receiving from a surround channel for example. 
+ The 3 speaker modes FMOD_DSP_TRANSCEIVER_SPEAKERMODE_MONO, FMOD_DSP_TRANSCEIVER_SPEAKERMODE_STEREO, FMOD_DSP_TRANSCEIVER_SPEAKERMODE_SURROUND are stored as seperate buffers in memory for a tranmitter channel. + To save memory, use 1 common speaker mode for a transmitter. + + The transceiver is double buffered to avoid desyncing of transmitters and receivers. This means there will be a 1 block delay on a receiver, compared to the data sent from a transmitter. + + Multiple transmitters sending to the same channel will be mixed together. + + [SEE_ALSO] + DSP::setParameterFloat + DSP::getParameterFloat + DSP::setParameterInt + DSP::getParameterInt + DSP::setParameterBool + DSP::getParameterBool + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_TRANSCEIVER_TRANSMIT, /* (Type:bool) - [r/w] - FALSE = Transceiver is a 'receiver' (like a return) and accepts data from a channel. TRUE = Transceiver is a 'transmitter' (like a send). Default = FALSE. */ + FMOD_DSP_TRANSCEIVER_GAIN, /* (Type:float) - [r/w] - Gain to receive or transmit at in dB. -80.0 to 10.0. Default = 0. */ + FMOD_DSP_TRANSCEIVER_CHANNEL, /* (Type:int) - [r/w] - Integer to select current global slot, shared by all Transceivers, that can be transmitted to or received from. 0 to 31. Default = 0.*/ + FMOD_DSP_TRANSCEIVER_TRANSMITSPEAKERMODE /* (Type:int) - [r/w] - Speaker mode (transmitter mode only). Specifies either 0 (Auto) Default = 0.*/ +} FMOD_DSP_TRANSCEIVER; + + +/* +[ENUM] +[ + [DESCRIPTION] + Parameter types for the FMOD_DSP_TYPE_OBJECTPAN DSP. 3D Object panners are meant for hardware 3d object systems like Dolby Atmos or Sony Morpheus. + These object panners take input in, and send it to the 7.1 bed, but do not send the signal further down the DSP chain (the output of the dsp is silence). 
+ + [REMARKS] + + [SEE_ALSO] + DSP::setParameterFloat + DSP::getParameterFloat + DSP::setParameterInt + DSP::getParameterInt + DSP::setParameterData + DSP::getParameterData + FMOD_DSP_TYPE +] +*/ +typedef enum +{ + FMOD_DSP_OBJECTPAN_3D_POSITION, /* (Type:data) - 3D Position. data of type FMOD_DSP_PARAMETER_3DATTRIBUTES_MULTI */ + FMOD_DSP_OBJECTPAN_3D_ROLLOFF, /* (Type:int) - 3D Rolloff. FMOD_DSP_PAN_3D_ROLLOFF_LINEARSQUARED to FMOD_DSP_PAN_3D_ROLLOFF_CUSTOM. Default = FMOD_DSP_PAN_3D_ROLLOFF_LINEARSQUARED. */ + FMOD_DSP_OBJECTPAN_3D_MIN_DISTANCE, /* (Type:float) - 3D Min Distance. 0.0 to 1e+18f. Default = 1.0. */ + FMOD_DSP_OBJECTPAN_3D_MAX_DISTANCE, /* (Type:float) - 3D Max Distance. 0.0 to 1e+18f. Default = 20.0. */ + FMOD_DSP_OBJECTPAN_3D_EXTENT_MODE, /* (Type:int) - 3D Extent Mode. FMOD_DSP_PAN_3D_EXTENT_MODE_AUTO to FMOD_DSP_PAN_3D_EXTENT_MODE_OFF. Default = FMOD_DSP_PAN_3D_EXTENT_MODE_AUTO. */ + FMOD_DSP_OBJECTPAN_3D_SOUND_SIZE, /* (Type:float) - 3D Sound Size. 0.0 to 1e+18f. Default = 0.0. */ + FMOD_DSP_OBJECTPAN_3D_MIN_EXTENT, /* (Type:float) - 3D Min Extent. 0.0 (degrees) to 360.0 (degrees). Default = 0.0. */ + FMOD_DSP_OBJECTPAN_OVERALL_GAIN, /* (Type:data) - Overall gain. For information only, not set by user. Data of type FMOD_DSP_PARAMETER_DATA_TYPE_OVERALLGAIN to provide to FMOD, to allow FMOD to know the DSP is scaling the signal for virtualization purposes. */ + FMOD_DSP_OBJECTPAN_OUTPUTGAIN /* (Type:float) - Output gain level. 0.0 to 1.0 linear scale. For the user to scale the output of the object panner's signal. */ +} FMOD_DSP_OBJECTPAN; + +#endif + diff --git a/app/src/main/cpp/inc/fmod_errors.h b/app/src/main/cpp/inc/fmod_errors.h new file mode 100644 index 0000000..0b7be1e --- /dev/null +++ b/app/src/main/cpp/inc/fmod_errors.h @@ -0,0 +1,113 @@ +/*$ preserve start $*/ + +/* ================================================================================================== */ +/* FMOD Studio - Error string header file. 
Copyright (c), Firelight Technologies Pty, Ltd. 2004-2017. */ +/* */ +/* Use this header if you want to store or display a string version / english explanation of */ +/* the FMOD error codes. */ +/* */ +/* ================================================================================================== */ + +#ifndef _FMOD_ERRORS_H +#define _FMOD_ERRORS_H + +#include "fmod.h" + +#ifdef __GNUC__ +static const char *FMOD_ErrorString(FMOD_RESULT errcode) __attribute__((unused)); +#endif + +static const char *FMOD_ErrorString(FMOD_RESULT errcode) +{ + switch (errcode) + { +/*$ preserve end $*/ + case FMOD_OK: return "No errors."; + case FMOD_ERR_BADCOMMAND: return "Tried to call a function on a data type that does not allow this type of functionality (ie calling Sound::lock on a streaming sound)."; + case FMOD_ERR_CHANNEL_ALLOC: return "Error trying to allocate a channel."; + case FMOD_ERR_CHANNEL_STOLEN: return "The specified channel has been reused to play another sound."; + case FMOD_ERR_DMA: return "DMA Failure. See debug output for more information."; + case FMOD_ERR_DSP_CONNECTION: return "DSP connection error. Connection possibly caused a cyclic dependency or connected dsps with incompatible buffer counts."; + case FMOD_ERR_DSP_DONTPROCESS: return "DSP return code from a DSP process query callback. Tells mixer not to call the process callback and therefore not consume CPU. Use this to optimize the DSP graph."; + case FMOD_ERR_DSP_FORMAT: return "DSP Format error. A DSP unit may have attempted to connect to this network with the wrong format, or a matrix may have been set with the wrong size if the target unit has a specified channel map."; + case FMOD_ERR_DSP_INUSE: return "DSP is already in the mixer's DSP network. It must be removed before being reinserted or released."; + case FMOD_ERR_DSP_NOTFOUND: return "DSP connection error. Couldn't find the DSP unit specified."; + case FMOD_ERR_DSP_RESERVED: return "DSP operation error. 
Cannot perform operation on this DSP as it is reserved by the system."; + case FMOD_ERR_DSP_SILENCE: return "DSP return code from a DSP process query callback. Tells mixer silence would be produced from read, so go idle and not consume CPU. Use this to optimize the DSP graph."; + case FMOD_ERR_DSP_TYPE: return "DSP operation cannot be performed on a DSP of this type."; + case FMOD_ERR_FILE_BAD: return "Error loading file."; + case FMOD_ERR_FILE_COULDNOTSEEK: return "Couldn't perform seek operation. This is a limitation of the medium (ie netstreams) or the file format."; + case FMOD_ERR_FILE_DISKEJECTED: return "Media was ejected while reading."; + case FMOD_ERR_FILE_EOF: return "End of file unexpectedly reached while trying to read essential data (truncated?)."; + case FMOD_ERR_FILE_ENDOFDATA: return "End of current chunk reached while trying to read data."; + case FMOD_ERR_FILE_NOTFOUND: return "File not found."; + case FMOD_ERR_FORMAT: return "Unsupported file or audio format."; + case FMOD_ERR_HEADER_MISMATCH: return "There is a version mismatch between the FMOD header and either the FMOD Studio library or the FMOD Low Level library."; + case FMOD_ERR_HTTP: return "A HTTP error occurred. This is a catch-all for HTTP errors not listed elsewhere."; + case FMOD_ERR_HTTP_ACCESS: return "The specified resource requires authentication or is forbidden."; + case FMOD_ERR_HTTP_PROXY_AUTH: return "Proxy authentication is required to access the specified resource."; + case FMOD_ERR_HTTP_SERVER_ERROR: return "A HTTP server error occurred."; + case FMOD_ERR_HTTP_TIMEOUT: return "The HTTP request timed out."; + case FMOD_ERR_INITIALIZATION: return "FMOD was not initialized correctly to support this function."; + case FMOD_ERR_INITIALIZED: return "Cannot call this command after System::init."; + case FMOD_ERR_INTERNAL: return "An error occurred that wasn't supposed to. 
Contact support."; + case FMOD_ERR_INVALID_FLOAT: return "Value passed in was a NaN, Inf or denormalized float."; + case FMOD_ERR_INVALID_HANDLE: return "An invalid object handle was used."; + case FMOD_ERR_INVALID_PARAM: return "An invalid parameter was passed to this function."; + case FMOD_ERR_INVALID_POSITION: return "An invalid seek position was passed to this function."; + case FMOD_ERR_INVALID_SPEAKER: return "An invalid speaker was passed to this function based on the current speaker mode."; + case FMOD_ERR_INVALID_SYNCPOINT: return "The syncpoint did not come from this sound handle."; + case FMOD_ERR_INVALID_THREAD: return "Tried to call a function on a thread that is not supported."; + case FMOD_ERR_INVALID_VECTOR: return "The vectors passed in are not unit length, or perpendicular."; + case FMOD_ERR_MAXAUDIBLE: return "Reached maximum audible playback count for this sound's soundgroup."; + case FMOD_ERR_MEMORY: return "Not enough memory or resources."; + case FMOD_ERR_MEMORY_CANTPOINT: return "Can't use FMOD_OPENMEMORY_POINT on non PCM source data, or non mp3/xma/adpcm data if FMOD_CREATECOMPRESSEDSAMPLE was used."; + case FMOD_ERR_NEEDS3D: return "Tried to call a command on a 2d sound when the command was meant for 3d sound."; + case FMOD_ERR_NEEDSHARDWARE: return "Tried to use a feature that requires hardware support."; + case FMOD_ERR_NET_CONNECT: return "Couldn't connect to the specified host."; + case FMOD_ERR_NET_SOCKET_ERROR: return "A socket error occurred. 
This is a catch-all for socket-related errors not listed elsewhere."; + case FMOD_ERR_NET_URL: return "The specified URL couldn't be resolved."; + case FMOD_ERR_NET_WOULD_BLOCK: return "Operation on a non-blocking socket could not complete immediately."; + case FMOD_ERR_NOTREADY: return "Operation could not be performed because specified sound/DSP connection is not ready."; + case FMOD_ERR_OUTPUT_ALLOCATED: return "Error initializing output device, but more specifically, the output device is already in use and cannot be reused."; + case FMOD_ERR_OUTPUT_CREATEBUFFER: return "Error creating hardware sound buffer."; + case FMOD_ERR_OUTPUT_DRIVERCALL: return "A call to a standard soundcard driver failed, which could possibly mean a bug in the driver or resources were missing or exhausted."; + case FMOD_ERR_OUTPUT_FORMAT: return "Soundcard does not support the specified format."; + case FMOD_ERR_OUTPUT_INIT: return "Error initializing output device."; + case FMOD_ERR_OUTPUT_NODRIVERS: return "The output device has no drivers installed. If pre-init, FMOD_OUTPUT_NOSOUND is selected as the output mode. If post-init, the function just fails."; + case FMOD_ERR_PLUGIN: return "An unspecified error has been returned from a plugin."; + case FMOD_ERR_PLUGIN_MISSING: return "A requested output, dsp unit type or codec was not available."; + case FMOD_ERR_PLUGIN_RESOURCE: return "A resource that the plugin requires cannot be found. (ie the DLS file for MIDI playback)"; + case FMOD_ERR_PLUGIN_VERSION: return "A plugin was built with an unsupported SDK version."; + case FMOD_ERR_RECORD: return "An error occurred trying to initialize the recording device."; + case FMOD_ERR_REVERB_CHANNELGROUP: return "Reverb properties cannot be set on this channel because a parent channelgroup owns the reverb connection."; + case FMOD_ERR_REVERB_INSTANCE: return "Specified instance in FMOD_REVERB_PROPERTIES couldn't be set. 
Most likely because it is an invalid instance number or the reverb doesn't exist."; + case FMOD_ERR_SUBSOUNDS: return "The error occurred because the sound referenced contains subsounds when it shouldn't have, or it doesn't contain subsounds when it should have. The operation may also not be able to be performed on a parent sound."; + case FMOD_ERR_SUBSOUND_ALLOCATED: return "This subsound is already being used by another sound, you cannot have more than one parent to a sound. Null out the other parent's entry first."; + case FMOD_ERR_SUBSOUND_CANTMOVE: return "Shared subsounds cannot be replaced or moved from their parent stream, such as when the parent stream is an FSB file."; + case FMOD_ERR_TAGNOTFOUND: return "The specified tag could not be found or there are no tags."; + case FMOD_ERR_TOOMANYCHANNELS: return "The sound created exceeds the allowable input channel count. This can be increased using the 'maxinputchannels' parameter in System::setSoftwareFormat."; + case FMOD_ERR_TRUNCATED: return "The retrieved string is too long to fit in the supplied buffer and has been truncated."; + case FMOD_ERR_UNIMPLEMENTED: return "Something in FMOD hasn't been implemented when it should be! contact support!"; + case FMOD_ERR_UNINITIALIZED: return "This command failed because System::init or System::setDriver was not called."; + case FMOD_ERR_UNSUPPORTED: return "A command issued was not supported by this object. 
Possibly a plugin without certain callbacks specified."; + case FMOD_ERR_VERSION: return "The version number of this file format is not supported."; + case FMOD_ERR_EVENT_ALREADY_LOADED: return "The specified bank has already been loaded."; + case FMOD_ERR_EVENT_LIVEUPDATE_BUSY: return "The live update connection failed due to the game already being connected."; + case FMOD_ERR_EVENT_LIVEUPDATE_MISMATCH: return "The live update connection failed due to the game data being out of sync with the tool."; + case FMOD_ERR_EVENT_LIVEUPDATE_TIMEOUT: return "The live update connection timed out."; + case FMOD_ERR_EVENT_NOTFOUND: return "The requested event, bus or vca could not be found."; + case FMOD_ERR_STUDIO_UNINITIALIZED: return "The Studio::System object is not yet initialized."; + case FMOD_ERR_STUDIO_NOT_LOADED: return "The specified resource is not loaded, so it can't be unloaded."; + case FMOD_ERR_INVALID_STRING: return "An invalid string was passed to this function."; + case FMOD_ERR_ALREADY_LOCKED: return "The specified resource is already locked."; + case FMOD_ERR_NOT_LOCKED: return "The specified resource is not locked, so it can't be unlocked."; + case FMOD_ERR_RECORD_DISCONNECTED: return "The specified recording driver has been disconnected."; + case FMOD_ERR_TOOMANYSAMPLES: return "The length provided exceeds the allowable limit."; + default : return "Unknown error."; +/*$ preserve start $*/ + }; +} + +#endif +/*$ preserve end $*/ diff --git a/app/src/main/cpp/inc/fmod_output.h b/app/src/main/cpp/inc/fmod_output.h new file mode 100644 index 0000000..ff90557 --- /dev/null +++ b/app/src/main/cpp/inc/fmod_output.h @@ -0,0 +1,174 @@ +/* ======================================================================================================== */ +/* FMOD Studio - output development header file. Copyright (c), Firelight Technologies Pty, Ltd. 2004-2017. 
*/ +/* */ +/* Use this header if you are wanting to develop your own output plugin to use with */ +/* FMOD's output system. With this header you can make your own output plugin that FMOD */ +/* can register and use. See the documentation and examples on how to make a working plugin. */ +/* */ +/* ======================================================================================================== */ + +#ifndef _FMOD_OUTPUT_H +#define _FMOD_OUTPUT_H + +#define FMOD_OUTPUT_PLUGIN_VERSION 2 + +typedef struct FMOD_OUTPUT_STATE FMOD_OUTPUT_STATE; +typedef struct FMOD_OUTPUT_OBJECT3DINFO FMOD_OUTPUT_OBJECT3DINFO; + +/* + FMOD_OUTPUT_DESCRIPTION callbacks +*/ +typedef FMOD_RESULT (F_CALLBACK *FMOD_OUTPUT_GETNUMDRIVERS_CALLBACK) (FMOD_OUTPUT_STATE *output_state, int *numdrivers); +typedef FMOD_RESULT (F_CALLBACK *FMOD_OUTPUT_GETDRIVERINFO_CALLBACK) (FMOD_OUTPUT_STATE *output_state, int id, char *name, int namelen, FMOD_GUID *guid, int *systemrate, FMOD_SPEAKERMODE *speakermode, int *speakermodechannels); +typedef FMOD_RESULT (F_CALLBACK *FMOD_OUTPUT_INIT_CALLBACK) (FMOD_OUTPUT_STATE *output_state, int selecteddriver, FMOD_INITFLAGS flags, int *outputrate, FMOD_SPEAKERMODE *speakermode, int *speakermodechannels, FMOD_SOUND_FORMAT *outputformat, int dspbufferlength, int dspnumbuffers, void *extradriverdata); +typedef FMOD_RESULT (F_CALLBACK *FMOD_OUTPUT_START_CALLBACK) (FMOD_OUTPUT_STATE *output_state); +typedef FMOD_RESULT (F_CALLBACK *FMOD_OUTPUT_STOP_CALLBACK) (FMOD_OUTPUT_STATE *output_state); +typedef FMOD_RESULT (F_CALLBACK *FMOD_OUTPUT_CLOSE_CALLBACK) (FMOD_OUTPUT_STATE *output_state); +typedef FMOD_RESULT (F_CALLBACK *FMOD_OUTPUT_UPDATE_CALLBACK) (FMOD_OUTPUT_STATE *output_state); +typedef FMOD_RESULT (F_CALLBACK *FMOD_OUTPUT_GETHANDLE_CALLBACK) (FMOD_OUTPUT_STATE *output_state, void **handle); +typedef FMOD_RESULT (F_CALLBACK *FMOD_OUTPUT_GETPOSITION_CALLBACK) (FMOD_OUTPUT_STATE *output_state, unsigned int *pcm); +typedef FMOD_RESULT (F_CALLBACK 
*FMOD_OUTPUT_LOCK_CALLBACK) (FMOD_OUTPUT_STATE *output_state, unsigned int offset, unsigned int length, void **ptr1, void **ptr2, unsigned int *len1, unsigned int *len2); +typedef FMOD_RESULT (F_CALLBACK *FMOD_OUTPUT_UNLOCK_CALLBACK) (FMOD_OUTPUT_STATE *output_state, void *ptr1, void *ptr2, unsigned int len1, unsigned int len2); +typedef FMOD_RESULT (F_CALLBACK *FMOD_OUTPUT_MIXER_CALLBACK) (FMOD_OUTPUT_STATE *output_state); + +typedef FMOD_RESULT (F_CALLBACK *FMOD_OUTPUT_OBJECT3DGETINFO_CALLBACK) (FMOD_OUTPUT_STATE *output_state, int *maxhardwareobjects); +typedef FMOD_RESULT (F_CALLBACK *FMOD_OUTPUT_OBJECT3DALLOC_CALLBACK) (FMOD_OUTPUT_STATE *output_state, void **object3d); +typedef FMOD_RESULT (F_CALLBACK *FMOD_OUTPUT_OBJECT3DFREE_CALLBACK) (FMOD_OUTPUT_STATE *output_state, void *object3d); +typedef FMOD_RESULT (F_CALLBACK *FMOD_OUTPUT_OBJECT3DUPDATE_CALLBACK) (FMOD_OUTPUT_STATE *output_state, void *object3d, const FMOD_OUTPUT_OBJECT3DINFO *info); + +typedef FMOD_RESULT (F_CALLBACK *FMOD_OUTPUT_OPENPORT_CALLBACK) (FMOD_OUTPUT_STATE *output, FMOD_PORT_TYPE portType, FMOD_PORT_INDEX portIndex, int *portId, int *portRate, int *portChannels, FMOD_SOUND_FORMAT *portFormat); +typedef FMOD_RESULT (F_CALLBACK *FMOD_OUTPUT_CLOSEPORT_CALLBACK) (FMOD_OUTPUT_STATE *output, int portId); + + +/* + FMOD_OUTPUT_STATE functions +*/ +typedef FMOD_RESULT (F_CALLBACK *FMOD_OUTPUT_READFROMMIXER) (FMOD_OUTPUT_STATE *output_state, void *buffer, unsigned int length); +typedef FMOD_RESULT (F_CALLBACK *FMOD_OUTPUT_COPYPORT) (FMOD_OUTPUT_STATE *output, int portId, void *buffer, unsigned int length); +typedef void * (F_CALLBACK *FMOD_OUTPUT_ALLOC) (unsigned int size, unsigned int align, const char *file, int line); +typedef void (F_CALLBACK *FMOD_OUTPUT_FREE) (void *ptr, const char *file, int line); +typedef void (F_CALLBACK *FMOD_OUTPUT_LOG) (FMOD_DEBUG_FLAGS level, const char *file, int line, const char *function, const char *string, ...); + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + When 
creating an output, declare one of these and provide the relevant callbacks and name for FMOD to use when it creates and uses an output of this type. + + [REMARKS] + There are several methods for driving the FMOD mixer to service the audio hardware. + + * Polled: if the audio hardware must be polled regularly set 'polling' to TRUE, FMOD will create a mixer thread that calls back via FMOD_OUTPUT_GETPOSITION_CALLBACK. Once an entire block of samples have played FMOD will call FMOD_OUTPUT_LOCK_CALLBACK to allow you to provide a destination pointer to write the next mix. + * Callback: if the audio hardware provides a callback where you must provide a buffer of samples then set 'polling' to FALSE and directly call FMOD_OUTPUT_READFROMMIXER. + * Synchronization: if the audio hardware provides a synchronization primitive to wait on then set 'polling' to FALSE and give a FMOD_OUTPUT_MIXER_CALLBACK pointer. FMOD will create a mixer thread and call you repeatedly once FMOD_OUTPUT_START_CALLBACK has finished, you must wait on your primitive in this callback and upon wake call FMOD_OUTPUT_READFROMMIXER. + * Non-realtime: if you are writing a file or driving a non-realtime output call FMOD_OUTPUT_READFROMMIXER from FMOD_OUTPUT_UPDATE_CALLBACK. + + Callbacks marked with 'user thread' will be called in response to the user of the FMOD low level API, in the case of the Studio runtime API, the user is the Studio Update thread. + + Members marked with [r] mean read only for the developer, read/write for the FMOD system. + + Members marked with [w] mean read/write for the developer, read only for the FMOD system. 
+ + [SEE_ALSO] + FMOD_OUTPUT_STATE + FMOD_OUTPUT_GETNUMDRIVERS_CALLBACK + FMOD_OUTPUT_GETDRIVERINFO_CALLBACK + FMOD_OUTPUT_INIT_CALLBACK + FMOD_OUTPUT_START_CALLBACK + FMOD_OUTPUT_STOP_CALLBACK + FMOD_OUTPUT_CLOSE_CALLBACK + FMOD_OUTPUT_UPDATE_CALLBACK + FMOD_OUTPUT_GETHANDLE_CALLBACK + FMOD_OUTPUT_GETPOSITION_CALLBACK + FMOD_OUTPUT_LOCK_CALLBACK + FMOD_OUTPUT_UNLOCK_CALLBACK + FMOD_OUTPUT_MIXER_CALLBACK + FMOD_OUTPUT_OBJECT3DGETINFO_CALLBACK + FMOD_OUTPUT_OBJECT3DALLOC_CALLBACK + FMOD_OUTPUT_OBJECT3DFREE_CALLBACK + FMOD_OUTPUT_OBJECT3DUPDATE_CALLBACK +] +*/ +typedef struct FMOD_OUTPUT_DESCRIPTION +{ + unsigned int apiversion; /* [w] The output plugin API version this plugin is built for. Set to this to FMOD_OUTPUT_PLUGIN_VERSION. */ + const char *name; /* [w] Name of the output plugin. */ + unsigned int version; /* [w] Version of the output plugin. */ + int polling; /* [w] If TRUE (non-zero) a mixer thread is created that calls FMOD_OUTPUT_GETPOSITION_CALLBACK / FMOD_OUTPUT_LOCK_CALLBACK / FMOD_OUTPUT_UNLOCK_CALLBACK to drive the mixer. If FALSE (zero) you must call FMOD_OUTPUT_READFROMMIXER to drive the mixer yourself. */ + FMOD_OUTPUT_GETNUMDRIVERS_CALLBACK getnumdrivers; /* [w] Required user thread callback to provide the number of attached sound devices. Called from System::getNumDrivers. */ + FMOD_OUTPUT_GETDRIVERINFO_CALLBACK getdriverinfo; /* [w] Required user thread callback to provide information about a particular sound device. Called from System::getDriverInfo. */ + FMOD_OUTPUT_INIT_CALLBACK init; /* [w] Required user thread callback to allocate resources and provide information about hardware capabilities. Called from System::init. */ + FMOD_OUTPUT_START_CALLBACK start; /* [w] Optional user thread callback just before mixing should begin, calls to FMOD_OUTPUT_GETPOSITION_CALLBACK / FMOD_OUTPUT_LOCK_CALLBACK / FMOD_OUTPUT_UNLOCK_CALLBACK / FMOD_OUTPUT_MIXER_CALLBACK will start, you may call FMOD_OUTPUT_READFROMMIXER after this point. 
Called from System::init. */ + FMOD_OUTPUT_STOP_CALLBACK stop; /* [w] Optional user thread callback just after mixing has finished, calls to FMOD_OUTPUT_GETPOSITION_CALLBACK / FMOD_OUTPUT_LOCK_CALLBACK / FMOD_OUTPUT_UNLOCK_CALLBACK / FMOD_OUTPUT_MIXER_CALLBACK have stopped, you may not call FMOD_OUTPUT_READFROMMIXER after this point. Called from System::close. */ + FMOD_OUTPUT_CLOSE_CALLBACK close; /* [w] Required user thread callback to clean up resources allocated during FMOD_OUTPUT_INIT_CALLBACK. Called from System::init and System::close. */ + FMOD_OUTPUT_UPDATE_CALLBACK update; /* [w] Optional user thread callback once per frame to update internal state. Called from System::update. */ + FMOD_OUTPUT_GETHANDLE_CALLBACK gethandle; /* [w] Optional user thread callback to provide a pointer to the internal device object used to share with other audio systems. Called from System::getOutputHandle. */ + FMOD_OUTPUT_GETPOSITION_CALLBACK getposition; /* [w] Required mixer thread callback (if 'polling' is TRUE) to provide the hardware playback position in the output ring buffer. Called before a mix. */ + FMOD_OUTPUT_LOCK_CALLBACK lock; /* [w] Required mixer thread callback (if 'polling' is TRUE) to provide a pointer the mixer can write to for the next block of audio data. Called before a mix. */ + FMOD_OUTPUT_UNLOCK_CALLBACK unlock; /* [w] Optional mixer thread callback (if 'polling' is TRUE) to signify the mixer has finished writing to the pointer from FMOD_OUTPUT_LOCK_CALLBACK. Called after a mix. */ + FMOD_OUTPUT_MIXER_CALLBACK mixer; /* [w] Optional mixer thread callback (if 'polling' is FALSE) called repeatedly to give a thread for waiting on an audio hardware synchronization primitive (see remarks for details). Ensure you have a reasonable timeout (~200ms) on your synchronization primitive and allow this callback to return once per wakeup to avoid deadlocks. 
*/ + FMOD_OUTPUT_OBJECT3DGETINFO_CALLBACK object3dgetinfo; /* [w] Optional mixer thread callback to provide information about the capabilities of 3D object hardware. Called during a mix. */ + FMOD_OUTPUT_OBJECT3DALLOC_CALLBACK object3dalloc; /* [w] Optional mixer thread callback to reserve a hardware resources for a single 3D object. Called during a mix. */ + FMOD_OUTPUT_OBJECT3DFREE_CALLBACK object3dfree; /* [w] Optional mixer thread callback to release a hardware resource previously acquired with FMOD_OUTPUT_OBJECT3DALLOC_CALLBACK. Called during a mix. */ + FMOD_OUTPUT_OBJECT3DUPDATE_CALLBACK object3dupdate; /* [w] Optional mixer thread callback once for every acquired 3D object every mix to provide 3D information and buffered audio. Called during a mix. */ + FMOD_OUTPUT_OPENPORT_CALLBACK openport; /* [w] Optional main thread callback to open an auxiliary output port on the device. */ + FMOD_OUTPUT_CLOSEPORT_CALLBACK closeport; /* [w] Optional main thread callback to close an auxiliary output port on the device. */ +} FMOD_OUTPUT_DESCRIPTION; + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + Output object state passed into every callback provides access to plugin developers data and system functionality. + + [REMARKS] + Members marked with [r] mean read only for the developer, read/write for the FMOD system. + Members marked with [w] mean read/write for the developer, read only for the FMOD system. + + [SEE_ALSO] + FMOD_OUTPUT_DESCRIPTION +] +*/ +struct FMOD_OUTPUT_STATE +{ + void *plugindata; /* [w] Pointer used to store any plugin specific state so it's available in all callbacks. */ + FMOD_OUTPUT_READFROMMIXER readfrommixer; /* [r] Function to execute the mixer producing a buffer of audio. Used to control when the mix occurs manually as an alternative to FMOD_OUTPUT_DESCRIPTION::polling == TRUE. */ + FMOD_OUTPUT_ALLOC alloc; /* [r] Function to allocate memory using the FMOD memory system. 
*/ + FMOD_OUTPUT_FREE free; /* [r] Function to free memory allocated with FMOD_OUTPUT_ALLOC. */ + FMOD_OUTPUT_LOG log; /* [r] Function to write to the FMOD logging system. */ + FMOD_OUTPUT_COPYPORT copyport; /* [r] Function to copy the output from the mixer for the given auxiliary port */ +}; + + +/* +[STRUCTURE] +[ + [DESCRIPTION] + This structure is passed to the plugin via FMOD_OUTPUT_OBJECT3DUPDATE_CALLBACK, so that whatever object based panning solution available can position it in the speakers correctly. + Object based panning is a 3D panning solution that sends a mono only signal to a hardware device, such as Dolby Atmos or other similar panning solutions. + + [REMARKS] + FMOD does not attenuate the buffer, but provides a 'gain' parameter that the user must use to scale the buffer by. Rather than pre-attenuating the buffer, the plugin developer + can access untouched data for other purposes, like reverb sending for example. + The 'gain' parameter is based on the user's 3D custom rolloff model. + + Members marked with [r] mean read only for the developer, read/write for the FMOD system. + Members marked with [w] mean read/write for the developer, read only for the FMOD system. + + [SEE_ALSO] + FMOD_OUTPUT_OBJECT3DUPDATE_CALLBACK +] +*/ +struct FMOD_OUTPUT_OBJECT3DINFO +{ + float *buffer; /* [r] Mono PCM floating point buffer. This buffer needs to be scaled by the gain value to get distance attenuation. */ + unsigned int bufferlength; /* [r] Length in PCM samples of buffer. */ + FMOD_VECTOR position; /* [r] Vector relative between object and listener. */ + float gain; /* [r] 0.0 to 1.0 - 1 = 'buffer' is not attenuated, 0 = 'buffer' is fully attenuated. */ + float spread; /* [r] 0 - 360 degrees. 0 = point source, 360 = sound is spread around all speakers */ + float priority; /* [r] 0.0 to 1.0 - 0 = most important, 1 = least important. Based on height and distance (height is more important). 
*/ +}; + +#endif /* _FMOD_OUTPUT_H */ diff --git a/app/src/main/cpp/sound.cpp b/app/src/main/cpp/sound.cpp new file mode 100644 index 0000000..6d70c50 --- /dev/null +++ b/app/src/main/cpp/sound.cpp @@ -0,0 +1,104 @@ +#include "inc/fmod.hpp" +#include +#include +#include "com_aserbao_androidcustomcamera_utils_VoiceUtils.h" + +#include + +#include +#define LOGI(FORMAT,...) __android_log_print(ANDROID_LOG_INFO,"zph",FORMAT,##__VA_ARGS__); +#define LOGE(FORMAT,...) __android_log_print(ANDROID_LOG_ERROR,"zph",FORMAT,##__VA_ARGS__); + +#define MODE_NORMAL 0 +#define MODE_LUOLI 1 +#define MODE_DASHU 2 +#define MODE_JINGSONG 3 +#define MODE_GAOGUAI 4 +#define MODE_KONGLING 5 + +using namespace FMOD; + +JNIEXPORT void JNICALL Java_com_aserbao_androidcustomcamera_utils_VoiceUtils_fix(JNIEnv *env, + jclass jcls, jstring path_jstr, jint type) { + //声音引擎 + System *system; + //声音 + Sound *sound; + //数字处理(音效) + DSP *dsp; + //正在播放 + bool playing = true; + //音乐轨道 + Channel *channel; + //播放速度 + float frequency = 0; + //音频地址 + const char* path_cstr = env->GetStringUTFChars(path_jstr, NULL); + + System_Create(&system); + system->init(32, FMOD_INIT_NORMAL, NULL); + + try { + //创建声音 + system->createSound(path_cstr, FMOD_DEFAULT, NULL, &sound); + switch (type) { + case MODE_NORMAL: + //原生播放 + system->playSound(sound, 0, false, &channel); + break; + case MODE_LUOLI: + //提升或者降低音调的一种音效 + system->createDSPByType(FMOD_DSP_TYPE_PITCHSHIFT, &dsp); + //设置音调的参数 + dsp->setParameterFloat(FMOD_DSP_PITCHSHIFT_PITCH, 1.8); + //添加进到channel,添加进轨道 + system->playSound(sound, 0, false, &channel); + channel->addDSP(0, dsp); + break; + case MODE_DASHU: + system->createDSPByType(FMOD_DSP_TYPE_PITCHSHIFT, &dsp); + dsp->setParameterFloat(FMOD_DSP_PITCHSHIFT_PITCH, 0.8); + system->playSound(sound, 0, false, &channel); + channel->addDSP(0, dsp); + break; + case MODE_JINGSONG: + system->createDSPByType(FMOD_DSP_TYPE_TREMOLO, &dsp); + dsp->setParameterFloat(FMOD_DSP_TREMOLO_SKEW, 0.8); + system->playSound(sound, 0, 
false, &channel); + channel->addDSP(0, dsp); + break; + case MODE_GAOGUAI: + //提高说话的速度 + system->playSound(sound, 0, false, &channel); + channel->getFrequency(&frequency); + frequency = frequency * 2; + channel->setFrequency(frequency); + break; + case MODE_KONGLING: + system->createDSPByType(FMOD_DSP_TYPE_ECHO, &dsp); + dsp->setParameterFloat(FMOD_DSP_ECHO_DELAY, 300); + dsp->setParameterFloat(FMOD_DSP_ECHO_FEEDBACK, 20); + system->playSound(sound, 0, false, &channel); + channel->addDSP(0, dsp); + break; + } + } catch (...) { + LOGE("%s", "发生异常"); + goto end; + } + system->update(); + + //单位是微妙 + //每秒钟判断下是否是播放 + while (playing) { + channel->isPlaying(&playing); + usleep(1000); + } + goto end; + + //释放资源 + end: env->ReleaseStringUTFChars(path_jstr, path_cstr); + sound->release(); + system->close(); + system->release(); +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/HomeActivity.kt b/app/src/main/java/com/aserbao/androidcustomcamera/HomeActivity.kt new file mode 100644 index 0000000..fd3c68e --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/HomeActivity.kt @@ -0,0 +1,19 @@ +package com.aserbao.androidcustomcamera + +import android.view.View +import com.aserbao.androidcustomcamera.base.activity.RVBaseActivity +import com.aserbao.androidcustomcamera.base.beans.BaseRecyclerBean +import com.aserbao.androidcustomcamera.blocks.BlocksActivity +import com.aserbao.androidcustomcamera.blocks.ffmpeg.FFmpegActivity +import com.aserbao.androidcustomcamera.blocks.others.changeVoice.ChangeVoiceActivity +import com.aserbao.androidcustomcamera.whole.WholeActivity + +class HomeActivity : RVBaseActivity() { + override fun initGetData() { + mBaseRecyclerBeen.add(BaseRecyclerBean("每个功能点单独代码实现", BlocksActivity::class.java)) + mBaseRecyclerBeen.add(BaseRecyclerBean("所有功能点整合代码实现", WholeActivity::class.java)) + mBaseRecyclerBeen.add(BaseRecyclerBean("当前调用界面", FFmpegActivity::class.java)) + } + + override fun itemClickBack(view: View, position: Int, 
isLongClick: Boolean, comeFrom: Int) {} +} \ No newline at end of file diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/MainActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/MainActivity.java deleted file mode 100644 index 424c40d..0000000 --- a/app/src/main/java/com/aserbao/androidcustomcamera/MainActivity.java +++ /dev/null @@ -1,17 +0,0 @@ -package com.aserbao.androidcustomcamera; - -import com.aserbao.androidcustomcamera.base.activity.RVBaseActivity; -import com.aserbao.androidcustomcamera.base.beans.ClassBean; -import com.aserbao.androidcustomcamera.blocks.BlocksActivity; -import com.aserbao.androidcustomcamera.whole.WholeActivity; - -import java.util.List; - -public class MainActivity extends RVBaseActivity { - @Override - public List initData() { - mClassBeans.add(new ClassBean("每个功能点单独代码实现", BlocksActivity.class)); - mClassBeans.add(new ClassBean("所有功能点整合代码实现", WholeActivity.class)); - return mClassBeans; - } -} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/WelcomeActivity.kt b/app/src/main/java/com/aserbao/androidcustomcamera/WelcomeActivity.kt new file mode 100644 index 0000000..0ef193a --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/WelcomeActivity.kt @@ -0,0 +1,48 @@ +package com.aserbao.androidcustomcamera + +import android.animation.Animator +import android.animation.AnimatorListenerAdapter +import android.animation.ObjectAnimator +import android.animation.PropertyValuesHolder +import android.content.Intent +import android.util.Log +import com.aserbao.androidcustomcamera.base.activity.BaseActivity +import com.aserbao.androidcustomcamera.utils.CheckPermissionUtil +import com.aserbao.androidcustomcamera.whole.record.RecorderActivity +import kotlinx.android.synthetic.main.activity_welcome.* + + +class WelcomeActivity : BaseActivity() { + + override fun setLayoutId(): Int { + return R.layout.activity_welcome + } + + override fun initView() { + super.initView() + } + + fun 
exectorAnimator(){ + val valuesHolder0 = PropertyValuesHolder.ofFloat("scaleX", 1.0f, 1.5f) + val valuesHolder1 = PropertyValuesHolder.ofFloat("scaleY", 1.0f, 1.5f) + val objectAnimator: ObjectAnimator = ObjectAnimator.ofPropertyValuesHolder(bgIV, valuesHolder0, valuesHolder1) + objectAnimator.addListener(object : AnimatorListenerAdapter() { + override fun onAnimationEnd(animation: Animator?) { + super.onAnimationEnd(animation) + startActivity(Intent(this@WelcomeActivity, RecorderActivity::class.java)) + finish() + } + }) + objectAnimator.setDuration(2000).start() + } + + override fun onRequestPermissionsResult(requestCode: Int, permissions: Array, grantResults: IntArray) { + super.onRequestPermissionsResult(requestCode, permissions, grantResults) + Log.e("TAG", "onRequestPermissionsResult: $requestCode") + if(CheckPermissionUtil.isCameraGranted()) { + exectorAnimator() + }else{ + startRequestPermission() + } + } +} \ No newline at end of file diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/agithubProject/readme.txt b/app/src/main/java/com/aserbao/androidcustomcamera/agithubProject/readme.txt new file mode 100644 index 0000000..e69de29 diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/base/MyApplication.java b/app/src/main/java/com/aserbao/androidcustomcamera/base/MyApplication.java index baa9d2b..9faeadb 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/base/MyApplication.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/base/MyApplication.java @@ -4,6 +4,8 @@ import android.content.Context; import android.util.DisplayMetrics; +import com.danikula.videocache.HttpProxyCacheServer; + /** * description: * Created by aserbao on 2018/5/15. 
@@ -23,9 +25,28 @@ public void onCreate() { .getDisplayMetrics(); screenWidth = mDisplayMetrics.widthPixels; screenHeight = mDisplayMetrics.heightPixels; + app = this; } public static Context getContext() { return mContext; } + + public static MyApplication app; + public static MyApplication getInstance() { + return app; + } + + //=====================================================缓存区 + private HttpProxyCacheServer proxy; + + public static HttpProxyCacheServer getProxy() { + MyApplication app = getInstance(); + return app.proxy == null ? (app.proxy = app.newProxy()) : app.proxy; + } + + private HttpProxyCacheServer newProxy() { + return new HttpProxyCacheServer(this); + } + } diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/base/activity/BaseActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/base/activity/BaseActivity.java index a1c9fe9..99f4259 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/base/activity/BaseActivity.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/base/activity/BaseActivity.java @@ -30,7 +30,7 @@ public void onCreate(Bundle savedInstanceState) { initView(); } - private void startRequestPermission() { + protected void startRequestPermission() { ActivityCompat.requestPermissions(this,BASIC_PERMISSIONS,123); } public void initView(){ diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/base/activity/RVBaseActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/base/activity/RVBaseActivity.java index a03f1d9..2f9b7d2 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/base/activity/RVBaseActivity.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/base/activity/RVBaseActivity.java @@ -7,7 +7,9 @@ import com.aserbao.androidcustomcamera.R; import com.aserbao.androidcustomcamera.base.adapter.CommonAdapter; -import com.aserbao.androidcustomcamera.base.beans.ClassBean; +import com.aserbao.androidcustomcamera.base.beans.BaseRecyclerBean; +import 
com.aserbao.androidcustomcamera.base.utils.APermissionUtils; +import com.aserbao.androidcustomcamera.base.viewHolder.IBaseRecyclerItemClickListener; import java.util.ArrayList; import java.util.List; @@ -15,29 +17,31 @@ import butterknife.BindView; import butterknife.ButterKnife; -public abstract class RVBaseActivity extends AppCompatActivity { - +public abstract class RVBaseActivity extends AppCompatActivity implements IBaseRecyclerItemClickListener { + public final String TAG = this.getClass().getCanonicalName(); @BindView(R.id.base_rv) - RecyclerView mBaseRv; + public RecyclerView mBaseRv; public CommonAdapter mCommonAdapter; public LinearLayoutManager mLinearLayoutManager; - public List mClassBeans; + public List mBaseRecyclerBeen = new ArrayList<>(); @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_base); ButterKnife.bind(this); - mClassBeans = new ArrayList<>(); + initGetData(); initView(); } + protected abstract void initGetData(); + + protected void initView() { - mCommonAdapter = new CommonAdapter(this, this, initData()); + mCommonAdapter = new CommonAdapter(this, this, mBaseRecyclerBeen,this); mLinearLayoutManager = new LinearLayoutManager(this, LinearLayoutManager.VERTICAL, false); mBaseRv.setLayoutManager(mLinearLayoutManager); mBaseRv.setAdapter(mCommonAdapter); + APermissionUtils.checkPermission(this); } - - public abstract List initData(); } diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/base/adapter/CommonAdapter.java b/app/src/main/java/com/aserbao/androidcustomcamera/base/adapter/CommonAdapter.java index 69b9df3..2d4b539 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/base/adapter/CommonAdapter.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/base/adapter/CommonAdapter.java @@ -10,7 +10,10 @@ import android.widget.Button; import com.aserbao.androidcustomcamera.R; -import com.aserbao.androidcustomcamera.base.beans.ClassBean; 
+import com.aserbao.androidcustomcamera.base.beans.BaseRecyclerBean; +import com.aserbao.androidcustomcamera.base.utils.StaticFinalValues; +import com.aserbao.androidcustomcamera.base.viewHolder.IBaseRecyclerItemClickListener; +import com.aserbao.androidcustomcamera.base.viewHolder.TextViewHolder; import java.util.ArrayList; import java.util.List; @@ -23,53 +26,59 @@ * Created by aserbao on 2018/5/4. */ -public class CommonAdapter extends RecyclerView.Adapter { +public class CommonAdapter extends RecyclerView.Adapter { private Context mContext; private Activity mActivity; - private List mClassBeen = new ArrayList<>(); - - public CommonAdapter(Context context, Activity activity, List classBeen) { + private List mBaseRecyclerBean = new ArrayList<>(); + protected IBaseRecyclerItemClickListener mIBaseRecyclerItemClickListener; + public CommonAdapter(Context context, Activity activity, List classBeen,IBaseRecyclerItemClickListener listener) { mContext = context; mActivity = activity; - mClassBeen = classBeen; + mBaseRecyclerBean = classBeen; + mIBaseRecyclerItemClickListener = listener; } @Override - public CommonViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { - View view = LayoutInflater.from(mContext).inflate(R.layout.common_item, parent, false); - return new CommonViewHolder(view); + public int getItemViewType(int position) { + if (mBaseRecyclerBean != null ){ + return mBaseRecyclerBean.get(position % mBaseRecyclerBean.size()).getViewType(); + } + return StaticFinalValues.VIEW_HOLDER_TEXT; } @Override - public void onBindViewHolder(CommonViewHolder holder, int position) { - if(mClassBeen != null && position < mClassBeen.size()) { - final ClassBean classBean = mClassBeen.get(position); - holder.mBtnItemCommon.setText(classBean.getName()); - holder.mBtnItemCommon.setOnClickListener(new View.OnClickListener() { - @Override - public void onClick(View v) { - mActivity.startActivity(new Intent(mActivity, classBean.getClazz())); - } - }); + public 
RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { + View view; + switch (viewType){ + case StaticFinalValues.VIEW_HOLDER_CLASS: + view = LayoutInflater.from(mContext).inflate(R.layout.common_item, parent, false); + return new CommonViewHolder(view); + case StaticFinalValues.VIEW_HOLDER_TEXT: + view = LayoutInflater.from(mContext).inflate(R.layout.base_recycler_view_text_item, parent, false); + return new TextViewHolder(view); + } + return null; + } + + @Override + public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { + final BaseRecyclerBean classBean = mBaseRecyclerBean.get(position % mBaseRecyclerBean.size()); + if (holder instanceof TextViewHolder) { + ((TextViewHolder) holder).setDataSource(classBean,holder.getAdapterPosition(),mIBaseRecyclerItemClickListener); + }else if (holder instanceof CommonViewHolder){ + ((CommonViewHolder) holder).setDataSource(classBean,mActivity); } } @Override public int getItemCount() { int ret = 0; - if (mClassBeen.size() > 0) { - ret = mClassBeen.size(); + if (mBaseRecyclerBean.size() > 0) { + ret = mBaseRecyclerBean.size(); } return ret; } - public static class CommonViewHolder extends RecyclerView.ViewHolder { - @BindView(R.id.btn_item_common) - Button mBtnItemCommon; - public CommonViewHolder(View itemView) { - super(itemView); - ButterKnife.bind(this, itemView); - } - } + } diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/base/adapter/CommonViewHolder.java b/app/src/main/java/com/aserbao/androidcustomcamera/base/adapter/CommonViewHolder.java new file mode 100644 index 0000000..70089d2 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/base/adapter/CommonViewHolder.java @@ -0,0 +1,44 @@ +package com.aserbao.androidcustomcamera.base.adapter; + +import android.app.Activity; +import android.content.Intent; +import android.support.v7.widget.RecyclerView; +import android.view.View; +import android.widget.Button; + +import 
com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.base.beans.BaseRecyclerBean; + +import butterknife.BindView; +import butterknife.ButterKnife; + +/** + * 功能: + * + * @author aserbao + * @date : On 2020-01-15 17:32 + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.base.adapter + */ +public class CommonViewHolder extends RecyclerView.ViewHolder { + @BindView(R.id.btn_item_common) + Button mBtnItemCommon; + private Activity mActivity; + private BaseRecyclerBean mClassBean; + public CommonViewHolder(View itemView) { + super(itemView); + ButterKnife.bind(this, itemView); + } + + public void setDataSource(BaseRecyclerBean classBean,Activity activity){ + mActivity = activity; + mClassBean = classBean; + mBtnItemCommon.setText(classBean.getName()); + mBtnItemCommon.setOnClickListener(new View.OnClickListener() { + @Override + public void onClick(View v) { + mActivity.startActivity(new Intent(mActivity, mClassBean.getClazz())); + } + }); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/base/beans/BaseRecyclerBean.java b/app/src/main/java/com/aserbao/androidcustomcamera/base/beans/BaseRecyclerBean.java new file mode 100644 index 0000000..50d2311 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/base/beans/BaseRecyclerBean.java @@ -0,0 +1,69 @@ +package com.aserbao.androidcustomcamera.base.beans; + +import com.aserbao.androidcustomcamera.base.utils.StaticFinalValues; + +/** + * description: + * Created by aserbao on 2018/1/25. 
+ */ + + +public class BaseRecyclerBean { + private String name; + String extra_info; //补充信息 + int tag = -1; //标记 + private Class clazz; + int viewType = StaticFinalValues.VIEW_HOLDER_TEXT; + + public BaseRecyclerBean(String name, int tag) { + this.name = name; + this.tag = tag; + this.viewType = StaticFinalValues.VIEW_HOLDER_TEXT; + } + + public BaseRecyclerBean(String name, Class clazz) { + this.name = name; + this.clazz = clazz; + this.viewType = StaticFinalValues.VIEW_HOLDER_CLASS; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public Class getClazz() { + return clazz; + } + + public void setClazz(Class clazz) { + this.clazz = clazz; + } + + public String getExtra_info() { + return extra_info; + } + + public void setExtra_info(String extra_info) { + this.extra_info = extra_info; + } + + public int getTag() { + return tag; + } + + public void setTag(int tag) { + this.tag = tag; + } + + public int getViewType() { + return viewType; + } + + public void setViewType(int viewType) { + this.viewType = viewType; + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/base/beans/ClassBean.java b/app/src/main/java/com/aserbao/androidcustomcamera/base/beans/ClassBean.java deleted file mode 100644 index e783efd..0000000 --- a/app/src/main/java/com/aserbao/androidcustomcamera/base/beans/ClassBean.java +++ /dev/null @@ -1,33 +0,0 @@ -package com.aserbao.androidcustomcamera.base.beans; - -/** - * description: - * Created by aserbao on 2018/1/25. 
- */ - - -public class ClassBean { - private String name; - private Class clazz; - - public ClassBean(String name, Class clazz) { - this.name = name; - this.clazz = clazz; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public Class getClazz() { - return clazz; - } - - public void setClazz(Class clazz) { - this.clazz = clazz; - } -} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/base/interfaces/IDetailCallBackListener.java b/app/src/main/java/com/aserbao/androidcustomcamera/base/interfaces/IDetailCallBackListener.java new file mode 100644 index 0000000..17ee542 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/base/interfaces/IDetailCallBackListener.java @@ -0,0 +1,16 @@ +package com.aserbao.androidcustomcamera.base.interfaces; + +/** + * 功能: + * + * @author aserbao + * @date : On 2019/1/3 7:02 PM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.base.interfaces + * @Copyright: 个人版权所有 + */ +public interface IDetailCallBackListener { + void success(); + void failed(Exception e); +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/base/utils/APermissionUtils.java b/app/src/main/java/com/aserbao/androidcustomcamera/base/utils/APermissionUtils.java new file mode 100644 index 0000000..cb32e59 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/base/utils/APermissionUtils.java @@ -0,0 +1,42 @@ +package com.aserbao.androidcustomcamera.base.utils; + +import android.Manifest; +import android.app.Activity; +import android.content.pm.PackageManager; +import android.support.v4.app.ActivityCompat; +import android.support.v4.content.ContextCompat; +import android.util.Log; + +/** + * 功能: + * + * @author aserbao + * @date : On 2019/1/4 5:37 PM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.base.utils + * @Copyright: 个人版权所有 + */ +public 
class APermissionUtils { + + public static final String[] BASIC_PERMISSIONS = new String[]{ + Manifest.permission.WRITE_EXTERNAL_STORAGE, + Manifest.permission.CAMERA, + Manifest.permission.RECORD_AUDIO, + }; + + /** + * 检测权限 + * @param activity + */ + public static void checkPermission(Activity activity) { + ActivityCompat.requestPermissions(activity, BASIC_PERMISSIONS, 1); + // TODO: 2019/1/4 之后再完善权限请求 + /*for (String basicPermission : BASIC_PERMISSIONS) { + Log.e("wer", "checkPermission: " ); + if (ContextCompat.checkSelfPermission(activity, basicPermission) != PackageManager.PERMISSION_GRANTED) { + ActivityCompat.requestPermissions(activity, new String[]{basicPermission}, 1); + } + }*/ + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/base/utils/CommonUtils.java b/app/src/main/java/com/aserbao/androidcustomcamera/base/utils/CommonUtils.java new file mode 100644 index 0000000..e526858 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/base/utils/CommonUtils.java @@ -0,0 +1,103 @@ +package com.aserbao.androidcustomcamera.base.utils; + +import android.graphics.Bitmap; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Paint; +import android.text.Layout; +import android.text.StaticLayout; +import android.text.TextPaint; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Created by Administrator on 2017/4/21. 
+ */ + +public class CommonUtils { + + + /** + * 判断是否为电话号码的正则表达式 + * + * @param phoneNumber + * @return + */ + public static boolean isPhoneNumberValid(String phoneNumber) { + boolean isValid = false; + /* + * 可接受的电话格式有: + */ + String expression = "^((13[0-9])|(15[^4,\\D])|(18[0-9]))\\d{8}$"; + + CharSequence inputStr = phoneNumber; + Pattern pattern = Pattern.compile(expression); + Matcher matcher = pattern.matcher(inputStr); + + if (matcher.matches()) { + isValid = true; + } + return isValid; + } + + + + //将文本转换成Bitmap + public static Bitmap textAsBitmap(String text, float textSize) { + + TextPaint textPaint = new TextPaint(); + + // textPaint.setARGB(0x31, 0x31, 0x31, 0); + textPaint.setColor(Color.WHITE); + + textPaint.setTextSize(textSize); + + StaticLayout layout = new StaticLayout(text, textPaint, getTextWidth(textPaint,text), + Layout.Alignment.ALIGN_NORMAL, 1.3f, 0.0f, true); + Bitmap bitmap = Bitmap.createBitmap(layout.getWidth() + 20, + layout.getHeight() + 20, Bitmap.Config.ARGB_8888); + Canvas canvas = new Canvas(bitmap); + canvas.translate(10, 10); +// canvas.drawColor(0xFF31306B); + canvas.drawColor(0x00000000); + + layout.draw(canvas); + return bitmap; + } + + + + public static int getTextWidth(Paint paint, String str) { + int iRet = 0; + if (str != null && str.length() > 0) { + int len = str.length(); + float[] widths = new float[len]; + paint.getTextWidths(str, widths); + for (int j = 0; j < len; j++) { + iRet += (int) Math.ceil(widths[j]); + } + } + return iRet; + } + + + /** + * 计算分享内容的字数,一个汉字=两个英文字母,一个中文标点=两个英文标点 注意:该函数的不适用于对单个字符进行计算,因为单个字符四舍五入后都是1 + * + * @param c + * @return + */ + public static long calculateLength(CharSequence c) { + double len = 0; + for (int i = 0; i < c.length(); i++) { + int tmp = (int) c.charAt(i); + if (tmp > 0 && tmp < 127) { + len += 0.5; + } else { + len++; + } + } + return Math.round(len); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/base/utils/ConstantUtils.java 
b/app/src/main/java/com/aserbao/androidcustomcamera/base/utils/ConstantUtils.java new file mode 100644 index 0000000..403025e --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/base/utils/ConstantUtils.java @@ -0,0 +1,25 @@ +package com.aserbao.androidcustomcamera.base.utils; + +import com.aserbao.androidcustomcamera.R; + +import java.util.Random; + +/** + * description: + * Created by aserbao on 2018/1/25. + */ + + +public class ConstantUtils { + + public static int getDrawable(){ + return drawables[new Random().nextInt(drawables.length)]; + } + public static int[] drawables = { + R.drawable.emoji_00, + R.drawable.emoji_01, + R.drawable.emoji_02, + R.drawable.emoji_03, + R.drawable.emoji_04, + }; +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/base/utils/FlowLayout.java b/app/src/main/java/com/aserbao/androidcustomcamera/base/utils/FlowLayout.java new file mode 100644 index 0000000..5c31cba --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/base/utils/FlowLayout.java @@ -0,0 +1,217 @@ +package com.aserbao.androidcustomcamera.base.utils; + +import android.content.Context; +import android.content.res.TypedArray; +import android.support.v4.text.TextUtilsCompat; +import android.util.AttributeSet; +import android.util.LayoutDirection; +import android.view.View; +import android.view.ViewGroup; + +import com.aserbao.androidcustomcamera.R; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Locale; + +public class FlowLayout extends ViewGroup { + private static final String TAG = "FlowLayout"; + private static final int LEFT = -1; + private static final int CENTER = 0; + private static final int RIGHT = 1; + + protected List> mAllViews = new ArrayList>(); + protected List mLineHeight = new ArrayList(); + protected List mLineWidth = new ArrayList(); + private int mGravity; + private List lineViews = new ArrayList<>(); + + public FlowLayout(Context context, 
AttributeSet attrs, int defStyle) { + super(context, attrs, defStyle); + TypedArray ta = context.obtainStyledAttributes(attrs, R.styleable.TagFlowLayout); + mGravity = ta.getInt(R.styleable.TagFlowLayout_tag_gravity, LEFT); + int layoutDirection = TextUtilsCompat.getLayoutDirectionFromLocale(Locale.getDefault()); + if (layoutDirection == LayoutDirection.RTL) { + if (mGravity == LEFT) { + mGravity = RIGHT; + } else { + mGravity = LEFT; + } + } + ta.recycle(); + } + + public FlowLayout(Context context, AttributeSet attrs) { + this(context, attrs, 0); + } + + public FlowLayout(Context context) { + this(context, null); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + int sizeWidth = MeasureSpec.getSize(widthMeasureSpec); + int modeWidth = MeasureSpec.getMode(widthMeasureSpec); + int sizeHeight = MeasureSpec.getSize(heightMeasureSpec); + int modeHeight = MeasureSpec.getMode(heightMeasureSpec); + + // wrap_content + int width = 0; + int height = 0; + + int lineWidth = 0; + int lineHeight = 0; + + int cCount = getChildCount(); + + for (int i = 0; i < cCount; i++) { + View child = getChildAt(i); + if (child.getVisibility() == View.GONE) { + if (i == cCount - 1) { + width = Math.max(lineWidth, width); + height += lineHeight; + } + continue; + } + measureChild(child, widthMeasureSpec, heightMeasureSpec); + MarginLayoutParams lp = (MarginLayoutParams) child + .getLayoutParams(); + + int childWidth = child.getMeasuredWidth() + lp.leftMargin + + lp.rightMargin; + int childHeight = child.getMeasuredHeight() + lp.topMargin + + lp.bottomMargin; + + if (lineWidth + childWidth > sizeWidth - getPaddingLeft() - getPaddingRight()) { + width = Math.max(width, lineWidth); + lineWidth = childWidth; + height += lineHeight; + lineHeight = childHeight; + } else { + lineWidth += childWidth; + lineHeight = Math.max(lineHeight, childHeight); + } + if (i == cCount - 1) { + width = Math.max(lineWidth, width); + height += lineHeight; + } + } + 
setMeasuredDimension( + // + modeWidth == MeasureSpec.EXACTLY ? sizeWidth : width + getPaddingLeft() + getPaddingRight(), + modeHeight == MeasureSpec.EXACTLY ? sizeHeight : height + getPaddingTop() + getPaddingBottom()// + ); + + } + + + @Override + protected void onLayout(boolean changed, int l, int t, int r, int b) { + mAllViews.clear(); + mLineHeight.clear(); + mLineWidth.clear(); + lineViews.clear(); + + int width = getWidth(); + + int lineWidth = 0; + int lineHeight = 0; + + int cCount = getChildCount(); + + for (int i = 0; i < cCount; i++) { + View child = getChildAt(i); + if (child.getVisibility() == View.GONE) continue; + MarginLayoutParams lp = (MarginLayoutParams) child + .getLayoutParams(); + + int childWidth = child.getMeasuredWidth(); + int childHeight = child.getMeasuredHeight(); + + if (childWidth + lineWidth + lp.leftMargin + lp.rightMargin > width - getPaddingLeft() - getPaddingRight()) { + mLineHeight.add(lineHeight); + mAllViews.add(lineViews); + mLineWidth.add(lineWidth); + + lineWidth = 0; + lineHeight = childHeight + lp.topMargin + lp.bottomMargin; + lineViews = new ArrayList(); + } + lineWidth += childWidth + lp.leftMargin + lp.rightMargin; + lineHeight = Math.max(lineHeight, childHeight + lp.topMargin + + lp.bottomMargin); + lineViews.add(child); + + } + mLineHeight.add(lineHeight); + mLineWidth.add(lineWidth); + mAllViews.add(lineViews); + + + int left = getPaddingLeft(); + int top = getPaddingTop(); + + int lineNum = mAllViews.size(); + + for (int i = 0; i < lineNum; i++) { + lineViews = mAllViews.get(i); + lineHeight = mLineHeight.get(i); + + // set gravity + int currentLineWidth = this.mLineWidth.get(i); + switch (this.mGravity) { + case LEFT: + left = getPaddingLeft(); + break; + case CENTER: + left = (width - currentLineWidth) / 2 + getPaddingLeft(); + break; + case RIGHT: + // 适配了rtl,需要补偿一个padding值 + left = width - (currentLineWidth + getPaddingLeft()) - getPaddingRight(); + // 适配了rtl,需要把lineViews里面的数组倒序排 + 
Collections.reverse(lineViews); + break; + } + + for (int j = 0; j < lineViews.size(); j++) { + View child = lineViews.get(j); + if (child.getVisibility() == View.GONE) { + continue; + } + + MarginLayoutParams lp = (MarginLayoutParams) child + .getLayoutParams(); + + int lc = left + lp.leftMargin; + int tc = top + lp.topMargin; + int rc = lc + child.getMeasuredWidth(); + int bc = tc + child.getMeasuredHeight(); + + child.layout(lc, tc, rc, bc); + + left += child.getMeasuredWidth() + lp.leftMargin + + lp.rightMargin; + } + top += lineHeight; + } + + } + + @Override + public LayoutParams generateLayoutParams(AttributeSet attrs) { + return new MarginLayoutParams(getContext(), attrs); + } + + @Override + protected LayoutParams generateDefaultLayoutParams() { + return new MarginLayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT); + } + + @Override + protected LayoutParams generateLayoutParams(LayoutParams p) { + return new MarginLayoutParams(p); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/base/utils/StaticFinalValues.java b/app/src/main/java/com/aserbao/androidcustomcamera/base/utils/StaticFinalValues.java index 07f7d82..3d430a8 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/base/utils/StaticFinalValues.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/base/utils/StaticFinalValues.java @@ -1,5 +1,9 @@ package com.aserbao.androidcustomcamera.base.utils; +import android.os.Environment; + +import com.aserbao.androidcustomcamera.whole.record.other.MagicFilterType; + /** * description: * Created by aserbao on 2018/5/15. 
@@ -10,8 +14,53 @@ public class StaticFinalValues { //int final public static final int RECORD_MIN_TIME = 5 * 1000; //=======================handler + public static final int EMPTY = 0; public static final int DELAY_DETAL = 1; public static final int MY_TOPIC_ADAPTER = 9; public static final int CHANGE_IMAGE = 10; public static final int OVER_CLICK = 11;//视频定时结束 + //================================================path + public static final String SAVETOPHOTOPATH = "/storage/emulated/0/DCIM/Camera/";//保存至本地相册路径 + public static final String ISSAVEVIDEOTEMPEXIST = "/storage/emulated/0/aserbaoCamera/"; + public static final String VIDEOTEMP = "/storage/emulated/0/aserbaoCamera/videotemp/"; + public static final String STORAGE_TEMP_VIDEO_PATH = Environment.getExternalStorageDirectory().getAbsolutePath() + "/123.mp4"; + public static final String STORAGE_TEMP_VIDEO_PATH1 = Environment.getExternalStorageDirectory().getAbsolutePath() + "/1233.mp4"; + + //======================string + public static final String MAX_NUMBER = "MaxNumber"; + public static final String RESULT_PICK_VIDEO = "ResultPickVideo"; + public static final String VIDEOFILEPATH = "VideoFilePath"; + public static final String MISNOTCOMELOCAL = "mIsNotComeLocal";//0表示本地视频,1表示非本地视频 + public static final String BUNDLE = "bundle"; + public static final String CUT_TIME = "cut_time"; + + private static final String VIDEO_PATH = "video_path"; + + //-------------type--------------- + public static final int VIEW_HOLDER_HEAD = 99; + public static final int VIEW_HOLDER_TEXT = 100; + public static final int VIEW_HOLDER_IMAGE_100H = 101; + public static final int VIEW_HOLDER_CIRCLE_IMAGE_ITEM = 1001; + public static final int VIEW_FULL_IMAGE_ITEM = 1002; + public static final int VIEW_HOLDER_CLASS = 102; + public static final int VIEW_BLEND_MODE = 103; + + //=======================requestCode and resultCode + public static final int COMR_FROM_SEL_COVER_TIME_ACTIVITY = 1; + public static final int 
COMR_FROM_VIDEO_EDIT_TIME_ACTIVITY = 2; + //======================int + public static final int REQUEST_CODE_PICK_VIDEO = 0x200; + public static final int REQUEST_CODE_TAKE_VIDEO = 0x201; + public static float VIDEO_WIDTH_HEIGHT = 0.85f; + + + //滤镜效果 + public static MagicFilterType[] types = new MagicFilterType[]{ + MagicFilterType.NONE, + MagicFilterType.WARM, + MagicFilterType.COOL,//淡雅 + MagicFilterType.HUDSON,//粉嫩 + MagicFilterType.WARM, + MagicFilterType.N1977,//红润 + }; } diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/base/utils/StatusBarUtil.java b/app/src/main/java/com/aserbao/androidcustomcamera/base/utils/StatusBarUtil.java new file mode 100644 index 0000000..c669cdf --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/base/utils/StatusBarUtil.java @@ -0,0 +1,192 @@ +package com.aserbao.androidcustomcamera.base.utils; + +import android.annotation.TargetApi; +import android.app.Activity; +import android.graphics.Color; +import android.os.Build; +import android.view.View; +import android.view.Window; +import android.view.WindowManager; + +import java.lang.reflect.Field; +import java.lang.reflect.Method; + +/** + * http://blog.csdn.net/fkq_2016/article/details/77967157?locationNum=9&fps=1 + * Created by Administrator on 2017/12/18. 
+ */ + +public class StatusBarUtil { + + /** + * 修改状态栏为全透明 + * @param activity + */ + @TargetApi(19) + public static void transparencyBar(Activity activity){ + if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + Window window = activity.getWindow(); + window.clearFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS); + window.getDecorView().setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN + | View.SYSTEM_UI_FLAG_LAYOUT_STABLE); + window.addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS); + window.setStatusBarColor(Color.TRANSPARENT); + + } else + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) { + Window window =activity.getWindow(); + window.setFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS, + WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS); + } + } + + /** + * 修改状态栏颜色,支持4.4以上版本 + * @param activity + * @param colorId + */ + public static void setStatusBarColor(Activity activity, int colorId) { + + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + Window window = activity.getWindow(); +// window.addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS); + window.setStatusBarColor(activity.getResources().getColor(colorId)); + } +// } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) { +// //使用SystemBarTint库使4.4版本状态栏变色,需要先将状态栏设置为透明 +// transparencyBar(activity); +// SystemBarTintManager tintManager = new SystemBarTintManager(activity); +// tintManager.setStatusBarTintEnabled(true); +// tintManager.setStatusBarTintResource(colorId); +// } + } + + /** + *状态栏亮色模式,设置状态栏黑色文字、图标, + * 适配4.4以上版本MIUIV、Flyme和6.0以上版本其他Android + * @param activity + * @return 1:MIUUI 2:Flyme 3:android6.0 + */ + public static int StatusBarLightMode(Activity activity){ + int result=0; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) { + if(MIUISetStatusBarLightMode(activity, true)){ + result=1; + }else if(FlymeSetStatusBarLightMode(activity.getWindow(), true)){ + result=2; + }else if 
(Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + activity.getWindow().getDecorView().setSystemUiVisibility( View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN| View.SYSTEM_UI_FLAG_LIGHT_STATUS_BAR); + result=3; + } + } + return result; + } + + /** + * 已知系统类型时,设置状态栏黑色文字、图标。 + * 适配4.4以上版本MIUIV、Flyme和6.0以上版本其他Android + * @param activity + * @param type 1:MIUUI 2:Flyme 3:android6.0 + */ + public static void StatusBarLightMode(Activity activity, int type){ + if(type==1){ + MIUISetStatusBarLightMode(activity, true); + }else if(type==2){ + FlymeSetStatusBarLightMode(activity.getWindow(), true); + }else if(type==3){ + activity.getWindow().getDecorView().setSystemUiVisibility( View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN| View.SYSTEM_UI_FLAG_LIGHT_STATUS_BAR); + } + + } + + /** + * 状态栏暗色模式,清除MIUI、flyme或6.0以上版本状态栏黑色文字、图标 + */ + public static void StatusBarDarkMode(Activity activity, int type){ + if(type==1){ + MIUISetStatusBarLightMode(activity, false); + }else if(type==2){ + FlymeSetStatusBarLightMode(activity.getWindow(), false); + }else if(type==3){ + activity.getWindow().getDecorView().setSystemUiVisibility(View.SYSTEM_UI_FLAG_VISIBLE); + } + + } + + + /** + * 设置状态栏图标为深色和魅族特定的文字风格 + * 可以用来判断是否为Flyme用户 + * @param window 需要设置的窗口 + * @param dark 是否把状态栏文字及图标颜色设置为深色 + * @return boolean 成功执行返回true + * + */ + public static boolean FlymeSetStatusBarLightMode(Window window, boolean dark) { + boolean result = false; + if (window != null) { + try { + WindowManager.LayoutParams lp = window.getAttributes(); + Field darkFlag = WindowManager.LayoutParams.class + .getDeclaredField("MEIZU_FLAG_DARK_STATUS_BAR_ICON"); + Field meizuFlags = WindowManager.LayoutParams.class + .getDeclaredField("meizuFlags"); + darkFlag.setAccessible(true); + meizuFlags.setAccessible(true); + int bit = darkFlag.getInt(null); + int value = meizuFlags.getInt(lp); + if (dark) { + value |= bit; + } else { + value &= ~bit; + } + meizuFlags.setInt(lp, value); + window.setAttributes(lp); + result = true; + } catch (Exception e) { + 
+ } + } + return result; + } + + /** + * 需要MIUIV6以上 + * @param activity + * @param dark 是否把状态栏文字及图标颜色设置为深色 + * @return boolean 成功执行返回true + * + */ + public static boolean MIUISetStatusBarLightMode(Activity activity, boolean dark) { + boolean result = false; + Window window=activity.getWindow(); + if (window != null) { + Class clazz = window.getClass(); + try { + int darkModeFlag = 0; + Class layoutParams = Class.forName("android.view.MiuiWindowManager$LayoutParams"); + Field field = layoutParams.getField("EXTRA_FLAG_STATUS_BAR_DARK_MODE"); + darkModeFlag = field.getInt(layoutParams); + Method extraFlagField = clazz.getMethod("setExtraFlags", int.class, int.class); + if(dark){ + extraFlagField.invoke(window,darkModeFlag,darkModeFlag);//状态栏透明且黑色字体 + }else{ + extraFlagField.invoke(window, 0, darkModeFlag);//清除黑色字体 + } + result=true; + + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + //开发版 7.7.13 及以后版本采用了系统API,旧方法无效但不会报错,所以两个方式都要加上 + if(dark){ + activity.getWindow().getDecorView().setSystemUiVisibility( View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN| View.SYSTEM_UI_FLAG_LIGHT_STATUS_BAR); + }else { + activity.getWindow().getDecorView().setSystemUiVisibility(View.SYSTEM_UI_FLAG_VISIBLE); + } + } + }catch (Exception e){ + + } + } + return result; + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/base/viewHolder/BaseClickViewHolder.java b/app/src/main/java/com/aserbao/androidcustomcamera/base/viewHolder/BaseClickViewHolder.java new file mode 100644 index 0000000..78482d1 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/base/viewHolder/BaseClickViewHolder.java @@ -0,0 +1,65 @@ +package com.aserbao.androidcustomcamera.base.viewHolder; + +import android.support.annotation.NonNull; +import android.support.v7.widget.RecyclerView; +import android.view.View; + +import butterknife.ButterKnife; + +/** + * 功能: + * + * @author aserbao + * @date : On 2019-08-16 15:39 + * @project:AserbaosAndroid + * 
@package:com.aserbao.aserbaosandroid.base.viewHolder + */ +public class BaseClickViewHolder extends RecyclerView.ViewHolder { + + public static final int COME_FROM_RV_ITEM = 0; + + public BaseClickViewHolder(@NonNull View itemView) { + super(itemView); + ButterKnife.bind(this,itemView); + } + + public void setDataSource(final int position, final IBaseRecyclerItemClickListener mIBaseRecyclerItemClickListener){ + itemView.setOnClickListener(new View.OnClickListener() { + @Override + public void onClick(View v) { + int tempFlag = getTempFlag(v, position); + if (mIBaseRecyclerItemClickListener != null) { + mIBaseRecyclerItemClickListener.itemClickBack(v, tempFlag,false, COME_FROM_RV_ITEM); + } + } + }); + + itemView.setOnLongClickListener(new View.OnLongClickListener() { + @Override + public boolean onLongClick(View v) { + int tempFlag = getTempFlag(v, position); + if (mIBaseRecyclerItemClickListener != null) { + mIBaseRecyclerItemClickListener.itemClickBack(v, tempFlag,true, COME_FROM_RV_ITEM); + } + return true; + } + }); + } + + private int getTempFlag(View v, int position) { + int tempFlag = 0; + Object viewTag = v.getTag(); + if (viewTag instanceof Integer) { + int tag = (int) viewTag; + if (tag >= 0) { + tempFlag = tag; + } else { + tempFlag = position; + } + } else { + tempFlag = position; + } + return tempFlag; + } + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/base/viewHolder/IBaseRecyclerItemClickListener.java b/app/src/main/java/com/aserbao/androidcustomcamera/base/viewHolder/IBaseRecyclerItemClickListener.java new file mode 100644 index 0000000..a813460 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/base/viewHolder/IBaseRecyclerItemClickListener.java @@ -0,0 +1,17 @@ +package com.aserbao.androidcustomcamera.base.viewHolder; + +import android.view.View; + +/** + * 功能: + * + * @author aserbao + * @date : On 2019/2/19 4:53 PM + * @email: this is empty email + * @project:AserbaosAndroid + * 
@package:com.aserbao.aserbaosandroid.base.interfaces + */ +public interface IBaseRecyclerItemClickListener { + void itemClickBack(View view, int position, boolean isLongClick, int comeFrom); + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/base/viewHolder/TextViewHolder.java b/app/src/main/java/com/aserbao/androidcustomcamera/base/viewHolder/TextViewHolder.java new file mode 100644 index 0000000..5f8b291 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/base/viewHolder/TextViewHolder.java @@ -0,0 +1,34 @@ +package com.aserbao.androidcustomcamera.base.viewHolder; + +import android.view.View; +import android.widget.TextView; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.base.beans.BaseRecyclerBean; + +import butterknife.BindView; +import butterknife.ButterKnife; + +public class TextViewHolder extends BaseClickViewHolder { + @BindView(R.id.base_recycler_view_item_tv) + public TextView mBaseRecyclerViewItemTv; + + public TextViewHolder(View itemView) { + super(itemView); + ButterKnife.bind(this, itemView); + } + + public void setDataSource(BaseRecyclerBean classBean, int position, IBaseRecyclerItemClickListener mIBaseRecyclerItemClickListener){ + super.setDataSource(position,mIBaseRecyclerItemClickListener); + int tag = classBean.getTag(); + String name = classBean.getName(); + if (tag >= 0) { + itemView.setTag(tag); + name = name + String.valueOf(tag); + } else { + name = name + String.valueOf(position); + } + mBaseRecyclerViewItemTv.setText(name); + } + +} \ No newline at end of file diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/BlocksActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/BlocksActivity.java index 18083dd..551d937 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/BlocksActivity.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/BlocksActivity.java @@ -1,16 +1,32 @@ package 
com.aserbao.androidcustomcamera.blocks; +import android.view.View; + import com.aserbao.androidcustomcamera.base.activity.RVBaseActivity; -import com.aserbao.androidcustomcamera.base.beans.ClassBean; +import com.aserbao.androidcustomcamera.base.beans.BaseRecyclerBean; +import com.aserbao.androidcustomcamera.blocks.audioRecord.AudioRecordActivity; +import com.aserbao.androidcustomcamera.blocks.others.OthersActivity; +import com.aserbao.androidcustomcamera.blocks.others.changeHue.ChangeHueActivity; +import com.aserbao.androidcustomcamera.blocks.mediaExtractor.MediaExtractorActivity; import com.aserbao.androidcustomcamera.blocks.mediaCodec.MediaCodecActivity; - -import java.util.List; +import com.aserbao.androidcustomcamera.blocks.mediaMuxer.MediaMuxerActivity; +import com.aserbao.androidcustomcamera.blocks.mediaMuxer.functions.CreateVideoAddAudioToMp4; public class BlocksActivity extends RVBaseActivity { + @Override - public List initData() { - mClassBeans.add(new ClassBean("MediaCodec", MediaCodecActivity.class)); - return mClassBeans; + protected void initGetData() { + mBaseRecyclerBeen.add(new BaseRecyclerBean("修改hue", ChangeHueActivity.class)); + mBaseRecyclerBeen.add(new BaseRecyclerBean("AudioRecord", AudioRecordActivity.class)); + mBaseRecyclerBeen.add(new BaseRecyclerBean("MediaCodec", MediaCodecActivity.class)); + mBaseRecyclerBeen.add(new BaseRecyclerBean("MediaExtractor", MediaExtractorActivity.class)); + mBaseRecyclerBeen.add(new BaseRecyclerBean("MediaMuxer", MediaMuxerActivity.class)); + mBaseRecyclerBeen.add(new BaseRecyclerBean("合成", CreateVideoAddAudioToMp4.class)); + mBaseRecyclerBeen.add(new BaseRecyclerBean("其他", OthersActivity.class)); } + @Override + public void itemClickBack(View view, int position, boolean isLongClick, int comeFrom) { + + } } diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/atestcases/EncodeDecodeTest.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/atestcases/EncodeDecodeTest.java new file 
mode 100644 index 0000000..c47c091 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/atestcases/EncodeDecodeTest.java @@ -0,0 +1,993 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.aserbao.androidcustomcamera.blocks.atestcases; +import android.graphics.SurfaceTexture; +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaCodecList; +import android.media.MediaFormat; +import android.opengl.EGL14; +import android.opengl.GLES20; +import android.opengl.GLES11Ext; +import android.opengl.GLSurfaceView; +import android.opengl.Matrix; +import android.os.Environment; +import android.test.AndroidTestCase; +import android.util.Log; +import android.view.Surface; +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.FloatBuffer; +import java.util.Arrays; +import javax.microedition.khronos.egl.EGL10; +import javax.microedition.khronos.egl.EGLConfig; +import javax.microedition.khronos.egl.EGLContext; +import javax.microedition.khronos.egl.EGLDisplay; +import javax.microedition.khronos.egl.EGLSurface; +import javax.microedition.khronos.opengles.GL; +import javax.microedition.khronos.opengles.GL10; +/** + * Generates a series of video frames, encodes them, decodes them, and tests for significant + * divergence from the original. + *

    + * There are two ways to connect an encoder to a decoder. The first is to pass the output + * buffers from the encoder to the input buffers of the decoder, using ByteBuffer.put() to + * copy the bytes. With this approach, we need to watch for BUFFER_FLAG_CODEC_CONFIG, and + * if seen we use format.setByteBuffer("csd-0") followed by decoder.configure() to pass the + * meta-data through. + *

    + * The second way is to write the buffers to a file and then stream it back in. With this + * approach it is necessary to use a MediaExtractor to retrieve the format info and skip past + * the meta-data. + *

    + * The former can be done entirely in memory, but requires that the encoder and decoder + * operate simultaneously (the I/O buffers are owned by MediaCodec). The latter requires + * writing to disk, because MediaExtractor can only accept a file or URL as a source. + *

    + * The direct encoder-to-decoder approach isn't currently tested elsewhere in this CTS + * package, so we use that here. + * + * @link https://android.googlesource.com/platform/cts/+/b04c81bfc2761b21293f9c095da38c757e570fd3/tests/tests/media/src/android/media/cts/EncodeDecodeTest.java + */ +public class EncodeDecodeTest extends AndroidTestCase { + private static final String TAG = "EncodeDecodeTest"; + private static final boolean VERBOSE = false; // lots of logging + private static final boolean DEBUG_SAVE_FILE = false; // save copy of encoded movie + private static final String DEBUG_FILE_NAME_BASE = "/storage/emulated/0/"; + // parameters for the encoder + private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding + private static final int BIT_RATE = 1000000; // 1Mbps + private static final int FRAME_RATE = 15; // 15fps + private static final int IFRAME_INTERVAL = 10; // 10 seconds between I-frames + // movie length, in frames + private static final int NUM_FRAMES = 30; // two seconds of video + private static final int TEST_Y = 240; // YUV values for colored rect + private static final int TEST_U = 220; + private static final int TEST_V = 200; + private static final int TEST_R0 = 0; // RGB eqivalent of {0,0,0} + private static final int TEST_G0 = 136; + private static final int TEST_B0 = 0; + private static final int TEST_R1 = 255; // RGB equivalent of {240,220,200} + private static final int TEST_G1 = 166; + private static final int TEST_B1 = 255; + // size of a frame, in pixels + private int mWidth = -1; + private int mHeight = -1; + /** + * Tests streaming of AVC video through the encoder and decoder. Data is encoded from + * a series of byte[] buffers and decoded into ByteBuffers. The output is checked for + * validity. 
+ */ + public void testEncodeDecodeVideoFromBufferToBufferQCIF() throws Exception { + setSize(176, 144); + testEncodeDecodeVideoFromBuffer(false); + } + public void testEncodeDecodeVideoFromBufferToBufferQVGA() throws Exception { + setSize(320, 240); + testEncodeDecodeVideoFromBuffer(false); + } + public void testEncodeDecodeVideoFromBufferToBuffer720p() throws Exception { + setSize(1280, 720); + testEncodeDecodeVideoFromBuffer(false); + } + /** + * Tests streaming of AVC video through the encoder and decoder. Data is encoded from + * a series of byte[] buffers and decoded into Surfaces. The output is checked for + * validity but some frames may be dropped. + *

    + * Because of the way SurfaceTexture.OnFrameAvailableListener works, we need to run this + * test on a thread that doesn't have a Looper configured. If we don't, the test will + * pass, but we won't actually test the output because we'll never receive the "frame + * available" notifications". The CTS test framework seems to be configuring a Looper on + * the test thread, so we have to hand control off to a new thread for the duration of + * the test. + */ + public void testEncodeDecodeVideoFromBufferToSurfaceQCIF() throws Throwable { + setSize(176, 144); + BufferToSurfaceWrapper.runTest(this); + } + public void testEncodeDecodeVideoFromBufferToSurfaceQVGA() throws Throwable { + setSize(320, 240); + BufferToSurfaceWrapper.runTest(this); + } + public void testEncodeDecodeVideoFromBufferToSurface720p() throws Throwable { + setSize(1280, 720); + BufferToSurfaceWrapper.runTest(this); + } + /** Wraps testEncodeDecodeVideoFromBuffer(true) */ + private static class BufferToSurfaceWrapper implements Runnable { + private Throwable mThrowable; + private EncodeDecodeTest mTest; + private BufferToSurfaceWrapper(EncodeDecodeTest test) { + mTest = test; + } + public void run() { + try { + mTest.testEncodeDecodeVideoFromBuffer(true); + } catch (Throwable th) { + mThrowable = th; + } + } + /** + * Entry point. + */ + public static void runTest(EncodeDecodeTest obj) throws Throwable { + BufferToSurfaceWrapper wrapper = new BufferToSurfaceWrapper(obj); + Thread th = new Thread(wrapper, "codec test"); + th.start(); + th.join(); + if (wrapper.mThrowable != null) { + throw wrapper.mThrowable; + } + } + } + /** + * Sets the desired frame size. + */ + private void setSize(int width, int height) { + if ((width % 16) != 0 || (height % 16) != 0) { + Log.w(TAG, "WARNING: width or height not multiple of 16"); + } + mWidth = width; + mHeight = height; + } + /** + * Tests encoding and subsequently decoding video from frames generated into a buffer. + *

    + * We encode several frames of a video test pattern using MediaCodec, then decode the + * output with MediaCodec and do some simple checks. + *

    + * See http://b.android.com/37769 for a discussion of input format pitfalls. + */ + private void testEncodeDecodeVideoFromBuffer(boolean toSurface) throws Exception { + MediaCodecInfo codecInfo = selectCodec(MIME_TYPE); + if (codecInfo == null) { + // Don't fail CTS if they don't have an AVC codec (not here, anyway). + Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE); + return; + } + if (VERBOSE) Log.d(TAG, "found codec: " + codecInfo.getName()); + int colorFormat = selectColorFormat(codecInfo, MIME_TYPE); + if (VERBOSE) Log.d(TAG, "found colorFormat: " + colorFormat); + // We avoid the device-specific limitations on width and height by using values that + // are multiples of 16, which all tested devices seem to be able to handle. + MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight); + // Set some properties. Failing to specify some of these can cause the MediaCodec + // configure() call to throw an unhelpful exception. + format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat); + format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE); + format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE); + format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL); + if (VERBOSE) Log.d(TAG, "format: " + format); + // Create a MediaCodec for the desired codec, then configure it as an encoder with + // our desired properties. + MediaCodec encoder = MediaCodec.createByCodecName(codecInfo.getName()); + encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); + encoder.start(); + // Create a MediaCodec for the decoder, just based on the MIME type. The various + // format details will be passed through the csd-0 meta-data later on. 
+ MediaCodec decoder = MediaCodec.createDecoderByType(MIME_TYPE); + try { + encodeDecodeVideoFromBuffer(encoder, colorFormat, decoder, toSurface); + } finally { + if (VERBOSE) Log.d(TAG, "releasing codecs"); + encoder.stop(); + decoder.stop(); + encoder.release(); + decoder.release(); + } + } + /** + * Returns the first codec capable of encoding the specified MIME type, or null if no + * match was found. + */ + private static MediaCodecInfo selectCodec(String mimeType) { + int numCodecs = MediaCodecList.getCodecCount(); + for (int i = 0; i < numCodecs; i++) { + MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i); + if (!codecInfo.isEncoder()) { + continue; + } + String[] types = codecInfo.getSupportedTypes(); + for (int j = 0; j < types.length; j++) { + if (types[j].equalsIgnoreCase(mimeType)) { + return codecInfo; + } + } + } + return null; + } + /** + * Returns a color format that is supported by the codec and by this test code. If no + * match is found, this throws a test failure -- the set of formats known to the test + * should be expanded for new platforms. 
+ */ + private static int selectColorFormat(MediaCodecInfo codecInfo, String mimeType) { + MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType); + for (int i = 0; i < capabilities.colorFormats.length; i++) { + int colorFormat = capabilities.colorFormats[i]; + switch (colorFormat) { + // these are the formats we know how to handle for this test + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar: + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar: + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar: + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar: + case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar: + return colorFormat; + default: + break; + } + } + fail("couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType); + return 0; // not reached + } + /** + * Does the actual work for encoding frames from buffers of byte[]. + */ + private void encodeDecodeVideoFromBuffer(MediaCodec encoder, int encoderColorFormat, + MediaCodec decoder, boolean toSurface) { + final int TIMEOUT_USEC = 10000; + ByteBuffer[] encoderInputBuffers = encoder.getInputBuffers(); + ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers(); + ByteBuffer[] decoderInputBuffers = null; + ByteBuffer[] decoderOutputBuffers = null; + MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); + int decoderColorFormat = -12345; // init to invalid value + int generateIndex = 0; + int checkIndex = 0; + boolean decoderConfigured = false; + SurfaceStuff surfaceStuff = null; + // The size of a frame of video data, in the formats we handle, is stride*sliceHeight + // for Y, and (stride/2)*(sliceHeight/2) for each of the Cb and Cr channels. Application + // of algebra and assuming that stride==width and sliceHeight==height yields: + byte[] frameData = new byte[mWidth * mHeight * 3 / 2]; + // Just out of curiosity. 
+ long rawSize = 0; + long encodedSize = 0; + // Save a copy to disk. Useful for debugging the test. + FileOutputStream outputStream = null; + if (DEBUG_SAVE_FILE) { + String fileName = DEBUG_FILE_NAME_BASE + mWidth + "x" + mHeight + ".mp4"; + try { + outputStream = new FileOutputStream(fileName); + Log.d(TAG, "encoded output will be saved as " + fileName); + } catch (IOException ioe) { + Log.w(TAG, "Unable to create debug output file " + fileName); + throw new RuntimeException(ioe); + } + } + if (toSurface) { + surfaceStuff = new SurfaceStuff(mWidth, mHeight); + } + // Loop until the output side is done. + boolean inputDone = false; + boolean encoderDone = false; + boolean outputDone = false; + while (!outputDone) { + if (VERBOSE) Log.d(TAG, "loop"); + // If we're not done submitting frames, generate a new one and submit it. By + // doing this on every loop we're working to ensure that the encoder always has + // work to do. + // + // We don't really want a timeout here, but sometimes there's a delay opening + // the encoder device, so a short timeout can keep us from spinning hard. + if (!inputDone) { + int inputBufIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC); + if (VERBOSE) Log.d(TAG, "inputBufIndex=" + inputBufIndex); + if (inputBufIndex >= 0) { + long ptsUsec = generateIndex * 1000000 / FRAME_RATE; + if (generateIndex == NUM_FRAMES) { + // Send an empty frame with the end-of-stream flag set. If we set EOS + // on a frame with data, that frame data will be ignored, and the + // output will be short one frame. 
+ encoder.queueInputBuffer(inputBufIndex, 0, 0, ptsUsec, + MediaCodec.BUFFER_FLAG_END_OF_STREAM); + inputDone = true; + if (VERBOSE) Log.d(TAG, "sent input EOS (with zero-length frame)"); + } else { + generateFrame(generateIndex, encoderColorFormat, frameData); + ByteBuffer inputBuf = encoderInputBuffers[inputBufIndex]; + // the buffer should be sized to hold one full frame + assertTrue(inputBuf.capacity() >= frameData.length); + inputBuf.clear(); + inputBuf.put(frameData); + encoder.queueInputBuffer(inputBufIndex, 0, frameData.length, ptsUsec, 0); + if (VERBOSE) Log.d(TAG, "submitted frame " + generateIndex + " to enc"); + } + generateIndex++; + } else { + // either all in use, or we timed out during initial setup + if (VERBOSE) Log.d(TAG, "input buffer not available"); + } + } + // Check for output from the encoder. If there's no output yet, we either need to + // provide more input, or we need to wait for the encoder to work its magic. We + // can't actually tell which is the case, so if we can't get an output buffer right + // away we loop around and see if it wants more input. + // + // Once we get EOS from the encoder, we don't need to do this anymore. 
+ if (!encoderDone) { + int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC); + if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) { + // no output available yet + if (VERBOSE) Log.d(TAG, "no output from encoder available"); + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { + // not expected for an encoder + encoderOutputBuffers = encoder.getOutputBuffers(); + if (VERBOSE) Log.d(TAG, "encoder output buffers changed"); + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { + // not expected for an encoder + MediaFormat newFormat = encoder.getOutputFormat(); + if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat); + } else if (encoderStatus < 0) { + fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus); + } else { // encoderStatus >= 0 + ByteBuffer encodedData = encoderOutputBuffers[encoderStatus]; + if (encodedData == null) { + fail("encoderOutputBuffer " + encoderStatus + " was null"); + } + // It's usually necessary to adjust the ByteBuffer values to match BufferInfo. + encodedData.position(info.offset); + encodedData.limit(info.offset + info.size); + encodedSize += info.size; + if (outputStream != null) { + byte[] data = new byte[info.size]; + encodedData.get(data); + encodedData.position(info.offset); + try { + outputStream.write(data); + } catch (IOException ioe) { + Log.w(TAG, "failed writing debug data to file"); + throw new RuntimeException(ioe); + } + } + if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { + // Codec config info. Only expected on first packet. + assertFalse(decoderConfigured); + MediaFormat format = + MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight); + format.setByteBuffer("csd-0", encodedData); + decoder.configure(format, toSurface ? 
surfaceStuff.getSurface() : null, + null, 0); + decoder.start(); + decoderInputBuffers = decoder.getInputBuffers(); + decoderOutputBuffers = decoder.getOutputBuffers(); + decoderConfigured = true; + if (VERBOSE) Log.d(TAG, "decoder configured (" + info.size + " bytes)"); + } else { + // Get a decoder input buffer, blocking until it's available. + assertTrue(decoderConfigured); + int inputBufIndex = decoder.dequeueInputBuffer(-1); + ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex]; + inputBuf.clear(); + inputBuf.put(encodedData); + decoder.queueInputBuffer(inputBufIndex, 0, info.size, info.presentationTimeUs, + info.flags); + encoderDone = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0; + if (VERBOSE) Log.d(TAG, "passed " + info.size + " bytes to decoder" + + (encoderDone ? " (EOS)" : "")); + } + encoder.releaseOutputBuffer(encoderStatus, false); + } + } + // Check for output from the decoder. We want to do this on every loop to avoid + // the possibility of stalling the pipeline. We use a short timeout to avoid + // burning CPU if the decoder is hard at work but the next frame isn't quite ready. + // + // If we're decoding to a Surface, we'll get notified here as usual but the + // ByteBuffer references will be null. The data is sent to Surface instead. 
+ if (decoderConfigured) { + int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC); + if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) { + // no output available yet + if (VERBOSE) Log.d(TAG, "no output from decoder available"); + } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { + if (VERBOSE) Log.d(TAG, "decoder output buffers changed"); + decoderOutputBuffers = decoder.getOutputBuffers(); + } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { + // this happens before the first frame is returned + MediaFormat decoderOutputFormat = decoder.getOutputFormat(); + decoderColorFormat = + decoderOutputFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT); + if (VERBOSE) Log.d(TAG, "decoder output format changed: " + + decoderOutputFormat); + } else if (decoderStatus < 0) { + fail("unexpected result from deocder.dequeueOutputBuffer: " + decoderStatus); + } else { // decoderStatus >= 0 + if (!toSurface) { + ByteBuffer outputFrame = decoderOutputBuffers[decoderStatus]; + outputFrame.position(info.offset); + outputFrame.limit(info.offset + info.size); + rawSize += info.size; + if (info.size == 0) { + if (VERBOSE) Log.d(TAG, "got empty frame"); + } else { + if (VERBOSE) Log.d(TAG, "decoded, checking frame " + checkIndex); + checkFrame(checkIndex++, decoderColorFormat, outputFrame); + } + if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { + if (VERBOSE) Log.d(TAG, "output EOS"); + outputDone = true; + } + } else { + // Before we release+render this buffer, check to see if data from a + // previous go-round has latched. + surfaceStuff.checkNewImageIfAvailable(); + if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus + + " (size=" + info.size + ")"); + rawSize += info.size; + if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { + if (VERBOSE) Log.d(TAG, "output EOS"); + outputDone = true; + } + } + // If output is going to a Surface, the second argument should be true. 
+ // If not, the value doesn't matter. + // + // If we are sending to a Surface, then some time after we call this the + // data will be made available to SurfaceTexture, and the onFrameAvailable() + // callback will fire. + decoder.releaseOutputBuffer(decoderStatus, true /*render*/); + } + } + } + if (VERBOSE) Log.d(TAG, "encoded " + NUM_FRAMES + " frames at " + + mWidth + "x" + mHeight + ": raw=" + rawSize + ", enc=" + encodedSize); + if (outputStream != null) { + try { + outputStream.close(); + } catch (IOException ioe) { + Log.w(TAG, "failed closing debug file"); + throw new RuntimeException(ioe); + } + } + } + /** + * Generates data for frame N into the supplied buffer. We have an 8-frame animation + * sequence that wraps around. It looks like this: + *

    +     *   0 1 2 3
    +     *   7 6 5 4
    +     * 
    + * We draw one of the eight rectangles and leave the rest set to the zero-fill color. + */ + private void generateFrame(int frameIndex, int colorFormat, byte[] frameData) { + final int HALF_WIDTH = mWidth / 2; + boolean semiPlanar = isSemiPlanarYUV(colorFormat); + // Set to zero. In YUV this is a dull green. + Arrays.fill(frameData, (byte) 0); + int startX, startY, countX, countY; + frameIndex %= 8; + //frameIndex = (frameIndex / 8) % 8; // use this instead for debug -- easier to see + if (frameIndex < 4) { + startX = frameIndex * (mWidth / 4); + startY = 0; + } else { + startX = (7 - frameIndex) * (mWidth / 4); + startY = mHeight / 2; + } + for (int y = startY + (mHeight/2) - 1; y >= startY; --y) { + for (int x = startX + (mWidth/4) - 1; x >= startX; --x) { + if (semiPlanar) { + // full-size Y, followed by CbCr pairs at half resolution + // e.g. Nexus 4 OMX.qcom.video.encoder.avc COLOR_FormatYUV420SemiPlanar + // e.g. Galaxy Nexus OMX.TI.DUCATI1.VIDEO.H264E + // OMX_TI_COLOR_FormatYUV420PackedSemiPlanar + frameData[y * mWidth + x] = (byte) TEST_Y; + if ((x & 0x01) == 0 && (y & 0x01) == 0) { + frameData[mWidth*mHeight + y * HALF_WIDTH + x] = (byte) TEST_U; + frameData[mWidth*mHeight + y * HALF_WIDTH + x + 1] = (byte) TEST_V; + } + } else { + // full-size Y, followed by quarter-size Cb and quarter-size Cr + // e.g. Nexus 10 OMX.Exynos.AVC.Encoder COLOR_FormatYUV420Planar + // e.g. 
Nexus 7 OMX.Nvidia.h264.encoder COLOR_FormatYUV420Planar + frameData[y * mWidth + x] = (byte) TEST_Y; + if ((x & 0x01) == 0 && (y & 0x01) == 0) { + frameData[mWidth*mHeight + (y/2) * HALF_WIDTH + (x/2)] = (byte) TEST_U; + frameData[mWidth*mHeight + HALF_WIDTH * (mHeight / 2) + + (y/2) * HALF_WIDTH + (x/2)] = (byte) TEST_V; + } + } + } + } + if (false) { + // make sure that generate and check agree + Log.d(TAG, "SPOT CHECK"); + checkFrame(frameIndex, colorFormat, ByteBuffer.wrap(frameData)); + Log.d(TAG, "SPOT CHECK DONE"); + } + } + /** + * Performs a simple check to see if the frame is more or less right. + *

    + * See {@link generateFrame} for a description of the layout. The idea is to sample + * one pixel from the middle of the 8 regions, and verify that the correct one has + * the non-background color. We can't know exactly what the video encoder has done + * with our frames, so we just check to see if it looks like more or less the right thing. + *

    + * Throws a failure if the frame looks wrong. + */ + private void checkFrame(int frameIndex, int colorFormat, ByteBuffer frameData) { + final int HALF_WIDTH = mWidth / 2; + boolean frameFailed = false; + if (colorFormat == 0x7FA30C03) { + // Nexus 4 decoder output OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka + Log.d(TAG, "unable to check frame contents for colorFormat=" + + Integer.toHexString(colorFormat)); + return; + } + boolean semiPlanar = isSemiPlanarYUV(colorFormat); + frameIndex %= 8; + for (int i = 0; i < 8; i++) { + int x, y; + if (i < 4) { + x = i * (mWidth / 4) + (mWidth / 8); + y = mHeight / 4; + } else { + x = (7 - i) * (mWidth / 4) + (mWidth / 8); + y = (mHeight * 3) / 4; + } + int testY, testU, testV; + if (semiPlanar) { + // Galaxy Nexus uses OMX_TI_COLOR_FormatYUV420PackedSemiPlanar + testY = frameData.get(y * mWidth + x) & 0xff; + testU = frameData.get(mWidth*mHeight + 2*(y/2) * HALF_WIDTH + 2*(x/2)) & 0xff; + testV = frameData.get(mWidth*mHeight + 2*(y/2) * HALF_WIDTH + 2*(x/2) + 1) & 0xff; + } else { + // Nexus 10, Nexus 7 use COLOR_FormatYUV420Planar + testY = frameData.get(y * mWidth + x) & 0xff; + testU = frameData.get(mWidth*mHeight + (y/2) * HALF_WIDTH + (x/2)) & 0xff; + testV = frameData.get(mWidth*mHeight + HALF_WIDTH * (mHeight / 2) + + (y/2) * HALF_WIDTH + (x/2)) & 0xff; + } + boolean failed = false; + if (i == frameIndex) { + failed = !isColorClose(testY, TEST_Y) || + !isColorClose(testU, TEST_U) || + !isColorClose(testV, TEST_V); + } else { + // should be our zeroed-out buffer + failed = !isColorClose(testY, 0) || + !isColorClose(testU, 0) || + !isColorClose(testV, 0); + } + if (failed) { + Log.w(TAG, "Bad frame " + frameIndex + " (r=" + i + ": Y=" + testY + + " U=" + testU + " V=" + testV + ")"); + frameFailed = true; + } + } + if (frameFailed) { + fail("bad frame (" + frameIndex + ")"); + } + } + /** + * Returns true if the actual color value is close to the expected color value. 
+ */ + static boolean isColorClose(int actual, int expected) { + if (expected < 5) { + return actual < (expected + 5); + } else if (expected > 250) { + return actual > (expected - 5); + } else { + return actual > (expected - 5) && actual < (expected + 5); + } + } + /** + * Returns true if the specified color format is semi-planar YUV. Throws an exception + * if the color format is not recognized (e.g. not YUV). + */ + private static boolean isSemiPlanarYUV(int colorFormat) { + switch (colorFormat) { + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar: + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar: + return false; + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar: + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar: + case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar: + return true; + default: + throw new RuntimeException("unknown format " + colorFormat); + } + } + /** + * Holds state associated with a Surface used for output. + *

 * By default, the Surface will be using a BufferQueue in asynchronous mode, so we
 * will likely miss a number of frames.
 */
private static class SurfaceStuff implements SurfaceTexture.OnFrameAvailableListener {
    private static final int EGL_OPENGL_ES2_BIT = 4;
    private EGL10 mEGL;
    private EGLDisplay mEGLDisplay;
    private EGLContext mEGLContext;
    private EGLSurface mEGLSurface;
    private SurfaceTexture mSurfaceTexture;
    private Surface mSurface;
    private boolean mFrameAvailable = false;    // guarded by "this"
    private int mWidth;
    private int mHeight;
    private VideoRender mVideoRender;

    public SurfaceStuff(int width, int height) {
        mWidth = width;
        mHeight = height;
        eglSetup();
        mVideoRender = new VideoRender();
        mVideoRender.onSurfaceCreated();
        // Even if we don't access the SurfaceTexture after the constructor returns, we
        // still need to keep a reference to it.  The Surface doesn't retain a reference
        // at the Java level, so if we don't either then the object can get GCed, which
        // causes the native finalizer to run.
        if (VERBOSE) Log.d(TAG, "textureID=" + mVideoRender.getTextureId());
        mSurfaceTexture = new SurfaceTexture(mVideoRender.getTextureId());
        // This doesn't work if SurfaceStuff is created on the thread that CTS started for
        // these test cases.
        //
        // The CTS-created thread has a Looper, and the SurfaceTexture constructor will
        // create a Handler that uses it.  The "frame available" message is delivered
        // there, but since we're not a Looper-based thread we'll never see it.  For
        // this to do anything useful, SurfaceStuff must be created on a thread without
        // a Looper, so that SurfaceTexture uses the main application Looper instead.
        //
        // Java language note: passing "this" out of a constructor is generally unwise,
        // but we should be able to get away with it here.
        mSurfaceTexture.setOnFrameAvailableListener(this);
        mSurface = new Surface(mSurfaceTexture);
    }

    /**
     * Prepares EGL.  We want a GLES 2.0 context and a surface that supports pbuffer.
     */
    private void eglSetup() {
        mEGL = (EGL10)EGLContext.getEGL();
        mEGLDisplay = mEGL.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
        if (!mEGL.eglInitialize(mEGLDisplay, null)) {
            fail("unable to initialize EGL10");
        }
        // Configure surface for pbuffer and OpenGL ES 2.0.  We want enough RGB bits
        // to be able to tell if the frame is reasonable.
        int[] attribList = {
                EGL10.EGL_RED_SIZE, 8,
                EGL10.EGL_GREEN_SIZE, 8,
                EGL10.EGL_BLUE_SIZE, 8,
                EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
                EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
                EGL10.EGL_NONE
        };
        EGLConfig[] configs = new EGLConfig[1];
        int[] numConfigs = new int[1];
        if (!mEGL.eglChooseConfig(mEGLDisplay, attribList, configs, 1, numConfigs)) {
            fail("unable to find RGB888+pbuffer EGL config");
        }
        // Configure context for OpenGL ES 2.0.
        int[] attrib_list = {
                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                EGL10.EGL_NONE
        };
        mEGLContext = mEGL.eglCreateContext(mEGLDisplay, configs[0], EGL10.EGL_NO_CONTEXT,
                attrib_list);
        checkEglError("eglCreateContext");
        assertNotNull(mEGLContext);
        // Create a pbuffer surface.  By using this for output, we can use glReadPixels
        // to test values in the output.
        int[] surfaceAttribs = {
                EGL10.EGL_WIDTH, mWidth,
                EGL10.EGL_HEIGHT, mHeight,
                EGL10.EGL_NONE
        };
        mEGLSurface = mEGL.eglCreatePbufferSurface(mEGLDisplay, configs[0], surfaceAttribs);
        checkEglError("eglCreatePbufferSurface");
        assertNotNull(mEGLSurface);
        if (!mEGL.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
            fail("eglMakeCurrent failed");
        }
    }

    /**
     * Checks for EGL errors.  Fails the test if any error is pending.
     */
    private void checkEglError(String msg) {
        boolean failed = false;
        int error;
        // Drain the whole error queue so nothing leaks into the next check.
        while ((error = mEGL.eglGetError()) != EGL10.EGL_SUCCESS) {
            Log.e(TAG, msg + ": EGL error: 0x" + Integer.toHexString(error));
            failed = true;
        }
        if (failed) {
            fail("EGL error encountered (see log)");
        }
    }

    /**
     * Returns the Surface that the MediaCodec will draw onto.
     */
    public Surface getSurface() {
        return mSurface;
    }

    /**
     * Latches the next buffer into the texture if one is available, and checks it for
     * validity.  Must be called from the thread that created the SurfaceStuff object.
     */
    public void checkNewImageIfAvailable() {
        boolean newStuff = false;
        // Consume the flag under the lock; do the GL work outside it.
        synchronized (this) {
            if (mSurfaceTexture != null && mFrameAvailable) {
                mFrameAvailable = false;
                newStuff = true;
            }
        }
        if (newStuff) {
            mVideoRender.checkGlError("before updateTexImage");
            mSurfaceTexture.updateTexImage();
            mVideoRender.onDrawFrame(mSurfaceTexture);
            checkSurfaceFrame();
        }
    }

    @Override
    public void onFrameAvailable(SurfaceTexture st) {
        // Called on the Looper thread; just flag the frame for the render thread.
        if (VERBOSE) Log.d(TAG, "new frame available");
        synchronized (this) {
            mFrameAvailable = true;
        }
    }

    /**
     * Attempts to check the frame for correctness.
     * <p>
     * Our definition of "correct" is based on knowing what the frame sequence number is,
     * which we can't reliably get by counting frames since the underlying mechanism can
     * drop frames.  The alternative would be to use the presentation time stamp that
     * we passed to the video encoder, but there's no way to get that from the texture.
     * <p>
     * All we can do is verify that it looks something like a frame we'd expect, i.e.
     * green with exactly one pink rectangle.
     */
    private void checkSurfaceFrame() {
        ByteBuffer pixelBuf = ByteBuffer.allocateDirect(4); // TODO - reuse this
        int numColoredRects = 0;
        int rectPosn = -1;
        for (int i = 0; i < 8; i++) {
            // Note the coordinates are inverted on the Y-axis in GL.
            int x, y;
            if (i < 4) {
                x = i * (mWidth / 4) + (mWidth / 8);
                y = (mHeight * 3) / 4;
            } else {
                x = (7 - i) * (mWidth / 4) + (mWidth / 8);
                y = mHeight / 4;
            }
            GLES20.glReadPixels(x, y, 1, 1, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, pixelBuf);
            int r = pixelBuf.get(0) & 0xff;
            int g = pixelBuf.get(1) & 0xff;
            int b = pixelBuf.get(2) & 0xff;
            if (isColorClose(r, TEST_R0) &&
                    isColorClose(g, TEST_G0) &&
                    isColorClose(b, TEST_B0)) {
                // empty space
            } else if (isColorClose(r, TEST_R1) &&
                    isColorClose(g, TEST_G1) &&
                    isColorClose(b, TEST_B1)) {
                // colored rect
                numColoredRects++;
                rectPosn = i;
            } else {
                // wtf
                Log.w(TAG, "found unexpected color r=" + r + " g=" + g + " b=" + b);
            }
        }
        if (numColoredRects != 1) {
            fail("Found surface with colored rects != 1 (" + numColoredRects + ")");
        } else {
            if (VERBOSE) Log.d(TAG, "good surface, looks like index " + rectPosn);
        }
    }
}

/**
 * GL code to fill a surface with a texture.  This class was largely copied from
 * VideoSurfaceView.VideoRender.
 *

 * TODO: merge implementations
 */
private static class VideoRender {
    private static final int FLOAT_SIZE_BYTES = 4;
    // Each vertex is 5 floats: XYZ position followed by UV texture coordinate.
    private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
    private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
    private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
    // Full-screen quad, drawn as a triangle strip.
    private final float[] mTriangleVerticesData = {
            // X, Y, Z, U, V
            -1.0f, -1.0f, 0, 0.f, 0.f,
            1.0f, -1.0f, 0, 1.f, 0.f,
            -1.0f, 1.0f, 0, 0.f, 1.f,
            1.0f, 1.0f, 0, 1.f, 1.f,
    };
    private FloatBuffer mTriangleVertices;
    private final String mVertexShader =
            "uniform mat4 uMVPMatrix;\n" +
            "uniform mat4 uSTMatrix;\n" +
            "attribute vec4 aPosition;\n" +
            "attribute vec4 aTextureCoord;\n" +
            "varying vec2 vTextureCoord;\n" +
            "void main() {\n" +
            "  gl_Position = uMVPMatrix * aPosition;\n" +
            "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
            "}\n";
    private final String mFragmentShader =
            "#extension GL_OES_EGL_image_external : require\n" +
            "precision mediump float;\n" +
            "varying vec2 vTextureCoord;\n" +
            "uniform samplerExternalOES sTexture;\n" +
            "void main() {\n" +
            "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
            "}\n";
    private float[] mMVPMatrix = new float[16];
    private float[] mSTMatrix = new float[16];
    private int mProgram;
    private int mTextureID = -12345;    // replaced by a real texture id in onSurfaceCreated()
    private int muMVPMatrixHandle;
    private int muSTMatrixHandle;
    private int maPositionHandle;
    private int maTextureHandle;

    public VideoRender() {
        mTriangleVertices = ByteBuffer.allocateDirect(
                mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
                .order(ByteOrder.nativeOrder()).asFloatBuffer();
        mTriangleVertices.put(mTriangleVerticesData).position(0);
        Matrix.setIdentityM(mSTMatrix, 0);
    }

    /** Returns the GL texture id created by {@link #onSurfaceCreated()}. */
    public int getTextureId() {
        return mTextureID;
    }

    /**
     * Draws the external texture from the SurfaceTexture over the whole surface,
     * applying the texture's transform matrix.
     */
    public void onDrawFrame(SurfaceTexture st) {
        checkGlError("onDrawFrame start");
        st.getTransformMatrix(mSTMatrix);
        GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
        GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glUseProgram(mProgram);
        checkGlError("glUseProgram");
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
        mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
        GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
                TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
        checkGlError("glVertexAttribPointer maPosition");
        GLES20.glEnableVertexAttribArray(maPositionHandle);
        checkGlError("glEnableVertexAttribArray maPositionHandle");
        mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
        // NOTE(review): size 3 for a 2-component UV attribute looks odd but matches
        // the upstream sample; the stride keeps the reads in bounds — TODO confirm.
        GLES20.glVertexAttribPointer(maTextureHandle, 3, GLES20.GL_FLOAT, false,
                TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
        checkGlError("glVertexAttribPointer maTextureHandle");
        GLES20.glEnableVertexAttribArray(maTextureHandle);
        checkGlError("glEnableVertexAttribArray maTextureHandle");
        Matrix.setIdentityM(mMVPMatrix, 0);
        GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
        GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        checkGlError("glDrawArrays");
        GLES20.glFinish();
    }

    /**
     * Compiles and links the shader program, looks up attribute/uniform locations,
     * and creates the external-OES texture this renderer samples from.
     */
    public void onSurfaceCreated() {
        mProgram = createProgram(mVertexShader, mFragmentShader);
        if (mProgram == 0) {
            Log.e(TAG, "failed creating program");
            return;
        }
        maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
        checkGlError("glGetAttribLocation aPosition");
        if (maPositionHandle == -1) {
            throw new RuntimeException("Could not get attrib location for aPosition");
        }
        maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
        checkGlError("glGetAttribLocation aTextureCoord");
        if (maTextureHandle == -1) {
            throw new RuntimeException("Could not get attrib location for aTextureCoord");
        }
        muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
        checkGlError("glGetUniformLocation uMVPMatrix");
        if (muMVPMatrixHandle == -1) {
            throw new RuntimeException("Could not get attrib location for uMVPMatrix");
        }
        muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
        checkGlError("glGetUniformLocation uSTMatrix");
        if (muSTMatrixHandle == -1) {
            throw new RuntimeException("Could not get attrib location for uSTMatrix");
        }
        int[] textures = new int[1];
        GLES20.glGenTextures(1, textures, 0);
        mTextureID = textures[0];
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
        checkGlError("glBindTexture mTextureID");
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
                GLES20.GL_NEAREST);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
                GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
                GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
                GLES20.GL_CLAMP_TO_EDGE);
        checkGlError("glTexParameter");
    }

    /**
     * Compiles one shader; returns 0 (and logs the info log) on failure.
     */
    private int loadShader(int shaderType, String source) {
        int shader = GLES20.glCreateShader(shaderType);
        checkGlError("glCreateShader type=" + shaderType);
        GLES20.glShaderSource(shader, source);
        GLES20.glCompileShader(shader);
        int[] compiled = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] == 0) {
            Log.e(TAG, "Could not compile shader " + shaderType + ":");
            Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
            GLES20.glDeleteShader(shader);
            shader = 0;
        }
        return shader;
    }

    /**
     * Compiles both shaders and links them into a program; returns 0 on failure.
     */
    private int createProgram(String vertexSource, String fragmentSource) {
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
        if (vertexShader == 0) {
            return 0;
        }
        int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
        if (pixelShader == 0) {
            return 0;
        }
        int program =
GLES20.glCreateProgram();
        checkGlError("glCreateProgram");
        if (program == 0) {
            Log.e(TAG, "Could not create program");
        }
        GLES20.glAttachShader(program, vertexShader);
        checkGlError("glAttachShader");
        GLES20.glAttachShader(program, pixelShader);
        checkGlError("glAttachShader");
        GLES20.glLinkProgram(program);
        int[] linkStatus = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
        if (linkStatus[0] != GLES20.GL_TRUE) {
            Log.e(TAG, "Could not link program: ");
            Log.e(TAG, GLES20.glGetProgramInfoLog(program));
            GLES20.glDeleteProgram(program);
            program = 0;
        }
        return program;
    }

    /**
     * Checks for GL errors; logs and throws if any error is pending.
     */
    public void checkGlError(String op) {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            Log.e(TAG, op + ": glError " + error);
            throw new RuntimeException(op + ": glError " + error);
        }
    }
}
}

// ===== app/src/main/java/com/aserbao/androidcustomcamera/blocks/atestcases/TestCaseActivity.kt =====
package com.aserbao.androidcustomcamera.blocks.atestcases

import android.support.v7.app.AppCompatActivity
import android.os.Bundle
import android.view.View
import com.aserbao.androidcustomcamera.base.activity.RVBaseActivity
import com.aserbao.androidcustomcamera.base.beans.BaseRecyclerBean

/**
 * List screen for CTS-derived media test cases; items are wired up in
 * itemClickBack as they get ported.
 *
 * https://android.googlesource.com/platform/cts/+/b04c81bfc2761b21293f9c095da38c757e570fd3/tests/tests/media/src/android/media
 */
class TestCaseActivity : RVBaseActivity() {
    override fun itemClickBack(view: View?, position: Int, isLongClick: Boolean, comeFrom: Int) {
        when(position){
            // 0 -> EncodeDecodeTest
        }
    }
    override fun initGetData() {
        mBaseRecyclerBeen.add(BaseRecyclerBean("EncodeDecodeTest",0))
    }

}
newline at end of file diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/audioRecord/AudioRecordActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/audioRecord/AudioRecordActivity.java new file mode 100644 index 0000000..b2b122e --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/audioRecord/AudioRecordActivity.java @@ -0,0 +1,25 @@ +package com.aserbao.androidcustomcamera.blocks.audioRecord; + +import android.os.Bundle; +import android.support.v7.app.AppCompatActivity; + +import com.aserbao.androidcustomcamera.R; + +import butterknife.ButterKnife; +import butterknife.OnClick; + +public class AudioRecordActivity extends AppCompatActivity { + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setContentView(R.layout.activity_audio_record); + ButterKnife.bind(this); + } + + @OnClick(R.id.audio_record_btn) + public void onViewClicked() { + new AudioRecordDemo().getNoiseLevel(); + } + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/audioRecord/AudioRecordDemo.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/audioRecord/AudioRecordDemo.java new file mode 100644 index 0000000..8750972 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/audioRecord/AudioRecordDemo.java @@ -0,0 +1,70 @@ +package com.aserbao.androidcustomcamera.blocks.audioRecord; + +import android.media.AudioFormat; +import android.media.AudioRecord; +import android.media.MediaRecorder; +import android.util.Log; + +/** + * Created by greatpresident on 2014/8/5. 
+ */ +public class AudioRecordDemo { + + private static final String TAG = "AudioRecord"; + static final int SAMPLE_RATE_IN_HZ = 8000; + static final int BUFFER_SIZE = AudioRecord.getMinBufferSize(SAMPLE_RATE_IN_HZ, + AudioFormat.CHANNEL_IN_DEFAULT, AudioFormat.ENCODING_PCM_16BIT); + AudioRecord mAudioRecord; + boolean isGetVoiceRun; + Object mLock; + + public AudioRecordDemo() { + mLock = new Object(); + } + + public void getNoiseLevel() { + if (isGetVoiceRun) { + Log.e(TAG, "还在录着呢"); + return; + } + mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, + SAMPLE_RATE_IN_HZ, AudioFormat.CHANNEL_IN_DEFAULT, + AudioFormat.ENCODING_PCM_16BIT, BUFFER_SIZE); + if (mAudioRecord == null) { + Log.e("sound", "mAudioRecord初始化失败"); + } + isGetVoiceRun = true; + + new Thread(new Runnable() { + @Override + public void run() { + mAudioRecord.startRecording(); + short[] buffer = new short[BUFFER_SIZE]; + while (isGetVoiceRun) { + //r是实际读取的数据长度,一般而言r会小于buffersize + int r = mAudioRecord.read(buffer, 0, BUFFER_SIZE); + long v = 0; + // 将 buffer 内容取出,进行平方和运算 + for (int i = 0; i < buffer.length; i++) { + v += buffer[i] * buffer[i]; + } + // 平方和除以数据总长度,得到音量大小。 + double mean = v / (double) r; + double volume = 10 * Math.log10(mean); + Log.d(TAG, "分贝值:" + volume); + // 大概一秒十次 + synchronized (mLock) { + try { + mLock.wait(100); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + } + mAudioRecord.stop(); + mAudioRecord.release(); + mAudioRecord = null; + } + }).start(); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/ffmpeg/FFmpegActivity.kt b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/ffmpeg/FFmpegActivity.kt new file mode 100644 index 0000000..fc15dee --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/ffmpeg/FFmpegActivity.kt @@ -0,0 +1,208 @@ +package com.aserbao.androidcustomcamera.blocks.ffmpeg + +import Jni.FFmpegCmd +import VideoHandle.* +import android.os.Environment +import 
android.support.annotation.MainThread
import android.util.Log
import android.view.View
import android.widget.Toast
import com.aserbao.androidcustomcamera.base.activity.RVBaseActivity
import com.aserbao.androidcustomcamera.base.beans.BaseRecyclerBean
import com.aserbao.androidcustomcamera.blocks.ffmpeg.beans.WaterFilter
import com.aserbao.androidcustomcamera.blocks.ffmpeg.utils.FFmpegUtils
import java.util.*
import kotlin.collections.ArrayList


// Root of external storage; all of the demo input/output paths below hang off this.
var absolutePath = Environment.getExternalStorageDirectory().absolutePath

/**
 * Demo list screen driving EpMedia/FFmpeg operations (demux, watermark, merge,
 * background music). Input files are hard-coded test paths on external storage.
 */
class FFmpegActivity : RVBaseActivity(), OnEditorListener {

    override fun initGetData() {
        mBaseRecyclerBeen.add(BaseRecyclerBean("取消", 100))
        mBaseRecyclerBeen.add(BaseRecyclerBean("视频中抽取音频", 0))
        mBaseRecyclerBeen.add(BaseRecyclerBean("视频添加水印", 1))
        mBaseRecyclerBeen.add(BaseRecyclerBean("无损视频合并", 2))
        mBaseRecyclerBeen.add(BaseRecyclerBean("多段视频合并", 3))
        mBaseRecyclerBeen.add(BaseRecyclerBean("多段视频加水印并合成", 4))
        mBaseRecyclerBeen.add(BaseRecyclerBean("视频添加配乐并调整音量大小", 5))

        mInputs.add(WaterFilter(videoPath1, png1))
        mInputs.add(WaterFilter(videoPath2, png2))
        mInputs.add(WaterFilter(videoPath3, png3))
    }

    var testVideoPath = "/storage/emulated/0/playground/temp/1588820387250.mp4"
    var testPicPath = "/storage/emulated/0/playground/temp/1588820387250.png"

    var videoPath1 = absolutePath + "/123.mp4"
    var videoPath2 = absolutePath + "/4.mp4"
    var videoPath3 = absolutePath + "/5.mp4"

    var png1 = absolutePath + "/1.png"
    var png2 = absolutePath + "/2.png"
    var png3 = absolutePath + "/3.png"

    var outputMusicPath = absolutePath + "/out_aserbao.mp3"
    var outputPath1 = absolutePath + "/out_aserbao1.mp4"
    var outputPath2 = absolutePath + "/out_aserbao2.mp4"
    var outputPath3 = absolutePath + "/out_aserbao3.mp4"
    var outputPathMp4 = absolutePath + "/out_aserbao.mp4"

    // FIX: the element type was lost in transit; restored from the add() calls above
    // and the forEachIndexed usage below.
    var mInputs: MutableList<WaterFilter> = ArrayList()

    var mStartTime: Long = 0

    override fun itemClickBack(view: View, position: Int, isLongClick: Boolean, comeFrom: Int) {
        mStartTime = System.currentTimeMillis()
        when (position) {
            100 -> {
//                FFmpegCmd.exit()
                addMusicToVideo1()
            }
            0 -> {
                FFmpegUtils.demuxer(videoPath1, outputMusicPath, EpEditor.Format.MP3, this)
            }
            1 -> {
                var tempVideoPath = "/storage/emulated/0/Android/data/com.getremark.playground/files/Movies/15871817738614870009935443.mp4"
                var tempBitmapPath = "/storage/emulated/0/playground/temp/123.png"
                var epVideo1 = EpVideo(tempVideoPath)
//                var epVideo1 = EpVideo(videoPath1)
                epVideo1.addDraw(EpDraw(tempBitmapPath, 0, 0, 576f, 1024f, false))
                val outputOption = EpEditor.OutputOption(outputPathMp4)
                EpEditor.exec(epVideo1, outputOption, this)
            }
            2 -> {
                var epVideo1 = EpVideo(outputPath1)
                var epVideo2 = EpVideo(outputPath2)
                var epVideo3 = EpVideo(outputPath3)
                val list = listOf(epVideo1, epVideo2, epVideo3)
                var outputOption = EpEditor.OutputOption(outputPathMp4)
                EpEditor.mergeByLc(this@FFmpegActivity, list, outputOption, this)
            }
            3 -> {
                var epVideo1 = EpVideo(videoPath1)
                var epVideo2 = EpVideo(videoPath2)
                var epVideo3 = EpVideo(videoPath3)
                val list = listOf(epVideo1, epVideo2, epVideo3)
                var outputOption = EpEditor.OutputOption(outputPathMp4)
                EpEditor.merge(list, outputOption, this)
            }
            4 -> {
//                addWaterFilter(0)
                addWaterFilterOneLine()
            }
            5 -> {
                addMusicToVideo()
            }
        }
    }

    fun addMusicToVideo() {
        var inputVideo = absolutePath + "/5.mp4"
//        var inputVideo = absolutePath + "/temp.mp4"
        var inputMusic = absolutePath + "/input.mp3"
        var outputVideo = absolutePath + "/output.mp4"
        var videoVolume = 0.5f
        var musicVolume = 1f
        // NOTE(review): the video is passed as both video and audio input here
        // (inputMusic is unused) — looks like a leftover experiment; confirm intent.
        FFmpegUtils.music(inputVideo, inputVideo, outputVideo, videoVolume, musicVolume, this)
//        FFmpegUtils.addMusicForMp4(inputVideo,inputMusic,videoVolume,musicVolume,outputVideo,this)
    }

    fun addMusicToVideo1() {
//        var inputVideo = absolutePath + "/5.mp4"
//        var inputVideo = absolutePath + "/temp.mp4"
//        var inputMusic = absolutePath + "/input.mp3"
//        var inputVideo = "/storage/emulated/0/playground/temp/.capture/.remark-1588920936552.mp4"
        var inputVideo = absolutePath + "/test1.mp4"
        var inputMusic = absolutePath + "/er.m4a"
        var outputVideo = absolutePath + "/output.mp4"
        var videoVolume = 1f
        var musicVolume = 1f
        FFmpegUtils.music(inputVideo, inputMusic, outputVideo, videoVolume, musicVolume, this)
    }

    /**
     * Builds a single ffmpeg command that overlays each picture on its video and
     * concatenates the results, e.g.:
     * ffmpeg -i 2.mp4 -i 3.mp4 -i img1.png -i img2.png -filter_complex
     *   "[0:v][2:v]overlay=0:0[in1];[1:v][3:v]overlay=0:10[in2];[in1][in2]concat" -y output.mp4
     */
    private fun addWaterFilterOneLine() {
        var sb = StringBuffer()
        val cmd = CmdList()
        cmd.append("ffmpeg")
        cmd.append("-y")
        mInputs.forEachIndexed { i, waterFilter ->
            cmd.append("-i")
            cmd.append(waterFilter.videoPath)
            cmd.append("-i")
            cmd.append(waterFilter.picturePath)
        }
        cmd.append("-filter_complex")
//        cmd.append("“")
        // NOTE(review): argv-style exec does no shell parsing, so embedding literal
        // quote characters in the filter graph is suspicious — verify against FFmpegCmd.
        cmd.append("\"")
        for (i in 0 until mInputs.size) {
            var inflag = "[in" + i.toString() + "]"
            var firstIndex = i * 2
            var firstElement = firstIndex.toString()
            var secondElement = (firstIndex + 1).toString()
            cmd.append("[$firstElement:v][$secondElement:v]overlay=0:0").append(inflag).append(";")
            sb.append(inflag)
        }
//        sb.append("concat”")
        sb.append("concat\"")
        cmd.append(sb.toString())
        cmd.append(outputPathMp4)
        val cmds = cmd.toTypedArray()
        var cmdLog = ""
        for (ss in cmds) {
            // FIX: was "cmdLog += cmds", which concatenated the array's toString()
            // once per element instead of the elements themselves.
            cmdLog += ss
        }
        Log.v(TAG, "cmd:$cmdLog")
        for (s in cmd) {
            Log.e(TAG, "------:" + s)
        }
        FFmpegCmd.exec(cmds, 46 * 1000, this)
    }

    /** Watermarks one of the three hard-coded videos, selected by index. */
    fun addWaterFilter(index: Int) {
        when (index) {
            0 -> {
                var epVideo1 = EpVideo(videoPath1)
                epVideo1.addDraw(EpDraw(png1, 0, 0, 576f, 1024f, false))
                val outputOption1 = EpEditor.OutputOption(outputPath1)
                EpEditor.exec(epVideo1, outputOption1, this)
            }
            1 -> {
                var epVideo2 = EpVideo(videoPath2)
                epVideo2.addDraw(EpDraw(png2, 0, 0, 576f, 1024f, false))
                val outputOption2 = EpEditor.OutputOption(outputPath2)
                EpEditor.exec(epVideo2, outputOption2, this)
            }
            2 -> {
                var epVideo3 = EpVideo(videoPath3)
                epVideo3.addDraw(EpDraw(png3, 0, 0, 576f, 1024f, false))
                val outputOption3 = EpEditor.OutputOption(outputPath3)
                EpEditor.exec(epVideo3, outputOption3, this)
            }
        }
    }

    var cuurIndex = 0

    override fun onSuccess() {
        /*cuurIndex++
        addWaterFilter(cuurIndex)
        if(cuurIndex == 3){
            itemClickBack(mBaseRv,2,false,2)
        }*/
        Log.e(TAG, ": onSuccess 耗时: " + (System.currentTimeMillis() - mStartTime))
    }

    override fun onFailure() {
        Log.e(TAG, ": onFailure")
    }

    override fun onProgress(progress: Float) {
        Log.e(TAG, ": onProgress" + progress)
    }
}

// ===== app/src/main/java/com/aserbao/androidcustomcamera/blocks/ffmpeg/beans/WaterFilter.java =====
package com.aserbao.androidcustomcamera.blocks.ffmpeg.beans;

/**
 * Value object pairing one input video with the watermark image to overlay on it.
 */
public class WaterFilter {
    String videoPath;
    String picturePath;

    public WaterFilter(String videoPath, String picturePath) {
        this.videoPath = videoPath;
        this.picturePath = picturePath;
    }

    public String getVideoPath() {
        return videoPath;
    }

    public void setVideoPath(String videoPath) {
        this.videoPath = videoPath;
    }

    public String getPicturePath() {
        return picturePath;
    }

    public void setPicturePath(String picturePath) {
        this.picturePath = picturePath;
    }
}

// ===== app/src/main/java/com/aserbao/androidcustomcamera/blocks/ffmpeg/utils/FFmpegUtils.java =====
package com.aserbao.androidcustomcamera.blocks.ffmpeg.utils;

import
android.content.Context;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.util.Log;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import Jni.FFmpegCmd;
import Jni.FileUtils;
import Jni.TrackUtils;
import Jni.VideoUitls;
import VideoHandle.CmdList;
import VideoHandle.EpDraw;
import VideoHandle.EpEditor;
import VideoHandle.EpVideo;
import VideoHandle.OnEditorListener;

/**
 * Static helpers that build and run ffmpeg command lines for common video
 * operations (background music, demux, reverse, speed change, frame extraction).
 */
public class FFmpegUtils {
    private static final int DEFAULT_WIDTH = 480;   // default output width
    private static final int DEFAULT_HEIGHT = 360;  // default output height

    public enum Format {
        MP3, MP4
    }

    public enum PTS {
        VIDEO, AUDIO, ALL
    }

    // Static utility class; never instantiated.
    private FFmpegUtils() {
    }



    /**
     * Adds background music to a video.
     *
     * @param videoin          input video file
     * @param audioin          input audio file
     * @param output           output path
     * @param videoVolume      original soundtrack volume (e.g. 0.7 = 70%)
     * @param audioVolume      background music volume (e.g. 1.5 = 150%)
     * @param onEditorListener result callback
     */
    public static void music(String videoin, String audioin, String output, float videoVolume, float audioVolume, OnEditorListener onEditorListener) {
        MediaExtractor mediaExtractor = new MediaExtractor();
        try {
            mediaExtractor.setDataSource(videoin);
        } catch (IOException e) {
            e.printStackTrace();
            return;
        }
        int at = TrackUtils.selectAudioTrack(mediaExtractor);
        CmdList cmd = new CmdList();
        cmd.append("ffmpeg").append("-y").append("-i").append(videoin);
        if (at == -1) {
            // No audio track in the video: just mux the music in, clipped to the
            // video's duration, copying both streams.
            int vt = TrackUtils.selectVideoTrack(mediaExtractor);
            float duration = (float) mediaExtractor.getTrackFormat(vt).getLong(MediaFormat.KEY_DURATION) / 1000 / 1000;
            cmd.append("-ss").append("0").append("-t").append(duration).append("-i").append(audioin).append("-acodec").append("copy").append("-vcodec").append("copy");
        } else {
            // Mix the two audio streams at the requested volumes; video is copied.
            cmd.append("-i").append(audioin).append("-filter_complex")
                    .append("[0:a]aformat=sample_fmts=fltp:sample_rates=44100:channel_layouts=stereo,volume=" + videoVolume + "[a0];[1:a]aformat=sample_fmts=fltp:sample_rates=44100:channel_layouts=stereo,volume=" + audioVolume + "[a1];[a0][a1]amix=inputs=2:duration=first[aout]")
                    .append("-map").append("[aout]").append("-ac").append("2").append("-c:v")
                    .append("copy").append("-map").append("0:v:0");
        }
        cmd.append(output);
        mediaExtractor.release();
        long d = VideoUitls.getDuration(videoin);
        execCmd(cmd, d, onEditorListener);
    }

    /**
     * Adds a music track to an mp4, mixing it with the original audio.
     * Equivalent command:
     * ffmpeg -y -i 123.mp4 -i 5.aac -filter_complex "[0:a]volume=1.0[a0];
     * [1:a]volume=0.5[a1];[a0][a1]amix=inputs=2:duration=first[aout]"
     * -map "[aout]" -ac 2 -c:v copy -map 0:v:0 output.mp4
     *
     * @param inputVideoPath  input video
     * @param inputMusicPath  input music
     * @param videoVolume     original soundtrack volume, 0..1
     * @param musicVolume     music volume, 0..1
     * @param outputVideoPath output path
     */
    public static void addMusicForMp4(String inputVideoPath, String inputMusicPath, float videoVolume, float musicVolume, String outputVideoPath, final OnEditorListener onEditorListener) {
        CmdList cmd = new CmdList();
        cmd.append("ffmpeg").append("-y").append("-i").append(inputVideoPath)
                .append("-i").append(inputMusicPath)
                .append("-filter_complex")
                .append("[0:a]volume=" + videoVolume + "[a0];[1:a]volume=" + musicVolume + "[a1];[a0][a1]amix=inputs=2:duration=first[aout]")
                .append("-map")
                .append("[aout]")
                .append("-ac")
                .append("2")
                /*.append("-c:v")
                .append("-copy")*/
                .append("-map")
                .append("0:v:0")
                .append(outputVideoPath);
        long d = VideoUitls.getDuration(inputVideoPath);
        execCmd(cmd, d, onEditorListener);
    }

    /**
     * Separates the audio or video stream from a file.
     *
     * @param videoin          input video file
     * @param out              output file path
     * @param format           output type (MP3 extracts audio, MP4 extracts video)
     * @param onEditorListener result callback
     */
    public static void demuxer(String videoin, String out, EpEditor.Format format, OnEditorListener onEditorListener) {
        CmdList cmd = new CmdList();
        cmd.append("ffmpeg").append("-y").append("-i").append(videoin);
        switch (format) {
            case MP3:
                cmd.append("-vn").append("-acodec").append("libmp3lame");
                break;
            case MP4:
                cmd.append("-vcodec").append("copy").append("-an");
                break;
        }
        cmd.append(out);
        long d = VideoUitls.getDuration(videoin);
        execCmd(cmd, d, onEditorListener);
    }

    /**
     * Reverses playback of the audio and/or video stream.
     *
     * @param videoin          input file
     * @param out              output file path
     * @param vr               reverse the video stream
     * @param ar               reverse the audio stream
     * @param onEditorListener result callback
     */
    public static void reverse(String videoin, String out, boolean vr, boolean ar, OnEditorListener onEditorListener) {
        if (!vr && !ar) {
            Log.e("ffmpeg", "parameter error");
            onEditorListener.onFailure();
            return;
        }
        CmdList cmd = new CmdList();
        cmd.append("ffmpeg").append("-y").append("-i").append(videoin).append("-filter_complex");
        String filter = "";
        if (vr) {
            filter += "[0:v]reverse[v];";
        }
        if (ar) {
            filter += "[0:a]areverse[a];";
        }
        // Drop the trailing ';' from the filter graph.
        cmd.append(filter.substring(0, filter.length() - 1));
        if (vr) {
            cmd.append("-map").append("[v]");
        }
        if (ar) {
            cmd.append("-map").append("[a]");
        }
        if (ar && !vr) {
            cmd.append("-acodec").append("libmp3lame");
        }
        cmd.append("-preset").append("superfast").append(out);
        long d = VideoUitls.getDuration(videoin);
        execCmd(cmd, d, onEditorListener);
    }

    /**
     * Changes playback speed of the audio and/or video stream.
     *
     * @param videoin          input file
     * @param out              output path
     * @param times            speed factor (valid range 0.25 - 4)
     * @param pts              which stream(s) to retime
     * @param onEditorListener result callback
     */
    public static void changePTS(String videoin, String out, float times, EpEditor.PTS pts, OnEditorListener onEditorListener) {
        if (times < 0.25f || times > 4.0f) {
            Log.e("ffmpeg", "times can only be 0.25 to 4");
            onEditorListener.onFailure();
            return;
        }
        CmdList cmd = new CmdList();
        cmd.append("ffmpeg").append("-y").append("-i").append(videoin);
        // atempo only accepts 0.5-2.0 per instance, so chain two filters outside
        // that range.
        String t = "atempo=" + times;
        if (times < 0.5f) {
            t = "atempo=0.5,atempo=" + (times / 0.5f);
        } else if (times > 2.0f) {
            t = "atempo=2.0,atempo=" + (times / 2.0f);
        }
        Log.v("ffmpeg", "atempo:" + t);
        switch (pts) {
            case VIDEO:
                cmd.append("-filter_complex").append("[0:v]setpts=" + (1 / times) + "*PTS").append("-an");
                break;
            case AUDIO:
                cmd.append("-filter:a").append(t);
                break;
            case ALL:
                cmd.append("-filter_complex").append("[0:v]setpts=" + (1 / times) + "*PTS[v];[0:a]" + t + "[a]")
                        .append("-map").append("[v]").append("-map").append("[a]");
                break;
        }
        cmd.append("-preset").append("superfast").append(out);
        long d = VideoUitls.getDuration(videoin);
        // Expected output duration shrinks/grows with the speed factor.
        double dd = d / times;
        long ddd = (long) dd;
        execCmd(cmd, ddd, onEditorListener);
    }

    /**
     * Extracts video frames to images.
     *
     * @param videoin          input file
     * @param out              output path (image2 pattern)
     * @param w                output image width
     * @param h                output image height
     * @param rate             frames per second to extract
     * @param onEditorListener result callback
     */
    public static void video2pic(String videoin, String out, int w, int h, float rate, OnEditorListener onEditorListener) {
        if (w <= 0 || h <= 0) {
            Log.e("ffmpeg", "width and height must greater than 0");
            onEditorListener.onFailure();
            return;
        }
        if(rate <= 0){
            Log.e("ffmpeg", "rate must greater than 0");
            onEditorListener.onFailure();
            return;
        }
        CmdList cmd = new CmdList();
        cmd.append("ffmpeg").append("-y").append("-i").append(videoin)
                .append("-r").append(rate).append("-s").append(w+"x"+h).append("-q:v").append(2)
                .append("-f").append("image2").append("-preset").append("superfast").append(out);
        long d = VideoUitls.getDuration(videoin);
        execCmd(cmd, d, onEditorListener);
    }

    /**
     * Assembles images into a video.
     *
     * @param videoin          input image pattern (image2 demuxer input, despite the name)
     * @param out              output path
     * @param w                output video width (0 keeps source size)
     * @param h                output video height (0 keeps source size)
     * @param rate             output frame rate
     * @param onEditorListener result callback
     */
    public static void pic2video(String videoin, String out, int w, int h, float rate, OnEditorListener onEditorListener) {
        if (w < 0 || h < 0) {
            Log.e("ffmpeg", "width and height must greater than 0");
            onEditorListener.onFailure();
            return;
        }
        if(rate <= 0){
            Log.e("ffmpeg", "rate must greater than 0");
            onEditorListener.onFailure();
            return;
        }
        CmdList cmd = new
CmdList(); + cmd.append("ffmpeg").append("-y").append("-f").append("image2").append("-i").append(videoin) + .append("-vcodec").append("libx264") + .append("-r").append(rate); +// .append("-b").append("10M"); + if(w > 0 && h > 0) { + cmd.append("-s").append(w + "x" + h); + } + cmd.append(out); + long d = VideoUitls.getDuration(videoin); + execCmd(cmd, d, onEditorListener); + } + + + /** + * 输出选项设置 + */ + public static class OutputOption { + static final int ONE_TO_ONE = 1;// 1:1 + static final int FOUR_TO_THREE = 2;// 4:3 + static final int SIXTEEN_TO_NINE = 3;// 16:9 + static final int NINE_TO_SIXTEEN = 4;// 9:16 + static final int THREE_TO_FOUR = 5;// 3:4 + + String outPath;//输出路径 + public int frameRate = 0;//帧率 + public int bitRate = 0;//比特率(一般设置10M) + public String outFormat = "";//输出格式(目前暂时只支持mp4,x264,mp3,gif) + private int width = 0;//输出宽度 + private int height = 0;//输出高度 + private int sar = 6;//输出宽高比 + + public OutputOption(String outPath) { + this.outPath = outPath; + } + + /** + * 获取宽高比 + * + * @return 1 + */ + public String getSar() { + String res; + switch (sar) { + case ONE_TO_ONE: + res = "1/1"; + break; + case FOUR_TO_THREE: + res = "4/3"; + break; + case THREE_TO_FOUR: + res = "3/4"; + break; + case SIXTEEN_TO_NINE: + res = "16/9"; + break; + case NINE_TO_SIXTEEN: + res = "9/16"; + break; + default: + res = width + "/" + height; + break; + } + return res; + } + + public void setSar(int sar) { + this.sar = sar; + } + + /** + * 获取输出信息 + * + * @return 1 + */ + String getOutputInfo() { + StringBuilder res = new StringBuilder(); + if (frameRate != 0) { + res.append(" -r ").append(frameRate); + } + if (bitRate != 0) { + res.append(" -b ").append(bitRate).append("M"); + } + if (!outFormat.isEmpty()) { + res.append(" -f ").append(outFormat); + } + return res.toString(); + } + + /** + * 设置宽度 + * + * @param width 宽 + */ + public void setWidth(int width) { + if (width % 2 != 0) width -= 1; + this.width = width; + } + + /** + * 设置高度 + * + * @param height 高 + */ + 
public void setHeight(int height) { + if (height % 2 != 0) height -= 1; + this.height = height; + } + } + + /** + * 开始处理 + * + * @param cmd 命令 + * @param duration 视频时长(单位微秒) + * @param onEditorListener 回调接口 + */ + public static void execCmd(String cmd, long duration, final OnEditorListener onEditorListener) { + cmd = "ffmpeg " + cmd; + String[] cmds = cmd.split(" "); + FFmpegCmd.exec(cmds, duration, new OnEditorListener() { + @Override + public void onSuccess() { + onEditorListener.onSuccess(); + } + + @Override + public void onFailure() { + onEditorListener.onFailure(); + } + + @Override + public void onProgress(final float progress) { + onEditorListener.onProgress(progress); + } + }); + } + + /** + * 开始处理 + * + * @param cmd 命令 + * @param duration 视频时长(单位微秒) + * @param onEditorListener 回调接口 + */ + private static void execCmd(CmdList cmd, long duration, final OnEditorListener onEditorListener) { + String[] cmds = cmd.toArray(new String[cmd.size()]); + StringBuffer sb = new StringBuffer(); + for (String ss : cmds) { + sb.append(ss).append(" "); + } + Log.v("使用的命令为:", "cmd: = " + sb.toString()); + FFmpegCmd.exec(cmds, duration, new OnEditorListener() { + @Override + public void onSuccess() { + onEditorListener.onSuccess(); + } + + @Override + public void onFailure() { + onEditorListener.onFailure(); + } + + @Override + public void onProgress(final float progress) { + onEditorListener.onProgress(progress); + } + }); + } + + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/ffmpeg/utils/TEst.kt b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/ffmpeg/utils/TEst.kt new file mode 100644 index 0000000..4a1d1b1 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/ffmpeg/utils/TEst.kt @@ -0,0 +1,14 @@ +package com.aserbao.androidcustomcamera.blocks.ffmpeg.utils + +import VideoHandle.OnEditorListener +import android.view.View +import com.aserbao.androidcustomcamera.base.activity.RVBaseActivity + +class TEst : 
RVBaseActivity(), OnEditorListener { + override fun onSuccess() {} + override fun onFailure() {} + override fun onProgress(progress: Float) {} + override fun initGetData() {} + override fun itemClickBack(view: View, position: Int, isLongClick: Boolean, comeFrom: Int) {} + override fun onPointerCaptureChanged(hasCapture: Boolean) {} +} \ No newline at end of file diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/interfaces/ICallBackListener.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/interfaces/ICallBackListener.java new file mode 100644 index 0000000..acb39ac --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/interfaces/ICallBackListener.java @@ -0,0 +1,16 @@ +package com.aserbao.androidcustomcamera.blocks.interfaces; + +/** + * 功能: + * + * @author aserbao + * @date : On 2019/1/7 11:01 AM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.blocks.interfaces + * @Copyright: 个人版权所有 + */ +public interface ICallBackListener { + void success(); + void failed(Exception e); +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/MediaCodecActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/MediaCodecActivity.java index 632fb90..a94e043 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/MediaCodecActivity.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/MediaCodecActivity.java @@ -1,21 +1,32 @@ package com.aserbao.androidcustomcamera.blocks.mediaCodec; +import android.view.View; + import com.aserbao.androidcustomcamera.base.activity.RVBaseActivity; -import com.aserbao.androidcustomcamera.base.beans.ClassBean; +import com.aserbao.androidcustomcamera.base.beans.BaseRecyclerBean; import com.aserbao.androidcustomcamera.blocks.mediaCodec.bigflake.BigflakeActivity; +import 
com.aserbao.androidcustomcamera.blocks.mediaCodec.primary.CreatMusicVideoByMediaCodecActivity; import com.aserbao.androidcustomcamera.blocks.mediaCodec.primary.PrimaryMediaCodecActivity; +import com.aserbao.androidcustomcamera.blocks.mediaCodec.primary.mp3TranslateAAC.Mp3TranslateAACActivity; import com.aserbao.androidcustomcamera.blocks.mediaCodec.recordBaseCamera.RecordBaseCameraActivity; import com.aserbao.androidcustomcamera.blocks.mediaCodec.recordCamera.RecordCameraActivity; - -import java.util.List; +import com.aserbao.androidcustomcamera.blocks.mediaCodec.show.MediaCodecShowOnGlSurfaceView; public class MediaCodecActivity extends RVBaseActivity { + @Override - public List initData() { - mClassBeans.add(new ClassBean("MediaCodec基本方法使用Bigflake",BigflakeActivity.class)); - mClassBeans.add(new ClassBean("MediaCodec基本方法使用",PrimaryMediaCodecActivity.class)); - mClassBeans.add(new ClassBean("MediaCodec仅录制相机数据",RecordBaseCameraActivity.class)); - mClassBeans.add(new ClassBean("音视频混合录制,通过SurfaceView显示相机数据",RecordCameraActivity.class)); - return mClassBeans; + protected void initGetData() { + mBaseRecyclerBeen.add(new BaseRecyclerBean("MediaCodec基本方法使用Bigflake",BigflakeActivity.class)); + mBaseRecyclerBeen.add(new BaseRecyclerBean("MediaCodec基本方法使用",PrimaryMediaCodecActivity.class)); + mBaseRecyclerBeen.add(new BaseRecyclerBean("MediaCodec仅录制相机数据",RecordBaseCameraActivity.class)); + mBaseRecyclerBeen.add(new BaseRecyclerBean("音视频混合录制,通过SurfaceView显示相机数据",RecordCameraActivity.class)); + mBaseRecyclerBeen.add(new BaseRecyclerBean("MediaCodec录制随音乐变化的视频",CreatMusicVideoByMediaCodecActivity.class)); + mBaseRecyclerBeen.add(new BaseRecyclerBean("MediaCodec处理音乐",Mp3TranslateAACActivity.class)); + mBaseRecyclerBeen.add(new BaseRecyclerBean("MediaCodec解码视频在GlSurfaceView上显示", MediaCodecShowOnGlSurfaceView.class)); + } + + @Override + public void itemClickBack(View view, int position, boolean isLongClick, int comeFrom) { + } } diff --git 
a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/bigflake/BigFlakeBaseActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/bigflake/BigFlakeBaseActivity.java index 6df0b2a..2e24bdf 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/bigflake/BigFlakeBaseActivity.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/bigflake/BigFlakeBaseActivity.java @@ -8,8 +8,7 @@ import android.widget.Toast; import com.aserbao.androidcustomcamera.R; -import com.aserbao.androidcustomcamera.blocks.mediaCodec.bigflake.decodeEditEncode.DecodeEditEncodeActivity; -import com.aserbao.androidcustomcamera.whole.videoPlayer.VideoPlayerActivity; +import com.aserbao.androidcustomcamera.whole.videoPlayer.VideoViewPlayerActivity; import butterknife.BindView; import butterknife.ButterKnife; @@ -37,7 +36,8 @@ public void onViewClicked(View view) { switch (view.getId()) { case R.id.start: if(mStart.getText().equals("开始录制")) { - new Thread(new Runnable() { +// runOnUiThread(new Runnable() { + new Thread(new Runnable() { @Override public void run() { try { @@ -58,7 +58,7 @@ public void run() { break; case R.id.player: if (!TextUtils.isEmpty(mOutputPath)) { - VideoPlayerActivity.launch(BigFlakeBaseActivity.this, mOutputPath); + VideoViewPlayerActivity.launch(BigFlakeBaseActivity.this, mOutputPath); } break; } diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/bigflake/BigflakeActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/bigflake/BigflakeActivity.java index cf714bd..ba3f191 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/bigflake/BigflakeActivity.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/bigflake/BigflakeActivity.java @@ -1,7 +1,9 @@ package com.aserbao.androidcustomcamera.blocks.mediaCodec.bigflake; +import android.view.View; + import 
com.aserbao.androidcustomcamera.base.activity.RVBaseActivity; -import com.aserbao.androidcustomcamera.base.beans.ClassBean; +import com.aserbao.androidcustomcamera.base.beans.BaseRecyclerBean; import com.aserbao.androidcustomcamera.blocks.mediaCodec.bigflake.cameraToMpeg.CameraToMpegActivity; import com.aserbao.androidcustomcamera.blocks.mediaCodec.bigflake.decodeEditEncode.DecodeEditEncodeActivity; import com.aserbao.androidcustomcamera.blocks.mediaCodec.bigflake.encodeAndMux.EncodeAndMuxActivity; @@ -9,18 +11,21 @@ import com.aserbao.androidcustomcamera.blocks.mediaCodec.bigflake.extractDecodeEditEncodeMux.ExtractDecodeEditEncodeMuxActivity; import com.aserbao.androidcustomcamera.blocks.mediaCodec.bigflake.extractMpegFrames.ExtractMpegFramesActivity; -import java.util.List; - public class BigflakeActivity extends RVBaseActivity { + @Override - public List initData() { - mClassBeans.add(new ClassBean("EncodeAndMux",EncodeAndMuxActivity.class)); - mClassBeans.add(new ClassBean("CameraToMpeg",CameraToMpegActivity.class)); - mClassBeans.add(new ClassBean("EncodeDecode",EncodeDecodeActivity.class)); - mClassBeans.add(new ClassBean("ExtractDecodeEditEncodeMux",ExtractDecodeEditEncodeMuxActivity.class)); - mClassBeans.add(new ClassBean("DecodeEditEncodeActivity",DecodeEditEncodeActivity.class)); - mClassBeans.add(new ClassBean("ExtractMpegFramesActivity",ExtractMpegFramesActivity.class)); - return mClassBeans; + protected void initGetData() { + mBaseRecyclerBeen.add(new BaseRecyclerBean("EncodeAndMux",EncodeAndMuxActivity.class)); + mBaseRecyclerBeen.add(new BaseRecyclerBean("CameraToMpeg",CameraToMpegActivity.class)); + mBaseRecyclerBeen.add(new BaseRecyclerBean("EncodeDecode",EncodeDecodeActivity.class)); + mBaseRecyclerBeen.add(new BaseRecyclerBean("ExtractDecodeEditEncodeMux",ExtractDecodeEditEncodeMuxActivity.class)); + mBaseRecyclerBeen.add(new BaseRecyclerBean("DecodeEditEncodeActivity",DecodeEditEncodeActivity.class)); + mBaseRecyclerBeen.add(new 
BaseRecyclerBean("ExtractMpegFramesActivity",ExtractMpegFramesActivity.class)); + } + + @Override + public void itemClickBack(View view, int position, boolean isLongClick, int comeFrom) { + } } diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/bigflake/decodeEditEncode/DecodeEditEncodeActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/bigflake/decodeEditEncode/DecodeEditEncodeActivity.java index 56ebab5..ff55f36 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/bigflake/decodeEditEncode/DecodeEditEncodeActivity.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/bigflake/decodeEditEncode/DecodeEditEncodeActivity.java @@ -19,21 +19,11 @@ import android.media.MediaCodecList; import android.media.MediaFormat; import android.opengl.GLES20; -import android.os.Bundle; -import android.support.v7.app.AppCompatActivity; -import android.test.AndroidTestCase; -import android.text.TextUtils; import android.util.Log; -import android.view.View; -import android.widget.Button; -import android.widget.Toast; -import com.aserbao.androidcustomcamera.R; import com.aserbao.androidcustomcamera.blocks.mediaCodec.bigflake.BigFlakeBaseActivity; -import com.aserbao.androidcustomcamera.blocks.mediaCodec.bigflake.encodeDecode.EncodeDecodeActivity; import com.aserbao.androidcustomcamera.blocks.mediaCodec.bigflake.encodeDecode.InputSurface; import com.aserbao.androidcustomcamera.blocks.mediaCodec.bigflake.encodeDecode.OutputSurface; -import com.aserbao.androidcustomcamera.whole.videoPlayer.VideoPlayerActivity; import java.io.BufferedOutputStream; import java.io.File; @@ -43,10 +33,6 @@ import java.util.ArrayList; import javax.microedition.khronos.opengles.GL10; -import butterknife.BindView; -import butterknife.ButterKnife; -import butterknife.OnClick; - import static junit.framework.Assert.assertEquals; import static junit.framework.Assert.assertTrue; import static 
junit.framework.Assert.fail; diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/bigflake/encodeDecode/EncodeDecodeActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/bigflake/encodeDecode/EncodeDecodeActivity.java index e2682ca..3ad8418 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/bigflake/encodeDecode/EncodeDecodeActivity.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/bigflake/encodeDecode/EncodeDecodeActivity.java @@ -19,18 +19,9 @@ import android.media.MediaCodecList; import android.media.MediaFormat; import android.opengl.GLES20; -import android.os.Bundle; -import android.support.v7.app.AppCompatActivity; -import android.text.TextUtils; import android.util.Log; -import android.view.View; -import android.widget.Button; -import android.widget.Toast; -import com.aserbao.androidcustomcamera.R; import com.aserbao.androidcustomcamera.blocks.mediaCodec.bigflake.BigFlakeBaseActivity; -import com.aserbao.androidcustomcamera.blocks.mediaCodec.bigflake.extractDecodeEditEncodeMux.ExtractDecodeEditEncodeMuxActivity; -import com.aserbao.androidcustomcamera.whole.videoPlayer.VideoPlayerActivity; import java.io.FileOutputStream; import java.io.IOException; @@ -38,10 +29,6 @@ import java.util.Arrays; import javax.microedition.khronos.opengles.GL10; -import butterknife.BindView; -import butterknife.ButterKnife; -import butterknife.OnClick; - import static junit.framework.Assert.assertEquals; import static junit.framework.Assert.assertFalse; import static junit.framework.Assert.assertTrue; diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/bigflake/extractDecodeEditEncodeMux/ExtractDecodeEditEncodeMuxActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/bigflake/extractDecodeEditEncodeMux/ExtractDecodeEditEncodeMuxActivity.java index 39e537b..e0093e9 100644 --- 
a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/bigflake/extractDecodeEditEncodeMux/ExtractDecodeEditEncodeMuxActivity.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/bigflake/extractDecodeEditEncodeMux/ExtractDecodeEditEncodeMuxActivity.java @@ -24,28 +24,19 @@ import android.media.MediaExtractor; import android.media.MediaFormat; import android.media.MediaMuxer; -import android.os.Bundle; import android.os.Environment; import android.os.Handler; import android.os.HandlerThread; import android.os.Looper; import android.os.Message; -import android.support.v7.app.AppCompatActivity; -import android.test.AndroidTestCase; -import android.text.TextUtils; import android.util.Log; import android.view.Surface; -import android.view.View; -import android.widget.Button; import android.widget.Toast; import com.aserbao.androidcustomcamera.R; import com.aserbao.androidcustomcamera.blocks.mediaCodec.bigflake.BigFlakeBaseActivity; -import com.aserbao.androidcustomcamera.blocks.mediaCodec.bigflake.BigflakeActivity; -import com.aserbao.androidcustomcamera.blocks.mediaCodec.bigflake.encodeDecode.EncodeDecodeActivity; import com.aserbao.androidcustomcamera.blocks.mediaCodec.bigflake.encodeDecode.InputSurface; import com.aserbao.androidcustomcamera.blocks.mediaCodec.bigflake.encodeDecode.OutputSurface; -import com.aserbao.androidcustomcamera.whole.videoPlayer.VideoPlayerActivity; import java.io.File; import java.io.IOException; @@ -53,10 +44,6 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.LinkedList; -import butterknife.BindView; -import butterknife.ButterKnife; -import butterknife.OnClick; - import static junit.framework.Assert.assertEquals; import static junit.framework.Assert.assertTrue; import static junit.framework.Assert.fail; diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/primary/CreatMusicVideoByMediaCodecActivity.java 
b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/primary/CreatMusicVideoByMediaCodecActivity.java new file mode 100644 index 0000000..60cb842 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/primary/CreatMusicVideoByMediaCodecActivity.java @@ -0,0 +1,332 @@ +package com.aserbao.androidcustomcamera.blocks.mediaCodec.primary; + +import android.graphics.Bitmap; +import android.graphics.BitmapFactory; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Paint; +import android.graphics.Rect; +import android.graphics.RectF; +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaFormat; +import android.media.MediaMuxer; +import android.os.Handler; +import android.os.Message; +import android.text.TextUtils; +import android.util.Log; +import android.view.Surface; +import android.view.View; +import android.widget.Button; +import android.widget.TextView; +import android.widget.Toast; +import android.widget.VideoView; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.base.activity.BaseActivity; +import com.aserbao.androidcustomcamera.base.utils.FileUtils; + +import java.io.File; +import java.io.IOException; +import java.lang.ref.WeakReference; +import java.nio.ByteBuffer; + +import butterknife.BindView; +import butterknife.OnClick; + +public class CreatMusicVideoByMediaCodecActivity extends BaseActivity { + private static final String TAG = "PrimaryMediaCodecActivi"; + private static final String MIME_TYPE = "video/avc"; + private static final int WIDTH = 720; + private static final int HEIGHT = 1280; + private static final int BIT_RATE = 4000000; + private static final int FRAMES_PER_SECOND = 4; + private static final int IFRAME_INTERVAL = 5; + + private static final int NUM_FRAMES = 4 * 100; + private static final int START_RECORDING = 0; + private static final int STOP_RECORDING = 1; + + 
@BindView(R.id.btn_recording) + Button mBtnRecording; + @BindView(R.id.btn_watch) + Button mBtnWatch; + @BindView(R.id.primary_mc_tv) + TextView mPrimaryMcTv; + public MediaCodec.BufferInfo mBufferInfo; + public MediaCodec mMediaCodec; + @BindView(R.id.primary_vv) + VideoView mPrimaryVv; + private Surface mInputSurface; + public MediaMuxer mMuxer; + private boolean mMuxerStarted; + private int mTrackIndex; + private long mFakePts; + private boolean isRecording; + + private int cuurFrame = 0; + + private MyHanlder mMyHanlder = new MyHanlder(this); + public File mOutputFile; + + @OnClick({R.id.btn_recording, R.id.btn_watch}) + public void onViewClicked(View view) { + switch (view.getId()) { + case R.id.btn_recording: + if (mBtnRecording.getText().equals("开始录制")) { + try { +// mOutputFile = new File(Environment.getExternalStorageDirectory().getAbsolutePath(), System.currentTimeMillis() + ".mp4"); + mOutputFile = new File(FileUtils.getStorageMp4("PrimaryMediaCodecActivity")); + startRecording(mOutputFile); + mPrimaryMcTv.setText("文件保存路径为:" + mOutputFile.toString()); + mBtnRecording.setText("停止录制"); + isRecording = true; + } catch (IOException e) { + e.printStackTrace(); + mBtnRecording.setText("出现异常了,请查明原因"); + } + } else if (mBtnRecording.getText().equals("停止录制")) { + mBtnRecording.setText("开始录制"); + stopRecording(); + } + break; + case R.id.btn_watch: + String absolutePath = mOutputFile.getAbsolutePath(); + if (!TextUtils.isEmpty(absolutePath)) { + if(mBtnWatch.getText().equals("查看视频")) { + mBtnWatch.setText("删除视频"); + mPrimaryVv.setVideoPath(absolutePath); + mPrimaryVv.start(); + }else if(mBtnWatch.getText().equals("删除视频")){ + if (mOutputFile.exists()){ + mOutputFile.delete(); + mBtnWatch.setText("查看视频"); + } + } + }else{ + Toast.makeText(this, "请先录制", Toast.LENGTH_SHORT).show(); + } + break; + } + } + + private Bitmap mBitmap; + private static class MyHanlder extends Handler { + private WeakReference mPrimaryMediaCodecActivityWeakReference; + + public 
MyHanlder(CreatMusicVideoByMediaCodecActivity activity) { + mPrimaryMediaCodecActivityWeakReference = new WeakReference(activity); + } + + @Override + public void handleMessage(Message msg) { + CreatMusicVideoByMediaCodecActivity activity = mPrimaryMediaCodecActivityWeakReference.get(); + if (activity != null) { + switch (msg.what) { + case START_RECORDING: + activity.drainEncoder(false); + activity.generateFrame(activity.cuurFrame); + Log.e(TAG, "handleMessage: " + activity.cuurFrame); + if (activity.cuurFrame < NUM_FRAMES) { + this.sendEmptyMessage(START_RECORDING); + } else { + activity.drainEncoder(true); + activity.mBtnRecording.setText("开始录制"); + activity.releaseEncoder(); + } + activity.cuurFrame++; + break; + case STOP_RECORDING: + Log.e(TAG, "handleMessage: STOP_RECORDING"); + activity.drainEncoder(true); + activity.mBtnRecording.setText("开始录制"); + activity.releaseEncoder(); + break; + } + } + } + } + + @Override + protected int setLayoutId() { + return R.layout.activity_primary_media_codec; + } + + + private void startRecording(File outputFile) throws IOException { + cuurFrame = 0; + mBitmap = BitmapFactory.decodeResource(getResources(),R.drawable.katong); + prepareEncoder(outputFile); + mMyHanlder.sendEmptyMessage(START_RECORDING); + } + + private void stopRecording() { + mMyHanlder.removeMessages(START_RECORDING); + mMyHanlder.sendEmptyMessage(STOP_RECORDING); + } + + /** + * 准备视频编码器,muxer,和一个输入表面。 + */ + private void prepareEncoder(File outputFile) throws IOException { + mBufferInfo = new MediaCodec.BufferInfo(); + MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, WIDTH, HEIGHT); + + //1. 
设置一些属性。没有指定其中的一些可能会导致MediaCodec.configure()调用抛出一个无用的异常。 + format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); + format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);//比特率(比特率越高,音视频质量越高,编码文件越大) + format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAMES_PER_SECOND);//设置帧速 + format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);//设置关键帧间隔时间 + + //2.创建一个MediaCodec编码器,并配置格式。获取一个我们可以用于输入的表面,并将其封装到处理EGL工作的类中。 + mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE); + mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); + mInputSurface = mMediaCodec.createInputSurface(); + mMediaCodec.start(); + //3. 创建一个MediaMuxer。我们不能在这里添加视频跟踪和开始合成,因为我们的MediaFormat里面没有缓冲数据。 + // 只有在编码器开始处理数据后才能从编码器获得这些数据。我们实际上对多路复用音频没有兴趣。我们只是想要 + // 将从MediaCodec获得的原始H.264基本流转换为.mp4文件。 + mMuxer = new MediaMuxer(outputFile.toString(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); + + mMuxerStarted = false; + mTrackIndex = -1; + } + + private void drainEncoder(boolean endOfStream) { + final int TIMEOUT_USEC = 10000; + if (endOfStream) { + mMediaCodec.signalEndOfInputStream();//在输入信号end-of-stream。相当于提交一个空缓冲区。视频编码完结 + } + ByteBuffer[] encoderOutputBuffers = mMediaCodec.getOutputBuffers(); + while (true) { + int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC); + Log.e(TAG, "drainEncoder: " + outputBufferIndex); + if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {//没有可以输出的数据使用时 + if (!endOfStream) { + break; // out of while + } + } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { + //输出缓冲区已经更改,客户端必须引用新的 + encoderOutputBuffers = mMediaCodec.getOutputBuffers(); + } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { + //输出格式发生了变化,后续数据将使用新的数据格式。 + if (mMuxerStarted) { + throw new RuntimeException("format changed twice"); + } + MediaFormat newFormat = mMediaCodec.getOutputFormat(); + mTrackIndex = mMuxer.addTrack(newFormat); + mMuxer.start(); + 
mMuxerStarted = true; + } else if (outputBufferIndex < 0) { + } else { + ByteBuffer encodedData = encoderOutputBuffers[outputBufferIndex]; + if (encodedData == null) { + throw new RuntimeException("encoderOutputBuffer " + outputBufferIndex + + " was null"); + } + if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { + //当我们得到的时候,编解码器的配置数据被拉出来,并给了muxer。这时候可以忽略。 + mBufferInfo.size = 0; + } + if (mBufferInfo.size != 0) { + if (!mMuxerStarted) { + throw new RuntimeException("muxer hasn't started"); + } + //调整ByteBuffer值以匹配BufferInfo。 + encodedData.position(mBufferInfo.offset); + encodedData.limit(mBufferInfo.offset + mBufferInfo.size); + mBufferInfo.presentationTimeUs = mFakePts; + mFakePts += 1000000L / FRAMES_PER_SECOND; + + mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo); + } + mMediaCodec.releaseOutputBuffer(outputBufferIndex, false); + if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { + if (!endOfStream) { + Log.e(TAG, "意外结束"); + } else { + Toast.makeText(this, "已完成……", Toast.LENGTH_SHORT).show(); + Log.e(TAG, "正常结束"); + } + isRecording = false; + break; + } + } + } + } + + + private void generateFrame(int frameNum){ + Canvas canvas = mInputSurface.lockCanvas(null); + Paint paint = new Paint(); + try { + int width = canvas.getWidth(); + int height = canvas.getHeight(); + String color = "#FFCA39"; + if (frameNum %2 == 0 ){ + color = "#FFCA39"; + }else{ + color = "#FFF353"; + } + int color1 = Color.parseColor(color); + canvas.drawColor(color1); + paint.setTextSize(100); + paint.setColor(0xff000000); + canvas.drawText("第"+ String.valueOf(frameNum) + "帧",width/2,height/2,paint); + Rect srcRect = new Rect(0, 0, mBitmap.getWidth(), mBitmap.getHeight()); + int margain = 30; + Rect decRect = new Rect(margain, margain, width - margain, height-margain); + canvas.drawBitmap(mBitmap,srcRect,decRect,paint); + + int roundMargain = 60; + int roundHeight = 300; + int roundRadius = 25; + int roundLineWidth = 10; + 
paint.setStyle(Paint.Style.FILL);//充满 + paint.setAntiAlias(true);// 设置画笔的锯齿效果 + RectF roundRect1 = new RectF(roundMargain - roundLineWidth,roundMargain - roundLineWidth,width - roundMargain + roundLineWidth,roundHeight + roundMargain + roundLineWidth); + paint.setColor(Color.BLACK); + canvas.drawRoundRect(roundRect1,roundRadius,roundRadius,paint); + paint.setColor(color1); + RectF roundRect2 = new RectF(roundMargain,roundMargain,width - roundMargain,roundHeight + roundMargain); + canvas.drawRoundRect(roundRect2,roundRadius,roundRadius,paint); + +// paint.setStyle(Paint.Style.STROKE);//充满 + int timeMargain = roundMargain + 50; + String sTime = "2018/12/29 00:39"; + paint.setTextAlign(Paint.Align.CENTER); + paint.setTextSize(40); + paint.setColor(Color.BLACK); + canvas.drawText(sTime,width/2,timeMargain,paint); + + int soundMargain = timeMargain + 80; + String soundTime = "party 是我家"; + String soundTime2 = "party party 是我家"; + paint.setTextAlign(Paint.Align.CENTER); + paint.setTextSize(80); + canvas.drawText(soundTime,width/2,soundMargain,paint); + canvas.drawText(soundTime2,width/2,soundMargain + 80,paint); + + } finally { + mInputSurface.unlockCanvasAndPost(canvas); + } + + } + + private void releaseEncoder() { + if (mMediaCodec != null) { + mMediaCodec.stop(); + mMediaCodec.release(); + mMediaCodec = null; + } + if (mInputSurface != null) { + mInputSurface.release(); + mInputSurface = null; + } + if (mMuxer != null) { + mMuxer.stop(); + mMuxer.release(); + mMuxer = null; + } + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/primary/PrimaryMediaCodecActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/primary/PrimaryMediaCodecActivity.java index bcb6363..49b152a 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/primary/PrimaryMediaCodecActivity.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/primary/PrimaryMediaCodecActivity.java @@ 
-35,9 +35,9 @@ public class PrimaryMediaCodecActivity extends BaseActivity { private static final String MIME_TYPE = "video/avc"; private static final int WIDTH = 720; private static final int HEIGHT = 1280; - private static final int BIT_RATE = 4000000; - private static final int FRAMES_PER_SECOND = 4; - private static final int IFRAME_INTERVAL = 5; + private static final int BIT_RATE = 3000000; + private static final int FRAMES_PER_SECOND = 30; + private static final int IFRAME_INTERVAL = 1; private static final int NUM_FRAMES = 4 * 100; private static final int START_RECORDING = 0; @@ -71,8 +71,8 @@ public void onViewClicked(View view) { case R.id.btn_recording: if (mBtnRecording.getText().equals("开始录制")) { try { -// mOutputFile = new File(Environment.getExternalStorageDirectory().getAbsolutePath(), System.currentTimeMillis() + ".mp4"); - mOutputFile = new File(FileUtils.getStorageMp4("PrimaryMediaCodecActivity")); + mOutputFile = new File(Environment.getExternalStorageDirectory().getAbsolutePath(), System.currentTimeMillis() + ".mp4"); +// mOutputFile = new File(FileUtils.getStorageMp4("PrimaryMediaCodecActivity")); startRecording(mOutputFile); mPrimaryMcTv.setText("文件保存路径为:" + mOutputFile.toString()); mBtnRecording.setText("停止录制"); diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/primary/mp3TranslateAAC/AudioCodec.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/primary/mp3TranslateAAC/AudioCodec.java new file mode 100644 index 0000000..0b853d2 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/primary/mp3TranslateAAC/AudioCodec.java @@ -0,0 +1,504 @@ +package com.aserbao.androidcustomcamera.blocks.mediaCodec.primary.mp3TranslateAAC; +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaExtractor; +import android.media.MediaFormat; +import android.util.Log; + +import java.io.BufferedInputStream; +import 
java.io.BufferedOutputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.ArrayList; +/** + * 功能: + * + * @author aserbao + * @date : On 2019/1/3 5:37 PM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.blocks.mediaCodec.primary.mp3TranslateAAC + * @Copyright: 个人版权所有 + */ +public class AudioCodec { + private static final String TAG = "AudioCodec"; + private String encodeType; + private String srcPath; + private String dstPath; + private MediaCodec mediaDecode; + private MediaCodec mediaEncode; + private MediaExtractor mediaExtractor; + private ByteBuffer[] decodeInputBuffers; + private ByteBuffer[] decodeOutputBuffers; + private ByteBuffer[] encodeInputBuffers; + private ByteBuffer[] encodeOutputBuffers; + private MediaCodec.BufferInfo decodeBufferInfo; + private MediaCodec.BufferInfo encodeBufferInfo; + private FileOutputStream fos; + private BufferedOutputStream bos; + private FileInputStream fis; + private BufferedInputStream bis; + private ArrayList chunkPCMDataContainer;//PCM数据块容器 + private OnCompleteListener onCompleteListener; + private OnProgressListener onProgressListener; + private long fileTotalSize; + private long decodeSize; + + + public static AudioCodec newInstance() { + return new AudioCodec(); + } + + /** + * 设置编码器类型 + * @param encodeType + */ + public void setEncodeType(String encodeType) { + this.encodeType=encodeType; + } + + /** + * 设置输入输出文件位置 + * @param srcPath + * @param dstPath + */ + public void setIOPath(String srcPath, String dstPath) { + this.srcPath=srcPath; + this.dstPath=dstPath; + } + + /** + * 此类已经过封装 + * 调用prepare方法 会初始化Decode 、Encode 、输入输出流 等一些列操作 + */ + public void prepare() { + + if (encodeType == null) { + throw new IllegalArgumentException("encodeType can't be null"); + } + + if (srcPath == null) { + throw new 
IllegalArgumentException("srcPath can't be null"); + } + + if (dstPath == null) { + throw new IllegalArgumentException("dstPath can't be null"); + } + + try { + fos = new FileOutputStream(new File(dstPath)); + bos = new BufferedOutputStream(fos,200*1024); + File file = new File(srcPath); + fileTotalSize=file.length(); + } catch (IOException e) { + e.printStackTrace(); + } + chunkPCMDataContainer= new ArrayList<>(); + initMediaDecode();//解码器 + + if (encodeType == MediaFormat.MIMETYPE_AUDIO_AAC) { + initAACMediaEncode();//AAC编码器 + }else if (encodeType == MediaFormat.MIMETYPE_AUDIO_MPEG) { + initMPEGMediaEncode();//mp3编码器 + } + + } + + /** + * 初始化解码器 + */ + private void initMediaDecode() { + try { + mediaExtractor=new MediaExtractor();//此类可分离视频文件的音轨和视频轨道 + mediaExtractor.setDataSource(srcPath);//媒体文件的位置 + for (int i = 0; i < mediaExtractor.getTrackCount(); i++) {//遍历媒体轨道 此处我们传入的是音频文件,所以也就只有一条轨道 + MediaFormat format = mediaExtractor.getTrackFormat(i); + String mime = format.getString(MediaFormat.KEY_MIME); + if (mime.startsWith("audio")) {//获取音频轨道 +// format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 200 * 1024); + mediaExtractor.selectTrack(i);//选择此音频轨道 + mediaDecode = MediaCodec.createDecoderByType(mime);//创建Decode解码器 + mediaDecode.configure(format, null, null, 0); + break; + } + } + } catch (IOException e) { + e.printStackTrace(); + } + + if (mediaDecode == null) { + Log.e(TAG, "create mediaDecode failed"); + return; + } + mediaDecode.start();//启动MediaCodec ,等待传入数据 +// decodeInputBuffers=mediaDecode.getInputBuffers();//MediaCodec在此ByteBuffer[]中获取输入数据 +// decodeOutputBuffers=mediaDecode.getOutputBuffers();//MediaCodec将解码后的数据放到此ByteBuffer[]中 我们可以直接在这里面得到PCM数据 + decodeBufferInfo=new MediaCodec.BufferInfo();//用于描述解码得到的byte[]数据的相关信息 + showLog("buffers:" + decodeInputBuffers.length); + } + + + /** + * 初始化AAC编码器 + */ + private void initAACMediaEncode() { + try { + MediaFormat encodeFormat = MediaFormat.createAudioFormat(encodeType, 44100, 2);//参数对应-> mime type、采样率、声道数 + 
encodeFormat.setInteger(MediaFormat.KEY_BIT_RATE, 96000);//比特率 + encodeFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC); + encodeFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 100 * 1024); + mediaEncode = MediaCodec.createEncoderByType(encodeType); + mediaEncode.configure(encodeFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); + } catch (IOException e) { + e.printStackTrace(); + } + + if (mediaEncode == null) { + Log.e(TAG, "create mediaEncode failed"); + return; + } + mediaEncode.start(); + encodeInputBuffers=mediaEncode.getInputBuffers(); + encodeOutputBuffers=mediaEncode.getOutputBuffers(); + encodeBufferInfo=new MediaCodec.BufferInfo(); + } + + /** + * 初始化MPEG编码器 + */ + private void initMPEGMediaEncode() { + + } + + private boolean codeOver = false; + /** + * 开始转码 + * 音频数据{@link #srcPath}先解码成PCM PCM数据在编码成想要得到的{@link #encodeType}音频格式 + * mp3->PCM->aac + */ + public void startAsync() { + showLog("start"); + + new Thread(new DecodeRunnable()).start(); + new Thread(new EncodeRunnable()).start(); + + } + + /** + * 将PCM数据存入{@link #chunkPCMDataContainer} + * @param pcmChunk PCM数据块 + */ + private void putPCMData(byte[] pcmChunk) { + synchronized (AudioCodec.class) {//记得加锁 + chunkPCMDataContainer.add(pcmChunk); + } + } + + /** + * 在Container中{@link #chunkPCMDataContainer}取出PCM数据 + * @return PCM数据块 + */ + private byte[] getPCMData() { + synchronized (AudioCodec.class) {//记得加锁 + showLog("getPCM:"+chunkPCMDataContainer.size()); + if (chunkPCMDataContainer.isEmpty()) { + return null; + } + + byte[] pcmChunk = chunkPCMDataContainer.get(0);//每次取出index 0 的数据 + chunkPCMDataContainer.remove(pcmChunk);//取出后将此数据remove掉 既能保证PCM数据块的取出顺序 又能及时释放内存 + return pcmChunk; + } + } + + + /** + * 解码{@link #srcPath}音频文件 得到PCM数据块 + * @return 是否解码完所有数据 + */ +// private void srcAudioFormatToPCM() { +// if (decodeInputBuffers == null || mediaDecode == null) { +// return; +// } +// for (int i = 0; i < decodeInputBuffers.length-1; i++) { +// int 
inputIndex = mediaDecode.dequeueInputBuffer(-1);//获取可用的inputBuffer -1代表一直等待,0表示不等待 建议-1,避免丢帧 +// if (inputIndex < 0) { +// codeOver =true; +// return; +// } +// +// ByteBuffer inputBuffer = decodeInputBuffers[inputIndex];//拿到inputBuffer +// inputBuffer.clear();//清空之前传入inputBuffer内的数据 +// int sampleSize = mediaExtractor.readSampleData(inputBuffer, 0);//MediaExtractor读取数据到inputBuffer中 +// if (sampleSize <0) {//小于0 代表所有数据已读取完成 +// codeOver=true; +// }else { +// mediaDecode.queueInputBuffer(inputIndex, 0, sampleSize, 0, 0);//通知MediaDecode解码刚刚传入的数据 +// mediaExtractor.advance();//MediaExtractor移动到下一取样处 +// decodeSize+=sampleSize; +// } +// } +// +// //获取解码得到的byte[]数据 参数BufferInfo上面已介绍 10000同样为等待时间 同上-1代表一直等待,0代表不等待。此处单位为微秒 +// //此处建议不要填-1 有些时候并没有数据输出,那么他就会一直卡在这 等待 +// int outputIndex = mediaDecode.dequeueOutputBuffer(decodeBufferInfo, 10000); +// +//// showLog("decodeOutIndex:" + outputIndex); +// ByteBuffer outputBuffer; +// byte[] chunkPCM; +// while (outputIndex >= 0) {//每次解码完成的数据不一定能一次吐出 所以用while循环,保证解码器吐出所有数据 +// outputBuffer = decodeOutputBuffers[outputIndex];//拿到用于存放PCM数据的Buffer +// chunkPCM = new byte[decodeBufferInfo.size];//BufferInfo内定义了此数据块的大小 +// outputBuffer.get(chunkPCM);//将Buffer内的数据取出到字节数组中 +// outputBuffer.clear();//数据取出后一定记得清空此Buffer MediaCodec是循环使用这些Buffer的,不清空下次会得到同样的数据 +// putPCMData(chunkPCM);//自己定义的方法,供编码器所在的线程获取数据,下面会贴出代码 +// mediaDecode.releaseOutputBuffer(outputIndex, false);//此操作一定要做,不然MediaCodec用完所有的Buffer后 将不能向外输出数据 +// outputIndex = mediaDecode.dequeueOutputBuffer(decodeBufferInfo, 10000);//再次获取数据,如果没有数据输出则outputIndex=-1 循环结束 +// } +// +// } + + int inputIndex = 0,outputIndex = 0; + private void srcAudioFormatToPCM() { + for (int i = 0; i < 4; i++) { //这个4就是原来dequeueInputBuffers的长度-1,打印多首歌都为4,有待考证 + inputIndex = mediaDecode.dequeueInputBuffer(-1); + // -1代表一直等待,0表示不等待 + // 建议-1,避免丢帧 + if (inputIndex < 0) { + codeOver = true; + return; + } + ByteBuffer inputBuffer = mediaDecode.getInputBuffer(inputIndex);// 拿到inputBuffer + 
inputBuffer.clear();// 清空之前传入inputBuffer内的数据 + int sampleSize = mediaExtractor.readSampleData(inputBuffer, 0);// MediaExtractor读取数据到inputBuffer中 + if (sampleSize < 0) {// 小于0 代表所有数据已读取完成 + codeOver = true; + } else { + mediaDecode.queueInputBuffer(inputIndex, 0, sampleSize, 0, 0);// 通知MediaDecode解码刚刚传入的数据 + mediaExtractor.advance();// MediaExtractor移动到下一取样处 + decodeSize += sampleSize; + } + } + outputIndex = mediaDecode.dequeueOutputBuffer(decodeBufferInfo, 10000); + // showLog("decodeOutIndex:" + outputIndex); + ByteBuffer outputBuffer; + byte[] chunkPCM; + while (outputIndex >= 0) {// 每次解码完成的数据不一定能一次吐出 所以用while循环,保证解码器吐出所有数据 + outputBuffer = mediaDecode.getOutputBuffer(outputIndex);// 拿到用于存放PCM数据的Buffer + chunkPCM = new byte[decodeBufferInfo.size];// BufferInfo内定义了此数据块的大小 + outputBuffer.get(chunkPCM);// 将Buffer内的数据取出到字节数组中 + outputBuffer.clear();// 数据取出后一定记得清空此Buffer + // MediaCodec是循环使用这些Buffer的,不清空下次会得到同样的数据 + putPCMData(chunkPCM);// 自己定义的方法,供编码器所在的线程获取数据,下面会贴出代码 + /*try { + pcmBos.write(chunkPCM); // 存放PCM数据 + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + }*/ + mediaDecode.releaseOutputBuffer(outputIndex, false);// 此操作一定要做,不然MediaCodec用完所有的Buffer后 + // 将不能向外输出数据 + outputIndex = mediaDecode.dequeueOutputBuffer(decodeBufferInfo, + 10000);// 再次获取数据,如果没有数据输出则outputIndex=-1 循环结束 + } + } + + /** + * 编码PCM数据 得到{@link #encodeType}格式的音频文件,并保存到{@link #dstPath} + */ + private void dstAudioFormatFromPCM() { + + int inputIndex; + ByteBuffer inputBuffer; + int outputIndex; + ByteBuffer outputBuffer; + byte[] chunkAudio; + int outBitSize; + int outPacketSize; + byte[] chunkPCM; + +// showLog("doEncode"); + for (int i = 0; i < encodeInputBuffers.length-1; i++) { + chunkPCM=getPCMData();//获取解码器所在线程输出的数据 代码后边会贴上 + if (chunkPCM == null) { + break; + } + inputIndex = mediaEncode.dequeueInputBuffer(-1);//同解码器 + inputBuffer = encodeInputBuffers[inputIndex];//同解码器 + inputBuffer.clear();//同解码器 + inputBuffer.limit(chunkPCM.length); + 
inputBuffer.put(chunkPCM);//PCM数据填充给inputBuffer + mediaEncode.queueInputBuffer(inputIndex, 0, chunkPCM.length, 0, 0);//通知编码器 编码 + } + + outputIndex = mediaEncode.dequeueOutputBuffer(encodeBufferInfo, 10000);//同解码器 + while (outputIndex >= 0) {//同解码器 + + outBitSize=encodeBufferInfo.size; + outPacketSize=outBitSize+7;//7为ADTS头部的大小 + outputBuffer = encodeOutputBuffers[outputIndex];//拿到输出Buffer + outputBuffer.position(encodeBufferInfo.offset); + outputBuffer.limit(encodeBufferInfo.offset + outBitSize); + chunkAudio = new byte[outPacketSize]; + addADTStoPacket(chunkAudio,outPacketSize);//添加ADTS 代码后面会贴上 + outputBuffer.get(chunkAudio, 7, outBitSize);//将编码得到的AAC数据 取出到byte[]中 偏移量offset=7 你懂得 + outputBuffer.position(encodeBufferInfo.offset); +// showLog("outPacketSize:" + outPacketSize + " encodeOutBufferRemain:" + outputBuffer.remaining()); + try { + bos.write(chunkAudio,0,chunkAudio.length);//BufferOutputStream 将文件保存到内存卡中 *.aac + } catch (IOException e) { + e.printStackTrace(); + } + + mediaEncode.releaseOutputBuffer(outputIndex,false); + outputIndex = mediaEncode.dequeueOutputBuffer(encodeBufferInfo, 10000); + + } + } + + /** + * 添加ADTS头 + * @param packet + * @param packetLen + */ + private void addADTStoPacket(byte[] packet, int packetLen) { + int profile = 2; // AAC LC + int freqIdx = 4; // 44.1KHz + int chanCfg = 2; // CPE + + +// fill in ADTS data + packet[0] = (byte) 0xFF; + packet[1] = (byte) 0xF9; + packet[2] = (byte) (((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2)); + packet[3] = (byte) (((chanCfg & 3) << 6) + (packetLen >> 11)); + packet[4] = (byte) ((packetLen & 0x7FF) >> 3); + packet[5] = (byte) (((packetLen & 7) << 5) + 0x1F); + packet[6] = (byte) 0xFC; + } + + /** + * 释放资源 + */ + public void release() { + try { + if (bos != null) { + bos.flush(); + } + } catch (IOException e) { + e.printStackTrace(); + }finally { + if (bos != null) { + try { + bos.close(); + } catch (IOException e) { + e.printStackTrace(); + }finally { + bos=null; + } + } + } + + try { 
+ if (fos != null) { + fos.close(); + } + } catch (IOException e) { + e.printStackTrace(); + }finally { + fos=null; + } + + if (mediaEncode != null) { + mediaEncode.stop(); + mediaEncode.release(); + mediaEncode=null; + } + + if (mediaDecode != null) { + mediaDecode.stop(); + mediaDecode.release(); + mediaDecode=null; + } + + if (mediaExtractor != null) { + mediaExtractor.release(); + mediaExtractor=null; + } + + if (onCompleteListener != null) { + onCompleteListener=null; + } + + if (onProgressListener != null) { + onProgressListener=null; + } + showLog("release"); + } + + /** + * 解码线程 + */ + private class DecodeRunnable implements Runnable{ + + @Override + public void run() { + while (!codeOver) { + srcAudioFormatToPCM(); + } + } + } + + /** + * 编码线程 + */ + private class EncodeRunnable implements Runnable { + + @Override + public void run() { + long t=System.currentTimeMillis(); + while (!codeOver || !chunkPCMDataContainer.isEmpty()) { + dstAudioFormatFromPCM(); + } + if (onCompleteListener != null) { + onCompleteListener.completed(); + } + showLog("size:"+fileTotalSize+" decodeSize:"+decodeSize+"time:"+(System.currentTimeMillis()-t)); + } + } + + + /** + * 转码完成回调接口 + */ + public interface OnCompleteListener{ + void completed(); + } + + /** + * 转码进度监听器 + */ + public interface OnProgressListener{ + void progress(); + } + + /** + * 设置转码完成监听器 + * @param onCompleteListener + */ + public void setOnCompleteListener(OnCompleteListener onCompleteListener) { + this.onCompleteListener=onCompleteListener; + } + + public void setOnProgressListener(OnProgressListener onProgressListener) { + this.onProgressListener = onProgressListener; + } + + private void showLog(String msg) { + Log.e("AudioCodec", msg); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/primary/mp3TranslateAAC/AudioDecoder.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/primary/mp3TranslateAAC/AudioDecoder.java new file mode 100644 index 
0000000..1d589f0 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/primary/mp3TranslateAAC/AudioDecoder.java @@ -0,0 +1,225 @@ +package com.aserbao.androidcustomcamera.blocks.mediaCodec.primary.mp3TranslateAAC; + +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaExtractor; +import android.media.MediaFormat; +import android.os.Build; +import android.support.annotation.RequiresApi; +import android.util.Log; + +import java.io.File; +import java.io.FileDescriptor; +import java.io.FileInputStream; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; + +/** + * Created by turbo on 2018/2/9. + */ + +@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN) +public class AudioDecoder { + + private static final String TAG = AudioDecoder.class.getSimpleName(); + private static ExecutorService mExecutorService = Executors.newSingleThreadExecutor(); + private MediaExtractor mMediaExtractor; + private MediaCodec mDecoder; + private static final int TIMEOUT_US = 1000; + private ByteBuffer[] mInputByteBuffers; + private ByteBuffer[] mOutputByteBuffers; + private MediaCodec.BufferInfo mBufferInfo; + private String mMusicPath; + private OnCapturePCMListener mOnCapturePCMListener; + private int mSampleRate = 0; + private boolean eosReceived; + + public AudioDecoder(String musicPath) throws IOException { + if (musicPath == null) { + throw new NullPointerException("musicPath can't be null"); + } + mMusicPath = musicPath; + eosReceived = false; + initDecoder(); + mDecoder.start(); + mExecutorService.execute(new DecodeRunnable()); + } + + private void initDecoder() throws IOException, NullPointerException { + mMediaExtractor = new MediaExtractor(); + File file = new File(mMusicPath); + FileInputStream fis = null; + try { + fis = new FileInputStream(file); + FileDescriptor fd = fis.getFD(); + 
mMediaExtractor.setDataSource(fd); + } catch (Exception e) { + e.printStackTrace(); + } finally { + //Release stuff + mMediaExtractor.release(); + try { + if(fis != null) { + fis.close(); + } + } catch (Exception e){ + e.printStackTrace(); + } + } +// mMediaExtractor.setDataSource(mMusicPath); + int channel = 0; + int numTracks = mMediaExtractor.getTrackCount(); + for (int i = 0; i < numTracks; ++i) { + MediaFormat format = mMediaExtractor.getTrackFormat(i); + String mime = format.getString(MediaFormat.KEY_MIME); + if (mime.startsWith("audio")) { + mMediaExtractor.selectTrack(i); + mDecoder = MediaCodec.createDecoderByType(mime); + if (mime.equals(MediaFormat.MIMETYPE_AUDIO_AAC)) { + // AAC ADTS头部处理 + ByteBuffer csd = format.getByteBuffer("csd-0"); + for (int k = 0; k < csd.capacity(); ++k) { + Log.e(TAG, "csd : " + csd.array()[k]); + } + mSampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE); + channel = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT); + format = makeADTSData(MediaCodecInfo.CodecProfileLevel.AACObjectLC, mSampleRate, channel); + } + } + mDecoder.configure(format, null, null, 0); + break; + } + } + + private MediaFormat makeADTSData(int audioProfile, int sampleRate, int channelConfig) { + MediaFormat format = new MediaFormat(); + format.setString(MediaFormat.KEY_MIME, MediaFormat.MIMETYPE_AUDIO_AAC); + format.setInteger(MediaFormat.KEY_SAMPLE_RATE, sampleRate); + format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, channelConfig); + + int samplingFreq[] = { + 96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, + 16000, 12000, 11025, 8000 + }; + + // Search the Sampling Frequencies + int sampleIndex = -1; + for (int i = 0; i < samplingFreq.length; ++i) { + if (samplingFreq[i] == sampleRate) { + Log.d(TAG, "kSamplingFreq " + samplingFreq[i] + " i : " + i); + sampleIndex = i; + } + } + + if (sampleIndex == -1) { + return null; + } + + ByteBuffer csd = ByteBuffer.allocate(2); + csd.put((byte) ((audioProfile << 3) | (sampleIndex >> 1))); + + 
csd.position(1); + csd.put((byte) ((byte) ((sampleIndex << 7) & 0x80) | (channelConfig << 3))); + csd.flip(); + format.setByteBuffer("csd-0", csd); // add csd-0 + + for (int k = 0; k < csd.capacity(); ++k) { + Log.e(TAG, "csd : " + csd.array()[k]); + } + + return format; + } + + public void decode() { + mInputByteBuffers = mDecoder.getInputBuffers(); + mOutputByteBuffers = mDecoder.getOutputBuffers(); + mBufferInfo = new MediaCodec.BufferInfo(); + + while (!eosReceived) { + int inIndex = mDecoder.dequeueInputBuffer(TIMEOUT_US); + if (inIndex >= 0) { + ByteBuffer buffer = mInputByteBuffers[inIndex]; + int sampleSize = mMediaExtractor.readSampleData(buffer, 0); + if (sampleSize < 0) { + // We shouldn't stop the playback at this point, just pass the EOS + // flag to mDecoder, we will get it again from the + // dequeueOutputBuffer + Log.d(TAG, "InputBuffer BUFFER_FLAG_END_OF_STREAM"); + mDecoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM); + + } else { + mDecoder.queueInputBuffer(inIndex, 0, sampleSize, mMediaExtractor.getSampleTime(), 0); + mMediaExtractor.advance(); + } + + int outIndex = mDecoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_US); + switch (outIndex) { + case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED: + Log.d(TAG, "INFO_OUTPUT_BUFFERS_CHANGED"); + mOutputByteBuffers = mDecoder.getOutputBuffers(); + break; + + case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: + MediaFormat format = mDecoder.getOutputFormat(); + Log.d(TAG, "New format " + format); + + break; + case MediaCodec.INFO_TRY_AGAIN_LATER: + Log.d(TAG, "dequeueOutputBuffer timed out!"); + break; + + default: + ByteBuffer outBuffer = mOutputByteBuffers[outIndex]; + Log.v(TAG, "We can't use this buffer but render it due to the API limit, " + outBuffer); + + final byte[] chunk = new byte[mBufferInfo.size]; + outBuffer.get(chunk); // Read the buffer all at once + outBuffer.clear(); // ** MUST DO!!! 
OTHERWISE THE NEXT TIME YOU GET THIS SAME BUFFER BAD THINGS WILL HAPPEN + + MediaFormat mFormat = mDecoder.getOutputFormat(); + mOnCapturePCMListener.capturePCM(chunk, mFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE), mFormat.getInteger + (MediaFormat.KEY_CHANNEL_COUNT)); + mDecoder.releaseOutputBuffer(outIndex, false); + break; + } + + // All decoded frames have been rendered, we can stop playing now + if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { + Log.d(TAG, "OutputBuffer BUFFER_FLAG_END_OF_STREAM"); + break; + } + } + } + + mDecoder.stop(); + mDecoder.release(); + mDecoder = null; + + mMediaExtractor.release(); + mMediaExtractor = null; + eosReceived = true; + + mExecutorService.shutdown(); + mExecutorService = null; + } + + private class DecodeRunnable implements Runnable { + + @Override + public void run() { + while (!eosReceived) { + decode(); + } + } + } + + public interface OnCapturePCMListener { + void capturePCM(byte[] pcm, int sampleRate, int channel); + } + + public void setOnCapturePCMListener(OnCapturePCMListener OnCapturePCMListener) { + mOnCapturePCMListener = OnCapturePCMListener; + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/primary/mp3TranslateAAC/Mp3TranslateAACActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/primary/mp3TranslateAAC/Mp3TranslateAACActivity.java new file mode 100644 index 0000000..12c61b1 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/primary/mp3TranslateAAC/Mp3TranslateAACActivity.java @@ -0,0 +1,104 @@ +package com.aserbao.androidcustomcamera.blocks.mediaCodec.primary.mp3TranslateAAC; + +import android.media.MediaFormat; +import android.media.MediaPlayer; +import android.os.Bundle; +import android.os.Environment; +import android.support.v7.app.AppCompatActivity; +import android.util.Log; +import android.view.View; +import android.widget.Toast; + +import com.aserbao.androidcustomcamera.R; 
+import com.aserbao.androidcustomcamera.blocks.mediaExtractor.primary.TransAacHandlerPure; + +import java.io.IOException; + +import butterknife.ButterKnife; +import butterknife.OnClick; + +public class Mp3TranslateAACActivity extends AppCompatActivity { + + private static final String TAG = "Mp3TranslateAACActivity"; + private AudioCodec audioCodec; + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setContentView(R.layout.activity_mp3_translate_aac); + ButterKnife.bind(this); + } + + String path = Environment.getExternalStorageDirectory().getAbsolutePath(); + + @OnClick({R.id.start1_btn, R.id.start2_btn,R.id.start3_btn}) + public void onViewClicked(View view) { + switch (view.getId()) { + case R.id.start1_btn: + audioCodec = AudioCodec.newInstance(); + audioCodec.setEncodeType(MediaFormat.MIMETYPE_AUDIO_MPEG); +// audioCodec.setEncodeType(MediaFormat.MIMETYPE_AUDIO_AAC); + audioCodec.setIOPath(path + "/123.aac", path + "/456.mp3"); +// audioCodec.setIOPath(path + "/five.mp3", path + "/codec.aac"); + audioCodec.prepare(); + audioCodec.startAsync(); + audioCodec.setOnCompleteListener(new AudioCodec.OnCompleteListener() { + @Override + public void completed() { + Toast.makeText(Mp3TranslateAACActivity.this, "成功", Toast.LENGTH_SHORT).show(); + audioCodec.release(); + } + }); + break; + case R.id.start2_btn: + + TransAacHandlerPure aacHandlerPure = new TransAacHandlerPure(path + "/five.mp3", path + "/codec"); + aacHandlerPure.setListener(new TransAacHandlerPure.OnProgressListener() { + @Override + public void onStart() { + Log.e(TAG, "onStart: " ); + } + + @Override + public void onProgress(int max, int progress) { + Log.e(TAG, "onProgress: " + progress); + } + + @Override + public void onSuccess() { + Log.e(TAG, "onSuccess: " ); + } + + @Override + public void onFail() { + Log.e(TAG, "onFail: "); + } + }); + aacHandlerPure.start(); + break; + case R.id.start3_btn: + new Thread(new Runnable() { + @Override + 
public void run() { + try { + AudioDecoder audioDecoder = new AudioDecoder(path + "/five.mp3"); + audioDecoder.setOnCapturePCMListener(new AudioDecoder.OnCapturePCMListener() { + @Override + public void capturePCM(byte[] pcm, int sampleRate, int channel) { + Log.e(TAG, "capturePCM: " + pcm + " sampleRate = " + sampleRate + " channel = " + channel); + } + }); + audioDecoder.decode(); + } catch (IOException e) { + e.printStackTrace(); + } + } + }).start(); + break; + } + } + + public void test(){ + MediaPlayer mediaPlayer = new MediaPlayer(); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/recordBaseCamera/RecordBaseCameraActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/recordBaseCamera/RecordBaseCameraActivity.java index 72df1df..05e5a83 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/recordBaseCamera/RecordBaseCameraActivity.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/recordBaseCamera/RecordBaseCameraActivity.java @@ -12,7 +12,7 @@ import com.aserbao.androidcustomcamera.R; import com.aserbao.androidcustomcamera.base.activity.BaseActivity; -import com.aserbao.androidcustomcamera.whole.videoPlayer.VideoPlayerActivity; +import com.aserbao.androidcustomcamera.whole.videoPlayer.VideoViewPlayerActivity; import java.io.IOException; @@ -146,7 +146,7 @@ public void onViewClicked(View view) { break; case R.id.btn_record_base_player: if (mEncoder != null && !TextUtils.isEmpty(mEncoder.getPath())) { - VideoPlayerActivity.launch(RecordBaseCameraActivity.this, mEncoder.getPath()); + VideoViewPlayerActivity.launch(RecordBaseCameraActivity.this, mEncoder.getPath()); } break; } diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/recordCamera/RecordCameraActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/recordCamera/RecordCameraActivity.java index dca93d0..f1295a7 100644 --- 
a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/recordCamera/RecordCameraActivity.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/recordCamera/RecordCameraActivity.java @@ -12,7 +12,7 @@ import com.aserbao.androidcustomcamera.base.activity.BaseActivity; import com.aserbao.androidcustomcamera.blocks.mediaCodec.recordCamera.thread.MediaMuxerThread; import com.aserbao.androidcustomcamera.blocks.mediaCodec.recordCamera.utils.FileUtils; -import com.aserbao.androidcustomcamera.whole.videoPlayer.VideoPlayerActivity; +import com.aserbao.androidcustomcamera.whole.videoPlayer.VideoViewPlayerActivity; import java.io.IOException; import java.util.Vector; @@ -41,7 +41,7 @@ public void onViewClicked(View view) { switch (view.getId()){ case R.id.btn_record_player: Vector videoFileNameIsMp4 = FileUtils.getVideoFileNameIsMp4(FileUtils.VIDEO_PATH); - VideoPlayerActivity.launch(RecordCameraActivity.this,FileUtils.VIDEO_PATH + videoFileNameIsMp4.firstElement()); + VideoViewPlayerActivity.launch(RecordCameraActivity.this,FileUtils.VIDEO_PATH + videoFileNameIsMp4.firstElement()); break; case R.id.btn_record_status: if(mBtnRecordStatus.getText().equals("开始录制")){ @@ -87,7 +87,7 @@ public void onAutoFocus(boolean success, Camera camera) { Camera.Parameters parameters = mCamera.getParameters(); parameters = mCamera.getParameters(); parameters.setPictureFormat(PixelFormat.JPEG); //图片输出格式 -// mParameters.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH);//预览持续发光 + parameters.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH);//预览持续发光 parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);//持续对焦模式 mCamera.setParameters(parameters); mCamera.startPreview(); diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/recordCamera/thread/AudioEncoderThread.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/recordCamera/thread/AudioEncoderThread.java index 8990b75..3e8a14c 100644 --- 
a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/recordCamera/thread/AudioEncoderThread.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/recordCamera/thread/AudioEncoderThread.java @@ -94,7 +94,7 @@ private void startMediaCodec() throws IOException { mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE); mMediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); mMediaCodec.start(); - Log.i(TAG, "prepare finishing"); + Log.i(TAG, "prepareAudio finishing"); prepareAudioRecord(); diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/show/MediaCodecShowOnGlSurfaceView.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/show/MediaCodecShowOnGlSurfaceView.java new file mode 100644 index 0000000..8f5e21b --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/show/MediaCodecShowOnGlSurfaceView.java @@ -0,0 +1,246 @@ +package com.aserbao.androidcustomcamera.blocks.mediaCodec.show; + +import android.content.Intent; +import android.media.MediaCodec; +import android.media.MediaExtractor; +import android.media.MediaFormat; +import android.media.MediaMetadataRetriever; +import android.media.MediaPlayer; +import android.net.Uri; +import android.opengl.GLSurfaceView; +import android.os.Bundle; +import android.os.Message; +import android.os.SystemClock; +import android.support.v7.app.AppCompatActivity; +import android.util.Log; +import android.view.Surface; +import android.view.SurfaceHolder; +import android.view.SurfaceView; +import android.view.View; +import android.widget.Toast; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.base.utils.StaticFinalValues; +import com.aserbao.androidcustomcamera.whole.createVideoByVoice.localEdit.LocalVideoActivity; +import com.aserbao.androidcustomcamera.whole.editVideo.VideoEditActivity; +import 
com.aserbao.androidcustomcamera.whole.editVideo.mediacodec.VideoClipper; +import com.aserbao.androidcustomcamera.whole.editVideo.view.BaseImageView; +import com.aserbao.androidcustomcamera.whole.pickvideo.VideoPickActivity; +import com.aserbao.androidcustomcamera.whole.pickvideo.beans.VideoFile; +import com.aserbao.androidcustomcamera.whole.record.RecorderActivity; +import com.aserbao.androidcustomcamera.whole.record.other.MagicFilterType; +import com.aserbao.androidcustomcamera.whole.videoPlayer.VideoPlayerActivity2; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.ArrayList; + +import butterknife.BindView; +import butterknife.ButterKnife; +import butterknife.OnClick; + +import static com.aserbao.androidcustomcamera.base.utils.StaticFinalValues.MAX_NUMBER; +import static com.aserbao.androidcustomcamera.base.utils.StaticFinalValues.STORAGE_TEMP_VIDEO_PATH; +import static com.aserbao.androidcustomcamera.base.utils.StaticFinalValues.STORAGE_TEMP_VIDEO_PATH1; +import static com.aserbao.androidcustomcamera.whole.pickvideo.BaseActivity.IS_NEED_FOLDER_LIST; +import static com.aserbao.androidcustomcamera.whole.pickvideo.VideoPickActivity.IS_NEED_CAMERA; + +/** + * MediaCodec解码显示到GlSurfaceView上 + */ +public class MediaCodecShowOnGlSurfaceView extends AppCompatActivity implements SurfaceHolder.Callback{ + private static final String TAG = "MediaCodecShowOnGlSurfa"; + @BindView(R.id.mSurface) + SurfaceView mSurfaceView; + + public SurfaceHolder mHolder; + private long mStartTime; + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setContentView(R.layout.activity_media_codec_show_on_gl_surface_view); + ButterKnife.bind(this); + mHolder = mSurfaceView.getHolder(); + mHolder.addCallback(this); + } + + @OnClick({R.id.sel_btn, R.id.decode_show_btn,R.id.detail_video_btn}) + public void onViewClicked(View view) { + switch (view.getId()) { + case R.id.sel_btn: + Intent intent2 = new Intent(this, 
VideoPickActivity.class); + intent2.putExtra(IS_NEED_CAMERA, false); + intent2.putExtra(MAX_NUMBER, 1); + intent2.putExtra(IS_NEED_FOLDER_LIST, true); + startActivityForResult(intent2, StaticFinalValues.REQUEST_CODE_PICK_VIDEO); + break; + case R.id.decode_show_btn: + MediaCodecUtil1 mediaCodecUtil1 = new MediaCodecUtil1(videoFileName, mHolder.getSurface()); + mediaCodecUtil1.start(); + break; + case R.id.detail_video_btn: + new Thread(new Runnable() { + @Override + public void run() { + final String outputPath1 = STORAGE_TEMP_VIDEO_PATH1; + videoClipper(videoFileName1,outputPath1); + final String outputPath = STORAGE_TEMP_VIDEO_PATH; + videoClipper(videoFileName,outputPath); + } + }).start(); + break; + + } + } + +// String videoFileName = "/storage/emulated/0/12345.mp4"; + String videoFileName1 = "/storage/emulated/0/DCIM/Camera/VIDEO_2019122719_06011577444761071.mp4"; + String videoFileName = "/storage/emulated/0/DCIM/Camera/VIDEO_2019122622_50551577371855425.mp4"; + @Override + protected void onActivityResult(int requestCode, int resultCode, Intent data) { + super.onActivityResult(requestCode, resultCode, data); + switch (requestCode) { + case StaticFinalValues.REQUEST_CODE_PICK_VIDEO: + if (resultCode == RESULT_OK) { + ArrayList list = data.getParcelableArrayListExtra(StaticFinalValues.RESULT_PICK_VIDEO); + for (VideoFile file : list) { + videoFileName = file.getPath(); + } + Toast.makeText(this, "视频已选择成功\n" + videoFileName, Toast.LENGTH_SHORT).show(); + break; + } + } + } + + @Override + public void surfaceCreated(SurfaceHolder holder) { + + } + + @Override + public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { + } + + @Override + public void surfaceDestroyed(SurfaceHolder holder) { + + } + + public class MediaCodecUtil1 { + private String mFilePath; + private MediaCodec mMediaCodec; + private MediaExtractor mMediaExtractor; + private Surface mSurface; + private boolean mIsAvailable; + private ByteBuffer[] mInputBuffers; + 
private ByteBuffer[] mOutputBuffers; + + public MediaCodecUtil1(String filePath, Surface surface) { + mFilePath = filePath; + mSurface = surface; + } + + private void init() { + mIsAvailable = false; + mMediaExtractor = new MediaExtractor(); + try { + mMediaExtractor.setDataSource(mFilePath); + int trackCount = mMediaExtractor.getTrackCount(); + for (int i = 0; i < trackCount; i++) { + MediaFormat mediaFormat = mMediaExtractor.getTrackFormat(i); + String mime = mediaFormat.getString(MediaFormat.KEY_MIME); + if (mime.startsWith("video/")) { + mMediaExtractor.selectTrack(i); + mMediaCodec = MediaCodec.createDecoderByType(mime); + mMediaCodec.configure(mediaFormat, mSurface, null, 0); + mIsAvailable = true; + break; + } + } + } catch (IOException e) { + e.printStackTrace(); + } + } + + public void start() { + init(); + if (mIsAvailable) { + mMediaCodec.start(); + mInputBuffers = mMediaCodec.getInputBuffers(); + mOutputBuffers = mMediaCodec.getOutputBuffers(); + new Thread(new EncoderThread()).start(); + } + } + + private class EncoderThread implements Runnable { + @Override + public void run() { + MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo(); //每个缓冲区元数据包括指定相关编解码器(输出)缓冲区中有效数据范围的偏移量和大小。 + long startTime = System.currentTimeMillis(); + while (mIsAvailable) { + int inputBufferIndex = mMediaCodec.dequeueInputBuffer(-1);//获取输入队列中有效数据的索引 + if (inputBufferIndex >= 0) { + ByteBuffer inputBuffer = mInputBuffers[inputBufferIndex]; + inputBuffer.clear(); + int sampleSize = mMediaExtractor.readSampleData(inputBuffer, 0); //检索当前已编码的示例并将其存储到字节缓冲区中,从给定的偏移量开始。 + if (sampleSize > 0) { + mMediaExtractor.advance(); + mMediaCodec.queueInputBuffer(inputBufferIndex, 0, sampleSize, mMediaExtractor.getSampleTime(), 0);// 通知MediaDecode解码刚刚传入的数据 + } + } + + int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);//返回已成功解码的输出缓冲区的索引 + if (outputBufferIndex >= 0) { + long sleepTime = bufferInfo.presentationTimeUs / 1000 - (System.currentTimeMillis() - startTime); + 
if (sleepTime > 0) { + SystemClock.sleep(sleepTime); + } +// ByteBuffer outBuffer = mOutputBuffers[outputBufferIndex]; + mMediaCodec.releaseOutputBuffer(outputBufferIndex, true); + } + } + mMediaExtractor.release(); + mMediaCodec.stop(); + mMediaCodec.release(); + Log.i("==", "播放完成"); + } + } + + public void stop() { + mIsAvailable = false; + } + + } + + public void videoClipper(String videoFileName,final String outputPath){ + mStartTime = System.currentTimeMillis(); + VideoClipper clipper = new VideoClipper(); + clipper.setInputVideoPath(videoFileName); + clipper.setFilterType(MagicFilterType.NONE); + clipper.setOutputVideoPath(outputPath); + clipper.setOnVideoCutFinishListener(new VideoClipper.OnVideoCutFinishListener() { + @Override + public void onFinish() { + Log.e(TAG, "onFinish: 生成完成耗时" + ((System.currentTimeMillis() - mStartTime) / 1000)); + VideoPlayerActivity2.launch(MediaCodecShowOnGlSurfaceView.this,outputPath); + } + + @Override + public void onProgress(float percent) { + Log.e(TAG, "onProgress: " +percent ); + } + }); + try { + final MediaMetadataRetriever mediaMetadata = new MediaMetadataRetriever(); + mediaMetadata.setDataSource(this, Uri.parse(videoFileName)); + int clipDur = Integer.parseInt(mediaMetadata.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)); + Log.e(TAG, "onViewClicked: 时长 " + clipDur); +// int clipDur = 5032000; + clipper.clipVideo(0, clipDur * 1000,new ArrayList(), getResources()); + } catch (IOException e) { + e.printStackTrace(); + } + } + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/MediaExtractorActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/MediaExtractorActivity.java new file mode 100644 index 0000000..0bdf1c0 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/MediaExtractorActivity.java @@ -0,0 +1,177 @@ +package com.aserbao.androidcustomcamera.blocks.mediaExtractor; + +import 
/**
 * Demo activity for MediaExtractor/MediaCodec-based audio processing.
 * Each button triggers one self-contained demo: extracting audio from mp4,
 * stripping audio from a video, decoding audio while visualizing levels,
 * muxing music into a video, PCM playback, AAC recording, and mp3 -> AAC
 * transcoding. Also serves as the success/failure callback for the mux demo.
 */
public class MediaExtractorActivity extends AppCompatActivity implements IDetailCallBackListener {
    private static final String TAG = "MediaExtractorActivity";
    @BindView(R.id.record_and_encoder_mp3)
    Button mRecordAndEncoderMp3;
    @BindView(R.id.record_mp3_stop)
    Button mRecordMp3Stop;
    @BindView(R.id.frequency_view)
    FrequencyView mFrequencyView;
    // Shared by the AAC-playback and MP3-playback demos; each click replaces it.
    private DecoderAudioAAC2PCMPlay decoderAAC;
    // Created lazily when recording starts; nulled again when recording stops.
    private EncoderAudioAAC encoderAudioAAC;
    // Timestamp captured at transcode start; used to log elapsed time on success.
    private long startTime;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_media_extractor);
        ButterKnife.bind(this);
    }

    // Root of external storage; all demo media files are expected to live there.
    String path = Environment.getExternalStorageDirectory().getAbsolutePath();

    @OnClick({R.id.audio_extractor_data, R.id.extractor_mp3_from_mp4, R.id.extractor_no_voice_mp4_from_mp4, R.id.extractor_video_and_audio, R.id.exchange_video_and_audio,
            R.id.decoder_aac_and_player, R.id.decoder_mp3_and_player,
            R.id.record_and_encoder_mp3, R.id.record_mp3_stop, R.id.mp3_translate_aac_btn})
    public void onViewClicked(View view) {
        switch (view.getId()) {
            case R.id.audio_extractor_data:
                // Read audio sample data from an mp3 via the plain MediaExtractor API.
                AMediaExtractorOfficial.mediaExtractorDecoderAudio(path + "/dj_dance.mp3");
                break;
            case R.id.extractor_mp3_from_mp4:
                // Extract the audio track of an mp4 into a standalone audio file.
//                new DecoderMp3FromMp4(path + "/123.mp4", path + "/out_aserbao.mp3", new ICallBackListener() {
//                new DecoderMp3FromMp4(path + "/123.mp4", path + "/out_aserbao.pcm", new ICallBackListener() {
                new DecoderMp3FromMp4(path + "/123.mp4", path + "/out_aserbao.mp3", new ICallBackListener() {
                    @Override
                    public void success() {
                        Toast.makeText(MediaExtractorActivity.this, "成功", Toast.LENGTH_SHORT).show();
                    }

                    @Override
                    public void failed(Exception e) {
                        Toast.makeText(MediaExtractorActivity.this, "失败" + e.toString(), Toast.LENGTH_SHORT).show();
                    }
                }).start();
                break;
            case R.id.extractor_no_voice_mp4_from_mp4:
                // Produce a silent copy of the video (audio track removed).
                // NOTE(review): output path has no file extension — confirm intended.
                new DecoderNoVoiceMp4FromMp4(path + "/lan.mp4", path + "/out_aserbao", new ICallBackListener() {
                    @Override
                    public void success() {
                        Toast.makeText(MediaExtractorActivity.this, "成功", Toast.LENGTH_SHORT).show();
                    }

                    @Override
                    public void failed(Exception e) {
                        Toast.makeText(MediaExtractorActivity.this, "失败" + e.toString(), Toast.LENGTH_SHORT).show();
                    }
                }).start();
                break;
            case R.id.extractor_video_and_audio:
                // Decode an mp3 and feed its per-frame frequency value into the chart view.
                String audioMp3Path1 = Environment.getExternalStorageDirectory().getAbsolutePath() + "/five.mp3";
//                String audioMp3Path1 = Environment.getExternalStorageDirectory().getAbsolutePath() + "/aac.aac";
//                String audioMp3Path1 = Environment.getExternalStorageDirectory().getAbsolutePath() + "/own.m4a";
                DecoderAudioAndGetDb decoderAudioAndGetDb = new DecoderAudioAndGetDb();
                decoderAudioAndGetDb.start(audioMp3Path1, MIMETYPE_AUDIO_MPEG, new DecoderAudioAndGetDb.DbCallBackListener() {
                    @Override
                    public void cuurentFrequenty(int cuurentFrequenty, double volume) {
                        // Scale down before plotting so values fit the view's height.
                        mFrequencyView.addInt(cuurentFrequenty / 100);
                    }
                });
//                decoderAudioAndGetDb.start(audioMp3Path1, MIMETYPE_AUDIO_MPEG);
                break;
            case R.id.exchange_video_and_audio:
                // Mux a background-music track into an existing video; this activity
                // receives the success()/failed() callback below.
//                CombineTwoVideos.combineTwoVideos(path + "/aserbao.mp4", 0, path + "/lan.mp4", new File(path + "/aserbao.mp3"), this);
                String inputVideo = "/storage/emulated/0/douyin.mp4";
                String outputVideo = "/storage/emulated/0/douyinOut.mp4";
                String inputMusic = "/storage/emulated/0/pg/.bgm/40e613e5e3695ab44b4f31e25088d7ac";
                CombineVideoAndMusic.combineTwoVideos(inputMusic, 0, inputVideo, new File(outputVideo), this);
                break;
            case R.id.decoder_aac_and_player:
                // Decode an AAC file to PCM and play it back.
                String audioPath = Environment.getExternalStorageDirectory().getAbsolutePath() + "/aac.aac";
                decoderAAC = new DecoderAudioAAC2PCMPlay();
                decoderAAC.start(audioPath, MIMETYPE_AUDIO_AAC);
                break;
            case R.id.decoder_mp3_and_player:
                // Decode an mp3 file to PCM and play it back.
                String audioMp3Path = Environment.getExternalStorageDirectory().getAbsolutePath() + "/five.mp3";
                decoderAAC = new DecoderAudioAAC2PCMPlay();
                decoderAAC.start(audioMp3Path, MIMETYPE_AUDIO_MPEG);
                break;
            case R.id.record_and_encoder_mp3:
                // Start microphone capture encoded to AAC; swap the two buttons.
                mRecordMp3Stop.setVisibility(View.VISIBLE);
                mRecordAndEncoderMp3.setVisibility(View.GONE);
                String encoderAACPath = Environment.getExternalStorageDirectory().getAbsolutePath() + "/encoder_aac.aac";
                if (encoderAudioAAC == null) {
                    encoderAudioAAC = new EncoderAudioAAC();
                }
                encoderAudioAAC.start(encoderAACPath);
                break;
            case R.id.record_mp3_stop:
                // Stop recording and restore the button state.
                mRecordAndEncoderMp3.setVisibility(View.VISIBLE);
                mRecordMp3Stop.setVisibility(View.GONE);
                if (encoderAudioAAC != null) {
                    encoderAudioAAC.stop();
                    encoderAudioAAC = null;
                }
                break;
            case R.id.mp3_translate_aac_btn:
                // Transcode an mp3 to AAC and log the elapsed time via the listener.
                TransAacHandlerPure aacHandlerPure = new TransAacHandlerPure(path + "/bell.mp3", path + "/codec.aac");
                aacHandlerPure.setListener(new TransAacHandlerPure.OnProgressListener() {
                    @Override
                    public void onStart() {
                        startTime = System.currentTimeMillis();
                        Log.e(TAG, "onStart: " + startTime);
                    }

                    @Override
                    public void onProgress(int max, int progress) {
                        Log.e(TAG, "onProgress: ");
                    }

                    @Override
                    public void onSuccess() {
                        float v = (System.currentTimeMillis() - startTime) / (float) 1000;
                        Log.d(TAG, "onSuccess() called 一共耗时 : " + v + "s"); // transcoding a ~10s mp3 takes roughly 2.5s
                    }

                    @Override
                    public void onFail() {
                        Log.d(TAG, "onFail() called");
                    }
                });
                aacHandlerPure.start();
                break;
        }
    }

    // IDetailCallBackListener callback: mux finished successfully.
    @Override
    public void success() {
        Toast.makeText(MediaExtractorActivity.this, "成功", Toast.LENGTH_SHORT).show();
    }

    // IDetailCallBackListener callback: mux failed with the given exception.
    @Override
    public void failed(Exception e) {
        Toast.makeText(MediaExtractorActivity.this, e.toString(), Toast.LENGTH_SHORT).show();
    }
}
com.aserbao.androidcustomcamera.base.interfaces.IDetailCallBackListener; + +import java.io.File; +import java.io.IOException; +import java.nio.ByteBuffer; + +/** + * 功能: 合成视频1的音频和视频2的图像 + * + * @author aserbao + * @date : On 2019/1/3 6:12 PM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.blocks.mediaCodec.primary.mp3TranslateAAC + * @Copyright: 个人版权所有 + */ +public class CombineTwoVideos { + private static final String TAG = "CombineTwoVideos"; + /** + * 合成视频1的音频和视频2的图像 + * + * @param audioVideoPath 提供音频的视频 + * @param audioStartTime 音频的开始时间 + * @param frameVideoPath 提供图像的视频 + * @param combinedVideoOutFile 合成后的文件 + */ + public static void combineTwoVideos(String audioVideoPath, + long audioStartTime, + String frameVideoPath, + File combinedVideoOutFile, + IDetailCallBackListener iDetailCallBackListener) { + MediaExtractor audioVideoExtractor = new MediaExtractor(); + int mainAudioExtractorTrackIndex = -1; //提供音频的视频的音频轨(有点拗口) + int mainAudioMuxerTrackIndex = -1; //合成后的视频的音频轨 + int mainAudioMaxInputSize = 0; //能获取的音频的最大值 + + MediaExtractor frameVideoExtractor = new MediaExtractor(); + int frameExtractorTrackIndex = -1; //视频轨 + int frameMuxerTrackIndex = -1; //合成后的视频的视频轨 + int frameMaxInputSize = 0; //能获取的视频的最大值 + int frameRate = 0; //视频的帧率 + long frameDuration = 0; + + MediaMuxer muxer = null; //用于合成音频与视频 + + try { + muxer = new MediaMuxer(combinedVideoOutFile.getPath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); + + audioVideoExtractor.setDataSource(audioVideoPath); //设置视频源 + //音轨信息 + int audioTrackCount = audioVideoExtractor.getTrackCount(); //获取数据源的轨道数 + //在此循环轨道数,目的是找到我们想要的音频轨 + for (int i = 0; i < audioTrackCount; i++) { + MediaFormat format = audioVideoExtractor.getTrackFormat(i); //得到指定索引的记录格式 + String mimeType = format.getString(MediaFormat.KEY_MIME); //主要描述mime类型的媒体格式 + if (mimeType.startsWith("audio/")) { //找到音轨 + mainAudioExtractorTrackIndex = i; + mainAudioMuxerTrackIndex = muxer.addTrack(format); 
//将音轨添加到MediaMuxer,并返回新的轨道 + mainAudioMaxInputSize = format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE); //得到能获取的有关音频的最大值 +// mainAudioDuration = format.getLong(MediaFormat.KEY_DURATION); + } + } + + //图像信息 + frameVideoExtractor.setDataSource(frameVideoPath); //设置视频源 + int trackCount = frameVideoExtractor.getTrackCount(); //获取数据源的轨道数 + //在此循环轨道数,目的是找到我们想要的视频轨 + for (int i = 0; i < trackCount; i++) { + MediaFormat format = frameVideoExtractor.getTrackFormat(i); //得到指定索引的媒体格式 + String mimeType = format.getString(MediaFormat.KEY_MIME); //主要描述mime类型的媒体格式 + if (mimeType.startsWith("video/")) { //找到视频轨 + frameExtractorTrackIndex = i; + frameMuxerTrackIndex = muxer.addTrack(format); //将视频轨添加到MediaMuxer,并返回新的轨道 + frameMaxInputSize = format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE); //得到能获取的有关视频的最大值 + frameRate = format.getInteger(MediaFormat.KEY_FRAME_RATE); //获取视频的帧率 + frameDuration = format.getLong(MediaFormat.KEY_DURATION); //获取视频时长 + } + } + + muxer.start(); //开始合成 + + audioVideoExtractor.selectTrack(mainAudioExtractorTrackIndex); //将提供音频的视频选择到音轨上 + MediaCodec.BufferInfo audioBufferInfo = new MediaCodec.BufferInfo(); + ByteBuffer audioByteBuffer = ByteBuffer.allocate(mainAudioMaxInputSize); + while (true) { + int readSampleSize = audioVideoExtractor.readSampleData(audioByteBuffer, 0); //检索当前编码的样本并将其存储在字节缓冲区中 + if (readSampleSize < 0) { //如果没有可获取的样本则退出循环 + audioVideoExtractor.unselectTrack(mainAudioExtractorTrackIndex); + break; + } + + long sampleTime = audioVideoExtractor.getSampleTime(); //获取当前展示样本的时间(单位毫秒) + + if (sampleTime < audioStartTime) { //如果样本时间小于我们想要的开始时间就快进 + audioVideoExtractor.advance(); //推进到下一个样本,类似快进 + continue; + } + + if (sampleTime > audioStartTime + frameDuration) { //如果样本时间大于开始时间+视频时长,就退出循环 + break; + } + //设置样本编码信息 + audioBufferInfo.size = readSampleSize; + audioBufferInfo.offset = 0; + audioBufferInfo.flags = audioVideoExtractor.getSampleFlags(); + audioBufferInfo.presentationTimeUs = sampleTime - audioStartTime; + + 
muxer.writeSampleData(mainAudioMuxerTrackIndex, audioByteBuffer, audioBufferInfo); //将样本写入 + audioVideoExtractor.advance(); //推进到下一个样本,类似快进 + } + + frameVideoExtractor.selectTrack(frameExtractorTrackIndex); //将提供视频图像的视频选择到视频轨上 + MediaCodec.BufferInfo videoBufferInfo = new MediaCodec.BufferInfo(); + ByteBuffer videoByteBuffer = ByteBuffer.allocate(frameMaxInputSize); + while (true) { + int readSampleSize = frameVideoExtractor.readSampleData(videoByteBuffer, 0); //检索当前编码的样本并将其存储在字节缓冲区中 + if (readSampleSize < 0) { //如果没有可获取的样本则退出循环 + frameVideoExtractor.unselectTrack(frameExtractorTrackIndex); + break; + } + //设置样本编码信息 + videoBufferInfo.size = readSampleSize; + videoBufferInfo.offset = 0; + videoBufferInfo.flags = frameVideoExtractor.getSampleFlags(); + videoBufferInfo.presentationTimeUs += 1000 * 1000 / frameRate; + + muxer.writeSampleData(frameMuxerTrackIndex, videoByteBuffer, videoBufferInfo); //将样本写入 + frameVideoExtractor.advance(); //推进到下一个样本,类似快进 + } + } catch (IOException e) { + iDetailCallBackListener.failed(e); + Log.e(TAG, "combineTwoVideos: ", e); + } finally { + //释放资源 + audioVideoExtractor.release(); + frameVideoExtractor.release(); + if (muxer != null) { + muxer.release(); + } + iDetailCallBackListener.success(); + Log.e(TAG, "combineTwoVideos: " ); + } + } + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/combineTwoVideo/CombineVideoAndMusic.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/combineTwoVideo/CombineVideoAndMusic.java new file mode 100644 index 0000000..0ab90a5 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/combineTwoVideo/CombineVideoAndMusic.java @@ -0,0 +1,152 @@ +package com.aserbao.androidcustomcamera.blocks.mediaExtractor.combineTwoVideo; + +import android.media.MediaCodec; +import android.media.MediaExtractor; +import android.media.MediaFormat; +import android.media.MediaMuxer; +import android.util.Log; + +import 
com.aserbao.androidcustomcamera.base.interfaces.IDetailCallBackListener; + +import java.io.File; +import java.io.IOException; +import java.nio.ByteBuffer; + +/** + * 功能: 替换视频1中的视频 + * @author aserbao + * @date : On 2019/1/3 6:12 PM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.blocks.mediaCodec.primary.mp3TranslateAAC + * @Copyright: 个人版权所有 + */ +public class CombineVideoAndMusic { + private static final String TAG = "CombineTwoVideos"; + /** + * 合成视频1的音频和视频2的图像 + * + * @param audioVideoPath 提供音频的视频 + * @param audioStartTime 音频的开始时间 + * @param frameVideoPath 提供图像的视频 + * @param combinedVideoOutFile 合成后的文件 + */ + public static void combineTwoVideos(String audioVideoPath, + long audioStartTime, + String frameVideoPath, + File combinedVideoOutFile, + IDetailCallBackListener iDetailCallBackListener) { + MediaExtractor audioVideoExtractor = new MediaExtractor(); + int mainAudioExtractorTrackIndex = -1; //提供音频的视频的音频轨(有点拗口) + int mainAudioMuxerTrackIndex = -1; //合成后的视频的音频轨 + int mainAudioMaxInputSize = 0; //能获取的音频的最大值 + + MediaExtractor frameVideoExtractor = new MediaExtractor(); + int frameExtractorTrackIndex = -1; //视频轨 + int frameMuxerTrackIndex = -1; //合成后的视频的视频轨 + int frameMaxInputSize = 0; //能获取的视频的最大值 + int frameRate = 0; //视频的帧率 + long frameDuration = 0; + + MediaMuxer muxer = null; //用于合成音频与视频 + + try { + muxer = new MediaMuxer(combinedVideoOutFile.getPath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); + + audioVideoExtractor.setDataSource(audioVideoPath); //设置视频源 + //音轨信息 + int audioTrackCount = audioVideoExtractor.getTrackCount(); //获取数据源的轨道数 + //在此循环轨道数,目的是找到我们想要的音频轨 + for (int i = 0; i < audioTrackCount; i++) { + MediaFormat format = audioVideoExtractor.getTrackFormat(i); //得到指定索引的记录格式 + String mimeType = format.getString(MediaFormat.KEY_MIME); //主要描述mime类型的媒体格式 + if (mimeType.startsWith("audio/")) { //找到音轨 + mainAudioExtractorTrackIndex = i; + mainAudioMuxerTrackIndex = muxer.addTrack(format); 
//将音轨添加到MediaMuxer,并返回新的轨道 + mainAudioMaxInputSize = format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE); //得到能获取的有关音频的最大值 +// mainAudioDuration = format.getLong(MediaFormat.KEY_DURATION); + } + } + + //图像信息 + frameVideoExtractor.setDataSource(frameVideoPath); //设置视频源 + int trackCount = frameVideoExtractor.getTrackCount(); //获取数据源的轨道数 + //在此循环轨道数,目的是找到我们想要的视频轨 + for (int i = 0; i < trackCount; i++) { + MediaFormat format = frameVideoExtractor.getTrackFormat(i); //得到指定索引的媒体格式 + String mimeType = format.getString(MediaFormat.KEY_MIME); //主要描述mime类型的媒体格式 + if (mimeType.startsWith("video/")) { //找到视频轨 + frameExtractorTrackIndex = i; + frameMuxerTrackIndex = muxer.addTrack(format); //将视频轨添加到MediaMuxer,并返回新的轨道 + frameMaxInputSize = format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE); //得到能获取的有关视频的最大值 + frameRate = format.getInteger(MediaFormat.KEY_FRAME_RATE); //获取视频的帧率 + frameDuration = format.getLong(MediaFormat.KEY_DURATION); //获取视频时长 + } + } + + muxer.start(); //开始合成 + + audioVideoExtractor.selectTrack(mainAudioExtractorTrackIndex); //将提供音频的视频选择到音轨上 + MediaCodec.BufferInfo audioBufferInfo = new MediaCodec.BufferInfo(); + ByteBuffer audioByteBuffer = ByteBuffer.allocate(mainAudioMaxInputSize); + while (true) { + int readSampleSize = audioVideoExtractor.readSampleData(audioByteBuffer, 0); //检索当前编码的样本并将其存储在字节缓冲区中 + if (readSampleSize < 0) { //如果没有可获取的样本则退出循环 + audioVideoExtractor.unselectTrack(mainAudioExtractorTrackIndex); + break; + } + + long sampleTime = audioVideoExtractor.getSampleTime(); //获取当前展示样本的时间(单位毫秒) + + if (sampleTime < audioStartTime) { //如果样本时间小于我们想要的开始时间就快进 + audioVideoExtractor.advance(); //推进到下一个样本,类似快进 + continue; + } + + if (sampleTime > audioStartTime + frameDuration) { //如果样本时间大于开始时间+视频时长,就退出循环 + break; + } + //设置样本编码信息 + audioBufferInfo.size = readSampleSize; + audioBufferInfo.offset = 0; + audioBufferInfo.flags = audioVideoExtractor.getSampleFlags(); + audioBufferInfo.presentationTimeUs = sampleTime - audioStartTime; + + 
muxer.writeSampleData(mainAudioMuxerTrackIndex, audioByteBuffer, audioBufferInfo); //将样本写入 + audioVideoExtractor.advance(); //推进到下一个样本,类似快进 + } + + frameVideoExtractor.selectTrack(frameExtractorTrackIndex); //将提供视频图像的视频选择到视频轨上 + MediaCodec.BufferInfo videoBufferInfo = new MediaCodec.BufferInfo(); + ByteBuffer videoByteBuffer = ByteBuffer.allocate(frameMaxInputSize); + while (true) { + int readSampleSize = frameVideoExtractor.readSampleData(videoByteBuffer, 0); //检索当前编码的样本并将其存储在字节缓冲区中 + if (readSampleSize < 0) { //如果没有可获取的样本则退出循环 + frameVideoExtractor.unselectTrack(frameExtractorTrackIndex); + break; + } + //设置样本编码信息 + videoBufferInfo.size = readSampleSize; + videoBufferInfo.offset = 0; + videoBufferInfo.flags = frameVideoExtractor.getSampleFlags(); + videoBufferInfo.presentationTimeUs += 1000 * 1000 / frameRate; + + muxer.writeSampleData(frameMuxerTrackIndex, videoByteBuffer, videoBufferInfo); //将样本写入 + frameVideoExtractor.advance(); //推进到下一个样本,类似快进 + } + } catch (IOException e) { + iDetailCallBackListener.failed(e); + Log.e(TAG, "combineTwoVideos: ", e); + } finally { + //释放资源 + audioVideoExtractor.release(); + frameVideoExtractor.release(); + if (muxer != null) { + muxer.release(); + } + iDetailCallBackListener.success(); + Log.e(TAG, "combineTwoVideos: " ); + } + } + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/primary/FrequencyView.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/primary/FrequencyView.java new file mode 100644 index 0000000..a2fffee --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/primary/FrequencyView.java @@ -0,0 +1,60 @@ +package com.aserbao.androidcustomcamera.blocks.mediaExtractor.primary; + +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Paint; +import android.support.annotation.Nullable; +import android.util.AttributeSet; +import android.view.View; + 
+import java.util.LinkedList; + +/** + * 功能: + * + * @author aserbao + * @date : On 2019/1/5 4:09 PM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.blocks.MediaExtractor.primary + * @Copyright: 个人版权所有 + */ +public class FrequencyView extends View{ + + private Paint paint; + + public FrequencyView(Context context) { + this(context,null); + } + + public FrequencyView(Context context, @Nullable AttributeSet attrs) { + this(context, attrs,0); + } + + public FrequencyView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) { + super(context, attrs, defStyleAttr); + init(); + } + + private void init() { + paint = new Paint(); + paint.setStrokeWidth(1); + paint.setColor(Color.BLACK); + paint.setAntiAlias(true); + paint.setStyle(Paint.Style.FILL); + } + + private LinkedList mlist = new LinkedList<>(); + public void addInt(int i){ + mlist.add(i); + invalidate(); + } + @Override + protected void onDraw(Canvas canvas) { + super.onDraw(canvas); + for (int i = 0; i < mlist.size(); i++) { + canvas.drawLine(i,0,i+ 1,mlist.get(i),paint); + } + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/primary/TransAacHandlerPure.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/primary/TransAacHandlerPure.java new file mode 100644 index 0000000..65788dc --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/primary/TransAacHandlerPure.java @@ -0,0 +1,495 @@ +package com.aserbao.androidcustomcamera.blocks.mediaExtractor.primary; + +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaExtractor; +import android.media.MediaFormat; +import android.media.MediaPlayer; +import android.os.Build; +import android.support.annotation.RequiresApi; +import android.text.TextUtils; +import android.util.Log; + +import java.io.DataOutputStream; +import java.io.File; +import 
/**
 * Transcodes an MP3 file into an AAC (ADTS) stream using two MediaCodec threads:
 * a {@link DecodeTask} that decodes MP3 to PCM and queues raw frames, and an
 * {@link EncodeTask} (spawned once the decoded format is known) that encodes the
 * queued PCM to AAC and writes it to {@code outFile} with ADTS headers.
 * An optional [rangeStart, rangeEnd] window (ms) clips the source.
 */
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
public class TransAacHandlerPure {
    private String srcFile;                 // input MP3 path
    private String outFile;                 // output AAC path (a ".pcm" sibling is also written)
    private long rangeStart = -1;           // clip start in ms; -1 means "from the beginning"
    private long rangeEnd = -1;             // clip end in ms; -1 means "to the end"
    private OnProgressListener listener;    // progress/result callbacks


    public TransAacHandlerPure(String srcFile, String outFile) {
        this(srcFile, outFile, null);
    }

    public TransAacHandlerPure(String srcFile, String outFile, OnProgressListener listener) {
        this(srcFile, outFile, -1, -1, listener);
    }

    public TransAacHandlerPure(String srcFile, String outFile, long rangeStart, long rangeEnd, OnProgressListener listener) {
        this.srcFile = srcFile;
        this.outFile = outFile;
        this.rangeStart = rangeStart;
        this.rangeEnd = rangeEnd;
        this.listener = listener;
    }

    /** Starts the transcode: spawns the decode thread (which itself spawns the encode thread). */
    public void start() {
        DecodeTask task = new DecodeTask(srcFile, outFile, listener);
        task.setRangeTime(rangeStart, rangeEnd);
        new Thread(task).start();
    }

    public void setRangeTime(long rangeStart, long rangeEnd) {
        this.rangeStart = rangeStart;
        this.rangeEnd = rangeEnd;
    }


    public void setListener(OnProgressListener listener) {
        this.listener = listener;
    }

    /**
     * Decoder side: extracts the first audio track of the source, decodes it to
     * PCM and hands raw frames to the encoder through a bounded queue.
     */
    private static class DecodeTask implements Runnable, IDataObtain {
        private static final long TIME_OUT = 5000;   // codec dequeue timeout (us)
        private Queue mRawQueue;                     // PCM frames awaiting encoding
        private MediaExtractor extractor;
        private boolean isFinish = false;            // set when decoding has fully completed
        private String srcFile;
        private MediaCodec codec;
        private String outFile;
        private OnProgressListener listener;
        private long rangeStart;                     // clip start (ms)
        private long rangeEnd;                       // clip end (ms)
        private int duration = 0;                    // source duration (ms), for progress reporting
        private OutputStream mOutput;                // debug PCM dump; NOTE(review): never closed

        public void setRangeTime(long rangeStart, long rangeEnd) {
            this.rangeStart = rangeStart;
            this.rangeEnd = rangeEnd;
        }

        public DecodeTask(String srcFile, String outFile, OnProgressListener listener) {
            this.srcFile = srcFile;
            this.outFile = outFile;
            this.listener = listener;
            mRawQueue = new LinkedBlockingQueue<>();
        }

        // Enqueues a decoded PCM frame, spin-waiting while the queue holds more
        // than 10 frames so the decoder cannot run far ahead of the encoder.
        private void pushAvFrame(byte[] frame) {
            if (frame != null) {
                int len = mRawQueue.size();
                while (len > 10) {
                    try {
                        Thread.sleep(10);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                    len = mRawQueue.size();
                }
                synchronized (mRawQueue) {
                    mRawQueue.offer(frame);
                }
            }
        }


        @Override
        public void run() {
            TransAacHandlerPure.logMsg("decodec run");
            if (listener != null) {
                listener.onStart();
            }
            boolean isPrepare = false;
            try {
                prepare();
                isPrepare = true;
            } catch (IOException e) {
                e.printStackTrace();
            }
            TransAacHandlerPure.logMsg("decodec isPrepare " + isPrepare);
            if (isPrepare) {
                decode();
            }
            release();
            if (!isPrepare && listener != null) {
                listener.onFail();
            }
            // Signals the encoder (via IDataObtain.isFinish) that no more frames will arrive.
            isFinish = true;
        }

        private void release() {
            if (extractor != null) {
                extractor.release();
                extractor = null;
            }
            if (codec != null) {
                codec.stop();
                codec.release();
                codec = null;
            }
        }


        /**
         * Selects the first audio track, determines the duration (falling back to
         * MediaPlayer when the format lacks KEY_DURATION), and starts the decoder.
         *
         * @throws IOException if the source cannot be opened or the codec created
         */
        private void prepare() throws IOException {
            extractor = new MediaExtractor();
            extractor.setDataSource(srcFile);
            int numTracks = extractor.getTrackCount();
            for (int i = 0; i < numTracks; i++) {
                MediaFormat format = extractor.getTrackFormat(i);
                String mine = format.getString(MediaFormat.KEY_MIME);
                if (!TextUtils.isEmpty(mine) && mine.startsWith("audio")) {
                    extractor.selectTrack(i);
                    try {
                        duration = format.getInteger(MediaFormat.KEY_DURATION) / 1000;
                    } catch (Exception e) {
                        e.printStackTrace();
                        // Fallback: let MediaPlayer report the duration instead.
                        MediaPlayer mediaPlayer = new MediaPlayer();
                        mediaPlayer.setDataSource(srcFile);
                        mediaPlayer.prepare();
                        duration = mediaPlayer.getDuration();
                        mediaPlayer.release();
                    }
                    codec = MediaCodec.createDecoderByType(mine);
                    codec.configure(format, null, null, 0);
                    codec.start();
                    TransAacHandlerPure.logMsg("New decode codec start:" + format.toString());
                    break;
                }
            }
            createFile(outFile + ".pcm", true); // debug: also dump the raw PCM next to the AAC output
            mOutput = new DataOutputStream(new FileOutputStream(outFile + ".pcm"));
        }

        long last; // timestamp of the previous decoded frame, for per-frame timing logs

        // Main decode loop: feed encoded samples in, drain PCM out, push PCM to
        // the queue and report progress until end of stream.
        private void decode() {
            ByteBuffer[] inputBuffers = codec.getInputBuffers();
            ByteBuffer[] outputBuffers = codec.getOutputBuffers();
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            TransAacHandlerPure.logMsg("loopDecode start");
            if (rangeStart > 0) { // if a clip range is set, seek to its start first
                extractor.seekTo(rangeStart * 1000, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
            }
            boolean isEOS = false;
            while (true) {
                long timestamp = 0;
                if (!isEOS) {
                    int inIndex = codec.dequeueInputBuffer(TIME_OUT);
                    if (inIndex >= 0) {
                        ByteBuffer buffer = inputBuffers[inIndex];
                        int sampleSize = extractor.readSampleData(buffer, 0);
                        long timestampTemp = extractor.getSampleTime();
                        timestamp = timestampTemp / 1000;
                        TransAacHandlerPure.logMsg("loopDecode readSampleData end sampleSize " + sampleSize + " buffer.capacity()=" + buffer.capacity());
                        TransAacHandlerPure.logMsg("loopDecode readSampleData end timestamp" + timestamp);
                        if (rangeEnd > 0 && timestamp > rangeEnd) {
                            // Past the clip end: pretend the stream ended here.
                            sampleSize = -1;
                        }
                        if (sampleSize <= 0) {
                            codec.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                            isEOS = true;
                        } else {
                            codec.queueInputBuffer(inIndex, 0, sampleSize, timestampTemp, 0);
                            extractor.advance();
                        }
                    }
                }
                int outIndex = codec.dequeueOutputBuffer(info, TIME_OUT);
//                TransAacHandlerPure.logMsg(" switch (outIndex)");
                switch (outIndex) {
                    case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                        outputBuffers = codec.getOutputBuffers();
                        TransAacHandlerPure.logMsg("dequeueOutputBuffer INFO_OUTPUT_BUFFERS_CHANGED!");
                        break;
                    case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                        MediaFormat mf = codec.getOutputFormat();
                        // Spawn the encoder thread now that the decoded PCM format is known.
                        EncodeTask encodeTask = new EncodeTask(outFile, this, listener);
                        int sampleRate = mf.getInteger(MediaFormat.KEY_SAMPLE_RATE);
                        int pcmEncoding = mf.getInteger(MediaFormat.KEY_PCM_ENCODING);
                        int channelCount = mf.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
                        encodeTask.setAudioParams(sampleRate, pcmEncoding, channelCount);
                        new Thread(encodeTask).start();
                        TransAacHandlerPure.logMsg("New format " + mf.toString());
                        break;
                    case MediaCodec.INFO_TRY_AGAIN_LATER:
                        TransAacHandlerPure.logMsg("dequeueOutputBuffer timed out!");
                        break;
                    default:
                        if (last == 0) {
                            last = System.currentTimeMillis();
                        }
                        long now = System.currentTimeMillis();
                        TransAacHandlerPure.logMsg("解码时间:" + (now - last) + " info.size " + info.size);
                        last = now;
                        ByteBuffer buffer = outputBuffers[outIndex];
                        byte[] outData = new byte[info.size];
                        buffer.get(outData, 0, info.size);
                        // NOTE(review): render flag is true although no surface was
                        // configured — confirm false was intended for audio.
                        codec.releaseOutputBuffer(outIndex, true);
                        try {
                            mOutput.write(outData);
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                        pushAvFrame(outData);
                        if (listener != null) {
                            listener.onProgress(rangeEnd > 0 ? (int) rangeEnd : duration, rangeStart > 0 ? (int) (timestamp - rangeStart) : (int) timestamp);
                        }
                        break;
                }
                // All decoded frames have been rendered, we can stop playing now
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    TransAacHandlerPure.logMsg("OutputBuffer BUFFER_FLAG_END_OF_STREAM");
                    break;
                }
            }
        }


        // IDataObtain: hand one queued PCM frame to the encoder, or null if empty.
        @Override
        public byte[] getRawFrame() {
            int len = mRawQueue.size();
            if (len > 0) {
                synchronized (mRawQueue) {
                    return mRawQueue.poll();
                }
            }
            return null;
        }

        // IDataObtain: true once decoding is done and no more frames will be queued.
        @Override
        public boolean isFinish() {
            return isFinish;
        }
    }

    private static void logMsg(String msg) {
        Log.d(TransAacHandlerPure.class.getSimpleName(), msg);
    }

    /**
     * Encoder side: pulls PCM frames from the decoder via {@link IDataObtain},
     * encodes them to AAC and writes ADTS-framed packets to the output file.
     */
    private static class EncodeTask implements Runnable {
        private static final long TIME_OUT = 5000; // codec dequeue timeout (us)
        private IDataObtain obtain;                // source of decoded PCM frames
        private String outFile;
        private MediaCodec encoder;
        private OutputStream mOutput;              // AAC output stream
        private OnProgressListener listener;
        private long last;                         // previous frame time, for timing logs
        private int sampleRate;                    // PCM params captured from the decoder's output format
        private int pcmEncoding;
        private int channelCount;

        public EncodeTask(String outFile, IDataObtain obtain, OnProgressListener listener) {
            this.obtain = obtain;
            this.outFile = outFile;
            this.listener = listener;
        }

        /** Must be called before the task runs; values come from the decoder's output format. */
        public void setAudioParams(int sampleRate, int pcmEncoding, int channelCount) {
            this.sampleRate = sampleRate;
            this.pcmEncoding = pcmEncoding;
            this.channelCount = channelCount;
        }

        @Override
        public void run() {
            boolean isPrepare = false;
            try {
                prepare();
                isPrepare = true;
            } catch (IOException e) {
                e.printStackTrace();
            }
            if (isPrepare && obtain != null) {
                encode();
            }
            release();
            if (listener != null) {
                if (isPrepare) {
                    listener.onSuccess();
                } else {
                    listener.onFail();
                }

            }

        }

        private void release() {
            if (encoder != null) {
                encoder.stop();
                encoder.release();
                encoder = null;
            }
            if (mOutput != null) {
                try {
                    mOutput.flush();
                    mOutput.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
                mOutput = null;
            }
        }

        // Main encode loop: feed PCM frames in (sleeping while the queue is
        // empty), drain AAC packets out, prepend an ADTS header to each and
        // write them to the output stream until end of stream.
        private void encode() {
            boolean isFinish = false;
            while (true) {
                if (!isFinish) {
                    byte[] rawData = obtain.getRawFrame();
                    if (rawData == null) {
                        if (obtain.isFinish()) {
                            // Decoder is done and queue drained: signal EOS to the encoder.
                            isFinish = true;
                            int inIndex = encoder.dequeueInputBuffer(TIME_OUT);
                            encoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        } else {
                            try {
                                Thread.sleep(10);
                            } catch (InterruptedException e) {
                                e.printStackTrace();
                            }
                        }
                        continue;
                    }
                    ByteBuffer[] inputBuffers = encoder.getInputBuffers();

                    int inIndex = encoder.dequeueInputBuffer(TIME_OUT);
                    if (inIndex >= 0) {
                        ByteBuffer inputBuffer = inputBuffers[inIndex];
                        inputBuffer.clear();
                        inputBuffer.put(rawData);
                        encoder.queueInputBuffer(inIndex, 0, rawData.length, System.nanoTime(), 0);
                    }
                }
                ByteBuffer[] outputBuffers = encoder.getOutputBuffers();
                MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
                int outIndex = encoder.dequeueOutputBuffer(info, TIME_OUT);
                if (outIndex >= 0) {
                    if (last == 0) {
                        last = System.currentTimeMillis();
                    }
                    long now = System.currentTimeMillis();
                    TransAacHandlerPure.logMsg("编码码时间:" + (now - last) + " info.size " + info.size);
                    last = now;
                    // Drain every packet currently available before feeding more input.
                    while (outIndex >= 0) {
                        ByteBuffer outputBuffer = outputBuffers[outIndex];
                        int len = info.size + 7; // 7 bytes reserved for the ADTS header
                        byte[] outData = new byte[len];
                        addADTStoPacket(outData, len);
                        outputBuffer.get(outData, 7, info.size);
                        encoder.releaseOutputBuffer(outIndex, false);
                        try {
                            mOutput.write(outData);
                        } catch (Exception e) {
                            e.printStackTrace();
                        } catch (Error e) {
                            e.printStackTrace();
                        }
                        outIndex = encoder.dequeueOutputBuffer(info, TIME_OUT);
                    }
                }
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    Log.d("123", "encode OutputBuffer BUFFER_FLAG_END_OF_STREAM");
                    break;
                }
            }
        }

        /**
         * Writes a 7-byte ADTS header into the packet's reserved prefix.
         * The first 7 bytes must be left empty by the caller, otherwise the
         * payload would be corrupted.
         *
         * NOTE(review): the header hard-codes 44.1 kHz / 2 channels even though
         * setAudioParams() captured the real sampleRate/channelCount — files at
         * other rates will carry a mismatched header; confirm intended.
         *
         * @param packet    buffer whose first 7 bytes receive the header
         * @param packetLen total packet length including the 7 header bytes
         */
        private void addADTStoPacket(byte[] packet, int packetLen) {
            int profile = 2; // AAC LC
            int freqIdx = 4; // 44.1KHz
            int chanCfg = 2; // CPE (2 channels)
            packet[0] = (byte) 0xFF;
            packet[1] = (byte) 0xF9;
            packet[2] = (byte) (((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2));
            packet[3] = (byte) (((chanCfg & 3) << 6) + (packetLen >> 11));
            packet[4] = (byte) ((packetLen & 0x7FF) >> 3);
            packet[5] = (byte) (((packetLen & 7) << 5) + 0x1F);
            packet[6] = (byte) 0xFC;
        }

        /**
         * Configures and starts an AAC-LC encoder using the PCM parameters from
         * setAudioParams(), and opens the output file.
         *
         * @throws IOException if the codec cannot be created or the file opened
         */
        private void prepare() throws IOException {
            String mime = MediaFormat.MIMETYPE_AUDIO_AAC;
            encoder = MediaCodec.createEncoderByType(mime);
            MediaFormat format = MediaFormat.createAudioFormat(mime, sampleRate, channelCount);
            format.setInteger(MediaFormat.KEY_BIT_RATE, 96000);
            format.setInteger(MediaFormat.KEY_PCM_ENCODING, pcmEncoding);
            format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 20 * 1024);
            format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
            logMsg(" New " + format.toString());
            encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            encoder.start();
            createFile(outFile, true);
            mOutput = new DataOutputStream(new FileOutputStream(outFile));
        }

    }

    /**
     * Ensures a writable file exists at {@code filePath}, creating parent
     * directories as needed.
     *
     * @param filePath path to create
     * @param recreate when true, an existing file is deleted and recreated empty
     * @return true on success, false if the path is empty or creation failed
     */
    private static boolean createFile(String filePath, boolean recreate) {
        if (TextUtils.isEmpty(filePath)) {
            return false;
        }
        try {
            File file = new File(filePath);
            if (file.exists()) {
                if (recreate) {
                    file.delete();
                    file.createNewFile();
                }
            } else {
                // Path may not exist yet: create parent directories first.
                File parentFile = file.getParentFile();
                if (!parentFile.exists()) {
                    parentFile.mkdirs();
                }
                file.createNewFile();
            }
        } catch (Exception e) {
            return false;
        }
        return true;
    }

    /** Bridge through which the encoder pulls decoded PCM frames from the decoder. */
    public interface IDataObtain {
        // Returns the next decoded PCM frame, or null if none is queued.
        byte[] getRawFrame();

        // True once the decoder has finished and no more frames will be produced.
        boolean isFinish();
    }

    /** Progress/result callbacks for the whole transcode. */
    public static interface OnProgressListener {
        void onStart();

        void onProgress(int max, int progress);

        void onSuccess();

        void onFail();
    }
}
b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/primary/decoder/DecoderAudioAAC2PCMPlay.java new file mode 100644 index 0000000..02ecaa1 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/primary/decoder/DecoderAudioAAC2PCMPlay.java @@ -0,0 +1,199 @@ +package com.aserbao.androidcustomcamera.blocks.mediaExtractor.primary.decoder; + +import android.media.AudioFormat; +import android.media.AudioManager; +import android.media.AudioTrack; +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaExtractor; +import android.media.MediaFormat; +import android.util.Log; + +import java.io.IOException; +import java.nio.ByteBuffer; + +/** + * 功能: 解码AAC 格式音频成PCM并播放 + * @author aserbao + * @date : On 2019/1/4 4:05 PM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.blocks.MediaExtractor.primary + * @Copyright: 个人版权所有 + */ +public class DecoderAudioAAC2PCMPlay { + private static final String TAG = "DecoderAudioAAC2PCMPlay"; + public DecoderAudioAAC2PCMPlay() { + } + + private DecoderAACThread mDecoderAACThread; + private byte[] mPcmData; + + public void start(String inputAudioPath,String mimeType){ + if (mDecoderAACThread == null) { + mDecoderAACThread = new DecoderAACThread(inputAudioPath,mimeType); + mDecoderAACThread.setRunning(true); + try { + mDecoderAACThread.start(); + } catch (Exception e) { + Log.w(TAG, "decode already start"); + } + } + } + + public void stop() { + if (mDecoderAACThread != null) { + mDecoderAACThread.setRunning(false); + mDecoderAACThread = null; + } + } + + public class DecoderAACThread extends Thread{ + String MIME_TYPE = "audio/mp4a-latm"; + int KEY_CHANNEL_COUNT = 2; + int KEY_SAMPLE_RATE = 44100; + int KEY_BIT_RATE = 64000; + int KEY_AAC_PROFILE = MediaCodecInfo.CodecProfileLevel.AACObjectLC; + int WAIT_TIME = 10000; + int AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT; + int CHANNEL_MODE = 
AudioFormat.CHANNEL_IN_STEREO; + int BUFFFER_SIZE = 2048; + + private String mInputAudioPath;//音频路径 + private String mInputAudioMimeType; + private MediaExtractor mMediaExtractor; + private MediaCodec mMediaCodec; + private AudioTrack mPcmPlayer; + private MediaCodec.BufferInfo mBufferInfo; + private boolean running; + + private void setRunning(boolean running) { + this.running = running; + } + + public DecoderAACThread(String inputAudioPath,String mimeType) { + mInputAudioPath = inputAudioPath; + mInputAudioMimeType = mimeType; + } + + @Override + public void run() { + super.run(); + if (!prepare()) { + running = false; + Log.e(TAG, "音频解码器初始化失败"); + return; + } + decode(); + release(); + } + + + public boolean prepare(){ + mBufferInfo = new MediaCodec.BufferInfo(); + mMediaExtractor = new MediaExtractor(); + mPcmPlayer = new AudioTrack(AudioManager.STREAM_MUSIC, KEY_SAMPLE_RATE, + AudioFormat.CHANNEL_OUT_STEREO, + AUDIO_FORMAT, BUFFFER_SIZE, AudioTrack.MODE_STREAM); + mPcmPlayer.play(); + try { + mMediaExtractor.setDataSource(mInputAudioPath); + int audioIndex = -1;//音频通道 + int trackCount = mMediaExtractor.getTrackCount();//获取通道总数 + for (int i = 0; i < trackCount; i++) { + MediaFormat trackFormat = mMediaExtractor.getTrackFormat(i); + if (trackFormat.getString(MediaFormat.KEY_MIME).startsWith("audio/")) { + audioIndex = i; + }//获取音频通道 + } + mMediaExtractor.selectTrack(audioIndex);//切换到音频通道 + MediaFormat mediaFormat = mMediaExtractor.getTrackFormat(audioIndex); + mMediaCodec = MediaCodec.createDecoderByType(mInputAudioMimeType); +/* mediaFormat.setString(MediaFormat.KEY_MIME, MIME_TYPE); + mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, KEY_CHANNEL_COUNT); + mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, KEY_SAMPLE_RATE); + mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, KEY_BIT_RATE); + mediaFormat.setInteger(MediaFormat.KEY_IS_ADTS, 1); + mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, KEY_AAC_PROFILE); + ByteBuffer key(暂时不了解该参数的含义,但必须设置) + byte[] 
data = new byte[]{(byte) 0x11, (byte) 0x90}; + ByteBuffer csd_0 = ByteBuffer.wrap(data); + mediaFormat.setByteBuffer("csd-0", csd_0);*/ + mMediaCodec.configure(mediaFormat, null, null, 0); + } catch (IOException e) { + e.printStackTrace(); + return false; + } + if (mMediaCodec == null) { + Log.e(TAG, "create mediaDecode failed"); + return false; + } + mMediaCodec.start(); + return true; + } + + + private void decode() { + while (running) { + int inputIndex = mMediaCodec.dequeueInputBuffer(-1); + if (inputIndex >= 0) { + ByteBuffer inputBuffer = mMediaCodec.getInputBuffer(inputIndex); + if (inputBuffer == null) { + return; + } + inputBuffer.clear(); + int sampleSize = mMediaExtractor.readSampleData(inputBuffer, 0); + if (sampleSize < 0) { + mMediaCodec.queueInputBuffer(inputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM); + running = false; + } else { + mMediaCodec.queueInputBuffer(inputIndex, 0, sampleSize, mMediaExtractor.getSampleTime(), 0); + mMediaExtractor.advance(); + } + } + int outputIndex = mMediaCodec.dequeueOutputBuffer(mBufferInfo, WAIT_TIME); + ByteBuffer outputBuffer; + if (outputIndex >= 0) { + // Simply ignore codec config buffers. 
+ if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { + Log.i(TAG, "audio encoder: codec config buffer"); + mMediaCodec.releaseOutputBuffer(outputIndex, false); + continue; + } + if (mBufferInfo.size != 0) { + outputBuffer = mMediaCodec.getOutputBuffer(outputIndex); + if (mPcmData == null || mPcmData.length < mBufferInfo.size) { + mPcmData = new byte[mBufferInfo.size]; + } + if (outputBuffer != null) { + outputBuffer.get(mPcmData, 0, mBufferInfo.size); + outputBuffer.clear(); + } + Log.e(TAG, "decode: mPcmData.length = " + mPcmData.length + " mBufferInfo " + mBufferInfo.toString()); + mPcmPlayer.write(mPcmData, 0, mBufferInfo.size); + } + mMediaCodec.releaseOutputBuffer(outputIndex, false); + if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { + Log.i(TAG, "saw output EOS."); + } + } + } + mMediaExtractor.release(); + } + + /** + * 释放资源 + */ + private void release() { + if (mMediaCodec != null) { + mMediaCodec.stop(); + mMediaCodec.release(); + } + if (mPcmPlayer != null) { + mPcmPlayer.stop(); + mPcmPlayer.release(); + mPcmPlayer = null; + } + } + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/primary/decoder/DecoderAudioAndGetDb.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/primary/decoder/DecoderAudioAndGetDb.java new file mode 100644 index 0000000..fda58e8 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/primary/decoder/DecoderAudioAndGetDb.java @@ -0,0 +1,233 @@ +package com.aserbao.androidcustomcamera.blocks.mediaExtractor.primary.decoder; + +import android.media.AudioFormat; +import android.media.AudioManager; +import android.media.AudioTrack; +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaExtractor; +import android.media.MediaFormat; +import android.util.Log; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.CharBuffer; 
+import java.nio.charset.Charset; + +import static java.lang.Math.log10; + +/** + * 功能: 解码获取音频帧分贝大小 + * @author aserbao + * @date : On 2019/1/4 4:05 PM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.blocks.MediaExtractor.primary + * @Copyright: 个人版权所有 + */ +public class DecoderAudioAndGetDb { + private static final String TAG = "DecoderAudioAAC2PCMPlay"; + public DecoderAudioAndGetDb() { + } + + private DecoderAACThread mDecoderAACThread; + private byte[] mPcmData; + + public void start(String inputAudioPath,String mimeType,DbCallBackListener dbCallBackListener){ + mDbCallBackListener = dbCallBackListener; + if (mDecoderAACThread == null) { + mDecoderAACThread = new DecoderAACThread(inputAudioPath,mimeType); + mDecoderAACThread.setRunning(true); + try { + mDecoderAACThread.start(); + } catch (Exception e) { + Log.w(TAG, "decode already start"); + } + } + + } + + public void stop() { + if (mDecoderAACThread != null) { + mDecoderAACThread.setRunning(false); + mDecoderAACThread = null; + } + } + + public class DecoderAACThread extends Thread{ + String MIME_TYPE = "audio/mp4a-latm"; + int KEY_CHANNEL_COUNT = 2; + int KEY_SAMPLE_RATE = 44100; + int KEY_BIT_RATE = 64000; + int KEY_AAC_PROFILE = MediaCodecInfo.CodecProfileLevel.AACObjectLC; + int WAIT_TIME = 10000; + int AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT; + int CHANNEL_MODE = AudioFormat.CHANNEL_IN_STEREO; + int BUFFFER_SIZE = 2048; + + private String mInputAudioPath;//音频路径 + private String mInputAudioMimeType; + private MediaExtractor mMediaExtractor; + private MediaCodec mMediaCodec; + private AudioTrack mPcmPlayer; + private MediaCodec.BufferInfo mBufferInfo; + private boolean running; + private long mStartTime; + + private void setRunning(boolean running) { + this.running = running; + } + + public DecoderAACThread(String inputAudioPath,String mimeType) { + mInputAudioPath = inputAudioPath; + mInputAudioMimeType = mimeType; + } + + @Override + public 
void run() { + super.run(); + mStartTime = System.currentTimeMillis(); + if (!prepare()) { + running = false; + Log.e(TAG, "音频解码器初始化失败"); + return; + } + decode(); + release(); + } + + public boolean prepare(){ + mBufferInfo = new MediaCodec.BufferInfo(); + mMediaExtractor = new MediaExtractor(); + mPcmPlayer = new AudioTrack(AudioManager.STREAM_MUSIC, KEY_SAMPLE_RATE, + AudioFormat.CHANNEL_OUT_STEREO, + AUDIO_FORMAT, BUFFFER_SIZE, AudioTrack.MODE_STREAM); + mPcmPlayer.play(); + try { + mMediaExtractor.setDataSource(mInputAudioPath); + int audioIndex = -1;//音频通道 + int trackCount = mMediaExtractor.getTrackCount();//获取通道总数 + for (int i = 0; i < trackCount; i++) { + MediaFormat trackFormat = mMediaExtractor.getTrackFormat(i); + String string = trackFormat.getString(MediaFormat.KEY_MIME); + if (string.startsWith("audio/")) { + audioIndex = i; + } + } + mMediaExtractor.selectTrack(audioIndex);//切换到音频通道 + MediaFormat mediaFormat = mMediaExtractor.getTrackFormat(audioIndex); + mMediaCodec = MediaCodec.createDecoderByType(mInputAudioMimeType); + mMediaCodec.configure(mediaFormat, null, null, 0); + } catch (IOException e) { + e.printStackTrace(); + return false; + } + if (mMediaCodec == null) { + Log.e(TAG, "create mediaDecode failed"); + return false; + } + mMediaCodec.start(); + return true; + } + + + private void decode() { + while (running) { + int inputIndex = mMediaCodec.dequeueInputBuffer(-1); + if (inputIndex >= 0) { + ByteBuffer inputBuffer = mMediaCodec.getInputBuffer(inputIndex); + if (inputBuffer == null) { + return; + } + inputBuffer.clear(); + int sampleSize = mMediaExtractor.readSampleData(inputBuffer, 0); + if (sampleSize < 0) { + mMediaCodec.queueInputBuffer(inputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM); + running = false; + } else { + mMediaCodec.queueInputBuffer(inputIndex, 0, sampleSize, mMediaExtractor.getSampleTime(), 0); + mMediaExtractor.advance(); + } + } + int outputIndex = mMediaCodec.dequeueOutputBuffer(mBufferInfo, WAIT_TIME); + 
ByteBuffer outputBuffer; + if (outputIndex >= 0) { + // Simply ignore codec config buffers. + if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { + Log.i(TAG, "audio encoder: codec config buffer"); + mMediaCodec.releaseOutputBuffer(outputIndex, false); + continue; + } + if (mBufferInfo.size != 0) { + outputBuffer = mMediaCodec.getOutputBuffer(outputIndex); + if (mPcmData == null || mPcmData.length < mBufferInfo.size) { + mPcmData = new byte[mBufferInfo.size]; + } + if (outputBuffer != null) { + outputBuffer.get(mPcmData, 0, mBufferInfo.size); + outputBuffer.clear(); + } + float v = mMediaExtractor.getSampleTime() / (float) (1000 * 1000); + +// calcFrequency(mPcmData,KEY_SAMPLE_RATE); + Log.e(TAG, "解析到的时间点为:"+ v + "s decode: mPcmData.length = " + mPcmData.length + " mBufferInfo " + mBufferInfo.toString()); +// mPcmPlayer.write(mPcmData, 0, mBufferInfo.size); + } + mMediaCodec.releaseOutputBuffer(outputIndex, false); + if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { + Log.i(TAG, "saw output EOS."); + } + } + } + mMediaExtractor.release(); + Log.e(TAG, "decode: " + (System.currentTimeMillis() - mStartTime)/1000 + "s" ); + } + + /** + * 释放资源 + */ + private void release() { + if (mMediaCodec != null) { + mMediaCodec.stop(); + mMediaCodec.release(); + } + if (mPcmPlayer != null) { + mPcmPlayer.stop(); + mPcmPlayer.release(); + mPcmPlayer = null; + } + } + } + + public void calcFrequency(byte[] fft, int samplingRate){ + float[] magnitudes = new float[fft.length / 2]; + int max = 0; + for (int i = 0; i < magnitudes.length; i++) { + magnitudes[i] = (float) Math.hypot(fft[2 * i], fft[2 * i + 1]); + if (magnitudes[max] < magnitudes[i]) { + max = i; + } + } + + int currentFrequency = max * samplingRate / fft.length; + if (currentFrequency<0){ + return; + } + long v = 0; + for (int i = 0; i < fft.length; i++) { + v += Math.pow(fft[i], 2); + } + + double volume = 10 * log10(v / (double) fft.length); + 
mDbCallBackListener.cuurentFrequenty(currentFrequency,volume); + Log.e(TAG, "calcFrequency: currentFrequency = " + currentFrequency + " volume = " + volume + " max = " + max ); + } + + private DbCallBackListener mDbCallBackListener; + public interface DbCallBackListener { + void cuurentFrequenty(int cuurentFrequenty, double volume); + } + + + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/primary/decoder/DecoderMp3FromMp4.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/primary/decoder/DecoderMp3FromMp4.java new file mode 100644 index 0000000..4b3ffd6 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/primary/decoder/DecoderMp3FromMp4.java @@ -0,0 +1,97 @@ +package com.aserbao.androidcustomcamera.blocks.mediaExtractor.primary.decoder; + +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaExtractor; +import android.media.MediaFormat; +import android.media.MediaMuxer; +import android.util.Log; + +import com.aserbao.androidcustomcamera.blocks.interfaces.ICallBackListener; + +import java.io.IOException; +import java.nio.ByteBuffer; + +/** + * 功能:将mp3从mp4中分离出来 + * + * @author aserbao + * @date : On 2019/1/7 10:47 AM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.blocks.mediaExtractor.primary.decoder + * @Copyright: 个人版权所有 + */ +public class DecoderMp3FromMp4 { + + private String inputMp4Path = ""; + private String outputMp3Path = ""; + private ICallBackListener mICallBackListener; + + public DecoderMp3FromMp4(String inputMp4Path, String outputMp3Path, ICallBackListener iCallBackListener) { + this.inputMp4Path = inputMp4Path; + this.outputMp3Path = outputMp3Path; + mICallBackListener = iCallBackListener; + } + + public void start() { + MediaExtractor mediaExtractor = new MediaExtractor(); + int audioIndex = -1; + try { + 
mediaExtractor.setDataSource(inputMp4Path); + int trackCount = mediaExtractor.getTrackCount(); + for (int i = 0; i < trackCount; i++) { + MediaFormat trackFormat = mediaExtractor.getTrackFormat(i); + if (trackFormat.getString(MediaFormat.KEY_MIME).startsWith("audio/")) { + audioIndex = i; + } + } + mediaExtractor.selectTrack(audioIndex); + MediaFormat trackFormat = mediaExtractor.getTrackFormat(audioIndex); + MediaMuxer mediaMuxer = new MediaMuxer(outputMp3Path, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); + int writeAudioIndex = mediaMuxer.addTrack(trackFormat); + mediaMuxer.start(); + ByteBuffer byteBuffer = ByteBuffer.allocate(500 * 1024); + MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo(); + + long stampTime = 0; + //获取帧之间的间隔时间 + { + mediaExtractor.readSampleData(byteBuffer, 0); + if (mediaExtractor.getSampleFlags() == MediaExtractor.SAMPLE_FLAG_SYNC) { + mediaExtractor.advance(); + } + mediaExtractor.readSampleData(byteBuffer, 0); + long secondTime = mediaExtractor.getSampleTime(); + mediaExtractor.advance(); + mediaExtractor.readSampleData(byteBuffer, 0); + long thirdTime = mediaExtractor.getSampleTime(); + stampTime = Math.abs(thirdTime - secondTime); + } + + mediaExtractor.unselectTrack(audioIndex); + mediaExtractor.selectTrack(audioIndex); + while (true) { + int readSampleSize = mediaExtractor.readSampleData(byteBuffer, 0); + if (readSampleSize < 0) { + break; + } + mediaExtractor.advance(); + + bufferInfo.size = readSampleSize; + bufferInfo.flags = mediaExtractor.getSampleFlags(); + bufferInfo.offset = 0; + bufferInfo.presentationTimeUs += stampTime; + + mediaMuxer.writeSampleData(writeAudioIndex, byteBuffer, bufferInfo); + } + mediaMuxer.stop(); + mediaMuxer.release(); + mediaExtractor.release(); + mICallBackListener.success(); + } catch (IOException e) { + e.printStackTrace(); + mICallBackListener.failed(e); + } + } +} diff --git 
a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/primary/decoder/DecoderNoVoiceMp4FromMp4.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/primary/decoder/DecoderNoVoiceMp4FromMp4.java new file mode 100644 index 0000000..0da9b09 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/primary/decoder/DecoderNoVoiceMp4FromMp4.java @@ -0,0 +1,94 @@ +package com.aserbao.androidcustomcamera.blocks.mediaExtractor.primary.decoder; + +import android.media.MediaCodec; +import android.media.MediaExtractor; +import android.media.MediaFormat; +import android.media.MediaMuxer; +import android.util.Log; + +import com.aserbao.androidcustomcamera.blocks.interfaces.ICallBackListener; + +import java.io.IOException; +import java.nio.ByteBuffer; + +/** + * 功能: + * + * @author aserbao + * @date : On 2019/1/7 10:55 AM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.blocks.mediaExtractor.primary.decoder + * @Copyright: 个人版权所有 + */ +public class DecoderNoVoiceMp4FromMp4 { + private String outputMp4Path; + private String inputMp4Path; + private ICallBackListener mICallBackListener; + + public DecoderNoVoiceMp4FromMp4( String inputMp4Path,String outputMp4Path, ICallBackListener iCallBackListener) { + this.inputMp4Path = inputMp4Path; + this.outputMp4Path = outputMp4Path; + mICallBackListener = iCallBackListener; + } + + public void start() { + MediaExtractor mediaExtractor = new MediaExtractor(); + int videoIndex = -1; + try { + mediaExtractor.setDataSource(inputMp4Path); + int trackCount = mediaExtractor.getTrackCount(); + for (int i = 0; i < trackCount; i++) { + MediaFormat trackFormat = mediaExtractor.getTrackFormat(i); + String mimeType = trackFormat.getString(MediaFormat.KEY_MIME); + if (mimeType.startsWith("video/")) { + videoIndex = i; + } + } + + mediaExtractor.selectTrack(videoIndex); + MediaFormat trackFormat = 
mediaExtractor.getTrackFormat(videoIndex); + MediaMuxer mediaMuxer = new MediaMuxer(outputMp4Path, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); + int trackIndex = mediaMuxer.addTrack(trackFormat); + ByteBuffer byteBuffer = ByteBuffer.allocate(1024 * 500); + MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo(); + mediaMuxer.start(); + long videoSampleTime; + { + mediaExtractor.readSampleData(byteBuffer, 0); + //skip first I frame + if (mediaExtractor.getSampleFlags() == MediaExtractor.SAMPLE_FLAG_SYNC) + mediaExtractor.advance(); + mediaExtractor.readSampleData(byteBuffer, 0); + long firstVideoPTS = mediaExtractor.getSampleTime(); + mediaExtractor.advance(); + mediaExtractor.readSampleData(byteBuffer, 0); + long SecondVideoPTS = mediaExtractor.getSampleTime(); + videoSampleTime = Math.abs(SecondVideoPTS - firstVideoPTS); + } + + mediaExtractor.unselectTrack(videoIndex); + mediaExtractor.selectTrack(videoIndex); + while (true) { + int readSampleSize = mediaExtractor.readSampleData(byteBuffer, 0); + if (readSampleSize < 0) { + break; + } + mediaExtractor.advance(); + bufferInfo.size = readSampleSize; + bufferInfo.offset = 0; + bufferInfo.flags = mediaExtractor.getSampleFlags(); + bufferInfo.presentationTimeUs += videoSampleTime; + + mediaMuxer.writeSampleData(trackIndex, byteBuffer, bufferInfo); + } + mediaMuxer.stop(); + mediaExtractor.release(); + mediaMuxer.release(); + mICallBackListener.success(); + } catch (IOException e) { + e.printStackTrace(); + mICallBackListener.failed(e); + } + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/primary/encoder/EncoderAudioAAC.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/primary/encoder/EncoderAudioAAC.java new file mode 100644 index 0000000..2125a73 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/primary/encoder/EncoderAudioAAC.java @@ -0,0 +1,188 @@ +package 
com.aserbao.androidcustomcamera.blocks.mediaExtractor.primary.encoder; + +import android.media.AudioFormat; +import android.media.AudioRecord; +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaFormat; +import android.media.MediaRecorder; +import android.util.Log; + +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.ByteBuffer; + +/** + * 功能:录音编码生成aac音频文件 + * @author aserbao + * @date : On 2019/1/4 5:24 PM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.blocks.MediaExtractor.primary + * @Copyright: 个人版权所有 + */ +public class EncoderAudioAAC { + private static final String TAG = "EncoderAudioAAC"; + private byte[] mFrameByte; + private EncodeAudioThread mEncodeAudioThread; + + public void start(String outputAudioPath){ + if (mEncodeAudioThread == null) { + mEncodeAudioThread = new EncodeAudioThread(outputAudioPath); + mEncodeAudioThread.setRunning(true); + try { + mEncodeAudioThread.start(); + } catch (Exception e) { + Log.w(TAG, "encode already start"); + } + } + } + + public void stop() { + if (mEncodeAudioThread != null) { + mEncodeAudioThread.setRunning(false); + mEncodeAudioThread = null; + } + } + + public class EncodeAudioThread extends Thread{ + String MIME_TYPE = "audio/mp4a-latm"; + int KEY_CHANNEL_COUNT = 2; + int KEY_SAMPLE_RATE = 44100; + int KEY_BIT_RATE = 64000; + int KEY_AAC_PROFILE = MediaCodecInfo.CodecProfileLevel.AACObjectLC; + int WAIT_TIME = 10000; + int AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT; + int CHANNEL_MODE = AudioFormat.CHANNEL_IN_STEREO; + int BUFFFER_SIZE = 2048; + + private final int mFrameSize = 2048; + private byte[] mBuffer; + private boolean running; + private MediaCodec mEncoder; + private AudioRecord mRecord; + private MediaCodec.BufferInfo mBufferInfo; + private String mOutputPath; + private FileOutputStream fileOutputStream; + + public 
EncodeAudioThread(String mOutputPath) { + this.mOutputPath = mOutputPath; + } + + @Override + public void run() { + if (!prepare()) { + Log.d(TAG, "音频编码器初始化失败"); + running = false; + } + while (running) { + int num = mRecord.read(mBuffer, 0, mFrameSize); + Log.d(TAG,"num = " + num); + try { + encode(mBuffer); + } catch (IOException e) { + e.printStackTrace(); + } + } + try { + fileOutputStream.close(); + } catch (IOException e) { + e.printStackTrace(); + } + release(); + } + + private void setRunning(boolean running) { + this.running = running; + } + + /** + * 释放资源 + */ + private void release() { + if (mEncoder != null) { + mEncoder.stop(); + mEncoder.release(); + } + if (mRecord != null) { + mRecord.stop(); + mRecord.release(); + mRecord = null; + } + } + + private boolean prepare() { + try { + fileOutputStream = new FileOutputStream(mOutputPath); + } catch (FileNotFoundException e) { + e.printStackTrace(); + } + try { + mBufferInfo = new MediaCodec.BufferInfo(); + mEncoder = MediaCodec.createEncoderByType(MIME_TYPE); + MediaFormat mediaFormat = MediaFormat.createAudioFormat(MIME_TYPE, KEY_SAMPLE_RATE, KEY_CHANNEL_COUNT); + mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, KEY_BIT_RATE); + mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, KEY_AAC_PROFILE); + mEncoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); + mEncoder.start(); + } catch (IOException e) { + e.printStackTrace(); + return false; + } + mBuffer = new byte[mFrameSize]; + int minBufferSize = AudioRecord.getMinBufferSize(KEY_SAMPLE_RATE, CHANNEL_MODE, AUDIO_FORMAT); + mRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, KEY_SAMPLE_RATE, CHANNEL_MODE, AUDIO_FORMAT, minBufferSize * 2); + mRecord.startRecording(); + return true; + } + + private void encode(byte[] data) throws IOException { + int inputBufferIndex = mEncoder.dequeueInputBuffer(-1); + if (inputBufferIndex >= 0) { + ByteBuffer inputBuffer = mEncoder.getInputBuffer(inputBufferIndex); + if (inputBuffer == null) 
return; + inputBuffer.clear(); + inputBuffer.put(data); + inputBuffer.limit(data.length); + mEncoder.queueInputBuffer(inputBufferIndex, 0, data.length, + System.nanoTime(), 0); + } + int outputBufferIndex = mEncoder.dequeueOutputBuffer(mBufferInfo, 0); + while (outputBufferIndex >= 0) { + ByteBuffer outputBuffer = mEncoder.getOutputBuffer(outputBufferIndex); + if (outputBuffer == null) return; + //给adts头字段空出7的字节 + int length = mBufferInfo.size + 7; + if (mFrameByte == null || mFrameByte.length < length) { + mFrameByte = new byte[length]; + } + addADTStoPacket(mFrameByte, length); + outputBuffer.get(mFrameByte, 7, mBufferInfo.size); + //TODO mFrameByte编码返回的数据 + fileOutputStream.write(mFrameByte, 0, mFrameByte.length); + mEncoder.releaseOutputBuffer(outputBufferIndex, false); + outputBufferIndex = mEncoder.dequeueOutputBuffer(mBufferInfo, 0); + } + } + + /** + * 给编码出的aac裸流添加adts头字段 + * + * @param packet 要空出前7个字节,否则会搞乱数据 + * @param packetLen 7 + */ + private void addADTStoPacket(byte[] packet, int packetLen) { + int profile = 2; //AAC LC + int freqIdx = 4; //44.1KHz + int chanCfg = 2; //CPE + packet[0] = (byte) 0xFF; + packet[1] = (byte) 0xF9; + packet[2] = (byte) (((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2)); + packet[3] = (byte) (((chanCfg & 3) << 6) + (packetLen >> 11)); + packet[4] = (byte) ((packetLen & 0x7FF) >> 3); + packet[5] = (byte) (((packetLen & 7) << 5) + 0x1F); + packet[6] = (byte) 0xFC; + } + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/primary/official/AMediaExtractorOfficial.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/primary/official/AMediaExtractorOfficial.java new file mode 100644 index 0000000..4d0a7c7 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/primary/official/AMediaExtractorOfficial.java @@ -0,0 +1,59 @@ +package com.aserbao.androidcustomcamera.blocks.mediaExtractor.primary.official; + +import 
android.media.MediaExtractor; +import android.media.MediaFormat; +import android.media.MediaRecorder; +import android.util.Log; + +import java.io.IOException; +import java.nio.ByteBuffer; + +/** + * 功能: + * + * @author aserbao + * @date : On 2019/1/5 10:35 AM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.blocks.MediaExtractor.primary.decoder + * @Copyright: 个人版权所有 + */ +public class AMediaExtractorOfficial { + private static final String TAG = "AMediaExtractorOfficial"; + private static long mStartTime; + + public static void mediaExtractorDecoderAudio(String inputAudioPath){ + mStartTime = System.currentTimeMillis(); + MediaExtractor extractor = new MediaExtractor(); + try { + extractor.setDataSource(inputAudioPath); + int audioIndex = -1;//音频通道 + int numTracks = extractor.getTrackCount(); + for (int i = 0; i < numTracks; ++i) { + MediaFormat format = extractor.getTrackFormat(i); + String mime = format.getString(MediaFormat.KEY_MIME); + if (format.getString(MediaFormat.KEY_MIME).startsWith("audio/")) { + audioIndex = i; + } + } + extractor.selectTrack(audioIndex);//切换到音频通道 + + ByteBuffer inputBuffer = ByteBuffer.allocate(1024*200); + int readSampleData = extractor.readSampleData(inputBuffer, 0); + while (readSampleData >= 0) { + int trackIndex = extractor.getSampleTrackIndex(); + long presentationTimeUs = extractor.getSampleTime(); // 拿到解析到音频的时间 + Log.e(TAG, "meidaExtractorDecoderAudio: trackIndex = " + trackIndex + " presentationTimeUs = " + presentationTimeUs + " readSampleData =" + readSampleData ); + extractor.advance(); + } + } catch (IOException e) { + e.printStackTrace(); + Log.e(TAG, "meidaExtractorDecoderAudio: " + e ); + } + + extractor.release(); + Log.e(TAG, "mediaExtractorDecoderAudio: " + (System.currentTimeMillis() - mStartTime)/(float)1000 + "s" ); + extractor = null; + } + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaMuxer/MediaMuxerActivity.java 
b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaMuxer/MediaMuxerActivity.java new file mode 100644 index 0000000..866b127 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaMuxer/MediaMuxerActivity.java @@ -0,0 +1,61 @@ +package com.aserbao.androidcustomcamera.blocks.mediaMuxer; + +import android.os.Bundle; +import android.os.Environment; +import android.support.v7.app.AppCompatActivity; +import android.util.Log; +import android.view.View; +import android.widget.Toast; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.blocks.interfaces.ICallBackListener; +import com.aserbao.androidcustomcamera.blocks.mediaMuxer.primary.MuxerVoiceAndVideoToMp4; +import com.aserbao.androidcustomcamera.blocks.mediaMuxer.primary.MuxerVoiceDbToMp4; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.Locale; + +import butterknife.ButterKnife; +import butterknife.OnClick; + +import static android.media.MediaFormat.MIMETYPE_AUDIO_AAC; + +public class MediaMuxerActivity extends AppCompatActivity { + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setContentView(R.layout.activity_media_muxer); + ButterKnife.bind(this); + } + String path = Environment.getExternalStorageDirectory().getAbsolutePath(); + + @OnClick({R.id.muxer_aac_video_to_mp4, R.id.muxer_aac_db_to_mp4}) + public void onViewClicked(View view) { + switch (view.getId()) { + case R.id.muxer_aac_video_to_mp4: + new MuxerVoiceAndVideoToMp4(path + "/own.m4a", path + "/aserbao.mp4", path + "/out_aserbao.mp4", new ICallBackListener() { + @Override + public void success() { + Toast.makeText(MediaMuxerActivity.this, "成功", Toast.LENGTH_SHORT).show(); + } + + @Override + public void failed(Exception e) { + Toast.makeText(MediaMuxerActivity.this, "失败", Toast.LENGTH_SHORT).show(); + } + }).start(); + break; + case R.id.muxer_aac_db_to_mp4: + new MuxerVoiceDbToMp4().start(path 
+ "/own.m4a", path + "/output_aserbao1.mp4", MIMETYPE_AUDIO_AAC, new MuxerVoiceDbToMp4.DbCallBackListener() { + @Override + public void cuurentFrequenty(int cuurentFrequenty, double volume) { + } + }); + break; + } + } + + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaMuxer/functions/CreateVideoAddAudio.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaMuxer/functions/CreateVideoAddAudio.java new file mode 100644 index 0000000..45c886c --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaMuxer/functions/CreateVideoAddAudio.java @@ -0,0 +1,335 @@ +package com.aserbao.androidcustomcamera.blocks.mediaMuxer.functions; + +import android.graphics.Bitmap; +import android.graphics.BitmapFactory; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Paint; +import android.graphics.Rect; +import android.graphics.RectF; +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaFormat; +import android.media.MediaMuxer; +import android.os.Handler; +import android.os.Message; +import android.text.TextUtils; +import android.util.Log; +import android.view.Surface; +import android.view.View; +import android.widget.Button; +import android.widget.TextView; +import android.widget.Toast; +import android.widget.VideoView; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.base.activity.BaseActivity; +import com.aserbao.androidcustomcamera.base.utils.FileUtils; + +import java.io.File; +import java.io.IOException; +import java.lang.ref.WeakReference; +import java.nio.ByteBuffer; + +import butterknife.BindView; +import butterknife.OnClick; + +/** + * 生成视频文件 + */ +public class CreateVideoAddAudio extends BaseActivity { + private static final String TAG = "PrimaryMediaCodecActivi"; + private static final String MIME_TYPE = "video/avc"; + private static final int WIDTH = 720; + private static final int 
HEIGHT = 1280; + private static final int BIT_RATE = 4000000; + private static final int FRAMES_PER_SECOND = 4; + private static final int IFRAME_INTERVAL = 5; + + private static final int NUM_FRAMES = 4 * 100; + private static final int START_RECORDING = 0; + private static final int STOP_RECORDING = 1; + + @BindView(R.id.btn_recording) + Button mBtnRecording; + @BindView(R.id.btn_watch) + Button mBtnWatch; + @BindView(R.id.primary_mc_tv) + TextView mPrimaryMcTv; + public MediaCodec.BufferInfo mBufferInfo; + public MediaCodec mMediaCodec; + @BindView(R.id.primary_vv) + VideoView mPrimaryVv; + private Surface mInputSurface; + public MediaMuxer mMuxer; + private boolean mMuxerStarted; + private int mTrackIndex; + private long mFakePts; + private boolean isRecording; + + private int cuurFrame = 0; + + private MyHanlder mMyHanlder = new MyHanlder(this); + public File mOutputFile; + + @OnClick({R.id.btn_recording, R.id.btn_watch}) + public void onViewClicked(View view) { + switch (view.getId()) { + case R.id.btn_recording: + if (mBtnRecording.getText().equals("开始录制")) { + try { +// mOutputFile = new File(Environment.getExternalStorageDirectory().getAbsolutePath(), System.currentTimeMillis() + ".mp4"); + mOutputFile = new File(FileUtils.getStorageMp4("PrimaryMediaCodecActivity")); + startRecording(mOutputFile); + mPrimaryMcTv.setText("文件保存路径为:" + mOutputFile.toString()); + mBtnRecording.setText("停止录制"); + isRecording = true; + } catch (IOException e) { + e.printStackTrace(); + mBtnRecording.setText("出现异常了,请查明原因"); + } + } else if (mBtnRecording.getText().equals("停止录制")) { + mBtnRecording.setText("开始录制"); + stopRecording(); + } + break; + case R.id.btn_watch: + String absolutePath = mOutputFile.getAbsolutePath(); + if (!TextUtils.isEmpty(absolutePath)) { + if(mBtnWatch.getText().equals("查看视频")) { + mBtnWatch.setText("删除视频"); + mPrimaryVv.setVideoPath(absolutePath); + mPrimaryVv.start(); + }else if(mBtnWatch.getText().equals("删除视频")){ + if (mOutputFile.exists()){ + 
mOutputFile.delete(); + mBtnWatch.setText("查看视频"); + } + } + }else{ + Toast.makeText(this, "请先录制", Toast.LENGTH_SHORT).show(); + } + break; + } + } + + private Bitmap mBitmap; + private static class MyHanlder extends Handler { + private WeakReference mPrimaryMediaCodecActivityWeakReference; + + public MyHanlder(CreateVideoAddAudio activity) { + mPrimaryMediaCodecActivityWeakReference = new WeakReference(activity); + } + + @Override + public void handleMessage(Message msg) { + CreateVideoAddAudio activity = mPrimaryMediaCodecActivityWeakReference.get(); + if (activity != null) { + switch (msg.what) { + case START_RECORDING: + activity.drainEncoder(false); + activity.generateFrame(activity.cuurFrame); + Log.e(TAG, "handleMessage: " + activity.cuurFrame); + if (activity.cuurFrame < NUM_FRAMES) { + this.sendEmptyMessage(START_RECORDING); + } else { + activity.drainEncoder(true); + activity.mBtnRecording.setText("开始录制"); + activity.releaseEncoder(); + } + activity.cuurFrame++; + break; + case STOP_RECORDING: + Log.e(TAG, "handleMessage: STOP_RECORDING"); + activity.drainEncoder(true); + activity.mBtnRecording.setText("开始录制"); + activity.releaseEncoder(); + break; + } + } + } + } + + @Override + protected int setLayoutId() { + return R.layout.activity_primary_media_codec; + } + + + private void startRecording(File outputFile) throws IOException { + cuurFrame = 0; + mBitmap = BitmapFactory.decodeResource(getResources(),R.drawable.katong); + prepareEncoder(outputFile); + mMyHanlder.sendEmptyMessage(START_RECORDING); + } + + private void stopRecording() { + mMyHanlder.removeMessages(START_RECORDING); + mMyHanlder.sendEmptyMessage(STOP_RECORDING); + } + + /** + * 准备视频编码器,muxer,和一个输入表面。 + */ + private void prepareEncoder(File outputFile) throws IOException { + mBufferInfo = new MediaCodec.BufferInfo(); + MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, WIDTH, HEIGHT); + + //1. 
设置一些属性。没有指定其中的一些可能会导致MediaCodec.configure()调用抛出一个无用的异常。 + format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); + format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);//比特率(比特率越高,音视频质量越高,编码文件越大) + format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAMES_PER_SECOND);//设置帧速 + format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);//设置关键帧间隔时间 + + //2.创建一个MediaCodec编码器,并配置格式。获取一个我们可以用于输入的表面,并将其封装到处理EGL工作的类中。 + mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE); + mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); + mInputSurface = mMediaCodec.createInputSurface(); + mMediaCodec.start(); + //3. 创建一个MediaMuxer。我们不能在这里添加视频跟踪和开始合成,因为我们的MediaFormat里面没有缓冲数据。 + // 只有在编码器开始处理数据后才能从编码器获得这些数据。我们实际上对多路复用音频没有兴趣。我们只是想要 + // 将从MediaCodec获得的原始H.264基本流转换为.mp4文件。 + mMuxer = new MediaMuxer(outputFile.toString(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); + + mMuxerStarted = false; + mTrackIndex = -1; + } + + private void drainEncoder(boolean endOfStream) { + final int TIMEOUT_USEC = 10000; + if (endOfStream) { + mMediaCodec.signalEndOfInputStream();//在输入信号end-of-stream。相当于提交一个空缓冲区。视频编码完结 + } + ByteBuffer[] encoderOutputBuffers = mMediaCodec.getOutputBuffers(); + while (true) { + int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC); + Log.e(TAG, "drainEncoder: " + outputBufferIndex); + if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {//没有可以输出的数据使用时 + if (!endOfStream) { + break; // out of while + } + } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { + //输出缓冲区已经更改,客户端必须引用新的 + encoderOutputBuffers = mMediaCodec.getOutputBuffers(); + } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { + //输出格式发生了变化,后续数据将使用新的数据格式。 + if (mMuxerStarted) { + throw new RuntimeException("format changed twice"); + } + MediaFormat newFormat = mMediaCodec.getOutputFormat(); + mTrackIndex = mMuxer.addTrack(newFormat); + mMuxer.start(); + 
mMuxerStarted = true; + } else if (outputBufferIndex < 0) { + } else { + ByteBuffer encodedData = encoderOutputBuffers[outputBufferIndex]; + if (encodedData == null) { + throw new RuntimeException("encoderOutputBuffer " + outputBufferIndex + + " was null"); + } + if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { + //当我们得到的时候,编解码器的配置数据被拉出来,并给了muxer。这时候可以忽略。 + mBufferInfo.size = 0; + } + if (mBufferInfo.size != 0) { + if (!mMuxerStarted) { + throw new RuntimeException("muxer hasn't started"); + } + //调整ByteBuffer值以匹配BufferInfo。 + encodedData.position(mBufferInfo.offset); + encodedData.limit(mBufferInfo.offset + mBufferInfo.size); + mBufferInfo.presentationTimeUs = mFakePts; + mFakePts += 1000000L / FRAMES_PER_SECOND; + + mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo); + } + mMediaCodec.releaseOutputBuffer(outputBufferIndex, false); + if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { + if (!endOfStream) { + Log.e(TAG, "意外结束"); + } else { + Toast.makeText(this, "已完成……", Toast.LENGTH_SHORT).show(); + Log.e(TAG, "正常结束"); + } + isRecording = false; + break; + } + } + } + } + + + private void generateFrame(int frameNum){ + Canvas canvas = mInputSurface.lockCanvas(null); + Paint paint = new Paint(); + try { + int width = canvas.getWidth(); + int height = canvas.getHeight(); + String color = "#FFCA39"; + if (frameNum %2 == 0 ){ + color = "#FFCA39"; + }else{ + color = "#FFF353"; + } + int color1 = Color.parseColor(color); + canvas.drawColor(color1); + paint.setTextSize(100); + paint.setColor(0xff000000); + canvas.drawText("第"+ String.valueOf(frameNum) + "帧",width/2,height/2,paint); + Rect srcRect = new Rect(0, 0, mBitmap.getWidth(), mBitmap.getHeight()); + int margain = 30; + Rect decRect = new Rect(margain, margain, width - margain, height-margain); + canvas.drawBitmap(mBitmap,srcRect,decRect,paint); + + int roundMargain = 60; + int roundHeight = 300; + int roundRadius = 25; + int roundLineWidth = 10; + 
paint.setStyle(Paint.Style.FILL);//充满 + paint.setAntiAlias(true);// 设置画笔的锯齿效果 + RectF roundRect1 = new RectF(roundMargain - roundLineWidth,roundMargain - roundLineWidth,width - roundMargain + roundLineWidth,roundHeight + roundMargain + roundLineWidth); + paint.setColor(Color.BLACK); + canvas.drawRoundRect(roundRect1,roundRadius,roundRadius,paint); + paint.setColor(color1); + RectF roundRect2 = new RectF(roundMargain,roundMargain,width - roundMargain,roundHeight + roundMargain); + canvas.drawRoundRect(roundRect2,roundRadius,roundRadius,paint); + +// paint.setStyle(Paint.Style.STROKE);//充满 + int timeMargain = roundMargain + 50; + String sTime = "2018/12/29 00:39"; + paint.setTextAlign(Paint.Align.CENTER); + paint.setTextSize(40); + paint.setColor(Color.BLACK); + canvas.drawText(sTime,width/2,timeMargain,paint); + + int soundMargain = timeMargain + 80; + String soundTime = "party 是我家"; + String soundTime2 = "party party 是我家"; + paint.setTextAlign(Paint.Align.CENTER); + paint.setTextSize(80); + canvas.drawText(soundTime,width/2,soundMargain,paint); + canvas.drawText(soundTime2,width/2,soundMargain + 80,paint); + + } finally { + mInputSurface.unlockCanvasAndPost(canvas); + } + + } + + private void releaseEncoder() { + if (mMediaCodec != null) { + mMediaCodec.stop(); + mMediaCodec.release(); + mMediaCodec = null; + } + if (mInputSurface != null) { + mInputSurface.release(); + mInputSurface = null; + } + if (mMuxer != null) { + mMuxer.stop(); + mMuxer.release(); + mMuxer = null; + } + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaMuxer/functions/CreateVideoAddAudioToMp4.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaMuxer/functions/CreateVideoAddAudioToMp4.java new file mode 100644 index 0000000..00c15c9 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaMuxer/functions/CreateVideoAddAudioToMp4.java @@ -0,0 +1,399 @@ +package com.aserbao.androidcustomcamera.blocks.mediaMuxer.functions; 
+ +import android.graphics.Bitmap; +import android.graphics.BitmapFactory; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Paint; +import android.graphics.Rect; +import android.graphics.RectF; +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaFormat; +import android.media.MediaMuxer; +import android.os.Environment; +import android.os.Handler; +import android.os.Message; +import android.text.TextUtils; +import android.util.Log; +import android.view.Surface; +import android.view.View; +import android.widget.Button; +import android.widget.TextView; +import android.widget.Toast; +import android.widget.VideoView; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.base.activity.BaseActivity; +import com.aserbao.androidcustomcamera.base.utils.FileUtils; +import com.aserbao.androidcustomcamera.blocks.mediaExtractor.primary.decoder.DecoderAudioAndGetDb; + +import java.io.File; +import java.io.IOException; +import java.lang.ref.WeakReference; +import java.nio.ByteBuffer; + +import VideoHandle.EpEditor; +import VideoHandle.OnEditorListener; +import butterknife.BindView; +import butterknife.OnClick; + +import static android.media.MediaFormat.MIMETYPE_AUDIO_AAC; +import static android.media.MediaFormat.MIMETYPE_AUDIO_MPEG; + +public class CreateVideoAddAudioToMp4 extends BaseActivity { + private static final String TAG = "PrimaryMediaCodecActivi"; + private static final String MIME_TYPE = "video/avc"; + private static final int WIDTH = 720; + private static final int HEIGHT = 1280; + private static final int BIT_RATE = 4000000; + private static final int FRAMES_PER_SECOND = 30; + private static final int IFRAME_INTERVAL = 5; + + private static final int NUM_FRAMES = 1 * 100; + private static final int START_RECORDING = 0; + private static final int STOP_RECORDING = 1; + + @BindView(R.id.btn_recording) + Button mBtnRecording; + @BindView(R.id.btn_watch) + 
Button mBtnWatch; + @BindView(R.id.primary_mc_tv) + TextView mPrimaryMcTv; + public MediaCodec.BufferInfo mBufferInfo; + public MediaCodec mMediaCodec; + @BindView(R.id.primary_vv) + VideoView mPrimaryVv; + private Surface mInputSurface; + public MediaMuxer mMuxer; + private boolean mMuxerStarted; + private int mTrackIndex; + private long mFakePts; + private boolean isRecording; + + private int cuurFrame = 0; + + private MyHanlder mMyHanlder = new MyHanlder(this); + public File mOutputFile; + private double mVolume; + private float mMusictime; + @OnClick({R.id.btn_recording, R.id.btn_watch}) + public void onViewClicked(View view) { + switch (view.getId()) { + case R.id.btn_recording: + String path = Environment.getExternalStorageDirectory().getAbsolutePath(); + if (mBtnRecording.getText().equals("开始录制")) { + try { +// mOutputFile = new File(Environment.getExternalStorageDirectory().getAbsolutePath(), System.currentTimeMillis() + ".mp4"); + mOutputFile = new File(FileUtils.getStorageMp4("PrimaryMediaCodecActivity")); + startRecording(mOutputFile); + mPrimaryMcTv.setText("文件保存路径为:" + mOutputFile.toString()); + mBtnRecording.setText("停止录制"); + isRecording = true; + } catch (IOException e) { + e.printStackTrace(); + mBtnRecording.setText("出现异常了,请查明原因"); + } + } else if (mBtnRecording.getText().equals("停止录制")) { + mBtnRecording.setText("开始录制"); + stopRecording(); + } +// new DecoderAndGetAudioDb().start(path + "/own.m4a", MIMETYPE_AUDIO_AAC, new DecoderAndGetAudioDb.IGetVideoDbCallBackListener() { + new DecoderAndGetAudioDb().start(path + "/five.mp3", MIMETYPE_AUDIO_MPEG, new DecoderAndGetAudioDb.DbCallBackListener() { +// new DecoderAndGetAudioDb().start(path + "/dj_dance.mp3", MIMETYPE_AUDIO_MPEG, new DecoderAndGetAudioDb.IGetVideoDbCallBackListener() { + @Override + public void cuurentFrequenty(final int cuurentFrequenty, final double volume, final float decoderTime) { + runOnUiThread(new Runnable() { + @Override + public void run() { + if(volume != -1 && 
isRecording) { + mVolume = volume / 100; + mMusictime = decoderTime; + int i = cuurFrame * 1000 / FRAMES_PER_SECOND; + if(decoderTime > i) { + update(); + Log.e(TAG, "run:volume = " +mVolume + " decoderTime = " + decoderTime + " 第"+ cuurFrame + "帧时间为:"+ i); + } + }else{ + Log.e(TAG, "run: out" ); + if (isRecording) { + drainEncoder(true); + } + releaseEncoder(); + } + } + }); + } + }); + break; + case R.id.btn_watch: + /* String absolutePath = mOutputFile.getAbsolutePath(); + if (!TextUtils.isEmpty(absolutePath)) { + if(mBtnWatch.getText().equals("查看视频")) { + mBtnWatch.setText("删除视频"); + mPrimaryVv.setVideoPath(absolutePath); + mPrimaryVv.start(); + }else if(mBtnWatch.getText().equals("删除视频")){ + if (mOutputFile.exists()){ + mOutputFile.delete(); + mBtnWatch.setText("查看视频"); + } + } + }else{ + Toast.makeText(this, "请先录制", Toast.LENGTH_SHORT).show(); + }*/ + addMusicToMp4(); + break; + } + } + + public void update(){ + drainEncoder(false); + generateFrame(cuurFrame); + cuurFrame++; + Log.e(TAG, "handleMessage: " + cuurFrame); + } + private Bitmap mBitmap; + private static class MyHanlder extends Handler { + private WeakReference mPrimaryMediaCodecActivityWeakReference; + + public MyHanlder(CreateVideoAddAudioToMp4 activity) { + mPrimaryMediaCodecActivityWeakReference = new WeakReference(activity); + } + + @Override + public void handleMessage(Message msg) { + CreateVideoAddAudioToMp4 activity = mPrimaryMediaCodecActivityWeakReference.get(); + if (activity != null) { + switch (msg.what) { + case START_RECORDING: + activity.update(); + break; + case STOP_RECORDING: + Log.e(TAG, "handleMessage: STOP_RECORDING"); + activity.drainEncoder(true); + activity.mBtnRecording.setText("开始录制"); + activity.releaseEncoder(); + break; + } + } + } + } + + @Override + protected int setLayoutId() { + return R.layout.activity_primary_media_codec; + } + + + private void startRecording(File outputFile) throws IOException { + cuurFrame = 0; + mBitmap = 
BitmapFactory.decodeResource(getResources(),R.drawable.katong); + prepareEncoder(outputFile); + mMyHanlder.sendEmptyMessage(START_RECORDING); + } + + private void stopRecording() { + mMyHanlder.removeMessages(START_RECORDING); + mMyHanlder.sendEmptyMessage(STOP_RECORDING); + isRecording = false; + } + + /** + * 准备视频编码器,muxer,和一个输入表面。 + */ + private void prepareEncoder(File outputFile) throws IOException { + mBufferInfo = new MediaCodec.BufferInfo(); + MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, WIDTH, HEIGHT); + + //1. 设置一些属性。没有指定其中的一些可能会导致MediaCodec.configure()调用抛出一个无用的异常。 + format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); + format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);//比特率(比特率越高,音视频质量越高,编码文件越大) + format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAMES_PER_SECOND);//设置帧速 + format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);//设置关键帧间隔时间 + + //2.创建一个MediaCodec编码器,并配置格式。获取一个我们可以用于输入的表面,并将其封装到处理EGL工作的类中。 + mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE); + mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); + mInputSurface = mMediaCodec.createInputSurface(); + mMediaCodec.start(); + //3. 
创建一个MediaMuxer。我们不能在这里添加视频跟踪和开始合成,因为我们的MediaFormat里面没有缓冲数据。 + // 只有在编码器开始处理数据后才能从编码器获得这些数据。我们实际上对多路复用音频没有兴趣。我们只是想要 + // 将从MediaCodec获得的原始H.264基本流转换为.mp4文件。 +// mMuxer = new MediaMuxer(Environment.getExternalStorageDirectory().getAbsolutePath() + "/output_aserbao.mp4", MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); + String absolutePath = Environment.getExternalStorageDirectory().getAbsolutePath(); + mMuxer = new MediaMuxer(absolutePath+ "/input_aserbao1.mp4", MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); + + mMuxerStarted = false; + mTrackIndex = -1; + } + + private void drainEncoder(boolean endOfStream) { + final int TIMEOUT_USEC = 10000; + if (endOfStream) { + mMediaCodec.signalEndOfInputStream();//在输入信号end-of-stream。相当于提交一个空缓冲区。视频编码完结 + } + ByteBuffer[] encoderOutputBuffers = mMediaCodec.getOutputBuffers(); + while (true) { + int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC); + Log.e(TAG, "drainEncoder: " + outputBufferIndex); + if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {//没有可以输出的数据使用时 + if (!endOfStream) { + break; // out of while + } + } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { + //输出缓冲区已经更改,客户端必须引用新的 + encoderOutputBuffers = mMediaCodec.getOutputBuffers(); + } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { + //输出格式发生了变化,后续数据将使用新的数据格式。 + if (mMuxerStarted) { + throw new RuntimeException("format changed twice"); + } + MediaFormat newFormat = mMediaCodec.getOutputFormat(); + mTrackIndex = mMuxer.addTrack(newFormat); + mMuxer.start(); + mMuxerStarted = true; + } else if (outputBufferIndex < 0) { + } else { + ByteBuffer encodedData = encoderOutputBuffers[outputBufferIndex]; + if (encodedData == null) { + throw new RuntimeException("encoderOutputBuffer " + outputBufferIndex + + " was null"); + } + if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { + //当我们得到的时候,编解码器的配置数据被拉出来,并给了muxer。这时候可以忽略。 + mBufferInfo.size = 0; + } + if (mBufferInfo.size != 
0) { + if (!mMuxerStarted) { + throw new RuntimeException("muxer hasn't started"); + } + //调整ByteBuffer值以匹配BufferInfo。 + encodedData.position(mBufferInfo.offset); + encodedData.limit(mBufferInfo.offset + mBufferInfo.size); + mBufferInfo.presentationTimeUs = mFakePts; + mFakePts += 1000000L / FRAMES_PER_SECOND; + + mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo); + } + mMediaCodec.releaseOutputBuffer(outputBufferIndex, false); + if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { + if (!endOfStream) { + Log.e(TAG, "意外结束"); + } else { + Toast.makeText(this, "已完成……", Toast.LENGTH_SHORT).show(); +// + } + isRecording = false; + break; + } + } + } + } + private void generateFrame(int frameNum){ + Canvas canvas = mInputSurface.lockCanvas(null); + Paint paint = new Paint(); + try { + int width = canvas.getWidth(); + int height = canvas.getHeight(); + int color1 = changeHue((float) mVolume); + canvas.drawColor(color1); + paint.setTextSize(100); + paint.setColor(0xff000000); + canvas.drawText("第"+ String.valueOf(frameNum) + "帧",width/2,height/2,paint); + Rect srcRect = new Rect(0, 0, mBitmap.getWidth(), mBitmap.getHeight()); + int margain = 30; + Rect decRect = new Rect(margain, margain, width - margain, height-margain); + canvas.drawBitmap(mBitmap,srcRect,decRect,paint); + + int roundMargain = 60; + int roundHeight = 300; + int roundRadius = 25; + int roundLineWidth = 10; + paint.setStyle(Paint.Style.FILL);//充满 + paint.setAntiAlias(true);// 设置画笔的锯齿效果 + RectF roundRect1 = new RectF(roundMargain - roundLineWidth,roundMargain - roundLineWidth,width - roundMargain + roundLineWidth,roundHeight + roundMargain + roundLineWidth); + paint.setColor(Color.BLACK); + canvas.drawRoundRect(roundRect1,roundRadius,roundRadius,paint); + paint.setColor(color1); + RectF roundRect2 = new RectF(roundMargain,roundMargain,width - roundMargain,roundHeight + roundMargain); + canvas.drawRoundRect(roundRect2,roundRadius,roundRadius,paint); + +// 
paint.setStyle(Paint.Style.STROKE);//充满 + int timeMargain = roundMargain + 50; + String sTime = "2018/12/29 00:39"; + paint.setTextAlign(Paint.Align.CENTER); + paint.setTextSize(40); + paint.setColor(Color.BLACK); + canvas.drawText(sTime,width/2,timeMargain,paint); + + int soundMargain = timeMargain + 80; + String soundTime = "party 是我家"; + String soundTime2 = "party party 是我家"; + paint.setTextAlign(Paint.Align.CENTER); + paint.setTextSize(80); + canvas.drawText(soundTime,width/2,soundMargain,paint); + canvas.drawText(soundTime2,width/2,soundMargain + 80,paint); + + } finally { + mInputSurface.unlockCanvasAndPost(canvas); + } + + } + + /** + * @param progress 0 ~ 360 + * @return + */ + public int changeHue(float progress){ + float[] hsbVals = new float[3]; + int inputColor = Color.parseColor("#FFF757"); + Color.colorToHSV(inputColor,hsbVals); + float v = (float) progress / (float) 360; + hsbVals[0] = progress; + int color = Color.HSVToColor(hsbVals); + return color; + } + + private void releaseEncoder() { + if (mMediaCodec != null) { + mMediaCodec.stop(); + mMediaCodec.release(); + mMediaCodec = null; + } + if (mInputSurface != null) { + mInputSurface.release(); + mInputSurface = null; + } + if (mMuxer != null) { + mMuxer.stop(); + mMuxer.release(); + mMuxer = null; + } + } + + private String outputVideoPath; + public void addMusicToMp4(){ + String absolutePath = Environment.getExternalStorageDirectory().getAbsolutePath(); + String inputMusic = absolutePath + "/five.mp3"; + outputVideoPath = absolutePath + "/output_aserbao1.mp4"; + String inputVideo = absolutePath+ "/input_aserbao1.mp4"; + String cmd = "-y -i "+ inputVideo + " -ss 0 -t "+ 35 + " -i "+ inputMusic + " -acodec copy -vcodec copy "+ outputVideoPath; + EpEditor.execCmd(cmd, 10000,new OnEditorListener() { + @Override + public void onSuccess() { + Log.e(TAG, "sssshahhah onSuccess: " ); + } + + @Override + public void onFailure() { + Log.e(TAG, "sssshahhah onFailure: " ); + } + + @Override + public void 
onProgress(float v) { + } + }); + } + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaMuxer/functions/DecoderAndGetAudioDb.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaMuxer/functions/DecoderAndGetAudioDb.java new file mode 100644 index 0000000..7b58002 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaMuxer/functions/DecoderAndGetAudioDb.java @@ -0,0 +1,292 @@ +package com.aserbao.androidcustomcamera.blocks.mediaMuxer.functions; + +import android.media.AudioFormat; +import android.media.AudioManager; +import android.media.AudioTrack; +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaExtractor; +import android.media.MediaFormat; +import android.util.Log; + +import java.io.IOException; +import java.nio.ByteBuffer; + +import static java.lang.Math.log10; +import static java.lang.Math.max; + +/** + * 功能: 解码获取音频帧分贝大小 + * @author aserbao + * @date : On 2019/1/4 4:05 PM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.blocks.MediaExtractor.primary + * @Copyright: 个人版权所有 + */ +public class DecoderAndGetAudioDb { + private static final String TAG = "DecoderAudioAAC2PCMPlay"; + public DecoderAndGetAudioDb() { + } + + private DecoderAACThread mDecoderAACThread; + private byte[] mPcmData; + + public void start(String inputAudioPath,String mimeType,DbCallBackListener dbCallBackListener){ + mDbCallBackListener = dbCallBackListener; + if (mDecoderAACThread == null) { + mDecoderAACThread = new DecoderAACThread(inputAudioPath,mimeType); + mDecoderAACThread.setRunning(true); + try { + mDecoderAACThread.start(); + } catch (Exception e) { + Log.w(TAG, "decode already start"); + } + } + + } + + public void stop() { + if (mDecoderAACThread != null) { + mDecoderAACThread.setRunning(false); + mDecoderAACThread = null; + } + } + + public class DecoderAACThread extends Thread{ + String MIME_TYPE = 
"audio/mp4a-latm"; + int KEY_CHANNEL_COUNT = 2; + int KEY_SAMPLE_RATE = 44100; + int KEY_BIT_RATE = 64000; + int KEY_AAC_PROFILE = MediaCodecInfo.CodecProfileLevel.AACObjectLC; + int WAIT_TIME = 10000; + int AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT; + int CHANNEL_MODE = AudioFormat.CHANNEL_IN_STEREO; + int BUFFFER_SIZE = 2048; + + private String mInputAudioPath;//音频路径 + private String mInputAudioMimeType; + private MediaExtractor mMediaExtractor; + private MediaCodec mMediaCodec; + private AudioTrack mPcmPlayer; + private MediaCodec.BufferInfo mBufferInfo; + private boolean running; + + private void setRunning(boolean running) { + this.running = running; + } + + public DecoderAACThread(String inputAudioPath,String mimeType) { + mInputAudioPath = inputAudioPath; + mInputAudioMimeType = mimeType; + } + + @Override + public void run() { + super.run(); + if (!prepare()) { + running = false; + Log.e(TAG, "音频解码器初始化失败"); + return; + } + decode(); + release(); + } + + + public boolean prepare(){ + mBufferInfo = new MediaCodec.BufferInfo(); + mMediaExtractor = new MediaExtractor(); + mPcmPlayer = new AudioTrack(AudioManager.STREAM_MUSIC, KEY_SAMPLE_RATE, + AudioFormat.CHANNEL_OUT_STEREO, + AUDIO_FORMAT, BUFFFER_SIZE, AudioTrack.MODE_STREAM); + mPcmPlayer.play(); + try { + mMediaExtractor.setDataSource(mInputAudioPath); + int audioIndex = -1;//音频通道 + int trackCount = mMediaExtractor.getTrackCount();//获取通道总数 + for (int i = 0; i < trackCount; i++) { + MediaFormat trackFormat = mMediaExtractor.getTrackFormat(i); + String string = trackFormat.getString(MediaFormat.KEY_MIME); + if (string.startsWith("audio/")) { + audioIndex = i; + }//获取音频通道 + } + mMediaExtractor.selectTrack(audioIndex);//切换到音频通道 + MediaFormat mediaFormat = mMediaExtractor.getTrackFormat(audioIndex); + mMediaCodec = MediaCodec.createDecoderByType(mInputAudioMimeType); +/* mediaFormat.setString(MediaFormat.KEY_MIME, MIME_TYPE); + mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, KEY_CHANNEL_COUNT); + 
mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, KEY_SAMPLE_RATE); + mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, KEY_BIT_RATE); + mediaFormat.setInteger(MediaFormat.KEY_IS_ADTS, 1); + mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, KEY_AAC_PROFILE); + ByteBuffer key(暂时不了解该参数的含义,但必须设置) + byte[] data = new byte[]{(byte) 0x11, (byte) 0x90}; + ByteBuffer csd_0 = ByteBuffer.wrap(data); + mediaFormat.setByteBuffer("csd-0", csd_0);*/ + mMediaCodec.configure(mediaFormat, null, null, 0); + } catch (IOException e) { + e.printStackTrace(); + return false; + } + if (mMediaCodec == null) { + Log.e(TAG, "create mediaDecode failed"); + return false; + } + mMediaCodec.start(); + return true; + } + + + private void decode() { + while (running) { + int inputIndex = mMediaCodec.dequeueInputBuffer(-1); + if (inputIndex >= 0) { + ByteBuffer inputBuffer = mMediaCodec.getInputBuffer(inputIndex); + if (inputBuffer == null) { + return; + } + inputBuffer.clear(); + int sampleSize = mMediaExtractor.readSampleData(inputBuffer, 0); + if (sampleSize < 0) { + mMediaCodec.queueInputBuffer(inputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM); + running = false; + } else { + mMediaCodec.queueInputBuffer(inputIndex, 0, sampleSize, mMediaExtractor.getSampleTime(), 0); + mMediaExtractor.advance(); + } + } + int outputIndex = mMediaCodec.dequeueOutputBuffer(mBufferInfo, WAIT_TIME); + ByteBuffer outputBuffer; + if (outputIndex >= 0) { + // Simply ignore codec config buffers. 
+ if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { + Log.i(TAG, "audio encoder: codec config buffer"); + mMediaCodec.releaseOutputBuffer(outputIndex, false); + continue; + } + if (mBufferInfo.size != 0) { + outputBuffer = mMediaCodec.getOutputBuffer(outputIndex); + if (mPcmData == null || mPcmData.length < mBufferInfo.size) { + mPcmData = new byte[mBufferInfo.size]; + } + if (outputBuffer != null) { + outputBuffer.get(mPcmData, 0, mBufferInfo.size); + outputBuffer.clear(); + } + float v = mMediaExtractor.getSampleTime() / (float) (1000); + +// calcFrequency(mPcmData,KEY_SAMPLE_RATE); + calcFrequency2(mPcmData,v); + Log.e(TAG, "解析到的时间点为:"+ v + "ms decode: mPcmData.length = " + mPcmData.length + " mBufferInfo " + mBufferInfo.toString()); + mPcmPlayer.write(mPcmData, 0, mBufferInfo.size); + } + mMediaCodec.releaseOutputBuffer(outputIndex, false); + if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { + Log.i(TAG, "saw output EOS."); + } + } + } + mDbCallBackListener.cuurentFrequenty(-1,-1,-1); + mMediaExtractor.release(); + Log.e(TAG, "decode: maxVolume = " + maxVolume ); + } + + /** + * 释放资源 + */ + private void release() { + if (mMediaCodec != null) { + mMediaCodec.stop(); + mMediaCodec.release(); + } + if (mPcmPlayer != null) { + mPcmPlayer.stop(); + mPcmPlayer.release(); + mPcmPlayer = null; + } + } + } + + public void calcFrequency(byte[] fft, int samplingRate){ + float[] magnitudes = new float[fft.length / 2]; + int max = 0; + for (int i = 0; i < magnitudes.length; i++) { + magnitudes[i] = (float) Math.hypot(fft[2 * i], fft[2 * i + 1]); + if (magnitudes[max] < magnitudes[i]) { + max = i; + } + } + + int currentFrequency = max * samplingRate / fft.length; + if (currentFrequency<0){ + return; + } + long v = 0; + for (int i = 0; i < fft.length; i++) { + v += Math.pow(fft[i], 2); + } + + double volume = 10 * log10(v / (double) fft.length); + Log.e(TAG, "calcFrequency: currentFrequency = " + currentFrequency + " volume = " + 
volume + " max = " + max ); + } + + + /** + * 获取的值范围0 ~ 24366 + * @param pcmdata + * @param v + */ + public void calcFrequency2(byte[] pcmdata, float v) { + short[] music = (!isBigEnd()) ? byteArray2ShortArrayLittle( pcmdata, pcmdata.length / 2) : + byteArray2ShortArrayBig( pcmdata, pcmdata.length / 2); + calculateRealVolume(music,music.length,v); + } + + private boolean isBigEnd() { + short i = 0x1; + boolean bRet = ((i >> 8) == 0x1); + return bRet; + } + + private short[] byteArray2ShortArrayBig(byte[] data, int items) { + short[] retVal = new short[items]; + for (int i = 0; i < retVal.length; i++) + retVal[i] = (short) ((data[i * 2 + 1] & 0xff) | (data[i * 2] & 0xff) << 8); + + return retVal; + } + + private short[] byteArray2ShortArrayLittle(byte[] data, int items) { + short[] retVal = new short[items]; + for (int i = 0; i < retVal.length; i++) + retVal[i] = (short) ((data[i * 2] & 0xff) | (data[i * 2 + 1] & 0xff) << 8); + + return retVal; + } + + private int maxVolume = 0; + protected void calculateRealVolume(short[] buffer, int readSize, float v) { + double sum = 0; + for (int i = 0; i < readSize; i++) { + // 这里没有做运算的优化,为了更加清晰的展示代码 + sum += buffer[i] * buffer[i]; + } + if (readSize > 0) { + double amplitude = sum / readSize; + int mVolume = (int) Math.sqrt(amplitude); + Log.e(TAG, "calculateRealVolume: " + mVolume); + maxVolume = Math.max(mVolume,maxVolume); + mDbCallBackListener.cuurentFrequenty(-1,mVolume,v); + } + } + + private DbCallBackListener mDbCallBackListener; + public interface DbCallBackListener { + void cuurentFrequenty(int cuurentFrequenty, double volume,float cuurTime); + } + + + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaMuxer/primary/MuxerVoiceAndVideoToMp4.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaMuxer/primary/MuxerVoiceAndVideoToMp4.java new file mode 100644 index 0000000..9b1e53e --- /dev/null +++ 
b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaMuxer/primary/MuxerVoiceAndVideoToMp4.java @@ -0,0 +1,128 @@ +package com.aserbao.androidcustomcamera.blocks.mediaMuxer.primary; + +import android.media.MediaCodec; +import android.media.MediaExtractor; +import android.media.MediaFormat; +import android.media.MediaMuxer; +import android.util.Log; + +import com.aserbao.androidcustomcamera.blocks.interfaces.ICallBackListener; + +import java.io.IOException; +import java.nio.ByteBuffer; + +/** + * 功能: 合成aac音频和视频画面成mp4 + * + * @author aserbao + * @date : On 2019/1/7 2:52 PM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.blocks.mediaMuxer.primary + * @Copyright: 个人版权所有 + */ +public class MuxerVoiceAndVideoToMp4 { + private String inputVoicePath; + private String inputVideoPath; + private String outputVideoPath; + private ICallBackListener mICallBackListener; + + public MuxerVoiceAndVideoToMp4(String inputVoicePath, String inputVideoPath, String outputVideoPath, ICallBackListener mICallBackListener) { + this.inputVoicePath = inputVoicePath; + this.inputVideoPath = inputVideoPath; + this.outputVideoPath = outputVideoPath; + this.mICallBackListener = mICallBackListener; + } + + + public void start() { + try { + MediaExtractor videoExtractor = new MediaExtractor(); + videoExtractor.setDataSource(inputVideoPath); + MediaFormat videoFormat = null; + int videoTrackIndex = -1; + int videoTrackCount = videoExtractor.getTrackCount(); + for (int i = 0; i < videoTrackCount; i++) { + videoFormat = videoExtractor.getTrackFormat(i); + String mimeType = videoFormat.getString(MediaFormat.KEY_MIME); + if (mimeType.startsWith("video/")) { + videoTrackIndex = i; + break; + } + } + MediaExtractor audioExtractor = new MediaExtractor(); + audioExtractor.setDataSource(inputVoicePath); + MediaFormat audioFormat = null; + int audioTrackIndex = -1; + int audioTrackCount = audioExtractor.getTrackCount(); + for (int i = 0; i < 
audioTrackCount; i++) { + audioFormat = audioExtractor.getTrackFormat(i); + String mimeType = audioFormat.getString(MediaFormat.KEY_MIME); + if (mimeType.startsWith("audio/")) { + audioTrackIndex = i; + break; + } + } + + videoExtractor.selectTrack(videoTrackIndex); + audioExtractor.selectTrack(audioTrackIndex); + + MediaCodec.BufferInfo videoBufferInfo = new MediaCodec.BufferInfo(); + MediaCodec.BufferInfo audioBufferInfo = new MediaCodec.BufferInfo(); + + MediaMuxer mediaMuxer = new MediaMuxer(outputVideoPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); + int writeVideoTrackIndex = mediaMuxer.addTrack(videoFormat); + int writeAudioTrackIndex = mediaMuxer.addTrack(audioFormat); + mediaMuxer.start(); + ByteBuffer byteBuffer = ByteBuffer.allocate(500 * 1024); + long sampleTime = 0; + { + videoExtractor.readSampleData(byteBuffer, 0); + if (videoExtractor.getSampleFlags() == MediaExtractor.SAMPLE_FLAG_SYNC) { + videoExtractor.advance(); + } + videoExtractor.readSampleData(byteBuffer, 0); + long secondTime = videoExtractor.getSampleTime(); + videoExtractor.advance(); + long thirdTime = videoExtractor.getSampleTime(); + sampleTime = Math.abs(thirdTime - secondTime); + } + videoExtractor.unselectTrack(videoTrackIndex); + videoExtractor.selectTrack(videoTrackIndex); + + while (true) { + int readVideoSampleSize = videoExtractor.readSampleData(byteBuffer, 0); + if (readVideoSampleSize < 0) { + break; + } + videoBufferInfo.size = readVideoSampleSize; + videoBufferInfo.presentationTimeUs += sampleTime; + videoBufferInfo.offset = 0; + videoBufferInfo.flags = videoExtractor.getSampleFlags(); + mediaMuxer.writeSampleData(writeVideoTrackIndex, byteBuffer, videoBufferInfo); + videoExtractor.advance(); + + int readAudioSampleSize = audioExtractor.readSampleData(byteBuffer, 0); + if (readAudioSampleSize < 0) { + break; + } + + audioBufferInfo.size = readAudioSampleSize; + audioBufferInfo.presentationTimeUs += sampleTime; + audioBufferInfo.offset = 0; + audioBufferInfo.flags = 
videoExtractor.getSampleFlags(); + mediaMuxer.writeSampleData(writeAudioTrackIndex, byteBuffer, audioBufferInfo); + audioExtractor.advance(); + } + mediaMuxer.stop(); + mediaMuxer.release(); + videoExtractor.release(); + audioExtractor.release(); + mICallBackListener.success(); + } catch (IOException e) { + e.printStackTrace(); + mICallBackListener.failed(e); + } + } +} + diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaMuxer/primary/MuxerVoiceDbToMp4.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaMuxer/primary/MuxerVoiceDbToMp4.java new file mode 100644 index 0000000..975115a --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaMuxer/primary/MuxerVoiceDbToMp4.java @@ -0,0 +1,632 @@ +package com.aserbao.androidcustomcamera.blocks.mediaMuxer.primary; + +import android.content.res.Resources; +import android.graphics.Bitmap; +import android.graphics.BitmapFactory; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Paint; +import android.graphics.Rect; +import android.graphics.RectF; +import android.media.AudioFormat; +import android.media.AudioManager; +import android.media.AudioTrack; +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaExtractor; +import android.media.MediaFormat; +import android.media.MediaMuxer; +import android.util.Log; +import android.view.Surface; +import android.widget.Toast; + +import com.aserbao.androidcustomcamera.R; + +import java.io.IOException; +import java.nio.ByteBuffer; + +import static java.lang.Math.log10; + +/** + * 功能: 解码获取音频帧分贝大小 + * @author aserbao + * @date : On 2019/1/4 4:05 PM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.blocks.MediaExtractor.primary + * @Copyright: 个人版权所有 + */ +public class MuxerVoiceDbToMp4 { + private static final String TAG = "DecoderAudioAAC2PCMPlay"; + + private Bitmap mBitmap; + private 
EncoderThread mEncoderThread; + + public MuxerVoiceDbToMp4() { + mBitmap = BitmapFactory.decodeResource(Resources.getSystem(), R.drawable.katong); + } + + private DecoderAACThread mDecoderAACThread; + private byte[] mPcmData; + + public void start(String inputAudioPath,String outputVideoPath,String mimeType,DbCallBackListener dbCallBackListener){ + initMediaMux(outputVideoPath); + mDbCallBackListener = dbCallBackListener; + /*mEncoderThread = new EncoderThread(); + mEncoderThread.start(); + mEncoderThread.prepareVideo();*/ + for (int i = 0; i < 50; i++) { + if (i == 49){ + mEncoderThread.start(true,i); + }else{ + mEncoderThread.start(false,i); + } + } + + + /*if (mDecoderAACThread == null) { + mDecoderAACThread = new DecoderAACThread(inputAudioPath, outputVideoPath, mimeType, new IRefreshCallBack() { + @Override + public void refresh(boolean isEnd) { + mEncoderThread.start(isEnd,cuurFrame); + cuurFrame ++; + } + }); + mDecoderAACThread.setRunning(true); + try { + mDecoderAACThread.start(); + } catch (Exception e) { + Log.w(TAG, "decode already start"); + } + }*/ + + } + + public void stop() { + if (mDecoderAACThread != null) { + mDecoderAACThread.setRunning(false); + mDecoderAACThread = null; + } + } + String MIME_TYPE = "audio/mp4a-latm"; + int KEY_CHANNEL_COUNT = 2; + int KEY_SAMPLE_RATE = 44100; + int KEY_BIT_RATE = 64000; + int KEY_AAC_PROFILE = MediaCodecInfo.CodecProfileLevel.AACObjectLC; + int WAIT_TIME = 10000; + int AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT; + int CHANNEL_MODE = AudioFormat.CHANNEL_IN_STEREO; + int BUFFFER_SIZE = 2048; + + private String mInputAudioPath;//音频路径 + private String mInputAudioMimeType; + private MediaExtractor mAudioMediaExtractor; + private MediaCodec mAudioMediaCodec,mVideoMediaCodec; + private AudioTrack mPcmPlayer; + private MediaCodec.BufferInfo mAudioBufferInfo,mVideoBufferInfo; + private MediaMuxer mediaMuxer; + private int mWriteAudioTrackIndex,mWriteVideoTrackIndex; + private int cuurFrame =0; + + + public void 
initMediaMux(String outputVideoPath){ + try { + mediaMuxer = new MediaMuxer(outputVideoPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); + } catch (IOException e) { + e.printStackTrace(); + } + } + + public class DecoderAACThread extends Thread{ + private boolean running; + private String outputVideoPath; + private IRefreshCallBack mIRefreshCallBack; + private void setRunning(boolean running) { + this.running = running; + } + + public DecoderAACThread(String inputAudioPath,String outputVideoPath,String mimeType,IRefreshCallBack iRefreshCallBack) { + mInputAudioPath = inputAudioPath; + mInputAudioMimeType = mimeType; + this.outputVideoPath = outputVideoPath; + mIRefreshCallBack = iRefreshCallBack; + } + + @Override + public void run() { + super.run(); + if (!prepareAudio() ) { + running = false; + Log.e(TAG, "音频解码器初始化失败"); + return; + } + decode(); + release(); + } + + + public boolean prepareAudio(){ + mAudioBufferInfo = new MediaCodec.BufferInfo(); + mAudioMediaExtractor = new MediaExtractor(); + mPcmPlayer = new AudioTrack(AudioManager.STREAM_MUSIC, KEY_SAMPLE_RATE, + AudioFormat.CHANNEL_OUT_STEREO, + AUDIO_FORMAT, BUFFFER_SIZE, AudioTrack.MODE_STREAM); + mPcmPlayer.play(); + try { + mAudioMediaExtractor.setDataSource(mInputAudioPath); + int audioIndex = -1;//音频通道 + int trackCount = mAudioMediaExtractor.getTrackCount();//获取通道总数 + for (int i = 0; i < trackCount; i++) { + MediaFormat trackFormat = mAudioMediaExtractor.getTrackFormat(i); + String string = trackFormat.getString(MediaFormat.KEY_MIME); + if (string.startsWith("audio/")) { + audioIndex = i; + }//获取音频通道 + } + mAudioMediaExtractor.selectTrack(audioIndex);//切换到音频通道 + MediaFormat mediaFormat = mAudioMediaExtractor.getTrackFormat(audioIndex); + mAudioMediaCodec = MediaCodec.createDecoderByType(mInputAudioMimeType); + mAudioMediaCodec.configure(mediaFormat, null, null, 0); + mWriteAudioTrackIndex = mediaMuxer.addTrack(mediaFormat); + mediaMuxer.start(); + } catch (IOException e) { + e.printStackTrace(); + 
return false; + } + if (mAudioMediaCodec == null) { + Log.e(TAG, "create mediaDecode failed"); + return false; + } + mAudioMediaCodec.start(); + return true; + } + + private void decode() { + while (running) { + int inputIndex = mAudioMediaCodec.dequeueInputBuffer(-1); + if (inputIndex >= 0) { + ByteBuffer inputBuffer = mAudioMediaCodec.getInputBuffer(inputIndex); + if (inputBuffer == null) { + return; + } + inputBuffer.clear(); + int sampleSize = mAudioMediaExtractor.readSampleData(inputBuffer, 0); + if (sampleSize < 0) { + mAudioMediaCodec.queueInputBuffer(inputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM); + running = false; + } else { + mAudioMediaCodec.queueInputBuffer(inputIndex, 0, sampleSize, mAudioMediaExtractor.getSampleTime(), 0); + mAudioMediaExtractor.advance(); + } + } + int outputIndex = mAudioMediaCodec.dequeueOutputBuffer(mAudioBufferInfo, WAIT_TIME); + ByteBuffer outputBuffer; + if (outputIndex >= 0) { + // Simply ignore codec config buffers. + if ((mAudioBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { + Log.i(TAG, "audio encoder: codec config buffer"); + mAudioMediaCodec.releaseOutputBuffer(outputIndex, false); + continue; + } + if (mAudioBufferInfo.size != 0) { + outputBuffer = mAudioMediaCodec.getOutputBuffer(outputIndex); + mediaMuxer.writeSampleData(mWriteAudioTrackIndex,outputBuffer,mAudioBufferInfo); + if (mPcmData == null || mPcmData.length < mAudioBufferInfo.size) { + mPcmData = new byte[mAudioBufferInfo.size]; + } + if (outputBuffer != null) { + outputBuffer.get(mPcmData, 0, mAudioBufferInfo.size); + outputBuffer.clear(); + } + float v = mAudioMediaExtractor.getSampleTime() / (float) (1000 * 1000); + calcFrequency(mPcmData,KEY_SAMPLE_RATE); + +// Log.e(TAG, "解析到的时间点为:"+ v + "s decode: mPcmData.length = " + mPcmData.length + " mAudioBufferInfo " + mAudioBufferInfo.toString()); + mPcmPlayer.write(mPcmData, 0, mAudioBufferInfo.size); + } + mIRefreshCallBack.refresh(false); + 
mAudioMediaCodec.releaseOutputBuffer(outputIndex, false); + if ((mAudioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { + Log.i(TAG, "saw output EOS."); + } + }else{ + mIRefreshCallBack.refresh(true); + Log.e(TAG, "decode: 播完了" ); + } + } + mAudioMediaExtractor.release(); + } + + /** + * 释放资源 + */ + private void release() { + if (mAudioMediaCodec != null) { + mAudioMediaCodec.stop(); + mAudioMediaCodec.release(); + } + if (mPcmPlayer != null) { + mPcmPlayer.stop(); + mPcmPlayer.release(); + mPcmPlayer = null; + } + } + public void calcFrequency(byte[] fft, int samplingRate){ + float[] magnitudes = new float[fft.length / 2]; + int max = 0; + for (int i = 0; i < magnitudes.length; i++) { + magnitudes[i] = (float) Math.hypot(fft[2 * i], fft[2 * i + 1]); + if (magnitudes[max] < magnitudes[i]) { + max = i; + } + } + + int currentFrequency = max * samplingRate / fft.length; + if (currentFrequency<0){ + return; + } + long v = 0; + for (int i = 0; i < fft.length; i++) { + v += Math.pow(fft[i], 2); + } + + double volume = 10 * log10(v / (double) fft.length); + mDbCallBackListener.cuurentFrequenty(currentFrequency,volume); + Log.e(TAG, "calcFrequency: currentFrequency = " + currentFrequency + " volume = " + volume + " max = " + max ); + } + + } + + public class EncoderThread extends Thread{ + @Override + public void run() { + super.run(); +// prepareVideo(); + } + public void start(boolean isEnd,int frameNum){ + if (isEnd){ + drainEncoder(isEnd); + generateFrame(frameNum); + }else{ + drainEncoder(true); + releaseEncoder(); + } + } + + private Surface mInputSurface; + private static final int WIDTH = 720; + private static final int HEIGHT = 1280; + private static final int BIT_RATE = 4000000; + private static final int FRAMES_PER_SECOND = 4; + private static final int IFRAME_INTERVAL = 5; + public boolean prepareVideo(){ + try { + mVideoBufferInfo = new MediaCodec.BufferInfo(); + MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, WIDTH, HEIGHT); + + 
//1. 设置一些属性。没有指定其中的一些可能会导致MediaCodec.configure()调用抛出一个无用的异常。 + format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); + format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);//比特率(比特率越高,音视频质量越高,编码文件越大) + format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAMES_PER_SECOND);//设置帧速 + format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);//设置关键帧间隔时间 + + //2.创建一个MediaCodec编码器,并配置格式。获取一个我们可以用于输入的表面,并将其封装到处理EGL工作的类中。 + mVideoMediaCodec = MediaCodec.createEncoderByType("video/avc"); + mVideoMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); + mInputSurface = mVideoMediaCodec.createInputSurface(); + mVideoMediaCodec.start(); + + + MediaFormat newFormat = mVideoMediaCodec.getOutputFormat(); + mWriteVideoTrackIndex = mediaMuxer.addTrack(newFormat); + mediaMuxer.start(); + } catch (IOException e) { + e.printStackTrace(); + } + return true; + } + + private long mFakePts; + private void drainEncoder(boolean endOfStream) { + final int TIMEOUT_USEC = 10000; + if (endOfStream) { + mVideoMediaCodec.signalEndOfInputStream();//在输入信号end-of-stream。相当于提交一个空缓冲区。视频编码完结 + } + ByteBuffer[] encoderOutputBuffers = mVideoMediaCodec.getOutputBuffers(); + while (true) { + int outputBufferIndex = mVideoMediaCodec.dequeueOutputBuffer(mVideoBufferInfo, TIMEOUT_USEC); + Log.e(TAG, "drainEncoder: " + outputBufferIndex); + if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {//没有可以输出的数据使用时 + if (!endOfStream) { + break; // out of while + } + } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { + //输出缓冲区已经更改,客户端必须引用新的 + encoderOutputBuffers = mVideoMediaCodec.getOutputBuffers(); + } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { + + } else if (outputBufferIndex < 0) { + } else { + ByteBuffer encodedData = encoderOutputBuffers[outputBufferIndex]; + if (encodedData == null) { + throw new RuntimeException("encoderOutputBuffer " + outputBufferIndex + + " was null"); + } + if 
((mVideoBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { + //当我们得到的时候,编解码器的配置数据被拉出来,并给了muxer。这时候可以忽略。 + mVideoBufferInfo.size = 0; + } + if (mVideoBufferInfo.size != 0) { + //调整ByteBuffer值以匹配BufferInfo。 + encodedData.position(mVideoBufferInfo.offset); + encodedData.limit(mVideoBufferInfo.offset + mVideoBufferInfo.size); + mVideoBufferInfo.presentationTimeUs = mFakePts; + mFakePts += 1000000L / FRAMES_PER_SECOND; + + mediaMuxer.writeSampleData(mWriteVideoTrackIndex, encodedData, mVideoBufferInfo); + } + mVideoMediaCodec.releaseOutputBuffer(outputBufferIndex, false); + if ((mVideoBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { + if (!endOfStream) { + Log.e(TAG, "意外结束"); + } else { + Log.e(TAG, "正常结束"); + } + break; + } + } + } + } + + + private void generateFrame(int frameNum){ + + Canvas canvas = mInputSurface.lockCanvas(null); + Paint paint = new Paint(); + try { + int width = canvas.getWidth(); + int height = canvas.getHeight(); + String color = "#FFCA39"; + if (frameNum %2 == 0 ){ + color = "#FFCA39"; + }else{ + color = "#FFF353"; + } + int color1 = Color.parseColor(color); + canvas.drawColor(color1); + paint.setTextSize(100); + paint.setColor(0xff000000); + canvas.drawText("第"+ String.valueOf(frameNum) + "帧",width/2,height/2,paint); + Rect srcRect = new Rect(0, 0, mBitmap.getWidth(), mBitmap.getHeight()); + int margain = 30; + Rect decRect = new Rect(margain, margain, width - margain, height-margain); + canvas.drawBitmap(mBitmap,srcRect,decRect,paint); + + int roundMargain = 60; + int roundHeight = 300; + int roundRadius = 25; + int roundLineWidth = 10; + paint.setStyle(Paint.Style.FILL);//充满 + paint.setAntiAlias(true);// 设置画笔的锯齿效果 + RectF roundRect1 = new RectF(roundMargain - roundLineWidth,roundMargain - roundLineWidth,width - roundMargain + roundLineWidth,roundHeight + roundMargain + roundLineWidth); + paint.setColor(Color.BLACK); + canvas.drawRoundRect(roundRect1,roundRadius,roundRadius,paint); + paint.setColor(color1); + 
RectF roundRect2 = new RectF(roundMargain,roundMargain,width - roundMargain,roundHeight + roundMargain); + canvas.drawRoundRect(roundRect2,roundRadius,roundRadius,paint); + +// paint.setStyle(Paint.Style.STROKE);//充满 + int timeMargain = roundMargain + 50; + String sTime = "2018/12/29 00:39"; + paint.setTextAlign(Paint.Align.CENTER); + paint.setTextSize(40); + paint.setColor(Color.BLACK); + canvas.drawText(sTime,width/2,timeMargain,paint); + + int soundMargain = timeMargain + 80; + String soundTime = "party 是我家"; + String soundTime2 = "party party 是我家"; + paint.setTextAlign(Paint.Align.CENTER); + paint.setTextSize(80); + canvas.drawText(soundTime,width/2,soundMargain,paint); + canvas.drawText(soundTime2,width/2,soundMargain + 80,paint); + + } finally { + mInputSurface.unlockCanvasAndPost(canvas); + } + + } + private void releaseEncoder() { + if (mVideoMediaCodec != null) { + mVideoMediaCodec.stop(); + mVideoMediaCodec.release(); + mVideoMediaCodec = null; + } + + if (mInputSurface != null) { + mInputSurface.release(); + mInputSurface = null; + } + if (mediaMuxer != null) { + mediaMuxer.stop(); + mediaMuxer.release(); + mediaMuxer = null; + } + } + } + + private Surface mInputSurface; + private static final int WIDTH = 720; + private static final int HEIGHT = 1280; + private static final int BIT_RATE = 4000000; + private static final int FRAMES_PER_SECOND = 4; + private static final int IFRAME_INTERVAL = 5; + public boolean prepareVideo(){ + try { + mVideoBufferInfo = new MediaCodec.BufferInfo(); + MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, WIDTH, HEIGHT); + + //1. 
设置一些属性。没有指定其中的一些可能会导致MediaCodec.configure()调用抛出一个无用的异常。 + format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); + format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);//比特率(比特率越高,音视频质量越高,编码文件越大) + format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAMES_PER_SECOND);//设置帧速 + format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);//设置关键帧间隔时间 + + //2.创建一个MediaCodec编码器,并配置格式。获取一个我们可以用于输入的表面,并将其封装到处理EGL工作的类中。 + mVideoMediaCodec = MediaCodec.createEncoderByType("video/avc"); + mVideoMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); + mInputSurface = mVideoMediaCodec.createInputSurface(); + mVideoMediaCodec.start(); + + + MediaFormat newFormat = mVideoMediaCodec.getOutputFormat(); + mWriteVideoTrackIndex = mediaMuxer.addTrack(newFormat); + mediaMuxer.start(); + } catch (IOException e) { + e.printStackTrace(); + } + return true; + } + + private long mFakePts; + private void drainEncoder(boolean endOfStream) { + final int TIMEOUT_USEC = 10000; + if (endOfStream) { + mVideoMediaCodec.signalEndOfInputStream();//在输入信号end-of-stream。相当于提交一个空缓冲区。视频编码完结 + } + ByteBuffer[] encoderOutputBuffers = mVideoMediaCodec.getOutputBuffers(); + while (true) { + int outputBufferIndex = mVideoMediaCodec.dequeueOutputBuffer(mVideoBufferInfo, TIMEOUT_USEC); + Log.e(TAG, "drainEncoder: " + outputBufferIndex); + if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {//没有可以输出的数据使用时 + if (!endOfStream) { + break; // out of while + } + } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { + //输出缓冲区已经更改,客户端必须引用新的 + encoderOutputBuffers = mVideoMediaCodec.getOutputBuffers(); + } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { + + } else if (outputBufferIndex < 0) { + } else { + ByteBuffer encodedData = encoderOutputBuffers[outputBufferIndex]; + if (encodedData == null) { + throw new RuntimeException("encoderOutputBuffer " + outputBufferIndex + + " was null"); + } + if 
((mVideoBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { + //当我们得到的时候,编解码器的配置数据被拉出来,并给了muxer。这时候可以忽略。 + mVideoBufferInfo.size = 0; + } + if (mVideoBufferInfo.size != 0) { + //调整ByteBuffer值以匹配BufferInfo。 + encodedData.position(mVideoBufferInfo.offset); + encodedData.limit(mVideoBufferInfo.offset + mVideoBufferInfo.size); + mVideoBufferInfo.presentationTimeUs = mFakePts; + mFakePts += 1000000L / FRAMES_PER_SECOND; + + mediaMuxer.writeSampleData(mWriteVideoTrackIndex, encodedData, mVideoBufferInfo); + } + mVideoMediaCodec.releaseOutputBuffer(outputBufferIndex, false); + if ((mVideoBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { + if (!endOfStream) { + Log.e(TAG, "意外结束"); + } else { + Log.e(TAG, "正常结束"); + } + break; + } + } + } + } + + + private void generateFrame(int frameNum){ + + Canvas canvas = mInputSurface.lockCanvas(null); + Paint paint = new Paint(); + try { + int width = canvas.getWidth(); + int height = canvas.getHeight(); + String color = "#FFCA39"; + if (frameNum %2 == 0 ){ + color = "#FFCA39"; + }else{ + color = "#FFF353"; + } + int color1 = Color.parseColor(color); + canvas.drawColor(color1); + paint.setTextSize(100); + paint.setColor(0xff000000); + canvas.drawText("第"+ String.valueOf(frameNum) + "帧",width/2,height/2,paint); + Rect srcRect = new Rect(0, 0, mBitmap.getWidth(), mBitmap.getHeight()); + int margain = 30; + Rect decRect = new Rect(margain, margain, width - margain, height-margain); + canvas.drawBitmap(mBitmap,srcRect,decRect,paint); + + int roundMargain = 60; + int roundHeight = 300; + int roundRadius = 25; + int roundLineWidth = 10; + paint.setStyle(Paint.Style.FILL);//充满 + paint.setAntiAlias(true);// 设置画笔的锯齿效果 + RectF roundRect1 = new RectF(roundMargain - roundLineWidth,roundMargain - roundLineWidth,width - roundMargain + roundLineWidth,roundHeight + roundMargain + roundLineWidth); + paint.setColor(Color.BLACK); + canvas.drawRoundRect(roundRect1,roundRadius,roundRadius,paint); + paint.setColor(color1); + 
RectF roundRect2 = new RectF(roundMargain,roundMargain,width - roundMargain,roundHeight + roundMargain); + canvas.drawRoundRect(roundRect2,roundRadius,roundRadius,paint); + +// paint.setStyle(Paint.Style.STROKE);//充满 + int timeMargain = roundMargain + 50; + String sTime = "2018/12/29 00:39"; + paint.setTextAlign(Paint.Align.CENTER); + paint.setTextSize(40); + paint.setColor(Color.BLACK); + canvas.drawText(sTime,width/2,timeMargain,paint); + + int soundMargain = timeMargain + 80; + String soundTime = "party 是我家"; + String soundTime2 = "party party 是我家"; + paint.setTextAlign(Paint.Align.CENTER); + paint.setTextSize(80); + canvas.drawText(soundTime,width/2,soundMargain,paint); + canvas.drawText(soundTime2,width/2,soundMargain + 80,paint); + + } finally { + mInputSurface.unlockCanvasAndPost(canvas); + } + + } + private void releaseEncoder() { + if (mVideoMediaCodec != null) { + mVideoMediaCodec.stop(); + mVideoMediaCodec.release(); + mVideoMediaCodec = null; + } + + if (mInputSurface != null) { + mInputSurface.release(); + mInputSurface = null; + } + if (mediaMuxer != null) { + mediaMuxer.stop(); + mediaMuxer.release(); + mediaMuxer = null; + } + } + + + + + public interface IRefreshCallBack{ + void refresh(boolean isEnd); + } + + private DbCallBackListener mDbCallBackListener; + public interface DbCallBackListener { + void cuurentFrequenty(int cuurentFrequenty, double volume); + } + + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/others/OthersActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/others/OthersActivity.java new file mode 100644 index 0000000..4606de4 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/others/OthersActivity.java @@ -0,0 +1,23 @@ +package com.aserbao.androidcustomcamera.blocks.others; + +import android.view.View; + +import com.aserbao.androidcustomcamera.base.activity.RVBaseActivity; +import com.aserbao.androidcustomcamera.base.beans.BaseRecyclerBean; +import 
com.aserbao.androidcustomcamera.blocks.others.changeHue.ChangeHueActivity; +import com.aserbao.androidcustomcamera.blocks.others.changeVoice.ChangeVoiceActivity; + +public class OthersActivity extends RVBaseActivity { + + + @Override + protected void initGetData() { + mBaseRecyclerBeen.add(new BaseRecyclerBean("修改hue", ChangeHueActivity.class)); + mBaseRecyclerBeen.add(new BaseRecyclerBean("变声", ChangeVoiceActivity.class)); + } + + @Override + public void itemClickBack(View view, int position, boolean isLongClick, int comeFrom) { + + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/others/changeHue/ChangeHueActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/others/changeHue/ChangeHueActivity.java new file mode 100644 index 0000000..68e81eb --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/others/changeHue/ChangeHueActivity.java @@ -0,0 +1,93 @@ +package com.aserbao.androidcustomcamera.blocks.others.changeHue; + +import android.content.res.Resources; +import android.graphics.Bitmap; +import android.graphics.BitmapFactory; +import android.graphics.Color; +import android.graphics.ColorFilter; +import android.os.Bundle; +import android.support.v7.app.AppCompatActivity; +import android.widget.ImageView; +import android.widget.SeekBar; +import android.widget.TextView; +import javax.*; + +import com.aserbao.androidcustomcamera.R; + +import butterknife.BindView; +import butterknife.ButterKnife; + +public class ChangeHueActivity extends AppCompatActivity { + + @BindView(R.id.change_hue_iv) + ImageView mChangeHueIv; + @BindView(R.id.change_hue_tv) + TextView mChangeHueTv; + @BindView(R.id.change_hue_sb) + SeekBar mChangeHueSb; + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setContentView(R.layout.activity_change_hue); + ButterKnife.bind(this); + init(); + } + + private void init() { + mChangeHueSb.setOnSeekBarChangeListener(new 
SeekBar.OnSeekBarChangeListener() { + @Override + public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { + final Bitmap bitmap = BitmapFactory.decodeResource(getResources(), R.drawable.change_hue); +// Bitmap hue = hue(bitmap, progress); +// mChangeHueIv.setImageBitmap(hue); + mChangeHueTv.setText(String.valueOf(progress)); + changeHue(progress); +// ColorFilter colorFilter = ColorFilterGenerator.adjustHue(progress); +// mChangeHueIv.setColorFilter(colorFilter); + } + + @Override + public void onStartTrackingTouch(SeekBar seekBar) { + + } + + @Override + public void onStopTrackingTouch(SeekBar seekBar) { + + } + }); + } + + public static Bitmap hue(Bitmap bitmap, float hue) { + Bitmap newBitmap = bitmap.copy(bitmap.getConfig(), true); + final int width = newBitmap.getWidth(); + final int height = newBitmap.getHeight(); + float [] hsv = new float[3]; + + for(int y = 0; y < height; y++){ + for(int x = 0; x < width; x++){ + int pixel = newBitmap.getPixel(x,y); + Color.colorToHSV(pixel,hsv); + hsv[0] = hue; + newBitmap.setPixel(x,y,Color.HSVToColor(Color.alpha(pixel),hsv)); + } + } + +// bitmap.recycle(); +// bitmap = null; + + return newBitmap; + } + + + public void changeHue(int progress){ + float[] hsbVals = new float[3]; + int inputColor = Color.parseColor("#FFF757"); + Color.colorToHSV(inputColor,hsbVals); + float v = (float) progress / (float) 360; + hsbVals[0] = (float) progress; + int color = Color.HSVToColor(hsbVals); + mChangeHueTv.setBackgroundColor(color); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/others/changeHue/ColorFilterGenerator.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/others/changeHue/ColorFilterGenerator.java new file mode 100644 index 0000000..17c4e39 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/others/changeHue/ColorFilterGenerator.java @@ -0,0 +1,57 @@ +package com.aserbao.androidcustomcamera.blocks.others.changeHue; + +import 
android.graphics.ColorFilter; +import android.graphics.ColorMatrix; +import android.graphics.ColorMatrixColorFilter; + +public class ColorFilterGenerator +{ + /** + * Creates a HUE ajustment ColorFilter + * @see http://groups.google.com/group/android-developers/browse_thread/thread/9e215c83c3819953 + * @see http://gskinner.com/blog/archives/2007/12/colormatrix_cla.html + * @param value degrees to shift the hue. + * @return + */ +public static ColorFilter adjustHue(float value ) +{ + ColorMatrix cm = new ColorMatrix(); + + adjustHue(cm, value); + + return new ColorMatrixColorFilter(cm); +} + +/** + * @see http://groups.google.com/group/android-developers/browse_thread/thread/9e215c83c3819953 + * @see http://gskinner.com/blog/archives/2007/12/colormatrix_cla.html + * @param cm + * @param value + */ +public static void adjustHue(ColorMatrix cm, float value) +{ + value = cleanValue(value, 180f) / 180f * (float) Math.PI; + if (value == 0) + { + return; + } + float cosVal = (float) Math.cos(value); + float sinVal = (float) Math.sin(value); + float lumR = 0.213f; + float lumG = 0.715f; + float lumB = 0.072f; + float[] mat = new float[] + { + lumR + cosVal * (1 - lumR) + sinVal * (-lumR), lumG + cosVal * (-lumG) + sinVal * (-lumG), lumB + cosVal * (-lumB) + sinVal * (1 - lumB), 0, 0, + lumR + cosVal * (-lumR) + sinVal * (0.143f), lumG + cosVal * (1 - lumG) + sinVal * (0.140f), lumB + cosVal * (-lumB) + sinVal * (-0.283f), 0, 0, + lumR + cosVal * (-lumR) + sinVal * (-(1 - lumR)), lumG + cosVal * (-lumG) + sinVal * (lumG), lumB + cosVal * (1 - lumB) + sinVal * (lumB), 0, 0, + 0f, 0f, 0f, 1f, 0f, + 0f, 0f, 0f, 0f, 1f }; + cm.postConcat(new ColorMatrix(mat)); +} + +protected static float cleanValue(float p_val, float p_limit) +{ + return Math.min(p_limit, Math.max(-p_limit, p_val)); +} +} \ No newline at end of file diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/others/changeVoice/ChangeVoiceActivity.java 
b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/others/changeVoice/ChangeVoiceActivity.java new file mode 100644 index 0000000..4c6dbe8 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/others/changeVoice/ChangeVoiceActivity.java @@ -0,0 +1,73 @@ +package com.aserbao.androidcustomcamera.blocks.others.changeVoice; + +import android.view.View; + +import com.aserbao.androidcustomcamera.base.activity.RVBaseActivity; +import com.aserbao.androidcustomcamera.base.beans.BaseRecyclerBean; +import com.aserbao.androidcustomcamera.utils.VoiceUtils; + +import org.fmod.FMOD; + +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; + +public class ChangeVoiceActivity extends RVBaseActivity { + private ExecutorService fixedThreadPool; + private PlayerThread playerThread; + private String path = "file:///android_asset/five.mp3"; + private int type; + + @Override + protected void initGetData() { + mBaseRecyclerBeen.add(new BaseRecyclerBean("原声",0)); + mBaseRecyclerBeen.add(new BaseRecyclerBean("萝莉",1)); + mBaseRecyclerBeen.add(new BaseRecyclerBean("大叔",2)); + mBaseRecyclerBeen.add(new BaseRecyclerBean("惊悚",3)); + mBaseRecyclerBeen.add(new BaseRecyclerBean("搞怪",4)); + mBaseRecyclerBeen.add(new BaseRecyclerBean("空灵",5)); + FMOD.init(this); + fixedThreadPool = Executors.newFixedThreadPool(1); + } + + + @Override + public void itemClickBack(View view, int position, boolean isLongClick, int comeFrom) { + switch (position){ + case 0: + type = VoiceUtils.MODE_NORMAL; + break; + case 1: + type = VoiceUtils.MODE_LUOLI; + break; + case 2: + type = VoiceUtils.MODE_DASHU; + break; + case 3: + type = VoiceUtils.MODE_JINGSONG; + break; + case 4: + type = VoiceUtils.MODE_GAOGUAI; + break; + case 5: + type = VoiceUtils.MODE_KONGLING; + break; + } +// VoiceUtils.fix(path, type); + playerThread = new PlayerThread(); + fixedThreadPool.execute(playerThread); + } + + class PlayerThread implements Runnable { + @Override + public void 
run() { + VoiceUtils.fix(path, type); + } + } + + + @Override + protected void onDestroy() { + super.onDestroy(); + FMOD.close(); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/utils/CheckPermissionUtil.java b/app/src/main/java/com/aserbao/androidcustomcamera/utils/CheckPermissionUtil.java new file mode 100755 index 0000000..f4ccf50 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/utils/CheckPermissionUtil.java @@ -0,0 +1,259 @@ +package com.aserbao.androidcustomcamera.utils; + +import android.Manifest; +import android.content.Context; +import android.content.pm.PackageManager; +import android.location.LocationManager; +import android.media.AudioFormat; +import android.media.AudioRecord; +import android.media.MediaRecorder; +import android.os.Build; +import android.support.v4.content.ContextCompat; +import android.util.Log; + +import static com.aserbao.androidcustomcamera.base.MyApplication.getContext; + + +/** + */ + +public class CheckPermissionUtil { + + private static String TAG = "CheckPermissionUtil"; + + // 音频获取源 + public static int audioSource = MediaRecorder.AudioSource.MIC; + // 设置音频采样率,44100是目前的标准,但是某些设备仍然支持22050,16000,11025 + public static int sampleRateInHz = 44100; + // 设置音频的录制的声道CHANNEL_IN_STEREO为双声道,CHANNEL_CONFIGURATION_MONO为单声道 + public static int channelConfig = AudioFormat.CHANNEL_IN_STEREO; + // 音频数据格式:PCM 16位每个样本。保证设备支持。PCM 8位每个样本。不一定能得到设备支持。 + public static int audioFormat = AudioFormat.ENCODING_PCM_16BIT; + // 缓冲区字节大小 + public static int bufferSizeInBytes = 0; + + /** + * @return true 已经授权 获取地理位置权限 + */ + public static boolean isLocationPermGrantedAndOpen(){ + boolean result = false; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + Log.i(TAG,"isLocationPermGrantedAndOpen()--- Build.VERSION.SDK_INT >= Build.VERSION_CODES.M "); + if (ContextCompat.checkSelfPermission(getContext(), Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED || + 
ContextCompat.checkSelfPermission(getContext(), Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED ) + { + Log.i(TAG,"isLocationPermGrantedAndOpen()--- result = false"); + result = false; + } else { + if(isOPenGPS(getContext())){ + Log.i(TAG,"isLocationPermGrantedAndOpen()--- result = true"); + result = true; + } + } + } else { + Log.i(TAG,"isLocationPermGrantedAndOpen()--- Build.VERSION.SDK_INT < Build.VERSION_CODES.M "); + result = true; + } + return result; + } + + /** + * @return true 照相机权限 + */ + public static boolean isCameraGranted(){ + boolean result = false; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + Log.i(TAG,"isCameraGranted()--- Build.VERSION.SDK_INT >= Build.VERSION_CODES.M "); + if (ContextCompat.checkSelfPermission(getContext(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED || + ContextCompat.checkSelfPermission(getContext(), Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED || + ContextCompat.checkSelfPermission(getContext(), Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED || + ContextCompat.checkSelfPermission(getContext(), Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) + { + Log.i(TAG,"isCameraGranted()--- result = false"); + result = false; + } else { + result = true; + } + } else { + Log.i(TAG,"isCameraGranted()--- Build.VERSION.SDK_INT < Build.VERSION_CODES.M "); + result = true; + } + return result; + } + + /** + * 只用这个方法,一些华为手机的地理权限打开与否,不能准确判断出,需要上面的方法 isLocationPermGrantedAndOpen + * @return true 已经授权 获取地理位置权限 + */ + public static boolean isLocationPermGranted(){ + boolean result = false; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + if (ContextCompat.checkSelfPermission(getContext(), Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED || + ContextCompat.checkSelfPermission(getContext(), Manifest.permission.ACCESS_COARSE_LOCATION) != 
PackageManager.PERMISSION_GRANTED ) + { + result = false; + } else { + result = true; + } + } else { + result = true; + } + return result; + } + + /** + * @return true + */ + public static boolean isStoragePermGranted(){ + boolean result = false; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + if (ContextCompat.checkSelfPermission(getContext(), Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED || + ContextCompat.checkSelfPermission(getContext(), Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED ) + { + result = false; + } else { + result = true; + } + } else { + result = true; + } + return result; + } + + /** + * Function:判断录音权限,兼容android6.0以下以及以上系统 + */ + + /** + * 判断是是否有录音权限 + */ + public static boolean isHasPermission(final Context context){ + bufferSizeInBytes = 0; + bufferSizeInBytes = AudioRecord.getMinBufferSize(sampleRateInHz, + channelConfig, audioFormat); + + AudioRecord audioRecord = null; + try { + // 美图手机这里会抛 IllegalArgumentException + // https://fabric.io/getremark/android/apps/com.getremark.spot/issues/5b719a816007d59fcdac62f0?time=last-seven-days + audioRecord = new AudioRecord(audioSource, sampleRateInHz, + channelConfig, audioFormat, bufferSizeInBytes); + } catch (Exception e) { + e.printStackTrace(); + return false; + } + + //开始录制音频 + try{ + // 防止某些手机崩溃,例如联想 + audioRecord.startRecording(); + }catch (IllegalStateException e){ + e.printStackTrace(); + } + /** + * 根据开始录音判断是否有录音权限 + */ + if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) { + return false; + } + audioRecord.stop(); + audioRecord.release(); + audioRecord = null; + + return true; + } + + /** + * @return true 已经授权 获取照相机权限 + */ + public static boolean isCameraPermissionGranted(){ + return isPermissionGranted(Manifest.permission.CAMERA); + } + + public static boolean isReadStoragePermissionsGranted(){ + return isPermissionGranted(Manifest.permission.READ_EXTERNAL_STORAGE); + } + + public static 
boolean isWriteStoragePermissionsGranted(){ + return isPermissionGranted(Manifest.permission.WRITE_EXTERNAL_STORAGE); + } + + public static boolean isPermissionGranted(String permission) { + boolean isRecorder = false; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + if (ContextCompat.checkSelfPermission(getContext(), permission) != PackageManager.PERMISSION_GRANTED) { + isRecorder = false; + } else { + isRecorder = true; + } + } else { + isRecorder = true; + } + return isRecorder; + } + + public static boolean isRecordAudioPermissionsGranted(){ + return isPermissionGranted(Manifest.permission.RECORD_AUDIO); + } + + public static boolean isWriteSettingPermissionsGranted(){ + boolean isRecorder = false; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + if (ContextCompat.checkSelfPermission(getContext(), Manifest.permission.WRITE_SETTINGS) != PackageManager.PERMISSION_GRANTED) { + isRecorder = false; + } else { + isRecorder = true; + } + } else { + isRecorder = true; + } + return isRecorder; + } + + public static boolean isContactsPermissionGranted() { + boolean result = false; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + if (ContextCompat.checkSelfPermission(getContext(), Manifest.permission.READ_CONTACTS) != PackageManager.PERMISSION_GRANTED) { + result = false; + } else { + result = true; + } + } else { + result = true; + } + Log.i(TAG, "isContactsPermissionGranted()--- result = " + result); + return result; + } + + + public static boolean isReadSmsPermissionGranted() { + boolean result; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + if (ContextCompat.checkSelfPermission(getContext(), Manifest.permission.READ_SMS) != PackageManager.PERMISSION_GRANTED) { + result = false; + } else { + result = true; + } + }else { + result = false; + } + return result; + + } + + + public static boolean isOPenGPS(final Context context) { + LocationManager locationManager + = (LocationManager) context.getSystemService(Context.LOCATION_SERVICE); 
/**
 * Thin JNI wrapper around the native "sound" voice-effect library.
 * Reference demo: https://github.com/AndroidHensen/NDKVoice
 *
 * @author aserbao
 * @date : On 2020-01-14 19:45
 * @project:AndroidCamera
 * @package:com.aserbao.androidcustomcamera.utils
 */
public class VoiceUtils {

    // Effect presets understood by the native fix() implementation; the
    // labels below match the rows ChangeVoiceActivity shows for each value.
    public static final int MODE_NORMAL   = 0; // 原声 (unmodified)
    public static final int MODE_LUOLI    = 1; // 萝莉
    public static final int MODE_DASHU    = 2; // 大叔
    public static final int MODE_JINGSONG = 3; // 惊悚
    public static final int MODE_GAOGUAI  = 4; // 搞怪
    public static final int MODE_KONGLING = 5; // 空灵

    /**
     * Applies a voice effect to (and plays) the given audio file natively.
     *
     * @param path audio file path handed to the native layer
     * @param type one of the MODE_* presets above
     */
    public native static void fix(String path, int type);

    static {
        // Loads libsound.so; throws UnsatisfiedLinkError if the .so is missing.
        System.loadLibrary("sound");
    }
}
c4b2aac..77ebded 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/whole/WholeActivity.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/WholeActivity.java @@ -2,10 +2,12 @@ import android.support.v7.app.AppCompatActivity; import android.os.Bundle; +import android.view.View; import com.aserbao.androidcustomcamera.R; import com.aserbao.androidcustomcamera.base.activity.RVBaseActivity; -import com.aserbao.androidcustomcamera.base.beans.ClassBean; +import com.aserbao.androidcustomcamera.base.beans.BaseRecyclerBean; +import com.aserbao.androidcustomcamera.whole.createVideoByVoice.CreateVideoByAudioDbActivity; import com.aserbao.androidcustomcamera.whole.record.RecorderActivity; import java.util.List; @@ -13,8 +15,12 @@ public class WholeActivity extends RVBaseActivity { @Override - public List initData() { - mClassBeans.add(new ClassBean("视频录制这边走", RecorderActivity.class)); - return mClassBeans; + protected void initGetData() { + mBaseRecyclerBeen.add(new BaseRecyclerBean("视频录制这边走", RecorderActivity.class)); + } + + @Override + public void itemClickBack(View view, int position, boolean isLongClick, int comeFrom) { + } } diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/CreateVideoByAudioDbActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/CreateVideoByAudioDbActivity.java new file mode 100644 index 0000000..34d1132 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/CreateVideoByAudioDbActivity.java @@ -0,0 +1,134 @@ +package com.aserbao.androidcustomcamera.whole.createVideoByVoice; + +import android.graphics.Bitmap; +import android.graphics.BitmapFactory; +import android.os.Bundle; +import android.os.Environment; +import android.os.Handler; +import android.support.v7.app.AppCompatActivity; +import android.util.Log; +import android.view.View; +import android.widget.Button; +import android.widget.Toast; + +import 
com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.whole.createVideoByVoice.frameData.BackgroundFrame; +import com.aserbao.androidcustomcamera.whole.createVideoByVoice.frameData.MapFrame; +import com.aserbao.androidcustomcamera.whole.createVideoByVoice.frameData.TopPopFrame; +import com.aserbao.androidcustomcamera.whole.createVideoByVoice.interfaces.IEncoderVideoCallBackListener; +import com.aserbao.androidcustomcamera.whole.createVideoByVoice.interfaces.IGetVideoDbCallBackListener; +import com.aserbao.androidcustomcamera.whole.createVideoByVoice.interfaces.IMuxerVideoCallBackListener; + +import java.io.File; + +import butterknife.BindView; +import butterknife.ButterKnife; +import butterknife.OnClick; + +/** + * 更加音频振幅生成变色视频 + */ +public class CreateVideoByAudioDbActivity extends AppCompatActivity { + private static final String TAG = "CreateVideoByAudioDbAct"; + @BindView(R.id.create_video_analyze_btn) + Button mCreateVideoAnalyzeBtn; + @BindView(R.id.play_video_btn) + Button mPlayVideoBtn; + + private EncoderVideo mEncoderVideo; + private MuxerVoiceAndVideo mMuxerVoiceAndVideo; + public String inputAudioPath, outputMediaPath; + private GetAudioDb mGetAudioDb; + private File encoderOutputFile; + private boolean isEncoderFinish = false; + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setContentView(R.layout.activity_create_video_by_audio_db); + ButterKnife.bind(this); + } + + private float mStartTime; + + @OnClick({R.id.create_video_analyze_btn, R.id.play_video_btn}) + public void onViewClicked(View view) { + switch (view.getId()) { + case R.id.create_video_analyze_btn: + mStartTime = System.currentTimeMillis(); + String path = Environment.getExternalStorageDirectory().getAbsolutePath(); + encoderOutputFile = new File(path + "/ouput_dj_dance.mp4"); + inputAudioPath = path + "/dj_dance.mp3"; + outputMediaPath = path + "/output_dj_asebrao.mp4"; + mMuxerVoiceAndVideo = new 
MuxerVoiceAndVideo(new IMuxerVideoCallBackListener() { + @Override + public void success() { + runOnUiThread(new Runnable() { + @Override + public void run() { + Toast.makeText(CreateVideoByAudioDbActivity.this, "成功", Toast.LENGTH_SHORT).show(); + } + }); + Log.e(TAG, "update 合成 success: " + (System.currentTimeMillis() - mStartTime)+ "s"); + } + + @Override + public void failed() { + Log.e(TAG, "update 合成failed: "); + } + }); +// mMuxerVoiceAndVideo.startMuxer(encoderOutputFile.toString(), inputAudioPath,10,outputMediaPath); + mEncoderVideo = new EncoderVideo(new IEncoderVideoCallBackListener() { + @Override + public void success(final String outputMeidaPath, final float finalMediaTime) { + isEncoderFinish = true; + mGetAudioDb.stop(); + runOnUiThread(new Runnable() { + @Override + public void run() { + new Handler().postDelayed(new Runnable() { + @Override + public void run() { + Toast.makeText(CreateVideoByAudioDbActivity.this, "编码成功", Toast.LENGTH_SHORT).show(); + mMuxerVoiceAndVideo.startMuxer(outputMeidaPath, inputAudioPath,finalMediaTime,outputMediaPath); + Log.e(TAG, "update 编码 success: 耗时: " + (System.currentTimeMillis() - mStartTime) + "s"); + } + },100); + } + }); + } + + @Override + public void failed() { + Log.e(TAG, "update 编码 failed: "); + } + }); + + mEncoderVideo.addBaseDataFrameData(new BackgroundFrame()); + Bitmap bitmap = BitmapFactory.decodeResource(getResources(), R.drawable.katong); + mEncoderVideo.addBaseDataFrameData(new MapFrame(bitmap)); + mEncoderVideo.addBaseDataFrameData(new TopPopFrame("party 是我家,party party 是我家")); + + mEncoderVideo.startRecording(getResources(), encoderOutputFile); + + mGetAudioDb = new GetAudioDb(); + mGetAudioDb.start(inputAudioPath, new IGetVideoDbCallBackListener() { + @Override + public void cuurentFrequenty(boolean isEnd, double volume, float cuurTime) { + float volume1 = (float) volume / 100; + if (!isEncoderFinish) { + mEncoderVideo.update(isEnd, volume1, cuurTime); + if (cuurTime > 950) { + Log.e(TAG, 
"update cuurentFrequenty: isEnd : " + isEnd + " volume1 = " + volume1 + " cuurTime = " + cuurTime); + } + } + } + }); + + break; + case R.id.play_video_btn: + mMuxerVoiceAndVideo.startMuxer(encoderOutputFile.toString(), inputAudioPath, 10, outputMediaPath); + break; + } + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/EncoderVideo.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/EncoderVideo.java new file mode 100644 index 0000000..3f9c9b0 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/EncoderVideo.java @@ -0,0 +1,212 @@ +package com.aserbao.androidcustomcamera.whole.createVideoByVoice; + +import android.content.res.Resources; +import android.graphics.Bitmap; +import android.graphics.BitmapFactory; +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaFormat; +import android.media.MediaMuxer; +import android.os.Handler; +import android.util.Log; +import android.view.Surface; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.whole.createVideoByVoice.frameData.BaseFrameData; +import com.aserbao.androidcustomcamera.whole.createVideoByVoice.frameData.FrameDataManager; +import com.aserbao.androidcustomcamera.whole.createVideoByVoice.interfaces.IEncoderVideoCallBackListener; + +import java.io.File; +import java.io.IOException; +import java.nio.ByteBuffer; + +/** + * 功能: + * + * @author aserbao + * @date : On 2019/1/8 3:59 PM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.whole.createVideoByVoice + * @Copyright: 个人版权所有 + */ +public class EncoderVideo { + private static final String MIME_TYPE = "video/avc"; + private static final int WIDTH = 720; + private static final int HEIGHT = 1280; + private static final int BIT_RATE = 4000000; + public static final int FRAMES_PER_SECOND = 30; + private static final int 
IFRAME_INTERVAL = 5; + private static final int MEDIA_MAX_TIME = 1 * 1000; // 生成视频的最大长度 + private Surface mInputSurface; + public MediaMuxer mMuxer; + private boolean mMuxerStarted; + private int mTrackIndex; + private long mFakePts; + private boolean isRecording; + private Bitmap mBitmap; + + private int cuurFrame = 0; + private MediaCodec.BufferInfo mBufferInfo; + private MediaCodec mMediaCodec; + private float finalMediaTime = 0;//最终生成的视频长度 + private IEncoderVideoCallBackListener mIEncoderVideoCallBackListener; + private FrameDataManager mFrameDataManager; + + public EncoderVideo(IEncoderVideoCallBackListener iEncoderVideoCallBackListener) { + mIEncoderVideoCallBackListener = iEncoderVideoCallBackListener; + mFrameDataManager = new FrameDataManager(); + } + + public void addBaseDataFrameData(BaseFrameData baseFrameData){ + mFrameDataManager.addBaseFrameData(baseFrameData); + } + + private File mOutputFile; + public void startRecording(Resources resources,File outputFile) { + mOutputFile = outputFile; + cuurFrame = 0; + mBitmap = BitmapFactory.decodeResource(resources, R.drawable.katong); + try { + prepareEncoder(outputFile); + } catch (IOException e) { + e.printStackTrace(); + } + isRecording = true; + } + + private static final String TAG = "EncoderVideo"; + public void update(boolean isEnd,float volume,float cuurTime){ + if (cuurTime < MEDIA_MAX_TIME){ + if (isRecording) { + if (!isEnd) { + if (cuurTime > 1000 / FRAMES_PER_SECOND * cuurFrame) { + drainEncoder(false); + mFrameDataManager.drawFrame(mInputSurface, cuurFrame, volume); + cuurFrame++; + Log.e(TAG, "update: " + cuurTime + " cuurFrame = " + cuurFrame + " volume = " + volume); + } + } else { + Log.e(TAG, "update: " + cuurTime + " cuurFrame = " + cuurFrame + " volume = " + volume + " over1 "); + finalMediaTime = cuurTime / (float) 1000; + stopRecording(); + } + } + }else{ + Log.e(TAG, "update: " + cuurTime + " cuurFrame = " + cuurFrame + " volume = "+ volume + " over2 "); + finalMediaTime = 
MEDIA_MAX_TIME/ (float) 1000; + stopRecording(); + } + } + public void stopRecording() { + isRecording = false; + drainEncoder(true); + releaseEncoder(); + } + + /** + * 准备视频编码器,muxer,和一个输入表面。 + */ + private void prepareEncoder(File outputFile) throws IOException { + mBufferInfo = new MediaCodec.BufferInfo(); + MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, WIDTH, HEIGHT); + //1. 设置一些属性。没有指定其中的一些可能会导致MediaCodec.configure()调用抛出一个无用的异常。 + format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); + format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);//比特率(比特率越高,音视频质量越高,编码文件越大) + format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAMES_PER_SECOND);//设置帧速 + format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);//设置关键帧间隔时间 + + //2.创建一个MediaCodec编码器,并配置格式。获取一个我们可以用于输入的表面,并将其封装到处理EGL工作的类中。 + mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE); + mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); + mInputSurface = mMediaCodec.createInputSurface(); + mMediaCodec.start(); + //3. 
创建一个MediaMuxer。我们不能在这里添加视频跟踪和开始合成,因为我们的MediaFormat里面没有缓冲数据。 + // 只有在编码器开始处理数据后才能从编码器获得这些数据。我们实际上对多路复用音频没有兴趣。我们只是想要 + // 将从MediaCodec获得的原始H.264基本流转换为.mp4文件。 + mMuxer = new MediaMuxer(outputFile.toString(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); + + mMuxerStarted = false; + mTrackIndex = -1; + } + + private void drainEncoder(boolean endOfStream) { + final int TIMEOUT_USEC = 10000; + if (endOfStream) { + mMediaCodec.signalEndOfInputStream();//在输入信号end-of-stream。相当于提交一个空缓冲区。视频编码完结 + } + ByteBuffer[] encoderOutputBuffers = mMediaCodec.getOutputBuffers(); + while (true) { + int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC); + if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {//没有可以输出的数据使用时 + if (!endOfStream) { + break; // out of while + } + } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { + //输出缓冲区已经更改,客户端必须引用新的 + encoderOutputBuffers = mMediaCodec.getOutputBuffers(); + } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { + //输出格式发生了变化,后续数据将使用新的数据格式。 + if (mMuxerStarted) { + throw new RuntimeException("format changed twice"); + } + MediaFormat newFormat = mMediaCodec.getOutputFormat(); + mTrackIndex = mMuxer.addTrack(newFormat); + mMuxer.start(); + mMuxerStarted = true; + } else if (outputBufferIndex < 0) { + } else { + ByteBuffer encodedData = encoderOutputBuffers[outputBufferIndex]; + if (encodedData == null) { + throw new RuntimeException("encoderOutputBuffer " + outputBufferIndex + + " was null"); + } + if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { + //当我们得到的时候,编解码器的配置数据被拉出来,并给了muxer。这时候可以忽略。 + mBufferInfo.size = 0; + } + if (mBufferInfo.size != 0) { + if (!mMuxerStarted) { + throw new RuntimeException("muxer hasn't started"); + } + //调整ByteBuffer值以匹配BufferInfo。 + encodedData.position(mBufferInfo.offset); + encodedData.limit(mBufferInfo.offset + mBufferInfo.size); + mBufferInfo.presentationTimeUs = mFakePts; + mFakePts += 1000000L / 
FRAMES_PER_SECOND; + + mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo); + } + mMediaCodec.releaseOutputBuffer(outputBufferIndex, false); + if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { + if (!endOfStream) { + mIEncoderVideoCallBackListener.failed(); + } else { + Log.e(TAG, "update drainEncoder: success" ); + mIEncoderVideoCallBackListener.success(mOutputFile.toString(),finalMediaTime); + } + isRecording = false; + break; + } + } + } + } + + + private void releaseEncoder() { + if (mMediaCodec != null) { + mMediaCodec.stop(); + mMediaCodec.release(); + mMediaCodec = null; + } + if (mInputSurface != null) { + mInputSurface.release(); + mInputSurface = null; + } + if (mMuxer != null) { + mMuxer.stop(); + mMuxer.release(); + mMuxer = null; + } + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/GetAudioDb.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/GetAudioDb.java new file mode 100644 index 0000000..b7213d6 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/GetAudioDb.java @@ -0,0 +1,235 @@ +package com.aserbao.androidcustomcamera.whole.createVideoByVoice; + +import android.media.AudioFormat; +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaExtractor; +import android.media.MediaFormat; +import android.util.Log; + +import com.aserbao.androidcustomcamera.whole.createVideoByVoice.interfaces.IGetVideoDbCallBackListener; + +import java.io.IOException; +import java.nio.ByteBuffer; + +/** + * 功能: 解码获取音频帧分贝大小 + * @author aserbao + * @date : On 2019/1/4 4:05 PM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.blocks.MediaExtractor.primary + * @Copyright: 个人版权所有 + */ +public class GetAudioDb { + private static final String TAG = "GetAudioDb"; + public GetAudioDb() { + } + + private DecoderAACThread mDecoderAACThread; 
+ private byte[] mPcmData; + + public void start(String inputAudioPath,IGetVideoDbCallBackListener dbCallBackListener){ + mDbCallBackListener = dbCallBackListener; + if (mDecoderAACThread == null) { + mDecoderAACThread = new DecoderAACThread(inputAudioPath); + mDecoderAACThread.setRunning(true); + try { + mDecoderAACThread.start(); + } catch (Exception e) { + Log.w(TAG, "decode already start"); + } + } + + } + + public void stop() { + if (mDecoderAACThread != null) { + mDecoderAACThread.setRunning(false); + mDecoderAACThread = null; + } + } + + public class DecoderAACThread extends Thread{ + String MIME_TYPE = "audio/mp4a-latm"; + int KEY_CHANNEL_COUNT = 2; + int KEY_SAMPLE_RATE = 44100; + int KEY_BIT_RATE = 64000; + int KEY_AAC_PROFILE = MediaCodecInfo.CodecProfileLevel.AACObjectLC; + int WAIT_TIME = 10000; + int AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT; + int CHANNEL_MODE = AudioFormat.CHANNEL_IN_STEREO; + int BUFFFER_SIZE = 2048; + + private String mInputAudioPath;//音频路径 + private String mInputAudioMimeType; + private MediaExtractor mMediaExtractor; + private MediaCodec mMediaCodec; + private MediaCodec.BufferInfo mBufferInfo; + private boolean running; + + private void setRunning(boolean running) { + this.running = running; + } + + public DecoderAACThread(String inputAudioPath) { + mInputAudioPath = inputAudioPath; + } + + @Override + public void run() { + super.run(); + if (!prepare()) { + running = false; + Log.e(TAG, "音频解码器初始化失败"); + return; + } + decode(); + release(); + } + + + public boolean prepare(){ + mBufferInfo = new MediaCodec.BufferInfo(); + mMediaExtractor = new MediaExtractor(); + try { + mMediaExtractor.setDataSource(mInputAudioPath); + int audioIndex = -1;//音频通道 + int trackCount = mMediaExtractor.getTrackCount();//获取通道总数 + for (int i = 0; i < trackCount; i++) { + MediaFormat trackFormat = mMediaExtractor.getTrackFormat(i); + mInputAudioMimeType = trackFormat.getString(MediaFormat.KEY_MIME); + if (mInputAudioMimeType.startsWith("audio/")) { 
+ audioIndex = i; + } + } + mMediaExtractor.selectTrack(audioIndex);//切换到音频通道 + MediaFormat mediaFormat = mMediaExtractor.getTrackFormat(audioIndex); + mMediaCodec = MediaCodec.createDecoderByType(mInputAudioMimeType); + mMediaCodec.configure(mediaFormat, null, null, 0); + } catch (IOException e) { + e.printStackTrace(); + return false; + } + if (mMediaCodec == null) { + Log.e(TAG, "create mediaDecode failed"); + return false; + } + mMediaCodec.start(); + return true; + } + + private void decode() { + while (running) { + int inputIndex = mMediaCodec.dequeueInputBuffer(-1); + if (inputIndex >= 0) { + ByteBuffer inputBuffer = mMediaCodec.getInputBuffer(inputIndex); + if (inputBuffer == null) { + return; + } + inputBuffer.clear(); + int sampleSize = mMediaExtractor.readSampleData(inputBuffer, 0); + if (sampleSize < 0) { + mMediaCodec.queueInputBuffer(inputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM); + running = false; + } else { + mMediaCodec.queueInputBuffer(inputIndex, 0, sampleSize, mMediaExtractor.getSampleTime(), 0); + mMediaExtractor.advance(); + } + } + int outputIndex = mMediaCodec.dequeueOutputBuffer(mBufferInfo, WAIT_TIME); + ByteBuffer outputBuffer; + if (outputIndex >= 0) { + if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { + mMediaCodec.releaseOutputBuffer(outputIndex, false); + continue; + } + float cuurGetSampleTime = 0; + if (mBufferInfo.size != 0) { + outputBuffer = mMediaCodec.getOutputBuffer(outputIndex); + if (mPcmData == null || mPcmData.length < mBufferInfo.size) { + mPcmData = new byte[mBufferInfo.size]; + } + if (outputBuffer != null) { + outputBuffer.get(mPcmData, 0, mBufferInfo.size); + outputBuffer.clear(); + } + cuurGetSampleTime = mMediaExtractor.getSampleTime() / (float) (1000); + calcFrequency2(mPcmData,cuurGetSampleTime); + Log.e(TAG, "解析到的时间点为:"+ cuurGetSampleTime + "ms decode: mPcmData.length = " + mPcmData.length + " mBufferInfo " + mBufferInfo.toString()); + } + 
mMediaCodec.releaseOutputBuffer(outputIndex, false); + if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { + mDbCallBackListener.cuurentFrequenty(true,0,cuurGetSampleTime); + } + } + } + mMediaExtractor.release(); + } + + /** + * 释放资源 + */ + private void release() { + if (mMediaCodec != null) { + mMediaCodec.stop(); + mMediaCodec.release(); + } + } + } + + + /** + * 获取的值范围0 ~ 24366 + * @param pcmdata + * @param v + */ + public void calcFrequency2(byte[] pcmdata, float v) { + short[] music = (!isBigEnd()) ? byteArray2ShortArrayLittle( pcmdata, pcmdata.length / 2) : + byteArray2ShortArrayBig( pcmdata, pcmdata.length / 2); + calculateRealVolume(music,music.length,v); + } + + private boolean isBigEnd() { + short i = 0x1; + boolean bRet = ((i >> 8) == 0x1); + return bRet; + } + + private short[] byteArray2ShortArrayBig(byte[] data, int items) { + short[] retVal = new short[items]; + for (int i = 0; i < retVal.length; i++) + retVal[i] = (short) ((data[i * 2 + 1] & 0xff) | (data[i * 2] & 0xff) << 8); + + return retVal; + } + + private short[] byteArray2ShortArrayLittle(byte[] data, int items) { + short[] retVal = new short[items]; + for (int i = 0; i < retVal.length; i++) + retVal[i] = (short) ((data[i * 2] & 0xff) | (data[i * 2 + 1] & 0xff) << 8); + + return retVal; + } + + private int maxVolume = 0; + protected void calculateRealVolume(short[] buffer, int readSize, float v) { + double sum = 0; + for (int i = 0; i < readSize; i++) { + sum += buffer[i] * buffer[i]; + } + if (readSize > 0) { + double amplitude = sum / readSize; + int mVolume = (int) Math.sqrt(amplitude); + Log.e(TAG, "calculateRealVolume: " + mVolume); + maxVolume = Math.max(mVolume,maxVolume); + mDbCallBackListener.cuurentFrequenty(false,mVolume,v); + } + } + + private IGetVideoDbCallBackListener mDbCallBackListener; + + + + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/MuxerVoiceAndVideo.java 
b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/MuxerVoiceAndVideo.java new file mode 100644 index 0000000..06a66e2 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/MuxerVoiceAndVideo.java @@ -0,0 +1,44 @@ +package com.aserbao.androidcustomcamera.whole.createVideoByVoice; + +import com.aserbao.androidcustomcamera.whole.createVideoByVoice.interfaces.IEncoderVideoCallBackListener; +import com.aserbao.androidcustomcamera.whole.createVideoByVoice.interfaces.IMuxerVideoCallBackListener; + +import VideoHandle.EpEditor; +import VideoHandle.OnEditorListener; + +/** + * 功能: + * + * @author aserbao + * @date : On 2019/1/8 4:25 PM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.whole.createVideoByVoice + * @Copyright: 个人版权所有 + */ +public class MuxerVoiceAndVideo { + private IMuxerVideoCallBackListener mIMuxerVideoCallBackListener; + + public MuxerVoiceAndVideo(IMuxerVideoCallBackListener iMuxerVideoCallBackListener) { + mIMuxerVideoCallBackListener = iMuxerVideoCallBackListener; + } + + public void startMuxer(String inputVideoPath, String inputMusicPath, float musicTime, String outputVideoPath){ + String cmd = "-y -i "+ inputVideoPath + " -ss 0 -t "+ musicTime + " -i "+ inputMusicPath + " -acodec copy -vcodec copy "+ outputVideoPath; + EpEditor.execCmd(cmd, 10000,new OnEditorListener() { + @Override + public void onSuccess() { + mIMuxerVideoCallBackListener.success(); + } + + @Override + public void onFailure() { + mIMuxerVideoCallBackListener.failed(); + } + + @Override + public void onProgress(float v) { + } + }); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/beans/MapFriend.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/beans/MapFriend.java new file mode 100644 index 0000000..7806930 --- /dev/null +++ 
b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/beans/MapFriend.java @@ -0,0 +1,30 @@ +package com.aserbao.androidcustomcamera.whole.createVideoByVoice.beans; + +import android.graphics.Bitmap; + +/** + * 功能: + * + * @author aserbao + * @date : On 2019/1/8 5:59 PM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.whole.createVideoByVoice.beans + * @Copyright: 个人版权所有 + */ +public class MapFriend { + private float screenX; + private float screenY; + private Bitmap friendHeadBitmap; + + /** + * @param screenX + * @param screenY + * @param friendHeadBitmap + */ + public MapFriend(float screenX, float screenY, Bitmap friendHeadBitmap) { + this.screenX = screenX; + this.screenY = screenY; + this.friendHeadBitmap = friendHeadBitmap; + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/frameData/BackgroundFrame.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/frameData/BackgroundFrame.java new file mode 100644 index 0000000..5b1a6f3 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/frameData/BackgroundFrame.java @@ -0,0 +1,21 @@ +package com.aserbao.androidcustomcamera.whole.createVideoByVoice.frameData; + +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Paint; + +/** + * 功能: 背景绘制 + * @author aserbao + * @date : On 2019/1/8 5:37 PM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.whole.createVideoByVoice.frameData + * @Copyright: 个人版权所有 + */ +public class BackgroundFrame extends BaseFrameData{ + @Override + void onDraw(Canvas canvas, Paint paint,float volume,int changeHueColor) { + canvas.drawColor(changeHueColor); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/frameData/BaseFrameData.java 
b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/frameData/BaseFrameData.java new file mode 100644 index 0000000..ab400f0 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/frameData/BaseFrameData.java @@ -0,0 +1,17 @@ +package com.aserbao.androidcustomcamera.whole.createVideoByVoice.frameData; + +import android.graphics.Canvas; +import android.graphics.Paint; + +/** + * 功能:基类 + * @author aserbao + * @date : On 2019/1/8 5:37 PM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.whole.createVideoByVoice.frameData + * @Copyright: 个人版权所有 + */ +public abstract class BaseFrameData { + abstract void onDraw(Canvas canvas,Paint paint,float volume,int hueColor); +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/frameData/FrameDataManager.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/frameData/FrameDataManager.java new file mode 100644 index 0000000..4db6220 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/frameData/FrameDataManager.java @@ -0,0 +1,58 @@ +package com.aserbao.androidcustomcamera.whole.createVideoByVoice.frameData; + +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Paint; +import android.graphics.Rect; +import android.graphics.RectF; +import android.view.Surface; + +import java.util.ArrayList; +import java.util.List; + +/** + * 功能: + * + * @author aserbao + * @date : On 2019/1/8 5:31 PM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.whole.createVideoByVoice + * @Copyright: 个人版权所有 + */ +public class FrameDataManager { + public List mBaseFrameDataList = new ArrayList<>(); + + public void addBaseFrameData(BaseFrameData baseFrameData){ + mBaseFrameDataList.add(baseFrameData); + } + public void drawFrame(Surface inputSurface ,int 
frameNum, float volume){ + Canvas canvas = inputSurface.lockCanvas(null); + Paint paint = new Paint(); + int changeHueColor = changeHue(volume); + for (BaseFrameData baseFrameData : mBaseFrameDataList) { + baseFrameData.onDraw(canvas,paint,volume,changeHueColor); + } + try { + paint.setTextSize(100); + paint.setColor(0xff000000); + + } finally { + inputSurface.unlockCanvasAndPost(canvas); + } + } + + /** + * @param volume 0 ~ 360 + * @return + */ + public int changeHue(float volume){ + float[] hsbVals = new float[3]; + int inputColor = Color.parseColor("#FFF757"); + Color.colorToHSV(inputColor,hsbVals); + float v = (float) volume / (float) 360; + hsbVals[0] = volume; + int color = Color.HSVToColor(hsbVals); + return color; + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/frameData/FriendListFrame.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/frameData/FriendListFrame.java new file mode 100644 index 0000000..0971ebb --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/frameData/FriendListFrame.java @@ -0,0 +1,22 @@ +package com.aserbao.androidcustomcamera.whole.createVideoByVoice.frameData; + +import android.graphics.Canvas; +import android.graphics.Paint; + +/** + * 功能: + * + * @author aserbao + * @date : On 2019/1/8 6:01 PM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.whole.createVideoByVoice.frameData + * @Copyright: 个人版权所有 + */ +public class FriendListFrame extends BaseFrameData { + + @Override + void onDraw(Canvas canvas, Paint paint, float volume, int hueColor) { + + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/frameData/MapFrame.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/frameData/MapFrame.java new file mode 100644 index 0000000..60a4daa --- /dev/null +++ 
b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/frameData/MapFrame.java @@ -0,0 +1,37 @@ +package com.aserbao.androidcustomcamera.whole.createVideoByVoice.frameData; + +import android.graphics.Bitmap; +import android.graphics.Canvas; +import android.graphics.Paint; +import android.graphics.Rect; + +/** + * 功能:地图 + * + * @author aserbao + * @date : On 2019/1/8 5:56 PM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.whole.createVideoByVoice.frameData + * @Copyright: 个人版权所有 + */ +public class MapFrame extends BaseFrameData { + private int margainTop = 20; + private int margainLefAndRight = 30; + private int margainBottom = 100; + + private Bitmap mMapBitmap; + + public MapFrame(Bitmap bitmapMap) { + this.mMapBitmap = bitmapMap; + } + @Override + void onDraw(Canvas canvas, Paint paint, float volume, int hueColor) { + int width = canvas.getWidth(); + int height = canvas.getHeight(); + Rect srcRect = new Rect(0, 0, mMapBitmap.getWidth(), mMapBitmap.getHeight()); + Rect decRect = new Rect(margainLefAndRight, margainTop, width - margainLefAndRight, height-margainBottom); + canvas.drawBitmap(mMapBitmap,srcRect,decRect,paint); + } + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/frameData/TopPopFrame.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/frameData/TopPopFrame.java new file mode 100644 index 0000000..50f4671 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/frameData/TopPopFrame.java @@ -0,0 +1,74 @@ +package com.aserbao.androidcustomcamera.whole.createVideoByVoice.frameData; + +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Paint; +import android.graphics.RectF; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.Locale; + +/** + * 功能:最上层歌词显示框 + * @author aserbao + * @date : On 2019/1/8 6:09 PM 
+ * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.whole.createVideoByVoice.frameData + * @Copyright: 个人版权所有 + */ +public class TopPopFrame extends BaseFrameData{ + + int roundMargain = 60; + int roundHeight = 300; + int roundRadius = 25; + int roundLineWidth = 5; + private String todayYyyyMmDd; + private String lyricContent; + + public TopPopFrame(String lyricContent) { + this.lyricContent = lyricContent; + todayYyyyMmDd = todayYyyyMmDd(); + } + + @Override + void onDraw(Canvas canvas, Paint paint, float volume, int hueColor) { + int width = canvas.getWidth(); + paint.setStyle(Paint.Style.FILL);//充满 + paint.setAntiAlias(true);// 设置画笔的锯齿效果 + RectF roundRect1 = new RectF(roundMargain - roundLineWidth,roundMargain - roundLineWidth,width - roundMargain + roundLineWidth,roundHeight + roundMargain + roundLineWidth); + paint.setColor(Color.BLACK); + canvas.drawRoundRect(roundRect1,roundRadius,roundRadius,paint); + paint.setColor(hueColor); + RectF roundRect2 = new RectF(roundMargain,roundMargain,width - roundMargain,roundHeight + roundMargain); + canvas.drawRoundRect(roundRect2,roundRadius,roundRadius,paint); + + int timeMargain = roundMargain + 50; + + paint.setTextSize(10); + paint.setColor(Color.BLACK); + String sTime = todayYyyyMmDd + " "+ todayMMSS(); + paint.setTextAlign(Paint.Align.CENTER); + paint.setTextSize(40); + paint.setColor(Color.BLACK); + canvas.drawText(sTime,width/2,timeMargain,paint); + + int soundMargain = timeMargain + 80; + String soundTime = "party 是我家"; + String soundTime2 = "party party 是我家"; + paint.setTextAlign(Paint.Align.CENTER); + paint.setTextSize(60); + canvas.drawText(soundTime,width/2,soundMargain,paint); + canvas.drawText(soundTime2,width/2,soundMargain + 80,paint); + } + public static final SimpleDateFormat YYYYMMDD_FORMAT = new SimpleDateFormat("yyyy/MM/dd", Locale.getDefault()); + public static final SimpleDateFormat MMSS_FORMAT = new SimpleDateFormat("mm:ss", Locale.getDefault()); + 
public static String todayYyyyMmDd() { + return YYYYMMDD_FORMAT.format(new Date()); + } + public static String todayMMSS(){ + return MMSS_FORMAT.format(new Date()); + } + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/interfaces/IEncoderVideoCallBackListener.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/interfaces/IEncoderVideoCallBackListener.java new file mode 100644 index 0000000..2bf01a5 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/interfaces/IEncoderVideoCallBackListener.java @@ -0,0 +1,16 @@ +package com.aserbao.androidcustomcamera.whole.createVideoByVoice.interfaces; + +/** + * 功能: + * + * @author aserbao + * @date : On 2019/1/8 4:23 PM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.whole.createVideoByVoice.interfaces + * @Copyright: 个人版权所有 + */ +public interface IEncoderVideoCallBackListener { + void success(String outputMeidaPath, float finalMediaTime); + void failed(); +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/interfaces/IGetVideoDbCallBackListener.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/interfaces/IGetVideoDbCallBackListener.java new file mode 100644 index 0000000..7cafe2d --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/interfaces/IGetVideoDbCallBackListener.java @@ -0,0 +1,15 @@ +package com.aserbao.androidcustomcamera.whole.createVideoByVoice.interfaces; + +/** + * 功能: + * + * @author aserbao + * @date : On 2019/1/8 4:22 PM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.whole.createVideoByVoice + * @Copyright: 个人版权所有 + */ +public interface IGetVideoDbCallBackListener { + void cuurentFrequenty(boolean isEnd, double volume, float cuurTime); +} diff --git 
a/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/interfaces/IMuxerVideoCallBackListener.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/interfaces/IMuxerVideoCallBackListener.java new file mode 100644 index 0000000..1341eb3 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/interfaces/IMuxerVideoCallBackListener.java @@ -0,0 +1,16 @@ +package com.aserbao.androidcustomcamera.whole.createVideoByVoice.interfaces; + +/** + * 功能: + * + * @author aserbao + * @date : On 2019/1/8 4:23 PM + * @email: 1142803753@qq.com + * @project:AndroidCamera + * @package:com.aserbao.androidcustomcamera.whole.createVideoByVoice.interfaces + * @Copyright: 个人版权所有 + */ +public interface IMuxerVideoCallBackListener { + void success(); + void failed(); +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/localEdit/LocalVideoActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/localEdit/LocalVideoActivity.java new file mode 100644 index 0000000..57cee4b --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/localEdit/LocalVideoActivity.java @@ -0,0 +1,648 @@ +package com.aserbao.androidcustomcamera.whole.createVideoByVoice.localEdit; + +import android.content.Context; +import android.graphics.Bitmap; +import android.media.MediaMetadataRetriever; +import android.media.MediaPlayer; +import android.net.Uri; +import android.os.AsyncTask; +import android.os.Bundle; +import android.os.Handler; +import android.os.Message; +import android.support.annotation.NonNull; +import android.support.v7.app.AppCompatActivity; +import android.support.v7.widget.LinearLayoutManager; +import android.support.v7.widget.RecyclerView; +import android.text.TextUtils; +import android.util.Log; +import android.view.MotionEvent; +import android.view.View; +import android.view.ViewGroup; +import 
android.widget.FrameLayout; +import android.widget.ImageView; +import android.widget.ProgressBar; +import android.widget.RelativeLayout; +import android.widget.TextView; +import android.widget.Toast; + + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.base.MyApplication; +import com.aserbao.androidcustomcamera.base.utils.DisplayUtil; +import com.aserbao.androidcustomcamera.base.utils.StaticFinalValues; +import com.aserbao.androidcustomcamera.whole.createVideoByVoice.localEdit.adapter.ThumbAdapter; +import com.aserbao.androidcustomcamera.whole.videoPlayer.VideoPlayerActivity2; + +import java.io.File; +import java.lang.ref.WeakReference; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.Executors; + +import VideoHandle.CmdList; +import VideoHandle.EpEditor; +import VideoHandle.OnEditorListener; +import butterknife.BindView; +import butterknife.ButterKnife; +import butterknife.OnClick; + +import static com.aserbao.androidcustomcamera.base.utils.StaticFinalValues.ISSAVEVIDEOTEMPEXIST; +import static com.aserbao.androidcustomcamera.base.utils.StaticFinalValues.VIDEOTEMP; + +public class LocalVideoActivity extends AppCompatActivity implements MediaPlayerWrapper.IMediaCallback { + private static final int SAVE_BITMAP = 1; + private static final int SUBMIT = 2; + private static final int ClEAR_BITMAP = 3; + private static final int CLIPPER_GONE = 4; + private static final int CLIPPER_FAILURE = 5; + private static final String TAG = "Atest"; + @BindView(R.id.local_back_iv) + ImageView mLocalBackIv; + @BindView(R.id.local_rotate_iv) + ImageView mLocalRotateIv; + @BindView(R.id.local_video_next_tv) + TextView mLocalVideoNextTv; + @BindView(R.id.local_title) + RelativeLayout mLocalTitle; + @BindView(R.id.local_video_view) + VideoPreviewView mLocalVideoView; + @BindView(R.id.local_sel_time_tv) + TextView mLocalSelTimeTv; + @BindView(R.id.local_recycler_view) + RecyclerView mLocalRecyclerView; + 
@BindView(R.id.local_thumb_view) + ThumbnailView mLocalThumbView; + @BindView(R.id.local_frame_layout) + FrameLayout mLocalFrameLayout; + @BindView(R.id.pb_loading) + ProgressBar mPbLoading; + @BindView(R.id.tv_hint) + TextView mTvHint; + @BindView(R.id.pop_video_loading_fl) + FrameLayout mPopVideoLoadingFl; + private String mInputVideoPath = "/storage/emulated/0/aserbaoCamera/321.mp4"; + private String mOutVideoPath; + private int rotate; + public ThumbAdapter mThumbAdapter; + public LinearLayoutManager mLinearLayoutManager; + public float mStartTime = 0; + public float mEndTime; + public int mRecyclerWidth; + public int mTotolWidth; + public int mThumbSelTime = 30;//选择器选中的时间间隔 + public String mVideoRotation; + private int mInitRotation;//视频初始旋转角度,竖屏为90,横屏为0 + private boolean isFailure = false; + public long mLastTime; + private long lastTime; + private boolean isLocalPortrait = false; + public String mSavevideotemp; + private boolean isClickRotate = false;//是否点击了旋转按钮 + public AsyncTask mAsyncTask; + private int mRotate = 0; + private String DIR; + + @Override + public boolean dispatchTouchEvent(MotionEvent ev) { + if(mPopVideoLoadingFl != null && mPopVideoLoadingFl.getVisibility() == View.VISIBLE){ + return true; + } + return super.dispatchTouchEvent(ev); + } + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setContentView(R.layout.activity_local_video); + ButterKnife.bind(this); + mContext = this; + initData(); + initView(); + initListener(); + } + + public int mHorizontalScrollOffset; + + private void initListener() { + mLocalRecyclerView.addOnScrollListener(new RecyclerView.OnScrollListener() { + @Override + public void onScrollStateChanged(RecyclerView recyclerView, int newState) { + super.onScrollStateChanged(recyclerView, newState); + } + + @Override + public void onScrolled(RecyclerView recyclerView, int dx, int dy) { + super.onScrolled(recyclerView, dx, dy); + //获取当前RecyclerView的滑动偏移距离2340 + 
mTotolWidth = mLocalRecyclerView.computeHorizontalScrollRange(); + mHorizontalScrollOffset = mLocalRecyclerView.computeHorizontalScrollOffset(); + float mThumbLeftPosition = mLocalThumbView.getLeftInterval() + mHorizontalScrollOffset; + float v = mThumbLeftPosition / (float) mTotolWidth; + mStartTime = mVideoDuration * v; + mEndTime = (int) mStartTime + mThumbSelTime * 1000; + + if (mLocalRecyclerView.computeHorizontalScrollExtent() + mHorizontalScrollOffset == mTotolWidth) { + float right = mLocalThumbView.getRightInterval(); + float width = mLocalThumbView.getTotalWidth(); + if (right == width) { + mEndTime = mVideoDuration; + mStartTime = mEndTime - mThumbSelTime * 1000; + } + } + Log.e(TAG, "OnScrollBorder: mStartTime:" + mStartTime + "mEndTime:" + mEndTime); + } + }); + mLocalThumbView.setOnScrollBorderListener(new ThumbnailView.OnScrollBorderListener() { + @Override + public void OnScrollBorder(float start, float end) { + mTotolWidth = mLocalRecyclerView.computeHorizontalScrollRange(); + float left = mLocalThumbView.getLeftInterval(); + float mThumbLeftPosition = left + mHorizontalScrollOffset; + float v = mThumbLeftPosition / (float) mTotolWidth; + mStartTime = mVideoDuration * v; + float right = mLocalThumbView.getRightInterval(); + mThumbSelTime = (int) ((right - left) * 30 / MyApplication.screenWidth); + float width = mLocalThumbView.getTotalWidth(); + if (right == width) { + mThumbSelTime = (mVideoDuration - (int) mStartTime) / 1000; + } + if (mThumbSelTime > 30) { + mThumbSelTime = 30; + } + mLocalSelTimeTv.setText("已选取" + mThumbSelTime + "秒"); + mEndTime = mStartTime + mThumbSelTime * 1000; + Log.e(TAG, "OnScrollBorder: mStartTime:" + mStartTime + "mEndTime:" + mEndTime); + } + + @Override + public void onScrollStateChange() { + Log.e(TAG, "OnScrollBorder: startTime" + mStartTime + " endTime === " + mEndTime); + } + }); + + } + + private boolean isThreadStart = false; + private Thread mThread = new Thread(new Runnable() { + @Override + public void 
run() { + while (isPlaying) { + isThreadStart = true; + int videoDuration = mLocalVideoView.getVideoDuration(); + if (mStartTime > videoDuration || mEndTime < videoDuration) { + mLocalVideoView.seekTo((int) mStartTime / 1000); + mLocalVideoView.start(); + } + try { + Thread.sleep(500); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + } + }); + + private void initView() { + mThumbAdapter = new ThumbAdapter(this); + mLinearLayoutManager = new LinearLayoutManager(this, LinearLayoutManager.HORIZONTAL, false); + mLocalRecyclerView.setLayoutManager(mLinearLayoutManager); + mLocalRecyclerView.setAdapter(mThumbAdapter); + mLocalThumbView.setMinInterval(MyApplication.screenWidth / 6); + } + + private void initData() { + mInputVideoPath = getIntent().getBundleExtra(StaticFinalValues.BUNDLE).getString(StaticFinalValues.VIDEOFILEPATH); + initThumbs();//获取缩略图 + ArrayList srcList = new ArrayList<>(); + srcList.add(mInputVideoPath); + mLocalVideoView.setVideoPath(srcList); + mLocalVideoView.setIMediaCallback(this); + initSetParam(); + } + + /** + * + */ + private void initSetParam() { + /*if(mVideoRotation.equals("90") && mVideoWidth > mVideoHeight || mVideoRotation.equals("0") && mVideoWidth < mVideoHeight){//本地相机视频竖屏//自定义相机视频 + }*/ + //todo:自定义相机录制视频的方向不对,长宽是对的,系统相机视频只可以获取正确是角度,不能通过长宽进行判断 + if (mVideoRotation.equals("0") && mVideoWidth > mVideoHeight) {//本地视频横屏 + Log.e(TAG, "initSetParam: " ); +// mInitRotation = 90; +// mLocalVideoView.setRotation(mInitRotation); + } else if (mVideoRotation.equals("90") && mVideoWidth > mVideoHeight) {//本地视频竖屏 + mInitRotation = 90; + isLocalPortrait = true; + setPortraitParam(); + }else if(mVideoRotation.equals("0") && mVideoWidth < mVideoHeight){ //保存视频竖屏 + setPortraitParam(); + }else if(mVideoRotation.equals("180") && mVideoWidth > mVideoHeight){//本地视频横屏 + Log.e(TAG, "initSetParam: " ); + } else{ + mInitRotation = 90; + setPortraitParam(); + } + } + + @NonNull + private void setPortraitParam() { + ViewGroup.LayoutParams 
layoutParams1 = mLocalVideoView.getLayoutParams(); + layoutParams1.width = 630; + layoutParams1.height = 1120; + mLocalVideoView.setLayoutParams(layoutParams1); + mLocalVideoView.requestLayout(); + } + @NonNull + private void setLandScapeParam() { + ViewGroup.LayoutParams layoutParams1 = mLocalVideoView.getLayoutParams(); + layoutParams1.width = 1120; + layoutParams1.height = 630; + mLocalVideoView.setLayoutParams(layoutParams1); + mLocalVideoView.requestLayout(); + } + + + + @OnClick({R.id.local_back_iv, R.id.local_rotate_iv, R.id.local_video_next_tv}) + public void onViewClicked(View view) { + if (System.currentTimeMillis() - lastTime < 500 || mPopVideoLoadingFl != null && mPopVideoLoadingFl.getVisibility() == View.VISIBLE) { + return; + } + lastTime = System.currentTimeMillis(); + switch (view.getId()) { + case R.id.local_back_iv: + onBackPressed(); + break; + case R.id.local_rotate_iv: + isClickRotate = true; + mRotate = rotate; + if (rotate < 270) { + rotate = rotate + 90; + } else { + rotate = 0; + } + int rotation = mInitRotation + rotate; + if(rotation == 90 || rotation == 270){ + setLandScapeParam(); + }else{ + setPortraitParam(); + } +// mLocalVideoView.setRotation(rotation); + /* ObjectAnimator animator = ObjectAnimator.ofFloat(mLocalVideoView, "rotation", mRotate, rotation); + animator.setDuration(500); + animator.start();*/ + if (mInitRotation == 90) { + mLocalVideoView.setRotate(rotate); + } else { + mLocalVideoView.setRotate(rotate); + } + break; + case R.id.local_video_next_tv: + CmdList cmd = new CmdList(); + cmd.append("-y"); + cmd.append("-ss").append(String.valueOf((int) mStartTime / 1000)).append("-t").append(String.valueOf(mThumbSelTime)).append("-accurate_seek"); + cmd.append("-i").append(mInputVideoPath); + if (isLocalPortrait) { + if (!isClickRotate && rotate == 0 || rotate == 90) { + rotate = 180; + } else { + isLocalPortrait = false; + if (rotate == 0) { + rotate = 270; + } else { + rotate = rotate - 90; + } + } + } + switch (rotate) { + 
case 0: + cmd.append("-vcodec"); + cmd.append("copy"); + cmd.append("-acodec"); + cmd.append("copy"); + break; + case 270: + cmd.append("-filter_complex"); + cmd.append("transpose=2"); + cmd.append("-preset"); + cmd.append("ultrafast"); + break; + case 180: + cmd.append("-filter_complex"); + cmd.append("vflip,hflip"); + cmd.append("-preset"); + cmd.append("ultrafast"); + break; + case 90: + cmd.append("-filter_complex"); + cmd.append("transpose=1"); + cmd.append("-preset"); + cmd.append("ultrafast"); + break; + } + + File file = new File(ISSAVEVIDEOTEMPEXIST); + if (!file.exists()){ + file.mkdir(); + } + mOutVideoPath = ISSAVEVIDEOTEMPEXIST + System.currentTimeMillis() + ".mp4"; + if (!new File(VIDEOTEMP).exists()) { + new File(VIDEOTEMP).mkdirs(); + } + cmd.append(mOutVideoPath); + mLocalVideoView.pause(); + exec(cmd); + break; + } + } + + public void translateVideo() { + CmdList cmd = new CmdList(); + cmd.append("-y"); + cmd.append("-i"); + cmd.append(mOutVideoPath); + cmd.append("-filter_complex"); + cmd.append("vflip,hflip"); + cmd.append("-preset"); + cmd.append("ultrafast"); + File file = new File(ISSAVEVIDEOTEMPEXIST); + if (!file.exists()){ + file.mkdir(); + } + mSavevideotemp = ISSAVEVIDEOTEMPEXIST+ System.currentTimeMillis() + ".mp4"; + cmd.append(mSavevideotemp); + isLocalPortrait = false; + exec(cmd); + } + + public void exec(CmdList cmdList) { + mPopVideoLoadingFl.setVisibility(View.VISIBLE); +// progressDialog = DialogManager.showProgressDialog(mContext); + String[] cmds = cmdList.toArray(new String[cmdList.size()]); + StringBuffer stringBuffer = new StringBuffer(); + for (String ss : cmds) { + stringBuffer.append(ss).append(" "); + Log.e("EpMediaF", "cmd:" + ss + " stringBuffer : " + stringBuffer.toString()); + } + EpEditor.execCmd(stringBuffer.toString(), 0, new OnEditorListener() { + @Override + public void onSuccess() { + isFailure = false; + if (!isLocalPortrait) { + if (!TextUtils.isEmpty(mSavevideotemp)) { + if (new 
File(mOutVideoPath).exists()) { + new File(mOutVideoPath).delete(); + } + VideoPlayerActivity2.launch(LocalVideoActivity.this,mSavevideotemp); + } else { + VideoPlayerActivity2.launch(LocalVideoActivity.this,mOutVideoPath); + } + myHandler.sendEmptyMessage(CLIPPER_GONE); + } else { + translateVideo(); + } + } + + @Override + public void onFailure() { + isFailure = true; + myHandler.sendEmptyMessage(CLIPPER_GONE); + } + + @Override + public void onProgress(float v) { + } + }); + } + + + @Override + protected void onResume() { + super.onResume(); + if (resumed) { + mLocalVideoView.start(); + } + resumed = true; + } + + @Override + protected void onPause() { + super.onPause(); + mLocalVideoView.pause(); + } + + @Override + protected void onDestroy() { + super.onDestroy(); + mHandler.removeCallbacksAndMessages(null); + isDestroy = true; + mLocalVideoView.onDestroy(); + for (int i = 0; i < mThumbBitmap.size(); i++) { + mThumbBitmap.get(i).recycle(); + } + mThumbBitmap = null; + System.gc(); + mAsyncTask.cancel(true); + mAsyncTask = null; + } + + @Override + public void onBackPressed() { + if (mPopVideoLoadingFl != null && mPopVideoLoadingFl.getVisibility() == View.GONE) { + super.onBackPressed(); + } + } + + private boolean resumed; + private boolean isDestroy; + private boolean isPlaying = false; + static final int VIDEO_PREPARE = 0; + static final int VIDEO_START = 1; + static final int VIDEO_UPDATE = 2; + static final int VIDEO_PAUSE = 3; + static final int VIDEO_CUT_FINISH = 4; + Handler mHandler = new Handler() { + @Override + public void handleMessage(Message msg) { + switch (msg.what) { + case VIDEO_PREPARE: + Executors.newSingleThreadExecutor().execute(update); + break; + case VIDEO_START: + isPlaying = true; + break; + case VIDEO_UPDATE: + /* int curDuration = mVideoView.getCurDuration(); + if (curDuration > startPoint + clipDur) { + mVideoView.seekTo(startPoint); + mVideoView.start(); + }*/ + break; + case VIDEO_PAUSE: + isPlaying = false; + break; + case 
VIDEO_CUT_FINISH: + finish(); + //TODO 已经渲染完毕了  + break; + } + } + }; + private Runnable update = new Runnable() { + @Override + public void run() { + while (!isDestroy) { + if (!isPlaying) { + try { + Thread.currentThread().sleep(200); + } catch (InterruptedException e) { + e.printStackTrace(); + } + continue; + } + mHandler.sendEmptyMessage(VIDEO_UPDATE); + try { + Thread.currentThread().sleep(200); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + } + }; + + @Override + public void onVideoPrepare() { + mHandler.sendEmptyMessage(VIDEO_PREPARE); + } + + @Override + public void onVideoStart() { + mHandler.sendEmptyMessage(VIDEO_START); + } + + @Override + public void onVideoPause() { + mHandler.sendEmptyMessage(VIDEO_PAUSE); + } + + @Override + public void onCompletion(MediaPlayer mp) { + mLocalVideoView.seekTo(0); + mLocalVideoView.start(); + } + + @Override + public void onVideoChanged(VideoInfo info) { + } + + public int mVideoHeight, mVideoWidth, mVideoDuration; + private Context mContext; + + private void initThumbs() { + final MediaMetadataRetriever mediaMetadata = new MediaMetadataRetriever(); + mediaMetadata.setDataSource(mContext, Uri.parse(mInputVideoPath)); + mVideoRotation = mediaMetadata.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION); + mVideoWidth = Integer.parseInt(mediaMetadata.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH)); + mVideoHeight = Integer.parseInt(mediaMetadata.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT)); + mVideoDuration = Integer.parseInt(mediaMetadata.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)); + if(mVideoDuration /1000 > 30){ + mThumbSelTime = 30; + }else { + mThumbSelTime = mVideoDuration / 1000; + } + mEndTime = (mVideoDuration + 100) / 1000; + if (mEndTime < 30) { + mLocalSelTimeTv.setText("已选取" + mEndTime + "秒"); + } + final int frame; + final int frameTime; + if (mVideoDuration >= 29900 && mVideoDuration < 30300) { + frame = 10; 
+ frameTime = mVideoDuration / frame * 1000; + } else { + frameTime = 3000 * 1000; + frame = mVideoDuration * 1000 / frameTime; + } + mAsyncTask = new AsyncTask() { + @Override + protected Boolean doInBackground(Void... params) { + myHandler.sendEmptyMessage(ClEAR_BITMAP); + for (int x = 0; x < frame; x++) { + Bitmap bitmap = mediaMetadata.getFrameAtTime(frameTime * x, MediaMetadataRetriever.OPTION_CLOSEST_SYNC); + Message msg = myHandler.obtainMessage(); + msg.what = SAVE_BITMAP; + msg.obj = bitmap; + msg.arg1 = x; + Log.e(TAG, "doInBackground: " + x); + myHandler.sendMessage(msg); + } + mediaMetadata.release(); + return true; + } + + @Override + protected void onPostExecute(Boolean result) { + myHandler.sendEmptyMessage(SUBMIT); + } + }; + mAsyncTask.execute(); + } + + private List mThumbBitmap = new ArrayList<>(); + + private static class MyHandler extends Handler { + private WeakReference mWeakReference; + + public MyHandler(LocalVideoActivity localVideoActivity) { + mWeakReference = new WeakReference(localVideoActivity); + } + + @Override + public void handleMessage(Message msg) { + final LocalVideoActivity localVideoActivity = mWeakReference.get(); + if (localVideoActivity != null) { + switch (msg.what) { + case CLIPPER_FAILURE: + Toast.makeText(localVideoActivity.mContext, "视频编译失败,请换个视频试试", Toast.LENGTH_LONG).show(); + case CLIPPER_GONE: + localVideoActivity.mPopVideoLoadingFl.setVisibility(View.GONE); + break; + case ClEAR_BITMAP: + localVideoActivity.mThumbBitmap.clear(); + break; + case SAVE_BITMAP: + if (localVideoActivity.mThumbBitmap != null) { + localVideoActivity.mThumbBitmap.add(msg.arg1, (Bitmap) msg.obj); + } + break; + case SUBMIT: + localVideoActivity.mThumbAdapter.addThumb(localVideoActivity.mThumbBitmap); + localVideoActivity.mThumbAdapter.setLoadSuccessCallBack(new ThumbAdapter.LoadSuccessCallBack() { + @Override + public void callback() { + //获取recyclerView在屏幕中的长度1080 + localVideoActivity.mRecyclerWidth = 
localVideoActivity.mLocalRecyclerView.computeHorizontalScrollExtent(); + //获取recyclerView所有item的长度3420 + localVideoActivity.mTotolWidth = localVideoActivity.mLocalRecyclerView.computeHorizontalScrollRange(); + int i = localVideoActivity.mLocalRecyclerView.computeHorizontalScrollRange(); + if (i < MyApplication.screenWidth) { + if (i > MyApplication.screenWidth / 6) { + localVideoActivity.mLocalThumbView.setWidth(i + DisplayUtil.dp2px(localVideoActivity,1)); + } else { + localVideoActivity.mLocalThumbView.setWidth(MyApplication.screenWidth / 6 - DisplayUtil.dp2px(localVideoActivity,10)); + } + } + Log.e(TAG, "callback: " + i); + } + }); + break; + } + } + } + } + + + private Handler myHandler = new MyHandler(this); + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/localEdit/MediaPlayerWrapper.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/localEdit/MediaPlayerWrapper.java new file mode 100644 index 0000000..55d786c --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/localEdit/MediaPlayerWrapper.java @@ -0,0 +1,248 @@ +package com.aserbao.androidcustomcamera.whole.createVideoByVoice.localEdit; + +import android.media.AudioManager; +import android.media.MediaMetadataRetriever; +import android.media.MediaPlayer; +import android.view.Surface; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +/** + * Created by Administrator on 2017/6/29 0029. 
+ * desc:MediaPlayer的代理类 支持循环播放多个视频 + */ + +public class MediaPlayerWrapper implements MediaPlayer.OnCompletionListener, MediaPlayer.OnErrorListener, MediaPlayer.OnPreparedListener { + private MediaPlayer mCurMediaPlayer; //current player + private List mPlayerList; //player list + private List mSrcList; //video src list + private List mInfoList; //video info list + private Surface surface; + private IMediaCallback mCallback; + private int curIndex; //current player index + + public MediaPlayerWrapper() { + mPlayerList = new ArrayList<>(); + mInfoList = new ArrayList<>(); + } + + public void setOnCompletionListener(IMediaCallback callback) { + this.mCallback = callback; + } + + /** + * get video info and store + * + * @param dataSource + */ + public void setDataSource(List dataSource) { + this.mSrcList = dataSource; + MediaMetadataRetriever retr = new MediaMetadataRetriever(); + for (int i = 0; i < dataSource.size(); i++) { + VideoInfo info = new VideoInfo(); + String path=dataSource.get(i); + retr.setDataSource(path); + String rotation = retr.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION); + String width = retr.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH); + String height = retr.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT); + String duration = retr.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION); + + info.path=path; + try { + info.rotation = Integer.parseInt(rotation); + info.width = Integer.parseInt(width); + info.height = Integer.parseInt(height); + info.duration = Integer.parseInt(duration); + } catch (NumberFormatException e) { + e.printStackTrace(); + }finally { + mInfoList.add(info); + } + } + } + public List getVideoInfo(){ + return mInfoList; + } + + public void setSurface(Surface surface) { + this.surface = surface; + } + + public void prepare() throws IOException { + for (int i = 0; i < mSrcList.size(); i++) { + MediaPlayer player = new MediaPlayer(); + 
player.setAudioStreamType(AudioManager.STREAM_MUSIC); + player.setOnCompletionListener(this); + player.setOnErrorListener(this); + player.setOnPreparedListener(this); + player.setDataSource(mSrcList.get(i)); + player.prepare(); + mPlayerList.add(player); + if (i == 0) { + mCurMediaPlayer = player; + if (mCallback != null) { + mCallback.onVideoChanged(mInfoList.get(0)); + } + } + } + if (mCallback != null) { + mCallback.onVideoPrepare(); + } + } + + public void start() { + mCurMediaPlayer.setSurface(surface); + mCurMediaPlayer.start(); + if (mCallback != null) { + mCallback.onVideoStart(); + } + } + + public void pause() { + if(mCurMediaPlayer != null) { + mCurMediaPlayer.pause(); + if (mCallback != null) { + mCallback.onVideoPause(); + } + } + } + public int getCurVideoDuration(){ + return mInfoList.get(curIndex).duration; + } + + public int getVideoDuration() { + if (mSrcList.size() == 0) { + throw new IllegalStateException("please set video src first"); + } + int duration = 0; + for (int i = 0; i < mSrcList.size(); i++) { + duration += mInfoList.get(i).duration; + } + return duration; + } + + public int getCurPosition() { + int position = 0; + for (int i = 0; i < curIndex; i++) { + position += mInfoList.get(i).duration; + } + position += mCurMediaPlayer.getCurrentPosition(); + return position; + } + + public void seekTo(int time) { + int duration = 0; + mCurMediaPlayer.seekTo(time); + /*mCurMediaPlayer.setOnSeekCompleteListener(new MediaPlayer.OnSeekCompleteListener() { + @Override + public void onSeekComplete(MediaPlayer mp) { + mCurMediaPlayer.start(); + new Handler().postDelayed(new Runnable() { + @Override + public void run() { + mCurMediaPlayer.pause(); + } + },1000); + } + });*/ + } + + public boolean isPlaying() { + if (mCurMediaPlayer != null) { + return mCurMediaPlayer.isPlaying(); + }else{ + return false; + } + } + + + @Override + public void onCompletion(MediaPlayer mp) { + curIndex++; + if (curIndex >= mSrcList.size()) { + curIndex = 0; + if 
(mCallback != null) { + mCallback.onCompletion(mp); + } + } +// switchPlayer(mp); + + } + + private void switchPlayer(MediaPlayer mp) { + mp.setSurface(null); + if (mCallback != null) { + mCallback.onVideoChanged(mInfoList.get(curIndex)); + } + mCurMediaPlayer = mPlayerList.get(curIndex); + mCurMediaPlayer.setSurface(surface); + mCurMediaPlayer.start(); + } + + public void stop() { + mCurMediaPlayer.stop(); + } + + public void release() { + for (int i = 0; i < mPlayerList.size(); i++) { + mPlayerList.get(i).release(); + } + } + + @Override + public boolean onError(MediaPlayer mp, int what, int extra) { + return false; + } + + @Override + public void onPrepared(MediaPlayer mp) { + + } + + public void setVolume(float volume) { + for(int i=0;i0 && minPx > mWidth){ + minPx = mWidth; + } + this.minPx = minPx; + } + public void setWidth(int width){ + mWidth = width; + rectF2.left = mWidth - rectWidth; + rectF2.top = 0; + rectF2.right = mWidth; + invalidate(); + } + + public interface OnScrollBorderListener{ + void OnScrollBorder(float start, float end); + void onScrollStateChange(); + } + + public void setOnScrollBorderListener(OnScrollBorderListener listener){ + this.onScrollBorderListener = listener; + } + + public float getLeftInterval(){ + return rectF.left; + } + + public float getRightInterval(){ + return rectF2.right; + } + public float getTotalWidth(){ + return mWidth; + } + + @Override + protected void onLayout(boolean changed, int left, int top, int right, int bottom) { + super.onLayout(changed, left, top, right, bottom); + + if (mWidth == 0) { + mWidth = getWidth(); + mHeight = getHeight(); + + rectF = new RectF(); + rectF.left = 0; + rectF.top = 0; + rectF.right = rectWidth; + rectF.bottom = mHeight; + + rectF2 = new RectF(); + rectF2.left = mWidth - rectWidth; + rectF2.top = 0; + rectF2.right = mWidth; + rectF2.bottom = mHeight; + } + } + + private float downX; + private boolean scrollLeft; + private boolean scrollRight; + + @Override + public boolean 
onTouchEvent(MotionEvent event) { + + move(event); + return scrollLeft || scrollRight; + } + + boolean scrollChange; + private boolean move(MotionEvent event) { + + switch (event.getAction()) { + case MotionEvent.ACTION_DOWN: + downX = event.getX(); + if (downX > rectF.left-rectWidth/2 && downX < rectF.right+rectWidth/2) { + scrollLeft = true; + } + if (downX > rectF2.left-rectWidth/2 && downX < rectF2.right+rectWidth/2) { + scrollRight = true; + } + break; + case MotionEvent.ACTION_MOVE: + + float moveX = event.getX(); + + float scrollX = moveX - downX; + + if (scrollLeft) { + rectF.left = rectF.left + scrollX; + rectF.right = rectF.right + scrollX; + + if(rectF.left < 0){ + rectF.left = 0; + rectF.right = rectWidth; + } + if(rectF.left > rectF2.right-minPx){ + rectF.left = rectF2.right-minPx; + rectF.right = rectF.left+rectWidth; + } + scrollChange = true; + invalidate(); + } else if (scrollRight) { + rectF2.left = rectF2.left + scrollX; + rectF2.right = rectF2.right + scrollX; + + if(rectF2.right > mWidth){ + rectF2.right = mWidth; + rectF2.left = rectF2.right- rectWidth; + } + if(rectF2.right < rectF.left+minPx){ + rectF2.right = rectF.left+minPx; + rectF2.left = rectF2.right-rectWidth; + } + scrollChange = true; + invalidate(); + } + + if(onScrollBorderListener != null){ + onScrollBorderListener.OnScrollBorder(rectF.left, rectF2.right); + } + + + downX = moveX; + break; + case MotionEvent.ACTION_CANCEL: + case MotionEvent.ACTION_UP: + downX = 0; + scrollLeft = false; + scrollRight = false; + if(scrollChange && onScrollBorderListener != null){ + onScrollBorderListener.onScrollStateChange(); + } + scrollChange = false; + break; + } + return true; + } + + @Override + protected void onDraw(Canvas canvas) { + + mPaint.setColor(Color.parseColor("#FC4253")); + + Rect rect = new Rect(); + rect.left = (int) rectF.left; + rect.top = (int) rectF.top; + rect.right = (int) rectF.right; + rect.bottom = (int) rectF.bottom; + canvas.drawBitmap(bitmap, null, rectF, mPaint); + 
+ Rect rect2 = new Rect(); + rect2.left = (int) rectF2.left; + rect2.top = (int) rectF2.top; + rect2.right = (int) rectF2.right; + rect2.bottom = (int) rectF2.bottom; + canvas.drawBitmap(bitmap, null, rectF2, mPaint); + + + + canvas.drawLine(rectF.left, 0, rectF2.right, 0, mPaint); + canvas.drawLine(rectF.left, mHeight, rectF2.right, mHeight, mPaint); + + mPaint.setColor(Color.parseColor("#99313133")); + + RectF rectF3 = new RectF(); + rectF3.left = 0; + rectF3.top = 0; + rectF3.right = rectF.left; + rectF3.bottom = mHeight; + canvas.drawRect(rectF3, mPaint); + + RectF rectF4 = new RectF(); + rectF4.left = rectF2.right; + rectF4.top = 0; + rectF4.right = mWidth; + rectF4.bottom = mHeight; + canvas.drawRect(rectF4, mPaint); + } +} \ No newline at end of file diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/localEdit/VideoDrawer.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/localEdit/VideoDrawer.java new file mode 100644 index 0000000..93ed17b --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/localEdit/VideoDrawer.java @@ -0,0 +1,225 @@ +package com.aserbao.androidcustomcamera.whole.createVideoByVoice.localEdit; + +import android.content.Context; +import android.content.res.Resources; +import android.graphics.Bitmap; +import android.graphics.Matrix; +import android.graphics.SurfaceTexture; +import android.opengl.GLES11Ext; +import android.opengl.GLES20; +import android.opengl.GLSurfaceView; +import android.util.Log; +import android.view.MotionEvent; + + +import com.aserbao.androidcustomcamera.whole.record.filters.AFilter; +import com.aserbao.androidcustomcamera.whole.record.filters.GroupFilter; +import com.aserbao.androidcustomcamera.whole.record.filters.NoFilter; +import com.aserbao.androidcustomcamera.whole.record.filters.ProcessFilter; +import com.aserbao.androidcustomcamera.whole.record.filters.RotationOESFilter; +import 
com.aserbao.androidcustomcamera.whole.record.filters.WaterMarkFilter; +import com.aserbao.androidcustomcamera.whole.record.filters.gpuFilters.baseFilter.GPUImageFilter; +import com.aserbao.androidcustomcamera.whole.record.filters.gpuFilters.baseFilter.MagicBeautyFilter; +import com.aserbao.androidcustomcamera.whole.record.ui.SlideGpuFilterGroup; +import com.aserbao.androidcustomcamera.whole.record.utils.EasyGlUtils; +import com.aserbao.androidcustomcamera.whole.record.utils.MatrixUtils; + +import javax.microedition.khronos.egl.EGLConfig; +import javax.microedition.khronos.opengles.GL10; + + + +public class VideoDrawer implements GLSurfaceView.Renderer { + private float[] OM; + private float[] SM = new float[16]; + private SurfaceTexture surfaceTexture; + private RotationOESFilter mPreFilter; + private AFilter mShow; + private MagicBeautyFilter mBeautyFilter; + private AFilter mProcessFilter; + private final GroupFilter mBeFilter; + private SlideGpuFilterGroup mSlideFilterGroup; + + private GPUImageFilter mGroupFilter; + private int viewWidth; + private int viewHeight; + + private int[] fFrame = new int[1]; + private int[] fTexture = new int[1]; + private int rotation; + private boolean isBeauty = false; + + private static final String TAG = "VideoDrawer"; + + public VideoDrawer(Context context, Resources res){ + Log.e(TAG, "VideoDrawer: " ); + mPreFilter = new RotationOESFilter(res); + mShow = new NoFilter(res); + mBeFilter = new GroupFilter(res); + mBeautyFilter = new MagicBeautyFilter(); + + mProcessFilter=new ProcessFilter(res); + + mSlideFilterGroup = new SlideGpuFilterGroup(); + OM = MatrixUtils.getOriginalMatrix(); + MatrixUtils.flip(OM,false,true); + mShow.setMatrix(OM); + } + + public void addWaterMarkFilter(Resources res, int x, int y, int width, int height, long startTime, long endTime, Bitmap bitmap, int bitRes, boolean isGif, float rotateDegree){ + WaterMarkFilter waterMarkFilter = new WaterMarkFilter(res,isGif,bitRes,rotateDegree); + 
waterMarkFilter.setWaterMark(bitmap); + waterMarkFilter.setPosition(x, y, 0, 0); + waterMarkFilter.setShowTime(startTime, endTime); + mBeFilter.addFilter(waterMarkFilter); + Log.e(TAG, "addWaterMarkFilter: "); + } + public void addWaterMarkFilter(Resources res, int x, int y, int width, int height, long startTime, long endTime, Bitmap bitmap, int bitRes, boolean isGif, float rotateDegree, Matrix matrix){ + WaterMarkFilter waterMarkFilter = new WaterMarkFilter(res,isGif,bitRes,rotateDegree); + waterMarkFilter.setWaterMark(bitmap); + waterMarkFilter.setPosition(x, y, 0, 0); + waterMarkFilter.setShowTime(startTime, endTime); + waterMarkFilter.setMatrix(matrix); + mBeFilter.addFilter(waterMarkFilter); + Log.e(TAG, "addWaterMarkFilter: "); + } + + @Override + public void onSurfaceCreated(GL10 gl, EGLConfig config) { + Log.e(TAG, "onSurfaceCreated: "); + int texture[]=new int[1]; + GLES20.glGenTextures(1,texture,0); + GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES ,texture[0]); + GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, + GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR); + GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, + GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR); + surfaceTexture = new SurfaceTexture(texture[0]); + mPreFilter.create(); + mPreFilter.setTextureId(texture[0]); + + mBeFilter.create(); + mProcessFilter.create(); + mShow.create(); + mBeautyFilter.init(); + mBeautyFilter.setBeautyLevel(3); + mSlideFilterGroup.init(); + } + public void onVideoChanged(VideoInfo info){ + Log.e(TAG, "onVideoChanged: "); + setRotation(info.rotation); + MatrixUtils.flip(SM,false,true); + if(info.rotation==0||info.rotation==180){ + MatrixUtils.getShowMatrix(SM,info.width,info.height,viewWidth,viewHeight); + }else{ + MatrixUtils.getShowMatrix(SM,info.height,info.width,viewWidth,viewHeight); + } + + mPreFilter.setMatrix(SM); + } + @Override + public void onSurfaceChanged(GL10 gl, int width, int height) { + Log.e(TAG, "onSurfaceChanged: "); + 
viewWidth=width; + viewHeight=height; + GLES20.glDeleteFramebuffers(1, fFrame, 0); + GLES20.glDeleteTextures(1, fTexture, 0); + + GLES20.glGenFramebuffers(1,fFrame,0); + EasyGlUtils.genTexturesWithParameter(1,fTexture,0, GLES20.GL_RGBA,viewWidth,viewHeight); + + mBeFilter.setSize(viewWidth,viewHeight); + mProcessFilter.setSize(viewWidth,viewHeight); + mBeautyFilter.onDisplaySizeChanged(viewWidth,viewHeight); + mBeautyFilter.onInputSizeChanged(viewWidth,viewHeight); + mSlideFilterGroup.onSizeChanged(viewWidth,viewHeight); + } + + private long mTime; + public void setMediaTime(long time){ + mTime = time; + onDrawFrame(null); + } + + @Override + public void onDrawFrame(GL10 gl) { + surfaceTexture.updateTexImage(); + EasyGlUtils.bindFrameTexture(fFrame[0],fTexture[0]); + GLES20.glViewport(0,0,viewWidth,viewHeight); + mPreFilter.draw(); + EasyGlUtils.unBindFrameBuffer(); + + mBeFilter.setTextureId(fTexture[0]); + mBeFilter.draw(mTime); + + if (mBeautyFilter != null && isBeauty && mBeautyFilter.getBeautyLevel() != 0){ + EasyGlUtils.bindFrameTexture(fFrame[0],fTexture[0]); + GLES20.glViewport(0,0,viewWidth,viewHeight); + mBeautyFilter.onDrawFrame(mBeFilter.getOutputTexture()); + EasyGlUtils.unBindFrameBuffer(); + mProcessFilter.setTextureId(fTexture[0]); + }else { + mProcessFilter.setTextureId(mBeFilter.getOutputTexture()); + } + mProcessFilter.draw(); + + mSlideFilterGroup.onDrawFrame(mProcessFilter.getOutputTexture()); + if (mGroupFilter != null){ + EasyGlUtils.bindFrameTexture(fFrame[0],fTexture[0]); + GLES20.glViewport(0,0,viewWidth,viewHeight); + mGroupFilter.onDrawFrame(mSlideFilterGroup.getOutputTexture()); + EasyGlUtils.unBindFrameBuffer(); + mProcessFilter.setTextureId(fTexture[0]); + }else { + mProcessFilter.setTextureId(mSlideFilterGroup.getOutputTexture()); + } + mProcessFilter.draw(); + + GLES20.glViewport(0,0,viewWidth,viewHeight); + + mShow.setTextureId(mProcessFilter.getOutputTexture()); + mShow.draw(); + } + public SurfaceTexture getSurfaceTexture(){ + 
return surfaceTexture; + } + + public void setRotation(int rotation){ + this.rotation=rotation; + if(mPreFilter!=null){ + mPreFilter.setRotation(this.rotation); + } + } + public void switchBeauty(){ + isBeauty = !isBeauty; + } + public void isOpenBeauty(boolean isBeauty){ + this.isBeauty = isBeauty; + } + + public void onTouch(MotionEvent event){ +// mSlideFilterGroup.onTouchEvent(event); + } + public void setOnFilterChangeListener(SlideGpuFilterGroup.OnFilterChangeListener listener){ + mSlideFilterGroup.setOnFilterChangeListener(listener); + } + public void setFilter(int i){ + mSlideFilterGroup.setFilter(i); + } + public void checkGlError(String s) { + int error; + while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) { + throw new RuntimeException(s + ": glError " + error); + } + } + + public void setGpuFilter(GPUImageFilter filter) { + if (filter != null){ + mGroupFilter = filter; + mGroupFilter.init(); + mGroupFilter.onDisplaySizeChanged(viewWidth, viewWidth); + mGroupFilter.onInputSizeChanged(viewWidth,viewHeight); + } + + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/localEdit/VideoInfo.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/localEdit/VideoInfo.java new file mode 100644 index 0000000..6691b18 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/localEdit/VideoInfo.java @@ -0,0 +1,22 @@ +package com.aserbao.androidcustomcamera.whole.createVideoByVoice.localEdit; + +/** + * Created by Administrator on 2017/6/29 0029. 
+ * 视频的信息bean + */ + +public class VideoInfo { + public String path;//路径 + public int rotation;//旋转角度 + public int width;//宽 + public int height;//高 + public int bitRate;//比特率 + public int frameRate;//帧率 + public int frameInterval;//关键帧间隔 + public int duration;//时长 + + public int expWidth;//期望宽度 + public int expHeight;//期望高度 + public int cutPoint;//剪切的开始点 + public int cutDuration;//剪切的时长 +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/localEdit/VideoPreviewView.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/localEdit/VideoPreviewView.java new file mode 100644 index 0000000..71c9969 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/localEdit/VideoPreviewView.java @@ -0,0 +1,206 @@ +package com.aserbao.androidcustomcamera.whole.createVideoByVoice.localEdit; + +import android.content.Context; +import android.graphics.SurfaceTexture; +import android.media.MediaPlayer; +import android.opengl.GLSurfaceView; +import android.util.AttributeSet; +import android.util.Log; +import android.view.MotionEvent; +import android.view.Surface; +import android.view.SurfaceHolder; + + +import com.aserbao.androidcustomcamera.whole.record.ui.SlideGpuFilterGroup; + +import java.io.IOException; +import java.util.List; + +import javax.microedition.khronos.egl.EGLConfig; +import javax.microedition.khronos.opengles.GL10; + +public class VideoPreviewView extends GLSurfaceView implements GLSurfaceView.Renderer, MediaPlayerWrapper.IMediaCallback { + private MediaPlayerWrapper mMediaPlayer; + private VideoDrawer mDrawer; + + /**视频播放状态的回调*/ + private MediaPlayerWrapper.IMediaCallback callback; + + public VideoPreviewView(Context context) { + super(context,null); + } + + public VideoPreviewView(Context context, AttributeSet attrs) { + super(context, attrs); + init(context); + } + public void setRotate(int angle){ + mDrawer.setRotation(angle); + } + + private void init(Context 
context) { + setEGLContextClientVersion(2); + setRenderer(this); + setRenderMode(RENDERMODE_WHEN_DIRTY); + setPreserveEGLContextOnPause(false); + setCameraDistance(100); + mDrawer = new VideoDrawer(context,getResources()); + + //初始化Drawer和VideoPlayer + mMediaPlayer = new MediaPlayerWrapper(); + mMediaPlayer.setOnCompletionListener(this); + } + + /**设置视频的播放地址*/ + public void setVideoPath(List paths){ + mMediaPlayer.setDataSource(paths); + } + + public void setFilter(int i){ + mDrawer.setFilter(i); + } + + @Override + public void surfaceDestroyed(SurfaceHolder holder) { +// onDestroy(); + super.surfaceDestroyed(holder); + Log.e("Atest", "surfaceDestroyed: "); + } + + @Override + public void onSurfaceCreated(GL10 gl, EGLConfig config) { + mDrawer.onSurfaceCreated(gl,config); + SurfaceTexture surfaceTexture = mDrawer.getSurfaceTexture(); + surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() { + @Override + public void onFrameAvailable(SurfaceTexture surfaceTexture) { + requestRender(); + } + }); + Surface surface = new Surface(surfaceTexture); + mMediaPlayer.setSurface(surface); + try { + mMediaPlayer.prepare(); + } catch (IOException e) { + e.printStackTrace(); + } + mMediaPlayer.start(); + } + + @Override + public void onSurfaceChanged(GL10 gl, int width, int height) { + mDrawer.onSurfaceChanged(gl,width,height); + } + + + @Override + public void onDrawFrame(GL10 gl) { + Log.e("a", "onDrawFrame: " ); + mDrawer.onDrawFrame(gl); + } + public void onDestroy(){ + if (mMediaPlayer != null) { + if(mMediaPlayer.isPlaying()){ + mMediaPlayer.stop(); + } + mMediaPlayer.release(); + } + } + public void onTouch(final MotionEvent event){ + queueEvent(new Runnable() { + @Override + public void run() { + mDrawer.onTouch(event); + } + }); + } + public void setOnFilterChangeListener(SlideGpuFilterGroup.OnFilterChangeListener listener){ + mDrawer.setOnFilterChangeListener(listener); + } + + @Override + public void onVideoPrepare() { + if 
(callback!= null){ + callback.onVideoPrepare(); + } + } + + @Override + public void onVideoStart() { + if(callback!=null){ + callback.onVideoStart(); + } + } + + @Override + public void onVideoPause() { + if (callback != null){ + callback.onVideoPause(); + } + } + + @Override + public void onCompletion(MediaPlayer mp) { + if (callback != null){ + callback.onCompletion(mp); + } + } + + @Override + public void onVideoChanged(final VideoInfo info) { + queueEvent(new Runnable() { + @Override + public void run() { + mDrawer.onVideoChanged(info); + } + }); + if(callback!=null){ + callback.onVideoChanged(info); + } + } + /** + * isPlaying now + * */ + public boolean isPlaying(){ + return mMediaPlayer.isPlaying(); + } + /** + * pause play + * */ + public void pause(){ + if (mMediaPlayer != null) { + mMediaPlayer.pause(); + } + } + /** + * start play video + * */ + public void start(){ + mMediaPlayer.start(); + } + /** + * 跳转到指定的时间点,只能跳到关键帧 + * */ + public void seekTo(int time){ + requestRender(); + mMediaPlayer.seekTo(time); + } + /** + * 获取当前视频的长度 + * */ + public int getVideoDuration(){ + return mMediaPlayer.getCurVideoDuration(); + } + + /** + * 切换美颜状态 + * */ + public void switchBeauty(){ + mDrawer.switchBeauty(); + } + + + public void setIMediaCallback(MediaPlayerWrapper.IMediaCallback callback){ + this.callback=callback; + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/localEdit/adapter/ThumbAdapter.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/localEdit/adapter/ThumbAdapter.java new file mode 100644 index 0000000..9af43f0 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/createVideoByVoice/localEdit/adapter/ThumbAdapter.java @@ -0,0 +1,80 @@ +package com.aserbao.androidcustomcamera.whole.createVideoByVoice.localEdit.adapter; + +import android.content.Context; +import android.graphics.Bitmap; +import android.support.v7.widget.RecyclerView; +import 
android.view.LayoutInflater; +import android.view.View; +import android.view.ViewGroup; +import android.widget.ImageView; + +import com.aserbao.androidcustomcamera.R; + +import java.util.ArrayList; +import java.util.List; + +import butterknife.BindView; +import butterknife.ButterKnife; + +/** + * description: + * Created by aserbao on 2018/2/6. + */ + + +public class ThumbAdapter extends RecyclerView.Adapter { + + private Context mContext; + private List mBitmapList = new ArrayList<>(); + + public ThumbAdapter(Context context) { + mContext = context; + } + public void setLoadSuccessCallBack(LoadSuccessCallBack loadSuccessCallBack){ + mLoadSuccessCallBack = loadSuccessCallBack; + } + + public void addThumb (List bitmaps){ +// mBitmapList.clear(); + if (mBitmapList != null) { + mBitmapList.addAll(bitmaps); + notifyDataSetChanged(); + } + } + @Override + public MyViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { + View view = LayoutInflater.from(mContext).inflate(R.layout.thumb_item, parent, false); + return new MyViewHolder(view); + } + + @Override + public void onBindViewHolder(MyViewHolder holder, int position) { + Bitmap bitmap = mBitmapList.get(position); + holder.mItemThumbIv.setImageBitmap(bitmap); + if(position == mBitmapList.size()-1 && mLoadSuccessCallBack != null ){ + mLoadSuccessCallBack.callback(); + } + } + + @Override + public int getItemCount() { + int ret = 0; + if (mBitmapList != null) { + ret = mBitmapList.size(); + } + return ret; + } + + class MyViewHolder extends RecyclerView.ViewHolder { + @BindView(R.id.item_thumb_iv) + ImageView mItemThumbIv; + public MyViewHolder(View itemView) { + super(itemView); + ButterKnife.bind(this, itemView); + } + } + private LoadSuccessCallBack mLoadSuccessCallBack; + public interface LoadSuccessCallBack{ + void callback(); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/VideoEditActivity.java 
b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/VideoEditActivity.java new file mode 100644 index 0000000..02fd0b0 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/VideoEditActivity.java @@ -0,0 +1,820 @@ +package com.aserbao.androidcustomcamera.whole.editVideo; + +import android.content.Context; +import android.graphics.Bitmap; +import android.graphics.Color; +import android.media.MediaMetadataRetriever; +import android.media.MediaPlayer; +import android.net.Uri; +import android.os.AsyncTask; +import android.os.Bundle; +import android.os.Handler; +import android.os.Message; +import android.support.v4.app.FragmentActivity; +import android.text.TextUtils; +import android.util.Log; +import android.view.MotionEvent; +import android.view.View; +import android.view.ViewGroup; +import android.widget.FrameLayout; +import android.widget.ImageView; +import android.widget.LinearLayout; +import android.widget.ProgressBar; +import android.widget.RelativeLayout; +import android.widget.TextView; +import android.widget.Toast; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.base.utils.StaticFinalValues; +import com.aserbao.androidcustomcamera.base.utils.StatusBarUtil; +import com.aserbao.androidcustomcamera.whole.createVideoByVoice.localEdit.MediaPlayerWrapper; +import com.aserbao.androidcustomcamera.whole.createVideoByVoice.localEdit.VideoInfo; +import com.aserbao.androidcustomcamera.whole.createVideoByVoice.localEdit.VideoPreviewView; +import com.aserbao.androidcustomcamera.whole.editVideo.fragment.FilterDialogFragment; +import com.aserbao.androidcustomcamera.whole.editVideo.mediacodec.VideoClipper; +import com.aserbao.androidcustomcamera.whole.editVideo.view.BaseImageView; +import com.aserbao.androidcustomcamera.whole.editVideo.view.BubbleInputDialog; +import com.aserbao.androidcustomcamera.whole.editVideo.view.BubbleTextView; +import 
com.aserbao.androidcustomcamera.whole.editVideo.view.DynamicImageView; +import com.aserbao.androidcustomcamera.whole.editVideo.view.PopBubbleEditView; +import com.aserbao.androidcustomcamera.whole.editVideo.view.PopBubbleView; +import com.aserbao.androidcustomcamera.whole.editVideo.view.PopPasterView; +import com.aserbao.androidcustomcamera.whole.editVideo.view.StickInfoImageView; +import com.aserbao.androidcustomcamera.whole.editVideo.view.StickerView; +import com.aserbao.androidcustomcamera.whole.editVideo.view.VideoEditView; +import com.aserbao.androidcustomcamera.whole.record.filters.GifDecoder; +import com.aserbao.androidcustomcamera.whole.record.other.MagicFilterType; +import com.aserbao.androidcustomcamera.whole.record.ui.SlideGpuFilterGroup; +import com.aserbao.androidcustomcamera.whole.videoPlayer.VideoPlayerActivity2; +import com.aserbao.androidcustomcamera.whole.videoPlayer.VideoViewPlayerActivity; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.Executors; + +import butterknife.BindView; +import butterknife.ButterKnife; +import butterknife.OnClick; + +import static com.aserbao.androidcustomcamera.base.utils.StaticFinalValues.BUNDLE; +import static com.aserbao.androidcustomcamera.base.utils.StaticFinalValues.STORAGE_TEMP_VIDEO_PATH; + + +public class VideoEditActivity extends FragmentActivity implements PopBubbleView.BubbleSelectListener, PopPasterView.PasterSelectListener, MediaPlayerWrapper.IMediaCallback, SlideGpuFilterGroup.OnFilterChangeListener, View.OnTouchListener, VideoEditView.OnSelectTimeChangeListener { + @BindView(R.id.pb_loading) + ProgressBar mPbLoading; + @BindView(R.id.tv_hint) + TextView mTvHint; + @BindView(R.id.pop_video_loading_fl) + FrameLayout mPopVideoLoadingFl; + private String TAG = VideoEditActivity.class.getSimpleName(); + @BindView(R.id.iv_back) + ImageView ivBack; + @BindView(R.id.rl_title) + RelativeLayout rlTitle; + @BindView(R.id.ll_edit_seekbar) + 
VideoEditView videoEditView; + @BindView(R.id.ll_select_bar) + LinearLayout llSelectBar; + @BindView(R.id.rl_content_root) + FrameLayout mContentRootView; + @BindView(R.id.bigicon_play) + ImageView bigiconPlay; + @BindView(R.id.video_preview) + VideoPreviewView mVideoView; + @BindView(R.id.ll_add_filter) + TextView mLlAddFilterTv; + @BindView(R.id.pop_video_percent_tv) + TextView mPopVideoPercentTv; + //当前处于编辑状态的贴纸 + private StickerView mCurrentView; + //当前处于编辑状态的气泡 + private BubbleTextView mCurrentEditTextView; + //存储贴纸列表 + private ArrayList mViews = new ArrayList<>(); + //气泡输入框 + private BubbleInputDialog mBubbleInputDialog; + + private ArrayList stickerViews = new ArrayList<>(); + private ArrayList dynamicImageViews = new ArrayList<>(); + + private PopBubbleView popBubbleView; + private PopPasterView popPasterView; + + private int[] bubbleArray = new int[]{ + R.drawable.bubbleone, R.drawable.bubbletwo, R.drawable.bubblethree, R.drawable.bubblefour, R.drawable.bubblefive, R.drawable.bubblesix, R.drawable.bubbleseven, R.drawable.bubbleeight + }; + private String mVideoPath = "/storage/emulated/0/ych/1234.mp4"; + public int mVideoHeight, mVideoWidth, mVideoDuration; //mIsNotComeLocal 1表示拍摄,mIsAnswer 1表示回答者 + private Context mContext; + private boolean hasSelectStickerView; + private float mPixel = 1.778f; + private long lastTime = 0; + private boolean isVideoPause = false; + private PopBubbleEditView popBubbleEditView; + private long currentTime; + private boolean isPlayVideo; + public String mVideoRotation = "90"; + + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setContentView(R.layout.activity_video_edit); + ButterKnife.bind(this); + mContext = this; + + mBubbleInputDialog = new BubbleInputDialog(this); + mBubbleInputDialog.setCompleteCallBack(new BubbleInputDialog.CompleteCallBack() { + @Override + public void onComplete(View bubbleTextView, String str) { + ((BubbleTextView) 
bubbleTextView).setText(str); + } + }); + initData(); + initListener(); + mThumbBitmap.clear(); + StatusBarUtil.transparencyBar(this); + } + + private void initData() { + mVideoPath = getIntent().getStringExtra(StaticFinalValues.VIDEOFILEPATH); + initThumbs(); + ArrayList srcList = new ArrayList<>(); + srcList.add(mVideoPath); + mVideoView.setVideoPath(srcList); + initSetParam(); + } + private void initListener(){ + mVideoView.setOnFilterChangeListener(this); + mVideoView.setIMediaCallback(this); + videoEditView.setOnSelectTimeChangeListener(this); + } + + private void initSetParam() { + ViewGroup.LayoutParams layoutParams = mContentRootView.getLayoutParams(); + ViewGroup.LayoutParams layoutParams1 = mVideoView.getLayoutParams(); + if (!TextUtils.isEmpty(mVideoRotation) && mVideoRotation.equals("0") && mVideoWidth > mVideoHeight || mVideoRotation.equals("180") && mVideoWidth > mVideoHeight) {//本地视频横屏 + layoutParams.width = 1120; + layoutParams.height = 630; + layoutParams1.width = 1120; + layoutParams1.height = 630; + } else { +// layoutParams.width = (int) (mVideoWidth * StaticFinalValues.VIDEO_WIDTH_HEIGHT); + layoutParams.width = 630; + layoutParams.height = 1120; + layoutParams1.width = 630; + layoutParams1.height = 1120; + } + mContentRootView.setLayoutParams(layoutParams); + mVideoView.setLayoutParams(layoutParams1); + + } + + int mFilterSel = 0; + + @Override + public boolean dispatchTouchEvent(MotionEvent ev) { + if(mPopVideoLoadingFl != null && mPopVideoLoadingFl.getVisibility() == View.VISIBLE){ + Log.e(TAG, "dispatchTouchEvent: "); + return true; + }else { + return super.dispatchTouchEvent(ev); + } + } + + @OnClick({R.id.rl_content_root, R.id.iv_back, R.id.ll_add_sticker, R.id.ll_add_subtitle, R.id.edit_video_next_tv, R.id.ll_play_video, R.id.ll_add_filter}) + public void onViewClicked(View view) { + if (System.currentTimeMillis() - lastTime < 500 || mPopVideoLoadingFl != null && mPopVideoLoadingFl.getVisibility() == View.VISIBLE) { + return; + } + 
lastTime = System.currentTimeMillis(); + switch (view.getId()) { + case R.id.rl_content_root: + if (mCurrentEditTextView != null) { + mCurrentEditTextView.setInEdit(false); + } + if (mCurrentView != null) { + mCurrentView.setInEdit(false); + } + break; + case R.id.ll_add_filter: + FilterDialogFragment dialogFragment = new FilterDialogFragment(); + Bundle bundle = new Bundle(); + dialogFragment.setArguments(bundle); + dialogFragment.setResultListener(new FilterDialogFragment.ResultListener() { + @Override + public void result(int making, int mFilterType, int mBeauty, boolean isDismiss) { + mFilterSel = mFilterType; + filterType = StaticFinalValues.types[mFilterType]; + mVideoView.setFilter(mFilterSel - 1); + } + }); + dialogFragment.show(getFragmentManager(), "filter"); + break; + case R.id.iv_back: + onBackPressed(); + break; + case R.id.ll_add_sticker: + if (popPasterView == null) { + popPasterView = new PopPasterView(VideoEditActivity.this); + popPasterView.setPasterSelectListener(VideoEditActivity.this); + } + if (isPlayVideo) { + videoPlay(); + } + popPasterView.show(); + break; + case R.id.ll_add_subtitle: + if (popBubbleView == null) { + popBubbleView = new PopBubbleView(VideoEditActivity.this); + popBubbleView.setBubbleSelectListener(VideoEditActivity.this); + } + if (isPlayVideo) { + videoPlay(); + } + popBubbleView.show(); + break; + case R.id.edit_video_next_tv: + videoEditView.recoverView(); + if (mCurrentEditTextView != null) { + mCurrentEditTextView.setInEdit(false); + } + if (mCurrentView != null) { + mCurrentView.setInEdit(false); + } + mVideoView.pause(); + VideoClipper clipper = new VideoClipper(); + clipper.setInputVideoPath(mVideoPath); + final String outputPath = STORAGE_TEMP_VIDEO_PATH; + clipper.setFilterType(filterType); +// clipper.setFilterType(MagicFilterType.HUDSON); + clipper.setOutputVideoPath(outputPath); + clipper.setOnVideoCutFinishListener(new VideoClipper.OnVideoCutFinishListener() { + @Override + public void onFinish() { +// 
VideoViewPlayerActivity.launch(VideoEditActivity.this,outputPath); + VideoPlayerActivity2.launch(VideoEditActivity.this,outputPath); + mHandler.sendEmptyMessage(VIDEO_CUT_FINISH); + } + + @Override + public void onProgress(float percent) { + Message message = new Message(); + message.what = CLIP_VIDEO_PERCENT; + message.obj = percent; + mHandler.sendMessage(message); + } + }); + try { + int clipDur = mVideoView.getVideoDuration() * 1000; + clipper.clipVideo(0, clipDur, mViews, getResources()); + +// progressDialog = new PopupManager(mContext).showLoading(); + mPopVideoLoadingFl.setVisibility(View.VISIBLE); + } catch (IOException e) { + e.printStackTrace(); + } + break; + case R.id.ll_play_video: + videoPlay(); + break; + } + } + + private void videoPlay() { + Log.e(TAG,"currentTime:"+currentTime+",mVideoDuration:"+mVideoDuration); + if(currentTime >= mVideoDuration){ + return; + } + for (StickInfoImageView stickInfoImageView : stickerViews) { //清空gif图 + mContentRootView.removeView(stickInfoImageView); + } + stickerViews.clear(); + for (BaseImageView baseImageView : mViews) { + baseImageView.setVisibility(View.GONE); +// addGifView(baseImageView); + } + videoEditView.videoPlay(mViews); + } + + + //添加表情 + private void addStickerView(int resourceId, int gifId) { + if(mViews.size() >= 40){ + Toast.makeText(VideoEditActivity.this, "字幕和贴纸的数量不能超过40个", Toast.LENGTH_SHORT).show(); + return; + } + if ((mVideoDuration - currentTime) / 1000 < 2) { + Toast.makeText(VideoEditActivity.this, "当前时间不足以添加贴纸", Toast.LENGTH_SHORT).show(); + return; + } + hasSelectStickerView = true; + final StickerView stickerView = new StickerView(this); +// stickerView.setImageResource(R.drawable.ic_cat); + stickerView.setParentSize(mContentRootView.getMeasuredWidth(), mContentRootView.getMeasuredHeight()); + GifDecoder gifDecoder = new GifDecoder(); + gifDecoder.read(getResources().openRawResource(gifId)); + List bitmaps = new ArrayList<>(); + for (int i = 0; i < gifDecoder.getFrameCount(); i++) { + 
bitmaps.add(gifDecoder.getFrame(i)); + } + stickerView.setBitmaps(bitmaps); +// stickerView.setImageResource(resourceId); + stickerView.setBitmap(bitmaps.get(bitmaps.size() / 2)); + stickerView.setGif(true); + stickerView.setGifId(gifId); + stickerView.setOperationListener(new StickerView.OperationListener() { + @Override + public void onDeleteClick() { + mViews.remove(stickerView); + mContentRootView.removeView(stickerView); + } + + @Override + public void onEdit(StickerView stickerView) { + Log.e(TAG, "StickerView onEdit"); + hasSelectStickerView = true; + if (mCurrentEditTextView != null) { + mCurrentEditTextView.setInEdit(false); + } + mCurrentView.setInEdit(false); + mCurrentView = stickerView; + mCurrentView.setInEdit(true); + + if (mViews != null && mViews.size() > 0) { + int position; + position = mViews.indexOf(mCurrentView); + if (position != -1) { + mViews.get(position).setRotateDegree(mCurrentView.getRotateDegree()); + mViews.get(position).setViewHeight(mCurrentView.getViewHeight()); + mViews.get(position).setViewWidth(mCurrentView.getViewWidth()); + mViews.get(position).setX(mCurrentView.getX()); + mViews.get(position).setY(mCurrentView.getY()); + } + } + + videoEditView.recoverView(mViews, stickerView, true); + if (isPlayVideo) { //如果已经处于播放状态,则暂停播放 + videoEditView.videoPlay(mViews); + } + } + + @Override + public void onTop(StickerView stickerView) { + Log.e(TAG, "StickerView onTop"); + int position = mViews.indexOf(stickerView); + if (position == mViews.size() - 1) { + return; + } + StickerView stickerTemp = (StickerView) mViews.remove(position); + mViews.add(mViews.size(), stickerTemp); + } + + + }); + RelativeLayout.LayoutParams lp = new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT); + mContentRootView.addView(stickerView, lp); + Log.e(TAG, " 初始位置,X=" + stickerView.getPosX() + stickerView.getBitmap().getWidth() / 2); + Log.e(TAG, " 初始位置,Y=" + stickerView.getPosY() + 
stickerView.getBitmap().getHeight() / 2); + stickerView.setX(stickerView.getPosX() + stickerView.getBitmap().getWidth() / 2); + stickerView.setY(stickerView.getPosY() + stickerView.getBitmap().getHeight() / 2); + + stickerView.setStartTime(currentTime); + long endTime = currentTime + 2000; + if (endTime > mVideoDuration) { + endTime = mVideoDuration; + } + stickerView.setEndTime(endTime); + stickerView.setTimeStamp(System.currentTimeMillis()); + mViews.add(stickerView); + setCurrentEdit(stickerView); + videoEditView.recoverView(mViews, stickerView, false); + } + + + //添加气泡 + private void addBubble(int index) { + if(mViews.size() >= 40){ + Toast.makeText(VideoEditActivity.this, "字幕和贴纸的数量不能超过40个", Toast.LENGTH_SHORT).show(); + return; + } + if ((mVideoDuration - currentTime) / 1000 < 2) { + Toast.makeText(VideoEditActivity.this, "当前时间不足以添加贴纸", Toast.LENGTH_SHORT).show(); + return; + } + hasSelectStickerView = false; + final BubbleTextView bubbleTextView = new BubbleTextView(this, + Color.BLACK, 0, index); +// bubbleTextView.setImageResource(R.drawable.bubble_7_rb); + bubbleTextView.setParentSize(mContentRootView.getMeasuredWidth(), mContentRootView.getMeasuredHeight()); + bubbleTextView.setImageResource(bubbleArray[index]); + bubbleTextView.setGif(false); + bubbleTextView.setOperationListener(new BubbleTextView.OperationListener() { + @Override + public void onDeleteClick() { + Log.e(TAG, "BubbleTextView onDeleteClick"); + mViews.remove(bubbleTextView); + mContentRootView.removeView(bubbleTextView); + } + + @Override + public void onEdit(BubbleTextView bubbleTextView) { + Log.e(TAG, "BubbleTextView onEdit"); + hasSelectStickerView = false; + if (mCurrentView != null) { + mCurrentView.setInEdit(false); + } + mCurrentEditTextView.setInEdit(false); + mCurrentEditTextView = bubbleTextView; + mCurrentEditTextView.setInEdit(true); + + if (mViews != null && mViews.size() > 0) { + int position; + position = mViews.indexOf(mCurrentEditTextView); + if (position != -1) { + 
mViews.get(position).setRotateDegree(mCurrentEditTextView.getRotateDegree()); + mViews.get(position).setViewHeight(mCurrentEditTextView.getViewHeight()); + mViews.get(position).setViewWidth(mCurrentEditTextView.getViewWidth()); + mViews.get(position).setX(mCurrentEditTextView.getX()); + mViews.get(position).setY(mCurrentEditTextView.getY()); + } + } + + videoEditView.recoverView(mViews, bubbleTextView, true); + + if (isPlayVideo) { //如果已经处于播放状态,则暂停播放 + videoEditView.videoPlay(mViews); + } + } + + @Override + public void onClick(BubbleTextView bubbleTextView) { +// mBubbleInputDialog.setBubbleTextView(bubbleTextView); +// mBubbleInputDialog.show(); + if (popBubbleEditView == null) { + popBubbleEditView = new PopBubbleEditView(VideoEditActivity.this); + popBubbleEditView.setOnTextSendListener(new PopBubbleEditView.OnTextSendListener() { + @Override + public void onTextSend(String text) { + mCurrentEditTextView.setText(text); + } + }); + } + popBubbleEditView.show(bubbleTextView.getmStr()); + } + + @Override + public void onTop(BubbleTextView bubbleTextView) { + int position = mViews.indexOf(bubbleTextView); + if (position == mViews.size() - 1) { + return; + } + BubbleTextView textView = (BubbleTextView) mViews.remove(position); + mViews.add(mViews.size(), textView); + } + + }); + RelativeLayout.LayoutParams lp = new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT); + mContentRootView.addView(bubbleTextView, lp); + + bubbleTextView.setStartTime(currentTime); + long endTime = currentTime + 2000; + if (endTime > mVideoDuration) { + endTime = mVideoDuration; + } + bubbleTextView.setEndTime(endTime); + bubbleTextView.setTimeStamp(System.currentTimeMillis()); + mViews.add(bubbleTextView); + setCurrentEdit(bubbleTextView); + videoEditView.recoverView(mViews, bubbleTextView, false); + } + + /** + * 设置当前处于编辑模式的贴纸 + */ + private void setCurrentEdit(StickerView stickerView) { + if (mCurrentView != null) { + 
mCurrentView.setInEdit(false); + } + if (mCurrentEditTextView != null) { + mCurrentEditTextView.setInEdit(false); + } + mCurrentView = stickerView; + stickerView.setInEdit(true); + } + + /** + * 设置当前处于编辑模式的气泡 + */ + private void setCurrentEdit(BubbleTextView bubbleTextView) { + if (mCurrentView != null) { + mCurrentView.setInEdit(false); + } + if (mCurrentEditTextView != null) { + mCurrentEditTextView.setInEdit(false); + } + mCurrentEditTextView = bubbleTextView; + mCurrentEditTextView.setInEdit(true); + } + //字幕选择接口回调 + @Override + public void bubbleSelect(int bubbleIndex) { + addBubble(bubbleIndex); + } + + @Override + public void pasterSelect(int resourceId, int gifId) { + addStickerView(resourceId, gifId); + } + + //视频播放接口 + private boolean isDestroy; + private boolean isPlaying = false; + static final int VIDEO_PREPARE = 0; + static final int VIDEO_START = 1; + static final int VIDEO_UPDATE = 2; + static final int VIDEO_PAUSE = 3; + static final int VIDEO_CUT_FINISH = 4; + static final int CLIP_VIDEO_PERCENT = 5; + static final int AUTO_PAUSE = 6; + private MagicFilterType filterType = MagicFilterType.NONE; + Handler mHandler = new Handler() { + @Override + public void handleMessage(Message msg) { + switch (msg.what) { + case CLIP_VIDEO_PERCENT: + float aFloat = (float) msg.obj; + mPopVideoPercentTv.setText(String.valueOf((int) (aFloat * 100)) + "%"); + break; + case VIDEO_PREPARE: + Executors.newSingleThreadExecutor().execute(update); + mHandler.sendEmptyMessageDelayed(AUTO_PAUSE,50); + break; + case VIDEO_START: + isPlaying = true; + break; + case VIDEO_UPDATE: + /* int curDuration = mVideoView.getCurDuration(); + if (curDuration > startPoint + clipDur) { + mVideoView.seekTo(startPoint); + mVideoView.start(); + }*/ + break; + case VIDEO_PAUSE: + isPlaying = false; + break; + case VIDEO_CUT_FINISH: + mPopVideoPercentTv.setText("0%"); + mPopVideoLoadingFl.setVisibility(View.GONE); + //TODO 已经渲染完毕了  + break; + case AUTO_PAUSE: + mVideoView.pause(); + break; + } 
+ } + }; + + @Override + protected void onResume() { + super.onResume(); +// mVideoView.start(); + } + + @Override + protected void onPause() { + super.onPause(); + if (mVideoView != null) { + mVideoView.pause(); + } + + if (isPlayVideo) { + videoPlay(); + } + } + + @Override + protected void onDestroy() { + super.onDestroy(); + mHandler.removeCallbacksAndMessages(null); + isDestroy = true; + mVideoView.onDestroy(); + if (mThumbBitmap != null) { + for (int i = 0; i < mThumbBitmap.size(); i++) { + mThumbBitmap.get(i).recycle(); + } + mThumbBitmap = null; + } + System.gc(); + } + + private Runnable update = new Runnable() { + @Override + public void run() { + while (!isDestroy) { + if (!isPlaying) { + try { + Thread.currentThread().sleep(200); + } catch (InterruptedException e) { + e.printStackTrace(); + } + continue; + } + mHandler.sendEmptyMessage(VIDEO_UPDATE); + try { + Thread.currentThread().sleep(200); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + } + }; + + @Override + public boolean onTouch(View v, MotionEvent event) { + return false; + } + + @Override + public void onVideoPrepare() { + mHandler.sendEmptyMessage(VIDEO_PREPARE); + } + + @Override + public void onVideoStart() { + mHandler.sendEmptyMessage(VIDEO_START); + } + + @Override + public void onVideoPause() { + mHandler.sendEmptyMessage(VIDEO_PAUSE); + } + + @Override + public void onCompletion(MediaPlayer mp) { + /*mVideoView.seekTo(0); + mVideoView.start();*/ + } + + @Override + public void onVideoChanged(VideoInfo info) { + + } + + @Override + public void onFilterChange(MagicFilterType type) { + this.filterType = type; + } + + /** + * 初始化缩略图 + */ + private void initThumbs() { + final MediaMetadataRetriever mediaMetadata = new MediaMetadataRetriever(); + mediaMetadata.setDataSource(mContext, Uri.parse(mVideoPath)); + try { + mVideoRotation = mediaMetadata.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION); + mVideoWidth = 
Integer.parseInt(mediaMetadata.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH)); + mVideoHeight = Integer.parseInt(mediaMetadata.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT)); + } catch (NumberFormatException e) { + e.printStackTrace(); + finish(); + } + mPixel = (float) mVideoHeight / (float) mVideoWidth; + mVideoDuration = Integer.parseInt(mediaMetadata.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)); + Log.e(TAG, "mVideoDuration:" + mVideoDuration); + videoEditView.setTotalTime(mVideoDuration + 100); + final int frame = mVideoDuration / (2 * 1000); + Log.e(TAG, "frame:" + frame); + final int frameTime; + if (frame > 0) { + frameTime = mVideoDuration / frame * 1000; + } else { + frameTime = 1 * 1000; + } + new AsyncTask() { + @Override + protected Boolean doInBackground(Void... params) { + for (int x = 0; x < frame; x++) { + Bitmap bitmap = mediaMetadata.getFrameAtTime(frameTime * x, MediaMetadataRetriever.OPTION_CLOSEST_SYNC); + Message msg = myHandler.obtainMessage(); + msg.obj = bitmap; + msg.arg1 = x; + myHandler.sendMessage(msg); + } + mediaMetadata.release(); + return true; + } + + @Override + protected void onPostExecute(Boolean result) { + if (mThumbBitmap != null) { + videoEditView.addImageView(mThumbBitmap); + } + } + }.execute(); + } + + private List mThumbBitmap = new ArrayList<>(); + private Handler myHandler = new Handler() { + @Override + public void handleMessage(Message msg) { + if (mThumbBitmap != null) { + mThumbBitmap.add(msg.arg1, (Bitmap) msg.obj); + } + } + }; + + + @Override + public void selectTimeChange(long startTime, long endTime) { + if (mViews == null || mViews.size() == 0) { + return; + } + int position; + if (hasSelectStickerView) { + position = mViews.indexOf(mCurrentView); + } else { + position = mViews.indexOf(mCurrentEditTextView); + } + if (position != -1) { + mViews.get(position).setStartTime(startTime); + mViews.get(position).setEndTime(endTime); + } + + } + + + @Override 
+ public void playChange(boolean isPlayVideo) { + Log.e(TAG, "播放状态变化"); + this.isPlayVideo = isPlayVideo; + if (isPlayVideo) { + if (mCurrentView != null) { + mCurrentView.setInEdit(false); + } + if (mCurrentEditTextView != null) { + mCurrentEditTextView.setInEdit(false); + } + } else { + for (StickInfoImageView stickInfoImageView : stickerViews) { //清空动态贴纸 + mContentRootView.removeView(stickInfoImageView); + } + stickerViews.clear(); + } + try { + if (isPlayVideo) { +// mVideoView.seekTo(0); + mVideoView.start(); + } else { + mVideoView.pause(); + } + } catch (Exception e) { + Log.e(TAG, "异常:" + e); + } + } + + @Override + public void videoProgressUpdate(long currentTime, boolean isVideoPlaying) { + this.currentTime = currentTime; + if (!isVideoPlaying) { + try { + Log.e(TAG, "currentTime:" + currentTime); + mVideoView.seekTo((int) currentTime); + } catch (Exception e) { + e.printStackTrace(); + Log.e(TAG, "异常:" + e); + } + }else { + Log.e(TAG, "播放中currentTime:" + currentTime); + } + for (int i = 0; i < mViews.size(); i++) { ////遍历显示静态图 + BaseImageView baseImageView = mViews.get(i); + long startTime = baseImageView.getStartTime(); + long endTime = baseImageView.getEndTime(); + if (currentTime >= startTime && currentTime <= endTime) { + if (baseImageView.isGif()) { + if (currentTime != 0) { + int frameIndex = baseImageView.getFrameIndex(); + ((StickerView) baseImageView).changeBitmap(baseImageView.getBitmaps().get(frameIndex)); + mViews.get(i).setFrameIndex(frameIndex + 1); + } + baseImageView.setVisibility(View.VISIBLE); + } else { + baseImageView.setVisibility(View.VISIBLE); + } + } else { + baseImageView.setVisibility(View.GONE); + } + } + } + + + @Override + public void onBackPressed() { + setResult(StaticFinalValues.COMR_FROM_VIDEO_EDIT_TIME_ACTIVITY,getIntent()); + super.onBackPressed(); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/adpaters/PasterAdapter.java 
b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/adpaters/PasterAdapter.java new file mode 100644 index 0000000..831d83d --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/adpaters/PasterAdapter.java @@ -0,0 +1,89 @@ +package com.aserbao.androidcustomcamera.whole.editVideo.adpaters; + +import android.content.Context; +import android.support.v4.view.PagerAdapter; +import android.support.v7.widget.RecyclerView; +import android.view.LayoutInflater; +import android.view.View; +import android.view.ViewGroup; +import android.widget.ImageView; + +import com.aserbao.androidcustomcamera.R; + + +import butterknife.BindView; +import butterknife.ButterKnife; +import butterknife.OnClick; + + +/** + *

    + *     author : Administrator (Jacket)
    + *     e-mail : 378315764@qq.com
    + *     time   : 2018/01/31
    + *     desc   :
    + *     version: 3.2
    + * 
    + */ + +public class PasterAdapter extends RecyclerView.Adapter { + + private String TAG = PagerAdapter.class.getSimpleName(); + private Context context; + private int[] imgList; + + private int[] imagesGif = new int[]{ + R.raw.aini, R.raw.dengliao, R.raw.baituole, R.raw.burangwo, R.raw.bufuhanzhe, R.raw.nizabushagntian, R.raw.zan, R.raw.buyue, R.raw.nizaidouwo, R.raw.gandepiaoliang, R.raw.xiase + }; + + public PasterAdapter(Context context, int[] imgList) { + this.context = context; + this.imgList = imgList; + } + + @Override + public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { + return new ViewHolder(LayoutInflater.from(context).inflate(R.layout.item_paster, parent, false)); + } + + @Override + public void onBindViewHolder(ViewHolder holder, int position) { + holder.pasterview.setImageResource(imgList[position]); + } + + @Override + public int getItemCount() { + return imgList == null ? 0 : imgList.length; + } + + class ViewHolder extends RecyclerView.ViewHolder { + @BindView(R.id.pasterview) + ImageView pasterview; + + public ViewHolder(View itemView) { + super(itemView); + ButterKnife.bind(this, itemView); + } + + @OnClick({R.id.pasterview}) + public void onViewClicked(View view) { + switch (view.getId()) { + case R.id.pasterview: + if (pasterItemSelectListener != null) { + pasterItemSelectListener.pasterItemSelect(imgList[getLayoutPosition()], imagesGif[getLayoutPosition()]); + } + break; + } + } + } + + public interface PasterItemSelectListener { + void pasterItemSelect(int resourseId, int gifId); + } + + PasterItemSelectListener pasterItemSelectListener; + + public void setPasterItemSelectListener(PasterItemSelectListener pasterItemSelectListener) { + this.pasterItemSelectListener = pasterItemSelectListener; + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/beans/BubblePropertyModel.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/beans/BubblePropertyModel.java new file mode 
/**
 * Serializable value object describing one text bubble placed over the video:
 * its caption, canvas position, rotation, scale and stacking order.
 *
 * Created by Abner on 15/6/11.
 * QQ 230877476
 * Email nimengbo@gmail.com
 */
public class BubblePropertyModel implements Serializable {

    // Must stay fixed so previously serialized instances keep deserializing.
    private static final long serialVersionUID = 6339777989485920188L;

    private long bubbleId;   // unique id of the bubble
    private String text;     // caption rendered inside the bubble
    private float xLocation; // x coordinate on the canvas
    private float yLocation; // y coordinate on the canvas
    private float degree;    // rotation angle in degrees
    private float scaling;   // scale factor applied to the bubble
    private int order;       // stacking order among all bubbles

    public long getBubbleId() {
        return bubbleId;
    }

    public void setBubbleId(long bubbleId) {
        this.bubbleId = bubbleId;
    }

    public String getText() {
        return text;
    }

    public void setText(String text) {
        this.text = text;
    }

    public float getxLocation() {
        return xLocation;
    }

    public void setxLocation(float xLocation) {
        this.xLocation = xLocation;
    }

    public float getyLocation() {
        return yLocation;
    }

    public void setyLocation(float yLocation) {
        this.yLocation = yLocation;
    }

    public float getDegree() {
        return degree;
    }

    public void setDegree(float degree) {
        this.degree = degree;
    }

    public float getScaling() {
        return scaling;
    }

    public void setScaling(float scaling) {
        this.scaling = scaling;
    }

    public int getOrder() {
        return order;
    }

    public void setOrder(int order) {
        this.order = order;
    }
}
/**
 * Value object pairing an overlay's identity with the interval during which it
 * is visible on the edit timeline.
 *
 * NOTE(review): callers elsewhere in this package appear to use millisecond
 * values (System.currentTimeMillis() for the stamp, player position for
 * start/end) — confirm against VideoEditView before relying on units.
 */
public class SelectViewBean {

    // Creation timestamp; doubles as the overlay's identity.
    private long timeStamp;
    // Timeline position at which the overlay becomes visible.
    private long startTime;
    // Timeline position at which the overlay is hidden again.
    private long endTime;

    public long getTimeStamp() {
        return timeStamp;
    }

    public void setTimeStamp(long value) {
        this.timeStamp = value;
    }

    public long getStartTime() {
        return startTime;
    }

    public void setStartTime(long value) {
        this.startTime = value;
    }

    public long getEndTime() {
        return endTime;
    }

    public void setEndTime(long value) {
        this.endTime = value;
    }
}
/**
 * Serializable snapshot of one sticker overlay's render state: which image it
 * shows, where it sits on the canvas, and how it is rotated, scaled, stacked
 * and mirrored.
 *
 * Created by Abner on 15/6/11.
 * QQ 230877476
 * Email nimengbo@gmail.com
 */
public class StickerPropertyModel implements Serializable {

    // Must stay fixed so previously serialized instances keep deserializing.
    private static final long serialVersionUID = 3800737478616389410L;

    private long stickerId;    // sticker id
    private String text;       // caption text, if any
    private float xLocation;   // x coordinate on the canvas
    private float yLocation;   // y coordinate on the canvas
    private float degree;      // rotation angle in degrees
    private float scaling;     // scale factor applied to the sticker
    private int order;         // stacking order among all overlays
    private int horizonMirror; // horizontal mirroring: 1 = mirrored, 2 = not mirrored
    private String stickerURL; // URL of the sticker PNG

    public long getStickerId() {
        return stickerId;
    }

    public void setStickerId(long stickerId) {
        this.stickerId = stickerId;
    }

    public String getText() {
        return text;
    }

    public void setText(String text) {
        this.text = text;
    }

    public float getxLocation() {
        return xLocation;
    }

    public void setxLocation(float xLocation) {
        this.xLocation = xLocation;
    }

    public float getyLocation() {
        return yLocation;
    }

    public void setyLocation(float yLocation) {
        this.yLocation = yLocation;
    }

    public float getDegree() {
        return degree;
    }

    public void setDegree(float degree) {
        this.degree = degree;
    }

    public float getScaling() {
        return scaling;
    }

    public void setScaling(float scaling) {
        this.scaling = scaling;
    }

    public int getOrder() {
        return order;
    }

    public void setOrder(int order) {
        this.order = order;
    }

    public int getHorizonMirror() {
        return horizonMirror;
    }

    public void setHorizonMirror(int horizonMirror) {
        this.horizonMirror = horizonMirror;
    }

    public String getStickerURL() {
        return stickerURL;
    }

    public void setStickerURL(String stickerURL) {
        this.stickerURL = stickerURL;
    }
}
b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/fragment/FilterDialogFragment.java @@ -0,0 +1,277 @@ +package com.aserbao.androidcustomcamera.whole.editVideo.fragment; + +import android.app.Dialog; +import android.app.DialogFragment; +import android.content.Context; +import android.content.DialogInterface; +import android.graphics.Color; +import android.os.Bundle; +import android.util.Log; +import android.view.Gravity; +import android.view.View; +import android.view.Window; +import android.view.WindowManager; +import android.widget.HorizontalScrollView; +import android.widget.ImageView; +import android.widget.LinearLayout; +import android.widget.TextView; + +import com.aserbao.androidcustomcamera.R; + +import java.util.ArrayList; +import java.util.List; + +import butterknife.BindView; +import butterknife.ButterKnife; +import butterknife.OnClick; + + +/** + * description: + * Created by aserbao on 2018/1/27. + */ + + +public class FilterDialogFragment extends DialogFragment { + + + @BindView(R.id.pop_filter_tv) + TextView mPopFilterTv; + @BindView(R.id.origin) + TextView mOrigin; + @BindView(R.id.delta) + TextView mDelta; + @BindView(R.id.electric) + TextView mElectric; + @BindView(R.id.slowlived) + TextView mSlowlived; + @BindView(R.id.tokyo) + TextView mTokyo; + @BindView(R.id.warm) + TextView mWarm; + @BindView(R.id.pop_filter_sv) + HorizontalScrollView mPopFilterSv; + @BindView(R.id.none_iv) + ImageView mNoneIv; + @BindView(R.id.one_tv) + TextView mOneTv; + @BindView(R.id.two_tv) + TextView mTwoTv; + @BindView(R.id.three_tv) + TextView mThreeTv; + @BindView(R.id.four_tv) + TextView mFourTv; + @BindView(R.id.five_tv) + TextView mFiveTv; + @BindView(R.id.pop_beauty_ll) + LinearLayout mPopBeautyLl; + @BindView(R.id.pop_filter_ll) + LinearLayout mPopFilterLl; + @BindView(R.id.mking_tv) + TextView mMkingTv; + @BindView(R.id.filter_tv) + TextView mFilterTv; + @BindView(R.id.beauty_tv) + TextView mBeautyTv; + + + private int making = 0,mBeauty = 
0,mFilter = 0;//美型,美颜,滤镜类型 + private boolean mIsMaking = true,mIsBeauty; + private Dialog mDialog; + private Context mContext; + private String[] mFilterType = {"origin", "delta", "electric", "slowlived", "tokyo", "warm"}; + private List mBeautyTvList = new ArrayList<>(); + private List mFilterTvList = new ArrayList<>(); + private ResultListener mResultListener; + public int mComeFrom; + + @Override + public Dialog onCreateDialog(Bundle savedInstanceState) { + mDialog = new Dialog(getActivity(), R.style.BottomDialog); + mDialog.requestWindowFeature(Window.FEATURE_NO_TITLE); // 设置Content前设定 + mDialog.setContentView(R.layout.pop_filter_camera); + mDialog.setCanceledOnTouchOutside(true); // 外部点击取消 + mDialog.getWindow().setWindowAnimations(R.style.expression_dialog_anim_style); + // 设置宽度为屏宽, 靠近屏幕底部。 + Window window = mDialog.getWindow(); + WindowManager.LayoutParams lp = window.getAttributes(); + lp.gravity = Gravity.BOTTOM; // 紧贴底部 + lp.width = WindowManager.LayoutParams.MATCH_PARENT; // 宽度持平 + lp.height = WindowManager.LayoutParams.WRAP_CONTENT; // 高度持平 + window.setAttributes(lp); + ButterKnife.bind(this, mDialog); + mContext = mDialog.getContext(); + initData(); + initView(); + return mDialog; + } + + @Override + public void dismiss() { + super.dismiss(); + Log.e("atest", "dismiss: " ); + } + + private void initData() { + Bundle bundle = getArguments(); + + mBeautyTvList.add(mNoneIv); + mBeautyTvList.add(mOneTv); + mBeautyTvList.add(mTwoTv); + mBeautyTvList.add(mThreeTv); + mBeautyTvList.add(mFourTv); + mBeautyTvList.add(mFiveTv); + + mFilterTvList.add(mOrigin); + mFilterTvList.add(mDelta); + mFilterTvList.add(mElectric); + mFilterTvList.add(mSlowlived); + mFilterTvList.add(mTokyo); + mFilterTvList.add(mWarm); + + } + + @Override + public void onDismiss(DialogInterface dialog) { + super.onDismiss(dialog); + mResultListener.result(making,mFilter,mBeauty,true); + } + + private void initView() { + switch (mComeFrom){ + case 0://从相机进入 + clickBeauty(making); + break; + 
case 1://本地视频编辑 + mPopFilterLl.setVisibility(View.GONE); + clickFilter(mFilter); + mPopBeautyLl.setVisibility(View.GONE); + mPopFilterSv.setVisibility(View.VISIBLE); + break; + } + } + + @OnClick({R.id.origin, R.id.delta, R.id.electric, R.id.slowlived, R.id.tokyo, R.id.warm, R.id.none_iv, R.id.one_tv, R.id.two_tv, R.id.three_tv, R.id.four_tv, R.id.five_tv, R.id.mking_tv, R.id.filter_tv, R.id.beauty_tv}) + public void onViewClicked(View view) { + switch (view.getId()) { + case R.id.origin: + clickFilter(0); + break; + case R.id.delta: + clickFilter(1); + break; + case R.id.electric: + clickFilter(2); + break; + case R.id.slowlived: + clickFilter(3); + break; + case R.id.tokyo: + clickFilter(4); + break; + case R.id.warm: + clickFilter(5); + break; + case R.id.none_iv: + clickBeauty(0); + break; + case R.id.one_tv: + clickBeauty(1); + break; + case R.id.two_tv: + clickBeauty(2); + break; + case R.id.three_tv: + clickBeauty(3); + break; + case R.id.four_tv: + clickBeauty(4); + break; + case R.id.five_tv: + clickBeauty(5); + break; + case R.id.mking_tv: + mMkingTv.setTextColor(Color.parseColor("#FFEB58")); + mFilterTv.setTextColor(Color.parseColor("#7FFFFFFF")); + mBeautyTv.setTextColor(Color.parseColor("#7FFFFFFF")); + mIsBeauty = false; + mIsMaking = true; + clickBeauty(making); + mPopBeautyLl.setVisibility(View.VISIBLE); + mPopFilterSv.setVisibility(View.GONE); + break; + case R.id.filter_tv: + mMkingTv.setTextColor(Color.parseColor("#7FFFFFFF")); + mFilterTv.setTextColor(Color.parseColor("#FFEB58")); + mBeautyTv.setTextColor(Color.parseColor("#7FFFFFFF")); + clickFilter(mFilter); + mPopBeautyLl.setVisibility(View.GONE); + mPopFilterSv.setVisibility(View.VISIBLE); + break; + case R.id.beauty_tv: + mBeautyTv.setTextColor(Color.parseColor("#FFEB58")); + mFilterTv.setTextColor(Color.parseColor("#7FFFFFFF")); + mMkingTv.setTextColor(Color.parseColor("#7FFFFFFF")); + mIsBeauty = true; + mIsMaking = false; + clickBeauty(mBeauty); + 
mPopBeautyLl.setVisibility(View.VISIBLE); + mPopFilterSv.setVisibility(View.GONE); + break; + } + } + + + public void clickBeauty(int position){ + if(mIsMaking){ + making = position; + } + if(mIsBeauty){ + mBeauty = position; + } + for (int i = 0; i < mBeautyTvList.size(); i++) { + View view = mBeautyTvList.get(i); + if(i == position){ + if(i == 0) { + ((ImageView) view).setImageResource(R.drawable.bigicon_no_light); + }else{ + ((TextView) view).setTextColor(Color.parseColor("#ffffff")); + } + view.setBackgroundResource(R.drawable.tv_circle_white40_bg); + }else{ + if(i == 0){ + ((ImageView) view).setImageResource(R.drawable.bigicon_no); + }else{ + ((TextView) view).setTextColor(Color.parseColor("#7fffffff")); + } + view.setBackgroundResource(R.drawable.tv_circle_white10_bg); + } + } + mResultListener.result(making,mFilter,mBeauty,false); + } + + public void clickFilter(int position){ + mFilter = position; + for (int i = 0; i < mFilterTvList.size(); i++) { + if(position == i){ + mFilterTvList.get(i).setAlpha(1); + ((TextView) mFilterTvList.get(i)).setTextColor(Color.parseColor("#FFEB58")); + }else{ + ((TextView) mFilterTvList.get(i)).setTextColor(getResources().getColor(R.color.white40)); + mFilterTvList.get(i).setAlpha(0.5f); + } + } + if(mComeFrom == 1) { + mDialog.dismiss(); + }else { + mResultListener.result(making, mFilter, mBeauty, false); + } + } + + public void setResultListener(ResultListener resultListener) { + mResultListener = resultListener; + } + public interface ResultListener{ + void result(int making, int mFilterType, int mBeauty, boolean isDismiss); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/mediacodec/OutputSurface.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/mediacodec/OutputSurface.java new file mode 100644 index 0000000..1c04ce2 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/mediacodec/OutputSurface.java @@ -0,0 +1,370 @@ +package 
com.aserbao.androidcustomcamera.whole.editVideo.mediacodec; +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +import android.content.res.Resources; +import android.graphics.Bitmap; +import android.graphics.Matrix; +import android.graphics.SurfaceTexture; +import android.opengl.EGL14; +import android.util.Log; +import android.view.Surface; + +import com.aserbao.androidcustomcamera.base.MyApplication; +import com.aserbao.androidcustomcamera.base.utils.StaticFinalValues; +import com.aserbao.androidcustomcamera.whole.createVideoByVoice.localEdit.VideoDrawer; +import com.aserbao.androidcustomcamera.whole.createVideoByVoice.localEdit.VideoInfo; +import com.aserbao.androidcustomcamera.whole.editVideo.view.BaseImageView; +import com.aserbao.androidcustomcamera.whole.record.filters.gpuFilters.baseFilter.GPUImageFilter; + +import java.util.ArrayList; + +import javax.microedition.khronos.egl.EGL10; +import javax.microedition.khronos.egl.EGLConfig; +import javax.microedition.khronos.egl.EGLContext; +import javax.microedition.khronos.egl.EGLDisplay; +import javax.microedition.khronos.egl.EGLSurface; + +/** + * Holds state associated with a Surface used for MediaCodec decoder output. + *

    + * The (width,height) constructor for this class will prepare GL, create a SurfaceTexture, + * and then create a Surface for that SurfaceTexture. The Surface can be passed to + * MediaCodec.configure() to receive decoder output. When a frame arrives, we latch the + * texture with updateTexImage, then render the texture with GL to a pbuffer. + *

    + * The no-arg constructor skips the GL preparation step and doesn't allocate a pbuffer. + * Instead, it just creates the Surface and SurfaceTexture, and when a frame arrives + * we just draw it on whatever surface is current. + *

    + * By default, the Surface will be using a BufferQueue in asynchronous mode, so we + * can potentially drop frames. + */ +class OutputSurface implements SurfaceTexture.OnFrameAvailableListener { + private static final String TAG = "OutputSurface"; + private static final boolean VERBOSE = false; + private static final int EGL_OPENGL_ES2_BIT = 4; + private EGL10 mEGL; + private EGLDisplay mEGLDisplay; + private EGLContext mEGLContext; + private EGLSurface mEGLSurface; + private SurfaceTexture mSurfaceTexture; + private Surface mSurface; + private Object mFrameSyncObject = new Object(); // guards mFrameAvailable + private boolean mFrameAvailable; +// private TextureRender mTextureRender; + private VideoDrawer mDrawer; + + /** + * Creates an OutputSurface using the current EGL context. Creates a Surface that can be + * passed to MediaCodec.configure(). + */ + public OutputSurface(VideoInfo info) { + if (info.width <= 0 || info.height <= 0) { + throw new IllegalArgumentException(); + } + setup(info); + } + private ArrayList mViews = new ArrayList<>(); + private Resources mResources; + public OutputSurface(VideoInfo info, ArrayList views, Resources res) { + mViews = views; + mResources = res; + if (info.width <= 0 || info.height <= 0) { + throw new IllegalArgumentException(); + } + setup(info); + } + + /** + * Creates instances of TextureRender and SurfaceTexture, and a Surface associated + * with the SurfaceTexture. 
+ */ + private void setup(VideoInfo info) { +// mTextureRender = new TextureRender(info); +// mTextureRender.surfaceCreated(); + + mDrawer = new VideoDrawer(MyApplication.getContext(), MyApplication.getContext().getResources()); + + if (mViews != null && mViews.size() > 0) { + for (int i = 0; i < mViews.size(); i++) { + BaseImageView baseImageView = mViews.get(i); + Bitmap bitmap = baseImageView.getBitmap(); + int width = bitmap.getWidth(); + int height = bitmap.getHeight(); +// int x = (int) ((baseImageView.getX() - width/2) /StaticFinalValues.VIDEO_WIDTH_HEIGHT) ; + int x = (int) baseImageView.getX() - width/2 ; +// int x = (int) baseImageView.getX(); + int y = (int) baseImageView.getY(); + int y1 = (int)(info.height - (y / StaticFinalValues.VIDEO_WIDTH_HEIGHT) - (height/StaticFinalValues.VIDEO_WIDTH_HEIGHT/2)); + Matrix matrix = baseImageView.getMatrix(); + float[] v = new float[9]; + matrix.getValues(v); + float leftBottomY = baseImageView.getLeftBottomY(); + float leftBottomX = baseImageView.getLeftBottomX(); + float rAngle = Math.round(Math.atan2(v[Matrix.MSKEW_X], v[Matrix.MSCALE_X]) * (180 / Math.PI)) * -1; + int y3 = 0; + if(info.width < info.height) { + y3 = (int) ((1120 - leftBottomY) * ((float) info.height / 1120)); + }else{ + y3 = (int) ((630 - leftBottomY) * ((float) info.height / 630)); + } + int viewWidth = (int) baseImageView.getViewWidth(); + int viewHeight = (int) baseImageView.getViewHeight(); + if(viewWidth > 200) { +// viewWidth = viewWidth + (int) ((viewWidth - 200) * 0.25); +// viewHeight = viewHeight + (int) ((viewHeight - 200) * 0.25); + viewWidth = (int)(viewWidth * 1.2); + viewHeight = (int)(viewHeight * 1.2); + } + mDrawer.addWaterMarkFilter(mResources, (int)leftBottomX, y3, viewWidth, viewHeight,baseImageView.getStartTime(),baseImageView.getEndTime(), bitmap,baseImageView.getGifId(),baseImageView.isGif(),rAngle); +// mDrawer.addWaterMarkFilter(mResources, (int)tx, 
y1,baseImageView.getWidth(),baseImageView.getHeight(),baseImageView.getStartTime(),baseImageView.getEndTime(), bitmap,baseImageView.getGifId(),baseImageView.isGif(),baseImageView.getRotateDegree()); +// mDrawer.addWaterMarkFilter(mResources,new Random().nextInt(100),new Random().nextInt(200),50,50,baseImageView.getStartTime(),baseImageView.getEndTime(),baseImageView.getBitmap()); + } + } + mDrawer.onSurfaceCreated(null,null); + mDrawer.onSurfaceChanged(null,info.width,info.height); + mDrawer.onVideoChanged(info); +// mDrawer.onSurfaceChanged(null,(int)(info.width * StaticFinalValues.VIDEO_WIDTH_HEIGHT),(int)(info.height * StaticFinalValues.VIDEO_WIDTH_HEIGHT)); + + // Even if we don't access the SurfaceTexture after the constructor returns, we + // still need to keep a reference to it. The Surface doesn't retain a reference + // at the Java level, so if we don't either then the object can get GCed, which + // causes the native finalizer to run. +// if (VERBOSE) Log.d(TAG, "textureID=" + mTextureRender.getTextureId()); +// mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId()); + mSurfaceTexture = mDrawer.getSurfaceTexture(); + + // This doesn't work if OutputSurface is created on the thread that CTS started for + // these test cases. + // + // The CTS-created thread has a Looper, and the SurfaceTexture constructor will + // create a Handler that uses it. The "frame available" message is delivered + // there, but since we're not a Looper-based thread we'll never see it. For + // this to do anything useful, OutputSurface must be created on a thread without + // a Looper, so that SurfaceTexture uses the main application Looper instead. + // + // Java language note: passing "this" out of a constructor is generally unwise, + // but we should be able to get away with it here. 
+ mSurfaceTexture.setOnFrameAvailableListener(this); + mSurface = new Surface(mSurfaceTexture); + } + + /** + * just for clip + * @param info + * @param clipMode + */ + public OutputSurface(VideoInfo info, int clipMode) { + if (info.width <= 0 || info.height <= 0) { + throw new IllegalArgumentException(); + } +// mTextureRender = new TextureRender(info); +// mTextureRender.setClipMode(clipMode); +// mTextureRender.surfaceCreated(); +// if (VERBOSE) Log.d(TAG, "textureID=" + mTextureRender.getTextureId()); +// mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId()); +// mSurfaceTexture.setOnFrameAvailableListener(this); +// mSurface = new Surface(mSurfaceTexture); + } + + /** + * Prepares EGL. We want a GLES 2.0 context and a surface that supports pbuffer. + */ + private void eglSetup(int width, int height) { + mEGL = (EGL10) EGLContext.getEGL(); + mEGLDisplay = mEGL.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY); + if (!mEGL.eglInitialize(mEGLDisplay, null)) { + throw new RuntimeException("unable to initialize EGL10"); + } + // Configure EGL for pbuffer and OpenGL ES 2.0. We want enough RGB bits + // to be able to tell if the frame is reasonable. + int[] attribList = { + EGL10.EGL_RED_SIZE, 8, + EGL10.EGL_GREEN_SIZE, 8, + EGL10.EGL_BLUE_SIZE, 8, + EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT, + EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, + EGL10.EGL_NONE + }; + EGLConfig[] configs = new EGLConfig[1]; + int[] numConfigs = new int[1]; + if (!mEGL.eglChooseConfig(mEGLDisplay, attribList, configs, 1, numConfigs)) { + throw new RuntimeException("unable to find RGB888+pbuffer EGL config"); + } + // Configure context for OpenGL ES 2.0. + int[] attrib_list = { + EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, + EGL10.EGL_NONE + }; + mEGLContext = mEGL.eglCreateContext(mEGLDisplay, configs[0], EGL10.EGL_NO_CONTEXT, + attrib_list); + checkEglError("eglCreateContext"); + if (mEGLContext == null) { + throw new RuntimeException("null context"); + } + // Create a pbuffer surface. 
By using this for output, we can use glReadPixels + // to test values in the output. + int[] surfaceAttribs = { + EGL10.EGL_WIDTH, width, + EGL10.EGL_HEIGHT, height, + EGL10.EGL_NONE + }; + mEGLSurface = mEGL.eglCreatePbufferSurface(mEGLDisplay, configs[0], surfaceAttribs); + checkEglError("eglCreatePbufferSurface"); + if (mEGLSurface == null) { + throw new RuntimeException("surface was null"); + } + } + + /** + * Discard all resources held by this class, notably the EGL context. + */ + public void release() { + if (mEGL != null) { + if (mEGL.eglGetCurrentContext().equals(mEGLContext)) { + // Clear the current context and surface to ensure they are discarded immediately. + mEGL.eglMakeCurrent(mEGLDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, + EGL10.EGL_NO_CONTEXT); + } + mEGL.eglDestroySurface(mEGLDisplay, mEGLSurface); + mEGL.eglDestroyContext(mEGLDisplay, mEGLContext); + //mEGL.eglTerminate(mEGLDisplay); + } + mSurface.release(); + // this causes a bunch of warnings that appear harmless but might confuse someone: + // W BufferQueue: [unnamed-3997-2] cancelBuffer: BufferQueue has been abandoned! + //mSurfaceTexture.release(); + // null everything out so future attempts to use this object will cause an NPE + mEGLDisplay = null; + mEGLContext = null; + mEGLSurface = null; + mEGL = null; +// mTextureRender = null; + mDrawer = null; + mSurface = null; + mSurfaceTexture = null; + } + + /** + * Makes our EGL context and surface current. + */ + public void makeCurrent() { + if (mEGL == null) { + throw new RuntimeException("not configured for makeCurrent"); + } + checkEglError("before makeCurrent"); + if (!mEGL.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) { + throw new RuntimeException("eglMakeCurrent failed"); + } + } + + /** + * Returns the Surface that we draw onto. + */ + public Surface getSurface() { + return mSurface; + } + + /** + * Replaces the fragment shader. 
+ */ + public void changeFragmentShader(String fragmentShader) { +// mTextureRender.changeFragmentShader(fragmentShader); + } + + /** + * Latches the next buffer into the texture. Must be called from the thread that created + * the OutputSurface object, after the onFrameAvailable callback has signaled that new + * data is available. + */ + public void awaitNewImage() { + final int TIMEOUT_MS = 500; + synchronized (mFrameSyncObject) { + while (!mFrameAvailable) { + try { + // Wait for onFrameAvailable() to signal us. Use a timeout to avoid + // stalling the test if it doesn't arrive. + mFrameSyncObject.wait(TIMEOUT_MS); + /*if (!mFrameAvailable) { + // TODO: if "spurious wakeup", continue while loop + throw new RuntimeException("Surface frame wait timed out"); + }*/ + } catch (InterruptedException ie) { + // shouldn't happen + throw new RuntimeException(ie); + } + } + mFrameAvailable = false; + } + // Latch the data. +// mTextureRender.checkGlError("before updateTexImage"); +// mDrawer.checkGlError("before updateTexImage"); +// mSurfaceTexture.updateTexImage(); + } + + /** + * Draws the data from SurfaceTexture onto the current EGL surface. + */ + public void drawImage() { +// mTextureRender.drawFrame(mSurfaceTexture); + mDrawer.onDrawFrame(null); + } + public void drawImage(long mCuurTime) { +// mTextureRender.drawFrame(mSurfaceTexture); +// mDrawer.onDrawFrame(null); + mDrawer.setMediaTime(mCuurTime); + } + + @Override + public void onFrameAvailable(SurfaceTexture st) { + + if (VERBOSE) Log.d(TAG, "new frame available"); + synchronized (mFrameSyncObject) { + if (mFrameAvailable) { + throw new RuntimeException("mFrameAvailable already set, frame could be dropped"); + } + mFrameAvailable = true; + mFrameSyncObject.notifyAll(); + } + } + + /** + * Che cks for EGL errors. 
+ */ + private void checkEglError(String msg) { + boolean failed = false; + int error; + while ((error = mEGL.eglGetError()) != EGL10.EGL_SUCCESS) { + Log.e(TAG, msg + ": EGL error: 0x" + Integer.toHexString(error)); + failed = true; + } + if (failed) { + throw new RuntimeException("EGL error encountered (see log)"); + } + } + public void addGpuFilter(GPUImageFilter filter){ + mDrawer.setGpuFilter(filter); + } + public void isBeauty(boolean isBeauty){ + mDrawer.isOpenBeauty(isBeauty); + } + + public void onVideoSizeChanged(VideoInfo info){ +// mTextureRender.onVideoSizeChanged(info); + } +} \ No newline at end of file diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/mediacodec/VideoClipper.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/mediacodec/VideoClipper.java new file mode 100644 index 0000000..2fa40ff --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/mediacodec/VideoClipper.java @@ -0,0 +1,509 @@ +package com.aserbao.androidcustomcamera.whole.editVideo.mediacodec; + +import android.annotation.TargetApi; +import android.content.res.Resources; +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaExtractor; +import android.media.MediaFormat; +import android.media.MediaMetadataRetriever; +import android.media.MediaMuxer; +import android.os.Build; +import android.util.Log; + +import com.aserbao.androidcustomcamera.blocks.mediaCodec.bigflake.encodeDecode.InputSurface; +import com.aserbao.androidcustomcamera.whole.createVideoByVoice.localEdit.VideoInfo; +import com.aserbao.androidcustomcamera.whole.editVideo.view.BaseImageView; +import com.aserbao.androidcustomcamera.whole.record.filters.gpuFilters.baseFilter.GPUImageFilter; +import com.aserbao.androidcustomcamera.whole.record.other.MagicFilterFactory; +import com.aserbao.androidcustomcamera.whole.record.other.MagicFilterType; + +import java.io.IOException; +import 
java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; + +import static android.media.MediaExtractor.SEEK_TO_PREVIOUS_SYNC; + + +@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2) +public class VideoClipper { + private static final String TAG = "VideoClipper"; + + final int TIMEOUT_USEC = 0; + private String mInputVideoPath; + private String mOutputVideoPath; + + MediaCodec videoDecoder; + MediaCodec videoEncoder; + MediaCodec audioDecoder; + MediaCodec audioEncoder; + + MediaExtractor mVideoExtractor; + MediaExtractor mAudioExtractor; + MediaMuxer mMediaMuxer; + static ExecutorService executorService = Executors.newFixedThreadPool(4); + int muxVideoTrack = -1; + int muxAudioTrack = -1; + int videoTrackIndex = -1; + int audioTrackIndex = -1; + long startPosition; + long clipDur; + int videoWidth; + int videoHeight; + int videoRotation; + OutputSurface outputSurface = null; + InputSurface inputSurface = null; + MediaFormat videoFormat; + MediaFormat audioFormat; + GPUImageFilter mFilter; + boolean isOpenBeauty; + boolean videoFinish = false; + boolean audioFinish = false; + boolean released = false; + long before; + long after; + Object lock = new Object(); + boolean muxStarted = false; + OnVideoCutFinishListener listener; + + public VideoClipper() { + try { + videoDecoder = MediaCodec.createDecoderByType("video/avc"); + videoEncoder = MediaCodec.createEncoderByType("video/avc"); + audioDecoder = MediaCodec.createDecoderByType("audio/mp4a-latm"); + audioEncoder = MediaCodec.createEncoderByType("audio/mp4a-latm"); + } catch (IOException e) { + e.printStackTrace(); + } + } + + public void setInputVideoPath(String inputPath) { + mInputVideoPath = inputPath; + initVideoInfo(); + } + + public void setOutputVideoPath(String outputPath) { + mOutputVideoPath = outputPath; + } + + + public void setOnVideoCutFinishListener(OnVideoCutFinishListener listener) { + this.listener = listener; + } + + 
public void setFilter(GPUImageFilter filter) { + if (filter == null ) { + mFilter = null; + return; + } + mFilter = filter; + } + public void setFilterType(MagicFilterType type) { + if (type == null || type == MagicFilterType.NONE) { + mFilter = null; + return; + } + mFilter = MagicFilterFactory.initFilters(type); + } + + public void showBeauty(){ + isOpenBeauty = true; + } + + private ArrayList mViews = new ArrayList<>(); + private Resources mResources; + public void clipVideo(long startPosition, long clipDur, ArrayList views, Resources resources) throws IOException { + mViews = views; + mResources = resources; + before = System.currentTimeMillis(); + this.startPosition = startPosition; + this.clipDur = clipDur; + mVideoExtractor = new MediaExtractor(); + mAudioExtractor = new MediaExtractor(); + mVideoExtractor.setDataSource(mInputVideoPath); + mAudioExtractor.setDataSource(mInputVideoPath); + mMediaMuxer = new MediaMuxer(mOutputVideoPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); + for (int i = 0; i < mVideoExtractor.getTrackCount(); i++) { + MediaFormat format = mVideoExtractor.getTrackFormat(i); + if (format.getString(MediaFormat.KEY_MIME).startsWith("video/")) { + videoTrackIndex = i; + videoFormat = format; + continue; + } + if (format.getString(MediaFormat.KEY_MIME).startsWith("audio/")) { + audioTrackIndex = i; + audioFormat = format; + continue; + } + Log.e(TAG, "clipVideo: audioTrackIndex= " + audioTrackIndex + " videoTrackIndex = " + videoTrackIndex); + } + executorService.execute(videoCliper); + executorService.execute(audioCliper); + } + + private Runnable videoCliper = new Runnable() { + @Override + public void run() { + mVideoExtractor.selectTrack(videoTrackIndex); + + long firstVideoTime = mVideoExtractor.getSampleTime(); + mVideoExtractor.seekTo(firstVideoTime + startPosition, SEEK_TO_PREVIOUS_SYNC); + + initVideoCodec(); + startVideoCodec(videoDecoder, videoEncoder, mVideoExtractor, inputSurface, outputSurface, firstVideoTime, startPosition, 
clipDur); + + videoFinish = true; + release(); + } + }; + + private Runnable audioCliper = new Runnable() { + @Override + public void run() { + mAudioExtractor.selectTrack(audioTrackIndex); + initAudioCodec(); + startAudioCodec(audioDecoder, audioEncoder, mAudioExtractor, mAudioExtractor.getSampleTime(), startPosition, clipDur); + audioFinish = true; + release(); + } + }; + + private void initVideoInfo() { + MediaMetadataRetriever retr = new MediaMetadataRetriever(); + retr.setDataSource(mInputVideoPath); + String width = retr.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH); + String height = retr.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT); + String rotation = retr.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION); + videoWidth = Integer.parseInt(width); + videoHeight = Integer.parseInt(height); + if(rotation.equals("180") && Integer.parseInt(width) > Integer.parseInt(height)){ + videoRotation = 180; + }else { + videoRotation = Integer.parseInt(rotation); + } + } + + private void initAudioCodec() { + audioDecoder.configure(audioFormat, null, null, 0); + audioDecoder.start(); + MediaFormat format = MediaFormat.createAudioFormat("audio/mp4a-latm", 44100, /*channelCount*/2); + format.setInteger(MediaFormat.KEY_BIT_RATE, 3000000); + format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC); + audioEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); + audioEncoder.start(); + } + + private void startAudioCodec(MediaCodec decoder, MediaCodec encoder, MediaExtractor extractor, long firstSampleTime, long startPosition, long duration) { + ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers(); + ByteBuffer[] decoderOutputBuffers = decoder.getOutputBuffers(); + ByteBuffer[] encoderInputBuffers = encoder.getInputBuffers(); + ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers(); + MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); + 
MediaCodec.BufferInfo outputInfo = new MediaCodec.BufferInfo(); + boolean done = false; + boolean inputDone = false; + boolean decodeDone = false; + extractor.seekTo(firstSampleTime + startPosition, SEEK_TO_PREVIOUS_SYNC); + int decodeinput=0; + int encodeinput=0; + int encodeoutput=0; + long lastEncodeOutputTimeStamp=-1; + while (!done) { + if (!inputDone) { + int inputIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC); + if (inputIndex >= 0) { + ByteBuffer inputBuffer = decoderInputBuffers[inputIndex]; + inputBuffer.clear(); + int readSampleData = extractor.readSampleData(inputBuffer, 0); + long dur = extractor.getSampleTime() - firstSampleTime - startPosition; + if ((dur < duration) && readSampleData > 0) { + decoder.queueInputBuffer(inputIndex, 0, readSampleData, extractor.getSampleTime(), 0); + decodeinput++; + extractor.advance(); + } else { + decoder.queueInputBuffer(inputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM); + inputDone = true; + } + } + } + if (!decodeDone) { + int index = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC); + if (index == MediaCodec.INFO_TRY_AGAIN_LATER) { + } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { + decoderOutputBuffers = decoder.getOutputBuffers(); + } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { + MediaFormat newFormat = decoder.getOutputFormat(); + } else if (index < 0) { + } else { + boolean canEncode = (info.size != 0 && info.presentationTimeUs - firstSampleTime > startPosition); + boolean endOfStream = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0; + if (canEncode&&!endOfStream) { + ByteBuffer decoderOutputBuffer = decoderOutputBuffers[index]; + + int encodeInputIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC); + if(encodeInputIndex>=0){ + ByteBuffer encoderInputBuffer = encoderInputBuffers[encodeInputIndex]; + encoderInputBuffer.clear(); + if (info.size < 4096) { + byte[] chunkPCM = new byte[info.size]; + decoderOutputBuffer.get(chunkPCM); + decoderOutputBuffer.clear(); + 
byte[] stereoBytes = new byte[info.size * 2]; + for (int i = 0; i < info.size; i += 2) { + stereoBytes[i * 2 + 0] = chunkPCM[i]; + stereoBytes[i * 2 + 1] = chunkPCM[i + 1]; + stereoBytes[i * 2 + 2] = chunkPCM[i]; + stereoBytes[i * 2 + 3] = chunkPCM[i + 1]; + } + encoderInputBuffer.put(stereoBytes); + encoder.queueInputBuffer(encodeInputIndex, 0, stereoBytes.length, info.presentationTimeUs, 0); + encodeinput++; + }else{ + encoderInputBuffer.put(decoderOutputBuffer); + encoder.queueInputBuffer(encodeInputIndex, info.offset, info.size, info.presentationTimeUs, 0); + encodeinput++; + } + } + } + if(endOfStream){ + int encodeInputIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC); + encoder.queueInputBuffer(encodeInputIndex, 0, 0, info.presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM); + decodeDone = true; + } + decoder.releaseOutputBuffer(index, false); + } + } + boolean encoderOutputAvailable = true; + while (encoderOutputAvailable) { + int encoderStatus = encoder.dequeueOutputBuffer(outputInfo, TIMEOUT_USEC); + if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) { + encoderOutputAvailable = false; + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { + encoderOutputBuffers = encoder.getOutputBuffers(); + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { + MediaFormat newFormat = encoder.getOutputFormat(); + startMux(newFormat, 1); + } else if (encoderStatus < 0) { + } else { + ByteBuffer encodedData = encoderOutputBuffers[encoderStatus]; + done = (outputInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0; + if (done) { + encoderOutputAvailable = false; + } + if (outputInfo.presentationTimeUs == 0 && !done) { + continue; + } + if (outputInfo.size != 0&&outputInfo.presentationTimeUs>0) { + /*encodedData.position(outputInfo.offset); + encodedData.limit(outputInfo.offset + outputInfo.size);*/ + if(!muxStarted){ + synchronized (lock){ + if(!muxStarted){ + try { + lock.wait(); + } catch (InterruptedException e) { + 
e.printStackTrace(); + } + } + } + } + if(outputInfo.presentationTimeUs>lastEncodeOutputTimeStamp){ + encodeoutput++; + System.out.println("videoCliper audio encodeOutput"+encodeoutput+" dataSize"+outputInfo.size+" sampeTime"+outputInfo.presentationTimeUs); + mMediaMuxer.writeSampleData(muxAudioTrack, encodedData, outputInfo); + lastEncodeOutputTimeStamp=outputInfo.presentationTimeUs; + } + } + + encoder.releaseOutputBuffer(encoderStatus, false); + } + if (encoderStatus != MediaCodec.INFO_TRY_AGAIN_LATER) { + continue; + } + } + } + } + + private void initVideoCodec() { + int encodeW = videoWidth; + int encodeH = videoHeight; + MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", encodeW, encodeH); + mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 3000000); + mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30); + mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); + mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); + videoEncoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); + inputSurface = new InputSurface(videoEncoder.createInputSurface()); + inputSurface.makeCurrent(); + videoEncoder.start(); + VideoInfo info = new VideoInfo(); + info.width = videoWidth ; + info.height = videoHeight; + info.rotation = videoRotation; + outputSurface = new OutputSurface(info,mViews,mResources); + outputSurface.isBeauty(isOpenBeauty); + + if (mFilter != null) { + outputSurface.addGpuFilter(mFilter); + } + + videoDecoder.configure(videoFormat, outputSurface.getSurface(), null, 0); + videoDecoder.start(); + } + + + private void startVideoCodec(MediaCodec decoder, MediaCodec encoder, MediaExtractor extractor, InputSurface inputSurface, OutputSurface outputSurface, long firstSampleTime, long startPosition, long duration) { + ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers(); + ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers(); + MediaCodec.BufferInfo 
info = new MediaCodec.BufferInfo(); + MediaCodec.BufferInfo outputInfo = new MediaCodec.BufferInfo(); + boolean done = false; + boolean inputDone = false; + boolean decodeDone = false; + while (!done) { + if (!inputDone) { + int inputIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC); + if (inputIndex >= 0) { + ByteBuffer inputBuffer = decoderInputBuffers[inputIndex]; + inputBuffer.clear(); + int readSampleData = extractor.readSampleData(inputBuffer, 0); + long dur = extractor.getSampleTime() - firstSampleTime - startPosition; + if ((dur < duration) && readSampleData > 0) { + decoder.queueInputBuffer(inputIndex, 0, readSampleData, extractor.getSampleTime(), 0); + extractor.advance(); + } else { + decoder.queueInputBuffer(inputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM); + inputDone = true; + } + } + } + if (!decodeDone) { + int index = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC); + if (index == MediaCodec.INFO_TRY_AGAIN_LATER) { + + } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { + + } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { + + MediaFormat newFormat = decoder.getOutputFormat(); + + } else if (index < 0) { + } else { + boolean doRender = (info.size != 0 && info.presentationTimeUs - firstSampleTime > startPosition); + decoder.releaseOutputBuffer(index, doRender); + if (doRender) { + + outputSurface.awaitNewImage(); + + outputSurface.drawImage(info.presentationTimeUs / 1000); + listener.onProgress((float)info.presentationTimeUs / (float)duration); + + inputSurface.setPresentationTime(info.presentationTimeUs * 1000); + inputSurface.swapBuffers(); + } + if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { + encoder.signalEndOfInputStream(); + decodeDone = true; + } + } + } + boolean encoderOutputAvailable = true; + while (encoderOutputAvailable) { + int encoderStatus = encoder.dequeueOutputBuffer(outputInfo, TIMEOUT_USEC); + if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) { + + encoderOutputAvailable = 
false; + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { + encoderOutputBuffers = encoder.getOutputBuffers(); + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { + MediaFormat newFormat = encoder.getOutputFormat(); + startMux(newFormat, 0); + } else if (encoderStatus < 0) { + } else { + ByteBuffer encodedData = encoderOutputBuffers[encoderStatus]; + done = (outputInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0; + if (done) { + encoderOutputAvailable = false; + } + + if (outputInfo.presentationTimeUs == 0 && !done) { + continue; + } + if (outputInfo.size != 0) { + encodedData.position(outputInfo.offset); + encodedData.limit(outputInfo.offset + outputInfo.size); + if(!muxStarted){ + synchronized (lock){ + if(!muxStarted){ + try { + lock.wait(); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + } + } + mMediaMuxer.writeSampleData(muxVideoTrack, encodedData, outputInfo); + } + + encoder.releaseOutputBuffer(encoderStatus, false); + } + if (encoderStatus != MediaCodec.INFO_TRY_AGAIN_LATER) { + + continue; + } + } + } + } + + private void startMux(MediaFormat mediaFormat, int flag) { + if (flag == 0) { + muxVideoTrack = mMediaMuxer.addTrack(mediaFormat); + } else if (flag == 1) { + muxAudioTrack = mMediaMuxer.addTrack(mediaFormat); + } + synchronized (lock) { + if (muxAudioTrack != -1 && muxVideoTrack != -1 && !muxStarted) { + mMediaMuxer.start(); + muxStarted = true; + lock.notify(); + } + } + } + + private synchronized void release() { + if (!videoFinish || !audioFinish || released) { + return; + } + mVideoExtractor.release(); + mAudioExtractor.release(); + mMediaMuxer.stop(); + mMediaMuxer.release(); + if (outputSurface != null) { + outputSurface.release(); + } + if (inputSurface != null) { + inputSurface.release(); + } + videoDecoder.stop(); + videoDecoder.release(); + videoEncoder.stop(); + videoEncoder.release(); + audioDecoder.stop(); + audioDecoder.release(); + audioEncoder.stop(); + 
audioEncoder.release(); + released = true; + after = System.currentTimeMillis(); + if (listener != null) { + listener.onFinish(); + } + } + + public interface OnVideoCutFinishListener { + void onFinish(); + void onProgress(float percent); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/BaseImageView.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/BaseImageView.java new file mode 100644 index 0000000..f94a916 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/BaseImageView.java @@ -0,0 +1,280 @@ +package com.aserbao.androidcustomcamera.whole.editVideo.view; + +import android.content.Context; +import android.graphics.Bitmap; +import android.graphics.Matrix; +import android.graphics.Movie; +import android.support.annotation.Nullable; +import android.util.AttributeSet; +import android.widget.ImageView; + +import java.util.List; + + + +public class BaseImageView extends ImageView { + + protected float X; + protected float Y; + protected float viewWidth; + protected float viewHeight; + protected int resourceId; + protected float rotateDegree; + protected long startTime; + protected long endTime; + protected boolean isGif;//是否是gif + protected String resourceGif;//gif位置 + + protected float mScaleX; + protected float mScaleY; + + protected float posX; + protected float posY; + + protected float leftBottomX;//左下角坐标 + protected float leftBottomY; + + protected Matrix matrix = new Matrix(); + + + protected int gifId; + + protected long timeStamp; + + private int frameIndex; //播放gif图的第几帧 + private List bitmaps; + + + public float getLeftBottomX() { + return leftBottomX; + } + + public void setLeftBottomX(float leftBottomX) { + this.leftBottomX = leftBottomX; + } + + public float getLeftBottomY() { + return leftBottomY; + } + + public void setLeftBottomY(float leftBottomY) { + this.leftBottomY = leftBottomY; + } + + public int getFrameIndex() { + return frameIndex; + 
} + + public void setFrameIndex(int frameIndex) { + if (bitmaps != null && bitmaps.size() > 0) { + this.frameIndex = frameIndex % bitmaps.size(); + } + } + + public List getBitmaps() { + return bitmaps; + } + + public void setBitmaps(List bitmaps) { + this.bitmaps = bitmaps; + } + + public long getTimeStamp() { + return timeStamp; + } + + public void setTimeStamp(long timeStamp) { + this.timeStamp = timeStamp; + } + + public int getGifId() { + return gifId; + } + + public void setGifId(int gifId) { + this.gifId = gifId; + } + + public float getPosX() { + return posX; + } + + public void setPosX(float posX) { + this.posX = posX; + } + + public float getPosY() { + return posY; + } + + public void setPosY(float posY) { + this.posY = posY; + } + + + @Override + public Matrix getMatrix() { + return matrix; + } + + public void setMatrix(Matrix matrix) { + this.matrix = matrix; + } + + public float getmScaleX() { + return mScaleX; + } + + public void setmScaleX(float mScaleX) { + this.mScaleX = mScaleX; + } + + public float getmScaleY() { + return mScaleY; + } + + public void setmScaleY(float mScaleY) { + this.mScaleY = mScaleY; + } + + /** + * 记录动画开始的时间 + */ + private long mMovieStart; + + /** + * 播放GIF动画的关键类 + */ + private Movie mMovie; + + + /** + * GIF图片的宽度 + */ + private int mImageWidth; + + /** + * GIF图片的高度 + */ + private int mImageHeight; + + + public boolean isGif() { + return isGif; + } + + public void setGif(boolean gif) { + isGif = gif; + } + + public String getResourceGif() { + return resourceGif; + } + + public void setResourceGif(String resourceGif) { + this.resourceGif = resourceGif; + } + + public BaseImageView(Context context) { + super(context); + initData(); + } + + + public BaseImageView(Context context, @Nullable AttributeSet attrs) { + super(context, attrs); + initData(); + } + + public BaseImageView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) { + super(context, attrs, defStyleAttr); + initData(); + } + + private void initData() 
{ + endTime = 2000; + } + + + @Override + public float getX() { + return X; + } + + @Override + public void setX(float x) { + X = x; + } + + @Override + public float getY() { + return Y; + } + + @Override + public void setY(float y) { + Y = y; + } + + public float getViewWidth() { + return viewWidth; + } + + public void setViewWidth(float viewWidth) { + this.viewWidth = viewWidth; + } + + public float getViewHeight() { + return viewHeight; + } + + public void setViewHeight(float viewHeight) { + this.viewHeight = viewHeight; + } + + public int getResourceId() { + return resourceId; + } + + public void setResourceId(int resourceId) { + this.resourceId = resourceId; + } + + public float getRotateDegree() { + return rotateDegree; + } + + public void setRotateDegree(float rotateDegree) { + this.rotateDegree = rotateDegree; + } + + public long getStartTime() { + return startTime; + } + + public void setStartTime(long startTime) { + this.startTime = startTime; + } + + public long getEndTime() { + return endTime; + } + + public void setEndTime(long endTime) { + this.endTime = endTime; + } + + public Bitmap getBitmap(){ + return null; + }; + + + public float getParentX(){ + return super.getX(); + } + + public float getParentY(){ + return super.getY(); + } + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/BubbleInputDialog.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/BubbleInputDialog.java new file mode 100644 index 0000000..a2e3b1d --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/BubbleInputDialog.java @@ -0,0 +1,155 @@ +package com.aserbao.androidcustomcamera.whole.editVideo.view; + +import android.app.Dialog; +import android.content.Context; +import android.graphics.Color; +import android.os.Handler; +import android.text.Editable; +import android.text.TextUtils; +import android.text.TextWatcher; +import android.view.KeyEvent; +import android.view.View; +import 
android.view.inputmethod.EditorInfo; +import android.view.inputmethod.InputMethodManager; +import android.widget.EditText; +import android.widget.TextView; +import android.widget.Toast; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.base.utils.CommonUtils; + + +/** + * Created by Abner on 15/6/12. + * QQ 230877476 + * Email nimengbo@gmail.com + */ +public class BubbleInputDialog extends Dialog { + private final String defaultStr; + private EditText et_bubble_input; + private TextView tv_show_count; + private TextView tv_action_done; + private static final int MAX_COUNT = 33; //字数最大限制33个 + private Context mContext; + private BubbleTextView bubbleTextView; + + public BubbleInputDialog(Context context) { + super(context, android.R.style.Theme_Translucent_NoTitleBar); + mContext = context; + defaultStr = context.getString(R.string.double_click_input_text); + initView(); + } + + public BubbleInputDialog(Context context, BubbleTextView view) { + super(context, android.R.style.Theme_Translucent_NoTitleBar); + mContext = context; + defaultStr = context.getString(R.string.double_click_input_text); + bubbleTextView = view; + initView(); + } + + public void setBubbleTextView(BubbleTextView bubbleTextView) { + this.bubbleTextView = bubbleTextView; + if (defaultStr.equals(bubbleTextView.getmStr())) { + et_bubble_input.setText(""); + } else { + et_bubble_input.setText(bubbleTextView.getmStr()); + et_bubble_input.setSelection(bubbleTextView.getmStr().length()); + } + } + + + private void initView() { + setContentView(R.layout.view_input_dialog); + tv_action_done = (TextView) findViewById(R.id.tv_action_done); + et_bubble_input = (EditText) findViewById(R.id.et_bubble_input); + tv_show_count = (TextView) findViewById(R.id.tv_show_count); + et_bubble_input.addTextChangedListener(new TextWatcher() { + @Override + public void beforeTextChanged(CharSequence s, int start, int count, int after) { + + } + + @Override + public void 
onTextChanged(CharSequence s, int start, int before, int count) { + long textLength = CommonUtils.calculateLength(s); + tv_show_count.setText(String.valueOf(MAX_COUNT - textLength)); + if (textLength > MAX_COUNT) { + tv_show_count.setTextColor(Color.parseColor("#e73a3d")); + } else { + tv_show_count.setTextColor(Color.parseColor("#8b8b8b")); + } + } + + @Override + public void afterTextChanged(Editable s) { + + } + }); + et_bubble_input.setOnEditorActionListener(new TextView.OnEditorActionListener() { + @Override + public boolean onEditorAction(TextView v, int actionId, KeyEvent event) { + if (actionId == EditorInfo.IME_ACTION_DONE) { + done(); + return true; + } + return false; + } + }); + tv_action_done.setOnClickListener(new View.OnClickListener() { + @Override + public void onClick(View v) { + done(); + } + }); + } + + + @Override + public void show() { + super.show(); + Handler handler = new Handler(); + handler.postDelayed(new Runnable() { + @Override + public void run() { + InputMethodManager m = (InputMethodManager) et_bubble_input.getContext().getSystemService(Context.INPUT_METHOD_SERVICE); + m.toggleSoftInput(0, InputMethodManager.SHOW_FORCED); + } + }, 500); + + } + + @Override + public void dismiss() { + super.dismiss(); + InputMethodManager m = (InputMethodManager) et_bubble_input.getContext().getSystemService(Context.INPUT_METHOD_SERVICE); + m.hideSoftInputFromWindow(et_bubble_input.getWindowToken(), 0); + } + + public interface CompleteCallBack { + void onComplete(View bubbleTextView, String str); + } + + private CompleteCallBack mCompleteCallBack; + + public void setCompleteCallBack(CompleteCallBack completeCallBack) { + this.mCompleteCallBack = completeCallBack; + } + + private void done() { + if (Integer.valueOf(tv_show_count.getText().toString()) < 0) { + Toast.makeText(mContext, "您已超过字数限制", Toast.LENGTH_LONG).show(); + return; + } + dismiss(); + if (mCompleteCallBack != null) { + String str; + if (TextUtils.isEmpty(et_bubble_input.getText())) { 
+ str = ""; + } else { + str = et_bubble_input.getText().toString(); + } + mCompleteCallBack.onComplete(bubbleTextView, str); + } + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/BubbleTextView.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/BubbleTextView.java new file mode 100644 index 0000000..38fb9a1 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/BubbleTextView.java @@ -0,0 +1,1034 @@ +package com.aserbao.androidcustomcamera.whole.editVideo.view; + +import android.content.Context; +import android.graphics.Bitmap; +import android.graphics.BitmapFactory; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Matrix; +import android.graphics.Paint; +import android.graphics.PaintFlagsDrawFilter; +import android.graphics.PointF; +import android.graphics.Rect; +import android.support.v4.view.MotionEventCompat; +import android.text.TextPaint; +import android.text.TextUtils; +import android.util.AttributeSet; +import android.util.DisplayMetrics; +import android.util.Log; +import android.util.TypedValue; +import android.view.MotionEvent; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.base.utils.DisplayUtil; +import com.aserbao.androidcustomcamera.whole.editVideo.beans.BubblePropertyModel; + +/** + * Created by Abner on 15/6/7. 
+ * QQ 230877476 + * Email nimengbo@gmail.com + */ +public class BubbleTextView extends BaseImageView { + + private static final String TAG = BubbleTextView.class.getSimpleName(); + + + private Bitmap deleteBitmap; + private Bitmap flipVBitmap; + private Bitmap topBitmap; + private Bitmap resizeBitmap; + private Bitmap mBitmap; + private Bitmap originBitmap; + private Rect dst_delete; + private Rect dst_resize; + private Rect dst_flipV; + private Rect dst_top; + + + private int deleteBitmapWidth; + private int deleteBitmapHeight; + private int resizeBitmapWidth; + private int resizeBitmapHeight; + private int flipVBitmapWidth; + private int flipVBitmapHeight; + + //置顶 + private int topBitmapWidth; + private int topBitmapHeight; + private Paint localPaint; + private int mScreenwidth, mScreenHeight; + private static final float BITMAP_SCALE = 0.7f; + private PointF mid = new PointF(); + private OperationListener operationListener; + private float lastRotateDegree; + + //是否是第二根手指放下 + private boolean isPointerDown = false; + //手指移动距离必须超过这个数值 + private final float pointerLimitDis = 20f; + private final float pointerZoomCoeff = 0.09f; + + private final float moveLimitDis = 0.5f; + /** + * 对角线的长度 + */ + private float lastLength; + private boolean isInResize = false; + +// private Matrix matrix = new Matrix(); + /** + * 是否在四条线内部 + */ + private boolean isInSide; + + private float lastX, lastY; + /** + * 是否在编辑模式 + */ + private boolean isInEdit = true; + + private float MIN_SCALE = 0.5f; + + private float MAX_SCALE = 1.5f; + + private double halfDiagonalLength; + + private float oringinWidth = 0; + + private DisplayMetrics dm; + + /** + * 文字部分 + */ + private final String defaultStr; + //显示的字符串 + private String mStr = ""; + + //字号默认16sp + private final float mDefultSize = 14; + private float mFontSize = 10; + //最大最小字号 + private final float mMaxFontSize = 25; + private final float mMinFontSize = 18; + + //字离旁边的距离 + private final float mDefaultMargin = 20; + private float 
mMargin = 20; + + //绘制文字的画笔 + private TextPaint mFontPaint; + + //绘制背景图片的话题 + private Paint paint; + + private Canvas canvasText; + + private Paint.FontMetrics fm; + //由于系统基于字体的底部来绘制文本,所有需要加上字体的高度。 + private float baseline; + + boolean isInit = true; + + //双指缩放时的初始距离 + private float oldDis; + + //是否按下 + private boolean isDown = false; + //是否移动 + private boolean isMove = false; + //是否抬起手 + private boolean isUp = false; + //是否在顶部 + private boolean isTop = true; + + private boolean isInBitmap; + + private final int fontColor; + + private boolean isInputEdit; + + private final long bubbleId; + private int selectIndex; + + private int resourceId; + private boolean isInRomate = false; + int direction = 0; + private boolean isFaceBottom = true;//边角是否在底部 + private boolean isFaceRight = false;//边角是否在右边 + protected Matrix rotateMatrix = new Matrix(); + private long mTouchDownTime; + + + public BubbleTextView(Context context, AttributeSet attrs) { + super(context, attrs); + defaultStr = getContext().getString(R.string.double_click_input_text); + this.fontColor = Color.BLACK; + bubbleId = 0; + init(); + } + + public BubbleTextView(Context context) { + super(context); + defaultStr = getContext().getString(R.string.double_click_input_text); + this.fontColor = Color.BLACK; + bubbleId = 0; + init(); + } + + public BubbleTextView(Context context, AttributeSet attrs, int defStyleAttr) { + super(context, attrs, defStyleAttr); + defaultStr = getContext().getString(R.string.double_click_input_text); + this.fontColor = Color.BLACK; + bubbleId = 0; + init(); + } + + /** + * @param context + * @param fontColor + * @param bubbleId some fuck id + */ + public BubbleTextView(Context context, int fontColor, long bubbleId) { + super(context); + defaultStr = getContext().getString(R.string.double_click_input_text); + this.fontColor = fontColor; + this.bubbleId = bubbleId; + init(); + } + + /** + * @param context + * @param fontColor + * @param bubbleId some fuck id + */ + public 
BubbleTextView(Context context, int fontColor, long bubbleId, int selectIndex) { + super(context); + defaultStr = getContext().getString(R.string.double_click_input_text); + this.fontColor = fontColor; + this.bubbleId = bubbleId; + this.selectIndex = selectIndex; + Log.e(TAG, "selectIndex:" + selectIndex); + init(); + } + + + private void init() { + dm = getResources().getDisplayMetrics(); + dst_delete = new Rect(); + dst_resize = new Rect(); + dst_flipV = new Rect(); + dst_top = new Rect(); + localPaint = new Paint(); + localPaint.setColor(getResources().getColor(R.color.white)); + localPaint.setAntiAlias(true); + localPaint.setDither(true); + localPaint.setStyle(Paint.Style.STROKE); + localPaint.setStrokeWidth(2.0f); + mScreenwidth = dm.widthPixels; + mScreenHeight = dm.heightPixels; + mFontSize = mDefultSize; + mFontPaint = new TextPaint(); + mFontPaint.setTextSize(TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_SP, mFontSize, dm)); + mFontPaint.setColor(fontColor); + mFontPaint.setTextAlign(Paint.Align.CENTER); + mFontPaint.setAntiAlias(true); + mFontPaint.setAlpha(255); + paint = new Paint(); + paint.setAlpha(204); + fm = mFontPaint.getFontMetrics(); + + baseline = fm.descent - fm.ascent; + isInit = true; + mStr = defaultStr; + } + + @Override + protected void onDraw(Canvas canvas) { + if (mBitmap != null) { + float[] arrayOfFloat = new float[9]; + matrix.getValues(arrayOfFloat); + float f1 = 0.0F * arrayOfFloat[0] + 0.0F * arrayOfFloat[1] + arrayOfFloat[2]; + float f2 = 0.0F * arrayOfFloat[3] + 0.0F * arrayOfFloat[4] + arrayOfFloat[5]; + float f3 = arrayOfFloat[0] * this.mBitmap.getWidth() + 0.0F * arrayOfFloat[1] + arrayOfFloat[2]; + float f4 = arrayOfFloat[3] * this.mBitmap.getWidth() + 0.0F * arrayOfFloat[4] + arrayOfFloat[5]; + float f5 = 0.0F * arrayOfFloat[0] + arrayOfFloat[1] * this.mBitmap.getHeight() + arrayOfFloat[2]; + float f6 = 0.0F * arrayOfFloat[3] + arrayOfFloat[4] * this.mBitmap.getHeight() + arrayOfFloat[5]; + float f7 = arrayOfFloat[0] * 
this.mBitmap.getWidth() + arrayOfFloat[1] * this.mBitmap.getHeight() + arrayOfFloat[2]; + float f8 = arrayOfFloat[3] * this.mBitmap.getWidth() + arrayOfFloat[4] * this.mBitmap.getHeight() + arrayOfFloat[5]; + + canvas.save(); + //先往文字上绘图 + mBitmap = rotateToDegrees(originBitmap.copy(Bitmap.Config.ARGB_8888, true)); + canvasText.setBitmap(mBitmap); + canvasText.setDrawFilter(new PaintFlagsDrawFilter(0, Paint.ANTI_ALIAS_FLAG | Paint.FILTER_BITMAP_FLAG)); + canvas.setDrawFilter(new PaintFlagsDrawFilter(0, Paint.ANTI_ALIAS_FLAG | Paint.FILTER_BITMAP_FLAG)); + float left = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 15, dm); + float scalex = arrayOfFloat[Matrix.MSCALE_X]; + float skewy = arrayOfFloat[Matrix.MSKEW_Y]; + float rScale = (float) Math.sqrt(scalex * scalex + skewy * skewy); + float size = rScale * 0.75f * mDefultSize; + if (size > mMaxFontSize) { + mFontSize = mMaxFontSize; + } else if (size < mMinFontSize) { + mFontSize = mMinFontSize; + } else { + mFontSize = size; + } + float parts = 2; + float leftMarginCount = 4; + float marginLeft = mBitmap.getWidth() / 2; + + if (selectIndex == 0) { + parts = 2.8f; + mFontSize = 16; + if (!isFaceBottom) { //1 + parts = 1.4f; + } + } + if (selectIndex == 1) { + parts = 2.8f; + mFontSize = 16; + if (!isFaceBottom) { + parts = 1.3f; + } + } + + if (selectIndex == 2) { + parts = 2f; + mFontSize = 16; + if (!isFaceBottom) { + parts = 1.4f; + } + leftMarginCount = 7; + if(isFaceRight){ + marginLeft *= 0.9f; + }else { + marginLeft *= 1.1f; + } + } + + if (selectIndex == 3) { + parts = 2f; + mFontSize = 16; + if (!isFaceBottom) { + parts = 1.5f; + } + } + + if (selectIndex == 4) { + parts = 2f; + mFontSize = 16; + if (!isFaceBottom) { + parts = 1.4f; + } + } + + if (selectIndex == 5) { + parts = 2.14f; + mFontSize = 15; + leftMarginCount = 5.8f; + if (!isFaceBottom) { + parts = 1.2f; + } + if(isFaceRight){ + marginLeft *= 1.1f; + }else { + marginLeft *= 0.9f; + } + } + if (selectIndex == 6) { + mFontSize = 13; + if 
(!isFaceBottom) { + parts = 1.5f; + } else { + parts = 1.7f; + } + } + + if (selectIndex == 7) { + parts = 2f; + mFontSize = 16; + if (!isFaceBottom) { + parts = 1.4f; + } + if(isFaceRight){ + marginLeft *= 1f; + }else { + marginLeft *= 1.05f; + } + } + mFontPaint.setTextSize(TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_SP, mFontSize, dm)); +// String[] texts = autoSplit(mStr, mFontPaint, mBitmap.getWidth() - left * 3); + String[] texts = autoSplit(mStr, mFontPaint, mBitmap.getWidth() - left * leftMarginCount); + float height = (texts.length * (baseline + fm.leading) + baseline); + float top = (mBitmap.getHeight() - height) / parts; + //基于底线开始画的 + top += baseline; + + for (String text : texts) { + if (TextUtils.isEmpty(text)) { + continue; + } + canvasText.drawText(text, marginLeft, top, mFontPaint); //坐标以控件左上角为原点 + top += baseline + fm.leading; //添加字体行间距 + + } +// rotateMatrix.set(matrix); +// if (!isMove) { +// switch (direction % 4) { +// case 0: +// break; +// case 1: +// rotateMatrix.postRotate(90, X, Y); +// break; +// case 2: +// rotateMatrix.postRotate(180, X, Y); +// break; +// case 3: +// rotateMatrix.postRotate(270, X, Y); +// break; +// } +// } + canvas.drawBitmap(mBitmap, matrix, paint); + + //删除在右上角 + dst_delete.left = (int) (f3 - deleteBitmapWidth / 2); + dst_delete.right = (int) (f3 + deleteBitmapWidth / 2); + dst_delete.top = (int) (f4 - deleteBitmapHeight / 2); + dst_delete.bottom = (int) (f4 + deleteBitmapHeight / 2); + //拉伸等操作在右下角 + dst_resize.left = (int) (f7 - resizeBitmapWidth / 2); + dst_resize.right = (int) (f7 + resizeBitmapWidth / 2); + dst_resize.top = (int) (f8 - resizeBitmapHeight / 2); + dst_resize.bottom = (int) (f8 + resizeBitmapHeight / 2); + //置顶在左上角 + dst_top.left = (int) (f1 - topBitmapWidth / 2); + dst_top.right = (int) (f1 + topBitmapWidth / 2); + dst_top.top = (int) (f2 - topBitmapHeight / 2); + dst_top.bottom = (int) (f2 + topBitmapHeight / 2); + //水平镜像在右下角 + dst_flipV.left = (int) (f5 - topBitmapWidth / 2); + 
dst_flipV.right = (int) (f5 + topBitmapWidth / 2); + dst_flipV.top = (int) (f6 - topBitmapHeight / 2); + dst_flipV.bottom = (int) (f6 + topBitmapHeight / 2); + + //dst_delete dst_resize dst_top dst_flipV + leftBottomX = dst_delete.centerX(); + leftBottomX = leftBottomX > dst_resize.centerX() ? dst_resize.centerX() : leftBottomX; + leftBottomX = leftBottomX > dst_top.centerX() ? dst_top.centerX() : leftBottomX; + leftBottomX = leftBottomX > dst_flipV.centerX() ? dst_flipV.centerX() : leftBottomX; + + leftBottomY = dst_flipV.centerY(); + leftBottomY = leftBottomY < dst_resize.centerY() ? dst_resize.centerY() : leftBottomY; + leftBottomY = leftBottomY < dst_top.centerY() ? dst_top.centerY() : leftBottomY; + leftBottomY = leftBottomY < dst_flipV.centerY() ? dst_flipV.centerY() : leftBottomY; + + float distanceX = Math.abs(dst_delete.centerX() - dst_top.centerX()); + float distanceY = Math.abs(dst_delete.centerY() - dst_top.centerY()); + viewWidth = (float) Math.sqrt(distanceX * distanceX + distanceY * distanceY); + float distanceHeightX = Math.abs(dst_resize.centerX() - dst_delete.centerX()); + float distanceHeightY = Math.abs(dst_resize.centerY() - dst_delete.centerY()); + viewHeight = (float) Math.sqrt(distanceHeightX * distanceHeightX + distanceHeightY * distanceHeightY); + + if (isInEdit) { + canvas.drawLine(f1, f2, f3, f4, localPaint); + canvas.drawLine(f3, f4, f7, f8, localPaint); + canvas.drawLine(f5, f6, f7, f8, localPaint); + canvas.drawLine(f5, f6, f1, f2, localPaint); + + + canvas.drawBitmap(deleteBitmap, null, dst_delete, null); + canvas.drawBitmap(resizeBitmap, null, dst_resize, null); +// canvas.drawBitmap(flipVBitmap, null, dst_flipV, null); + canvas.drawBitmap(topBitmap, null, dst_top, null); + } + + canvas.restore(); + } + } + + /** + * 图片旋转 + * + * @param tmpBitmap + * @param + * @return + */ + public Bitmap rotateToDegrees(Bitmap tmpBitmap) { + Matrix matrix = new Matrix(); + matrix.reset(); +// matrix.setRotate(degrees); + switch (direction % 4) { + 
case 0: + isFaceBottom = true; + isFaceRight = false; + break; + case 1: + isFaceBottom = true; + isFaceRight = true; + matrix.postScale(-1.0F, 1.0F); + break; + case 2: + isFaceBottom = false; + isFaceRight = true; + matrix.postScale(-1.0F, -1.0F); + break; + case 3: + isFaceBottom = false; + isFaceRight = false; + matrix.postScale(1.0F, -1.0F); + break; + } + + return tmpBitmap = + Bitmap.createBitmap(tmpBitmap, 0, 0, tmpBitmap.getWidth(), tmpBitmap.getHeight(), matrix, + true); + } + + + public void setText(String text) { +// if (TextUtils.isEmpty(text)) { +// mStr = defaultStr; +// mFontSize = mDefultSize; +// mMargin = mDefaultMargin; +// } else { + mStr = text; +// } + invalidate(); + } + + + public void setParentSize(int mScreenwidth, int mScreenHeight) { +// this.mScreenwidth = mScreenwidth; +// this.mScreenHeight = mScreenHeight; + this.mScreenwidth = mScreenwidth > mScreenHeight ? mScreenHeight : mScreenwidth; + this.mScreenHeight = mScreenwidth > mScreenHeight ? mScreenwidth : mScreenHeight; + } + + @Override + public void setImageResource(int resId) { + this.resourceId = resId; + matrix.reset(); + //使用拷贝 不然会对资源文件进行引用而修改 + setBitmap(BitmapFactory.decodeResource(getResources(), resId)); + } + + public void setImageResource(int resId, BubblePropertyModel model) { + matrix.reset(); + //使用拷贝 不然会对资源文件进行引用而修改 + setBitmap(BitmapFactory.decodeResource(getResources(), resId), model); + } + + public void setBitmap(Bitmap bitmap, BubblePropertyModel model) { + mFontSize = mDefultSize; + originBitmap = bitmap; + mBitmap = originBitmap.copy(Bitmap.Config.ARGB_8888, true); + canvasText = new Canvas(mBitmap); + setDiagonalLength(); + initBitmaps(); + int w = mBitmap.getWidth(); + int h = mBitmap.getHeight(); + oringinWidth = w; + + mStr = model.getText(); + float scale = model.getScaling() * mScreenwidth / mBitmap.getWidth(); + if (scale > MAX_SCALE) { + scale = MAX_SCALE; + } else if (scale < MIN_SCALE) { + scale = MIN_SCALE; + } + float degree = (float) 
Math.toDegrees(model.getDegree()); + matrix.postRotate(-degree, w >> 1, h >> 1); + matrix.postScale(scale, scale, w >> 1, h >> 1); + float midX = model.getxLocation() * mScreenwidth; + float midY = model.getyLocation() * mScreenwidth; + float offset = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 22, dm); + midX = midX - (w * scale) / 2 - offset; + midY = midY - (h * scale) / 2 - offset; + matrix.postTranslate(midX, midY); + invalidate(); + } + + public void setBitmap(Bitmap bitmap) { + mFontSize = mDefultSize; + originBitmap = bitmap; + mBitmap = originBitmap.copy(Bitmap.Config.ARGB_8888, true); + canvasText = new Canvas(mBitmap); + setDiagonalLength(); + initBitmaps(); + int w = mBitmap.getWidth(); + int h = mBitmap.getHeight(); + oringinWidth = w; + float topbarHeight = DisplayUtil.dipToPx(getContext(), 50); + float initScale = 1.3f * (MIN_SCALE + MAX_SCALE) / 2; + matrix.postScale(initScale, initScale, w / 2, h / 2); + //Y坐标为 (顶部操作栏+正方形图)/2 + matrix.postTranslate(mScreenwidth / 2 - w / 2, (mScreenwidth) / 2 - h / 2); + invalidate(); + } + + private void setDiagonalLength() { + halfDiagonalLength = Math.hypot(mBitmap.getWidth(), mBitmap.getHeight()) / 2; + } + + private void initBitmaps() { + + float minWidth = mScreenwidth / 8; + if (mBitmap.getWidth() < minWidth) { + MIN_SCALE = 1f; + } else { + MIN_SCALE = 1.0f * minWidth / mBitmap.getWidth(); + } + + if (mBitmap.getWidth() > mScreenwidth) { + MAX_SCALE = 1; + } else { + MAX_SCALE = 1.0f * mScreenwidth / mBitmap.getWidth(); + } + topBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.camera_scaling); + deleteBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.camera_delete); + flipVBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.camera_scaling); + resizeBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.camera_rotate); + + deleteBitmapWidth = (int) (deleteBitmap.getWidth() * BITMAP_SCALE); + deleteBitmapHeight = (int) (deleteBitmap.getHeight() 
* BITMAP_SCALE); + + resizeBitmapWidth = (int) (resizeBitmap.getWidth() * BITMAP_SCALE); + resizeBitmapHeight = (int) (resizeBitmap.getHeight() * BITMAP_SCALE); + + flipVBitmapWidth = (int) (flipVBitmap.getWidth() * BITMAP_SCALE); + flipVBitmapHeight = (int) (flipVBitmap.getHeight() * BITMAP_SCALE); + + topBitmapWidth = (int) (topBitmap.getWidth() * BITMAP_SCALE); + topBitmapHeight = (int) (topBitmap.getHeight() * BITMAP_SCALE); + + } + + private long preClicktime; + + private final long doubleClickTimeLimit = 200; + + @Override + public boolean onTouchEvent(MotionEvent event) { + int action = MotionEventCompat.getActionMasked(event); + boolean handled = true; + isInBitmap = false; + switch (action) { + case MotionEvent.ACTION_DOWN: + mTouchDownTime = System.currentTimeMillis(); + if (isInButton(event, dst_delete)) { + if (operationListener != null) { + operationListener.onDeleteClick(); + } + isDown = false; + isInRomate = false; + } else if (isInResize(event)) { + isInResize = true; + lastRotateDegree = rotationToStartPoint(event); + midPointToStartPoint(event); + lastLength = diagonalLength(event); + isDown = false; + isInRomate = false; + } else if (isInButton(event, dst_flipV)) { + PointF localPointF = new PointF(); + midDiagonalPoint(localPointF); + matrix.postScale(-1.0F, 1.0F, localPointF.x, localPointF.y); + isDown = false; + isInRomate = false; + invalidate(); + } else if (isInButton(event, dst_top)) { + if (operationListener != null) { + operationListener.onTop(this); + } + isDown = false; + direction++; + X = (dst_top.left + dst_resize.right) / 2; + Y = (dst_top.top + dst_resize.bottom) / 2; +// matrix.postRotate(lastRotateDegree, X, Y); + isInRomate = true; + invalidate(); + + } else if (isInBitmap(event)) { + isInSide = true; + lastX = event.getX(0); + lastY = event.getY(0); + isDown = true; + isMove = false; + isPointerDown = false; + isUp = false; + isInRomate = false; + isInBitmap = true; + isInputEdit = true; + + } else { + isInRomate = false; + 
handled = false; + } + break; + case MotionEvent.ACTION_POINTER_DOWN: + if (spacing(event) > pointerLimitDis) { + oldDis = spacing(event); + isPointerDown = true; + midPointToStartPoint(event); + } else { + isPointerDown = false; + } + isInSide = false; + isInResize = false; + isInRomate = false; + break; + case MotionEvent.ACTION_MOVE: + if(System.currentTimeMillis() - mTouchDownTime > 200) { + isInputEdit = false; + } + //双指缩放 + if (isPointerDown) { + float scale; + float disNew = spacing(event); + if (disNew == 0 || disNew < pointerLimitDis) { + scale = 1; + } else { + scale = disNew / oldDis; + //缩放缓慢 + scale = (scale - 1) * pointerZoomCoeff + 1; + } + float scaleTemp = (scale * Math.abs(dst_flipV.left - dst_resize.left)) / oringinWidth; + if (((scaleTemp <= MIN_SCALE)) && scale < 1 || + (scaleTemp >= MAX_SCALE) && scale > 1) { + scale = 1; + } else { + lastLength = diagonalLength(event); + } +// matrix.postScale(scale, scale, mid.x, mid.y); + matrix.postScale(scale, scale, X, Y); + isInRomate = false; + invalidate(); + } else if (isInResize) { + matrix.postRotate((rotationToStartPoint(event) - lastRotateDegree) * 2, mid.x, mid.y); + lastRotateDegree = rotationToStartPoint(event); + + float scale = diagonalLength(event) / lastLength; + + if (((diagonalLength(event) / halfDiagonalLength <= MIN_SCALE)) && scale < 1 || + (diagonalLength(event) / halfDiagonalLength >= MAX_SCALE) && scale > 1) { + scale = 1; + if (!isInResize(event)) { + isInResize = false; + } + } else { + lastLength = diagonalLength(event); + } +// matrix.postScale(scale, scale, mid.x, mid.y); + X = (dst_top.left + dst_resize.right) / 2; + Y = (dst_top.top + dst_resize.bottom) / 2; + + matrix.postScale(scale, scale, X, Y); + isInRomate = false; + invalidate(); + } else if (isInSide) { + //TODO 移动区域判断 不能超出屏幕 + float x = event.getX(0); + float y = event.getY(0); + //判断手指抖动距离 加上isMove判断 只要移动过 都是true + if (!isMove && Math.abs(x - lastX) < moveLimitDis + && Math.abs(y - lastY) < moveLimitDis) { + 
isMove = false; + } else { + isMove = true; + } + matrix.postTranslate(x - lastX, y - lastY); + lastX = x; + lastY = y; + isInRomate = false; + invalidate(); + } + + break; + case MotionEvent.ACTION_CANCEL: + case MotionEvent.ACTION_UP: + isInResize = false; + isInSide = false; + isPointerDown = false; + isUp = true; + + if (isInputEdit) { + isInputEdit = false; + if (isInEdit && operationListener != null) { + operationListener.onClick(this); + } + } + + + X = (dst_top.left + dst_resize.right) / 2; + Y = (dst_top.top + dst_resize.bottom) / 2; + rotateDegree = lastRotateDegree; + Log.e(TAG, "leftBottomX:" + leftBottomX); + Log.e(TAG, "leftBottomY:" + leftBottomY); + Log.e(TAG, "viewWidth:" + viewWidth); + Log.e(TAG, "viewHeight:" + viewHeight); +// Bitmap bitmap = Bitmap.createBitmap(mBitmap, 0, 0, mBitmap.getWidth(), mBitmap.getHeight(), matrix, true); +// Log.e(TAG, "bitmapWidth:" + bitmap.getWidth()); +// Log.e(TAG, "bitmapHeight:" + bitmap.getHeight()); + break; + + } + if (handled && operationListener != null) { + operationListener.onEdit(this); + } +// //判断是不是做了点击动作 必须在编辑状态 且在图片内 并且是双击 +// if (isDoubleClick && isDown && !isPointerDown && !isMove && isUp && isInBitmap && isInEdit && operationListener != null) { +// operationListener.onClick(this); +// } + return handled; + } + + public BubblePropertyModel calculate(BubblePropertyModel model) { + float[] v = new float[9]; + matrix.getValues(v); + // translation is simple + float tx = v[Matrix.MTRANS_X]; + float ty = v[Matrix.MTRANS_Y]; + Log.d(TAG, "tx : " + tx + " ty : " + ty); + // calculate real scale + float scalex = v[Matrix.MSCALE_X]; + float skewy = v[Matrix.MSKEW_Y]; + float rScale = (float) Math.sqrt(scalex * scalex + skewy * skewy); + Log.d(TAG, "rScale : " + rScale); + // calculate the degree of rotation + float rAngle = Math.round(Math.atan2(v[Matrix.MSKEW_X], v[Matrix.MSCALE_X]) * (180 / Math.PI)); + Log.d(TAG, "rAngle : " + rAngle); + + float minX = (dst_top.centerX() + dst_resize.centerX()) / 2; + 
float minY = (dst_top.centerY() + dst_resize.centerY()) / 2; + + Log.d(TAG, "midX : " + minX + " midY : " + minY); + + model.setDegree((float) Math.toRadians(rAngle)); + model.setBubbleId(bubbleId); + //TODO 占屏幕百分比 + float precentWidth = (mBitmap.getWidth() * rScale) / mScreenwidth; + model.setScaling(precentWidth); + Log.d(TAG, " x " + (minX / mScreenwidth) + " y " + (minY / mScreenwidth)); + model.setxLocation(minX / mScreenwidth); + model.setyLocation(minY / mScreenwidth); + model.setText(mStr); + return model; + } + + + /** + * 是否在四条线内部 + * + * @return + */ + private boolean isInBitmap(MotionEvent event) { + float[] arrayOfFloat1 = new float[9]; + this.matrix.getValues(arrayOfFloat1); + //左上角 + float f1 = 0.0F * arrayOfFloat1[0] + 0.0F * arrayOfFloat1[1] + arrayOfFloat1[2]; + float f2 = 0.0F * arrayOfFloat1[3] + 0.0F * arrayOfFloat1[4] + arrayOfFloat1[5]; + //右上角 + float f3 = arrayOfFloat1[0] * this.mBitmap.getWidth() + 0.0F * arrayOfFloat1[1] + arrayOfFloat1[2]; + float f4 = arrayOfFloat1[3] * this.mBitmap.getWidth() + 0.0F * arrayOfFloat1[4] + arrayOfFloat1[5]; + //左下角 + float f5 = 0.0F * arrayOfFloat1[0] + arrayOfFloat1[1] * this.mBitmap.getHeight() + arrayOfFloat1[2]; + float f6 = 0.0F * arrayOfFloat1[3] + arrayOfFloat1[4] * this.mBitmap.getHeight() + arrayOfFloat1[5]; + //右下角 + float f7 = arrayOfFloat1[0] * this.mBitmap.getWidth() + arrayOfFloat1[1] * this.mBitmap.getHeight() + arrayOfFloat1[2]; + float f8 = arrayOfFloat1[3] * this.mBitmap.getWidth() + arrayOfFloat1[4] * this.mBitmap.getHeight() + arrayOfFloat1[5]; + + float[] arrayOfFloat2 = new float[4]; + float[] arrayOfFloat3 = new float[4]; + //确定X方向的范围 + arrayOfFloat2[0] = f1;//左上的左 + arrayOfFloat2[1] = f3;//右上的右 + arrayOfFloat2[2] = f7;//右下的右 + arrayOfFloat2[3] = f5;//左下的左 + //确定Y方向的范围 + arrayOfFloat3[0] = f2;//左上的上 + arrayOfFloat3[1] = f4;//右上的上 + arrayOfFloat3[2] = f8; + arrayOfFloat3[3] = f6; + return pointInRect(arrayOfFloat2, arrayOfFloat3, event.getX(0), event.getY(0)); + } + + /** + * 
判断点是否在一个矩形内部 + * + * @param xRange + * @param yRange + * @param x + * @param y + * @return + */ + private boolean pointInRect(float[] xRange, float[] yRange, float x, float y) { + //四条边的长度 + double a1 = Math.hypot(xRange[0] - xRange[1], yRange[0] - yRange[1]); + double a2 = Math.hypot(xRange[1] - xRange[2], yRange[1] - yRange[2]); + double a3 = Math.hypot(xRange[3] - xRange[2], yRange[3] - yRange[2]); + double a4 = Math.hypot(xRange[0] - xRange[3], yRange[0] - yRange[3]); + //待检测点到四个点的距离 + double b1 = Math.hypot(x - xRange[0], y - yRange[0]); + double b2 = Math.hypot(x - xRange[1], y - yRange[1]); + double b3 = Math.hypot(x - xRange[2], y - yRange[2]); + double b4 = Math.hypot(x - xRange[3], y - yRange[3]); + + double u1 = (a1 + b1 + b2) / 2; + double u2 = (a2 + b2 + b3) / 2; + double u3 = (a3 + b3 + b4) / 2; + double u4 = (a4 + b4 + b1) / 2; + + //矩形的面积 + double s = a1 * a2; + double ss = Math.sqrt(u1 * (u1 - a1) * (u1 - b1) * (u1 - b2)) + + Math.sqrt(u2 * (u2 - a2) * (u2 - b2) * (u2 - b3)) + + Math.sqrt(u3 * (u3 - a3) * (u3 - b3) * (u3 - b4)) + + Math.sqrt(u4 * (u4 - a4) * (u4 - b4) * (u4 - b1)); + double distance = Math.abs(s - ss); + Log.e(TAG, "pointInRect: " + distance ); + return distance < 0.5; + + + } + + + private boolean isInButton(MotionEvent event, Rect rect) { + int left = rect.left; + int right = rect.right; + int top = rect.top; + int bottom = rect.bottom; + return event.getX(0) >= left && event.getX(0) <= right && event.getY(0) >= top && event.getY(0) <= bottom; + } + + private boolean isInResize(MotionEvent event) { + int left = -20 + this.dst_resize.left; + int top = -20 + this.dst_resize.top; + int right = 20 + this.dst_resize.right; + int bottom = 20 + this.dst_resize.bottom; + return event.getX(0) >= left && event.getX(0) <= right && event.getY(0) >= top && event.getY(0) <= bottom; + } + + private void midPointToStartPoint(MotionEvent event) { + float[] arrayOfFloat = new float[9]; + matrix.getValues(arrayOfFloat); + float f1 = 0.0f * 
arrayOfFloat[0] + 0.0f * arrayOfFloat[1] + arrayOfFloat[2]; + float f2 = 0.0f * arrayOfFloat[3] + 0.0f * arrayOfFloat[4] + arrayOfFloat[5]; + float f3 = f1 + event.getX(0); + float f4 = f2 + event.getY(0); + mid.set(f3 / 2, f4 / 2); + } + + private void midDiagonalPoint(PointF paramPointF) { + float[] arrayOfFloat = new float[9]; + this.matrix.getValues(arrayOfFloat); + float f1 = 0.0F * arrayOfFloat[0] + 0.0F * arrayOfFloat[1] + arrayOfFloat[2]; + float f2 = 0.0F * arrayOfFloat[3] + 0.0F * arrayOfFloat[4] + arrayOfFloat[5]; + float f3 = arrayOfFloat[0] * this.mBitmap.getWidth() + arrayOfFloat[1] * this.mBitmap.getHeight() + arrayOfFloat[2]; + float f4 = arrayOfFloat[3] * this.mBitmap.getWidth() + arrayOfFloat[4] * this.mBitmap.getHeight() + arrayOfFloat[5]; + float f5 = f1 + f3; + float f6 = f2 + f4; + paramPointF.set(f5 / 2.0F, f6 / 2.0F); + } + + + /** + * 在滑动过车中X,Y是不会改变的,这里减Y,减X,其实是相当于把X,Y当做原点 + * + * @param event + * @return + */ + private float rotationToStartPoint(MotionEvent event) { + + float[] arrayOfFloat = new float[9]; + matrix.getValues(arrayOfFloat); + float x = 0.0f * arrayOfFloat[0] + 0.0f * arrayOfFloat[1] + arrayOfFloat[2]; + float y = 0.0f * arrayOfFloat[3] + 0.0f * arrayOfFloat[4] + arrayOfFloat[5]; + double arc = Math.atan2(event.getY(0) - y, event.getX(0) - x); + return (float) Math.toDegrees(arc); + } + + /** + * 触摸点到矩形中点的距离 + * + * @param event + * @return + */ + private float diagonalLength(MotionEvent event) { + float diagonalLength = (float) Math.hypot(event.getX(0) - mid.x, event.getY(0) - mid.y); + return diagonalLength; + } + + /** + * Determine the space between the first two fingers + */ + private float spacing(MotionEvent event) { + if (event.getPointerCount() == 2) { + float x = event.getX(0) - event.getX(1); + float y = event.getY(0) - event.getY(1); + return (float) Math.sqrt(x * x + y * y); + } else { + return 0; + } + } + + public interface OperationListener { + void onDeleteClick(); + + void onEdit(BubbleTextView 
bubbleTextView); + + void onClick(BubbleTextView bubbleTextView); + + void onTop(BubbleTextView bubbleTextView); + } + + public void setOperationListener(OperationListener operationListener) { + this.operationListener = operationListener; + } + + public void setInEdit(boolean isInEdit) { + this.isInEdit = isInEdit; + invalidate(); + } + + /** + * 自动分割文本 + * + * @param content 需要分割的文本 + * @param p 画笔,用来根据字体测量文本的宽度 + * @param width 指定的宽度 + * @return 一个字符串数组,保存每行的文本 + */ + private String[] autoSplit(String content, Paint p, float width) { + int length = content.length(); + float textWidth = p.measureText(content); + if (textWidth <= width) { + return new String[]{content}; + } + + int start = 0, end = 1, i = 0; + int lines = (int) Math.ceil(textWidth / width); //计算行数 + String[] lineTexts = new String[lines]; + while (start < length) { + if (p.measureText(content, start, end) > width) { //文本宽度超出控件宽度时 + lineTexts[i++] = (String) content.subSequence(start, end); + start = end; + } + if (end == length) { //不足一行的文本 + lineTexts[i] = (String) content.subSequence(start, end); + break; + } + end += 1; + } + return lineTexts; + } + + public String getmStr() { + return mStr; + } + + public Bitmap getBitmap() { + Bitmap bitmap = Bitmap.createBitmap(mBitmap, 0, 0, mBitmap.getWidth(), mBitmap.getHeight(), matrix, true); + return bitmap; + } + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/DynamicImageView.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/DynamicImageView.java new file mode 100644 index 0000000..bb881c4 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/DynamicImageView.java @@ -0,0 +1,72 @@ +package com.aserbao.androidcustomcamera.whole.editVideo.view; + +import android.content.Context; +import android.graphics.Bitmap; +import android.support.annotation.Nullable; +import android.util.AttributeSet; +import android.widget.ImageView; + +import java.util.List; + 
+/** + *

    + *     author : Administrator (Jacket)
    + *     e-mail : 378315764@qq.com
    + *     time   : 2018/03/05
    + *     desc   :
    + *     version: 3.2
    + * 
    + */ + +public class DynamicImageView extends ImageView { + private long startTime; + private long endTime; + private int frameIndex; //播放gif图的第几帧 + private List bitmaps; + + public DynamicImageView(Context context) { + super(context); + } + + public DynamicImageView(Context context, @Nullable AttributeSet attrs) { + super(context, attrs); + } + + public DynamicImageView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) { + super(context, attrs, defStyleAttr); + } + + public long getStartTime() { + return startTime; + } + + public void setStartTime(long startTime) { + this.startTime = startTime; + } + + public long getEndTime() { + return endTime; + } + + public void setEndTime(long endTime) { + this.endTime = endTime; + } + + public int getFrameIndex() { + return frameIndex; + } + + public void setFrameIndex(int frameIndex) { + if (bitmaps != null && bitmaps.size() > 0) { + this.frameIndex = frameIndex % bitmaps.size(); + } + } + + public List getBitmaps() { + return bitmaps; + } + + public void setBitmaps(List bitmaps) { + this.bitmaps = bitmaps; + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/PopBubbleEditView.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/PopBubbleEditView.java new file mode 100644 index 0000000..91653b1 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/PopBubbleEditView.java @@ -0,0 +1,299 @@ +package com.aserbao.androidcustomcamera.whole.editVideo.view; + +import android.app.Activity; +import android.content.Context; +import android.graphics.drawable.BitmapDrawable; +import android.os.Handler; +import android.text.Editable; +import android.text.TextWatcher; +import android.util.Log; +import android.view.Gravity; +import android.view.LayoutInflater; +import android.view.MotionEvent; +import android.view.View; +import android.view.ViewGroup; +import android.view.WindowManager; +import 
android.view.inputmethod.InputMethodManager; +import android.widget.Button; +import android.widget.CheckBox; +import android.widget.EditText; +import android.widget.PopupWindow; +import android.widget.RelativeLayout; +import android.widget.TextView; +import android.widget.Toast; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.base.MyApplication; + +import java.util.LinkedHashMap; + +import butterknife.BindView; +import butterknife.ButterKnife; +import butterknife.OnClick; + +/** + *
    + *     author : Administrator (Jacket)
    + *     e-mail : 378315764@qq.com
    + *     time   : 2018/02/10
    + *     desc   :
    + *     version: 3.2
    + * 
    + */ + +public class PopBubbleEditView { + @BindView(R.id.ed_content) + EditText edContent; + @BindView(R.id.tv_confirm) + TextView tvConfirm; + private String TAG = PopBubbleEditView.class.getSimpleName(); + + private Context context; + private PopupWindow popupWindow; + private View popupWindowView; + + private RelativeLayout rlReleasePornographicContent; + + private RelativeLayout rlIssueViolenceContent; + + private RelativeLayout rlHarass; + + private Button submit; + + private CheckBox cbReleasePornographicContent; + + private CheckBox cbIssueViolenceContent; + + private CheckBox cbHarass; + + private String fkMobile; + + private String clientToken; + + private String fkMobileWasReport; + + private String reportType; + + private String reportDec; + + private String initText; + + + public PopBubbleEditView(Context context) { + this.context = context; + initPopupWindow(); + } + + /** + * 初始化 + */ + public void initPopupWindow() { + if (popupWindowView != null) { + popupWindow.dismiss(); + } + + + popupWindowView = LayoutInflater.from(context).inflate(R.layout.pop_bubble_edit_view, null); + ButterKnife.bind(this, popupWindowView); + popupWindow = new PopupWindow(popupWindowView, ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT, true); +// popupWindow.setAnimationStyle(R.style.popup_window_scale); + popupWindow.setSoftInputMode(PopupWindow.INPUT_METHOD_NEEDED); + popupWindow.setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_ADJUST_RESIZE); + // 菜单背景色。加了一点透明度 +// ColorDrawable dw = new ColorDrawable(0xddffffff); +// popupWindow.setBackgroundDrawable(dw); + popupWindow.setOutsideTouchable(true); + + popupWindow.setBackgroundDrawable(new BitmapDrawable()); //解决部分机型按back键无法退出popupwindow + + + // 设置背景半透明 + popupWindow.setOnDismissListener(new popupDismissListener()); + + popupWindowView.setOnTouchListener(new View.OnTouchListener() { + + @Override + public boolean onTouch(View v, MotionEvent event) { + /* + * if( popupWindow!=null && 
popupWindow.isShowing()){ + * popupWindow.dismiss(); popupWindow=null; } + */ + // 这里如果返回true的话,touch事件将被拦截 + // 拦截后 PopupWindow的onTouchEvent不被调用,这样点击外部区域无法dismiss + return false; + } + }); + + edContent.addTextChangedListener(textWatcher); + + } + + private TextWatcher textWatcher = new TextWatcher() { + @Override + public void beforeTextChanged(CharSequence s, int start, int count, int after) { + + } + + @Override + public void onTextChanged(CharSequence s, int start, int before, int count) { + if(s != null && s.toString().length() > 30){ + String tempStr = s.toString().substring(0,30); + edContent.removeTextChangedListener(textWatcher); + edContent.setText(tempStr); + edContent.setSelection(tempStr.length()); + edContent.addTextChangedListener(textWatcher); + Toast.makeText(context,"输入文字不能超过30个", Toast.LENGTH_SHORT).show(); + } + } + + @Override + public void afterTextChanged(Editable s) { + + } + }; + + + private View.OnClickListener onClickListener = new View.OnClickListener() { + @Override + public void onClick(View v) { + switch (v.getId()) { + case R.id.cb_release_pornographic_content: //发布色情内容 + Log.e(TAG, " 发布色情内容"); + cbReleasePornographicContent.setChecked(true); + cbHarass.setChecked(false); + cbIssueViolenceContent.setChecked(false); + reportType = "0"; + break; + case R.id.cb_issue_violence_content: //发布暴力内容 + Log.e(TAG, "发布暴力内容"); + cbIssueViolenceContent.setChecked(true); + cbHarass.setChecked(false); + cbReleasePornographicContent.setChecked(false); + reportType = "1"; + break; + case R.id.cb_harass: //被骚扰 + Log.e(TAG, "被骚扰"); + cbHarass.setChecked(true); + cbReleasePornographicContent.setChecked(false); + cbIssueViolenceContent.setChecked(false); + reportType = "2"; + break; + case R.id.btn_submit: //提交 + Log.e(TAG, "提交"); + if (reportType == null || reportType.equals("")) { + Toast.makeText(context, "请至少选择一个", Toast.LENGTH_SHORT).show(); + return; + } + + break; + } + } + }; + + + /** + * 设置添加屏幕的背景透明度 + * + * @param bgAlpha + */ + public void 
backgroundAlpha(float bgAlpha) { + WindowManager.LayoutParams lp = ((Activity) context).getWindow().getAttributes(); + lp.alpha = bgAlpha; // 0.0-1.0 + ((Activity) context).getWindow().setAttributes(lp); + } + + @OnClick({R.id.ed_content, R.id.tv_confirm}) + public void onViewClicked(View view) { + switch (view.getId()) { + case R.id.ed_content: + break; + case R.id.tv_confirm: + if (edContent.getText().toString().length() > 60) { + Toast.makeText(context,"输入字符不能超过30个", Toast.LENGTH_SHORT).show(); + return; + } + if (onTextSendListener != null) { + onTextSendListener.onTextSend(edContent.getText().toString()); + } + edContent.setText(""); + dimss(); + break; + } + } + + + class popupDismissListener implements PopupWindow.OnDismissListener { + @Override + public void onDismiss() { + backgroundAlpha(1f); + } + } + + public void dimss() { + if (popupWindow != null) { + popupWindow.dismiss(); + } + } + + public boolean isShowing() { + return popupWindow.isShowing(); + } + + ; + + public void show(String initText) { + if (popupWindow != null && !popupWindow.isShowing()) { + if(!initText.equals("点击输入文字")){ + this.initText = initText; + edContent.setText(initText); + edContent.setSelection(initText.length()); + } + InputMethodManager imm = (InputMethodManager) context.getSystemService(Context.INPUT_METHOD_SERVICE); + //这里给它设置了弹出的时间, + imm.toggleSoftInput(1000, InputMethodManager.HIDE_NOT_ALWAYS); + //TODO 注意:这里的 R.layout.activity_main,不是固定的。你想让这个popupwindow盖在哪个界面上面。就写哪个界面的布局。这里以主界面为例 + popupWindow.showAtLocation(LayoutInflater.from(context).inflate(R.layout.base_activity, null), + Gravity.BOTTOM, 0, 0); + } + } + + + public interface OnTextSendListener{ + void onTextSend(String text); + } + + public OnTextSendListener onTextSendListener; + + public void setOnTextSendListener(OnTextSendListener onTextSendListener){ + this.onTextSendListener = onTextSendListener; + } + + + private PopTopTipWindow topTipWindow; + private long last = 0; + public void showPop(String s,Context 
mContext){ + long star = System.currentTimeMillis(); + long cha = star - last; + if(cha/1000 < 5){ + return; + } + last = star; + if(topTipWindow != null && topTipWindow.isShowing()){ + topTipWindow.dimss(); + topTipWindow = null; + }else { + topTipWindow = new PopTopTipWindow(mContext, s); + if (!((Activity) mContext).isFinishing()) { + new Handler().postDelayed(new Runnable() { + @Override + public void run() { + if (topTipWindow != null && topTipWindow.isShowing()) { + topTipWindow.dimss(); + } + } + }, 1000); + } + } + } + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/PopBubbleView.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/PopBubbleView.java new file mode 100644 index 0000000..2e4bd46 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/PopBubbleView.java @@ -0,0 +1,185 @@ +package com.aserbao.androidcustomcamera.whole.editVideo.view; + +import android.app.Activity; +import android.content.Context; +import android.graphics.drawable.BitmapDrawable; +import android.view.Gravity; +import android.view.LayoutInflater; +import android.view.MotionEvent; +import android.view.View; +import android.view.ViewGroup; +import android.view.WindowManager; +import android.widget.PopupWindow; +import android.widget.Toast; + +import com.aserbao.androidcustomcamera.R; + +import butterknife.ButterKnife; +import butterknife.OnClick; + +/** + *
    + *     author : Administrator (Jacket)
    + *     e-mail : 378315764@qq.com
    + *     time   : 2018/01/31
    + *     desc   :
    + *     version: 3.2
    + * 
    + */ + +public class PopBubbleView { + private String TAG = PopBubbleView.class.getSimpleName(); + + private Context context; + private PopupWindow popupWindow; + private View popupWindowView; + private String reportType; + + public PopBubbleView(Context context) { + this.context = context; + initPopupWindow(); + } + + /** + * 初始化 + */ + public void initPopupWindow() { + if (popupWindowView != null) { + popupWindow.dismiss(); + } + popupWindowView = LayoutInflater.from(context).inflate(R.layout.pop_bubble_view, null); + ButterKnife.bind(this, popupWindowView); + popupWindow = new PopupWindow(popupWindowView, ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT, true); +// popupWindow.setAnimationStyle(R.style.popup_window_scale); + popupWindow.setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_ADJUST_RESIZE); + // 菜单背景色。加了一点透明度 +// ColorDrawable dw = new ColorDrawable(0xddffffff); +// popupWindow.setBackgroundDrawable(dw); + popupWindow.setOutsideTouchable(true); + popupWindow.setBackgroundDrawable(new BitmapDrawable()); //解决部分机型按back键无法退出popupwindow + + + // 设置背景半透明 +// backgroundAlpha(0.7f); + + popupWindow.setOnDismissListener(new popupDismissListener()); + + popupWindowView.setOnTouchListener(new View.OnTouchListener() { + + @Override + public boolean onTouch(View v, MotionEvent event) { + /* + * if( popupWindow!=null && popupWindow.isShowing()){ + * popupWindow.dismiss(); popupWindow=null; } + */ + // 这里如果返回true的话,touch事件将被拦截 + // 拦截后 PopupWindow的onTouchEvent不被调用,这样点击外部区域无法dismiss + return false; + } + }); + + + } + + + /** + * 设置添加屏幕的背景透明度 + * + * @param bgAlpha + */ + public void backgroundAlpha(float bgAlpha) { + WindowManager.LayoutParams lp = ((Activity) context).getWindow().getAttributes(); + lp.alpha = bgAlpha; // 0.0-1.0 + ((Activity) context).getWindow().setAttributes(lp); + } + + @OnClick({R.id.ll_bubble_one, R.id.ll_bubble_two, R.id.ll_bubble_three, R.id.ll_bubble_four, R.id.ll_bubble_five, R.id.ll_bubble_six, 
R.id.ll_bubble_seven, R.id.ll_bubble_eight}) + public void onViewClicked(View view) { + switch (view.getId()) { + case R.id.ll_bubble_one: + if (bubbleSelectListener != null) { + bubbleSelectListener.bubbleSelect(0); + } + dimss(); + break; + case R.id.ll_bubble_two: + if (bubbleSelectListener != null) { + bubbleSelectListener.bubbleSelect(1); + } + dimss(); + break; + case R.id.ll_bubble_three: + if (bubbleSelectListener != null) { + bubbleSelectListener.bubbleSelect(2); + } + dimss(); + break; + case R.id.ll_bubble_four: + if (bubbleSelectListener != null) { + bubbleSelectListener.bubbleSelect(3); + } + dimss(); + break; + case R.id.ll_bubble_five: + if (bubbleSelectListener != null) { + bubbleSelectListener.bubbleSelect(4); + } + dimss(); + break; + case R.id.ll_bubble_six: + if (bubbleSelectListener != null) { + bubbleSelectListener.bubbleSelect(5); + } + dimss(); + break; + case R.id.ll_bubble_seven: + if (bubbleSelectListener != null) { + bubbleSelectListener.bubbleSelect(6); + } + dimss(); + break; + case R.id.ll_bubble_eight: + if (bubbleSelectListener != null) { + bubbleSelectListener.bubbleSelect(7); + } + dimss(); + break; + } + } + + + class popupDismissListener implements PopupWindow.OnDismissListener { + @Override + public void onDismiss() { + backgroundAlpha(1f); + } + } + + public void dimss() { + if (popupWindow != null) { + popupWindow.dismiss(); + } + } + + public boolean isShowing() { + return popupWindow.isShowing(); + } + + + public void show() { + if (popupWindow != null && !popupWindow.isShowing()) { + popupWindow.showAtLocation(LayoutInflater.from(context).inflate(R.layout.activity_update_personal_info, null), + Gravity.BOTTOM, 0, 0); + } + } + + public interface BubbleSelectListener { + void bubbleSelect(int bubbleIndex); + } + + public BubbleSelectListener bubbleSelectListener; + + public void setBubbleSelectListener(BubbleSelectListener bubbleSelectListener) { + this.bubbleSelectListener = bubbleSelectListener; + } +} diff --git 
a/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/PopPasterView.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/PopPasterView.java new file mode 100644 index 0000000..3d66d47 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/PopPasterView.java @@ -0,0 +1,183 @@ +package com.aserbao.androidcustomcamera.whole.editVideo.view; + +import android.app.Activity; +import android.content.Context; +import android.graphics.drawable.BitmapDrawable; +import android.support.v7.widget.GridLayoutManager; +import android.support.v7.widget.RecyclerView; +import android.util.Log; +import android.view.Gravity; +import android.view.LayoutInflater; +import android.view.MotionEvent; +import android.view.View; +import android.view.ViewGroup; +import android.view.WindowManager; +import android.widget.PopupWindow; +import android.widget.Toast; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.whole.editVideo.adpaters.PasterAdapter; + +import butterknife.BindView; +import butterknife.ButterKnife; + +/** + *
    + *     author : Administrator (Jacket)
    + *     e-mail : 378315764@qq.com
    + *     time   : 2018/01/31
    + *     desc   :
    + *     version: 3.2
    + * 
    + */ + +public class PopPasterView implements PasterAdapter.PasterItemSelectListener { + private String TAG = PopPasterView.class.getSimpleName(); + private Context context; + private PopupWindow popupWindow; + private View popupWindowView; + private String reportType; + + @BindView(R.id.recycler_view) + RecyclerView recyclerView; + + private int[] images = new int[]{ + R.drawable.aini, R.drawable.dengliao, R.drawable.baituole, R.drawable.burangwo, R.drawable.bufuhanzhe, R.drawable.nizabushagntian, R.drawable.zan, R.drawable.mudengkoudai, R.drawable.buyue, R.drawable.nizaidouwo, R.drawable.gandepiaoliang, R.drawable.xiase + }; + + public PopPasterView(Context context) { + this.context = context; + initPopupWindow(); + } + + /** + * 初始化 + */ + public void initPopupWindow() { + if (popupWindowView != null) { + popupWindow.dismiss(); + } + popupWindowView = LayoutInflater.from(context).inflate(R.layout.pop_paster_view, null); + ButterKnife.bind(this, popupWindowView); + popupWindow = new PopupWindow(popupWindowView, ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT, true); +// popupWindow.setAnimationStyle(R.style.popup_window_scale); + popupWindow.setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_ADJUST_RESIZE); + // 菜单背景色。加了一点透明度 +// ColorDrawable dw = new ColorDrawable(0xddffffff); +// popupWindow.setBackgroundDrawable(dw); + popupWindow.setOutsideTouchable(true); + popupWindow.setBackgroundDrawable(new BitmapDrawable()); //解决部分机型按back键无法退出popupwindow + + + // 设置背景半透明 +// backgroundAlpha(0.7f); + popupWindow.setOnDismissListener(new popupDismissListener()); + + popupWindowView.setOnTouchListener(new View.OnTouchListener() { + + @Override + public boolean onTouch(View v, MotionEvent event) { + /* + * if( popupWindow!=null && popupWindow.isShowing()){ + * popupWindow.dismiss(); popupWindow=null; } + */ + // 这里如果返回true的话,touch事件将被拦截 + // 拦截后 PopupWindow的onTouchEvent不被调用,这样点击外部区域无法dismiss + return false; + } + }); + + initView(); + } + + 
private void initView() { + PasterAdapter pasterAdapter = new PasterAdapter(context, images); + pasterAdapter.setPasterItemSelectListener(this); + recyclerView.setAdapter(pasterAdapter); + recyclerView.setLayoutManager(new GridLayoutManager(context, 4)); + } + + + private View.OnClickListener onClickListener = new View.OnClickListener() { + @Override + public void onClick(View v) { + switch (v.getId()) { + case R.id.cb_release_pornographic_content: //发布色情内容 + Log.e(TAG, " 发布色情内容"); + reportType = "0"; + break; + case R.id.cb_issue_violence_content: //发布暴力内容 + Log.e(TAG, "发布暴力内容"); + reportType = "1"; + break; + case R.id.cb_harass: //被骚扰 + Log.e(TAG, "被骚扰"); + reportType = "2"; + break; + case R.id.btn_submit: //提交 + Log.e(TAG, "提交"); + if (reportType == null || reportType.equals("")) { + Toast.makeText(context, "请至少选择一个", Toast.LENGTH_LONG).show(); + return; + } + break; + } + } + }; + + + /** + * 设置添加屏幕的背景透明度 + * + * @param bgAlpha + */ + public void backgroundAlpha(float bgAlpha) { + WindowManager.LayoutParams lp = ((Activity) context).getWindow().getAttributes(); + lp.alpha = bgAlpha; // 0.0-1.0 + ((Activity) context).getWindow().setAttributes(lp); + } + + + class popupDismissListener implements PopupWindow.OnDismissListener { + @Override + public void onDismiss() { + backgroundAlpha(1f); + } + } + + public void dimss() { + if (popupWindow != null) { + popupWindow.dismiss(); + } + } + + public boolean isShowing() { + return popupWindow.isShowing(); + } + + ; + + public void show() { + if (popupWindow != null && !popupWindow.isShowing()) { + popupWindow.showAtLocation(LayoutInflater.from(context).inflate(R.layout.activity_update_personal_info, null), + Gravity.BOTTOM, 0, 0); + } + } + + public interface PasterSelectListener { + void pasterSelect(int resourceId, int gifId); + } + + PasterSelectListener pasterSelectListener; + + public void setPasterSelectListener(PasterSelectListener pasterSelectListener) { + this.pasterSelectListener = pasterSelectListener; + } 
+ + + @Override + public void pasterItemSelect(int resourseId, int gifId) { + pasterSelectListener.pasterSelect(resourseId, gifId); + dimss(); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/PopTopTipWindow.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/PopTopTipWindow.java new file mode 100644 index 0000000..e3cb860 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/PopTopTipWindow.java @@ -0,0 +1,154 @@ +package com.aserbao.androidcustomcamera.whole.editVideo.view; + +import android.app.Activity; +import android.content.Context; +import android.graphics.drawable.BitmapDrawable; +import android.os.Handler; +import android.view.Gravity; +import android.view.LayoutInflater; +import android.view.MotionEvent; +import android.view.View; +import android.view.ViewGroup; +import android.view.WindowManager; +import android.widget.PopupWindow; +import android.widget.TextView; + +import com.aserbao.androidcustomcamera.R; + +/** + * 从顶部出来的PopupWindow + */ + +public class PopTopTipWindow { + + Context context; + private PopupWindow popupWindow; + View popupWindowView; + private String content; + private TextView tvContent; + private int layout = -1; + + public PopTopTipWindow(Context context) { + this.context = context; + initPopupWindow(); + } + + public PopTopTipWindow(Context context, String content) { + this.context = context; + this.content = content; + initPopupWindow(); + } + + public PopTopTipWindow(Context context, String content, int layout) { + this.context = context; + this.content = content; + this.layout = layout; + } + + /** + * 初始化 + */ + public void initPopupWindow() { + if (popupWindowView != null) { + popupWindow.dismiss(); + } + popupWindowView = LayoutInflater.from(context).inflate(R.layout.pop_phone_tip, null); + popupWindow = new PopupWindow(popupWindowView, ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT, true); + 
popupWindow.setAnimationStyle(R.style.TopSelectAnimationShow); + // 菜单背景色。加了一点透明度 +// ColorDrawable dw = new ColorDrawable(0xddffffff); +// popupWindow.setBackgroundDrawable(dw); + popupWindow.setOutsideTouchable(true); + popupWindow.setBackgroundDrawable(new BitmapDrawable()); //解决部分机型按back键无法退出popupwindow + + // 设置背景半透明 + backgroundAlpha(0.7f); + +// popupWindow.setOutsideTouchable(true); + popupWindow.setFocusable(true); + popupWindow.setOnDismissListener(new PopTopTipWindow.popupDismissListener()); + + popupWindowView.setOnTouchListener(new View.OnTouchListener() { + + @Override + public boolean onTouch(View v, MotionEvent event) { + /* + * if( popupWindow!=null && popupWindow.isShowing()){ + * popupWindow.dismiss(); popupWindow=null; } + */ + // 这里如果返回true的话,touch事件将被拦截 + // 拦截后 PopupWindow的onTouchEvent不被调用,这样点击外部区域无法dismiss + return false; + } + }); + + + if (content != null && !content.equals("")) { + tvContent = (TextView) popupWindowView.findViewById(R.id.tv_content); + tvContent.setText(content); + } + + //TODO 注意:这里的 R.layout.activity_main,不是固定的。你想让这个popupwindow盖在哪个界面上面。就写哪个界面的布局。这里以主界面为例 + + popupWindow.showAtLocation(LayoutInflater.from(context).inflate(layout, null), + Gravity.TOP | Gravity.CENTER_HORIZONTAL, 0, 0); + + + } + + public boolean isShowing() { + return popupWindow.isShowing(); + } + + /** + * 处理点击事件 + */ + private void dealWithSelect() { + //点击了关闭图标(右上角图标) +// popupWindowView.findViewById(R.id.cancel).setOnClickListener(new View.OnClickListener() { +// @Override +// public void onClick(View v) { +// dimss(); +// } +// }); + + + } + + + /** + * 设置添加屏幕的背景透明度 + * + * @param bgAlpha + */ + public void backgroundAlpha(float bgAlpha) { + WindowManager.LayoutParams lp = ((Activity) context).getWindow().getAttributes(); + lp.alpha = bgAlpha; // 0.0-1.0 + ((Activity) context).getWindow().setAttributes(lp); + } + + class popupDismissListener implements PopupWindow.OnDismissListener { + + @Override + public void onDismiss() { + backgroundAlpha(1f); 
+ } + } + public void delayMiss(int mill){ + new Handler().postDelayed(new Runnable() { + @Override + public void run() { + if (popupWindow != null) { + popupWindow.dismiss(); + popupWindow = null; + } + } + },mill); + } + + public void dimss() { + if (popupWindow != null && popupWindow.isShowing()) { + popupWindow.dismiss(); + } + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/StickInfoImageView.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/StickInfoImageView.java new file mode 100644 index 0000000..8c4851c --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/StickInfoImageView.java @@ -0,0 +1,46 @@ +package com.aserbao.androidcustomcamera.whole.editVideo.view; + +import android.content.Context; +import android.support.annotation.Nullable; +import android.util.AttributeSet; +import android.widget.ImageView; + +/** + * Created by msi- on 2018/2/7. + */ + +public class StickInfoImageView extends ImageView { + private long startTime; + private long endTime; + + + + public StickInfoImageView(Context context) { + super(context); + } + + public StickInfoImageView(Context context, @Nullable AttributeSet attrs) { + super(context, attrs); + } + + public StickInfoImageView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) { + super(context, attrs, defStyleAttr); + } + + + public long getStartTime() { + return startTime; + } + + public void setStartTime(long startTime) { + this.startTime = startTime; + } + + public long getEndTime() { + return endTime; + } + + public void setEndTime(long endTime) { + this.endTime = endTime; + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/StickerView.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/StickerView.java new file mode 100644 index 0000000..7b08574 --- /dev/null +++ 
b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/StickerView.java @@ -0,0 +1,706 @@ +package com.aserbao.androidcustomcamera.whole.editVideo.view; + +import android.content.Context; +import android.graphics.Bitmap; +import android.graphics.BitmapFactory; +import android.graphics.Canvas; +import android.graphics.Matrix; +import android.graphics.Paint; +import android.graphics.PointF; +import android.graphics.Rect; +import android.support.v4.view.MotionEventCompat; +import android.util.AttributeSet; +import android.util.DisplayMetrics; +import android.util.Log; +import android.view.MotionEvent; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.whole.editVideo.beans.StickerPropertyModel; + + +/** + * 表情贴纸 + */ +public class StickerView extends BaseImageView { + private static final String TAG = "StickerView"; + + private Bitmap deleteBitmap; + private Bitmap flipVBitmap; + private Bitmap topBitmap; + private Bitmap resizeBitmap; + private Bitmap mBitmap; + private Rect dst_delete; + private Rect dst_resize; + private Rect dst_flipV; + private Rect dst_top; + private int deleteBitmapWidth; + private int deleteBitmapHeight; + private int resizeBitmapWidth; + private int resizeBitmapHeight; + //水平镜像 + private int flipVBitmapWidth; + private int flipVBitmapHeight; + //置顶 + private int topBitmapWidth; + private int topBitmapHeight; + private Paint localPaint; + private int mScreenwidth, mScreenHeight; + private static final float BITMAP_SCALE = 0.7f; + private PointF mid = new PointF(); + private OperationListener operationListener; + private float lastRotateDegree; + + //是否是第二根手指放下 + private boolean isPointerDown = false; + //手指移动距离必须超过这个数值 + private final float pointerLimitDis = 20f; + private final float pointerZoomCoeff = 0.09f; + /** + * 对角线的长度 + */ + private float lastLength; + private boolean isInResize = false; + +// private Matrix matrix = new Matrix(); + /** + * 是否在四条线内部 + */ + private boolean isInSide; + 
+ private float lastX, lastY; + /** + * 是否在编辑模式 + */ + private boolean isInEdit = true; + + private float MIN_SCALE = 0.5f; + + private float MAX_SCALE = 1.2f; + + private double halfDiagonalLength; + + private float oringinWidth = 0; + + //双指缩放时的初始距离 + private float oldDis; + + private final long stickerId; + + private DisplayMetrics dm; + + //水平镜像 + private boolean isHorizonMirror = false; + + private int resourceId; + + private boolean isInRomate = false; + + + public StickerView(Context context, AttributeSet attrs) { + super(context, attrs); + stickerId = 0; + init(); + } + + public StickerView(Context context) { + super(context); + stickerId = 0; + init(); + } + + public StickerView(Context context, AttributeSet attrs, int defStyleAttr) { + super(context, attrs, defStyleAttr); + stickerId = 0; + init(); + } + + private void init() { + dst_delete = new Rect(); + dst_resize = new Rect(); + dst_flipV = new Rect(); + dst_top = new Rect(); + localPaint = new Paint(); + localPaint.setColor(getResources().getColor(R.color.white)); + localPaint.setAntiAlias(true); + localPaint.setDither(true); + localPaint.setStyle(Paint.Style.STROKE); + localPaint.setStrokeWidth(2.0f); + dm = getResources().getDisplayMetrics(); + mScreenwidth = dm.widthPixels; + mScreenHeight = dm.heightPixels; + } + + @Override + protected void onDraw(Canvas canvas) { + super.onDraw(canvas); + Log.e(TAG, "onDraw方法----->"); + if (mBitmap != null) { + + float[] arrayOfFloat = new float[9]; + matrix.getValues(arrayOfFloat); + float f1 = 0.0F * arrayOfFloat[0] + 0.0F * arrayOfFloat[1] + arrayOfFloat[2]; + float f2 = 0.0F * arrayOfFloat[3] + 0.0F * arrayOfFloat[4] + arrayOfFloat[5]; + float f3 = arrayOfFloat[0] * this.mBitmap.getWidth() + 0.0F * arrayOfFloat[1] + arrayOfFloat[2]; + float f4 = arrayOfFloat[3] * this.mBitmap.getWidth() + 0.0F * arrayOfFloat[4] + arrayOfFloat[5]; + float f5 = 0.0F * arrayOfFloat[0] + arrayOfFloat[1] * this.mBitmap.getHeight() + arrayOfFloat[2]; + float f6 = 0.0F * 
arrayOfFloat[3] + arrayOfFloat[4] * this.mBitmap.getHeight() + arrayOfFloat[5]; + float f7 = arrayOfFloat[0] * this.mBitmap.getWidth() + arrayOfFloat[1] * this.mBitmap.getHeight() + arrayOfFloat[2]; + float f8 = arrayOfFloat[3] * this.mBitmap.getWidth() + arrayOfFloat[4] * this.mBitmap.getHeight() + arrayOfFloat[5]; + + canvas.save(); + canvas.drawBitmap(mBitmap, matrix, null); + //删除在右上角 + dst_delete.left = (int) (f3 - deleteBitmapWidth / 2); + dst_delete.right = (int) (f3 + deleteBitmapWidth / 2); + dst_delete.top = (int) (f4 - deleteBitmapHeight / 2); + dst_delete.bottom = (int) (f4 + deleteBitmapHeight / 2); + //拉伸等操作在右下角 + dst_resize.left = (int) (f7 - resizeBitmapWidth / 2); + dst_resize.right = (int) (f7 + resizeBitmapWidth / 2); + dst_resize.top = (int) (f8 - resizeBitmapHeight / 2); + dst_resize.bottom = (int) (f8 + resizeBitmapHeight / 2); + //垂直镜像在左上角 + dst_top.left = (int) (f1 - flipVBitmapWidth / 2); + dst_top.right = (int) (f1 + flipVBitmapWidth / 2); + dst_top.top = (int) (f2 - flipVBitmapHeight / 2); + dst_top.bottom = (int) (f2 + flipVBitmapHeight / 2); + //水平镜像在左下角 + dst_flipV.left = (int) (f5 - topBitmapWidth / 2); + dst_flipV.right = (int) (f5 + topBitmapWidth / 2); + dst_flipV.top = (int) (f6 - topBitmapHeight / 2); + dst_flipV.bottom = (int) (f6 + topBitmapHeight / 2); + + + leftBottomX = dst_delete.centerX(); + leftBottomX = leftBottomX > dst_resize.centerX() ? dst_resize.centerX() : leftBottomX; + leftBottomX = leftBottomX > dst_top.centerX() ? dst_top.centerX() : leftBottomX; + leftBottomX = leftBottomX > dst_flipV.centerX() ? dst_flipV.centerX() : leftBottomX; + + leftBottomY = dst_flipV.centerY(); + leftBottomY = leftBottomY < dst_resize.centerY() ? dst_resize.centerY() : leftBottomY; + leftBottomY = leftBottomY < dst_top.centerY() ? dst_top.centerY() : leftBottomY; + leftBottomY = leftBottomY < dst_flipV.centerY() ? 
dst_flipV.centerY() : leftBottomY; + + float distanceX = Math.abs(dst_delete.centerX() - dst_top.centerX()); + float distanceY = Math.abs(dst_delete.centerY() - dst_top.centerY()); + viewWidth = (float) Math.sqrt(distanceX * distanceX + distanceY * distanceY); + float distanceHeightX = Math.abs(dst_resize.centerX() - dst_delete.centerX()); + float distanceHeightY = Math.abs(dst_resize.centerY() - dst_delete.centerY()); + viewHeight = (float) Math.sqrt(distanceHeightX * distanceHeightX + distanceHeightY * distanceHeightY); + + if (isInEdit) { + canvas.drawLine(f1, f2, f3, f4, localPaint); + canvas.drawLine(f3, f4, f7, f8, localPaint); + canvas.drawLine(f5, f6, f7, f8, localPaint); + canvas.drawLine(f5, f6, f1, f2, localPaint); + + canvas.drawBitmap(deleteBitmap, null, dst_delete, null); + canvas.drawBitmap(resizeBitmap, null, dst_resize, null); +// canvas.drawBitmap(flipVBitmap, null, dst_flipV, null); +// canvas.drawBitmap(topBitmap, null, dst_top, null); + } + +// if(isInRomate){ //恢复isInRomate设置 +// isInRomate = false; +// } + canvas.restore(); + } + } + + @Override + public void setImageResource(int resId) { + this.resourceId = resId; + setBitmap(BitmapFactory.decodeResource(getResources(), resId)); + } + + + public void setParentSize(int mScreenwidth, int mScreenHeight) { +// this.mScreenwidth = mScreenwidth; +// this.mScreenHeight = mScreenHeight; + this.mScreenwidth = mScreenwidth > mScreenHeight ? mScreenHeight : mScreenwidth; + this.mScreenHeight = mScreenwidth > mScreenHeight ? 
mScreenwidth : mScreenHeight; + } + + public void setBitmap(Bitmap bitmap) { + matrix.reset(); + mBitmap = bitmap; + setDiagonalLength(); + initBitmaps(); + int w = mBitmap.getWidth(); + int h = mBitmap.getHeight(); + oringinWidth = w; + float initScale = (MIN_SCALE + MAX_SCALE) / 2; + matrix.postScale(initScale, initScale, w / 2, h / 2); + mScaleX = initScale; + mScaleY = initScale; + //Y坐标为 (顶部操作栏+正方形图)/2 + matrix.postTranslate(mScreenwidth / 2 - w / 2, (mScreenwidth) / 2 - h / 2); + posX = mScreenwidth / 2 - w / 2; + posY = (mScreenwidth) / 2 - h / 2; + invalidate(); + } + + public void changeBitmap(Bitmap bitmap) { + this.mBitmap = bitmap; + invalidate(); + } + + public float getPosX() { + return posX; + } + + public float getPosY() { + return posY; + } + + private void setDiagonalLength() { + halfDiagonalLength = Math.hypot(mBitmap.getWidth(), mBitmap.getHeight()) / 2; + } + + private void initBitmaps() { + //当图片的宽比高大时 按照宽计算 缩放大小根据图片的大小而改变 最小为图片的1/8 最大为屏幕宽 + if (mBitmap.getWidth() >= mBitmap.getHeight()) { + float minWidth = mScreenwidth / 8; + if (mBitmap.getWidth() < minWidth) { + MIN_SCALE = 1f; + } else { + MIN_SCALE = 1.0f * minWidth / mBitmap.getWidth(); + } + + if (mBitmap.getWidth() > mScreenwidth) { + MAX_SCALE = 1; + } else { + MAX_SCALE = 1.0f * mScreenwidth / mBitmap.getWidth(); + } + } else { + //当图片高比宽大时,按照图片的高计算 + float minHeight = mScreenwidth / 8; + if (mBitmap.getHeight() < minHeight) { + MIN_SCALE = 1f; + } else { + MIN_SCALE = 1.0f * minHeight / mBitmap.getHeight(); + } + + if (mBitmap.getHeight() > mScreenwidth) { + MAX_SCALE = 1; + } else { + MAX_SCALE = 1.0f * mScreenwidth / mBitmap.getHeight(); + } + } + + topBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.camera_scaling); + deleteBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.camera_delete); + flipVBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.camera_scaling); + resizeBitmap = BitmapFactory.decodeResource(getResources(), 
R.drawable.camera_rotate); + + deleteBitmapWidth = (int) (deleteBitmap.getWidth() * BITMAP_SCALE); + deleteBitmapHeight = (int) (deleteBitmap.getHeight() * BITMAP_SCALE); + + resizeBitmapWidth = (int) (resizeBitmap.getWidth() * BITMAP_SCALE); + resizeBitmapHeight = (int) (resizeBitmap.getHeight() * BITMAP_SCALE); + + flipVBitmapWidth = (int) (flipVBitmap.getWidth() * BITMAP_SCALE); + flipVBitmapHeight = (int) (flipVBitmap.getHeight() * BITMAP_SCALE); + + topBitmapWidth = (int) (topBitmap.getWidth() * BITMAP_SCALE); + topBitmapHeight = (int) (topBitmap.getHeight() * BITMAP_SCALE); + } + + @Override + public boolean onTouchEvent(MotionEvent event) { + int action = MotionEventCompat.getActionMasked(event); + boolean handled = true; + switch (action) { + case MotionEvent.ACTION_DOWN: + if (isInButton(event, dst_delete)) { + isInRomate = false; + if (operationListener != null) { + operationListener.onDeleteClick(); + } + } else if (isInResize(event)) { + isInRomate = false; + isInResize = true; + lastRotateDegree = rotationToStartPoint(event); + midPointToStartPoint(event); + lastLength = diagonalLength(event); + } else if (isInButton(event, dst_flipV)) { + isInRomate = false; +// //水平镜像 +// PointF localPointF = new PointF(); +// midDiagonalPoint(localPointF); +// matrix.postScale(-1.0F, 1.0F, localPointF.x, localPointF.y); +// mScaleX = -1.0F; +// mScaleY = 1.0F; +// isHorizonMirror = !isHorizonMirror; +// invalidate(); + } else if (isInButton(event, dst_top)) { +// //置顶 +// bringToFront(); +// if (operationListener != null) { +// operationListener.onTop(this); +// } + //水平镜像 +// PointF localPointF = new PointF(); +// midDiagonalPoint(localPointF); +// matrix.postScale(-1.0F, 1.0F, localPointF.x, localPointF.y); +// mScaleX = -1.0F; +// mScaleY = 1.0F; +// isHorizonMirror = !isHorizonMirror; + X = (dst_top.left + dst_resize.right) / 2; + Y = (dst_top.top + dst_resize.bottom) / 2; + matrix.postRotate(lastRotateDegree + 90, X, Y); + lastRotateDegree += 90; + isInRomate = 
true; + invalidate(); + } else if (isInBitmap(event)) { + isInSide = true; + isInRomate = false; + lastX = event.getX(0); + lastY = event.getY(0); + } else { + isInRomate = false; + handled = false; + } + break; + case MotionEvent.ACTION_POINTER_DOWN: + if (spacing(event) > pointerLimitDis) { + oldDis = spacing(event); + isPointerDown = true; + midPointToStartPoint(event); + } else { + isPointerDown = false; + } + isInSide = false; + isInResize = false; + break; + case MotionEvent.ACTION_MOVE: + //双指缩放 + if (isPointerDown) { + float scale; + float disNew = spacing(event); + if (disNew == 0 || disNew < pointerLimitDis) { + scale = 1; + } else { + scale = disNew / oldDis; + //缩放缓慢 + scale = (scale - 1) * pointerZoomCoeff + 1; + } + float scaleTemp = (scale * Math.abs(dst_flipV.left - dst_resize.left)) / oringinWidth; + if (((scaleTemp <= MIN_SCALE)) && scale < 1 || + (scaleTemp >= MAX_SCALE) && scale > 1) { + scale = 1; + } else { + lastLength = diagonalLength(event); + } + matrix.postScale(scale, scale, mid.x, mid.y); + mScaleX = scale; + mScaleY = scale; + invalidate(); + } else if (isInResize) { + + matrix.postRotate((rotationToStartPoint(event) - lastRotateDegree) * 2, mid.x, mid.y); + lastRotateDegree = rotationToStartPoint(event); + + float scale = diagonalLength(event) / lastLength; + + if (((diagonalLength(event) / halfDiagonalLength <= MIN_SCALE)) && scale < 1 || + (diagonalLength(event) / halfDiagonalLength >= MAX_SCALE) && scale > 1) { + scale = 1; + if (!isInResize(event)) { + isInResize = false; + } + } else { + lastLength = diagonalLength(event); + } + matrix.postScale(scale, scale, mid.x, mid.y); + mScaleX = scale; + mScaleY = scale; + invalidate(); + } else if (isInSide) { + float x = event.getX(0); + float y = event.getY(0); + //TODO 移动区域判断 不能超出屏幕 + matrix.postTranslate(x - lastX, y - lastY); + posX = x; + posY = y; + lastX = x; + lastY = y; + invalidate(); + } + break; + case MotionEvent.ACTION_CANCEL: + case MotionEvent.ACTION_UP: + isInResize = 
false; + isInSide = false; + isPointerDown = false; + + X = (dst_top.left + dst_resize.right) / 2; + Y = (dst_top.top + dst_resize.bottom) / 2; + rotateDegree = lastRotateDegree; + Log.e(TAG, "leftBottomX:" + leftBottomX); + Log.e(TAG, "leftBottomY:" + leftBottomY); + Log.e(TAG, "viewWidth:" + viewWidth); + Log.e(TAG, "viewHeight:" + viewHeight); +// Bitmap bitmap = Bitmap.createBitmap(mBitmap, 0, 0, mBitmap.getWidth(), mBitmap.getHeight(), matrix, true); +// Log.e(TAG, "bitmapWidth:" + bitmap.getWidth()); +// Log.e(TAG, "bitmapHeight:" + bitmap.getHeight()); + break; + + } + if (handled && operationListener != null) { + operationListener.onEdit(this); + } + return handled; + } + + /** + * 计算图片的角度等属性 + * + * @param model + * @return + */ + public StickerPropertyModel calculate(StickerPropertyModel model) { + float[] v = new float[9]; + matrix.getValues(v); + // translation is simple + float tx = v[Matrix.MTRANS_X]; + float ty = v[Matrix.MTRANS_Y]; + Log.d(TAG, "tx : " + tx + " ty : " + ty); + // calculate real scale + float scalex = v[Matrix.MSCALE_X]; + float skewy = v[Matrix.MSKEW_Y]; + float rScale = (float) Math.sqrt(scalex * scalex + skewy * skewy); + Log.d(TAG, "rScale : " + rScale); + // calculate the degree of rotation + float rAngle = Math.round(Math.atan2(v[Matrix.MSKEW_X], v[Matrix.MSCALE_X]) * (180 / Math.PI)); + Log.d(TAG, "rAngle : " + rAngle); + + PointF localPointF = new PointF(); + midDiagonalPoint(localPointF); + + Log.d(TAG, " width : " + (mBitmap.getWidth() * rScale) + " height " + (mBitmap.getHeight() * rScale)); + + float minX = localPointF.x; + float minY = localPointF.y; + + Log.d(TAG, "midX : " + minX + " midY : " + minY); + model.setDegree((float) Math.toRadians(rAngle)); + //TODO 占屏幕百分比 + float precentWidth = (mBitmap.getWidth() * rScale) / mScreenwidth; + model.setScaling(precentWidth); + model.setxLocation(minX / mScreenwidth); + model.setyLocation(minY / mScreenwidth); + model.setStickerId(stickerId); + if (isHorizonMirror) { + 
model.setHorizonMirror(1); + } else { + model.setHorizonMirror(2); + } + return model; + } + + /** + * 是否在四条线内部 + * 图片旋转后 可能存在菱形状态 不能用4个点的坐标范围去判断点击区域是否在图片内 + * + * @return + */ + private boolean isInBitmap(MotionEvent event) { + float[] arrayOfFloat1 = new float[9]; + this.matrix.getValues(arrayOfFloat1); + //左上角 + float f1 = 0.0F * arrayOfFloat1[0] + 0.0F * arrayOfFloat1[1] + arrayOfFloat1[2]; + float f2 = 0.0F * arrayOfFloat1[3] + 0.0F * arrayOfFloat1[4] + arrayOfFloat1[5]; + //右上角 + float f3 = arrayOfFloat1[0] * this.mBitmap.getWidth() + 0.0F * arrayOfFloat1[1] + arrayOfFloat1[2]; + float f4 = arrayOfFloat1[3] * this.mBitmap.getWidth() + 0.0F * arrayOfFloat1[4] + arrayOfFloat1[5]; + //左下角 + float f5 = 0.0F * arrayOfFloat1[0] + arrayOfFloat1[1] * this.mBitmap.getHeight() + arrayOfFloat1[2]; + float f6 = 0.0F * arrayOfFloat1[3] + arrayOfFloat1[4] * this.mBitmap.getHeight() + arrayOfFloat1[5]; + //右下角 + float f7 = arrayOfFloat1[0] * this.mBitmap.getWidth() + arrayOfFloat1[1] * this.mBitmap.getHeight() + arrayOfFloat1[2]; + float f8 = arrayOfFloat1[3] * this.mBitmap.getWidth() + arrayOfFloat1[4] * this.mBitmap.getHeight() + arrayOfFloat1[5]; + + float[] arrayOfFloat2 = new float[4]; + float[] arrayOfFloat3 = new float[4]; + //确定X方向的范围 + arrayOfFloat2[0] = f1;//左上的x + arrayOfFloat2[1] = f3;//右上的x + arrayOfFloat2[2] = f7;//右下的x + arrayOfFloat2[3] = f5;//左下的x + //确定Y方向的范围 + arrayOfFloat3[0] = f2;//左上的y + arrayOfFloat3[1] = f4;//右上的y + arrayOfFloat3[2] = f8;//右下的y + arrayOfFloat3[3] = f6;//左下的y + return pointInRect(arrayOfFloat2, arrayOfFloat3, event.getX(0), event.getY(0)); + } + + /** + * 判断点是否在一个矩形内部 + * + * @param xRange + * @param yRange + * @param x + * @param y + * @return + */ + private boolean pointInRect(float[] xRange, float[] yRange, float x, float y) { + //四条边的长度 + double a1 = Math.hypot(xRange[0] - xRange[1], yRange[0] - yRange[1]); + double a2 = Math.hypot(xRange[1] - xRange[2], yRange[1] - yRange[2]); + double a3 = Math.hypot(xRange[3] - xRange[2], 
yRange[3] - yRange[2]); + double a4 = Math.hypot(xRange[0] - xRange[3], yRange[0] - yRange[3]); + //待检测点到四个点的距离 + double b1 = Math.hypot(x - xRange[0], y - yRange[0]); + double b2 = Math.hypot(x - xRange[1], y - yRange[1]); + double b3 = Math.hypot(x - xRange[2], y - yRange[2]); + double b4 = Math.hypot(x - xRange[3], y - yRange[3]); + + double u1 = (a1 + b1 + b2) / 2; + double u2 = (a2 + b2 + b3) / 2; + double u3 = (a3 + b3 + b4) / 2; + double u4 = (a4 + b4 + b1) / 2; + + //矩形的面积 + double s = a1 * a2; + //海伦公式 计算4个三角形面积 + double ss = Math.sqrt(u1 * (u1 - a1) * (u1 - b1) * (u1 - b2)) + + Math.sqrt(u2 * (u2 - a2) * (u2 - b2) * (u2 - b3)) + + Math.sqrt(u3 * (u3 - a3) * (u3 - b3) * (u3 - b4)) + + Math.sqrt(u4 * (u4 - a4) * (u4 - b4) * (u4 - b1)); + return Math.abs(s - ss) < 0.5; + + + } + + + /** + * 触摸是否在某个button范围 + * + * @param event + * @param rect + * @return + */ + private boolean isInButton(MotionEvent event, Rect rect) { + int left = rect.left; + int right = rect.right; + int top = rect.top; + int bottom = rect.bottom; + return event.getX(0) >= left && event.getX(0) <= right && event.getY(0) >= top && event.getY(0) <= bottom; + } + + /** + * 触摸是否在拉伸区域内 + * + * @param event + * @return + */ + private boolean isInResize(MotionEvent event) { + int left = -20 + this.dst_resize.left; + int top = -20 + this.dst_resize.top; + int right = 20 + this.dst_resize.right; + int bottom = 20 + this.dst_resize.bottom; + return event.getX(0) >= left && event.getX(0) <= right && event.getY(0) >= top && event.getY(0) <= bottom; + } + + /** + * 触摸的位置和图片左上角位置的中点 + * + * @param event + */ + private void midPointToStartPoint(MotionEvent event) { + float[] arrayOfFloat = new float[9]; + matrix.getValues(arrayOfFloat); + float f1 = 0.0f * arrayOfFloat[0] + 0.0f * arrayOfFloat[1] + arrayOfFloat[2]; + float f2 = 0.0f * arrayOfFloat[3] + 0.0f * arrayOfFloat[4] + arrayOfFloat[5]; + float f3 = f1 + event.getX(0); + float f4 = f2 + event.getY(0); + mid.set(f3 / 2, f4 / 2); + } + + /** + * 
计算对角线交叉的位置 + * + * @param paramPointF + */ + private void midDiagonalPoint(PointF paramPointF) { + float[] arrayOfFloat = new float[9]; + this.matrix.getValues(arrayOfFloat); + float f1 = 0.0F * arrayOfFloat[0] + 0.0F * arrayOfFloat[1] + arrayOfFloat[2]; + float f2 = 0.0F * arrayOfFloat[3] + 0.0F * arrayOfFloat[4] + arrayOfFloat[5]; + float f3 = arrayOfFloat[0] * this.mBitmap.getWidth() + arrayOfFloat[1] * this.mBitmap.getHeight() + arrayOfFloat[2]; + float f4 = arrayOfFloat[3] * this.mBitmap.getWidth() + arrayOfFloat[4] * this.mBitmap.getHeight() + arrayOfFloat[5]; + float f5 = f1 + f3; + float f6 = f2 + f4; + paramPointF.set(f5 / 2.0F, f6 / 2.0F); + } + + + /** + * 在滑动旋转过程中,总是以左上角原点作为绝对坐标计算偏转角度 + * + * @param event + * @return + */ + private float rotationToStartPoint(MotionEvent event) { + + float[] arrayOfFloat = new float[9]; + matrix.getValues(arrayOfFloat); + float x = 0.0f * arrayOfFloat[0] + 0.0f * arrayOfFloat[1] + arrayOfFloat[2]; + float y = 0.0f * arrayOfFloat[3] + 0.0f * arrayOfFloat[4] + arrayOfFloat[5]; + double arc = Math.atan2(event.getY(0) - y, event.getX(0) - x); + return (float) Math.toDegrees(arc); + } + + /** + * 触摸点到矩形中点的距离 + * + * @param event + * @return + */ + private float diagonalLength(MotionEvent event) { + float diagonalLength = (float) Math.hypot(event.getX(0) - mid.x, event.getY(0) - mid.y); + return diagonalLength; + } + + /** + * 计算双指之间的距离 + */ + private float spacing(MotionEvent event) { + if (event.getPointerCount() == 2) { + float x = event.getX(0) - event.getX(1); + float y = event.getY(0) - event.getY(1); + return (float) Math.sqrt(x * x + y * y); + } else { + return 0; + } + } + + public interface OperationListener { + void onDeleteClick(); + + void onEdit(StickerView stickerView); + + void onTop(StickerView stickerView); + } + + public void setOperationListener(OperationListener operationListener) { + this.operationListener = operationListener; + } + + public void setInEdit(boolean isInEdit) { + this.isInEdit = isInEdit; + 
invalidate(); + } + + + @Override + public Bitmap getBitmap() { + Log.e(TAG, "getBitmap"); + Bitmap bitmap = Bitmap.createBitmap(mBitmap, 0, 0, mBitmap.getWidth(), mBitmap.getHeight(), matrix, true); + return bitmap; + } + + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/VideoEditProgressView.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/VideoEditProgressView.java new file mode 100644 index 0000000..739fadf --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/VideoEditProgressView.java @@ -0,0 +1,626 @@ +package com.aserbao.androidcustomcamera.whole.editVideo.view; + +import android.content.Context; +import android.content.res.Resources; +import android.graphics.Bitmap; +import android.graphics.Color; +import android.os.Handler; +import android.os.Looper; +import android.support.annotation.NonNull; +import android.support.annotation.Nullable; +import android.util.AttributeSet; +import android.util.DisplayMetrics; +import android.util.Log; +import android.view.Gravity; +import android.view.LayoutInflater; +import android.view.MotionEvent; +import android.view.View; +import android.view.ViewGroup; +import android.widget.ImageView; +import android.widget.LinearLayout; +import android.widget.RelativeLayout; +import android.widget.TextView; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.base.utils.DisplayUtil; +import com.aserbao.androidcustomcamera.whole.editVideo.beans.SelectViewBean; + +import java.util.ArrayList; +import java.util.List; + +/** + *
    + *     author : Administrator (Jacket)
    + *     e-mail : 378315764@qq.com
    + *     time   : 2018/01/30
    + *     desc   :
    + *     version: 3.2
    + * 
    + */ + +public class VideoEditProgressView extends RelativeLayout { + + private String TAG = VideoEditProgressView.class.getSimpleName(); + + /** + * Content的宽 + */ + private int maxScrollWidth; + + private int minScrollWidth; + + private int screenWidth; + + private LinearLayout editBarLeft; + + private LinearLayout editBarRight; + + private ImageView ivEditBarLeft; + + private ImageView ivEditBarRight; + + private TextView tvStartTime; + + private TextView tvEndTime; + + private LinearLayout imageList; + + private int editBarLeftWidth; + private int editBarLeftHeight; + + private int editBarRightWidth; + private int editBarRightHeight; + + private RelativeLayout.LayoutParams editBarLeftParamsBar; + private RelativeLayout.LayoutParams editBarRightParamsBar; + private RelativeLayout.LayoutParams selectedParams; + + private Context context; + + private int videoEditProgressWidth; + + private LinearLayout selectdAreaView; //选中的区域 + + private long totalTime = 15 * 1000; + private long startTime = 0; + private long endTime = 1; + private long currentTime = 0; + private float minSelectTimeWidth = 0; + + private LinearLayout.LayoutParams tvStartTimeParams; + private LinearLayout.LayoutParams tvEndTimeParams; + private List selectViewBeans; //选中的时间集合 + + public VideoEditProgressView(@NonNull Context context, @Nullable AttributeSet attrs) { + super(context, attrs); + this.context = context; + initView(context, attrs); + } + + private void initView(Context context, AttributeSet attrs) { + Resources resources = context.getResources(); //获取屏幕的宽度 + DisplayMetrics dm = resources.getDisplayMetrics(); + screenWidth = dm.widthPixels; + + selectViewBeans = new ArrayList(); + + minSelectTimeWidth = screenWidth / 8 + DisplayUtil.dipToPx( 10); + + imageList = new LinearLayout(context); + RelativeLayout.LayoutParams layoutParams = new LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT); + imageList.setOrientation(LinearLayout.HORIZONTAL); + 
imageList.setGravity(Gravity.CENTER_VERTICAL); + addView(imageList, layoutParams); + + + selectedParams = new LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT); + selectdAreaView = new LinearLayout(context); //选中的背景 + selectdAreaView.setBackgroundColor(Color.parseColor("#3fff0000")); + selectedParams.addRule(RelativeLayout.CENTER_VERTICAL, RelativeLayout.TRUE); + addView(selectdAreaView, selectedParams); + + + editBarLeftParamsBar = new LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT); + editBarLeft = (LinearLayout) LayoutInflater.from(context).inflate(R.layout.edit_bar_layout, null); //添加左边编辑棒 + editBarLeftParamsBar.addRule(RelativeLayout.CENTER_VERTICAL, RelativeLayout.TRUE); + editBarLeftParamsBar.addRule(RelativeLayout.ALIGN_PARENT_LEFT, RelativeLayout.TRUE); + ivEditBarLeft = (ImageView) editBarLeft.findViewById(R.id.iv_edit_bar_left); + addView(editBarLeft, editBarLeftParamsBar); + tvStartTime = (TextView) editBarLeft.findViewById(R.id.tv_start_time); + + + editBarRightParamsBar = new LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT); + editBarRight = (LinearLayout) LayoutInflater.from(context).inflate(R.layout.edit_bar_two_layout, null); //添加右边边编辑棒 + editBarRightParamsBar.addRule(RelativeLayout.CENTER_VERTICAL, RelativeLayout.TRUE); + ivEditBarRight = (ImageView) editBarRight.findViewById(R.id.iv_edit_bar_right); + addView(editBarRight, editBarRightParamsBar); + tvEndTime = (TextView) editBarRight.findViewById(R.id.tv_end_time); + + + tvStartTimeParams = new LinearLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT); + tvEndTimeParams = new LinearLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT); + + selectdAreaView.setVisibility(GONE); + editBarLeft.setVisibility(GONE); + editBarRight.setVisibility(GONE); + + tvStartTimeParams.leftMargin = DisplayUtil.dipToPx( 3); 
+ tvStartTime.setLayoutParams(tvStartTimeParams); + + + editBarLeft.setOnTouchListener(new OnTouchListener() { + @Override + public boolean onTouch(View v, MotionEvent event) { + switch (event.getAction()) { + case MotionEvent.ACTION_DOWN: + //1.按下记录坐标 + startLeftBarX = event.getX(); + ivEditBarLeft.setImageResource(R.drawable.camera_select_selected); + Log.e(TAG, "getX(): " + getX()); + break; + case MotionEvent.ACTION_MOVE: + //2.记录结束值 + float endX = event.getX(); + //3.计算偏移量 + float distanceX = endX - startLeftBarX; + float toX = editBarLeft.getX() + distanceX; + if (toX < -DisplayUtil.dipToPx( 20)) { + toX = -DisplayUtil.dipToPx( 20); + } + if (toX > editBarRight.getX() - minSelectTimeWidth) { + toX = editBarRight.getX() - minSelectTimeWidth; + } + selectdAreaView.layout((int) editBarLeft.getX() + DisplayUtil.dipToPx( 20), DisplayUtil.dipToPx( 26), (int) editBarRight.getX() + DisplayUtil.dipToPx( 10), DisplayUtil.dipToPx( 85)); + editBarLeft.setX(toX); + if (toX == -DisplayUtil.dipToPx( 20)) { + startTime = 0; + } else { + startTime = totalTime * selectdAreaView.getLeft() / getMeasuredWidth(); + } + if (toX == videoEditProgressWidth - DisplayUtil.dipToPx( 17)) { + endTime = totalTime; + } else { + endTime = totalTime * selectdAreaView.getRight() / getMeasuredWidth(); + } + Log.e(TAG, "startTime: " + startTime); + if (tvStartTime != null) { + tvStartTime.setText(startTime / 1000 + "s"); + if (startTime == 0) { + tvStartTimeParams.leftMargin = DisplayUtil.dipToPx( 3); + } else { + tvStartTimeParams.leftMargin = 0; + } + tvStartTime.setLayoutParams(tvStartTimeParams); + } + break; + case MotionEvent.ACTION_UP: + selectdAreaView.layout((int) editBarLeft.getX() + DisplayUtil.dipToPx( 20), DisplayUtil.dipToPx( 26), (int) editBarRight.getX() + DisplayUtil.dipToPx( 10), DisplayUtil.dipToPx( 85)); + if (playStateListener != null) { + Log.e(TAG, "startTime:" + startTime + ",endTime:" + endTime); + playStateListener.selectTimeChange(startTime, endTime); + } + 
ivEditBarLeft.setImageResource(R.drawable.camera_select_normal); + break; + } + return true; + } + }); + + + editBarRight.setOnTouchListener(new OnTouchListener() { + @Override + public boolean onTouch(View v, MotionEvent event) { + switch (event.getAction()) { + case MotionEvent.ACTION_DOWN: + //1.按下记录坐标 + startRightBarX = event.getX(); + ivEditBarRight.setImageResource(R.drawable.camera_select_selected); + break; + case MotionEvent.ACTION_MOVE: + //2.记录结束值 + float endX = event.getX(); + //3.计算偏移量 + float distanceX = endX - startRightBarX; + float toX = editBarRight.getX() + distanceX; + if (toX < editBarLeft.getX() + minSelectTimeWidth) { + toX = editBarLeft.getX() + minSelectTimeWidth; + } + if (toX > videoEditProgressWidth - DisplayUtil.dipToPx( 17)) { + toX = videoEditProgressWidth - DisplayUtil.dipToPx( 17); + } + selectdAreaView.layout((int) editBarLeft.getX() + DisplayUtil.dipToPx( 20), DisplayUtil.dipToPx( 26), (int) editBarRight.getX() + DisplayUtil.dipToPx( 10), DisplayUtil.dipToPx( 85)); + editBarRight.setX(toX); + if (toX == -DisplayUtil.dipToPx( 20)) { + startTime = 0; + } else { + startTime = totalTime * selectdAreaView.getLeft() / getMeasuredWidth(); + } + if (toX == videoEditProgressWidth - DisplayUtil.dipToPx( 17)) { + endTime = totalTime; + } else { + endTime = totalTime * selectdAreaView.getRight() / getMeasuredWidth(); + } + Log.e(TAG, "getRight(): " + selectdAreaView.getRight()); + Log.e(TAG, "getMeasuredWidth(): " + getMeasuredWidth()); + Log.e(TAG, "endTime: " + endTime); + if (tvEndTime != null) { + tvEndTime.setText(endTime / 1000 + "s"); + if (endTime == 15000) { + tvEndTimeParams.rightMargin = DisplayUtil.dipToPx( 6); + } else { + tvEndTimeParams.rightMargin = 0; + } + tvEndTime.setLayoutParams(tvEndTimeParams); + } + break; + case MotionEvent.ACTION_UP: + ivEditBarRight.setImageResource(R.drawable.camera_select_normal); + selectdAreaView.layout((int) editBarLeft.getX() + DisplayUtil.dipToPx( 20), DisplayUtil.dipToPx( 26), (int) 
editBarRight.getX() + DisplayUtil.dipToPx( 10), DisplayUtil.dipToPx( 85)); + if (playStateListener != null) { + Log.e(TAG, "startTime:" + startTime + ",endTime:" + endTime); + playStateListener.selectTimeChange(startTime, endTime); + } + break; + } + return true; + } + }); + } + + //添加视频处理关键帧图片 + public void addImageView(List bitmaps) { + if (imageList != null) { +// int width = DisplayUtil.dipToPx( 45) * bitmaps.size(); +// minScrollWidth = screenWidth / 2 - width; + int imageWidth = screenWidth / 8; + ViewGroup.LayoutParams layoutParams = new LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.MATCH_PARENT); + imageList.setLayoutParams(layoutParams); + for (Bitmap bitmap : bitmaps) { + ImageView imageView = new ImageView(context); + LayoutParams params = new LayoutParams(imageWidth, DisplayUtil.dipToPx( 60)); + imageView.setLayoutParams(params); + imageView.setScaleType(ImageView.ScaleType.CENTER_CROP); + imageView.setImageBitmap(bitmap); + imageList.addView(imageView); + } + } + } + + /** + * 当布局文件加载完成的时候回调这个方法 + */ + @Override + protected void onFinishInflate() { + super.onFinishInflate(); + } + + /** + * 在测量方法里,得到各个控件的高和宽 + * + * @param widthMeasureSpec + * @param heightMeasureSpec + */ + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + minScrollWidth = screenWidth / 2 - getMeasuredWidth(); //初始位置(屏幕中心)减去ViewEditProgressView的宽度 + maxScrollWidth = screenWidth / 2; + editBarLeftWidth = editBarLeft.getMeasuredWidth(); + editBarLeftHeight = getMeasuredHeight(); + videoEditProgressWidth = getMeasuredWidth(); + editBarRightWidth = editBarRight.getMeasuredWidth(); + editBarRightHeight = getMeasuredHeight(); + + } + + @Override + protected void onLayout(boolean changed, int l, int t, int r, int b) { + super.onLayout(changed, l, t, r, b); +// editBarRight.layout(editBarLeft.getRight() + DisplayUtil.dipToPx( 4), 0, editBarLeft.getRight() + 
DisplayUtil.dipToPx( 4) + editBarRightWidth, editBarRightHeight); +// editBarLeft.setX(0); +// editBarLeft.setY(0); +// editBarRight.setX(DisplayUtil.dipToPx(40)); +// editBarRight.setY(0); + editBarLeft.layout(-DisplayUtil.dipToPx( 20), DisplayUtil.dipToPx( 8), editBarLeftWidth, editBarLeftHeight + DisplayUtil.dipToPx( 8)); + editBarRight.layout(editBarRight.getLeft(), DisplayUtil.dipToPx( 8), editBarRight.getRight(), editBarRightHeight + DisplayUtil.dipToPx( 8)); + selectdAreaView.layout((int) editBarLeft.getX() + DisplayUtil.dipToPx( 20), DisplayUtil.dipToPx( 26), (int) editBarRight.getX() + DisplayUtil.dipToPx( 10), DisplayUtil.dipToPx( 85)); + } + + private float startX; + private float startLeftBarX; + private float startRightBarX; + + @Override + public boolean onTouchEvent(MotionEvent event) { + super.onTouchEvent(event); + switch (event.getAction()) { + case MotionEvent.ACTION_DOWN: + //1.按下记录坐标 + startX = event.getX(); + playState = false; + if (playStateListener != null) { + playStateListener.playStateChange(playState); + } + handler.removeCallbacksAndMessages(null); + break; + case MotionEvent.ACTION_MOVE: + //2.记录结束值 + float endX = event.getX(); + //3.计算偏移量 + float distanceX = endX - startX; + float toX = getX() + distanceX; + if (toX < minScrollWidth) { + toX = minScrollWidth; + } + if (toX > maxScrollWidth) { + toX = maxScrollWidth; + } + setX(toX); + currentTime = (long) (totalTime * (screenWidth / 2 - getX()) / getMeasuredWidth()); + Log.e(TAG, "currentTime: " + currentTime); + if (playStateListener != null) { + playStateListener.videoProgressUpdate(currentTime, false); + } + break; + } + + return true; + } + + private Handler handler = new Handler(Looper.getMainLooper()); + private float toX; + private boolean playState; + private List baseImageViews; + private List selectedTimeView = new ArrayList<>(); + + public void togglePlayVideo(final boolean playState, List baseImageViews) { + this.playState = playState; + this.baseImageViews = 
baseImageViews; + if (playState) { + selectdAreaView.setVisibility(GONE); + editBarLeft.setVisibility(GONE); + editBarRight.setVisibility(GONE); + + if (selectedTimeView != null && selectedTimeView.size() > 0) { + for (View view : selectedTimeView) { + removeView(view); + } + } + if (baseImageViews != null && baseImageViews.size() > 0) { + selectedTimeView.clear(); + for (BaseImageView baseImageView : baseImageViews) { + LinearLayout selectdView; //选中的区域 + long startX = baseImageView.getStartTime() * videoEditProgressWidth / totalTime; + long endX = baseImageView.getEndTime() * videoEditProgressWidth / totalTime; + Log.e(TAG, "startTime:" + baseImageView.getStartTime()); + Log.e(TAG, "endTime:" + baseImageView.getEndTime()); + Log.e(TAG, "1--------->startTime:" + baseImageView.getStartTime()); + Log.e(TAG, "1--------->endTime:" + baseImageView.getEndTime()); + Log.e(TAG, "1--------->totalTime:" + totalTime); + int width = (int) (endX - startX); + if (totalTime - baseImageView.getEndTime() <= 1000) { + Log.e(TAG, "尾部=======>"); + width += DisplayUtil.dipToPx( 10); + } else { + width += DisplayUtil.dipToPx( 4); + } + RelativeLayout.LayoutParams selectedParams = new LayoutParams(width, DisplayUtil.dipToPx( 60)); + selectdView = new LinearLayout(context); //选中的背景 + selectdView.setX(startX); + //startTime = totalTime * selectdAreaView.getLeft() / getMeasuredWidth(); + selectdView.setBackgroundColor(Color.parseColor("#7f000000")); + selectedParams.addRule(RelativeLayout.CENTER_VERTICAL, RelativeLayout.TRUE); + addView(selectdView, selectedParams); + selectedTimeView.add(selectdView); + } + } + + } + + int perTotalTime = (int) (totalTime / 1000); + Log.e(TAG, "perTotalTime:" + perTotalTime); + if (perTotalTime != 0) { +// int perX = (maxScrollWidth - minScrollWidth) / (20 * perTotalTime); + final int perX; + final long delayMillis; + if(perTotalTime > 18){ + perX = screenWidth / (perTotalTime * 8); + delayMillis = 108; + }else if (perTotalTime > 16) { + perX = screenWidth 
/ (perTotalTime * 8); + delayMillis = 125; + } else { + perX = screenWidth / 160; + delayMillis = 100; + } + toX = getX() - 4 * perX; + handler.removeCallbacksAndMessages(null); + handler.post(new Runnable() { + @Override + public void run() { + if (toX >= minScrollWidth && playState) { + setX(toX); + if (playStateListener != null) { + playStateListener.videoProgressUpdate(currentTime, true); + } + currentTime = (long) (totalTime * (screenWidth / 2 - getX()) / getMeasuredWidth()); + toX -= perX; + if (toX < minScrollWidth) { + setX(maxScrollWidth); + if (playStateListener != null) { + playStateListener.videoProgressUpdate(0, false); + playStateListener.playStateChange(false); + } + } + handler.postDelayed(this, delayMillis); + } + } + }); + } + + } + + + public void recoverView() { + setX(maxScrollWidth); + handler.removeCallbacksAndMessages(null); + if (playStateListener != null) { + playStateListener.videoProgressUpdate(0, false); + playStateListener.playStateChange(false); + } + } + + + public interface PlayStateListener { + void playStateChange(boolean playState); + + void selectTimeChange(long startTime, long endTime); + + void videoProgressUpdate(long currentTime, boolean isPlay); + } + + PlayStateListener playStateListener; + + public void setPlayStateListener(PlayStateListener playStateListener) { + this.playStateListener = playStateListener; + } + + + public void setTotalTime(int totalTime) { + this.totalTime = totalTime; + } + + + public long getStartTime() { + return startTime; + } + + public long getEndTime() { + return endTime; + } + + public long getCurrentTime() { + return currentTime; + } + + //恢复View的初始化位置 + public void recoverView(List baseImageViews, BaseImageView baseImageView, boolean isEdit) { +// editBarLeft.layout(0, 0, editBarLeftWidth, editBarLeftHeight); + Log.e(TAG, "isEdit=" + isEdit); + startTime = 0; + endTime = 2; + float leftX = screenWidth / 2 - getX() - DisplayUtil.dipToPx( 20); + + if (selectedTimeView != null && 
selectedTimeView.size() > 0) { + for (View view : selectedTimeView) { + removeView(view); + } + } + if (baseImageViews != null && baseImageViews.size() > 0) { + selectedTimeView.clear(); + for (BaseImageView baseImageView1 : baseImageViews) { + if (baseImageView != null && baseImageView.getTimeStamp() == baseImageView1.getTimeStamp()) { + if (isEdit) { + Log.e(TAG, "11111111111111,endTime:" + baseImageView.getEndTime()); + long startX = baseImageView.getStartTime() * videoEditProgressWidth / totalTime - DisplayUtil.dipToPx( 20); + long endX = baseImageView.getEndTime() * videoEditProgressWidth / totalTime - DisplayUtil.dipToPx( 10); +// float leftX = screenWidth / 2 - getX() - DisplayUtil.dipToPx( 20); + editBarLeft.setX(startX); + + if (endX > videoEditProgressWidth - DisplayUtil.dipToPx( 17)) { //防止滑棒滑出界限 + endX = videoEditProgressWidth - DisplayUtil.dipToPx( 17); + } + editBarRight.setX(endX); + } else { + Log.e(TAG, "666666"); + } + } else { + LinearLayout selectdView; //选中的区域 + long startX = baseImageView1.getStartTime() * videoEditProgressWidth / totalTime; + long endX = baseImageView1.getEndTime() * videoEditProgressWidth / totalTime; + Log.e(TAG, "2--------->startTime:" + baseImageView1.getStartTime()); + Log.e(TAG, "2--------->endTime:" + baseImageView1.getEndTime()); + Log.e(TAG, "2--------->totalTime:" + totalTime); + int width = (int) (endX - startX); + if ((totalTime - baseImageView1.getEndTime()) <= 1000) { + width += DisplayUtil.dipToPx( 10); + } else { + width += DisplayUtil.dipToPx( 4); + } + RelativeLayout.LayoutParams selectedParams = new LayoutParams(width, DisplayUtil.dipToPx( 60)); + selectdView = new LinearLayout(context); //选中的背景 + selectdView.setX(startX); + //startTime = totalTime * selectdAreaView.getLeft() / getMeasuredWidth(); + selectdView.setBackgroundColor(Color.parseColor("#7f000000")); + selectedParams.addRule(RelativeLayout.CENTER_VERTICAL, RelativeLayout.TRUE); + addView(selectdView, selectedParams); + 
selectedTimeView.add(selectdView); + } + + } + } + + this.baseImageViews = baseImageViews; + + removeView(editBarLeft); //清除左右编辑棒和选择区域,让其覆盖到布局最顶端 + removeView(editBarRight); + removeView(selectdAreaView); + + addView(selectdAreaView, selectedParams); + addView(editBarLeft, editBarLeftParamsBar); + addView(editBarRight, editBarRightParamsBar); + + if (!isEdit) { + Log.e(TAG, "222222222222222"); + editBarLeft.setX(leftX); + minSelectTimeWidth = videoEditProgressWidth * 2000 / totalTime + DisplayUtil.dipToPx( 10); + float rightX = (leftX + minSelectTimeWidth > (getMeasuredWidth() - DisplayUtil.dipToPx( 16))) ? (getMeasuredWidth() - DisplayUtil.dipToPx( 16)) : (leftX + minSelectTimeWidth); +// float rightX = leftX + minSelectTimeWidth; + Log.e(TAG, "rightX=" + rightX); + Log.e(TAG, "width=" + getMeasuredWidth()); + editBarRight.setX(rightX); +// selectdAreaView.layout((int) (screenWidth / 2 - getX()) +DisplayUtil.dipToPx( 20), DisplayUtil.dipToPx( 20), (int) editBarRight.getX() + DisplayUtil.dipToPx( 10), DisplayUtil.dipToPx( 82)); + } + if (baseImageViews.size() == 0) { + editBarLeft.setVisibility(GONE); + editBarRight.setVisibility(GONE); + selectdAreaView.setVisibility(GONE); + } else { + editBarLeft.setVisibility(VISIBLE); + editBarRight.setVisibility(VISIBLE); + selectdAreaView.setVisibility(VISIBLE); + } + + if (baseImageViews.indexOf(baseImageView) == -1) { + editBarLeft.setVisibility(GONE); + editBarRight.setVisibility(GONE); + selectdAreaView.setVisibility(GONE); + } + + + } + + +// //选中状态View更改方法 +// public void selectAreaChange(BaseImageView baseImageView) { +// Log.e(TAG,"666"); +//// startTime = totalTime * selectdAreaView.getLeft() / getMeasuredWidth(); +// long leftX = baseImageView.getStartTime() * getMeasuredWidth() / totalTime; +// editBarLeft.setX(leftX); +// editBarRight.setX(leftX + DisplayUtil.dipToPx( 50)); +// selectdAreaView.layout((int) editBarLeft.getX() + DisplayUtil.dipToPx( 20), DisplayUtil.dipToPx( 26), (int) editBarRight.getX() + 
DisplayUtil.dipToPx( 10), DisplayUtil.dipToPx( 85)); +// } + +// public void getFormatTime(long time) { +// SimpleDateFormat formatter = new SimpleDateFormat("HH:mm:ss");//初始化Formatter的转换格式。 +// //取整 +// String hms = formatter.format(time); +// Log.e(TAG, "时间:" + hms); +//// //时 +//// shiTv.setText(hms.substring(0,2)); +//// //分 +//// fenTv.setText(hms.substring(3,5)); +//// //秒 +//// miaoTv.setText(hms.substring(6,hms.length())); +// } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/VideoEditView.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/VideoEditView.java new file mode 100644 index 0000000..7f16f58 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/editVideo/view/VideoEditView.java @@ -0,0 +1,235 @@ +package com.aserbao.androidcustomcamera.whole.editVideo.view; + +import android.content.Context; +import android.content.res.Resources; +import android.graphics.Bitmap; +import android.util.AttributeSet; +import android.util.DisplayMetrics; +import android.util.Log; +import android.view.LayoutInflater; +import android.view.View; +import android.view.ViewGroup; +import android.widget.ImageView; +import android.widget.LinearLayout; +import android.widget.RelativeLayout; +import android.widget.TextView; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.base.utils.DisplayUtil; + +import java.util.ArrayList; +import java.util.List; + +/** + *
    + *     author : Administrator (Jacket)
    + *     e-mail : 378315764@qq.com
    + *     time   : 2018/01/31
    + *     desc   :
    + *     version: 3.2
    + * 
    + */ + +public class VideoEditView extends RelativeLayout implements VideoEditProgressView.PlayStateListener { + + private String TAG = VideoEditView.class.getSimpleName(); + + private Context context; + + private VideoEditProgressView videoEditProgressView; + private LinearLayout llPlayVideoView; + private ImageView ivCenter; + private int viewWidth; + private int viewHeight; + private int screenWidth; + private boolean isVideoPlaying = false;//视频是否处于播放状态 + private ImageView bigiconPlay; + private RelativeLayout rlCurrentLayout; + private TextView tvTotalTime; + private TextView tvCurrentTime; + //存储贴纸列表 + private ArrayList mViews = new ArrayList<>(); + + public VideoEditView(Context context, AttributeSet attrs) { + super(context, attrs); + this.context = context; + initView(context, attrs); + } + + //初始化控件 + private void initView(Context context, AttributeSet attrs) { + Resources resources = context.getResources(); //获取屏幕的宽度 + DisplayMetrics dm = resources.getDisplayMetrics(); + screenWidth = dm.widthPixels; + + rlCurrentLayout = (RelativeLayout) LayoutInflater.from(context).inflate(R.layout.rl_current_layout, null); + RelativeLayout.LayoutParams rlCurrentParams = new RelativeLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT); + addView(rlCurrentLayout, rlCurrentParams); + + tvTotalTime = (TextView) rlCurrentLayout.findViewById(R.id.tv_totalTime); + tvCurrentTime = (TextView) rlCurrentLayout.findViewById(R.id.tv_currentTime); + + videoEditProgressView = new VideoEditProgressView(context, attrs); //添加ViewEditProgressView + RelativeLayout.LayoutParams videoEditParams = new RelativeLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.MATCH_PARENT); + ViewGroup.LayoutParams params = new LayoutParams(200, ViewGroup.LayoutParams.MATCH_PARENT); + videoEditProgressView.setLayoutParams(params); + videoEditProgressView.setPlayStateListener(this); + 
videoEditParams.addRule(RelativeLayout.CENTER_VERTICAL, RelativeLayout.TRUE); + addView(videoEditProgressView, videoEditParams); + + llPlayVideoView = (LinearLayout) LayoutInflater.from(context).inflate(R.layout.ll_play_video_view, null); //添加llPlayVideoView + RelativeLayout.LayoutParams rlParams = new RelativeLayout.LayoutParams(DisplayUtil.dipToPx(context, 60), DisplayUtil.dipToPx(context, 60)); + rlParams.addRule(RelativeLayout.CENTER_VERTICAL, RelativeLayout.TRUE); + rlParams.addRule(RelativeLayout.ALIGN_PARENT_LEFT, RelativeLayout.TRUE); + addView(llPlayVideoView, rlParams); + + ivCenter = new ImageView(context); //添加ivCenter + ivCenter.setImageResource(R.drawable.bigicon_center); +// RelativeLayout.LayoutParams ivRarams = new RelativeLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, DisplayUtil.dipToPx(context, 60)); + RelativeLayout.LayoutParams ivRarams = new RelativeLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.MATCH_PARENT); + ivRarams.addRule(RelativeLayout.CENTER_IN_PARENT, RelativeLayout.TRUE); + addView(ivCenter, ivRarams); + + bigiconPlay = (ImageView) findViewById(R.id.bigicon_play); + } + + + /** + * 当布局文件加载完成的时候回调这个方法 + */ + @Override + protected void onFinishInflate() { + super.onFinishInflate(); + } + + /** + * 在测量方法里,得到各个控件的高和宽 + * + * @param widthMeasureSpec + * @param heightMeasureSpec + */ + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + viewWidth = videoEditProgressView.getMeasuredWidth(); + viewHeight = getMeasuredHeight(); + } + + + @Override + protected void onLayout(boolean changed, int left, int top, int right, int bottom) { + super.onLayout(changed, left, top, right, bottom); + //指定菜单的位置 + videoEditProgressView.layout(screenWidth / 2, 0, screenWidth / 2 + viewWidth, viewHeight); + } + + + public void addImageView(List bitmaps) { +// int width = DisplayUtil.dipToPx(context, 45) * bitmaps.size(); + if 
(bitmaps != null) { + int width = screenWidth * bitmaps.size() / 8; + ViewGroup.LayoutParams layoutParams = new LayoutParams(width, ViewGroup.LayoutParams.MATCH_PARENT); + videoEditProgressView.setLayoutParams(layoutParams); + videoEditProgressView.addImageView(bitmaps); + } + } + + ArrayList baseImageViews; + + public void videoPlay(ArrayList baseImageViews) { + this.baseImageViews = baseImageViews; + if (isVideoPlaying) { + isVideoPlaying = false; + bigiconPlay.setImageResource(R.drawable.camera_play); + } else { + isVideoPlaying = true; + bigiconPlay.setImageResource(R.drawable.bigicon_timeout_small); + } + if (onSelectTimeChangeListener != null) { + onSelectTimeChangeListener.playChange(isVideoPlaying); + } + videoEditProgressView.togglePlayVideo(isVideoPlaying, baseImageViews); + + } + + + @Override + public void playStateChange(boolean playState) { + isVideoPlaying = playState; + if (isVideoPlaying) { + bigiconPlay.setImageResource(R.drawable.bigicon_timeout_small); + } else { + bigiconPlay.setImageResource(R.drawable.camera_play); + if (onSelectTimeChangeListener != null) { + onSelectTimeChangeListener.playChange(false); + } + } + } + + public void setTotalTime(int totalTime) { + if (tvTotalTime != null) { + tvTotalTime.setText(totalTime / 1000 + "s"); + } + if (videoEditProgressView != null) { + videoEditProgressView.setTotalTime(totalTime); + } + } + + + + public interface OnSelectTimeChangeListener { + void selectTimeChange(long startTime, long endTime); + + void playChange(boolean isPlayVideo); + + void videoProgressUpdate(long currentTime, boolean isVideoPlaying); + } + + public OnSelectTimeChangeListener onSelectTimeChangeListener; + + public void setOnSelectTimeChangeListener(OnSelectTimeChangeListener onSelectTimeChangeListener) { + this.onSelectTimeChangeListener = onSelectTimeChangeListener; + } + + //开始时间和结束时间回调 + @Override + public void selectTimeChange(long startTime, long endTime) { + if (onSelectTimeChangeListener != null) { + 
onSelectTimeChangeListener.selectTimeChange(startTime, endTime); + } + } + + + public void recoverView(ArrayList baseImageViews, BaseImageView baseImageView, boolean isEdit) { + if (videoEditProgressView != null) { + videoEditProgressView.recoverView(baseImageViews,baseImageView,isEdit); + } + } + + @Override + public void videoProgressUpdate(long currentTime, boolean isVideoPlaying) { + if (tvCurrentTime != null) { + Log.e(TAG, "进度更新" ); + tvCurrentTime.setText(currentTime/1000+"s"); + } + if (onSelectTimeChangeListener != null) { + onSelectTimeChangeListener.videoProgressUpdate(currentTime, isVideoPlaying); + } + } + + public void recoverView(){ + bigiconPlay.setImageResource(R.drawable.camera_play); + if (onSelectTimeChangeListener != null) { + onSelectTimeChangeListener.playChange(false); + } + if (videoEditProgressView != null) { + videoEditProgressView.recoverView(); + } + } + +// public void selectAreaChange(BaseImageView baseImageView){ +// if (videoEditProgressView != null) { +// videoEditProgressView.selectAreaChange(baseImageView); +// } +// } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZMediaManager.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZMediaManager.java new file mode 100644 index 0000000..d700991 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZMediaManager.java @@ -0,0 +1,262 @@ +package com.aserbao.androidcustomcamera.whole.jiaozivideo; + +import android.graphics.Point; +import android.graphics.SurfaceTexture; +import android.media.AudioManager; +import android.media.MediaPlayer; +import android.os.Handler; +import android.os.HandlerThread; +import android.os.Looper; +import android.os.Message; +import android.util.Log; +import android.view.Surface; +import android.view.TextureView; + + +import com.danikula.videocache.HttpProxyCacheServer; + +import java.lang.reflect.Method; +import java.util.Map; + +import static 
com.aserbao.androidcustomcamera.base.MyApplication.getProxy; + + +/** + *

    统一管理MediaPlayer的地方,只有一个mediaPlayer实例,那么不会有多个视频同时播放,也节省资源。

    + *

    Unified management MediaPlayer place, there is only one MediaPlayer instance, then there will be no more video broadcast at the same time, also save resources.

    + * Created by Nathen + * On 2015/11/30 15:39 + */ +public class JZMediaManager implements TextureView.SurfaceTextureListener, MediaPlayer.OnPreparedListener, MediaPlayer.OnCompletionListener, MediaPlayer.OnBufferingUpdateListener, MediaPlayer.OnSeekCompleteListener, MediaPlayer.OnErrorListener, MediaPlayer.OnInfoListener, MediaPlayer.OnVideoSizeChangedListener { + public static final int HANDLER_PREPARE = 0; + public static final int HANDLER_RELEASE = 2; + public static final String TAG = "JiaoZiVideoPlayer"; + public static JZResizeTextureView textureView; + public static SurfaceTexture savedSurfaceTexture; + public static Surface surface; + public static String CURRENT_PLAYING_URL; + public static boolean CURRENT_PLING_LOOP; + public int music_type = AudioManager.STREAM_MUSIC; + public static Map MAP_HEADER_DATA; + private static JZMediaManager JZMediaManager; + public MediaPlayer mediaPlayer = new MediaPlayer(); + public int currentVideoWidth = 0; + public int currentVideoHeight = 0; + HandlerThread mMediaHandlerThread; + MediaHandler mMediaHandler; + Handler mainThreadHandler; + public int positionInList = -1; + + public JZMediaManager() { + mMediaHandlerThread = new HandlerThread(TAG); + mMediaHandlerThread.start(); + mMediaHandler = new MediaHandler((mMediaHandlerThread.getLooper())); + mainThreadHandler = new Handler(); + } + + public static JZMediaManager instance() { + if (JZMediaManager == null) { + JZMediaManager = new JZMediaManager(); + } + return JZMediaManager; + } + public void setMusic_type(int music){ + music_type = music; + } + public Point getVideoSize() { + if (currentVideoWidth != 0 && currentVideoHeight != 0) { + return new Point(currentVideoWidth, currentVideoHeight); + } else { + return null; + } + } + + public void prepare() { + releaseMediaPlayer(); + Message msg = new Message(); + msg.what = HANDLER_PREPARE; + mMediaHandler.sendMessage(msg); + } + + public void releaseMediaPlayer() { + Message msg = new Message(); + msg.what = 
HANDLER_RELEASE; + mMediaHandler.sendMessage(msg); + } + + @Override + public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int i, int i1) { + Log.i(TAG, "onSurfaceTextureAvailable [" + JZVideoPlayerManager.getCurrentJzvd().hashCode() + "] "); + if (savedSurfaceTexture == null) { + savedSurfaceTexture = surfaceTexture; + prepare(); + } else { + textureView.setSurfaceTexture(savedSurfaceTexture); + } + } + + @Override + public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int i, int i1) { + // 如果SurfaceTexture还没有更新Image,则记录SizeChanged事件,否则忽略 + Log.i(TAG, "onSurfaceTextureSizeChanged [" + JZVideoPlayerManager.getCurrentJzvd().hashCode() + "] "); + } + + @Override + public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) { + return savedSurfaceTexture == null; + } + + @Override + public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) { + } + private boolean isNeedVoice = false; + public void setIsNeedVoice(boolean isNeedV){ + isNeedVoice = isNeedV; + } + @Override + public void onPrepared(MediaPlayer mp) { + if(isNeedVoice){ + mp.setVolume(0,0); + } + mediaPlayer.start(); + mainThreadHandler.post(new Runnable() { + @Override + public void run() { + if (JZVideoPlayerManager.getCurrentJzvd() != null) { + JZVideoPlayerManager.getCurrentJzvd().onPrepared(); + } + } + }); + } + + @Override + public void onCompletion(MediaPlayer mp) { + mainThreadHandler.post(new Runnable() { + @Override + public void run() { + if (JZVideoPlayerManager.getCurrentJzvd() != null) { + JZVideoPlayerManager.getCurrentJzvd().onAutoCompletion(); + } + } + }); + } + + @Override + public void onBufferingUpdate(MediaPlayer mp, final int percent) { + mainThreadHandler.post(new Runnable() { + @Override + public void run() { + if (JZVideoPlayerManager.getCurrentJzvd() != null) { + JZVideoPlayerManager.getCurrentJzvd().setBufferProgress(percent); + } + } + }); + } + + @Override + public void onSeekComplete(MediaPlayer mp) { + 
mainThreadHandler.post(new Runnable() { + @Override + public void run() { + if (JZVideoPlayerManager.getCurrentJzvd() != null) { + JZVideoPlayerManager.getCurrentJzvd().onSeekComplete(); + } + } + }); + } + + @Override + public boolean onError(MediaPlayer mp, final int what, final int extra) { + mainThreadHandler.post(new Runnable() { + @Override + public void run() { + if (JZVideoPlayerManager.getCurrentJzvd() != null) { + JZVideoPlayerManager.getCurrentJzvd().onError(what, extra); + } + } + }); + return true; + } + + @Override + public boolean onInfo(MediaPlayer mp, final int what, final int extra) { + mainThreadHandler.post(new Runnable() { + @Override + public void run() { + if (JZVideoPlayerManager.getCurrentJzvd() != null) { + JZVideoPlayerManager.getCurrentJzvd().onInfo(what, extra); + } + } + }); + return false; + } + + @Override + public void onVideoSizeChanged(MediaPlayer mp, int width, int height) { + currentVideoWidth = width; + currentVideoHeight = height; + mainThreadHandler.post(new Runnable() { + @Override + public void run() { + if (JZVideoPlayerManager.getCurrentJzvd() != null) { + JZVideoPlayerManager.getCurrentJzvd().onVideoSizeChanged(); + } + } + }); + } + + public class MediaHandler extends Handler { + public MediaHandler(Looper looper) { + super(looper); + } + + @Override + public void handleMessage(Message msg) { + super.handleMessage(msg); + switch (msg.what) { + case HANDLER_PREPARE: + try { + currentVideoWidth = 0; + currentVideoHeight = 0; + mediaPlayer.release(); + mediaPlayer = new MediaPlayer(); + mediaPlayer.setAudioStreamType(music_type); + mediaPlayer.setLooping(CURRENT_PLING_LOOP); + mediaPlayer.setOnPreparedListener(JZMediaManager.this); + mediaPlayer.setOnCompletionListener(JZMediaManager.this); + mediaPlayer.setOnBufferingUpdateListener(JZMediaManager.this); + mediaPlayer.setScreenOnWhilePlaying(true); + mediaPlayer.setOnSeekCompleteListener(JZMediaManager.this); + mediaPlayer.setOnErrorListener(JZMediaManager.this); + 
mediaPlayer.setOnInfoListener(JZMediaManager.this); + mediaPlayer.setOnVideoSizeChangedListener(JZMediaManager.this); + Class clazz = MediaPlayer.class; + Method method = clazz.getDeclaredMethod("setDataSource", String.class, Map.class); + if(CURRENT_PLAYING_URL.startsWith("http")) { + HttpProxyCacheServer proxy = getProxy(); + String proxyUrl = proxy.getProxyUrl(CURRENT_PLAYING_URL); + method.invoke(mediaPlayer, proxyUrl, MAP_HEADER_DATA); + }else { + method.invoke(mediaPlayer, CURRENT_PLAYING_URL, MAP_HEADER_DATA); + } + mediaPlayer.prepareAsync(); + if (surface != null) { + surface.release(); + } + surface = new Surface(savedSurfaceTexture); + mediaPlayer.setSurface(surface); + } catch (Exception e) { + e.printStackTrace(); + } + break; + case HANDLER_RELEASE: +// CURRENT_PLAYING_URL = null; +// CURRENT_PLING_LOOP = false; +// MAP_HEADER_DATA = null; + mediaPlayer.release(); + break; + } + } + } + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZResizeTextureView.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZResizeTextureView.java new file mode 100644 index 0000000..d906e86 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZResizeTextureView.java @@ -0,0 +1,136 @@ +package com.aserbao.androidcustomcamera.whole.jiaozivideo; + +import android.content.Context; +import android.graphics.Point; +import android.util.AttributeSet; +import android.util.Log; +import android.view.TextureView; + +/** + *

    参照Android系统的VideoView的onMeasure方法 + *
    注意!relativelayout中无法全屏,要嵌套一个linearlayout

    + *

    Referring Android system Video View of onMeasure method + *
    NOTE! Can not fullscreen relativelayout, to nest a linearlayout

    + * Created by Nathen + * On 2016/06/02 00:01 + */ +public class JZResizeTextureView extends TextureView { + protected static final String TAG = "JZResizeTextureView"; + + // x as width, y as height + protected Point mVideoSize; + + public JZResizeTextureView(Context context) { + super(context); + init(); + } + + public JZResizeTextureView(Context context, AttributeSet attrs) { + super(context, attrs); + init(); + } + + private void init() { + mVideoSize = new Point(0, 0); + } + + public void setVideoSize(Point videoSize) { + if (videoSize != null && !mVideoSize.equals(videoSize)) { + this.mVideoSize = videoSize; + requestLayout(); + } + } + + @Override + public void setRotation(float rotation) { + if (rotation != getRotation()) { + super.setRotation(rotation); + requestLayout(); + } + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + Log.i(TAG, "onMeasure " + " [" + this.hashCode() + "] "); + int viewRotation = (int) getRotation(); + /*WindowManager wm = (WindowManager) getContext() + .getSystemService(Context.WINDOW_SERVICE); + + int width1 = wm.getDefaultDisplay().getWidth(); + int height1 = wm.getDefaultDisplay().getHeight(); + int videoWidth = width1; + int videoHeight = height1;*/ + int videoWidth = mVideoSize.x; + int videoHeight = mVideoSize.y; + + Log.i(TAG, "videoWidth = " + videoWidth + ", " + "videoHeight = " + videoHeight); + Log.i(TAG, "viewRotation = " + viewRotation); + + // 如果判断成立,则说明显示的TextureView和本身的位置是有90度的旋转的,所以需要交换宽高参数。 + if (viewRotation == 90 || viewRotation == 270) { + int tempMeasureSpec = widthMeasureSpec; + widthMeasureSpec = heightMeasureSpec; + heightMeasureSpec = tempMeasureSpec; + } + + int width = getDefaultSize(videoWidth, widthMeasureSpec); + int height = getDefaultSize(videoHeight, heightMeasureSpec); + if (videoWidth > 0 && videoHeight > 0) { + + int widthSpecMode = MeasureSpec.getMode(widthMeasureSpec); + int widthSpecSize = MeasureSpec.getSize(widthMeasureSpec); + int 
heightSpecMode = MeasureSpec.getMode(heightMeasureSpec); + int heightSpecSize = MeasureSpec.getSize(heightMeasureSpec); + + Log.i(TAG, "widthMeasureSpec [" + MeasureSpec.toString(widthMeasureSpec) + "]"); + Log.i(TAG, "heightMeasureSpec [" + MeasureSpec.toString(heightMeasureSpec) + "]"); + + if (widthSpecMode == MeasureSpec.EXACTLY && heightSpecMode == MeasureSpec.EXACTLY) { + // the size is fixed + width = widthSpecSize; + height = heightSpecSize; + // for compatibility, we adjust size based on aspect ratio + if (videoWidth * height < width * videoHeight) { + width = height * videoWidth / videoHeight; + } else if (videoWidth * height > width * videoHeight) { + height = width * videoHeight / videoWidth; + } + } else if (widthSpecMode == MeasureSpec.EXACTLY) { + // only the width is fixed, adjust the height to match aspect ratio if possible + width = widthSpecSize; + height = width * videoHeight / videoWidth; + if (heightSpecMode == MeasureSpec.AT_MOST && height > heightSpecSize) { + // couldn't match aspect ratio within the constraints + height = heightSpecSize; + width = height * videoWidth / videoHeight; + } + } else if (heightSpecMode == MeasureSpec.EXACTLY) { + // only the height is fixed, adjust the width to match aspect ratio if possible + height = heightSpecSize; + width = height * videoWidth / videoHeight; + if (widthSpecMode == MeasureSpec.AT_MOST && width > widthSpecSize) { + // couldn't match aspect ratio within the constraints + width = widthSpecSize; + height = width * videoHeight / videoWidth; + } + } else { + // neither the width nor the height are fixed, try to use actual video size + width = videoWidth; + height = videoHeight; + if (heightSpecMode == MeasureSpec.AT_MOST && height > heightSpecSize) { + // too tall, decrease both width and height + height = heightSpecSize; + width = height * videoWidth / videoHeight; + } + if (widthSpecMode == MeasureSpec.AT_MOST && width > widthSpecSize) { + // too wide, decrease both width and height + width = 
widthSpecSize; + height = width * videoHeight / videoWidth; + } + } + } else { + // no size yet, just adopt the given spec sizes + } + setMeasuredDimension(width+60 , height + 100); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZUserAction.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZUserAction.java new file mode 100644 index 0000000..91724f1 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZUserAction.java @@ -0,0 +1,29 @@ +package com.aserbao.androidcustomcamera.whole.jiaozivideo; + +/** + * Created by Nathen + * On 2016/04/04 22:13 + */ +public interface JZUserAction { + + int ON_CLICK_START_ICON = 0; + int ON_CLICK_START_ERROR = 1; + int ON_CLICK_START_AUTO_COMPLETE = 2; + + int ON_CLICK_PAUSE = 3; + int ON_CLICK_RESUME = 4; + int ON_SEEK_POSITION = 5; + int ON_AUTO_COMPLETE = 6; + + int ON_ENTER_FULLSCREEN = 7; + int ON_QUIT_FULLSCREEN = 8; + int ON_ENTER_TINYSCREEN = 9; + int ON_QUIT_TINYSCREEN = 10; + + + int ON_TOUCH_SCREEN_SEEK_VOLUME = 11; + int ON_TOUCH_SCREEN_SEEK_POSITION = 12; + + void onEvent(int type, String url, int screen, Object... 
objects); + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZUserActionStandard.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZUserActionStandard.java new file mode 100644 index 0000000..48b514a --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZUserActionStandard.java @@ -0,0 +1,12 @@ +package com.aserbao.androidcustomcamera.whole.jiaozivideo; + +/** + * Created by Nathen + * On 2016/04/26 20:53 + */ +public interface JZUserActionStandard extends JZUserAction { + + int ON_CLICK_START_THUMB = 101; + int ON_CLICK_BLANK = 102; + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZUtils.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZUtils.java new file mode 100644 index 0000000..cba038a --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZUtils.java @@ -0,0 +1,187 @@ +package com.aserbao.androidcustomcamera.whole.jiaozivideo; + +import android.app.Activity; +import android.content.Context; +import android.content.ContextWrapper; +import android.content.SharedPreferences; +import android.net.ConnectivityManager; +import android.net.NetworkInfo; +import android.support.v7.app.AppCompatActivity; +import android.support.v7.view.ContextThemeWrapper; +import android.text.TextUtils; +import android.util.Log; +import android.view.Window; + +import java.util.Formatter; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.Locale; + + +/** + * Created by Nathen + * On 2016/02/21 12:25 + */ +public class JZUtils { + public static final String TAG = "JiaoZiVideoPlayer"; + + public static String stringForTime(int timeMs) { + if (timeMs <= 0 || timeMs >= 24 * 60 * 60 * 1000) { + return "00:00"; + } + int totalSeconds = timeMs / 1000; + int seconds = totalSeconds % 60; + int minutes = (totalSeconds / 60) % 60; + int hours = totalSeconds / 3600; + 
StringBuilder stringBuilder = new StringBuilder(); + Formatter mFormatter = new Formatter(stringBuilder, Locale.getDefault()); + if (hours > 0) { + return mFormatter.format("%d:%02d:%02d", hours, minutes, seconds).toString(); + } else { + return mFormatter.format("%02d:%02d", minutes, seconds).toString(); + } + } + + /** + * This method requires the caller to hold the permission ACCESS_NETWORK_STATE. + * + * @param context context + * @return if wifi is connected,return true + */ + public static boolean isWifiConnected(Context context) { + ConnectivityManager connectivityManager = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE); + NetworkInfo networkInfo = connectivityManager.getActiveNetworkInfo(); + return networkInfo != null && networkInfo.getType() == ConnectivityManager.TYPE_WIFI; + } + + /** + * Get activity from context object + * + * @param context context + * @return object of Activity or null if it is not Activity + */ + public static Activity scanForActivity(Context context) { + if (context == null) return null; + + if (context instanceof Activity) { + return (Activity) context; + } else if (context instanceof ContextWrapper) { + return scanForActivity(((ContextWrapper) context).getBaseContext()); + } + + return null; + } + + /** + * Get AppCompatActivity from context + * + * @param context context + * @return AppCompatActivity if it's not null + */ + public static AppCompatActivity getAppCompActivity(Context context) { + if (context == null) return null; + if (context instanceof AppCompatActivity) { + return (AppCompatActivity) context; + } else if (context instanceof ContextThemeWrapper) { + return getAppCompActivity(((ContextThemeWrapper) context).getBaseContext()); + } + return null; + } + + public static void setRequestedOrientation(Context context, int orientation) { + if (JZUtils.getAppCompActivity(context) != null) { + JZUtils.getAppCompActivity(context).setRequestedOrientation( + orientation); + } else { + 
JZUtils.scanForActivity(context).setRequestedOrientation( + orientation); + } + } + + public static Window getWindow(Context context) { + if (JZUtils.getAppCompActivity(context) != null) { + return JZUtils.getAppCompActivity(context).getWindow(); + } else { + return JZUtils.scanForActivity(context).getWindow(); + } + } + + public static int dip2px(Context context, float dpValue) { + final float scale = context.getResources().getDisplayMetrics().density; + return (int) (dpValue * scale + 0.5f); + } + + public static void saveProgress(Context context, String url, int progress) { + if (!JZVideoPlayer.SAVE_PROGRESS) return; + Log.i(TAG, "saveProgress: " + progress); + SharedPreferences spn = context.getSharedPreferences("JZVD_PROGRESS", + Context.MODE_PRIVATE); + SharedPreferences.Editor editor = spn.edit(); + editor.putInt(url, progress); + editor.apply(); + } + + public static int getSavedProgress(Context context, String url) { + if (!JZVideoPlayer.SAVE_PROGRESS) return 0; + SharedPreferences spn; + spn = context.getSharedPreferences("JZVD_PROGRESS", + Context.MODE_PRIVATE); + return spn.getInt(url, 0); + } + + /** + * if url == null, clear all progress + * + * @param context context + * @param url if url!=null clear this url progress + */ + public static void clearSavedProgress(Context context, String url) { + if (TextUtils.isEmpty(url)) { + SharedPreferences spn = context.getSharedPreferences("JZVD_PROGRESS", + Context.MODE_PRIVATE); + spn.edit().clear().apply(); + } else { + SharedPreferences spn = context.getSharedPreferences("JZVD_PROGRESS", + Context.MODE_PRIVATE); + spn.edit().putInt(url, 0).apply(); + } + } + + public static String getCurrentUrlFromMap(LinkedHashMap map, int index) { + if (map != null) { + if (map.size() == 1) { + return getValueFromLinkedMap(map, index); + } else { + return getValueFromLinkedMap(map, index); + } + } +// return VideoErrorUrl; + return 
"http://jzvd.nathen.cn/c6e3dc12a1154626b3476d9bf3bd7266/6b56c5f0dc31428083757a45764763b0-5287d2089db37e62345123a1be272f8b.mp4"; + } + + public static String getValueFromLinkedMap(LinkedHashMap map, int index) { + int currentIndex = 0; + for (Iterator it = map.keySet().iterator(); it.hasNext(); ) { + Object key = it.next(); + if (currentIndex == index) { + return map.get(key); + } + currentIndex++; + } + return null; + } + + public static String getKeyFromLinkedMap(LinkedHashMap map, int index) { + int currentIndex = 0; + if (map != null && map.keySet() != null) { + for (Iterator it = map.keySet().iterator(); it.hasNext(); ) { + Object key = it.next(); + if (currentIndex == index) { + return key.toString(); + } + currentIndex++; + } + } + return null; + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZVideoPlayer.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZVideoPlayer.java new file mode 100644 index 0000000..05dc344 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZVideoPlayer.java @@ -0,0 +1,1178 @@ +package com.aserbao.androidcustomcamera.whole.jiaozivideo; + +import android.content.Context; +import android.content.pm.ActivityInfo; +import android.hardware.Sensor; +import android.hardware.SensorEvent; +import android.hardware.SensorEventListener; +import android.hardware.SensorManager; +import android.media.AudioManager; +import android.media.MediaPlayer; +import android.os.Handler; +import android.provider.Settings; +import android.support.v7.app.ActionBar; +import android.support.v7.app.AppCompatActivity; +import android.text.TextUtils; +import android.util.AttributeSet; +import android.util.Log; +import android.view.Gravity; +import android.view.MotionEvent; +import android.view.View; +import android.view.ViewGroup; +import android.view.Window; +import android.view.WindowManager; +import android.widget.AbsListView; +import android.widget.FrameLayout; 
+import android.widget.ImageView; +import android.widget.SeekBar; +import android.widget.TextView; +import android.widget.Toast; + + +import com.aserbao.androidcustomcamera.R; + +import java.lang.reflect.Constructor; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Timer; +import java.util.TimerTask; + +/** + * Created by Nathen on 16/7/30. + */ +public abstract class JZVideoPlayer extends FrameLayout implements View.OnClickListener, SeekBar.OnSeekBarChangeListener, View.OnTouchListener { + + public static final String TAG = "JiaoZiVideoPlayer"; + public static final int THRESHOLD = 80; + public static final int FULL_SCREEN_NORMAL_DELAY = 3000; + + public static final int SCREEN_LAYOUT_NORMAL = 0; + public static final int SCREEN_LAYOUT_LIST = 1; + public static final int SCREEN_WINDOW_FULLSCREEN = 2; + public static final int SCREEN_WINDOW_TINY = 3; + + public static final int CURRENT_STATE_NORMAL = 0; + public static final int CURRENT_STATE_PREPARING = 1; + public static final int CURRENT_STATE_PREPARING_CHANGING_URL = 2; + public static final int CURRENT_STATE_PLAYING = 3; + public static final int CURRENT_STATE_PAUSE = 5; + public static final int CURRENT_STATE_AUTO_COMPLETE = 6; + public static final int CURRENT_STATE_ERROR = 7; + + public static final String URL_KEY_DEFAULT = "URL_KEY_DEFAULT"; + public static boolean ACTION_BAR_EXIST = true; + public static boolean TOOL_BAR_EXIST = true; + public static int FULLSCREEN_ORIENTATION = ActivityInfo.SCREEN_ORIENTATION_SENSOR; + public static int NORMAL_ORIENTATION = ActivityInfo.SCREEN_ORIENTATION_PORTRAIT; + public static boolean SAVE_PROGRESS = true; + public static boolean WIFI_TIP_DIALOG_SHOWED = false; + public static long CLICK_QUIT_FULLSCREEN_TIME = 0; + public static long lastAutoFullscreenTime = 0; + public static AudioManager.OnAudioFocusChangeListener onAudioFocusChangeListener = new AudioManager.OnAudioFocusChangeListener() {//是否新建个class,代码更规矩,并且变量的位置也很尴尬 + @Override + public 
void onAudioFocusChange(int focusChange) { + switch (focusChange) { + case AudioManager.AUDIOFOCUS_GAIN: + break; + case AudioManager.AUDIOFOCUS_LOSS: + releaseAllVideos(); + Log.e("Atest", "releaseAllVideos onAudioFocusChange: JZVideoPlayer"); + Log.e(TAG, "AUDIOFOCUS_LOSS [" + this.hashCode() + "]"); + break; + case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT: + try { + if (JZMediaManager.instance().mediaPlayer != null && + JZMediaManager.instance().mediaPlayer.isPlaying()) { + JZMediaManager.instance().mediaPlayer.pause(); + } + } catch (IllegalStateException e) { + e.printStackTrace(); + } + Log.e(TAG, "AUDIOFOCUS_LOSS_TRANSIENT [" + this.hashCode() + "]"); + break; + case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK: + break; + } + } + }; + protected static JZUserAction JZ_USER_EVENT; + protected static Timer UPDATE_PROGRESS_TIMER; + public int currentState = -1; + public int currentScreen = -1; + public boolean loop = true;//是否重复播放 + public Map headData; + // public String url = ""; + public Object[] objects = null; + public int seekToInAdvance = 0; + public ImageView startButton; + public SeekBar progressBar; + public ImageView fullscreenButton; + public TextView currentTimeTextView, totalTimeTextView; + public ViewGroup textureViewContainer; + public ViewGroup topContainer, bottomContainer; + public int widthRatio = 0; + public int heightRatio = 0; + protected boolean isVideoRendingStart = false; + protected int mScreenWidth; + protected int mScreenHeight; + protected AudioManager mAudioManager; + protected Handler mHandler; + protected ProgressTimerTask mProgressTimerTask; + protected boolean mTouchingProgressBar; + protected float mDownX; + protected float mDownY; + protected boolean mChangeVolume; + protected boolean mChangePosition; + protected boolean mChangeBrightness; + protected int mGestureDownPosition; + protected int mGestureDownVolume; + protected float mGestureDownBrightness; + protected int mSeekTimePosition; + public LinkedHashMap urlMap; + 
public int currentUrlMapIndex = 0; + public int positionInList = -1; + + public JZVideoPlayer(Context context) { + super(context); + init(context); + } + + public JZVideoPlayer(Context context, AttributeSet attrs) { + super(context, attrs); + init(context); + } + + public static void releaseAllVideos() { + if ((System.currentTimeMillis() - CLICK_QUIT_FULLSCREEN_TIME) > FULL_SCREEN_NORMAL_DELAY) { +// Log.e("Atest", "releaseAllVideos"); + JZVideoPlayerManager.completeAll(); + JZMediaManager.instance().positionInList = -1; + JZMediaManager.instance().releaseMediaPlayer(); + } + } + + public static void startFullscreen(Context context, Class _class, String url, Object... objects) { + LinkedHashMap map = new LinkedHashMap(); + map.put(URL_KEY_DEFAULT, url); + startFullscreen(context, _class, map, 0, objects); + } + + public static void startFullscreen(Context context, Class _class, LinkedHashMap urlMap, int defaultUrlMapIndex, Object... objects) { + hideSupportActionBar(context); + JZUtils.setRequestedOrientation(context, FULLSCREEN_ORIENTATION); + ViewGroup vp = (ViewGroup) (JZUtils.scanForActivity(context))//.getWindow().getDecorView(); + .findViewById(Window.ID_ANDROID_CONTENT); + View old = vp.findViewById(R.id.jz_fullscreen_id); + if (old != null) { + vp.removeView(old); + } + try { + Constructor constructor = _class.getConstructor(Context.class); + final JZVideoPlayer jzVideoPlayer = constructor.newInstance(context); + jzVideoPlayer.setId(R.id.jz_fullscreen_id); + LayoutParams lp = new LayoutParams( + ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT); + vp.addView(jzVideoPlayer, lp); +// final Animation ra = AnimationUtils.loadAnimation(context, R.anim.start_fullscreen); +// jzVideoPlayer.setAnimation(ra); + jzVideoPlayer.setUp(urlMap, defaultUrlMapIndex, JZVideoPlayerStandard.SCREEN_WINDOW_FULLSCREEN, objects); + CLICK_QUIT_FULLSCREEN_TIME = System.currentTimeMillis(); + jzVideoPlayer.startButton.performClick(); + } catch 
(InstantiationException e) { + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + } + + public String getCurrentUrl() { + return JZUtils.getCurrentUrlFromMap(urlMap, currentUrlMapIndex); + } + + public static boolean backPress() { + Log.e(TAG, "backPress"); + if ((System.currentTimeMillis() - CLICK_QUIT_FULLSCREEN_TIME) < FULL_SCREEN_NORMAL_DELAY) + return false; + if (JZVideoPlayerManager.getSecondFloor() != null) { + CLICK_QUIT_FULLSCREEN_TIME = System.currentTimeMillis(); + if (JZVideoPlayerManager.getFirstFloor().getCurrentUrl().equals(JZMediaManager.CURRENT_PLAYING_URL)) { + JZVideoPlayer jzVideoPlayer = JZVideoPlayerManager.getSecondFloor(); + jzVideoPlayer.onEvent(jzVideoPlayer.currentScreen == JZVideoPlayerStandard.SCREEN_WINDOW_FULLSCREEN ? + JZUserAction.ON_QUIT_FULLSCREEN : + JZUserAction.ON_QUIT_TINYSCREEN); + JZVideoPlayerManager.getFirstFloor().playOnThisJzvd(); + } else { + //直接退出全屏和小窗 + JZVideoPlayerManager.getCurrentJzvd().currentState = CURRENT_STATE_NORMAL; + JZVideoPlayerManager.getFirstFloor().clearFloatScreen(); + JZMediaManager.instance().releaseMediaPlayer(); + JZVideoPlayerManager.setFirstFloor(null); + } + return true; + } else if (JZVideoPlayerManager.getFirstFloor() != null && + (JZVideoPlayerManager.getFirstFloor().currentScreen == SCREEN_WINDOW_FULLSCREEN || + JZVideoPlayerManager.getFirstFloor().currentScreen == SCREEN_WINDOW_TINY)) {//以前我总想把这两个判断写到一起,这分明是两个独立是逻辑 + CLICK_QUIT_FULLSCREEN_TIME = System.currentTimeMillis(); + //直接退出全屏和小窗 + JZVideoPlayerManager.getCurrentJzvd().currentState = CURRENT_STATE_NORMAL; + JZVideoPlayerManager.getFirstFloor().clearFloatScreen(); + JZMediaManager.instance().releaseMediaPlayer(); + JZVideoPlayerManager.setFirstFloor(null); + return true; + } + return false; + } + + public static void showSupportActionBar(Context context) { + if (ACTION_BAR_EXIST && JZUtils.getAppCompActivity(context) != null) { + ActionBar ab = JZUtils.getAppCompActivity(context).getSupportActionBar(); + 
if (ab != null) { + ab.setShowHideAnimationEnabled(false); + ab.show(); + } + } + if (TOOL_BAR_EXIST) { + JZUtils.getWindow(context).clearFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN); + } + } + + public static void hideSupportActionBar(Context context) { + if (ACTION_BAR_EXIST && JZUtils.getAppCompActivity(context) != null) { + ActionBar ab = JZUtils.getAppCompActivity(context).getSupportActionBar(); + if (ab != null) { + ab.setShowHideAnimationEnabled(false); + ab.hide(); + } + } + if (TOOL_BAR_EXIST) { + JZUtils.getWindow(context).setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, + WindowManager.LayoutParams.FLAG_FULLSCREEN); + } + } + + public static void clearSavedProgress(Context context, String url) { + JZUtils.clearSavedProgress(context, url); + } + + public static void setJzUserAction(JZUserAction jzUserEvent) { + JZ_USER_EVENT = jzUserEvent; + } + + public static void goOnPlayOnResume() { + if (JZVideoPlayerManager.getCurrentJzvd() != null) { + JZVideoPlayer jzvd = JZVideoPlayerManager.getCurrentJzvd(); + if (jzvd.currentState == JZVideoPlayer.CURRENT_STATE_PAUSE) { + jzvd.onStatePlaying(); + JZMediaManager.instance().mediaPlayer.start(); + } + } + } + + public static void goOnPlayOnPause() { + if (JZVideoPlayerManager.getCurrentJzvd() != null) { + JZVideoPlayer jzvd = JZVideoPlayerManager.getCurrentJzvd(); + if (jzvd.currentState == JZVideoPlayer.CURRENT_STATE_AUTO_COMPLETE || + jzvd.currentState == JZVideoPlayer.CURRENT_STATE_NORMAL) { +// JZVideoPlayer.releaseAllVideos(); + } else { + jzvd.onStatePause(); + JZMediaManager.instance().mediaPlayer.pause(); + } + } + } + + public abstract int getLayoutId(); + + public void init(Context context) { + View.inflate(context, getLayoutId(), this); + startButton = (ImageView) findViewById(R.id.start); + fullscreenButton = (ImageView) findViewById(R.id.fullscreen); + progressBar = (SeekBar) findViewById(R.id.bottom_seek_progress); + currentTimeTextView = (TextView) findViewById(R.id.current); + 
totalTimeTextView = (TextView) findViewById(R.id.total); + bottomContainer = (ViewGroup) findViewById(R.id.layout_bottom); + textureViewContainer = (ViewGroup) findViewById(R.id.surface_container); + topContainer = (ViewGroup) findViewById(R.id.layout_top); + +// startButton.setOnClickListener(this); + fullscreenButton.setOnClickListener(this); + progressBar.setOnSeekBarChangeListener(this); + bottomContainer.setOnClickListener(this); + textureViewContainer.setOnClickListener(this); + textureViewContainer.setOnTouchListener(this); + + mScreenWidth = getContext().getResources().getDisplayMetrics().widthPixels; + mScreenHeight = getContext().getResources().getDisplayMetrics().heightPixels; + mAudioManager = (AudioManager) getContext().getSystemService(Context.AUDIO_SERVICE); + mHandler = new Handler(); + + try { + if (isCurrentPlay()) { + NORMAL_ORIENTATION = ((AppCompatActivity) context).getRequestedOrientation(); + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + public void setUp(String url, int screen, Object... objects) { + LinkedHashMap map = new LinkedHashMap(); + map.put(URL_KEY_DEFAULT, url); + setUp(map, 0, screen, objects); + } + + public void setUp(LinkedHashMap urlMap, int defaultUrlMapIndex, int screen, Object... 
objects) { + if (this.urlMap != null && !TextUtils.isEmpty(JZUtils.getCurrentUrlFromMap(urlMap, currentUrlMapIndex)) && + TextUtils.equals(JZUtils.getCurrentUrlFromMap(this.urlMap, currentUrlMapIndex), JZUtils.getCurrentUrlFromMap(urlMap, currentUrlMapIndex))) { + return; + } + //setUp的几种情况 + if (isCurrentJZVD() && urlMap.containsValue(JZMediaManager.CURRENT_PLAYING_URL)) {//即使也是 + //滑出屏幕记录位置 -- 这个应该在releaseAllVideos之前 是也不是的分类 + int position = 0; + try { + position = JZMediaManager.instance().mediaPlayer.getCurrentPosition(); + } catch (IllegalStateException e) { + e.printStackTrace(); + } + if (position != 0) { + JZUtils.saveProgress(getContext(), JZMediaManager.CURRENT_PLAYING_URL, position); + } + JZMediaManager.instance().releaseMediaPlayer(); + } else if (isCurrentJZVD() && !urlMap.containsValue(JZMediaManager.CURRENT_PLAYING_URL)) {//是也不是 + Log.e("jzvd", "setUp: 列表复用");//要么releaseAllVideos,要么进入小窗 +// JZVideoPlayer.releaseAllVideos(); + startWindowTiny(); + } else if (!isCurrentJZVD() && urlMap.containsValue(JZMediaManager.CURRENT_PLAYING_URL)) {//不是也是 进入全屏或者需要退出小窗 + Log.e("jzvd", "setUp: 列表复用 不是也是");//进入小窗或者全屏了,但是下面的判断进不去 + if (JZVideoPlayerManager.getCurrentJzvd() != null && + JZVideoPlayerManager.getCurrentJzvd().currentScreen == JZVideoPlayer.SCREEN_WINDOW_TINY) { + //需要退出小窗退到我这里,我这里是第一层级 + tmp_test_back = true; + Log.e("jzvd", "setUp: tmp_test_back=true"); + } + } else if (!isCurrentJZVD() && !urlMap.containsValue(JZMediaManager.CURRENT_PLAYING_URL)) {//都不是 + + } + this.urlMap = urlMap; + this.currentUrlMapIndex = defaultUrlMapIndex; + this.currentScreen = screen; + this.objects = objects; + this.headData = null; + isVideoRendingStart = false; + onStateNormal(); + + } + + boolean tmp_test_back = false; + + @Override + public void onClick(View v) { + int i = v.getId(); + if (i == R.id.start) { + Log.e(TAG, "onClick start [" + this.hashCode() + "] "); + if (urlMap == null || TextUtils.isEmpty(JZUtils.getCurrentUrlFromMap(urlMap, currentUrlMapIndex))) { + 
Toast.makeText(getContext(), "No url", Toast.LENGTH_LONG).show(); + return; + } + if (currentState == CURRENT_STATE_NORMAL || currentState == CURRENT_STATE_ERROR) { + if (!JZUtils.getCurrentUrlFromMap(urlMap, currentUrlMapIndex).startsWith("file") && ! + JZUtils.getCurrentUrlFromMap(urlMap, currentUrlMapIndex).startsWith("/") && + !JZUtils.isWifiConnected(getContext()) && !WIFI_TIP_DIALOG_SHOWED) { + showWifiDialog(JZUserAction.ON_CLICK_START_ICON); + return; + } + startVideo(); + onEvent(currentState != CURRENT_STATE_ERROR ? JZUserAction.ON_CLICK_START_ICON : JZUserAction.ON_CLICK_START_ERROR); + } else if (currentState == CURRENT_STATE_PLAYING) { + onEvent(JZUserAction.ON_CLICK_PAUSE); + Log.e(TAG, "pauseVideo [" + this.hashCode() + "] "); + JZMediaManager.instance().mediaPlayer.pause(); + onStatePause(); + } else if (currentState == CURRENT_STATE_PAUSE) { + onEvent(JZUserAction.ON_CLICK_RESUME); + JZMediaManager.instance().mediaPlayer.start(); + onStatePlaying(); + } else if (currentState == CURRENT_STATE_AUTO_COMPLETE) { + onEvent(JZUserAction.ON_CLICK_START_AUTO_COMPLETE); + startVideo(); + } + } else if (i == R.id.fullscreen) { + Log.e(TAG, "onClick fullscreen [" + this.hashCode() + "] "); + if (currentState == CURRENT_STATE_AUTO_COMPLETE) return; + if (currentScreen == SCREEN_WINDOW_FULLSCREEN) { + //quit fullscreen + backPress(); + } else { + Log.e(TAG, "toFullscreenActivity [" + this.hashCode() + "] "); + onEvent(JZUserAction.ON_ENTER_FULLSCREEN); + startWindowFullscreen(); + } + } else if (i == R.id.surface_container && currentState == CURRENT_STATE_ERROR) { + Log.e(TAG, "onClick surfaceContainer State=Error [" + this.hashCode() + "] "); + startVideo(); + } + } + public void handlerOnPause(){} + public void handlerOnStart(){}; + @Override + public boolean onTouch(View v, MotionEvent event) { + float x = event.getX(); + float y = event.getY(); + int id = v.getId(); + if (id == R.id.surface_container) { + switch (event.getAction()) { + case 
MotionEvent.ACTION_DOWN: + Log.e(TAG, "onTouch surfaceContainer actionDown [" + this.hashCode() + "] "); + mTouchingProgressBar = true; + mDownX = x; + mDownY = y; + mChangeVolume = false; + mChangePosition = false; + mChangeBrightness = false; + break; + case MotionEvent.ACTION_MOVE: + Log.e(TAG, "onTouch surfaceContainer actionMove [" + this.hashCode() + "] "); + float deltaX = x - mDownX; + float deltaY = y - mDownY; + float absDeltaX = Math.abs(deltaX); + float absDeltaY = Math.abs(deltaY); + if (currentScreen == SCREEN_WINDOW_FULLSCREEN) { + if (!mChangePosition && !mChangeVolume && !mChangeBrightness) { + if (absDeltaX > THRESHOLD || absDeltaY > THRESHOLD) { + cancelProgressTimer(); + if (absDeltaX >= THRESHOLD) { + // 全屏模式下的CURRENT_STATE_ERROR状态下,不响应进度拖动事件. + // 否则会因为mediaplayer的状态非法导致App Crash + if (currentState != CURRENT_STATE_ERROR) { + mChangePosition = true; + mGestureDownPosition = getCurrentPositionWhenPlaying(); + } + } else { + //如果y轴滑动距离超过设置的处理范围,那么进行滑动事件处理 + if (mDownX < mScreenWidth * 0.5f) {//左侧改变亮度 + mChangeBrightness = true; + WindowManager.LayoutParams lp = JZUtils.getWindow(getContext()).getAttributes(); + if (lp.screenBrightness < 0) { + try { + mGestureDownBrightness = Settings.System.getInt(getContext().getContentResolver(), Settings.System.SCREEN_BRIGHTNESS); + Log.e(TAG, "current system brightness: " + mGestureDownBrightness); + } catch (Settings.SettingNotFoundException e) { + e.printStackTrace(); + } + } else { + mGestureDownBrightness = lp.screenBrightness * 255; + Log.e(TAG, "current activity brightness: " + mGestureDownBrightness); + } + } else {//右侧改变声音 + mChangeVolume = true; + mGestureDownVolume = mAudioManager.getStreamVolume(AudioManager.STREAM_MUSIC); + } + } + } + } + } + if (mChangePosition) { + int totalTimeDuration = getDuration(); + mSeekTimePosition = (int) (mGestureDownPosition + deltaX * totalTimeDuration / mScreenWidth); + if (mSeekTimePosition > totalTimeDuration) + mSeekTimePosition = totalTimeDuration; + String 
seekTime = JZUtils.stringForTime(mSeekTimePosition); + String totalTime = JZUtils.stringForTime(totalTimeDuration); + + showProgressDialog(deltaX, seekTime, mSeekTimePosition, totalTime, totalTimeDuration); + } + if (mChangeVolume) { + deltaY = -deltaY; + int max = mAudioManager.getStreamMaxVolume(AudioManager.STREAM_MUSIC); + int deltaV = (int) (max * deltaY * 3 / mScreenHeight); + mAudioManager.setStreamVolume(AudioManager.STREAM_MUSIC, mGestureDownVolume + deltaV, 0); + //dialog中显示百分比 + int volumePercent = (int) (mGestureDownVolume * 100 / max + deltaY * 3 * 100 / mScreenHeight); + showVolumeDialog(-deltaY, volumePercent); + } + + if (mChangeBrightness) { + deltaY = -deltaY; + int deltaV = (int) (255 * deltaY * 3 / mScreenHeight); + WindowManager.LayoutParams params = JZUtils.getWindow(getContext()).getAttributes(); + if (((mGestureDownBrightness + deltaV) / 255) >= 1) {//这和声音有区别,必须自己过滤一下负值 + params.screenBrightness = 1; + } else if (((mGestureDownBrightness + deltaV) / 255) <= 0) { + params.screenBrightness = 0.01f; + } else { + params.screenBrightness = (mGestureDownBrightness + deltaV) / 255; + } + JZUtils.getWindow(getContext()).setAttributes(params); + //dialog中显示百分比 + int brightnessPercent = (int) (mGestureDownBrightness * 100 / 255 + deltaY * 3 * 100 / mScreenHeight); + showBrightnessDialog(brightnessPercent); +// mDownY = y; + } + break; + case MotionEvent.ACTION_UP: + Log.e(TAG, "onTouch surfaceContainer actionUp [" + this.hashCode() + "] "); + mTouchingProgressBar = false; + dismissProgressDialog(); + dismissVolumeDialog(); + dismissBrightnessDialog(); + if (mChangePosition) { + onEvent(JZUserAction.ON_TOUCH_SCREEN_SEEK_POSITION); + JZMediaManager.instance().mediaPlayer.seekTo(mSeekTimePosition); + int duration = getDuration(); + int progress = mSeekTimePosition * 100 / (duration == 0 ? 
1 : duration); + progressBar.setProgress(progress); + } + if (mChangeVolume) { + onEvent(JZUserAction.ON_TOUCH_SCREEN_SEEK_VOLUME); + } + startProgressTimer(); + break; + } + } + return false; + } + public boolean isNeedVoice = false; + public void setNeedVoice(boolean isNeed){ + isNeedVoice = isNeed; + } + public void setNeedLoop(boolean l){//是否需要循环播发 + loop = l; + } + public void startVideo() { + JZMediaManager.instance().setIsNeedVoice(isNeedVoice); + JZVideoPlayerManager.completeAll(); + Log.e(TAG, "startVideo [" + this.hashCode() + "] "); + initTextureView(); + addTextureView(); + AudioManager mAudioManager = (AudioManager) getContext().getSystemService(Context.AUDIO_SERVICE); + mAudioManager.requestAudioFocus(onAudioFocusChangeListener, AudioManager.STREAM_MUSIC, AudioManager.AUDIOFOCUS_GAIN_TRANSIENT); + JZUtils.scanForActivity(getContext()).getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); + JZMediaManager.CURRENT_PLAYING_URL = JZUtils.getCurrentUrlFromMap(urlMap, currentUrlMapIndex); + JZMediaManager.CURRENT_PLING_LOOP = loop; + JZMediaManager.MAP_HEADER_DATA = headData; + onStatePreparing(); + JZVideoPlayerManager.setFirstFloor(this); + JZMediaManager.instance().positionInList = positionInList; + } + + + public void onPrepared() { + Log.e(TAG, "onPrepared " + " [" + this.hashCode() + "] "); + if (JZUtils.getCurrentUrlFromMap(urlMap, currentUrlMapIndex).toLowerCase().contains("mp3")) { + onVideoRendingStart(); + } + } + + public void onVideoRendingStart() { + Log.e(TAG, "onVideoRendingStart " + " [" + this.hashCode() + "] "); + isVideoRendingStart = true; + if (currentState != CURRENT_STATE_PREPARING && currentState != CURRENT_STATE_PREPARING_CHANGING_URL) + return; + if (seekToInAdvance != 0) { + JZMediaManager.instance().mediaPlayer.seekTo(seekToInAdvance); + seekToInAdvance = 0; + } else { + try { + int position = JZUtils.getSavedProgress(getContext(), JZUtils.getCurrentUrlFromMap(urlMap, currentUrlMapIndex)); + if (position != 0) { 
+ JZMediaManager.instance().mediaPlayer.seekTo(position); + } + } catch (Exception e) { + e.printStackTrace(); + } + } + startProgressTimer(); + onStatePlaying(); + } + + public void setState(int state) { + setState(state, 0, 0); + } + + public void setState(int state, int urlMapIndex, int seekToInAdvance) { + switch (state) { + case CURRENT_STATE_NORMAL: + onStateNormal(); + break; + case CURRENT_STATE_PREPARING: + onStatePreparing(); + break; + case CURRENT_STATE_PREPARING_CHANGING_URL: + onStatePreparingChangingUrl(urlMapIndex, seekToInAdvance); + break; + case CURRENT_STATE_PLAYING: + onStatePlaying(); + break; + case CURRENT_STATE_PAUSE: + onStatePause(); + break; + case CURRENT_STATE_ERROR: + onStateError(); + break; + case CURRENT_STATE_AUTO_COMPLETE: + onStateAutoComplete(); + break; + } + } + + public void onStateNormal() { + Log.e(TAG, "onStateNormal " + " [" + this.hashCode() + "] "); + currentState = CURRENT_STATE_NORMAL; + cancelProgressTimer(); + } + + public void onStatePreparing() { + Log.e(TAG, "onStatePreparing " + " [" + this.hashCode() + "] "); + currentState = CURRENT_STATE_PREPARING; + resetProgressAndTime(); + } + + public void onStatePreparingChangingUrl(int urlMapIndex, int seekToInAdvance) { + currentState = CURRENT_STATE_PREPARING_CHANGING_URL; + this.currentUrlMapIndex = urlMapIndex; + this.seekToInAdvance = seekToInAdvance; + JZMediaManager.CURRENT_PLAYING_URL = JZUtils.getCurrentUrlFromMap(urlMap, this.currentUrlMapIndex); + JZMediaManager.CURRENT_PLING_LOOP = this.loop; + JZMediaManager.MAP_HEADER_DATA = this.headData; + JZMediaManager.instance().prepare(); + } + + public void onStatePlaying() { + Log.e(TAG, "onStatePlaying " + " [" + this.hashCode() + "] "); + currentState = CURRENT_STATE_PLAYING; + startProgressTimer(); + } + + public void onStatePause() { + Log.e(TAG, "onStatePause " + " [" + this.hashCode() + "] "); + currentState = CURRENT_STATE_PAUSE; + startProgressTimer(); + } + + public void onStateError() { + Log.e(TAG, 
"onStateError " + " [" + this.hashCode() + "] "); + currentState = CURRENT_STATE_ERROR; + cancelProgressTimer(); + } + + public void onStateAutoComplete() { + Log.e(TAG, "onStateAutoComplete " + " [" + this.hashCode() + "] "); + currentState = CURRENT_STATE_AUTO_COMPLETE; + cancelProgressTimer(); + progressBar.setProgress(100); + currentTimeTextView.setText(totalTimeTextView.getText()); + } + + public void onInfo(int what, int extra) { + Log.e(TAG, "onInfo what - " + what + " extra - " + extra); + if (what == MediaPlayer.MEDIA_INFO_VIDEO_RENDERING_START) { + onVideoRendingStart(); + } + } + + public void onError(int what, int extra) { + Log.e(TAG, "onError " + what + " - " + extra + " [" + this.hashCode() + "] "); + if (what != 38 && what != -38 && extra != -38) { + onStateError(); + if (isCurrentPlay()) { + JZMediaManager.instance().releaseMediaPlayer(); + } + } + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + if (currentScreen == SCREEN_WINDOW_FULLSCREEN || currentScreen == SCREEN_WINDOW_TINY) { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + return; + } + if (widthRatio != 0 && heightRatio != 0) { + int specWidth = MeasureSpec.getSize(widthMeasureSpec); + int specHeight = (int) ((specWidth * (float) heightRatio) / widthRatio); + setMeasuredDimension(specWidth, specHeight); + + int childWidthMeasureSpec = MeasureSpec.makeMeasureSpec(specWidth, MeasureSpec.EXACTLY); + int childHeightMeasureSpec = MeasureSpec.makeMeasureSpec(specHeight, MeasureSpec.EXACTLY); + getChildAt(0).measure(childWidthMeasureSpec, childHeightMeasureSpec); + } else { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + } + + } + + public void onAutoCompletion() { + //加上这句,避免循环播放video的时候,内存不断飙升。 + Runtime.getRuntime().gc(); + Log.e(TAG, "onAutoCompletion " + " [" + this.hashCode() + "] "); + onEvent(JZUserAction.ON_AUTO_COMPLETE); + dismissVolumeDialog(); + dismissProgressDialog(); + dismissBrightnessDialog(); + cancelProgressTimer(); 
+ onStateAutoComplete(); + + if (currentScreen == SCREEN_WINDOW_FULLSCREEN || currentScreen == SCREEN_WINDOW_TINY) { + backPress(); + } + JZMediaManager.instance().mediaPlayer.release(); + JZUtils.saveProgress(getContext(), JZUtils.getCurrentUrlFromMap(urlMap, currentUrlMapIndex), 0); + } + + public void onCompletion() { + Log.e(TAG, "onCompletion " + " [" + this.hashCode() + "] "); + //save position + if (currentState == CURRENT_STATE_PLAYING || currentState == CURRENT_STATE_PAUSE) { + int position = getCurrentPositionWhenPlaying(); + JZUtils.saveProgress(getContext(), JZUtils.getCurrentUrlFromMap(urlMap, currentUrlMapIndex), position); + } + cancelProgressTimer(); + onStateNormal(); + // 清理缓存变量 + textureViewContainer.removeView(JZMediaManager.textureView); + JZMediaManager.instance().currentVideoWidth = 0; + JZMediaManager.instance().currentVideoHeight = 0; + + AudioManager mAudioManager = (AudioManager) getContext().getSystemService(Context.AUDIO_SERVICE); + mAudioManager.abandonAudioFocus(onAudioFocusChangeListener); + JZUtils.scanForActivity(getContext()).getWindow().clearFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); + clearFullscreenLayout(); + JZUtils.setRequestedOrientation(getContext(), NORMAL_ORIENTATION); + JZMediaManager.textureView = null; + JZMediaManager.savedSurfaceTexture = null; + isVideoRendingStart = false; + } + + public void release() { + if (JZUtils.getCurrentUrlFromMap(urlMap, currentUrlMapIndex).equals(JZMediaManager.CURRENT_PLAYING_URL) && + (System.currentTimeMillis() - CLICK_QUIT_FULLSCREEN_TIME) > FULL_SCREEN_NORMAL_DELAY) { + //在非全屏的情况下只能backPress() + if (JZVideoPlayerManager.getSecondFloor() != null && + JZVideoPlayerManager.getSecondFloor().currentScreen == SCREEN_WINDOW_FULLSCREEN) {//点击全屏 + } else if (JZVideoPlayerManager.getSecondFloor() == null && JZVideoPlayerManager.getFirstFloor() != null && + JZVideoPlayerManager.getFirstFloor().currentScreen == SCREEN_WINDOW_FULLSCREEN) {//直接全屏 + } else { + Log.e(TAG, "release [" + 
this.hashCode() + "]"); + releaseAllVideos(); + Log.e("Atest", "release: JZVideoPlayer ==== releaseAllVideo"); + } + } + } + + public void initTextureView() { + removeTextureView(); + JZMediaManager.textureView = new JZResizeTextureView(getContext()); + JZMediaManager.textureView.setSurfaceTextureListener(JZMediaManager.instance()); + } + + public void addTextureView() { + Log.e(TAG, "addTextureView [" + this.hashCode() + "] "); + LayoutParams layoutParams = + new LayoutParams( + ViewGroup.LayoutParams.MATCH_PARENT, + ViewGroup.LayoutParams.MATCH_PARENT, + Gravity.CENTER); + ViewGroup parent = (ViewGroup)JZMediaManager.textureView.getParent(); + if (parent != null) { + parent.removeView(JZMediaManager.textureView); + } + textureViewContainer.addView(JZMediaManager.textureView, layoutParams); + } + + public void removeTextureView() { + JZMediaManager.savedSurfaceTexture = null; + if (JZMediaManager.textureView != null && JZMediaManager.textureView.getParent() != null) { + ((ViewGroup) JZMediaManager.textureView.getParent()).removeView(JZMediaManager.textureView); + } + } + + public void clearFullscreenLayout() { + ViewGroup vp = (ViewGroup) (JZUtils.scanForActivity(getContext()))//.getWindow().getDecorView(); + .findViewById(Window.ID_ANDROID_CONTENT); + View oldF = vp.findViewById(R.id.jz_fullscreen_id); + View oldT = vp.findViewById(R.id.jz_tiny_id); + if (oldF != null) { + vp.removeView(oldF); + } + if (oldT != null) { + vp.removeView(oldT); + } + showSupportActionBar(getContext()); + } + + public void clearFloatScreen() { + JZUtils.setRequestedOrientation(getContext(), NORMAL_ORIENTATION); + showSupportActionBar(getContext()); + JZVideoPlayer currJzvd = JZVideoPlayerManager.getCurrentJzvd(); + currJzvd.textureViewContainer.removeView(JZMediaManager.textureView); + ViewGroup vp = (ViewGroup) (JZUtils.scanForActivity(getContext()))//.getWindow().getDecorView(); + .findViewById(Window.ID_ANDROID_CONTENT); + vp.removeView(currJzvd); + 
JZVideoPlayerManager.setSecondFloor(null); + } + + public void onVideoSizeChanged() { + Log.e(TAG, "onVideoSizeChanged " + " [" + this.hashCode() + "] "); + if (JZMediaManager.textureView != null) { + JZMediaManager.textureView.setVideoSize(JZMediaManager.instance().getVideoSize()); + } + } + + public void startProgressTimer() { + cancelProgressTimer(); + UPDATE_PROGRESS_TIMER = new Timer(); + mProgressTimerTask = new ProgressTimerTask(); + UPDATE_PROGRESS_TIMER.schedule(mProgressTimerTask, 0, 300); + } + + public void cancelProgressTimer() { + if (UPDATE_PROGRESS_TIMER != null) { + UPDATE_PROGRESS_TIMER.cancel(); + } + if (mProgressTimerTask != null) { + mProgressTimerTask.cancel(); + } + } + + public void setProgressAndText(int progress, int position, int duration) { + if (!mTouchingProgressBar) { + if (progress != 0) progressBar.setProgress(progress); + } + if (position != 0) currentTimeTextView.setText(JZUtils.stringForTime(position)); + totalTimeTextView.setText(JZUtils.stringForTime(duration)); + } + + public void setBufferProgress(int bufferProgress) { + if (bufferProgress != 0) progressBar.setSecondaryProgress(bufferProgress); + } + + public void resetProgressAndTime() { + progressBar.setProgress(0); + progressBar.setSecondaryProgress(0); + currentTimeTextView.setText(JZUtils.stringForTime(0)); + totalTimeTextView.setText(JZUtils.stringForTime(0)); + } + + public int getCurrentPositionWhenPlaying() { + int position = 0; + if (JZMediaManager.instance().mediaPlayer == null) + return position;//这行代码不应该在这,如果代码和逻辑万无一失的话,心头之恨呐 + if (currentState == CURRENT_STATE_PLAYING || + currentState == CURRENT_STATE_PAUSE) { + try { + position = JZMediaManager.instance().mediaPlayer.getCurrentPosition(); + } catch (IllegalStateException e) { + e.printStackTrace(); + return position; + } + } + return position; + } + + public int getDuration() { + int duration = 0; + if (JZMediaManager.instance().mediaPlayer == null) return duration; + try { + duration = 
JZMediaManager.instance().mediaPlayer.getDuration(); + } catch (IllegalStateException e) { + e.printStackTrace(); + return duration; + } + return duration; + } + + @Override + public void onStartTrackingTouch(SeekBar seekBar) { + Log.e(TAG, "bottomProgress onStartTrackingTouch [" + this.hashCode() + "] "); + cancelProgressTimer(); + /* ViewParent vpdown = getParent(); + while (vpdown != null) { + vpdown.requestDisallowInterceptTouchEvent(true); + vpdown = vpdown.getParent(); + }*/ + } + + @Override + public void onStopTrackingTouch(SeekBar seekBar) { + Log.e(TAG, "bottomProgress onStopTrackingTouch [" + this.hashCode() + "] "); + onEvent(JZUserAction.ON_SEEK_POSITION); + startProgressTimer(); + /* ViewParent vpup = getParent(); + while (vpup != null) { + vpup.requestDisallowInterceptTouchEvent(false); + vpup = vpup.getParent(); + }*/ + if (currentState != CURRENT_STATE_PLAYING && + currentState != CURRENT_STATE_PAUSE) return; + int time = seekBar.getProgress() * getDuration() / 100; + JZMediaManager.instance().mediaPlayer.seekTo(time); + Log.e(TAG, "seekTo " + time + " [" + this.hashCode() + "] "); + } + + @Override + public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { + } + + public void startWindowFullscreen() { + Log.e(TAG, "startWindowFullscreen " + " [" + this.hashCode() + "] "); + hideSupportActionBar(getContext()); + JZUtils.setRequestedOrientation(getContext(), FULLSCREEN_ORIENTATION); + + ViewGroup vp = (ViewGroup) (JZUtils.scanForActivity(getContext()))//.getWindow().getDecorView(); + .findViewById(Window.ID_ANDROID_CONTENT); + View old = vp.findViewById(R.id.jz_fullscreen_id); + if (old != null) { + vp.removeView(old); + } + textureViewContainer.removeView(JZMediaManager.textureView); + try { + Constructor constructor = (Constructor) JZVideoPlayer.this.getClass().getConstructor(Context.class); + JZVideoPlayer jzVideoPlayer = constructor.newInstance(getContext()); + jzVideoPlayer.setId(R.id.jz_fullscreen_id); + LayoutParams lp 
= new LayoutParams( + ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT); + vp.addView(jzVideoPlayer, lp); + jzVideoPlayer.setUp(urlMap, currentUrlMapIndex, JZVideoPlayerStandard.SCREEN_WINDOW_FULLSCREEN, objects); + jzVideoPlayer.setState(currentState); + jzVideoPlayer.addTextureView(); + JZVideoPlayerManager.setSecondFloor(jzVideoPlayer); +// final Animation ra = AnimationUtils.loadAnimation(getContext(), R.anim.start_fullscreen); +// jzVideoPlayer.setAnimation(ra); + onStateNormal(); + jzVideoPlayer.progressBar.setSecondaryProgress(progressBar.getSecondaryProgress()); + jzVideoPlayer.startProgressTimer(); + CLICK_QUIT_FULLSCREEN_TIME = System.currentTimeMillis(); + } catch (Exception e) { + e.printStackTrace(); + } + } + + public void startWindowTiny() { + Log.e(TAG, "startWindowTiny " + " [" + this.hashCode() + "] "); + onEvent(JZUserAction.ON_ENTER_TINYSCREEN); + if (currentState == CURRENT_STATE_NORMAL || currentState == CURRENT_STATE_ERROR) return; + ViewGroup vp = (ViewGroup) (JZUtils.scanForActivity(getContext()))//.getWindow().getDecorView(); + .findViewById(Window.ID_ANDROID_CONTENT); + View old = vp.findViewById(R.id.jz_tiny_id); + if (old != null) { + vp.removeView(old); + } + + new Handler().post(new Runnable() { + @Override + public void run() { + textureViewContainer.removeView(JZMediaManager.textureView); + } + }); + + try { + Constructor constructor = (Constructor) JZVideoPlayer.this.getClass().getConstructor(Context.class); + JZVideoPlayer jzVideoPlayer = constructor.newInstance(getContext()); + jzVideoPlayer.setId(R.id.jz_tiny_id); + LayoutParams lp = new LayoutParams(400, 400); + lp.gravity = Gravity.RIGHT | Gravity.BOTTOM; + vp.addView(jzVideoPlayer, lp); + jzVideoPlayer.setUp(urlMap, currentUrlMapIndex, JZVideoPlayerStandard.SCREEN_WINDOW_TINY, objects); + jzVideoPlayer.setState(currentState); + jzVideoPlayer.addTextureView(); + JZVideoPlayerManager.setSecondFloor(jzVideoPlayer); + onStateNormal(); + } catch 
(InstantiationException e) { + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + } + + //isCurrentPlay and isCurrenPlayUrl should be two logic methods,isCurrentPlay is for different jzvd with same + //url when fullscreen or tiny screen. isCurrenPlayUrl is to find where is myself when back from tiny screen. + //Sometimes they are overlap. + public boolean isCurrentPlay() {//虽然看这个函数很不爽,但是干不掉 + if (urlMap != null) { + return isCurrentJZVD() + && urlMap.containsValue(JZMediaManager.CURRENT_PLAYING_URL);//不仅正在播放的url不能一样,并且各个清晰度也不能一样 + }else{ + return false; + } + } + + public boolean isCurrentJZVD() {//是否是当前实例 + return JZVideoPlayerManager.getCurrentJzvd() != null + && JZVideoPlayerManager.getCurrentJzvd() == this; + } + + //退出全屏和小窗的方法 + public void playOnThisJzvd() { + Log.e(TAG, "playOnThisJzvd " + " [" + this.hashCode() + "] "); + //1.清空全屏和小窗的jzvd + currentState = JZVideoPlayerManager.getSecondFloor().currentState; + currentUrlMapIndex = JZVideoPlayerManager.getSecondFloor().currentUrlMapIndex; + clearFloatScreen(); + //2.在本jzvd上播放 + Log.e("jzvd", "jklf: " + currentState); + setState(currentState); + addTextureView(); + } + + //重力感应的时候调用的函数, + public void autoFullscreen(float x) { + if (isCurrentPlay() + && currentState == CURRENT_STATE_PLAYING + && currentScreen != SCREEN_WINDOW_FULLSCREEN + && currentScreen != SCREEN_WINDOW_TINY) { + if (x > 0) { + JZUtils.setRequestedOrientation(getContext(), ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE); + } else { + JZUtils.setRequestedOrientation(getContext(), ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE); + } + onEvent(JZUserAction.ON_ENTER_FULLSCREEN); + startWindowFullscreen(); + } + } + + public void autoQuitFullscreen() { + if ((System.currentTimeMillis() - lastAutoFullscreenTime) > 2000 + && isCurrentPlay() + && currentState == CURRENT_STATE_PLAYING + && currentScreen == SCREEN_WINDOW_FULLSCREEN) { + lastAutoFullscreenTime = System.currentTimeMillis(); + backPress(); + } + } + + public void 
onEvent(int type) { + if (JZ_USER_EVENT != null && isCurrentPlay() && urlMap != null) { + JZ_USER_EVENT.onEvent(type, JZUtils.getCurrentUrlFromMap(urlMap, currentUrlMapIndex), currentScreen, objects); + } + } + + public static void onScrollAutoTiny(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) { + int lastVisibleItem = firstVisibleItem + visibleItemCount; + int currentPlayPosition = JZMediaManager.instance().positionInList; + if (currentPlayPosition >= 0) { + if ((currentPlayPosition < firstVisibleItem || currentPlayPosition > (lastVisibleItem - 1))) { + //划出屏幕 要么release 要么进入小窗 + //JZVideoPlayer.releaseAllVideos(); + if (JZVideoPlayerManager.getCurrentJzvd() != null && + JZVideoPlayerManager.getCurrentJzvd().currentScreen != JZVideoPlayer.SCREEN_WINDOW_TINY) { + Log.e("jzvd", "onScroll: 划出屏幕"); + JZVideoPlayerManager.getCurrentJzvd().startWindowTiny(); + } + } else { + //滑入屏幕,这个会频繁回调,判断是否在屏幕中 + if (JZVideoPlayerManager.getCurrentJzvd() != null && + JZVideoPlayerManager.getCurrentJzvd().currentScreen == JZVideoPlayer.SCREEN_WINDOW_TINY) { + Log.e("jzvd", "onScroll: 划入屏幕"); + JZVideoPlayer.backPress(); + } + } + } + } + public static void onScrollReleaseAllVideos(int firstVisibleItem,int lastVisibleItem,int currentscreen){ + if(currentscreen == JZVideoPlayer.SCREEN_WINDOW_FULLSCREEN){ + return; + } + int currentPlayPosition = JZMediaManager.instance().positionInList; + if(currentPlayPosition >= 0 ){ + if(currentPlayPosition <= firstVisibleItem || currentPlayPosition >= lastVisibleItem){ + JZVideoPlayer.releaseAllVideos(); + Log.e("Atest", "onScrollReleaseAllVideos: JZVideoPlayer ==== releaseAllVideo"); + } + } + } + public static void onScrollReleaseAllVideos(View view, int firstVisibleItem, int visibleItemCount, int totalItemCount) { + int lastVisibleItem = firstVisibleItem + visibleItemCount; + int currentPlayPosition = JZMediaManager.instance().positionInList; + if (currentPlayPosition >= 0) { + if ((currentPlayPosition < 
firstVisibleItem || currentPlayPosition > (lastVisibleItem - 1))) { + //划出屏幕 要么release 要么进入小窗 + JZVideoPlayer.releaseAllVideos(); +// if (JZVideoPlayerManager.getCurrentJzvd() != null && +// JZVideoPlayerManager.getCurrentJzvd().currentScreen != JZVideoPlayer.SCREEN_WINDOW_TINY) { +// Log.e("jzvd", "onScroll: 划出屏幕"); +// JZVideoPlayerManager.getCurrentJzvd().startWindowTiny(); +// } + } else { + //滑入屏幕,这个会频繁回调,判断是否在屏幕中 +// if (JZVideoPlayerManager.getCurrentJzvd() != null && +// JZVideoPlayerManager.getCurrentJzvd().currentScreen == JZVideoPlayer.SCREEN_WINDOW_TINY) { +// Log.e("jzvd", "onScroll: 划入屏幕"); +// JZVideoPlayer.backPress(); +// } + } + } + } + + //TODO 是否有用 + public void onSeekComplete() { + + } + + public void showWifiDialog(int event) { + } + + public void showProgressDialog(float deltaX, + String seekTime, int seekTimePosition, + String totalTime, int totalTimeDuration) { + } + + public void dismissProgressDialog() { + + } + + public void showVolumeDialog(float deltaY, int volumePercent) { + + } + + public void dismissVolumeDialog() { + + } + + public void showBrightnessDialog(int brightnessPercent) { + + } + + public void dismissBrightnessDialog() { + + } + + public static class JZAutoFullscreenListener implements SensorEventListener { + @Override + public void onSensorChanged(SensorEvent event) {//可以得到传感器实时测量出来的变化值 + final float x = event.values[SensorManager.DATA_X]; + float y = event.values[SensorManager.DATA_Y]; + float z = event.values[SensorManager.DATA_Z]; + //过滤掉用力过猛会有一个反向的大数值 + if (((x > -15 && x < -10) || (x < 15 && x > 10)) && Math.abs(y) < 1.5) { + if ((System.currentTimeMillis() - lastAutoFullscreenTime) > 2000) { + if (JZVideoPlayerManager.getCurrentJzvd() != null) { + JZVideoPlayerManager.getCurrentJzvd().autoFullscreen(x); + } + lastAutoFullscreenTime = System.currentTimeMillis(); + } + } + } + + @Override + public void onAccuracyChanged(Sensor sensor, int accuracy) { + } + } + + public class ProgressTimerTask extends TimerTask { + 
@Override + public void run() { + if (currentState == CURRENT_STATE_PLAYING || currentState == CURRENT_STATE_PAUSE) { +// Log.v(TAG, "onProgressUpdate " + position + "/" + duration + " [" + this.hashCode() + "] "); + mHandler.post(new Runnable() { + @Override + public void run() { + int position = getCurrentPositionWhenPlaying(); + int duration = getDuration(); + int progress = position * 100 / (duration == 0 ? 1 : duration); + setProgressAndText(progress, position, duration); + } + }); + } + } + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZVideoPlayerManager.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZVideoPlayerManager.java new file mode 100644 index 0000000..f25fc95 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZVideoPlayerManager.java @@ -0,0 +1,46 @@ +package com.aserbao.androidcustomcamera.whole.jiaozivideo; + +/** + * Put JZVideoPlayer into layout + * From a JZVideoPlayer to another JZVideoPlayer + * Created by Nathen on 16/7/26. 
+ */ +public class JZVideoPlayerManager { + + public static JZVideoPlayer FIRST_FLOOR_JZVD; + public static JZVideoPlayer SECOND_FLOOR_JZVD; + + public static JZVideoPlayer getFirstFloor() { + return FIRST_FLOOR_JZVD; + } + + public static void setFirstFloor(JZVideoPlayer jzVideoPlayer) { + FIRST_FLOOR_JZVD = jzVideoPlayer; + } + + public static JZVideoPlayer getSecondFloor() { + return SECOND_FLOOR_JZVD; + } + + public static void setSecondFloor(JZVideoPlayer jzVideoPlayer) { + SECOND_FLOOR_JZVD = jzVideoPlayer; + } + + public static JZVideoPlayer getCurrentJzvd() { + if (getSecondFloor() != null) { + return getSecondFloor(); + } + return getFirstFloor(); + } + + public static void completeAll() { + if (SECOND_FLOOR_JZVD != null) { + SECOND_FLOOR_JZVD.onCompletion(); + SECOND_FLOOR_JZVD = null; + } + if (FIRST_FLOOR_JZVD != null) { + FIRST_FLOOR_JZVD.onCompletion(); + FIRST_FLOOR_JZVD = null; + } + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZVideoPlayerSimple.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZVideoPlayerSimple.java new file mode 100644 index 0000000..5306906 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZVideoPlayerSimple.java @@ -0,0 +1,76 @@ +package com.aserbao.androidcustomcamera.whole.jiaozivideo; + +import android.content.Context; +import android.util.AttributeSet; +import android.view.View; +import android.widget.SeekBar; +import android.widget.Toast; + +import com.aserbao.androidcustomcamera.R; + +/** + * Manage UI + * Created by Nathen + * On 2016/04/10 15:45 + */ +public class JZVideoPlayerSimple extends JZVideoPlayer { + + public JZVideoPlayerSimple(Context context) { + super(context); + } + + public JZVideoPlayerSimple(Context context, AttributeSet attrs) { + super(context, attrs); + } + + @Override + public int getLayoutId() { + return R.layout.jz_layout_base; + } + + @Override + public void setUp(String url, int screen, 
Object... objects) { + super.setUp(url, screen, objects); + updateFullscreenButton(); + fullscreenButton.setVisibility(View.GONE); + } + + private void updateStartImage() { + if (currentState == CURRENT_STATE_PLAYING) { + startButton.setImageResource(R.drawable.jz_click_pause_selector); + } else if (currentState == CURRENT_STATE_ERROR) { + startButton.setImageResource(R.drawable.jz_click_error_selector); + } else { + startButton.setImageResource(R.drawable.jz_click_play_selector); + } + } + + public void updateFullscreenButton() { + if (currentScreen == SCREEN_WINDOW_FULLSCREEN) { + fullscreenButton.setImageResource(R.drawable.jz_shrink); + } else { + fullscreenButton.setImageResource(R.drawable.jz_enlarge); + } + } + + @Override + public void onClick(View v) { + if (v.getId() == R.id.fullscreen && currentState == CURRENT_STATE_NORMAL) { + Toast.makeText(getContext(), "Play video first", Toast.LENGTH_LONG).show(); + return; + } + super.onClick(v); + } + + + @Override + public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { + if (fromUser) { + if (currentState == CURRENT_STATE_NORMAL) { + Toast.makeText(getContext(), "Play video first", Toast.LENGTH_LONG).show(); + return; + } + } + super.onProgressChanged(seekBar, progress, fromUser); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZVideoPlayerStandard.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZVideoPlayerStandard.java new file mode 100644 index 0000000..6dd8866 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/JZVideoPlayerStandard.java @@ -0,0 +1,840 @@ +package com.aserbao.androidcustomcamera.whole.jiaozivideo; + +import android.app.Activity; +import android.app.AlertDialog; +import android.app.Dialog; +import android.content.BroadcastReceiver; +import android.content.Context; +import android.content.DialogInterface; +import android.content.Intent; +import 
android.content.IntentFilter; +import android.graphics.Color; +import android.text.TextUtils; +import android.util.AttributeSet; +import android.util.Log; +import android.view.Gravity; +import android.view.LayoutInflater; +import android.view.MotionEvent; +import android.view.View; +import android.view.ViewGroup; +import android.view.Window; +import android.view.WindowManager; +import android.widget.FrameLayout; +import android.widget.ImageView; +import android.widget.LinearLayout; +import android.widget.PopupWindow; +import android.widget.ProgressBar; +import android.widget.SeekBar; +import android.widget.TextView; +import android.widget.Toast; + + +import com.aserbao.androidcustomcamera.R; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.LinkedHashMap; +import java.util.Timer; +import java.util.TimerTask; + +/** + * Created by Nathen + * On 2016/04/18 16:15 + */ +public class JZVideoPlayerStandard extends JZVideoPlayer { + + protected static Timer DISMISS_CONTROL_VIEW_TIMER; + + public ImageView backButton; + public ProgressBar bottomProgressBar, loadingProgressBar; + public TextView titleTextView; + public ImageView thumbImageView; + public ImageView tinyBackImageView; + public LinearLayout batteryTimeLayout; + public ImageView batteryLevel; + public TextView videoCurrentTime; + public TextView retryTextView; + public TextView clarity; + public PopupWindow clarityPopWindow; + + protected DismissControlViewTimerTask mDismissControlViewTimerTask; + protected Dialog mProgressDialog; + protected ProgressBar mDialogProgressBar; + protected TextView mDialogSeekTime; + protected TextView mDialogTotalTime; + protected ImageView mDialogIcon; + protected Dialog mVolumeDialog; + protected ProgressBar mDialogVolumeProgressBar; + protected TextView mDialogVolumeTextView; + protected ImageView mDialogVolumeImageView; + protected Dialog mBrightnessDialog; + protected ProgressBar mDialogBrightnessProgressBar; + protected TextView 
mDialogBrightnessTextView; + private boolean brocasting = false; + private BroadcastReceiver battertReceiver = new BroadcastReceiver() { + public void onReceive(Context context, Intent intent) { + String action = intent.getAction(); + if (Intent.ACTION_BATTERY_CHANGED.equals(action)) { + int level = intent.getIntExtra("level", 0); + int scale = intent.getIntExtra("scale", 100); + int percent = level * 100 / scale; + if (percent < 15) { + batteryLevel.setBackgroundResource(R.drawable.jz_battery_level_10); + } else if (percent >= 15 && percent < 40) { + batteryLevel.setBackgroundResource(R.drawable.jz_battery_level_30); + } else if (percent >= 40 && percent < 60) { + batteryLevel.setBackgroundResource(R.drawable.jz_battery_level_50); + } else if (percent >= 60 && percent < 80) { + batteryLevel.setBackgroundResource(R.drawable.jz_battery_level_70); + } else if (percent >= 80 && percent < 95) { + batteryLevel.setBackgroundResource(R.drawable.jz_battery_level_90); + } else if (percent >= 95 && percent <= 100) { + batteryLevel.setBackgroundResource(R.drawable.jz_battery_level_100); + } + getContext().unregisterReceiver(battertReceiver); + brocasting = false; + } + } + }; + + public JZVideoPlayerStandard(Context context) { + super(context); + } + + public JZVideoPlayerStandard(Context context, AttributeSet attrs) { + super(context, attrs); + } + + @Override + public void init(Context context) { + super.init(context); + + batteryTimeLayout = (LinearLayout) findViewById(R.id.battery_time_layout); + bottomProgressBar = (ProgressBar) findViewById(R.id.bottom_progress); + titleTextView = (TextView) findViewById(R.id.title); + backButton = (ImageView) findViewById(R.id.back); + thumbImageView = (ImageView) findViewById(R.id.thumb); + loadingProgressBar = (ProgressBar) findViewById(R.id.loading); + tinyBackImageView = (ImageView) findViewById(R.id.back_tiny); + batteryLevel = (ImageView) findViewById(R.id.battery_level); + videoCurrentTime = (TextView) 
findViewById(R.id.video_current_time); + retryTextView = (TextView) findViewById(R.id.retry_text); + clarity = (TextView) findViewById(R.id.clarity); + + thumbImageView.setOnClickListener(this); + backButton.setOnClickListener(this); + tinyBackImageView.setOnClickListener(this); + clarity.setOnClickListener(this); + + } + + public void setUp(LinkedHashMap urlMap, int defaultUrlMapIndex, int screen, Object... objects) { + super.setUp(urlMap, defaultUrlMapIndex, screen, objects); + if (objects.length == 0) return; + titleTextView.setText(objects[0].toString()); + if (currentScreen == SCREEN_WINDOW_FULLSCREEN) { + fullscreenButton.setImageResource(R.drawable.jz_shrink); + backButton.setVisibility(View.VISIBLE); + tinyBackImageView.setVisibility(View.INVISIBLE); + batteryTimeLayout.setVisibility(View.VISIBLE); + if (urlMap.size() == 1) { + clarity.setVisibility(GONE); + } else { + clarity.setText(JZUtils.getKeyFromLinkedMap(urlMap, currentUrlMapIndex)); + clarity.setVisibility(View.VISIBLE); + } +// changeStartButtonSize((int) getResources().getDimension(R.dimen.jz_start_button_w_h_fullscreen)); + } else if (currentScreen == SCREEN_LAYOUT_NORMAL + || currentScreen == SCREEN_LAYOUT_LIST) { + fullscreenButton.setImageResource(R.drawable.jz_enlarge); + backButton.setVisibility(View.GONE); + tinyBackImageView.setVisibility(View.INVISIBLE); +// changeStartButtonSize((int) getResources().getDimension(R.dimen.jz_start_button_w_h_normal)); + batteryTimeLayout.setVisibility(View.GONE); + clarity.setVisibility(View.GONE); + } else if (currentScreen == SCREEN_WINDOW_TINY) { + tinyBackImageView.setVisibility(View.VISIBLE); + setAllControlsVisiblity(View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, + View.INVISIBLE, View.INVISIBLE, View.INVISIBLE); + batteryTimeLayout.setVisibility(View.GONE); + clarity.setVisibility(View.GONE); + } + setSystemTimeAndBattery(); + + + if (tmp_test_back) { + tmp_test_back = false; + //更新jzvd第一层,然后backpress + JZVideoPlayerManager.setFirstFloor(this); + 
Log.e("jzvd", "setUp: tmp_test_back=true 启动线程"); + //等着setUp的子类setUp执行完毕 + Log.e("jzvd", "setUp: tmp_test_back=true 执行backPress"); + isVideoRendingStart = true;//表示可以渲染图像 + backPress(); + } + } + + + public void changeStartButtonSize(int size) { + ViewGroup.LayoutParams lp = startButton.getLayoutParams(); + lp.height = size; + lp.width = size; + lp = loadingProgressBar.getLayoutParams(); + lp.height = size; + lp.width = size; + } + + @Override + public int getLayoutId() { + return R.layout.jz_layout_standard; + } + + @Override + public void onStateNormal() { + super.onStateNormal(); + changeUiToNormal(); + } + + @Override + public void onStatePreparing() { + super.onStatePreparing(); + changeUiToPreparing(); + startDismissControlViewTimer(); + } + + @Override + public void onStatePreparingChangingUrl(int urlMapIndex, int seekToInAdvance) { + super.onStatePreparingChangingUrl(urlMapIndex, seekToInAdvance); + loadingProgressBar.setVisibility(VISIBLE); + startButton.setVisibility(INVISIBLE); + } + + @Override + public void onStatePlaying() { + super.onStatePlaying(); + changeUiToPlayingShow(); + startDismissControlViewTimer(); + } + + @Override + public void onStatePause() { + super.onStatePause(); + changeUiToPauseShow(); + cancelDismissControlViewTimer(); + } + + @Override + public void onStateError() { + super.onStateError(); + changeUiToError(); + } + + @Override + public void onStateAutoComplete() { + super.onStateAutoComplete(); + changeUiToComplete(); + cancelDismissControlViewTimer(); + bottomProgressBar.setProgress(100); + } + + @Override + public boolean onTouch(View v, MotionEvent event) { + int id = v.getId(); + if (id == R.id.surface_container) { + switch (event.getAction()) { + case MotionEvent.ACTION_DOWN: + break; + case MotionEvent.ACTION_MOVE: + break; + case MotionEvent.ACTION_UP: + startDismissControlViewTimer(); + if (mChangePosition) { + int duration = getDuration(); + int progress = mSeekTimePosition * 100 / (duration == 0 ? 
1 : duration); + bottomProgressBar.setProgress(progress); + } + if (!mChangePosition && !mChangeVolume) { + onEvent(JZUserActionStandard.ON_CLICK_BLANK); + onClickUiToggle(); + } + break; + } + } else if (id == R.id.bottom_seek_progress) { + switch (event.getAction()) { + case MotionEvent.ACTION_DOWN: + cancelDismissControlViewTimer(); + break; + case MotionEvent.ACTION_UP: + startDismissControlViewTimer(); + break; + } + } + return super.onTouch(v, event); + } + + @Override + public void onClick(View v) { + super.onClick(v); + int i = v.getId(); + if (i == R.id.thumb) { + if (TextUtils.isEmpty(JZUtils.getCurrentUrlFromMap(urlMap, currentUrlMapIndex))) { + Toast.makeText(getContext(), "No_url", Toast.LENGTH_LONG).show(); + return; + } + if (currentState == CURRENT_STATE_NORMAL) { + if (!JZUtils.getCurrentUrlFromMap(urlMap, currentUrlMapIndex).startsWith("file") && + !JZUtils.getCurrentUrlFromMap(urlMap, currentUrlMapIndex).startsWith("/") && + !JZUtils.isWifiConnected(getContext()) && !WIFI_TIP_DIALOG_SHOWED) { + showWifiDialog(JZUserActionStandard.ON_CLICK_START_THUMB); + return; + } + onEvent(JZUserActionStandard.ON_CLICK_START_THUMB); + startVideo(); + } else if (currentState == CURRENT_STATE_AUTO_COMPLETE) { + onClickUiToggle(); + } + } else if (i == R.id.surface_container) { + startDismissControlViewTimer(); + } else if (i == R.id.back) { + backPress(); + } else if (i == R.id.back_tiny) { + backPress(); + } else if (i == R.id.clarity) { + LayoutInflater inflater = (LayoutInflater) getContext() + .getSystemService(Context.LAYOUT_INFLATER_SERVICE); + final LinearLayout layout = (LinearLayout) inflater.inflate(R.layout.jz_layout_clarity, null); + + View.OnClickListener mQualityListener = new View.OnClickListener() { + public void onClick(View v) { + int index = (int) v.getTag(); + onStatePreparingChangingUrl(index, getCurrentPositionWhenPlaying()); + clarity.setText(JZUtils.getKeyFromLinkedMap(urlMap, currentUrlMapIndex)); + for (int j = 0; j < 
layout.getChildCount(); j++) {//设置点击之后的颜色 + if (j == currentUrlMapIndex) { + ((TextView) layout.getChildAt(j)).setTextColor(Color.parseColor("#fff85959")); + } else { + ((TextView) layout.getChildAt(j)).setTextColor(Color.parseColor("#ffffff")); + } + } + if (clarityPopWindow != null) { + clarityPopWindow.dismiss(); + } + } + }; + + for (int j = 0; j < urlMap.size(); j++) { + String key = JZUtils.getKeyFromLinkedMap(urlMap, j); + TextView clarityItem = (TextView) View.inflate(getContext(), R.layout.jz_layout_clarity_item, null); + clarityItem.setText(key); + clarityItem.setTag(j); + layout.addView(clarityItem, j); + clarityItem.setOnClickListener(mQualityListener); + if (j == currentUrlMapIndex) { + clarityItem.setTextColor(Color.parseColor("#fff85959")); + } + } + + clarityPopWindow = new PopupWindow(layout, FrameLayout.LayoutParams.WRAP_CONTENT, FrameLayout.LayoutParams.WRAP_CONTENT, true); + clarityPopWindow.setContentView(layout); + clarityPopWindow.showAsDropDown(clarity); + layout.measure(View.MeasureSpec.UNSPECIFIED, View.MeasureSpec.UNSPECIFIED); + clarityPopWindow.update(clarity, -40, 46, Math.round(layout.getMeasuredWidth() * 2), layout.getMeasuredHeight()); + } + } + @Override + public void showWifiDialog(int action) { + super.showWifiDialog(action); + AlertDialog.Builder builder = new AlertDialog.Builder(getContext()); + builder.setMessage("tips_not_wifi"); + builder.setPositiveButton("Resume", new DialogInterface.OnClickListener() { + @Override + public void onClick(DialogInterface dialog, int which) { + dialog.dismiss(); + onEvent(JZUserActionStandard.ON_CLICK_START_THUMB); + startVideo(); + WIFI_TIP_DIALOG_SHOWED = true; + } + }); + builder.setNegativeButton("Resume", new DialogInterface.OnClickListener() { + @Override + public void onClick(DialogInterface dialog, int which) { + dialog.dismiss(); + if (currentScreen == SCREEN_WINDOW_FULLSCREEN) { + dialog.dismiss(); + clearFullscreenLayout(); + } + } + }); + builder.setOnCancelListener(new 
DialogInterface.OnCancelListener() { + @Override + public void onCancel(DialogInterface dialog) { + dialog.dismiss(); + if (currentScreen == SCREEN_WINDOW_FULLSCREEN) { + dialog.dismiss(); + clearFullscreenLayout(); + } + } + }); + builder.create().show(); + } + + @Override + public void onStartTrackingTouch(SeekBar seekBar) { + super.onStartTrackingTouch(seekBar); + cancelDismissControlViewTimer(); + } + + @Override + public void onStopTrackingTouch(SeekBar seekBar) { + super.onStopTrackingTouch(seekBar); + if (currentState == CURRENT_STATE_PLAYING) { + dissmissControlView(); + } else { + startDismissControlViewTimer(); + } + } + + public void onClickUiToggle() { + if (bottomProgressBar!= null && bottomContainer.getVisibility() != View.VISIBLE) { + setSystemTimeAndBattery(); + clarity.setText(JZUtils.getKeyFromLinkedMap(urlMap, currentUrlMapIndex)); + } + if (currentState == CURRENT_STATE_PREPARING) { + changeUiToPreparing(); + if (bottomProgressBar!= null && bottomContainer.getVisibility() == View.VISIBLE) { + } else { + setSystemTimeAndBattery(); + } + } else if (currentState == CURRENT_STATE_PLAYING) { + if (bottomContainer!= null && bottomContainer.getVisibility() == View.VISIBLE) { + changeUiToPlayingClear(); + } else { + changeUiToPlayingShow(); + } + } else if (currentState == CURRENT_STATE_PAUSE) { + if (bottomContainer.getVisibility() == View.VISIBLE) { + changeUiToPauseClear(); + } else { + changeUiToPauseShow(); + } + } + } + //显示电池 + public void setSystemTimeAndBattery() { + SimpleDateFormat dateFormater = new SimpleDateFormat("HH:mm"); + Date date = new Date(); + videoCurrentTime.setText(dateFormater.format(date)); + if (!brocasting) { + getContext().registerReceiver( + battertReceiver, + new IntentFilter(Intent.ACTION_BATTERY_CHANGED) + ); + } + } + + public void onCLickUiToggleToClear() { + if (currentState == CURRENT_STATE_PREPARING) { + if (bottomContainer.getVisibility() == View.VISIBLE) { + changeUiToPreparing(); + } else { + } + } else if 
(currentState == CURRENT_STATE_PLAYING) { + if (bottomContainer.getVisibility() == View.VISIBLE) { + changeUiToPlayingClear(); + } else { + } + } else if (currentState == CURRENT_STATE_PAUSE) { + if (bottomContainer.getVisibility() == View.VISIBLE) { + changeUiToPauseClear(); + } else { + } + } else if (currentState == CURRENT_STATE_AUTO_COMPLETE) { + if (bottomContainer.getVisibility() == View.VISIBLE) { + changeUiToComplete(); + } else { + } + } + } + + @Override + public void setProgressAndText(int progress, int position, int duration) { + super.setProgressAndText(progress, position, duration); + if (progress != 0) bottomProgressBar.setProgress(progress); + } + + @Override + public void setBufferProgress(int bufferProgress) { + super.setBufferProgress(bufferProgress); + if (bufferProgress != 0) bottomProgressBar.setSecondaryProgress(bufferProgress); + } + + @Override + public void resetProgressAndTime() { + super.resetProgressAndTime(); + bottomProgressBar.setProgress(0); + bottomProgressBar.setSecondaryProgress(0); + } + + //Unified management Ui + public void changeUiToNormal() { + switch (currentScreen) { + case SCREEN_LAYOUT_NORMAL: + case SCREEN_LAYOUT_LIST: + setAllControlsVisiblity(View.VISIBLE, View.INVISIBLE, View.VISIBLE, + View.INVISIBLE, View.VISIBLE, View.INVISIBLE); + updateStartImage(); + break; + case SCREEN_WINDOW_FULLSCREEN: + setAllControlsVisiblity(View.VISIBLE, View.INVISIBLE, View.VISIBLE, + View.INVISIBLE, View.VISIBLE, View.INVISIBLE); + updateStartImage(); + break; + case SCREEN_WINDOW_TINY: + break; + } + } + + public void changeUiToPreparing() { + switch (currentScreen) { + case SCREEN_LAYOUT_NORMAL: + case SCREEN_LAYOUT_LIST: + setAllControlsVisiblity(View.VISIBLE, View.INVISIBLE, View.INVISIBLE, + View.VISIBLE, View.VISIBLE, View.INVISIBLE); + break; + case SCREEN_WINDOW_FULLSCREEN: + setAllControlsVisiblity(View.VISIBLE, View.INVISIBLE, View.INVISIBLE, + View.VISIBLE, View.VISIBLE, View.INVISIBLE); + break; + case 
SCREEN_WINDOW_TINY: + break; + } + + } + + @Override + public void onVideoRendingStart() { + super.onVideoRendingStart(); + setAllControlsVisiblity(View.VISIBLE, View.INVISIBLE, View.INVISIBLE, + View.INVISIBLE, View.INVISIBLE, View.VISIBLE); + startDismissControlViewTimer(); + } + + public void changeUiToPlayingShow() { + switch (currentScreen) { + case SCREEN_LAYOUT_NORMAL: + case SCREEN_LAYOUT_LIST: + setAllControlsVisiblity(View.VISIBLE, View.VISIBLE, View.VISIBLE, + View.INVISIBLE, View.INVISIBLE, View.INVISIBLE); + updateStartImage(); + break; + case SCREEN_WINDOW_FULLSCREEN: + setAllControlsVisiblity(View.VISIBLE, View.VISIBLE, View.VISIBLE, + View.INVISIBLE, View.INVISIBLE, View.INVISIBLE); + updateStartImage(); + break; + case SCREEN_WINDOW_TINY: + break; + } + + } + + public void changeUiToPlayingClear() { + switch (currentScreen) { + case SCREEN_LAYOUT_NORMAL: + case SCREEN_LAYOUT_LIST: + setAllControlsVisiblity(View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, + View.INVISIBLE, View.INVISIBLE, View.VISIBLE); + break; + case SCREEN_WINDOW_FULLSCREEN: + setAllControlsVisiblity(View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, + View.INVISIBLE, View.INVISIBLE, View.VISIBLE); + break; + case SCREEN_WINDOW_TINY: + break; + } + + } + + public void changeUiToPauseShow() { + switch (currentScreen) { + case SCREEN_LAYOUT_NORMAL: + case SCREEN_LAYOUT_LIST: + setAllControlsVisiblity(View.VISIBLE, View.VISIBLE, View.VISIBLE, + View.INVISIBLE, View.INVISIBLE, View.INVISIBLE); + updateStartImage(); + break; + case SCREEN_WINDOW_FULLSCREEN: + setAllControlsVisiblity(View.VISIBLE, View.VISIBLE, View.VISIBLE, + View.INVISIBLE, View.INVISIBLE, View.INVISIBLE); + updateStartImage(); + break; + case SCREEN_WINDOW_TINY: + break; + } + + } + + public void changeUiToPauseClear() { + switch (currentScreen) { + case SCREEN_LAYOUT_NORMAL: + case SCREEN_LAYOUT_LIST: + setAllControlsVisiblity(View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, + View.INVISIBLE, View.INVISIBLE, 
View.INVISIBLE); + break; + case SCREEN_WINDOW_FULLSCREEN: + setAllControlsVisiblity(View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, + View.INVISIBLE, View.INVISIBLE, View.INVISIBLE); + break; + case SCREEN_WINDOW_TINY: + break; + } + + } + + public void changeUiToComplete() { + switch (currentScreen) { + case SCREEN_LAYOUT_NORMAL: + case SCREEN_LAYOUT_LIST: + setAllControlsVisiblity(View.VISIBLE, View.INVISIBLE, View.VISIBLE, + View.INVISIBLE, View.VISIBLE, View.INVISIBLE); + updateStartImage(); + break; + case SCREEN_WINDOW_FULLSCREEN: + setAllControlsVisiblity(View.VISIBLE, View.INVISIBLE, View.VISIBLE, + View.INVISIBLE, View.VISIBLE, View.INVISIBLE); + updateStartImage(); + break; + case SCREEN_WINDOW_TINY: + break; + } + + } + + public void changeUiToError() { + switch (currentScreen) { + case SCREEN_LAYOUT_NORMAL: + case SCREEN_LAYOUT_LIST: + setAllControlsVisiblity(View.INVISIBLE, View.INVISIBLE, View.VISIBLE, + View.INVISIBLE, View.INVISIBLE, View.INVISIBLE); + updateStartImage(); + break; + case SCREEN_WINDOW_FULLSCREEN: + setAllControlsVisiblity(View.INVISIBLE, View.INVISIBLE, View.VISIBLE, + View.INVISIBLE, View.INVISIBLE, View.INVISIBLE); + updateStartImage(); + break; + case SCREEN_WINDOW_TINY: + break; + } + + } + + public void setAllControlsVisiblity(int topCon, int bottomCon, int startBtn, int loadingPro, + int thumbImg, int bottomPro) { + //TODO 这个地方由于前边的各种状态不是太明白,所以暂时只能这样写一下(目前没发现问题),作者可以优化一下 + if (!isVideoRendingStart && currentScreen != SCREEN_WINDOW_FULLSCREEN && currentScreen != SCREEN_WINDOW_TINY) { + //只要没开始播放,一直显示缩略图 + thumbImg = VISIBLE; + } + topContainer.setVisibility(topCon); + bottomContainer.setVisibility(bottomCon); + startButton.setVisibility(startBtn); + loadingProgressBar.setVisibility(loadingPro); + thumbImageView.setVisibility(thumbImg); + bottomProgressBar.setVisibility(VISIBLE); + } + + public void updateStartImage() { + // TODO: 2017/12/7 视屏中不修改状态图标 + /*if (currentState == CURRENT_STATE_PLAYING) { + 
startButton.setImageResource(R.drawable.jz_click_pause_selector); + retryTextView.setVisibility(INVISIBLE); + } else if (currentState == CURRENT_STATE_ERROR) { + startButton.setImageResource(R.drawable.jz_click_error_selector); + retryTextView.setVisibility(INVISIBLE); + } else if (currentState == CURRENT_STATE_AUTO_COMPLETE) { + startButton.setImageResource(R.drawable.jz_click_replay_selector); + retryTextView.setVisibility(VISIBLE); + } else { + startButton.setImageResource(R.drawable.jz_click_play_selector); + retryTextView.setVisibility(INVISIBLE); + }*/ + } + + @Override + public void showProgressDialog(float deltaX, String seekTime, int seekTimePosition, String totalTime, int totalTimeDuration) { + super.showProgressDialog(deltaX, seekTime, seekTimePosition, totalTime, totalTimeDuration); + if (mProgressDialog == null) { + View localView = LayoutInflater.from(getContext()).inflate(R.layout.jz_dialog_progress, null); + mDialogProgressBar = (ProgressBar) localView.findViewById(R.id.duration_progressbar); + mDialogSeekTime = (TextView) localView.findViewById(R.id.tv_current); + mDialogTotalTime = (TextView) localView.findViewById(R.id.tv_duration); + mDialogIcon = (ImageView) localView.findViewById(R.id.duration_image_tip); + mProgressDialog = createDialogWithView(localView); + } + if (!mProgressDialog.isShowing()) { + mProgressDialog.show(); + } + + mDialogSeekTime.setText(seekTime); + mDialogTotalTime.setText(" / " + totalTime); + mDialogProgressBar.setProgress(totalTimeDuration <= 0 ? 
0 : (seekTimePosition * 100 / totalTimeDuration)); + if (deltaX > 0) { + mDialogIcon.setBackgroundResource(R.drawable.jz_forward_icon); + } else { + mDialogIcon.setBackgroundResource(R.drawable.jz_backward_icon); + } + onCLickUiToggleToClear(); + } + + @Override + public void dismissProgressDialog() { + super.dismissProgressDialog(); + if (mProgressDialog != null) { + mProgressDialog.dismiss(); + } + } + + @Override + public void showVolumeDialog(float deltaY, int volumePercent) { + super.showVolumeDialog(deltaY, volumePercent); + if (mVolumeDialog == null) { + View localView = LayoutInflater.from(getContext()).inflate(R.layout.jz_dialog_volume, null); + mDialogVolumeImageView = (ImageView) localView.findViewById(R.id.volume_image_tip); + mDialogVolumeTextView = (TextView) localView.findViewById(R.id.tv_volume); + mDialogVolumeProgressBar = (ProgressBar) localView.findViewById(R.id.volume_progressbar); + mVolumeDialog = createDialogWithView(localView); + } + if (!mVolumeDialog.isShowing()) { + mVolumeDialog.show(); + } + if (volumePercent <= 0) { + mDialogVolumeImageView.setBackgroundResource(R.drawable.jz_close_volume); + } else { + mDialogVolumeImageView.setBackgroundResource(R.drawable.jz_add_volume); + } + if (volumePercent > 100) { + volumePercent = 100; + } else if (volumePercent < 0) { + volumePercent = 0; + } + mDialogVolumeTextView.setText(volumePercent + "%"); + mDialogVolumeProgressBar.setProgress(volumePercent); + onCLickUiToggleToClear(); + } + + @Override + public void dismissVolumeDialog() { + super.dismissVolumeDialog(); + if (mVolumeDialog != null) { + mVolumeDialog.dismiss(); + } + } + + @Override + public void showBrightnessDialog(int brightnessPercent) { + super.showBrightnessDialog(brightnessPercent); + if (mBrightnessDialog == null) { + View localView = LayoutInflater.from(getContext()).inflate(R.layout.jz_dialog_brightness, null); + mDialogBrightnessTextView = (TextView) localView.findViewById(R.id.tv_brightness); + 
mDialogBrightnessProgressBar = (ProgressBar) localView.findViewById(R.id.brightness_progressbar); + mBrightnessDialog = createDialogWithView(localView); + } + if (!mBrightnessDialog.isShowing()) { + mBrightnessDialog.show(); + } + if (brightnessPercent > 100) { + brightnessPercent = 100; + } else if (brightnessPercent < 0) { + brightnessPercent = 0; + } + mDialogBrightnessTextView.setText(brightnessPercent + "%"); + mDialogBrightnessProgressBar.setProgress(brightnessPercent); + onCLickUiToggleToClear(); + } + + @Override + public void dismissBrightnessDialog() { + super.dismissBrightnessDialog(); + if (mBrightnessDialog != null) { + mBrightnessDialog.dismiss(); + } + } + + public Dialog createDialogWithView(View localView) { + Dialog dialog = new Dialog(getContext(), R.style.jz_style_dialog_progress); + dialog.setContentView(localView); + Window window = dialog.getWindow(); + window.addFlags(Window.FEATURE_ACTION_BAR); + window.addFlags(WindowManager.LayoutParams.FLAG_NOT_TOUCH_MODAL); + window.addFlags(WindowManager.LayoutParams.FLAG_NOT_TOUCHABLE); + window.setLayout(-2, -2); + WindowManager.LayoutParams localLayoutParams = window.getAttributes(); + localLayoutParams.gravity = Gravity.CENTER; + window.setAttributes(localLayoutParams); + return dialog; + } + + public void startDismissControlViewTimer() { + cancelDismissControlViewTimer(); + DISMISS_CONTROL_VIEW_TIMER = new Timer(); + mDismissControlViewTimerTask = new DismissControlViewTimerTask(); + //todo 去掉消失时间 +// DISMISS_CONTROL_VIEW_TIMER.schedule(mDismissControlViewTimerTask, 2000); + } + + public void cancelDismissControlViewTimer() { + if (DISMISS_CONTROL_VIEW_TIMER != null) { + DISMISS_CONTROL_VIEW_TIMER.cancel(); + } + if (mDismissControlViewTimerTask != null) { + mDismissControlViewTimerTask.cancel(); + } + } + + @Override + public void onAutoCompletion() { + super.onAutoCompletion(); + cancelDismissControlViewTimer(); + } + + @Override + public void onCompletion() { + super.onCompletion(); + 
cancelDismissControlViewTimer(); + if (clarityPopWindow != null) { + clarityPopWindow.dismiss(); + } + } + + public class DismissControlViewTimerTask extends TimerTask { + + @Override + public void run() { + dissmissControlView(); + } + } + + public void dissmissControlView() { + if (currentState != CURRENT_STATE_NORMAL + && currentState != CURRENT_STATE_ERROR + && currentState != CURRENT_STATE_AUTO_COMPLETE) { + if (getContext() != null && getContext() instanceof Activity) { + ((Activity) getContext()).runOnUiThread(new Runnable() { + @Override + public void run() { + bottomContainer.setVisibility(View.INVISIBLE); + topContainer.setVisibility(View.INVISIBLE); + startButton.setVisibility(View.INVISIBLE); + if (clarityPopWindow != null) { + clarityPopWindow.dismiss(); + } + if (currentScreen != SCREEN_WINDOW_TINY) { + bottomProgressBar.setVisibility(View.VISIBLE); + } + } + }); + } + } + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/PublicVideoJZVideo.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/PublicVideoJZVideo.java new file mode 100644 index 0000000..c907003 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/PublicVideoJZVideo.java @@ -0,0 +1,205 @@ +package com.aserbao.androidcustomcamera.whole.jiaozivideo; + +import android.content.Context; +import android.media.AudioManager; +import android.media.MediaPlayer; +import android.util.AttributeSet; +import android.view.MotionEvent; +import android.view.View; +import android.view.WindowManager; +import android.widget.RelativeLayout; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.base.utils.DisplayUtil; +import com.aserbao.androidcustomcamera.whole.jiaozivideo.cusomview.MyJZVideoPlayerStandard; + +/** + * 这里可以监听到视频播放的生命周期和播放状态 + * 所有关于视频的逻辑都应该写在这里 + * Created by Nathen on 2017/7/2. 
+ */ +public class PublicVideoJZVideo extends JZVideoPlayerStandard { + private boolean mIsExit = false; + private MyJZVideoPlayerStandard.IPlayFinish mIPlayFinish; + + public PublicVideoJZVideo(Context context) { + super(context); + } + + public PublicVideoJZVideo(Context context, AttributeSet attrs) { + super(context, attrs); + } + + @Override + public void init(Context context) { + super.init(context); + initPosition(); + } + + private void initPosition() { + RelativeLayout.LayoutParams layoutParams = (RelativeLayout.LayoutParams)bottomProgressBar.getLayoutParams(); + int i = DisplayUtil.dp2px(getContext(), -10); + layoutParams.setMargins(0,0,0,i); + bottomProgressBar.setLayoutParams(layoutParams); + } + + @Override + public void onClick(View v) { + super.onClick(v); + int i = v.getId(); + if (i == R.id.fullscreen) { + if (currentScreen == SCREEN_WINDOW_FULLSCREEN) { + //click quit fullscreen + } else { + //click goto fullscreen + } + } + } + + @Override + public boolean onTouch(View v, MotionEvent event) { + return true; + } + + public void startVideo(boolean isLoop,MyJZVideoPlayerStandard.IPlayFinish playFinish){ + setNeedLoop(isLoop); + mIPlayFinish = playFinish; + super.startVideo(); + } + + @Override + public void startVideo() { + super.startVideo(); + } + + /** + * onPrepared + */ + @Override + public void onVideoRendingStart() { + try { + if(isCloseVoice){ + MediaPlayer mediaPlayer = JZMediaManager.instance().mediaPlayer; + if (mediaPlayer != null) { + mediaPlayer.setVolume(0,0); + } + } + } catch (Exception e) { + e.printStackTrace(); + } + if(mIsExit){ + JZVideoPlayerStandard.releaseAllVideos(); + } + super.onVideoRendingStart(); + } + public void setOnPrepared(boolean isExit){ + mIsExit = isExit; + } + + @Override + public void onStateNormal() { + super.onStateNormal(); + } + + @Override + public void onStatePreparing() { + super.onStatePreparing(); + } + + @Override + public void onStatePlaying() { + super.onStatePlaying(); + } + + @Override + public 
void onStatePause() { + super.onStatePause(); + } + + + + @Override + public void onStateError() { + super.onStateError(); + } + + @Override + public void onStateAutoComplete() { + super.onStateAutoComplete(); + if (mIPlayFinish != null) { + mIPlayFinish.playfinish(); + } + } + + @Override + public void onInfo(int what, int extra) { + super.onInfo(what, extra); + } + + @Override + public void onError(int what, int extra) { + super.onError(what, extra); + } + + @Override + public void startWindowFullscreen() { + super.startWindowFullscreen(); + } + + @Override + public void startWindowTiny() { + super.startWindowTiny(); + } + + // TODO: 2017/10/27 手动点暂停 + @Override + public void handlerOnPause() { + if (mHandlerClickVideoPauseListener != null) { + mHandlerClickVideoPauseListener.handlerPause(); + } + } + @Override + public void handlerOnStart() { + if (mHandlerClickVideoPauseListener != null) { + mHandlerClickVideoPauseListener.handlerStart(); + } + } + + private MyJZVideoPlayerStandard.HandlerClickVideoPauseListener mHandlerClickVideoPauseListener; + public void setHandlerClickVideoPauseListener(MyJZVideoPlayerStandard.HandlerClickVideoPauseListener mHandlerClickVideoPauseListener){ + this.mHandlerClickVideoPauseListener = mHandlerClickVideoPauseListener; + } + public interface HandlerClickVideoPauseListener{ + void handlerPause(); + void handlerStart(); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + WindowManager wm = (WindowManager) getContext() + .getSystemService(Context.WINDOW_SERVICE); + + int width = wm.getDefaultDisplay().getWidth(); + int height = wm.getDefaultDisplay().getHeight(); + widthMeasureSpec = MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY); + heightMeasureSpec = MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY); + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + } + + private boolean isCloseVoice = false; + public void clostVoice(){ + isCloseVoice = true; + } + // AudioManager 
audioManager=(AudioManager)getSystemService(Service.AUDIO_SERVICE); + public void OpenVolume(AudioManager audioManager){ + isCloseVoice = false; + MediaPlayer mediaPlayer = JZMediaManager.instance().mediaPlayer; + mediaPlayer.setAudioStreamType(AudioManager.STREAM_SYSTEM); + mediaPlayer.setVolume(audioManager.getStreamVolume(AudioManager.STREAM_SYSTEM), audioManager.getStreamVolume(AudioManager.STREAM_SYSTEM)); + mediaPlayer.start(); + } + + public interface IPlayFinish{ + void playfinish(); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/cusomview/MyJZVideoPlayerStandard.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/cusomview/MyJZVideoPlayerStandard.java new file mode 100644 index 0000000..24877dc --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/cusomview/MyJZVideoPlayerStandard.java @@ -0,0 +1,256 @@ +package com.aserbao.androidcustomcamera.whole.jiaozivideo.cusomview; + +import android.content.Context; +import android.media.AudioManager; +import android.media.MediaPlayer; +import android.util.AttributeSet; +import android.view.MotionEvent; +import android.view.View; +import android.view.WindowManager; +import android.widget.RelativeLayout; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.base.utils.DisplayUtil; +import com.aserbao.androidcustomcamera.whole.jiaozivideo.JZMediaManager; +import com.aserbao.androidcustomcamera.whole.jiaozivideo.JZVideoPlayerStandard; + + +/** + * 这里可以监听到视频播放的生命周期和播放状态 + * 所有关于视频的逻辑都应该写在这里 + * Created by Nathen on 2017/7/2. 
+ */ +public class MyJZVideoPlayerStandard extends JZVideoPlayerStandard { + private boolean mIsExit = false; + private IPlayFinish mIPlayFinish; + + public MyJZVideoPlayerStandard(Context context) { + super(context); + } + + public MyJZVideoPlayerStandard(Context context, AttributeSet attrs) { + super(context, attrs); + } + + @Override + public void init(Context context) { + super.init(context); + initPosition(); + } + public void setProgressBottom(){ + RelativeLayout.LayoutParams layoutParams = (RelativeLayout.LayoutParams)bottomProgressBar.getLayoutParams(); + layoutParams.setMargins(0,0,0,0); + bottomProgressBar.setLayoutParams(layoutParams); + } + public void initPosition() { + RelativeLayout.LayoutParams layoutParams = (RelativeLayout.LayoutParams)bottomProgressBar.getLayoutParams(); + int i = DisplayUtil.dp2px(getContext(), 50); + layoutParams.setMargins(0,0,0,i); + bottomProgressBar.setLayoutParams(layoutParams); + } + + @Override + public void onClick(View v) { + super.onClick(v); + int i = v.getId(); + if (i == R.id.start) { + if (currentScreen == SCREEN_WINDOW_FULLSCREEN) { + //click quit fullscreen + } else { + //click goto fullscreen + } + } + } + + @Override + public boolean onTouch(View v, MotionEvent event) { + return false; + /*if(event.getAction() == MotionEvent.ACTION_DOWN){ + + }else if(event.getAction() == MotionEvent.ACTION_MOVE){ + }else if(event.getAction() == MotionEvent.ACTION_UP) { + try { + MediaPlayer mediaPlayer = JZMediaManager.instance().mediaPlayer; + if (mediaPlayer != null) { + if (mediaPlayer.isPlaying()) { + mediaPlayer.pause(); + changeUiToPlayingClear(); + startButton.setImageResource(R.drawable.jz_click_play_selector); +// loadingProgressBar.setVisibility(INVISIBLE); + } else { + mediaPlayer.start(); + changeUiToPlayingShow(); + } + } + } catch (IllegalStateException e) { + e.printStackTrace(); + } + } + return super.onTouch(v,event);*/ + } + public void click(){ + try { + MediaPlayer mediaPlayer = 
JZMediaManager.instance().mediaPlayer; + if (mediaPlayer != null) { + if (mediaPlayer.isPlaying()) { + mediaPlayer.pause(); +// changeUiToPlayingClear(); + startButton.setImageResource(R.drawable.jz_click_play_selector); + startButton.setVisibility(VISIBLE); +// loadingProgressBar.setVisibility(INVISIBLE); + } else { + startButton.setVisibility(INVISIBLE); + mediaPlayer.start(); +// changeUiToPlayingShow(); + } + } + } catch (IllegalStateException e) { + e.printStackTrace(); + startVideo(); + } + } + + public void startVideo(boolean isLoop,IPlayFinish playFinish){ + setNeedLoop(isLoop); + mIPlayFinish = playFinish; + super.startVideo(); + } + + @Override + public void startVideo() { + setNeedLoop(true); + super.startVideo(); + mIPlayFinish = null; + } + + /** + * onPrepared + */ + @Override + public void onVideoRendingStart() { + try { + if(isCloseVoice){ + MediaPlayer mediaPlayer = JZMediaManager.instance().mediaPlayer; + if (mediaPlayer != null) { + mediaPlayer.setVolume(0,0); + } + } + } catch (Exception e) { + e.printStackTrace(); + } + if(mIsExit){ + JZVideoPlayerStandard.releaseAllVideos(); + } + super.onVideoRendingStart(); + } + public void setOnPrepared(boolean isExit){ + mIsExit = isExit; + } + + @Override + public void onStateNormal() { + super.onStateNormal(); + } + + @Override + public void onStatePreparing() { + super.onStatePreparing(); + } + + @Override + public void onStatePlaying() { + super.onStatePlaying(); + } + + @Override + public void onStatePause() { + super.onStatePause(); + } + + + + @Override + public void onStateError() { + super.onStateError(); + } + + @Override + public void onStateAutoComplete() { + super.onStateAutoComplete(); + if (mIPlayFinish != null) { + mIPlayFinish.playfinish(); + } + } + + @Override + public void onInfo(int what, int extra) { + super.onInfo(what, extra); + } + + @Override + public void onError(int what, int extra) { + super.onError(what, extra); + } + + @Override + public void startWindowFullscreen() { + 
super.startWindowFullscreen(); + } + + @Override + public void startWindowTiny() { + super.startWindowTiny(); + } + + // TODO: 2017/10/27 手动点暂停 + @Override + public void handlerOnPause() { + if (mHandlerClickVideoPauseListener != null) { + mHandlerClickVideoPauseListener.handlerPause(); + } + } + @Override + public void handlerOnStart() { + if (mHandlerClickVideoPauseListener != null) { + mHandlerClickVideoPauseListener.handlerStart(); + } + } + + private HandlerClickVideoPauseListener mHandlerClickVideoPauseListener; + public void setHandlerClickVideoPauseListener(HandlerClickVideoPauseListener mHandlerClickVideoPauseListener){ + this.mHandlerClickVideoPauseListener = mHandlerClickVideoPauseListener; + } + public interface HandlerClickVideoPauseListener{ + void handlerPause(); + void handlerStart(); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + WindowManager wm = (WindowManager) getContext() + .getSystemService(Context.WINDOW_SERVICE); + + int width = wm.getDefaultDisplay().getWidth(); + int height = wm.getDefaultDisplay().getHeight(); + widthMeasureSpec = View.MeasureSpec.makeMeasureSpec(width, View.MeasureSpec.EXACTLY); + heightMeasureSpec = View.MeasureSpec.makeMeasureSpec(height, View.MeasureSpec.EXACTLY); + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + } + + private boolean isCloseVoice = false; + public void clostVoice(){ + isCloseVoice = true; + } +// AudioManager audioManager=(AudioManager)getSystemService(Service.AUDIO_SERVICE); + public void OpenVolume(AudioManager audioManager){ + isCloseVoice = false; + MediaPlayer mediaPlayer = JZMediaManager.instance().mediaPlayer; + mediaPlayer.setAudioStreamType(AudioManager.STREAM_SYSTEM); + mediaPlayer.setVolume(audioManager.getStreamVolume(AudioManager.STREAM_SYSTEM), audioManager.getStreamVolume(AudioManager.STREAM_SYSTEM)); + mediaPlayer.start(); + } + + public interface IPlayFinish{ + void playfinish(); + } +} diff --git 
a/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/cusomview/SelVideoPlayerStandard.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/cusomview/SelVideoPlayerStandard.java new file mode 100644 index 0000000..5ccef9e --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/cusomview/SelVideoPlayerStandard.java @@ -0,0 +1,180 @@ +package com.aserbao.androidcustomcamera.whole.jiaozivideo.cusomview; + +import android.content.Context; +import android.media.MediaPlayer; +import android.util.AttributeSet; +import android.view.MotionEvent; +import android.view.View; +import android.widget.RelativeLayout; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.base.utils.DisplayUtil; +import com.aserbao.androidcustomcamera.whole.jiaozivideo.JZMediaManager; +import com.aserbao.androidcustomcamera.whole.jiaozivideo.JZVideoPlayerStandard; + + +/** + * 这里可以监听到视频播放的生命周期和播放状态 + * 所有关于视频的逻辑都应该写在这里 + * Created by Nathen on 2017/7/2. 
+ */ +public class SelVideoPlayerStandard extends JZVideoPlayerStandard { + private boolean mIsExit = false; + + public SelVideoPlayerStandard(Context context) { + super(context); + } + + public SelVideoPlayerStandard(Context context, AttributeSet attrs) { + super(context, attrs); + } + @Override + public void init(Context context) { + super.init(context); + initView(); + initPosition(); + } + + private void initView() { + topContainer.setVisibility(GONE); + bottomProgressBar.setVisibility(VISIBLE); + } + + private void initPosition() { + RelativeLayout.LayoutParams layoutParams = (RelativeLayout.LayoutParams)bottomProgressBar.getLayoutParams(); + int i = DisplayUtil.dp2px(getContext(), -10); + layoutParams.setMargins(0,0,0,i); + bottomProgressBar.setLayoutParams(layoutParams); + } + + @Override + public void onClick(View v) { + super.onClick(v); + int i = v.getId(); + if (i == R.id.fullscreen) { + if (currentScreen == SCREEN_WINDOW_FULLSCREEN) { + //click quit fullscreen + } else { + //click goto fullscreen + } + } + } + + @Override + public boolean onTouch(View v, MotionEvent event) { + if(event.getAction() == MotionEvent.ACTION_MOVE){ + }else if(event.getAction() == MotionEvent.ACTION_UP) { + try { + MediaPlayer mediaPlayer = JZMediaManager.instance().mediaPlayer; + if (mediaPlayer != null) { + if (mediaPlayer.isPlaying()) { + mediaPlayer.pause(); + changeUiToPlayingClear(); + startButton.setImageResource(R.drawable.jz_click_play_selector); +// loadingProgressBar.setVisibility(INVISIBLE); + } else { + mediaPlayer.start(); + changeUiToPlayingShow(); + } + } + } catch (IllegalStateException e) { + e.printStackTrace(); + } + } +// return true; + return super.onTouch(v,event); + } + + @Override + public void startVideo() { + super.startVideo(); + } + + /** + * onPrepared + */ + @Override + public void onVideoRendingStart() { + if(mIsExit){ + JZVideoPlayerStandard.releaseAllVideos(); + } + super.onVideoRendingStart(); + } + public void setOnPrepared(boolean isExit){ 
+ mIsExit = isExit; + } + + @Override + public void onStateNormal() { + super.onStateNormal(); + } + + @Override + public void onStatePreparing() { + super.onStatePreparing(); + } + + @Override + public void onStatePlaying() { + super.onStatePlaying(); + } + + @Override + public void onStatePause() { + super.onStatePause(); + } + + @Override + public void onStateError() { + super.onStateError(); + } + + @Override + public void onStateAutoComplete() { + super.onStateAutoComplete(); + } + + @Override + public void onInfo(int what, int extra) { + super.onInfo(what, extra); + } + + @Override + public void onError(int what, int extra) { + super.onError(what, extra); + } + + @Override + public void startWindowFullscreen() { + super.startWindowFullscreen(); + } + + @Override + public void startWindowTiny() { + super.startWindowTiny(); + } + + // TODO: 2017/10/27 手动点暂停 + @Override + public void handlerOnPause() { + if (mHandlerClickVideoPauseListener != null) { + mHandlerClickVideoPauseListener.handlerPause(); + } + } + @Override + public void handlerOnStart() { + if (mHandlerClickVideoPauseListener != null) { + mHandlerClickVideoPauseListener.handlerStart(); + } + } + + private HandlerClickVideoPauseListener mHandlerClickVideoPauseListener; + public void setHandlerClickVideoPauseListener(HandlerClickVideoPauseListener mHandlerClickVideoPauseListener){ + this.mHandlerClickVideoPauseListener = mHandlerClickVideoPauseListener; + } + public interface HandlerClickVideoPauseListener{ + void handlerPause(); + void handlerStart(); + } + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/cusomview/VideoInfoJZVideoPlayerStandard.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/cusomview/VideoInfoJZVideoPlayerStandard.java new file mode 100644 index 0000000..49de7f0 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/jiaozivideo/cusomview/VideoInfoJZVideoPlayerStandard.java @@ -0,0 +1,174 @@ +package 
com.aserbao.androidcustomcamera.whole.jiaozivideo.cusomview; + +import android.content.Context; +import android.util.AttributeSet; +import android.view.MotionEvent; +import android.view.View; +import android.view.WindowManager; +import android.widget.RelativeLayout; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.base.utils.DisplayUtil; +import com.aserbao.androidcustomcamera.whole.jiaozivideo.JZVideoPlayerStandard; + + +/** + * 这里可以监听到视频播放的生命周期和播放状态 + * 所有关于视频的逻辑都应该写在这里 + * Created by Nathen on 2017/7/2. + */ +public class VideoInfoJZVideoPlayerStandard extends JZVideoPlayerStandard { + private boolean mIsExit = false; + + public VideoInfoJZVideoPlayerStandard(Context context) { + super(context); + } + + public VideoInfoJZVideoPlayerStandard(Context context, AttributeSet attrs) { + super(context, attrs); + } + + @Override + public void init(Context context) { + super.init(context); + initPosition(); + } + + private void initPosition() { + RelativeLayout.LayoutParams layoutParams = (RelativeLayout.LayoutParams)bottomProgressBar.getLayoutParams(); + int i = DisplayUtil.dp2px(getContext(), 50); + layoutParams.setMargins(0,0,0,i); + bottomProgressBar.setLayoutParams(layoutParams); + startButton.setVisibility(GONE); + loadingProgressBar.setVisibility(GONE); + } + + @Override + public void onClick(View v) { + super.onClick(v); + int i = v.getId(); + if (i == R.id.fullscreen) { + if (currentScreen == SCREEN_WINDOW_FULLSCREEN) { + //click quit fullscreen + } else { + //click goto fullscreen + } + } + } + + @Override + public boolean onTouch(View v, MotionEvent event) { + return true; + } + + @Override + public void startVideo() { + setNeedVoice(true); + super.startVideo(); + } + + /** + * onPrepared + */ + @Override + public void onVideoRendingStart() { + if(mIsExit){ + JZVideoPlayerStandard.releaseAllVideos(); + } + super.onVideoRendingStart(); + } + public void setOnPrepared(boolean isExit){ + mIsExit = isExit; + } + + @Override + 
public void onStateNormal() { + super.onStateNormal(); + } + + @Override + public void onStatePreparing() { + super.onStatePreparing(); + } + + @Override + public void onStatePlaying() { + super.onStatePlaying(); + } + + @Override + public void onStatePause() { + super.onStatePause(); + } + + @Override + public void onStateError() { + super.onStateError(); + } + + @Override + public void onStateAutoComplete() { + super.onStateAutoComplete(); + } + + @Override + public void onInfo(int what, int extra) { + super.onInfo(what, extra); + } + + @Override + public void onError(int what, int extra) { + super.onError(what, extra); + } + + @Override + public void startWindowFullscreen() { + super.startWindowFullscreen(); + } + + @Override + public void startWindowTiny() { + super.startWindowTiny(); + } + + // TODO: 2017/10/27 手动点暂停 + @Override + public void handlerOnPause() { + if (mHandlerClickVideoPauseListener != null) { + mHandlerClickVideoPauseListener.handlerPause(); + } + } + @Override + public void handlerOnStart() { + if (mHandlerClickVideoPauseListener != null) { + mHandlerClickVideoPauseListener.handlerStart(); + } + } + + private HandlerClickVideoPauseListener mHandlerClickVideoPauseListener; + public void setHandlerClickVideoPauseListener(HandlerClickVideoPauseListener mHandlerClickVideoPauseListener){ + this.mHandlerClickVideoPauseListener = mHandlerClickVideoPauseListener; + } + public interface HandlerClickVideoPauseListener{ + void handlerPause(); + void handlerStart(); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + WindowManager wm = (WindowManager) getContext() + .getSystemService(Context.WINDOW_SERVICE); + int width = wm.getDefaultDisplay().getWidth(); + int height = wm.getDefaultDisplay().getHeight(); + /*int height = 0; + for (int i = 0; i < getChildCount(); i++) { + View child = getChildAt(i); + child.measure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED)); + int h = 
child.getMeasuredHeight(); + if (h > height)height = h; + }*/ + widthMeasureSpec = View.MeasureSpec.makeMeasureSpec(width, View.MeasureSpec.EXACTLY); + heightMeasureSpec = View.MeasureSpec.makeMeasureSpec(height, View.MeasureSpec.EXACTLY); + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/BaseActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/BaseActivity.java new file mode 100644 index 0000000..ce2150d --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/BaseActivity.java @@ -0,0 +1,108 @@ +package com.aserbao.androidcustomcamera.whole.pickvideo; + +import android.content.Intent; +import android.os.Bundle; +import android.support.annotation.NonNull; +import android.support.annotation.Nullable; +import android.support.v7.app.AppCompatActivity; +import android.util.Log; +import android.view.View; + +import com.aserbao.androidcustomcamera.R; +import java.util.List; + +import pub.devrel.easypermissions.AfterPermissionGranted; +import pub.devrel.easypermissions.AppSettingsDialog; +import pub.devrel.easypermissions.EasyPermissions; + +// + +/** + * Created by Vincent Woo + * Date: 2016/10/12 + * Time: 16:21 + */ + +public abstract class BaseActivity extends AppCompatActivity implements EasyPermissions.PermissionCallbacks { + private static final int RC_READ_EXTERNAL_STORAGE = 123; + private static final String TAG = BaseActivity.class.getName(); + + protected FolderListHelper mFolderHelper; + protected boolean isNeedFolderList; + public static final String IS_NEED_FOLDER_LIST = "isNeedFolderList"; + + abstract void permissionGranted(); + + @Override + protected void onCreate(@Nullable Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + + isNeedFolderList = getIntent().getBooleanExtra(IS_NEED_FOLDER_LIST, false); + if (isNeedFolderList) { + mFolderHelper = new FolderListHelper(); + 
mFolderHelper.initFolderListView(this); + } + } + + @Override + protected void onPostCreate(@Nullable Bundle savedInstanceState) { + super.onPostCreate(savedInstanceState); + readExternalStorage(); + } + + @Override + public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) { + super.onRequestPermissionsResult(requestCode, permissions, grantResults); + // Forward results to EasyPermissions + EasyPermissions.onRequestPermissionsResult(requestCode, permissions, grantResults, this); + } + + /** + * Read external storage file + */ + @AfterPermissionGranted(RC_READ_EXTERNAL_STORAGE) + private void readExternalStorage() { + boolean isGranted = EasyPermissions.hasPermissions(this, "android.permission.READ_EXTERNAL_STORAGE"); + if (isGranted) { + permissionGranted(); + } else { + EasyPermissions.requestPermissions(this, "需要读取存储权限以提供文件选择功能", + RC_READ_EXTERNAL_STORAGE, "android.permission.READ_EXTERNAL_STORAGE"); + } + } + + @Override + public void onPermissionsGranted(int requestCode, List perms) { + Log.d(TAG, "onPermissionsGranted:" + requestCode + ":" + perms.size()); + permissionGranted(); + } + + @Override + public void onPermissionsDenied(int requestCode, List perms) { + Log.d(TAG, "onPermissionsDenied:" + requestCode + ":" + perms.size()); + // If Permission permanently denied, ask user again + if (EasyPermissions.somePermissionPermanentlyDenied(this, perms)) { + new AppSettingsDialog.Builder(this).build().show(); + } else { + finish(); + } + } + + @Override + protected void onActivityResult(int requestCode, int resultCode, Intent data) { + super.onActivityResult(requestCode, resultCode, data); + + if (requestCode == AppSettingsDialog.DEFAULT_SETTINGS_REQ_CODE) { + // Do something after user returned from app settings screen, like showing a Toast. 
+ if (EasyPermissions.hasPermissions(this, "android.permission.READ_EXTERNAL_STORAGE")) { + permissionGranted(); + } else { + finish(); + } + } + } + + public void onBackClick(View view) { + finish(); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/BaseAdapter.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/BaseAdapter.java new file mode 100644 index 0000000..48aafd6 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/BaseAdapter.java @@ -0,0 +1,64 @@ +package com.aserbao.androidcustomcamera.whole.pickvideo; + +import android.content.Context; +import android.support.v7.widget.RecyclerView; + +import com.aserbao.androidcustomcamera.whole.pickvideo.beans.VideoFile; +import com.aserbao.androidcustomcamera.whole.pickvideo.callback.OnSelectStateListener; + +import java.util.ArrayList; +import java.util.List; + +/** + * Created by Vincent Woo + * Date: 2016/10/14 + * Time: 15:42 + */ + +public abstract class BaseAdapter extends RecyclerView.Adapter { + protected Context mContext; + protected ArrayList mList; + protected OnSelectStateListener mListener; + + public BaseAdapter(Context ctx, ArrayList list) { + mContext = ctx; + mList = list; + } + + public void add(List list) { + mList.addAll(list); + notifyDataSetChanged(); + } + + public void add(T file) { + mList.add(file); + notifyDataSetChanged(); + } + + public void add(int index, T file) { + mList.add(index, file); + notifyDataSetChanged(); + } + + public void refresh(List list) { + mList.clear(); + mList.addAll(list); + notifyDataSetChanged(); + } + + public void refresh(T file) { + mList.clear(); + mList.add(file); + notifyDataSetChanged(); + } + + public List getDataSet() { + return mList; + } + + + + public void setOnSelectStateListener(OnSelectStateListener listener) { + mListener = listener; + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/FileFilter.java 
b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/FileFilter.java new file mode 100644 index 0000000..eb79102 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/FileFilter.java @@ -0,0 +1,39 @@ +package com.aserbao.androidcustomcamera.whole.pickvideo; + +import android.support.v4.app.FragmentActivity; + +import com.aserbao.androidcustomcamera.whole.pickvideo.beans.AudioFile; +import com.aserbao.androidcustomcamera.whole.pickvideo.beans.ImageFile; +import com.aserbao.androidcustomcamera.whole.pickvideo.beans.NormalFile; +import com.aserbao.androidcustomcamera.whole.pickvideo.beans.VideoFile; +import com.aserbao.androidcustomcamera.whole.pickvideo.callback.FileLoaderCallbacks; +import com.aserbao.androidcustomcamera.whole.pickvideo.callback.FilterResultCallback; + +import static com.aserbao.androidcustomcamera.whole.pickvideo.callback.FileLoaderCallbacks.TYPE_AUDIO; +import static com.aserbao.androidcustomcamera.whole.pickvideo.callback.FileLoaderCallbacks.TYPE_FILE; +import static com.aserbao.androidcustomcamera.whole.pickvideo.callback.FileLoaderCallbacks.TYPE_IMAGE; +import static com.aserbao.androidcustomcamera.whole.pickvideo.callback.FileLoaderCallbacks.TYPE_VIDEO; + + +public class FileFilter { + public static void getImages(FragmentActivity activity, FilterResultCallback callback){ + activity.getSupportLoaderManager().initLoader(0, null, + new FileLoaderCallbacks(activity, callback, TYPE_IMAGE)); + } + + public static void getVideos(FragmentActivity activity, FilterResultCallback callback){ + activity.getSupportLoaderManager().initLoader(1, null, + new FileLoaderCallbacks(activity, callback, TYPE_VIDEO)); + } + + public static void getAudios(FragmentActivity activity, FilterResultCallback callback){ + activity.getSupportLoaderManager().initLoader(2, null, + new FileLoaderCallbacks(activity, callback, TYPE_AUDIO)); + } + + public static void getFiles(FragmentActivity activity, + FilterResultCallback 
callback, String[] suffix){ + activity.getSupportLoaderManager().initLoader(3, null, + new FileLoaderCallbacks(activity, callback, TYPE_FILE, suffix)); + } +} \ No newline at end of file diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/FolderListAdapter.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/FolderListAdapter.java new file mode 100644 index 0000000..c55dc65 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/FolderListAdapter.java @@ -0,0 +1,70 @@ +package com.aserbao.androidcustomcamera.whole.pickvideo; + +import android.content.Context; +import android.support.v7.widget.RecyclerView; +import android.view.LayoutInflater; +import android.view.View; +import android.view.ViewGroup; +import android.widget.TextView; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.whole.pickvideo.beans.Directory; + +import java.util.ArrayList; + +/** + * Created by Vincent Woo + * Date: 2018/2/27 + * Time: 10:25 + */ + +public class FolderListAdapter extends BaseAdapter { + private FolderListListener mListener; + + public FolderListAdapter(Context ctx, ArrayList list) { + super(ctx, list); + } + + @Override + public FolderListViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { + View itemView = LayoutInflater.from(mContext).inflate(R.layout.vw_layout_item_folder_list, + parent, false); + return new FolderListViewHolder(itemView); + } + + @Override + public void onBindViewHolder(final FolderListViewHolder holder, int position) { + holder.mTvTitle.setText(mList.get(position).getName()); + holder.itemView.setOnClickListener(new View.OnClickListener() { + @Override + public void onClick(View v) { + if (mListener != null) { + mListener.onFolderListClick(mList.get(holder.getAdapterPosition())); + } + } + }); + } + + @Override + public int getItemCount() { + return mList.size(); + } + + class FolderListViewHolder extends RecyclerView.ViewHolder { + 
private TextView mTvTitle; + + public FolderListViewHolder(View itemView) { + super(itemView); + + mTvTitle = (TextView) itemView.findViewById(R.id.tv_folder_title); + } + } + + public interface FolderListListener { + void onFolderListClick(Directory directory); + } + + public void setListener(FolderListListener listener) { + this.mListener = listener; + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/FolderListHelper.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/FolderListHelper.java new file mode 100644 index 0000000..4575948 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/FolderListHelper.java @@ -0,0 +1,68 @@ +package com.aserbao.androidcustomcamera.whole.pickvideo; + +import android.content.Context; +import android.graphics.Color; +import android.graphics.drawable.ColorDrawable; +import android.support.v7.widget.LinearLayoutManager; +import android.support.v7.widget.RecyclerView; +import android.view.LayoutInflater; +import android.view.View; +import android.widget.PopupWindow; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.whole.pickvideo.beans.Directory; + +import java.util.ArrayList; +import java.util.List; + +/** + * Created by Vincent Woo + * Date: 2018/2/27 + * Time: 13:43 + */ + +public class FolderListHelper { + private PopupWindow mPopupWindow; + private View mContentView; + private RecyclerView rv_folder; + private FolderListAdapter mAdapter; + + public void initFolderListView(Context ctx) { + if (mPopupWindow == null) { + mContentView = LayoutInflater.from(ctx).inflate(R.layout.vw_layout_folder_list, null); + rv_folder = (RecyclerView) mContentView.findViewById(R.id.rv_folder); + mAdapter = new FolderListAdapter(ctx, new ArrayList()); + rv_folder.setAdapter(mAdapter); + rv_folder.setLayoutManager(new LinearLayoutManager(ctx)); + mContentView.setFocusable(true); + mContentView.setFocusableInTouchMode(true); + + 
mPopupWindow = new PopupWindow(mContentView); + mPopupWindow.setBackgroundDrawable(new ColorDrawable(Color.TRANSPARENT)); + mPopupWindow.setFocusable(true); + mPopupWindow.setOutsideTouchable(false); + mPopupWindow.setTouchable(true); + } + } + + public void setFolderListListener(FolderListAdapter.FolderListListener listener) { + mAdapter.setListener(listener); + } + + public void fillData(List list) { + mAdapter.refresh(list); + } + + public void toggle(View anchor) { + if (mPopupWindow.isShowing()) { + mPopupWindow.dismiss(); + } else { + mContentView.measure(View.MeasureSpec.UNSPECIFIED, View.MeasureSpec.UNSPECIFIED); + mPopupWindow.showAsDropDown(anchor, + (anchor.getMeasuredWidth() - mContentView.getMeasuredWidth()) / 2, + 0); + mPopupWindow.update(anchor, mContentView.getMeasuredWidth(), + mContentView.getMeasuredHeight()); + } + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/MaxHeightLayout.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/MaxHeightLayout.java new file mode 100644 index 0000000..da792d1 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/MaxHeightLayout.java @@ -0,0 +1,104 @@ +package com.aserbao.androidcustomcamera.whole.pickvideo; + +import android.content.Context; +import android.content.res.TypedArray; +import android.util.AttributeSet; +import android.view.View; +import android.view.WindowManager; +import android.widget.FrameLayout; + +import com.aserbao.androidcustomcamera.R; + +/** + * Created by Vincent Woo + * Date: 2018/2/26 + * Time: 17:52 + * + * 先判断是否设定了mMaxHeight,如果设定了mMaxHeight,则直接使用mMaxHeight的值, + * 如果没有设定mMaxHeight,则判断是否设定了mMaxRatio,如果设定了mMaxRatio的值 + * 则使用此值与屏幕高度的乘积作为最高高度 + */ + +public class MaxHeightLayout extends FrameLayout { + + private static final float DEFAULT_MAX_RATIO = 0.6f; + private static final float DEFAULT_MAX_HEIGHT = 0f; + + private float mMaxRatio = DEFAULT_MAX_RATIO;// 优先级高 + private float mMaxHeight = 
DEFAULT_MAX_HEIGHT;// 优先级低 + + public MaxHeightLayout(Context context) { + super(context); + init(); + } + + public MaxHeightLayout(Context context, AttributeSet attrs) { + super(context, attrs); + initAttrs(context, attrs); + init(); + } + + public MaxHeightLayout(Context context, AttributeSet attrs, int defStyle) { + super(context, attrs, defStyle); + initAttrs(context, attrs); + init(); + } + + private void initAttrs(Context context, AttributeSet attrs) { + TypedArray a = context.obtainStyledAttributes(attrs, + R.styleable.MaxHeightLayout); + + final int count = a.getIndexCount(); + for (int i = 0; i < count; ++i) { + int attr = a.getIndex(i); + if(attr == R.styleable.MaxHeightLayout_mhl_HeightRatio){ + mMaxRatio = a.getFloat(attr, DEFAULT_MAX_RATIO); + }else if(attr == R.styleable.MaxHeightLayout_mhl_HeightDimen){ + mMaxHeight = a.getDimension(attr, DEFAULT_MAX_HEIGHT); + } + } + a.recycle(); + } + + private void init(){ + if (mMaxHeight <= 0) { + mMaxHeight = mMaxRatio * (float) getScreenHeight(getContext()); + } else { + mMaxHeight = Math.min(mMaxHeight, mMaxRatio * (float) getScreenHeight(getContext())); + } + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + + int heightMode = MeasureSpec.getMode(heightMeasureSpec); + int heightSize = MeasureSpec.getSize(heightMeasureSpec); + + if (heightMode == MeasureSpec.EXACTLY) { + heightSize = heightSize <= mMaxHeight ? heightSize + : (int) mMaxHeight; + } + + if (heightMode == MeasureSpec.UNSPECIFIED) { + heightSize = heightSize <= mMaxHeight ? heightSize + : (int) mMaxHeight; + } + if (heightMode == MeasureSpec.AT_MOST) { + heightSize = heightSize <= mMaxHeight ? 
heightSize + : (int) mMaxHeight; + } + int maxHeightMeasureSpec = View.MeasureSpec.makeMeasureSpec(heightSize, + heightMode); + super.onMeasure(widthMeasureSpec, maxHeightMeasureSpec); + } + + /** + * 获取屏幕高度 + * + * @param context + */ + private int getScreenHeight(Context context) { + WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE); + return wm.getDefaultDisplay().getHeight(); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/Util.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/Util.java new file mode 100644 index 0000000..121dc42 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/Util.java @@ -0,0 +1,130 @@ +package com.aserbao.androidcustomcamera.whole.pickvideo; + +import android.content.Context; +import android.content.Intent; +import android.content.pm.PackageManager; +import android.content.pm.ResolveInfo; +import android.util.DisplayMetrics; +import android.view.WindowManager; + +import java.util.List; + +/** + * Created by Vincent Woo + * Date: 2016/10/21 + * Time: 16:50 + */ + +public class Util { + public static boolean detectIntent(Context ctx, Intent intent) { + final PackageManager packageManager = ctx.getPackageManager(); + List list = packageManager.queryIntentActivities( + intent, PackageManager.MATCH_DEFAULT_ONLY); + return list.size() > 0; + } + + public static String getDurationString(long duration) { +// long days = duration / (1000 * 60 * 60 * 24); + long hours = (duration % (1000 * 60 * 60 * 24)) / (1000 * 60 * 60); + long minutes = (duration % (1000 * 60 * 60)) / (1000 * 60); + long seconds = (duration % (1000 * 60)) / 1000; + + String hourStr = (hours < 10) ? "0" + hours : hours + ""; + String minuteStr = (minutes < 10) ? "0" + minutes : minutes + ""; + String secondStr = (seconds < 10) ? 
"0" + seconds : seconds + ""; + + if (hours != 0) { + return hourStr + ":" + minuteStr + ":" + secondStr; + } else { + return minuteStr + ":" + secondStr; + } + } + + public static int getScreenWidth(Context ctx) { + WindowManager wm = (WindowManager) ctx.getSystemService(Context.WINDOW_SERVICE); + DisplayMetrics dm = new DisplayMetrics(); + wm.getDefaultDisplay().getMetrics(dm); + return dm.widthPixels; + } + + public static int getScreenHeight(Context ctx) { + WindowManager wm = (WindowManager) ctx.getSystemService(Context.WINDOW_SERVICE); + DisplayMetrics dm = new DisplayMetrics(); + wm.getDefaultDisplay().getMetrics(dm); + return dm.heightPixels; + } + + public static int dip2px(Context context, float dpValue) { + final float scale = context.getResources().getDisplayMetrics().density; + return (int) (dpValue * scale + 0.5f); + } + + public static int px2dip(Context context, float pxValue) { + final float scale = context.getResources().getDisplayMetrics().density; + return (int) (pxValue / scale + 0.5f); + } + + /** + * Extract the file name in a URL + * /storage/emulated/legacy/Download/sample.pptx = sample.pptx + * + * @param url String of a URL + * @return the file name of URL with suffix + */ + public static String extractFileNameWithSuffix(String url) { + return url.substring(url.lastIndexOf("/") + 1); + } + + /** + * Extract the file name in a URL + * /storage/emulated/legacy/Download/sample.pptx = sample + * + * @param url String of a URL + * @return the file name of URL without suffix + */ + public static String extractFileNameWithoutSuffix(String url) { + try { + return url.substring(url.lastIndexOf("/") + 1, url.lastIndexOf(".")); + } catch (StringIndexOutOfBoundsException e) { + e.printStackTrace(); + return ""; + } + } + + /** + * Extract the path in a URL + * /storage/emulated/legacy/Download/sample.pptx = /storage/emulated/legacy/Download/ + * + * @param url String of a URL + * @return the path of URL with the file separator + */ + public static 
String extractPathWithSeparator(String url) { + return url.substring(0, url.lastIndexOf("/") + 1); + } + + /** + * Extract the path in a URL + * /storage/emulated/legacy/Download/sample.pptx = /storage/emulated/legacy/Download + * + * @param url String of a URL + * @return the path of URL without the file separator + */ + public static String extractPathWithoutSeparator(String url) { + return url.substring(0, url.lastIndexOf("/")); + } + + /** + * Extract the suffix in a URL + * /storage/emulated/legacy/Download/sample.pptx = pptx + * + * @param url String of a URL + * @return the suffix of URL + */ + public static String extractFileSuffix(String url) { + if (url.contains(".")) { + return url.substring(url.lastIndexOf(".") + 1); + } else { + return ""; + } + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/VideoPickActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/VideoPickActivity.java new file mode 100644 index 0000000..4987e44 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/VideoPickActivity.java @@ -0,0 +1,233 @@ +package com.aserbao.androidcustomcamera.whole.pickvideo; + +import android.content.Intent; +import android.net.Uri; +import android.os.Bundle; +import android.support.annotation.Nullable; +import android.support.v7.widget.GridLayoutManager; +import android.support.v7.widget.RecyclerView; +import android.text.TextUtils; +import android.view.View; +import android.view.Window; +import android.view.WindowManager; +import android.widget.LinearLayout; +import android.widget.ProgressBar; +import android.widget.RelativeLayout; +import android.widget.TextView; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.base.utils.StaticFinalValues; +import com.aserbao.androidcustomcamera.whole.pickvideo.beans.Directory; +import com.aserbao.androidcustomcamera.whole.pickvideo.beans.VideoFile; +import 
com.aserbao.androidcustomcamera.whole.pickvideo.callback.FilterResultCallback; +import com.aserbao.androidcustomcamera.whole.pickvideo.callback.OnSelectStateListener; +import com.aserbao.androidcustomcamera.whole.pickvideo.itemDecoration.DividerGridItemDecoration; + +import java.io.File; +import java.util.ArrayList; +import java.util.List; + + +public class VideoPickActivity extends BaseActivity { + public static final String THUMBNAIL_PATH = "FilePick"; + public static final String IS_NEED_CAMERA = "IsNeedCamera"; + public static final String IS_TAKEN_AUTO_SELECTED = "IsTakenAutoSelected"; + + public static final int DEFAULT_MAX_NUMBER = 9; + public static final int COLUMN_NUMBER = 3; + private int mMaxNumber; + private int mCurrentNumber = 0; + private RecyclerView mRecyclerView; + private VideoPickAdapter mAdapter; + private boolean isNeedCamera; + private boolean isTakenAutoSelected; + private ArrayList mSelectedList = new ArrayList<>(); + private List> mAll; + private ProgressBar mProgressBar; + + private TextView tv_count; + private TextView tv_folder; + private LinearLayout ll_folder; + private RelativeLayout rl_done; + private RelativeLayout tb_pick; + + @Override + void permissionGranted() { + loadData(); + } + + @Override + protected void onCreate(@Nullable Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + getWindow().addFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);//状态栏半透明 + requestWindowFeature(Window.FEATURE_NO_TITLE); + getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, + WindowManager.LayoutParams.FLAG_FULLSCREEN); + setContentView(R.layout.vw_activity_video_pick); + mMaxNumber = getIntent().getIntExtra(StaticFinalValues.MAX_NUMBER, DEFAULT_MAX_NUMBER); + isNeedCamera = getIntent().getBooleanExtra(IS_NEED_CAMERA, false); + isTakenAutoSelected = getIntent().getBooleanExtra(IS_TAKEN_AUTO_SELECTED, true); + mSelectedList.clear(); + initView(); + } + + private void initView() { + tv_count = (TextView) 
findViewById(R.id.tv_count); + tv_count.setText(mCurrentNumber + "/" + mMaxNumber); + mRecyclerView = (RecyclerView) findViewById(R.id.rv_video_pick); + GridLayoutManager layoutManager = new GridLayoutManager(this, COLUMN_NUMBER); + mRecyclerView.setLayoutManager(layoutManager); + mRecyclerView.addItemDecoration(new DividerGridItemDecoration(this)); + + mAdapter = new VideoPickAdapter(this, isNeedCamera, mMaxNumber); + mRecyclerView.setAdapter(mAdapter); + + mAdapter.setOnSelectStateListener(new OnSelectStateListener() { + @Override + public void OnSelectStateChanged(boolean state, VideoFile file) { + if (state) { + mSelectedList.add(file); + mCurrentNumber++; + } else { + mSelectedList.remove(file); + mCurrentNumber--; + } + tv_count.setText(mCurrentNumber + "/" + mMaxNumber); + } + }); + + mProgressBar = (ProgressBar) findViewById(R.id.pb_video_pick); + File folder = new File(getExternalCacheDir().getAbsolutePath() + File.separator + THUMBNAIL_PATH); + if (!folder.exists()) { + mProgressBar.setVisibility(View.VISIBLE); + } else { + mProgressBar.setVisibility(View.GONE); + } + + rl_done = (RelativeLayout) findViewById(R.id.rl_done); + rl_done.setOnClickListener(new View.OnClickListener() { + @Override + public void onClick(View v) { + Intent intent = new Intent(); + intent.putParcelableArrayListExtra(StaticFinalValues.RESULT_PICK_VIDEO, mSelectedList); + setResult(RESULT_OK, intent); + finish(); + } + }); + + tb_pick = (RelativeLayout) findViewById(R.id.tb_pick); + ll_folder = (LinearLayout) findViewById(R.id.ll_folder); + if (isNeedFolderList) { + ll_folder.setVisibility(View.VISIBLE); + ll_folder.setOnClickListener(new View.OnClickListener() { + @Override + public void onClick(View v) { + mFolderHelper.toggle(tb_pick); + } + }); + tv_folder = (TextView) findViewById(R.id.tv_folder); + tv_folder.setText("全部"); + + mFolderHelper.setFolderListListener(new FolderListAdapter.FolderListListener() { + @Override + public void onFolderListClick(Directory directory) { + 
mFolderHelper.toggle(tb_pick); + tv_folder.setText(directory.getName()); + + if (TextUtils.isEmpty(directory.getPath())) { //All + refreshData(mAll); + } else { + for (Directory dir : mAll) { + if (dir.getPath().equals(directory.getPath())) { + List> list = new ArrayList<>(); + list.add(dir); + refreshData(list); + break; + } + } + } + } + }); + } + } + + @Override + protected void onActivityResult(int requestCode, int resultCode, Intent data) { + super.onActivityResult(requestCode, resultCode, data); + switch (requestCode) { + case StaticFinalValues.REQUEST_CODE_TAKE_VIDEO: + if (resultCode == RESULT_OK) { + Intent mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE); + File file = new File(mAdapter.mVideoPath); + Uri contentUri = Uri.fromFile(file); + mediaScanIntent.setData(contentUri); + sendBroadcast(mediaScanIntent); + loadData(); + } + break; + } + } + + private void loadData() { + FileFilter.getVideos(this, new FilterResultCallback() { + @Override + public void onResult(List> directories) { + mProgressBar.setVisibility(View.GONE); + // Refresh folder list + if (isNeedFolderList) { + ArrayList list = new ArrayList<>(); + Directory all = new Directory(); + all.setName("全部"); + list.add(all); + list.addAll(directories); + mFolderHelper.fillData(list); + } + + mAll = directories; + refreshData(directories); + } + }); + } + + private void refreshData(List> directories) { + boolean tryToFindTaken = isTakenAutoSelected; + + // if auto-select taken file is enabled, make sure requirements are met + if (tryToFindTaken && !TextUtils.isEmpty(mAdapter.mVideoPath)) { + File takenFile = new File(mAdapter.mVideoPath); + tryToFindTaken = !mAdapter.isUpToMax() && takenFile.exists(); // try to select taken file only if max isn't reached and the file exists + } + + List list = new ArrayList<>(); + for (Directory directory : directories) { + list.addAll(directory.getFiles()); + + // auto-select taken file? 
+ if (tryToFindTaken) { + tryToFindTaken = findAndAddTaken(directory.getFiles()); // if taken file was found, we're done + } + } + + for (VideoFile file : mSelectedList) { + int index = list.indexOf(file); + if (index != -1) { + list.get(index).setSelected(true); + } + } + mAdapter.refresh(list); + } + + private boolean findAndAddTaken(List list) { + for (VideoFile videoFile : list) { + if (videoFile.getPath().equals(mAdapter.mVideoPath)) { + mSelectedList.add(videoFile); + mCurrentNumber++; + mAdapter.setCurrentNumber(mCurrentNumber); + tv_count.setText(mCurrentNumber + "/" + mMaxNumber); + + return true; // taken file was found and added + } + } + return false; // taken file wasn't found + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/VideoPickAdapter.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/VideoPickAdapter.java new file mode 100644 index 0000000..2dd174e --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/VideoPickAdapter.java @@ -0,0 +1,232 @@ +package com.aserbao.androidcustomcamera.whole.pickvideo; + +import android.app.Activity; +import android.content.ContentValues; +import android.content.Context; +import android.content.Intent; +import android.net.Uri; +import android.os.Build; +import android.os.Environment; +import android.provider.MediaStore; +import android.support.v4.content.FileProvider; +import android.support.v7.widget.RecyclerView; +import android.view.LayoutInflater; +import android.view.View; +import android.view.ViewGroup; +import android.view.WindowManager; +import android.widget.ImageView; +import android.widget.RelativeLayout; +import android.widget.TextView; +import android.widget.Toast; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.base.utils.StaticFinalValues; +import com.aserbao.androidcustomcamera.whole.pickvideo.beans.VideoFile; +import com.bumptech.glide.Glide; + +import java.io.File; +import 
java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Date; +import java.util.Locale; + +import static android.os.Environment.DIRECTORY_DCIM; + +/** + * Created by Vincent Woo + * Date: 2016/10/21 + * Time: 14:13 + */ + +public class VideoPickAdapter extends BaseAdapter { + private boolean isNeedCamera; + private int mMaxNumber; + private int mCurrentNumber = 0; + public String mVideoPath; + private Context mContext; + + public VideoPickAdapter(Context ctx, boolean needCamera, int max) { + this(ctx, new ArrayList(), needCamera, max); + } + + public VideoPickAdapter(Context ctx, ArrayList list, boolean needCamera, int max) { + super(ctx, list); + isNeedCamera = needCamera; + mMaxNumber = max; + mContext = ctx; + } + + @Override + public VideoPickAdapter.VideoPickViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { + View itemView = LayoutInflater.from(mContext).inflate(R.layout.vw_layout_item_video_pick, parent, false); + ViewGroup.LayoutParams params = itemView.getLayoutParams(); + if (params != null) { + WindowManager wm = (WindowManager) mContext.getSystemService(Context.WINDOW_SERVICE); + int width = wm.getDefaultDisplay().getWidth(); + params.height = width / VideoPickActivity.COLUMN_NUMBER; + } + return new VideoPickViewHolder(itemView); + } + + @Override + public void onBindViewHolder(final VideoPickViewHolder holder, int position) { + if (isNeedCamera && position == 0) { + holder.mIvCamera.setVisibility(View.VISIBLE); + holder.mIvThumbnail.setVisibility(View.INVISIBLE); + holder.mCbx.setVisibility(View.INVISIBLE); + holder.mShadow.setVisibility(View.INVISIBLE); + holder.mDurationLayout.setVisibility(View.INVISIBLE); + holder.itemView.setOnClickListener(new View.OnClickListener() { + @Override + public void onClick(View v) { + Intent intent = new Intent(MediaStore.ACTION_VIDEO_CAPTURE); + String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss", Locale.ENGLISH).format(new Date()); + File file = new 
File(Environment.getExternalStoragePublicDirectory(DIRECTORY_DCIM).getAbsolutePath() + + "/VID_" + timeStamp + ".mp4"); + mVideoPath = file.getAbsolutePath(); + + ContentValues contentValues = new ContentValues(1); + contentValues.put(MediaStore.Images.Media.DATA, mVideoPath); + Uri uri = mContext.getContentResolver().insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, contentValues); + + intent.putExtra(MediaStore.EXTRA_OUTPUT, uri); + intent.putExtra(MediaStore.EXTRA_VIDEO_QUALITY, 1); + if (Util.detectIntent(mContext, intent)) { + ((Activity) mContext).startActivityForResult(intent, StaticFinalValues.REQUEST_CODE_TAKE_VIDEO); + } else { + Toast.makeText(mContext, "没有可用的视频录制应用", Toast.LENGTH_SHORT).show(); + } + } + }); + } else { + holder.mIvCamera.setVisibility(View.INVISIBLE); + holder.mIvThumbnail.setVisibility(View.VISIBLE); + holder.mCbx.setVisibility(View.VISIBLE); + holder.mDurationLayout.setVisibility(View.VISIBLE); + + final VideoFile file; + if (isNeedCamera) { + file = mList.get(position - 1); + } else { + file = mList.get(position); + } + + Glide.with(mContext) + .load(file.getPath()) + .into(holder.mIvThumbnail); + + if (file.isSelected()) { + holder.mCbx.setSelected(true); + holder.mShadow.setVisibility(View.VISIBLE); + } else { + holder.mCbx.setSelected(false); + holder.mShadow.setVisibility(View.INVISIBLE); + } + + holder.mCbx.setOnClickListener(new View.OnClickListener() { + @Override + public void onClick(View v) { + if (!v.isSelected() && isUpToMax()) { + Toast.makeText(mContext, "已达到选择上限", Toast.LENGTH_SHORT).show(); + return; + } + + if (v.isSelected()) { + holder.mShadow.setVisibility(View.INVISIBLE); + holder.mCbx.setSelected(false); + mCurrentNumber--; + } else { + holder.mShadow.setVisibility(View.VISIBLE); + holder.mCbx.setSelected(true); + mCurrentNumber++; + } + + int index = isNeedCamera ? 
holder.getAdapterPosition() - 1 : holder.getAdapterPosition(); + mList.get(index).setSelected(holder.mCbx.isSelected()); + + if (mListener != null) { + mListener.OnSelectStateChanged(holder.mCbx.isSelected(), mList.get(index)); + } + } + }); + + holder.itemView.setOnClickListener(new View.OnClickListener() { + @Override + public void onClick(View v) { + if (!v.isSelected() && isUpToMax()) { + Toast.makeText(mContext, "已达到选择上限", Toast.LENGTH_SHORT).show(); + return; + } + + if (v.isSelected()) { + holder.mShadow.setVisibility(View.INVISIBLE); + holder.mCbx.setSelected(false); + mCurrentNumber--; + } else { + holder.mShadow.setVisibility(View.VISIBLE); + holder.mCbx.setSelected(true); + mCurrentNumber++; + } + + int index = isNeedCamera ? holder.getAdapterPosition() - 1 : holder.getAdapterPosition(); + mList.get(index).setSelected(holder.mCbx.isSelected()); + + if (mListener != null) { + mListener.OnSelectStateChanged(holder.mCbx.isSelected(), mList.get(index)); + } + /* String path = file.getPath(); + Uri uri = Uri.parse("file://" + path); + Intent intent = new Intent(Intent.ACTION_VIEW); + intent.setDataAndType(getUri(file.getPath()), "video*//*"); + intent.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION); + if (Util.detectIntent(mContext, intent)) { + mContext.startActivity(intent); + } else { + ToastUtil.getInstance(mContext).showToast(mContext.getString(R.string.vw_no_video_play_app)); + }*/ + } + }); + + holder.mDuration.setText(Util.getDurationString(file.getDuration())); + } + } + Uri getUri(String path){ + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { + return FileProvider.getUriForFile(mContext, mContext.getPackageName()+ ".dmc", new File(path)); + }else { + return Uri.fromFile(new File(path)); + } + } + + @Override + public int getItemCount() { + return isNeedCamera ? 
mList.size() + 1 : mList.size(); + } + + class VideoPickViewHolder extends RecyclerView.ViewHolder { + private ImageView mIvCamera; + private ImageView mIvThumbnail; + private View mShadow; + private ImageView mCbx; + private TextView mDuration; + private RelativeLayout mDurationLayout; + + public VideoPickViewHolder(View itemView) { + super(itemView); + mIvCamera = (ImageView) itemView.findViewById(R.id.iv_camera); + mIvThumbnail = (ImageView) itemView.findViewById(R.id.iv_thumbnail); + mShadow = itemView.findViewById(R.id.shadow); + mCbx = (ImageView) itemView.findViewById(R.id.cbx); + mDuration = (TextView) itemView.findViewById(R.id.txt_duration); + mDurationLayout = (RelativeLayout) itemView.findViewById(R.id.layout_duration); + } + } + + public boolean isUpToMax() { + return mCurrentNumber >= mMaxNumber; + } + + public void setCurrentNumber(int number) { + mCurrentNumber = number; + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/beans/AudioFile.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/beans/AudioFile.java new file mode 100644 index 0000000..fb69f2e --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/beans/AudioFile.java @@ -0,0 +1,62 @@ +package com.aserbao.androidcustomcamera.whole.pickvideo.beans; + +import android.os.Parcel; +import android.os.Parcelable; + +/** + * Created by Vincent Woo + * Date: 2016/10/11 + * Time: 15:52 + */ + +public class AudioFile extends BaseFile implements Parcelable { + private long duration; + + public long getDuration() { + return duration; + } + + public void setDuration(long duration) { + this.duration = duration; + } + + @Override + public void writeToParcel(Parcel dest, int flags) { + dest.writeLong(getId()); + dest.writeString(getName()); + dest.writeString(getPath()); + dest.writeLong(getSize()); + dest.writeString(getBucketId()); + dest.writeString(getBucketName()); + dest.writeLong(getDate()); + dest.writeByte((byte) 
(isSelected() ? 1 : 0)); + dest.writeLong(getDuration()); + } + + @Override + public int describeContents() { + return 0; + } + + public static final Creator CREATOR = new Creator() { + @Override + public AudioFile[] newArray(int size) { + return new AudioFile[size]; + } + + @Override + public AudioFile createFromParcel(Parcel in) { + AudioFile file = new AudioFile(); + file.setId(in.readLong()); + file.setName(in.readString()); + file.setPath(in.readString()); + file.setSize(in.readLong()); + file.setBucketId(in.readString()); + file.setBucketName(in.readString()); + file.setDate(in.readLong()); + file.setSelected(in.readByte() != 0); + file.setDuration(in.readLong()); + return file; + } + }; +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/beans/BaseFile.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/beans/BaseFile.java new file mode 100644 index 0000000..72291d1 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/beans/BaseFile.java @@ -0,0 +1,137 @@ +package com.aserbao.androidcustomcamera.whole.pickvideo.beans; + +import android.os.Parcel; +import android.os.Parcelable; + +/** + * Created by Vincent Woo + * Date: 2016/10/10 + * Time: 17:32 + */ + +public class BaseFile implements Parcelable { + private long id; + private String name; + private String path; + private long size; //byte + private String bucketId; //Directory ID + private String bucketName; //Directory Name + private long date; //Added Date + private boolean isSelected; + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof BaseFile)) return false; + + BaseFile file = (BaseFile) o; + return this.path.equals(file.path); + } + + @Override + public int hashCode() { + return path.hashCode(); + } + + public long getId() { + return id; + } + + public void setId(long id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void 
setName(String name) { + this.name = name; + } + + public String getPath() { + return path; + } + + public void setPath(String path) { + this.path = path; + } + + public long getSize() { + return size; + } + + public void setSize(long size) { + this.size = size; + } + + public String getBucketId() { + return bucketId; + } + + public void setBucketId(String bucketId) { + this.bucketId = bucketId; + } + + public String getBucketName() { + return bucketName; + } + + public void setBucketName(String bucketName) { + this.bucketName = bucketName; + } + + public long getDate() { + return date; + } + + public void setDate(long date) { + this.date = date; + } + + public boolean isSelected() { + return isSelected; + } + + public void setSelected(boolean selected) { + isSelected = selected; + } + + @Override + public void writeToParcel(Parcel dest, int flags) { + dest.writeLong(id); + dest.writeString(name); + dest.writeString(path); + dest.writeLong(size); + dest.writeString(bucketId); + dest.writeString(bucketName); + dest.writeLong(date); + dest.writeByte((byte) (isSelected ? 
1 : 0)); + } + + @Override + public int describeContents() { + return 0; + } + + public static final Creator CREATOR = new Creator() { + @Override + public BaseFile[] newArray(int size) { + return new BaseFile[size]; + } + + @Override + public BaseFile createFromParcel(Parcel in) { + BaseFile file = new BaseFile(); + file.id = in.readLong(); + file.name = in.readString(); + file.path = in.readString(); + file.size = in.readLong(); + file.bucketId = in.readString(); + file.bucketName = in.readString(); + file.date = in.readLong(); + file.isSelected = in.readByte() != 0; + return file; + } + }; +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/beans/Directory.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/beans/Directory.java new file mode 100644 index 0000000..8ff39b7 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/beans/Directory.java @@ -0,0 +1,98 @@ +package com.aserbao.androidcustomcamera.whole.pickvideo.beans; + +import java.util.ArrayList; +import java.util.List; + +/** + * Created by Vincent Woo + * Date: 2016/10/10 + * Time: 17:31 + */ + +public class Directory { + private String id; + private String name; + private String path; + private List files = new ArrayList<>(); + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public List getFiles() { + return files; + } + + public void setFiles(List files) { + this.files = files; + } + + public void addFile(T file) { + files.add(file); + } + + public String getPath() { + return path; + } + + public void setPath(String path) { + this.path = path; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof Directory)) return false; + +// Directory directory = (Directory) o; +// boolean hasId = 
!TextUtils.isEmpty(id); +// boolean otherHasId = !TextUtils.isEmpty(directory.id); +// +// if (hasId && otherHasId) { +// if (!TextUtils.equals(id, directory.id)) { +// return false; +// } +// +// return TextUtils.equals(name, directory.name); +// } +// +// return false; + + Directory directory = (Directory) o; + return this.path.equals(directory.path); + } + + @Override + public int hashCode() { +// if (TextUtils.isEmpty(id)) { +// if (TextUtils.isEmpty(name)) { +// return 0; +// } +// +// return name.hashCode(); +// } +// +// int result = id.hashCode(); +// +// if (TextUtils.isEmpty(name)) { +// return result; +// } +// +// result = 31 * result + name.hashCode(); +// return result; + + return path.hashCode(); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/beans/ImageFile.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/beans/ImageFile.java new file mode 100644 index 0000000..8c9d60c --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/beans/ImageFile.java @@ -0,0 +1,62 @@ +package com.aserbao.androidcustomcamera.whole.pickvideo.beans; + +import android.os.Parcel; +import android.os.Parcelable; + +/** + * Created by Vincent Woo + * Date: 2016/10/10 + * Time: 17:44 + */ + +public class ImageFile extends BaseFile implements Parcelable { + private int orientation; //0, 90, 180, 270 + + public int getOrientation() { + return orientation; + } + + public void setOrientation(int orientation) { + this.orientation = orientation; + } + + @Override + public void writeToParcel(Parcel dest, int flags) { + dest.writeLong(getId()); + dest.writeString(getName()); + dest.writeString(getPath()); + dest.writeLong(getSize()); + dest.writeString(getBucketId()); + dest.writeString(getBucketName()); + dest.writeLong(getDate()); + dest.writeByte((byte) (isSelected() ? 
1 : 0)); + dest.writeInt(orientation); + } + + @Override + public int describeContents() { + return 0; + } + + public static final Creator CREATOR = new Creator() { + @Override + public ImageFile[] newArray(int size) { + return new ImageFile[size]; + } + + @Override + public ImageFile createFromParcel(Parcel in) { + ImageFile file = new ImageFile(); + file.setId(in.readLong()); + file.setName(in.readString()); + file.setPath(in.readString()); + file.setSize(in.readLong()); + file.setBucketId(in.readString()); + file.setBucketName(in.readString()); + file.setDate(in.readLong()); + file.setSelected(in.readByte() != 0); + file.setOrientation(in.readInt()); + return file; + } + }; +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/beans/NormalFile.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/beans/NormalFile.java new file mode 100644 index 0000000..128edf8 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/beans/NormalFile.java @@ -0,0 +1,62 @@ +package com.aserbao.androidcustomcamera.whole.pickvideo.beans; + +import android.os.Parcel; +import android.os.Parcelable; + +/** + * Created by Vincent Woo + * Date: 2016/10/12 + * Time: 14:45 + */ + +public class NormalFile extends BaseFile implements Parcelable { + private String mimeType; + + public String getMimeType() { + return mimeType; + } + + public void setMimeType(String mimeType) { + this.mimeType = mimeType; + } + + @Override + public void writeToParcel(Parcel dest, int flags) { + dest.writeLong(getId()); + dest.writeString(getName()); + dest.writeString(getPath()); + dest.writeLong(getSize()); + dest.writeString(getBucketId()); + dest.writeString(getBucketName()); + dest.writeLong(getDate()); + dest.writeByte((byte) (isSelected() ? 
1 : 0)); + dest.writeString(getMimeType()); + } + + @Override + public int describeContents() { + return 0; + } + + public static final Creator CREATOR = new Creator() { + @Override + public NormalFile[] newArray(int size) { + return new NormalFile[size]; + } + + @Override + public NormalFile createFromParcel(Parcel in) { + NormalFile file = new NormalFile(); + file.setId(in.readLong()); + file.setName(in.readString()); + file.setPath(in.readString()); + file.setSize(in.readLong()); + file.setBucketId(in.readString()); + file.setBucketName(in.readString()); + file.setDate(in.readLong()); + file.setSelected(in.readByte() != 0); + file.setMimeType(in.readString()); + return file; + } + }; +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/beans/VideoFile.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/beans/VideoFile.java new file mode 100644 index 0000000..eeddf37 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/beans/VideoFile.java @@ -0,0 +1,73 @@ +package com.aserbao.androidcustomcamera.whole.pickvideo.beans; + +import android.os.Parcel; +import android.os.Parcelable; + +/** + * Created by Vincent Woo + * Date: 2016/10/11 + * Time: 15:23 + */ + +public class VideoFile extends BaseFile implements Parcelable { + private long duration; + private String thumbnail; + + public long getDuration() { + return duration; + } + + public void setDuration(long duration) { + this.duration = duration; + } + + public String getThumbnail() { + return thumbnail; + } + + public void setThumbnail(String thumbnail) { + this.thumbnail = thumbnail; + } + + @Override + public void writeToParcel(Parcel dest, int flags) { + dest.writeLong(getId()); + dest.writeString(getName()); + dest.writeString(getPath()); + dest.writeLong(getSize()); + dest.writeString(getBucketId()); + dest.writeString(getBucketName()); + dest.writeLong(getDate()); + dest.writeByte((byte) (isSelected() ? 
1 : 0)); + dest.writeLong(getDuration()); + dest.writeString(getThumbnail()); + } + + @Override + public int describeContents() { + return 0; + } + + public static final Creator CREATOR = new Creator() { + @Override + public VideoFile[] newArray(int size) { + return new VideoFile[size]; + } + + @Override + public VideoFile createFromParcel(Parcel in) { + VideoFile file = new VideoFile(); + file.setId(in.readLong()); + file.setName(in.readString()); + file.setPath(in.readString()); + file.setSize(in.readLong()); + file.setBucketId(in.readString()); + file.setBucketName(in.readString()); + file.setDate(in.readLong()); + file.setSelected(in.readByte() != 0); + file.setDuration(in.readLong()); + file.setThumbnail(in.readString()); + return file; + } + }; +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/callback/FileLoaderCallbacks.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/callback/FileLoaderCallbacks.java new file mode 100644 index 0000000..1840e79 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/callback/FileLoaderCallbacks.java @@ -0,0 +1,293 @@ +package com.aserbao.androidcustomcamera.whole.pickvideo.callback; + +import android.content.Context; +import android.database.Cursor; +import android.os.Bundle; +import android.support.v4.app.LoaderManager; +import android.support.v4.content.CursorLoader; +import android.support.v4.content.Loader; + + +import com.aserbao.androidcustomcamera.whole.pickvideo.Util; +import com.aserbao.androidcustomcamera.whole.pickvideo.beans.AudioFile; +import com.aserbao.androidcustomcamera.whole.pickvideo.beans.Directory; +import com.aserbao.androidcustomcamera.whole.pickvideo.beans.ImageFile; +import com.aserbao.androidcustomcamera.whole.pickvideo.beans.NormalFile; +import com.aserbao.androidcustomcamera.whole.pickvideo.beans.VideoFile; +import com.aserbao.androidcustomcamera.whole.pickvideo.loader.AudioLoader; +import 
com.aserbao.androidcustomcamera.whole.pickvideo.loader.FileLoader; +import com.aserbao.androidcustomcamera.whole.pickvideo.loader.ImageLoader; +import com.aserbao.androidcustomcamera.whole.pickvideo.loader.VideoLoader; + +import java.lang.ref.WeakReference; +import java.util.ArrayList; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import static android.provider.BaseColumns._ID; +import static android.provider.MediaStore.Files.FileColumns.MIME_TYPE; +import static android.provider.MediaStore.Images.ImageColumns.BUCKET_DISPLAY_NAME; +import static android.provider.MediaStore.Images.ImageColumns.BUCKET_ID; +import static android.provider.MediaStore.Images.ImageColumns.ORIENTATION; +import static android.provider.MediaStore.MediaColumns.DATA; +import static android.provider.MediaStore.MediaColumns.DATE_ADDED; +import static android.provider.MediaStore.MediaColumns.SIZE; +import static android.provider.MediaStore.MediaColumns.TITLE; +import static android.provider.MediaStore.Video.VideoColumns.DURATION; + +/** + * Created by Vincent Woo + * Date: 2016/10/11 + * Time: 11:04 + */ + +public class FileLoaderCallbacks implements LoaderManager.LoaderCallbacks { + public static final int TYPE_IMAGE = 0; + public static final int TYPE_VIDEO = 1; + public static final int TYPE_AUDIO = 2; + public static final int TYPE_FILE = 3; + + private WeakReference context; + private FilterResultCallback resultCallback; + + private int mType = TYPE_IMAGE; + private String[] mSuffixArgs; + private CursorLoader mLoader; + private String mSuffixRegex; + + public FileLoaderCallbacks(Context context, FilterResultCallback resultCallback, int type) { + this(context, resultCallback, type, null); + } + + public FileLoaderCallbacks(Context context, FilterResultCallback resultCallback, int type, String[] suffixArgs) { + this.context = new WeakReference<>(context); + this.resultCallback = resultCallback; + this.mType = type; + this.mSuffixArgs = 
suffixArgs; + if (suffixArgs != null && suffixArgs.length > 0) { + mSuffixRegex = obtainSuffixRegex(suffixArgs); + } + } + + @Override + public Loader onCreateLoader(int id, Bundle args) { + switch (mType) { + case TYPE_IMAGE: + mLoader = new ImageLoader(context.get()); + break; + case TYPE_VIDEO: + mLoader = new VideoLoader(context.get()); + break; + case TYPE_AUDIO: + mLoader = new AudioLoader(context.get()); + break; + case TYPE_FILE: + mLoader = new FileLoader(context.get()); + break; + } + + return mLoader; + } + + @Override + public void onLoadFinished(Loader loader, Cursor data) { + if (data == null) return; + switch (mType) { + case TYPE_IMAGE: + onImageResult(data); + break; + case TYPE_VIDEO: + onVideoResult(data); + break; + case TYPE_AUDIO: + onAudioResult(data); + break; + case TYPE_FILE: + onFileResult(data); + break; + } + } + + @Override + public void onLoaderReset(Loader loader) { + + } + + @SuppressWarnings("unchecked") + private void onImageResult(Cursor data) { + List> directories = new ArrayList<>(); + + if (data.getPosition() != -1) { + data.moveToPosition(-1); + } + + while (data.moveToNext()) { + //Create a File instance + ImageFile img = new ImageFile(); + img.setId(data.getLong(data.getColumnIndexOrThrow(_ID))); + img.setName(data.getString(data.getColumnIndexOrThrow(TITLE))); + img.setPath(data.getString(data.getColumnIndexOrThrow(DATA))); + img.setSize(data.getLong(data.getColumnIndexOrThrow(SIZE))); + img.setBucketId(data.getString(data.getColumnIndexOrThrow(BUCKET_ID))); + img.setBucketName(data.getString(data.getColumnIndexOrThrow(BUCKET_DISPLAY_NAME))); + img.setDate(data.getLong(data.getColumnIndexOrThrow(DATE_ADDED))); + + img.setOrientation(data.getInt(data.getColumnIndexOrThrow(ORIENTATION))); + + //Create a Directory + Directory directory = new Directory<>(); + directory.setId(img.getBucketId()); + directory.setName(img.getBucketName()); + directory.setPath(Util.extractPathWithoutSeparator(img.getPath())); + + if 
(!directories.contains(directory)) { + directory.addFile(img); + directories.add(directory); + } else { + directories.get(directories.indexOf(directory)).addFile(img); + } + } + + if (resultCallback != null) { + resultCallback.onResult(directories); + } + } + + @SuppressWarnings("unchecked") + private void onVideoResult(final Cursor data) { + List> directories = new ArrayList<>(); + + if (data.getPosition() != -1) { + data.moveToPosition(-1); + } + + while (data.moveToNext()) { + //Create a File instance + VideoFile video = new VideoFile(); + video.setId(data.getLong(data.getColumnIndexOrThrow(_ID))); + video.setName(data.getString(data.getColumnIndexOrThrow(TITLE))); + video.setPath(data.getString(data.getColumnIndexOrThrow(DATA))); + video.setSize(data.getLong(data.getColumnIndexOrThrow(SIZE))); + video.setBucketId(data.getString(data.getColumnIndexOrThrow(BUCKET_ID))); + video.setBucketName(data.getString(data.getColumnIndexOrThrow(BUCKET_DISPLAY_NAME))); + video.setDate(data.getLong(data.getColumnIndexOrThrow(DATE_ADDED))); + + video.setDuration(data.getLong(data.getColumnIndexOrThrow(DURATION))); + + //Create a Directory + Directory directory = new Directory<>(); + directory.setId(video.getBucketId()); + directory.setName(video.getBucketName()); + directory.setPath(Util.extractPathWithoutSeparator(video.getPath())); + + if (!directories.contains(directory)) { + directory.addFile(video); + directories.add(directory); + } else { + directories.get(directories.indexOf(directory)).addFile(video); + } + } + + if (resultCallback != null) { + resultCallback.onResult(directories); + } + } + + @SuppressWarnings("unchecked") + private void onAudioResult(Cursor data) { + List> directories = new ArrayList<>(); + + if (data.getPosition() != -1) { + data.moveToPosition(-1); + } + + while (data.moveToNext()) { + //Create a File instance + AudioFile audio = new AudioFile(); + audio.setId(data.getLong(data.getColumnIndexOrThrow(_ID))); + 
audio.setName(data.getString(data.getColumnIndexOrThrow(TITLE))); + audio.setPath(data.getString(data.getColumnIndexOrThrow(DATA))); + audio.setSize(data.getLong(data.getColumnIndexOrThrow(SIZE))); + audio.setDate(data.getLong(data.getColumnIndexOrThrow(DATE_ADDED))); + + audio.setDuration(data.getLong(data.getColumnIndexOrThrow(DURATION))); + + //Create a Directory + Directory directory = new Directory<>(); + directory.setName(Util.extractFileNameWithSuffix(Util.extractPathWithoutSeparator(audio.getPath()))); + directory.setPath(Util.extractPathWithoutSeparator(audio.getPath())); + + if (!directories.contains(directory)) { + directory.addFile(audio); + directories.add(directory); + } else { + directories.get(directories.indexOf(directory)).addFile(audio); + } + } + + if (resultCallback != null) { + resultCallback.onResult(directories); + } + } + + @SuppressWarnings("unchecked") + private void onFileResult(Cursor data) { + List> directories = new ArrayList<>(); + + if (data.getPosition() != -1) { + data.moveToPosition(-1); + } + + while (data.moveToNext()) { + String path = data.getString(data.getColumnIndexOrThrow(DATA)); + if (path != null && contains(path)) { + //Create a File instance + NormalFile file = new NormalFile(); + file.setId(data.getLong(data.getColumnIndexOrThrow(_ID))); + file.setName(data.getString(data.getColumnIndexOrThrow(TITLE))); + file.setPath(data.getString(data.getColumnIndexOrThrow(DATA))); + file.setSize(data.getLong(data.getColumnIndexOrThrow(SIZE))); + file.setDate(data.getLong(data.getColumnIndexOrThrow(DATE_ADDED))); + + file.setMimeType(data.getString(data.getColumnIndexOrThrow(MIME_TYPE))); + + //Create a Directory + Directory directory = new Directory<>(); + directory.setName(Util.extractFileNameWithSuffix(Util.extractPathWithoutSeparator(file.getPath()))); + directory.setPath(Util.extractPathWithoutSeparator(file.getPath())); + + if (!directories.contains(directory)) { + directory.addFile(file); + directories.add(directory); + } 
else { + directories.get(directories.indexOf(directory)).addFile(file); + } + } + } + + if (resultCallback != null) { + resultCallback.onResult(directories); + } + } + + private boolean contains(String path) { + String name = Util.extractFileNameWithSuffix(path); + Pattern pattern = Pattern.compile(mSuffixRegex, Pattern.CASE_INSENSITIVE); + Matcher matcher = pattern.matcher(name); + return matcher.matches(); + } + + private String obtainSuffixRegex(String[] suffixes) { + StringBuilder builder = new StringBuilder(); + for (int i = 0; i < suffixes.length ; i++) { + if (i ==0) { + builder.append(suffixes[i].replace(".", "")); + } else { + builder.append("|\\."); + builder.append(suffixes[i].replace(".", "")); + } + } + return ".+(\\." + builder.toString() + ")$"; + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/callback/FilterResultCallback.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/callback/FilterResultCallback.java new file mode 100644 index 0000000..1745f6c --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/callback/FilterResultCallback.java @@ -0,0 +1,16 @@ +package com.aserbao.androidcustomcamera.whole.pickvideo.callback; + +import com.aserbao.androidcustomcamera.whole.pickvideo.beans.BaseFile; +import com.aserbao.androidcustomcamera.whole.pickvideo.beans.Directory; + +import java.util.List; + +/** + * Created by Vincent Woo + * Date: 2016/10/11 + * Time: 11:39 + */ + +public interface FilterResultCallback { + void onResult(List> directories); +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/callback/OnSelectStateListener.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/callback/OnSelectStateListener.java new file mode 100644 index 0000000..63438a7 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/callback/OnSelectStateListener.java @@ -0,0 +1,5 @@ +package 
com.aserbao.androidcustomcamera.whole.pickvideo.callback; + +public interface OnSelectStateListener { + void OnSelectStateChanged(boolean state, T file); + } \ No newline at end of file diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/itemDecoration/DividerGridItemDecoration.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/itemDecoration/DividerGridItemDecoration.java new file mode 100644 index 0000000..310ce93 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/itemDecoration/DividerGridItemDecoration.java @@ -0,0 +1,143 @@ +package com.aserbao.androidcustomcamera.whole.pickvideo.itemDecoration; + +import android.content.Context; +import android.content.res.TypedArray; +import android.graphics.Canvas; +import android.graphics.Rect; +import android.graphics.drawable.Drawable; +import android.support.v7.widget.GridLayoutManager; +import android.support.v7.widget.RecyclerView; +import android.support.v7.widget.StaggeredGridLayoutManager; +import android.view.View; + +/** + * Created by Vincent Woo + * Date: 2016/10/13 + * Time: 17:26 + */ + +public class DividerGridItemDecoration extends RecyclerView.ItemDecoration { + private static final int[] ATTRS = new int[]{android.R.attr.listDivider}; + private Drawable mDivider; + + public DividerGridItemDecoration(Context context) { + final TypedArray a = context.obtainStyledAttributes(ATTRS); + mDivider = a.getDrawable(0); + a.recycle(); + } + + @Override + public void onDraw(Canvas c, RecyclerView parent, RecyclerView.State state) { + drawHorizontal(c, parent); + drawVertical(c, parent); + } + + private int getSpanCount(RecyclerView parent) { + // 列数 + int spanCount = -1; + RecyclerView.LayoutManager layoutManager = parent.getLayoutManager(); + if (layoutManager instanceof GridLayoutManager) { + spanCount = ((GridLayoutManager) layoutManager).getSpanCount(); + } else if (layoutManager instanceof StaggeredGridLayoutManager) { + spanCount 
= ((StaggeredGridLayoutManager) layoutManager) + .getSpanCount(); + } + return spanCount; + } + + public void drawHorizontal(Canvas c, RecyclerView parent) { + int childCount = parent.getChildCount(); + for (int i = 0; i < childCount; i++) { + final View child = parent.getChildAt(i); + final RecyclerView.LayoutParams params = (RecyclerView.LayoutParams) child + .getLayoutParams(); + final int left = child.getLeft() - params.leftMargin; + final int right = child.getRight() + params.rightMargin + + mDivider.getIntrinsicWidth(); + final int top = child.getBottom() + params.bottomMargin; + final int bottom = top + mDivider.getIntrinsicHeight(); + mDivider.setBounds(left, top, right, bottom); + mDivider.draw(c); + } + } + + public void drawVertical(Canvas c, RecyclerView parent) { + final int childCount = parent.getChildCount(); + for (int i = 0; i < childCount; i++) { + final View child = parent.getChildAt(i); + + final RecyclerView.LayoutParams params = (RecyclerView.LayoutParams) child + .getLayoutParams(); + final int top = child.getTop() - params.topMargin; + final int bottom = child.getBottom() + params.bottomMargin; + final int left = child.getRight() + params.rightMargin; + final int right = left + mDivider.getIntrinsicWidth(); + + mDivider.setBounds(left, top, right, bottom); + mDivider.draw(c); + } + } + + private boolean isLastColum(RecyclerView parent, int pos, int spanCount, + int childCount) { + RecyclerView.LayoutManager layoutManager = parent.getLayoutManager(); + if (layoutManager instanceof GridLayoutManager) { + if ((pos + 1) % spanCount == 0){ // 如果是最后一列,则不需要绘制右边 + return true; + } + } else if (layoutManager instanceof StaggeredGridLayoutManager) { + int orientation = ((StaggeredGridLayoutManager) layoutManager) + .getOrientation(); + if (orientation == StaggeredGridLayoutManager.VERTICAL) { + if ((pos + 1) % spanCount == 0) { // 如果是最后一列,则不需要绘制右边 + return true; + } + } else { + childCount = childCount - childCount % spanCount; + if (pos >= 
childCount)// 如果是最后一列,则不需要绘制右边 + return true; + } + } + return false; + } + + private boolean isLastRaw(RecyclerView parent, int pos, int spanCount, + int childCount) { + RecyclerView.LayoutManager layoutManager = parent.getLayoutManager(); + if (layoutManager instanceof GridLayoutManager) { + childCount = childCount - childCount % spanCount; + if (pos >= childCount)// 如果是最后一行,则不需要绘制底部 + return true; + } else if (layoutManager instanceof StaggeredGridLayoutManager) { + int orientation = ((StaggeredGridLayoutManager) layoutManager) + .getOrientation(); + // StaggeredGridLayoutManager 且纵向滚动 + if (orientation == StaggeredGridLayoutManager.VERTICAL) { + childCount = childCount - childCount % spanCount; + // 如果是最后一行,则不需要绘制底部 + if (pos >= childCount) + return true; + } else {// StaggeredGridLayoutManager 且横向滚动 + // 如果是最后一行,则不需要绘制底部 + if ((pos + 1) % spanCount == 0) { + return true; + } + } + } + return false; + } + + @Override + public void getItemOffsets(Rect outRect, int itemPosition, + RecyclerView parent) { + int spanCount = getSpanCount(parent); + int childCount = parent.getAdapter().getItemCount(); + if (isLastRaw(parent, itemPosition, spanCount, childCount)) {// 如果是最后一行,则不需要绘制底部 + outRect.set(0, 0, mDivider.getIntrinsicWidth(), 0); + } else if (isLastColum(parent, itemPosition, spanCount, childCount)) {// 如果是最后一列,则不需要绘制右边 + outRect.set(0, 0, 0, mDivider.getIntrinsicHeight()); + } else { + outRect.set(0, 0, mDivider.getIntrinsicWidth(), mDivider.getIntrinsicHeight()); + } + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/loader/AudioLoader.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/loader/AudioLoader.java new file mode 100644 index 0000000..1bec42d --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/loader/AudioLoader.java @@ -0,0 +1,48 @@ +package com.aserbao.androidcustomcamera.whole.pickvideo.loader; + +import android.content.Context; +import android.net.Uri; 
+import android.provider.MediaStore; +import android.support.v4.content.CursorLoader; + +import static android.provider.MediaStore.MediaColumns.MIME_TYPE; + +/** + * Created by Vincent Woo + * Date: 2016/10/11 + * Time: 17:35 + */ + +public class AudioLoader extends CursorLoader { + private static final String[] AUDIO_PROJECTION = { + //Base File + MediaStore.Audio.Media._ID, + MediaStore.Audio.Media.TITLE, + MediaStore.Audio.Media.DATA, + MediaStore.Audio.Media.SIZE, + MediaStore.Audio.Media.DATE_ADDED, + //Audio File + MediaStore.Audio.Media.DURATION + }; + + private AudioLoader(Context context, Uri uri, String[] projection, String selection, + String[] selectionArgs, String sortOrder) { + super(context, uri, projection, selection, selectionArgs, sortOrder); + } + + public AudioLoader(Context context) { + super(context); + + setProjection(AUDIO_PROJECTION); + setUri(MediaStore.Files.getContentUri("external")); + setSortOrder(MediaStore.Audio.Media.DATE_ADDED + " DESC"); + + setSelection(MIME_TYPE + "=? or " + + MIME_TYPE + "=? or " +// + MIME_TYPE + "=? 
or " + + MIME_TYPE + "=?"); + String[] selectionArgs; + selectionArgs = new String[]{"audio/mpeg", "audio/mp3", "audio/x-ms-wma"}; + setSelectionArgs(selectionArgs); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/loader/FileLoader.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/loader/FileLoader.java new file mode 100644 index 0000000..5648ba9 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/loader/FileLoader.java @@ -0,0 +1,47 @@ +package com.aserbao.androidcustomcamera.whole.pickvideo.loader; + +import android.content.Context; +import android.net.Uri; +import android.provider.MediaStore; +import android.support.v4.content.CursorLoader; + +/** + * Created by Vincent Woo + * Date: 2016/10/12 + * Time: 14:48 + */ + +public class FileLoader extends CursorLoader { + private static final String[] FILE_PROJECTION = { + //Base File + MediaStore.Files.FileColumns._ID, + MediaStore.Files.FileColumns.TITLE, + MediaStore.Files.FileColumns.DATA, + MediaStore.Files.FileColumns.SIZE, + MediaStore.Files.FileColumns.DATE_ADDED, + + //Normal File + MediaStore.Files.FileColumns.MIME_TYPE + }; + + private FileLoader(Context context, Uri uri, String[] projection, String selection, + String[] selectionArgs, String sortOrder) { + super(context, uri, projection, selection, selectionArgs, sortOrder); + } + + public FileLoader(Context context) { + super(context); + setProjection(FILE_PROJECTION); + setUri(MediaStore.Files.getContentUri("external")); + setSortOrder(MediaStore.Files.FileColumns.DATE_ADDED + " DESC"); + +// setSelection(MIME_TYPE + "=? or " +//// + MIME_TYPE + "=? or " +//// + MIME_TYPE + "=? 
or " +// + MIME_TYPE + "=?"); +// +// String[] selectionArgs; +// selectionArgs = new String[] { "text/txt", "text/plain" }; +// setSelectionArgs(selectionArgs); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/loader/ImageLoader.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/loader/ImageLoader.java new file mode 100644 index 0000000..5f3f59a --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/loader/ImageLoader.java @@ -0,0 +1,47 @@ +package com.aserbao.androidcustomcamera.whole.pickvideo.loader; + +import android.content.Context; +import android.net.Uri; +import android.provider.MediaStore; +import android.support.v4.content.CursorLoader; + +import static android.provider.MediaStore.MediaColumns.MIME_TYPE; + +/** + * Created by Vincent Woo + * Date: 2016/10/10 + * Time: 17:55 + */ + +public class ImageLoader extends CursorLoader { + private static final String[] IMAGE_PROJECTION = { + //Base File + MediaStore.Images.Media._ID, + MediaStore.Images.Media.TITLE, + MediaStore.Images.Media.DATA, + MediaStore.Images.Media.SIZE, + MediaStore.Images.Media.BUCKET_ID, + MediaStore.Images.Media.BUCKET_DISPLAY_NAME, + MediaStore.Images.Media.DATE_ADDED, + //Image File + MediaStore.Images.Media.ORIENTATION + }; + + private ImageLoader(Context context, Uri uri, String[] projection, String selection, + String[] selectionArgs, String sortOrder) { + super(context, uri, projection, selection, selectionArgs, sortOrder); + } + + public ImageLoader(Context context) { + super(context); + setProjection(IMAGE_PROJECTION); + setUri(MediaStore.Images.Media.EXTERNAL_CONTENT_URI); + setSortOrder(MediaStore.Images.Media.DATE_ADDED + " DESC"); + + setSelection(MIME_TYPE + "=? or " + MIME_TYPE + "=? or "+ MIME_TYPE + "=? 
or " + MIME_TYPE + "=?"); + + String[] selectionArgs; + selectionArgs = new String[] { "image/jpeg", "image/png", "image/jpg","image/gif" }; + setSelectionArgs(selectionArgs); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/loader/VideoLoader.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/loader/VideoLoader.java new file mode 100644 index 0000000..a04cb25 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/pickvideo/loader/VideoLoader.java @@ -0,0 +1,47 @@ +package com.aserbao.androidcustomcamera.whole.pickvideo.loader; + +import android.content.Context; +import android.net.Uri; +import android.provider.MediaStore; +import android.support.v4.content.CursorLoader; + +import static android.provider.MediaStore.MediaColumns.MIME_TYPE; + +/** + * Created by Vincent Woo + * Date: 2016/10/10 + * Time: 11:38 + */ + +public class VideoLoader extends CursorLoader { + private static final String[] VIDEO_PROJECTION = { + //Base File + MediaStore.Video.Media._ID, + MediaStore.Video.Media.TITLE, + MediaStore.Video.Media.DATA, + MediaStore.Video.Media.SIZE, + MediaStore.Video.Media.BUCKET_ID, + MediaStore.Video.Media.BUCKET_DISPLAY_NAME, + MediaStore.Video.Media.DATE_ADDED, + //Video File + MediaStore.Video.Media.DURATION + }; + + private VideoLoader(Context context, Uri uri, String[] projection, String selection, + String[] selectionArgs, String sortOrder) { + super(context, uri, projection, selection, selectionArgs, sortOrder); + } + + public VideoLoader(Context context) { + super(context); + + setProjection(VIDEO_PROJECTION); + setUri(MediaStore.Video.Media.EXTERNAL_CONTENT_URI); + setSortOrder(MediaStore.Video.Media.DATE_ADDED + " DESC"); + + setSelection(MIME_TYPE + "=? 
or " + MIME_TYPE + "=?"); + String[] selectionArgs; + selectionArgs = new String[] { "video/mpeg", "video/mp4" }; + setSelectionArgs(selectionArgs); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/RecorderActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/RecorderActivity.java index 34c414a..2b6aa78 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/RecorderActivity.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/RecorderActivity.java @@ -1,9 +1,13 @@ package com.aserbao.androidcustomcamera.whole.record; +import android.content.Intent; import android.graphics.Point; import android.hardware.Camera; +import android.media.MediaPlayer; +import android.os.Bundle; import android.os.Handler; import android.os.Message; +import android.util.Log; import android.view.MotionEvent; import android.view.View; import android.widget.Button; @@ -15,18 +19,28 @@ import android.widget.Toast; import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.WelcomeActivity; import com.aserbao.androidcustomcamera.base.MyApplication; import com.aserbao.androidcustomcamera.base.activity.BaseActivity; import com.aserbao.androidcustomcamera.base.pop.PopupManager; import com.aserbao.androidcustomcamera.base.utils.FileUtils; +import com.aserbao.androidcustomcamera.base.utils.StaticFinalValues; +import com.aserbao.androidcustomcamera.whole.createVideoByVoice.localEdit.LocalVideoActivity; +import com.aserbao.androidcustomcamera.whole.pickvideo.VideoPickActivity; +import com.aserbao.androidcustomcamera.whole.pickvideo.beans.VideoFile; import com.aserbao.androidcustomcamera.whole.record.beans.MediaObject; import com.aserbao.androidcustomcamera.whole.record.other.MagicFilterType; import com.aserbao.androidcustomcamera.whole.record.ui.CameraView; +import com.aserbao.androidcustomcamera.whole.record.ui.CustomRecordImageView; import 
com.aserbao.androidcustomcamera.whole.record.ui.FocusImageView; import com.aserbao.androidcustomcamera.whole.record.ui.ProgressView; import com.aserbao.androidcustomcamera.whole.record.ui.SlideGpuFilterGroup; +import com.aserbao.androidcustomcamera.whole.videoPlayer.VideoPlayerActivity2; + +import org.jetbrains.annotations.NotNull; import java.lang.ref.WeakReference; +import java.util.ArrayList; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; @@ -35,8 +49,11 @@ import static com.aserbao.androidcustomcamera.base.utils.StaticFinalValues.CHANGE_IMAGE; import static com.aserbao.androidcustomcamera.base.utils.StaticFinalValues.DELAY_DETAL; +import static com.aserbao.androidcustomcamera.base.utils.StaticFinalValues.MAX_NUMBER; import static com.aserbao.androidcustomcamera.base.utils.StaticFinalValues.OVER_CLICK; import static com.aserbao.androidcustomcamera.base.utils.StaticFinalValues.RECORD_MIN_TIME; +import static com.aserbao.androidcustomcamera.whole.pickvideo.BaseActivity.IS_NEED_FOLDER_LIST; +import static com.aserbao.androidcustomcamera.whole.pickvideo.VideoPickActivity.IS_NEED_CAMERA; public class RecorderActivity extends BaseActivity implements View.OnTouchListener, SlideGpuFilterGroup.OnFilterChangeListener { private static final int VIDEO_MAX_TIME = 30 * 1000; @@ -54,8 +71,8 @@ public class RecorderActivity extends BaseActivity implements View.OnTouchListen LinearLayout mIndexDelete; @BindView(R.id.index_album) TextView mIndexAlbum; - @BindView(R.id.btn_record_iv) - ImageView mBtnRecordIv; + @BindView(R.id.custom_record_image_view) + CustomRecordImageView mCustomRecordImageView; @BindView(R.id.count_down_tv) TextView mCountDownTv; @BindView(R.id.record_btn_ll) @@ -100,7 +117,7 @@ public void initView() { mVideoRecordProgressView.setOverTimeClickListener(new ProgressView.OverTimeClickListener() { @Override public void overTime() { - mBtnRecordIv.performClick(); + mCustomRecordImageView.performClick(); } @Override @@ -110,13 
+127,13 @@ public void noEnoughTime() { @Override public void isArriveCountDown() { - mBtnRecordIv.performClick(); + mCustomRecordImageView.performClick(); } }); setBackAlpha(mVideoRecordFinishIv,127); } - @OnClick({R.id.matching_back, R.id.video_record_finish_iv, R.id.switch_camera, R.id.index_delete, R.id.index_album, R.id.btn_record_iv, R.id.count_down_tv, R.id.meet_mask, R.id.video_filter}) + @OnClick({R.id.matching_back, R.id.video_record_finish_iv, R.id.switch_camera, R.id.index_delete, R.id.index_album, R.id.custom_record_image_view, R.id.count_down_tv, R.id.meet_mask, R.id.video_filter}) public void onViewClicked(View view) { if (System.currentTimeMillis() - mLastTime < 500) { return; @@ -139,6 +156,11 @@ public void onViewClicked(View view) { onBackPressed(); break; case R.id.video_record_finish_iv: + onStopRecording(); + if (mMediaObject != null) { + videoFileName = mMediaObject.mergeVideo(); + } + VideoPlayerActivity2.launch(RecorderActivity.this,videoFileName); break; case R.id.switch_camera: mRecordCameraView.switchCamera(); @@ -168,24 +190,17 @@ public void onViewClicked(View view) { } break; case R.id.index_album: - Toast.makeText(this, "稍后编写", Toast.LENGTH_SHORT).show(); + Intent intent2 = new Intent(this, VideoPickActivity.class); + intent2.putExtra(IS_NEED_CAMERA, false); + intent2.putExtra(MAX_NUMBER, 1); + intent2.putExtra(IS_NEED_FOLDER_LIST, true); + startActivityForResult(intent2, StaticFinalValues.REQUEST_CODE_PICK_VIDEO); break; - case R.id.btn_record_iv: + case R.id.custom_record_image_view: if(!isRecording) { - isRecording = true; - String storageMp4 = FileUtils.getStorageMp4(String.valueOf(System.currentTimeMillis())); - MediaObject.MediaPart mediaPart = mMediaObject.buildMediaPart(storageMp4); - mRecordCameraView.setSavePath(storageMp4); - mRecordCameraView.startRecord(); - mVideoRecordProgressView.start(); - alterStatus(); + onStartRecording(); }else{ - isRecording = false; - mRecordCameraView.stopRecord(); - 
mVideoRecordProgressView.stop(); - //todo:录制释放有延时,稍后处理 - mMyHandler.sendEmptyMessageDelayed(DELAY_DETAL,250); - alterStatus(); + onStopRecording(); } break; case R.id.count_down_tv: @@ -226,6 +241,27 @@ public void selBeautyLevel(int level) { break; } } + + private void onStartRecording(){ + isRecording = true; + String storageMp4 = FileUtils.getStorageMp4(String.valueOf(System.currentTimeMillis())); + MediaObject.MediaPart mediaPart = mMediaObject.buildMediaPart(storageMp4); + mRecordCameraView.setSavePath(storageMp4); + mRecordCameraView.startRecord(); + mCustomRecordImageView.startRecord(); + mVideoRecordProgressView.start(); + alterStatus(); + } + + private void onStopRecording() { + isRecording = false; + mRecordCameraView.stopRecord(); + mVideoRecordProgressView.stop(); + //todo:录制释放有延时,稍后处理 + mMyHandler.sendEmptyMessageDelayed(DELAY_DETAL,250); + mCustomRecordImageView.stopRecord(); + alterStatus(); + } private void setBackAlpha(Button view ,int alpha) { if(alpha > 127){ @@ -248,7 +284,7 @@ private void showOtherView() { mVideoFilter.setVisibility(View.VISIBLE); mCountDownTv.setVisibility(View.VISIBLE); mMatchingBack.setVisibility(View.VISIBLE); - mBtnRecordIv.setVisibility(View.VISIBLE); + mCustomRecordImageView.setVisibility(View.VISIBLE); } private void hideOtherView() { mIndexAlbum.setVisibility(View.INVISIBLE); @@ -257,7 +293,7 @@ private void hideOtherView() { mVideoFilter.setVisibility(View.INVISIBLE); mCountDownTv.setVisibility(View.INVISIBLE); mMatchingBack.setVisibility(View.INVISIBLE); - mBtnRecordIv.setVisibility(View.INVISIBLE); + mCustomRecordImageView.setVisibility(View.INVISIBLE); } //正在录制中 public void alterStatus(){ @@ -338,6 +374,8 @@ public void run() { } }); } + + private static class MyHandler extends Handler { private WeakReference mVideoRecordActivity; @@ -373,8 +411,8 @@ public void handleMessage(Message msg) { activity.mMyHandler.removeCallbacks(null); activity.mCountTimeDownIv.setVisibility(View.GONE); 
activity.mVideoRecordProgressView.setVisibility(View.VISIBLE); - activity.mBtnRecordIv.setVisibility(View.VISIBLE); - activity.mBtnRecordIv.performClick(); + activity.mCustomRecordImageView.setVisibility(View.VISIBLE); + activity.mCustomRecordImageView.performClick(); activity.mVideoRecordProgressView.setCountDownTime(activity.mRecordTimeInterval); break; } @@ -385,11 +423,61 @@ public void handleMessage(Message msg) { } break; case OVER_CLICK: - activity.mBtnRecordIv.performClick(); //定时结束 + activity.mCustomRecordImageView.performClick(); //定时结束 break; } } } } + private static final String TAG = "RecorderActivity"; + + String videoFileName; + @Override + protected void onActivityResult(int requestCode, int resultCode, Intent data) { + super.onActivityResult(requestCode, resultCode, data); + switch (requestCode) { + case StaticFinalValues.REQUEST_CODE_PICK_VIDEO: + if (resultCode == RESULT_OK) { + ArrayList list = data.getParcelableArrayListExtra(StaticFinalValues.RESULT_PICK_VIDEO); + for (VideoFile file : list) { + videoFileName = file.getPath(); + } + + + //这一段用来判断视频时间的 + try { + MediaPlayer player = new MediaPlayer(); + player.setDataSource(videoFileName); + player.prepare(); + int duration = player.getDuration(); + player.release(); + int s = duration / 1000; + int hour = s / 3600; + int minute = s % 3600 / 60; + int second = s % 60; + Log.e(TAG, "视频文件长度,分钟: " + minute + "视频有" + s + "秒"); + if (s >= 120) { + Toast.makeText(this, "视频剪辑不能超过2分钟", Toast.LENGTH_LONG).show(); + return; + } else if (s < 5) { + Toast.makeText(this, "视频剪辑不能少于5秒", Toast.LENGTH_LONG).show(); + return; + }else{ + Intent intent = new Intent(RecorderActivity.this, LocalVideoActivity.class); + Bundle bundle = new Bundle(); + bundle.putString(StaticFinalValues.VIDEOFILEPATH, videoFileName); + bundle.putInt(StaticFinalValues.MISNOTCOMELOCAL, 0); + intent.putExtra(StaticFinalValues.BUNDLE, bundle); + startActivity(intent); + } + } catch (Exception e) { + e.printStackTrace(); + } + + + } + 
break; + } + } } diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/beans/MediaObject.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/beans/MediaObject.java index 9e01fe3..5f3d382 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/beans/MediaObject.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/beans/MediaObject.java @@ -6,6 +6,13 @@ import android.util.Log; +import com.coremedia.iso.boxes.Container; +import com.googlecode.mp4parser.authoring.Movie; +import com.googlecode.mp4parser.authoring.Track; +import com.googlecode.mp4parser.authoring.builder.DefaultMp4Builder; +import com.googlecode.mp4parser.authoring.container.mp4.MovieCreator; +import com.googlecode.mp4parser.authoring.tracks.AppendTrack; + import java.io.File; import java.io.RandomAccessFile; import java.io.Serializable; @@ -14,6 +21,8 @@ import java.util.LinkedList; import java.util.List; +import static com.aserbao.androidcustomcamera.base.utils.StaticFinalValues.VIDEOTEMP; + /** * description: @@ -22,6 +31,7 @@ public class MediaObject implements Serializable{ + private static final String TAG = "MediaObject"; /** 获取所有分块 */ private LinkedList mMediaList = new LinkedList(); private LinkedList paths = new LinkedList<>(); @@ -79,15 +89,20 @@ public void stopRecord(Context context, MediaObject mediaObject){ if (part != null ) { MediaMetadataRetriever mediaMetadata = new MediaMetadataRetriever(); mediaMetadata.setDataSource(context, Uri.parse(part.getMediaPath())); - int mVideoDuration = Integer.parseInt(mediaMetadata.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)); - part.duration = mVideoDuration; + String s = mediaMetadata.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION); + int mVideoDuration = 0; + try { + mVideoDuration = Integer.parseInt(s); + part.duration = mVideoDuration; + } catch (NumberFormatException e) { + e.printStackTrace(); + Log.e(TAG, "stopRecord: 
是不是int型,打个日志自己查看一下" ); + } } } } public MediaPart getCurrentPart() { - /*if (mMediaPart != null) - return mMediaPart;*/ if (mMediaList != null && mMediaList.size() > 0) mMediaPart = mMediaList.get(mMediaList.size() - 1); return mMediaPart; @@ -109,6 +124,70 @@ public int getDuration() { } } + //=====================================视频合成============= + public String mergeVideo() { + long begin = System.currentTimeMillis(); + List movies = new ArrayList<>(); + String filePath = ""; + if(paths.size() == 1){ + return paths.get(0); + } + try { + for (int i = 0; i < paths.size(); i++) { + if(paths != null && paths.get(i) != null) { + Movie movie = MovieCreator.build(paths.get(i)); + movies.add(movie); + } + } + List videoTracks = new ArrayList<>(); + List audioTracks = new ArrayList<>(); + for (Movie movie : movies) { + for (Track track : movie.getTracks()) { + if ("vide".equals(track.getHandler())) { + videoTracks.add(track); + } + if ("soun".equals(track.getHandler())) { + audioTracks.add(track); + } + } + } + Movie result = new Movie(); + if (videoTracks.size() > 0) { + result.addTrack(new AppendTrack(videoTracks.toArray(new Track[videoTracks.size()]))); + } + if (audioTracks.size() > 0) { + result.addTrack(new AppendTrack(audioTracks.toArray(new Track[audioTracks.size()]))); + } + Container container = new DefaultMp4Builder().build(result); + filePath = getRecorderPath(); +// FileChannel fc = new FileOutputStream(filePath).getChannel(); + FileChannel fc = new RandomAccessFile(String.format(filePath), "rw").getChannel(); + container.writeContainer(fc); + fc.close(); + } catch (Exception e) { + e.printStackTrace(); + return paths.get(0); + } + long end = System.currentTimeMillis(); + Log.e("test", "merge use time:" + (end - begin)); +// deteleVideoPath(); + return filePath; + } + + private void deteleVideoPath() { + for (int i = 0; i < paths.size(); i++) { + new File(paths.get(i)).delete(); + } + } + private String getRecorderPath() { + File file = new File(VIDEOTEMP); 
+ if (!file.exists()) { + file.mkdirs(); + } + String path = file.getPath() + "/" + System.currentTimeMillis() + ".mp4"; + Log.e("test", "path=" + path); + return path; + } } diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/draw/CameraDrawer.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/draw/CameraDrawer.java index a0690ca..5d0522b 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/draw/CameraDrawer.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/draw/CameraDrawer.java @@ -1,19 +1,24 @@ package com.aserbao.androidcustomcamera.whole.record.draw; import android.content.res.Resources; +import android.graphics.Bitmap; +import android.graphics.BitmapFactory; import android.graphics.SurfaceTexture; import android.opengl.EGL14; import android.opengl.GLES11Ext; import android.opengl.GLES20; import android.opengl.GLSurfaceView; +import android.util.Log; import android.view.MotionEvent; +import com.aserbao.androidcustomcamera.R; import com.aserbao.androidcustomcamera.whole.record.encoder.TextureMovieEncoder; import com.aserbao.androidcustomcamera.whole.record.filters.BaseFilter; import com.aserbao.androidcustomcamera.whole.record.filters.CameraFilter; import com.aserbao.androidcustomcamera.whole.record.filters.GroupFilter; import com.aserbao.androidcustomcamera.whole.record.filters.NoneFilter; -import com.aserbao.androidcustomcamera.whole.record.filters.ProcessFilter; +import com.aserbao.androidcustomcamera.whole.record.filters.CameraDrawProcessFilter; +import com.aserbao.androidcustomcamera.whole.record.filters.WaterMarkFilter; import com.aserbao.androidcustomcamera.whole.record.filters.gpuFilters.baseFilter.MagicBeautyFilter; import com.aserbao.androidcustomcamera.whole.record.ui.SlideGpuFilterGroup; import com.aserbao.androidcustomcamera.whole.record.utils.EasyGlUtils; @@ -69,17 +74,16 @@ public CameraDrawer(Resources resources) { //初始化一个滤镜 也可以叫控制器 showFilter = new 
NoneFilter(resources); drawFilter = new CameraFilter(resources); - mProcessFilter=new ProcessFilter(resources); + mProcessFilter=new CameraDrawProcessFilter(resources); mBeFilter = new GroupFilter(resources); mAfFilter = new GroupFilter(resources); mBeautyFilter = new MagicBeautyFilter(); // mBeautyFilter = new MagicAntiqueFilter(); mSlideFilterGroup = new SlideGpuFilterGroup(); OM = MatrixUtils.getOriginalMatrix(); - MatrixUtils.flip(OM,false,true);//矩阵上下翻转 + MatrixUtils.flip(OM,false,false);//矩阵上下翻转 showFilter.setMatrix(OM); } - @Override public void onSurfaceCreated(GL10 gl, EGLConfig config) { textureID = createTextureID(); diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/AFilter.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/AFilter.java new file mode 100644 index 0000000..07e6f3c --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/AFilter.java @@ -0,0 +1,339 @@ +package com.aserbao.androidcustomcamera.whole.record.filters; + +import android.content.res.Resources; +import android.opengl.GLES20; +import android.util.Log; +import android.util.SparseArray; + +import com.aserbao.androidcustomcamera.whole.record.utils.MatrixUtils; + +import java.io.InputStream; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.FloatBuffer; +import java.util.Arrays; + +/** + * Description: + */ +public abstract class AFilter { + + private static final String TAG="Filter"; + + + public static boolean DEBUG=true; + /** + * 单位矩阵 + */ + public static final float[] OM= MatrixUtils.getOriginalMatrix(); + /** + * 程序句柄 + */ + protected int mProgram; + /** + * 顶点坐标句柄 + */ + protected int mHPosition; + /** + * 纹理坐标句柄 + */ + protected int mHCoord; + /** + * 总变换矩阵句柄 + */ + protected int mHMatrix; + /** + * 默认纹理贴图句柄 + */ + protected int mHTexture; + + protected Resources mRes; + + + /** + * 顶点坐标Buffer + */ + protected FloatBuffer mVerBuffer; + + /** + * 纹理坐标Buffer + */ 
+ protected FloatBuffer mTexBuffer; + + /** + * 索引坐标Buffer + */ + + protected int mFlag=0; + + private float[] matrix= Arrays.copyOf(OM,16); + + private int textureType=0; //默认使用Texture2D0 + private int textureId=0; + //顶点坐标 + private float pos[] = { + -1.0f, 1.0f, + -1.0f, -1.0f, + 1.0f, 1.0f, + 1.0f, -1.0f, + }; + + //纹理坐标 + private float[] coord={ + 0.0f, 0.0f, + 0.0f, 1.0f, + 1.0f, 0.0f, + 1.0f, 1.0f, + }; + + private SparseArray mBools; + private SparseArray mInts; + private SparseArray mFloats; + + + public AFilter(Resources mRes){ + this.mRes=mRes; + initBuffer(); + } + + public final void create(){ + onCreate(); + } + + public final void setSize(int width,int height){ + onSizeChanged(width,height); + } + + public void draw(){ + onClear(); + onUseProgram(); + onSetExpandData(); + onBindTexture(); + onDraw(); + } + public void draw(long time){ + onClear(); + onUseProgram(); + onSetExpandData(); + onBindTexture(); + onDraw(); + } + + public final void setMatrix(float[] matrix){ + this.matrix=matrix; + } + + public float[] getMatrix(){ + return matrix; + } + + public final void setTextureType(int type){ + this.textureType=type; + } + + public final int getTextureType(){ + return textureType; + } + + public final int getTextureId(){ + return textureId; + } + + public final void setTextureId(int textureId){ + this.textureId=textureId; + } + + public void setFlag(int flag){ + this.mFlag=flag; + } + + public int getFlag(){ + return mFlag; + } + + public void setFloat(int type,float ... params){ + if(mFloats==null){ + mFloats=new SparseArray<>(); + } + mFloats.put(type,params); + } + public void setInt(int type,int ... params){ + if(mInts==null){ + mInts=new SparseArray<>(); + } + mInts.put(type,params); + } + public void setBool(int type,boolean ... 
params){ + if(mBools==null){ + mBools=new SparseArray<>(); + } + mBools.put(type,params); + } + + public boolean getBool(int type,int index) { + if (mBools == null) return false; + boolean[] b = mBools.get(type); + return !(b == null || b.length <= index) && b[index]; + } + + public int getInt(int type,int index){ + if (mInts == null) return 0; + int[] b = mInts.get(type); + if(b == null || b.length <= index){ + return 0; + } + return b[index]; + } + + public float getFloat(int type,int index){ + if (mFloats == null) return 0; + float[] b = mFloats.get(type); + if(b == null || b.length <= index){ + return 0; + } + return b[index]; + } + + public int getOutputTexture(){ + return -1; + } + + /** + * 实现此方法,完成程序的创建,可直接调用createProgram来实现 + */ + protected abstract void onCreate(); + protected abstract void onSizeChanged(int width,int height); + + protected final void createProgram(String vertex, String fragment){ + mProgram= uCreateGlProgram(vertex,fragment); + mHPosition= GLES20.glGetAttribLocation(mProgram, "vPosition"); + mHCoord= GLES20.glGetAttribLocation(mProgram,"vCoord"); + mHMatrix= GLES20.glGetUniformLocation(mProgram,"vMatrix"); + mHTexture= GLES20.glGetUniformLocation(mProgram,"vTexture"); + } + + protected final void createProgramByAssetsFile(String vertex, String fragment){ + createProgram(uRes(mRes,vertex),uRes(mRes,fragment)); + } + + /** + * Buffer初始化 + */ + protected void initBuffer(){ + ByteBuffer a= ByteBuffer.allocateDirect(32); + a.order(ByteOrder.nativeOrder()); + mVerBuffer=a.asFloatBuffer(); + mVerBuffer.put(pos); + mVerBuffer.position(0); + ByteBuffer b= ByteBuffer.allocateDirect(32); + b.order(ByteOrder.nativeOrder()); + mTexBuffer=b.asFloatBuffer(); + mTexBuffer.put(coord); + mTexBuffer.position(0); + } + + protected void onUseProgram(){ + GLES20.glUseProgram(mProgram); + } + + /** + * 启用顶点坐标和纹理坐标进行绘制 + */ + protected void onDraw(){ + GLES20.glEnableVertexAttribArray(mHPosition); + GLES20.glVertexAttribPointer(mHPosition,2, GLES20.GL_FLOAT, 
false, 0,mVerBuffer); + GLES20.glEnableVertexAttribArray(mHCoord); + GLES20.glVertexAttribPointer(mHCoord, 2, GLES20.GL_FLOAT, false, 0, mTexBuffer); + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP,0,4); + GLES20.glDisableVertexAttribArray(mHPosition); + GLES20.glDisableVertexAttribArray(mHCoord); + } + + /** + * 清除画布 + */ + protected void onClear(){ + GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f); + GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT); + } + + /** + * 设置其他扩展数据 + */ + protected void onSetExpandData(){ + GLES20.glUniformMatrix4fv(mHMatrix,1,false,matrix,0); + } + + /** + * 绑定默认纹理 + */ + protected void onBindTexture(){ + GLES20.glActiveTexture(GLES20.GL_TEXTURE0+textureType); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,getTextureId()); + GLES20.glUniform1i(mHTexture,textureType); + } + + public static void glError(int code, Object index){ + if(DEBUG&&code!=0){ + Log.e(TAG,"glError:"+code+"---"+index); + } + } + + //通过路径加载Assets中的文本内容 + public static String uRes(Resources mRes, String path){ + StringBuilder result=new StringBuilder(); + try{ + InputStream is=mRes.getAssets().open(path); + int ch; + byte[] buffer=new byte[1024]; + while (-1!=(ch=is.read(buffer))){ + result.append(new String(buffer,0,ch)); + } + }catch (Exception e){ + return null; + } + return result.toString().replaceAll("\\r\\n","\n"); + } + + //创建GL程序 + public static int uCreateGlProgram(String vertexSource, String fragmentSource){ + int vertex=uLoadShader(GLES20.GL_VERTEX_SHADER,vertexSource); + if(vertex==0)return 0; + int fragment=uLoadShader(GLES20.GL_FRAGMENT_SHADER,fragmentSource); + if(fragment==0)return 0; + int program= GLES20.glCreateProgram(); + if(program!=0){ + GLES20.glAttachShader(program,vertex); + GLES20.glAttachShader(program,fragment); + GLES20.glLinkProgram(program); + int[] linkStatus=new int[1]; + GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS,linkStatus,0); + if(linkStatus[0]!= GLES20.GL_TRUE){ + glError(1,"Could not link program:"+ 
GLES20.glGetProgramInfoLog(program)); + GLES20.glDeleteProgram(program); + program=0; + } + } + return program; + } + + /**加载shader*/ + public static int uLoadShader(int shaderType, String source){ + int shader= GLES20.glCreateShader(shaderType); + if(0!=shader){ + GLES20.glShaderSource(shader,source); + GLES20.glCompileShader(shader); + int[] compiled=new int[1]; + GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS,compiled,0); + if(compiled[0]==0){ + glError(1,"Could not compile shader:"+shaderType); + glError(1,"GLES20 Error:"+ GLES20.glGetShaderInfoLog(shader)); + GLES20.glDeleteShader(shader); + shader=0; + } + } + return shader; + } + + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/CameraDrawProcessFilter.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/CameraDrawProcessFilter.java new file mode 100644 index 0000000..1728c82 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/CameraDrawProcessFilter.java @@ -0,0 +1,83 @@ +package com.aserbao.androidcustomcamera.whole.record.filters; + +import android.content.res.Resources; +import android.opengl.GLES20; + +import com.aserbao.androidcustomcamera.whole.record.utils.EasyGlUtils; +import com.aserbao.androidcustomcamera.whole.record.utils.MatrixUtils; + +/** + * description:这里重写了draw方法,所以父类中的矩阵对此绘制无效 + * Created by aserbao on 2018/5/15. 
+ */ + +public class CameraDrawProcessFilter extends BaseFilter { + private BaseFilter mFilter; + + private int[] fFrame = new int[1]; + private int[] fRender = new int[1]; + private int[] fTexture = new int[1]; + + private int width; + private int height; + + public CameraDrawProcessFilter(Resources mRes) { + super(mRes); + mFilter=new NoneFilter(mRes); + float[] OM= MatrixUtils.getOriginalMatrix(); + MatrixUtils.flip(OM,false,false);//矩阵上下翻转 + mFilter.setMatrix(OM); + } + + @Override + protected void onCreate() { + mFilter.create(); + } + + @Override + public int getOutputTexture() { + return fTexture[0]; + } + + @Override + public void draw() { + boolean b= GLES20.glIsEnabled(GLES20.GL_CULL_FACE); + if(b){ + GLES20.glDisable(GLES20.GL_CULL_FACE); + } + GLES20.glViewport(0,0,width,height); + EasyGlUtils.bindFrameTexture(fFrame[0],fTexture[0]); + GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT, + GLES20.GL_RENDERBUFFER, fRender[0]); + mFilter.setTextureId(getTextureId()); + mFilter.draw(); + EasyGlUtils.unBindFrameBuffer(); + if(b){ + GLES20.glEnable(GLES20.GL_CULL_FACE); + } + } + + @Override + protected void onSizeChanged(int width, int height) { + if(this.width!=width&&this.height!=height){ + this.width=width; + this.height=height; + mFilter.setSize(width,height); + deleteFrameBuffer(); + GLES20.glGenFramebuffers(1,fFrame,0); + GLES20.glGenRenderbuffers(1,fRender,0); + GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER,fRender[0]); + GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_DEPTH_COMPONENT16, + width, height); + GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT, + GLES20.GL_RENDERBUFFER, fRender[0]); + GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER,0); + EasyGlUtils.genTexturesWithParameter(1,fTexture,0, GLES20.GL_RGBA,width,height); + } + } + private void deleteFrameBuffer() { + GLES20.glDeleteRenderbuffers(1, fRender, 0); + GLES20.glDeleteFramebuffers(1, fFrame, 0); + 
GLES20.glDeleteTextures(1, fTexture, 0); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/GifDecoder.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/GifDecoder.java new file mode 100644 index 0000000..bc672b0 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/GifDecoder.java @@ -0,0 +1,624 @@ +package com.aserbao.androidcustomcamera.whole.record.filters; + +import android.graphics.Bitmap; +import android.graphics.Bitmap.Config; + +import java.io.InputStream; +import java.util.Vector; + +//Handler for read & extract Bitmap from *.gif +public class GifDecoder { + + // to store *.gif data, Bitmap & delay + class GifFrame { + // to access image & delay w/o interfaces + public Bitmap image; + public int delay; + + public GifFrame(Bitmap im, int del) { + image = im; + delay = del; + } + } + + // to define some error type + public static final int STATUS_OK = 0; + public static final int STATUS_FORMAT_ERROR = 1; + public static final int STATUS_OPEN_ERROR = 2; + + protected int status; + + protected InputStream in; + + protected int width; // full image width + protected int height; // full image height + protected boolean gctFlag; // global color table used + protected int gctSize; // size of global color table + protected int loopCount = 1; // iterations; 0 = repeat forever + + protected int[] gct; // global color table + protected int[] lct; // local color table + protected int[] act; // active color table + + protected int bgIndex; // background color index + protected int bgColor; // background color + protected int lastBgColor; // previous bg color + protected int pixelAspect; // pixel aspect ratio + + protected boolean lctFlag; // local color table flag + protected boolean interlace; // interlace flag + protected int lctSize; // local color table size + + protected int ix, iy, iw, ih; // current image rectangle + protected int lrx, lry, lrw, lrh; + 
protected Bitmap image; // current frame + protected Bitmap lastImage; // previous frame + protected int frameindex = 0; + + public int getFrameindex() { + return frameindex; + } + + public void setFrameindex(int frameindex) { + this.frameindex = frameindex; + if (frameindex > frames.size() - 1) { + frameindex = 0; + } + } + + protected byte[] block = new byte[256]; // current data block + protected int blockSize = 0; // block size + + // last graphic control extension info + protected int dispose = 0; + // 0=no action; 1=leave in place; 2=restore to bg; 3=restore to prev + protected int lastDispose = 0; + protected boolean transparency = false; // use transparent color + protected int delay = 0; // delay in milliseconds + protected int transIndex; // transparent color index + + protected static final int MaxStackSize = 4096; + // max decoder pixel stack size + + // LZW decoder working arrays + protected short[] prefix; + protected byte[] suffix; + protected byte[] pixelStack; + protected byte[] pixels; + + protected Vector frames; // frames read from current file + protected int frameCount; + + // to get its Width / Height + public int getWidth() { + return width; + } + + public int getHeigh() { + return height; + } + + /** + * Gets display duration for specified frame. 
+ * + * @param n + * int index of frame + * @return delay in milliseconds + */ + public int getDelay(int n) { + delay = -1; + if ((n >= 0) && (n < frameCount)) { + delay = ((GifFrame) frames.elementAt(n)).delay; + } + return delay; + } + + public int getFrameCount() { + return frameCount; + } + + public Bitmap getImage() { + return getFrame(0); + } + + public int getLoopCount() { + return loopCount; + } + + protected void setPixels() { + int[] dest = new int[width * height]; + // fill in starting image contents based on last image's dispose code + if (lastDispose > 0) { + if (lastDispose == 3) { + // use image before last + int n = frameCount - 2; + if (n > 0) { + lastImage = getFrame(n - 1); + } else { + lastImage = null; + } + } + if (lastImage != null) { + lastImage.getPixels(dest, 0, width, 0, 0, width, height); + // copy pixels + if (lastDispose == 2) { + // fill last image rect area with background color + int c = 0; + if (!transparency) { + c = lastBgColor; + } + for (int i = 0; i < lrh; i++) { + int n1 = (lry + i) * width + lrx; + int n2 = n1 + lrw; + for (int k = n1; k < n2; k++) { + dest[k] = c; + } + } + } + } + } + + // copy each source line to the appropriate place in the destination + int pass = 1; + int inc = 8; + int iline = 0; + for (int i = 0; i < ih; i++) { + int line = i; + if (interlace) { + if (iline >= ih) { + pass++; + switch (pass) { + case 2: + iline = 4; + break; + case 3: + iline = 2; + inc = 4; + break; + case 4: + iline = 1; + inc = 2; + } + } + line = iline; + iline += inc; + } + line += iy; + if (line < height) { + int k = line * width; + int dx = k + ix; // start of line in dest + int dlim = dx + iw; // end of dest line + if ((k + width) < dlim) { + dlim = k + width; // past dest edge + } + int sx = i * iw; // start of line in source + while (dx < dlim) { + // map color and insert in destination + int index = ((int) pixels[sx++]) & 0xff; + int c = act[index]; + if (c != 0) { + dest[dx] = c; + } + dx++; + } + } + } + image = 
Bitmap.createBitmap(dest, width, height, Config.ARGB_4444); + } + + public Bitmap getFrame(int n) { + Bitmap im = null; + if ((n >= 0) && (n < frameCount)) { + im = ((GifFrame) frames.elementAt(n)).image; + } + return im; + } + + public Bitmap nextBitmap() { + frameindex++; + if (frameindex > frames.size() - 1) { + frameindex = 0; + } + return ((GifFrame) frames.elementAt(frameindex)).image; + } + + public int nextDelay() { + return ((GifFrame) frames.elementAt(frameindex)).delay; + } + + // to read & parse all *.gif stream + public int read(InputStream is) { + init(); + if (is != null) { + in = is; + + readHeader(); + if (!err()) { + readContents(); + if (frameCount < 0) { + status = STATUS_FORMAT_ERROR; + } + } + } else { + status = STATUS_OPEN_ERROR; + } + try { + is.close(); + } catch (Exception e) { + e.printStackTrace(); + } + return status; + } + + protected void decodeImageData() { + int NullCode = -1; + int npix = iw * ih; + int available, clear, code_mask, code_size, end_of_information, in_code, old_code, bits, code, count, i, datum, data_size, first, top, bi, pi; + + if ((pixels == null) || (pixels.length < npix)) { + pixels = new byte[npix]; // allocate new pixel array + } + if (prefix == null) { + prefix = new short[MaxStackSize]; + } + if (suffix == null) { + suffix = new byte[MaxStackSize]; + } + if (pixelStack == null) { + pixelStack = new byte[MaxStackSize + 1]; + } + // Initialize GIF data stream decoder. + data_size = read(); + clear = 1 << data_size; + end_of_information = clear + 1; + available = clear + 2; + old_code = NullCode; + code_size = data_size + 1; + code_mask = (1 << code_size) - 1; + for (code = 0; code < clear; code++) { + prefix[code] = 0; + suffix[code] = (byte) code; + } + + // Decode GIF pixel stream. + datum = bits = count = first = top = pi = bi = 0; + for (i = 0; i < npix;) { + if (top == 0) { + if (bits < code_size) { + // Load bytes until there are enough bits for a code. + if (count == 0) { + // Read a new data block. 
+ count = readBlock(); + if (count <= 0) { + break; + } + bi = 0; + } + datum += (((int) block[bi]) & 0xff) << bits; + bits += 8; + bi++; + count--; + continue; + } + // Get the next code. + code = datum & code_mask; + datum >>= code_size; + bits -= code_size; + + // Interpret the code + if ((code > available) || (code == end_of_information)) { + break; + } + if (code == clear) { + // Reset decoder. + code_size = data_size + 1; + code_mask = (1 << code_size) - 1; + available = clear + 2; + old_code = NullCode; + continue; + } + if (old_code == NullCode) { + pixelStack[top++] = suffix[code]; + old_code = code; + first = code; + continue; + } + in_code = code; + if (code == available) { + pixelStack[top++] = (byte) first; + code = old_code; + } + while (code > clear) { + pixelStack[top++] = suffix[code]; + code = prefix[code]; + } + first = ((int) suffix[code]) & 0xff; + // Add a new string to the string table, + if (available >= MaxStackSize) { + break; + } + pixelStack[top++] = (byte) first; + prefix[available] = (short) old_code; + suffix[available] = (byte) first; + available++; + if (((available & code_mask) == 0) + && (available < MaxStackSize)) { + code_size++; + code_mask += available; + } + old_code = in_code; + } + + // Pop a pixel off the pixel stack. 
+ top--; + pixels[pi++] = pixelStack[top]; + i++; + } + for (i = pi; i < npix; i++) { + pixels[i] = 0; // clear missing pixels + } + } + + protected boolean err() { + return status != STATUS_OK; + } + + // to initia variable + public void init() { + status = STATUS_OK; + frameCount = 0; + frames = new Vector(); + gct = null; + lct = null; + } + + protected int read() { + int curByte = 0; + try { + curByte = in.read(); + } catch (Exception e) { + status = STATUS_FORMAT_ERROR; + } + return curByte; + } + + protected int readBlock() { + blockSize = read(); + int n = 0; + if (blockSize > 0) { + try { + int count = 0; + while (n < blockSize) { + count = in.read(block, n, blockSize - n); + if (count == -1) { + break; + } + n += count; + } + } catch (Exception e) { + e.printStackTrace(); + } + if (n < blockSize) { + status = STATUS_FORMAT_ERROR; + } + } + return n; + } + + // Global Color Table + protected int[] readColorTable(int ncolors) { + int nbytes = 3 * ncolors; + int[] tab = null; + byte[] c = new byte[nbytes]; + int n = 0; + try { + n = in.read(c); + } catch (Exception e) { + e.printStackTrace(); + } + if (n < nbytes) { + status = STATUS_FORMAT_ERROR; + } else { + tab = new int[256]; // max size to avoid bounds checks + int i = 0; + int j = 0; + while (i < ncolors) { + int r = ((int) c[j++]) & 0xff; + int g = ((int) c[j++]) & 0xff; + int b = ((int) c[j++]) & 0xff; + tab[i++] = 0xff000000 | (r << 16) | (g << 8) | b; + } + } + return tab; + } + + // Image Descriptor + protected void readContents() { + // read GIF file content blocks + boolean done = false; + while (!(done || err())) { + int code = read(); + switch (code) { + case 0x2C: // image separator + readImage(); + break; + case 0x21: // extension + code = read(); + switch (code) { + case 0xf9: // graphics control extension + readGraphicControlExt(); + break; + + case 0xff: // application extension + readBlock(); + String app = ""; + for (int i = 0; i < 11; i++) { + app += (char) block[i]; + } + if 
(app.equals("NETSCAPE2.0")) { + readNetscapeExt(); + } else { + skip(); // don't care + } + break; + default: // uninteresting extension + skip(); + } + break; + + case 0x3b: // terminator + done = true; + break; + + case 0x00: // bad byte, but keep going and see what happens + break; + default: + status = STATUS_FORMAT_ERROR; + } + } + } + + protected void readGraphicControlExt() { + read(); // block size + int packed = read(); // packed fields + dispose = (packed & 0x1c) >> 2; // disposal method + if (dispose == 0) { + dispose = 1; // elect to keep old image if discretionary + } + transparency = (packed & 1) != 0; + delay = readShort() * 10; // delay in milliseconds + transIndex = read(); // transparent color index + read(); // block terminator + } + + // to get Stream - Head + protected void readHeader() { + String id = ""; + for (int i = 0; i < 6; i++) { + id += (char) read(); + } + if (!id.startsWith("GIF")) { + status = STATUS_FORMAT_ERROR; + return; + } + readLSD(); + if (gctFlag && !err()) { + gct = readColorTable(gctSize); + bgColor = gct[bgIndex]; + } + } + + protected void readImage() { + // offset of X + ix = readShort(); // (sub)image position & size + // offset of Y + iy = readShort(); + // width of bitmap + iw = readShort(); + // height of bitmap + ih = readShort(); + + // Local Color Table Flag + int packed = read(); + lctFlag = (packed & 0x80) != 0; // 1 - local color table flag + + // Interlace Flag, to array with interwoven if ENABLE, with order + // otherwise + interlace = (packed & 0x40) != 0; // 2 - interlace flag + // 3 - sort flag + // 4-5 - reserved + lctSize = 2 << (packed & 7); // 6-8 - local color table size + if (lctFlag) { + lct = readColorTable(lctSize); // read table + act = lct; // make local table active + } else { + act = gct; // make global table active + if (bgIndex == transIndex) { + bgColor = 0; + } + } + int save = 0; + if (transparency) { + save = act[transIndex]; + act[transIndex] = 0; // set transparent color if specified 
+ } + if (act == null) { + status = STATUS_FORMAT_ERROR; // no color table defined + } + if (err()) { + return; + } + decodeImageData(); // decode pixel data + skip(); + if (err()) { + return; + } + frameCount++; + // create new image to receive frame data + image = Bitmap.createBitmap(width, height, Config.ARGB_4444); + // createImage(width, height); + setPixels(); // transfer pixel data to image + frames.addElement(new GifFrame(image, delay)); // add image to frame + // list + if (transparency) { + act[transIndex] = save; + } + resetFrame(); + } + + // Logical Screen Descriptor + protected void readLSD() { + // logical screen size + width = readShort(); + height = readShort(); + // packed fields + int packed = read(); + gctFlag = (packed & 0x80) != 0; // 1 : global color table flag + // 2-4 : color resolution + // 5 : gct sort flag + gctSize = 2 << (packed & 7); // 6-8 : gct size + bgIndex = read(); // background color index + pixelAspect = read(); // pixel aspect ratio + } + + protected void readNetscapeExt() { + do { + readBlock(); + if (block[0] == 1) { + // loop count sub-block + int b1 = ((int) block[1]) & 0xff; + int b2 = ((int) block[2]) & 0xff; + loopCount = (b2 << 8) | b1; + } + } while ((blockSize > 0) && !err()); + } + + // read 8 bit data + protected int readShort() { + // read 16-bit value, LSB first + return read() | (read() << 8); + } + + protected void resetFrame() { + lastDispose = dispose; + lrx = ix; + lry = iy; + lrw = iw; + lrh = ih; + lastImage = image; + lastBgColor = bgColor; + dispose = 0; + transparency = false; + delay = 0; + lct = null; + } + + /** + * Skips variable length blocks up to and including next zero length block. 
+ */ + protected void skip() { + do { + readBlock(); + } while ((blockSize > 0) && !err()); + } +} \ No newline at end of file diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/GroupFilter.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/GroupFilter.java index c171c82..cfd723c 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/GroupFilter.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/GroupFilter.java @@ -17,8 +17,8 @@ public class GroupFilter extends BaseFilter{ - private Queue mFilterQueue; - private List mFilters; + private Queue mFilterQueue; + private List mFilters; private int width=0, height=0; private int size=0; @@ -29,7 +29,7 @@ public GroupFilter(Resources mRes) { } - public void addFilter(final BaseFilter filter){ + public void addFilter(final AFilter filter){ MatrixUtils.flip(filter.getMatrix(),false,true); mFilterQueue.add(filter); } @@ -41,8 +41,8 @@ public boolean removeFilter(BaseFilter filter){ return b; } - public BaseFilter removeFilter(int index){ - BaseFilter f=mFilters.remove(index); + public AFilter removeFilter(int index){ + AFilter f= mFilters.remove(index); if(f!=null){ size--; } @@ -51,10 +51,10 @@ public BaseFilter removeFilter(int index){ /** * 双Texture,一个输入一个输出,循环往复 */ - public void draw(){ + public void draw(long time){ updateFilter(); textureIndex=0; - for (BaseFilter filter:mFilters){ + for (AFilter filter:mFilters){ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fFrame[0]); GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, fTexture[textureIndex%2], 0); @@ -66,14 +66,14 @@ public void draw(){ }else{ filter.setTextureId(fTexture[(textureIndex-1)%2]); } - filter.draw(); + filter.draw(time); unBindFrame(); textureIndex++; } } private void updateFilter(){ - BaseFilter f; + AFilter f; while ((f=mFilterQueue.poll())!=null){ f.create(); 
f.setSize(width,height); diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/NoFilter.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/NoFilter.java new file mode 100644 index 0000000..a9f1f35 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/NoFilter.java @@ -0,0 +1,34 @@ +package com.aserbao.androidcustomcamera.whole.record.filters; + +import android.content.res.Resources; +import android.opengl.GLES20; + +/** + * Description: + */ +public class NoFilter extends AFilter { + + public NoFilter(Resources res) { + super(res); + } + + @Override + protected void onCreate() { + createProgramByAssetsFile("shader/base_vertex.sh", + "shader/base_fragment.sh"); + } + + /** + * 背景默认为黑色 + */ + @Override + protected void onClear() { + GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f); + GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT); + } + + @Override + protected void onSizeChanged(int width, int height) { + + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/ProcessFilter.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/ProcessFilter.java index 6d01b0d..287f114 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/ProcessFilter.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/ProcessFilter.java @@ -6,14 +6,15 @@ import com.aserbao.androidcustomcamera.whole.record.utils.EasyGlUtils; import com.aserbao.androidcustomcamera.whole.record.utils.MatrixUtils; + /** - * description:这里重写了draw方法,所以父类中的矩阵对此绘制无效 - * Created by aserbao on 2018/5/15. 
+ * draw并不执行父类的draw方法,所以矩阵对它无效 + * Description: */ +public class ProcessFilter extends AFilter { -public class ProcessFilter extends BaseFilter { - private BaseFilter mFilter; - + private AFilter mFilter; + //创建离屏buffer private int[] fFrame = new int[1]; private int[] fRender = new int[1]; private int[] fTexture = new int[1]; @@ -21,14 +22,20 @@ public class ProcessFilter extends BaseFilter { private int width; private int height; + public ProcessFilter(Resources mRes) { super(mRes); - mFilter=new NoneFilter(mRes); + mFilter=new NoFilter(mRes); float[] OM= MatrixUtils.getOriginalMatrix(); MatrixUtils.flip(OM,false,true);//矩阵上下翻转 mFilter.setMatrix(OM); } + @Override + protected void initBuffer() { + + } + @Override protected void onCreate() { mFilter.create(); @@ -48,7 +55,7 @@ public void draw() { GLES20.glViewport(0,0,width,height); EasyGlUtils.bindFrameTexture(fFrame[0],fTexture[0]); GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT, - GLES20.GL_RENDERBUFFER, fRender[0]); + GLES20.GL_RENDERBUFFER, fRender[0]); mFilter.setTextureId(getTextureId()); mFilter.draw(); EasyGlUtils.unBindFrameBuffer(); @@ -68,16 +75,18 @@ protected void onSizeChanged(int width, int height) { GLES20.glGenRenderbuffers(1,fRender,0); GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER,fRender[0]); GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_DEPTH_COMPONENT16, - width, height); + width, height); GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT, - GLES20.GL_RENDERBUFFER, fRender[0]); + GLES20.GL_RENDERBUFFER, fRender[0]); GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER,0); EasyGlUtils.genTexturesWithParameter(1,fTexture,0, GLES20.GL_RGBA,width,height); } } + private void deleteFrameBuffer() { GLES20.glDeleteRenderbuffers(1, fRender, 0); GLES20.glDeleteFramebuffers(1, fFrame, 0); GLES20.glDeleteTextures(1, fTexture, 0); } + } diff --git 
a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/RotationOESFilter.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/RotationOESFilter.java new file mode 100644 index 0000000..6a7801b --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/RotationOESFilter.java @@ -0,0 +1,66 @@ +package com.aserbao.androidcustomcamera.whole.record.filters; + +import android.content.res.Resources; + +/** + * Created by Administrator on 2017/6/19 0019. + */ + +public class RotationOESFilter extends OESFilter { + public static final int ROT_0 = 0; + public static final int ROT_90 = 90; + public static final int ROT_180 = 180; + public static final int ROT_270 = 270; + + public RotationOESFilter(Resources mRes) { + super(mRes); + } + + /** + * 旋转视频操作 + * + * @param rotation + */ + public void setRotation(int rotation) { + float[] coord; + switch (rotation) { + case ROT_0: + coord = new float[]{ + 0.0f, 0.0f, + 0.0f, 1.0f, + 1.0f, 0.0f, + 1.0f, 1.0f, + }; + break; + case ROT_90: + coord = new float[]{ + 0.0f, 1.0f, + 1.0f, 1.0f, + 0.0f, 0.0f, + 1.0f, 0.0f + }; + break; + case ROT_180: + coord = new float[]{ + 1.0f, 1.0f, + 1.0f, 0.0f, + 0.0f, 1.0f, + 0.0f, 0.0f, + }; + break; + case ROT_270: + coord = new float[]{ + 1.0f, 0.0f, + 0.0f, 0.0f, + 1.0f, 1.0f, + 0.0f, 1.0f + }; + break; + default: + return; + } + mTexBuffer.clear(); + mTexBuffer.put(coord); + mTexBuffer.position(0); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/WaterMarkFilter.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/WaterMarkFilter.java index cdf1026..94c48db 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/WaterMarkFilter.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/WaterMarkFilter.java @@ -2,67 +2,143 @@ import android.content.res.Resources; import android.graphics.Bitmap; +import 
android.graphics.Matrix; import android.opengl.GLES20; import android.opengl.GLUtils; -import com.aserbao.androidcustomcamera.whole.record.utils.MatrixUtils; + + +import java.io.InputStream; /** - * description: - * Created by aserbao on 2018/5/15. + * 水印的Filter */ - -public class WaterMarkFilter extends NoneFilter{ - private NoneFilter mFilter; - private Bitmap mBitmap; +public class WaterMarkFilter extends NoFilter{ private int x,y,w,h; private int width,height; + private Bitmap mBitmap; + private Bitmap mGifBitmap; + private NoFilter mFilter; + public android.graphics.Matrix mMatrix; + public GifDecoder mGifDecoder; + public WaterMarkFilter(Resources mRes) { super(mRes); - mFilter=new NoneFilter(mRes); + mFilter=new NoFilter(mRes){ + @Override + protected void onClear() { + } + }; + } + private boolean mIsGif = false; + private int mGifId; + private int mRotateDegree; + private Resources mResources; + public WaterMarkFilter(Resources res, boolean isGif, int bitRes, float rotateDegree) { + super(res); + mResources = res; + mGifId = bitRes; + mIsGif = isGif; + mRotateDegree = (int)rotateDegree; + mFilter=new NoFilter(mRes){ + @Override + protected void onClear() { + } + }; + } + + public void setWaterMark(Bitmap bitmap){ + if(this.mBitmap!=null && !mBitmap.isRecycled()){ + this.mBitmap.recycle(); + mBitmap = null; + } + if (mGifBitmap != null && !mGifBitmap.isRecycled()) { + mGifBitmap.recycle(); + mGifBitmap = null; + } + this.mBitmap=bitmap; + } + private long mStartTime,mEndTime; + public void setShowTime(long startTime,long endTime){ + mStartTime = startTime; + mEndTime = endTime; + } + private float[] mRotationMatrix = new float[16]; + @Override + public void draw() { + super.draw(); + GLES20.glViewport(x,y,w == 0 ? 
mBitmap.getWidth():w,h==0?mBitmap.getHeight():h); + blendFunc(); + mFilter.draw(); + } + @Override + public void draw(long time) { + super.draw(); + if(mIsGif){ + createTexture(); + } + if(time > mStartTime && time < mEndTime) { + int i = (int) (mBitmap.getWidth() * 1.15); + int i1 = (int) (mBitmap.getHeight() * 1.15); + GLES20.glViewport(x, y, w == 0 ? i : w, h == 0 ? i1 : h); + blendFunc(); + mFilter.draw(); + } + } + + private void blendFunc() { + GLES20.glEnable(GLES20.GL_BLEND); +// GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA); + GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ONE_MINUS_SRC_ALPHA);//使用这个混合算法可以合成带透明度的贴纸。参考:https://www.jianshu.com/p/2fb9d90b57f0 } @Override protected void onCreate() { + super.onCreate(); mFilter.create(); + if(mIsGif){ + mGifDecoder = new GifDecoder(); + InputStream inputStream = mResources.openRawResource(mGifId); + mGifDecoder.read(inputStream); + mMatrix = new android.graphics.Matrix(); + mMatrix.postRotate(mRotateDegree); + } createTexture(); } - private int[] textures=new int[1]; private void createTexture() { if(mBitmap!=null){ - //生成纹理 GLES20.glGenTextures(1,textures,0); - //生成纹理 GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,textures[0]); - //设置缩小过滤为使用纹理中坐标最接近的一个像素的颜色作为需要绘制的像素颜色 GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST); - //设置放大过滤为使用纹理中坐标最接近的若干个颜色,通过加权平均算法得到需要绘制的像素颜色 GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); - //设置环绕方向S,截取纹理坐标到[1/2n,1-1/2n]。将导致永远不会与border融合 GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); - //设置环绕方向T,截取纹理坐标到[1/2n,1-1/2n]。将导致永远不会与border融合 GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); - GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, mBitmap, 0); + if(!mIsGif) { + GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, mBitmap, 0); + }else { + mGifBitmap = mGifDecoder.nextBitmap(); + if (mGifBitmap != 
null) { + GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, Bitmap.createBitmap(mGifBitmap, 0, 0, mGifBitmap.getWidth(), mGifBitmap.getHeight(), mMatrix, true), 0); + } + } //对画面进行矩阵旋转 - MatrixUtils.flip(mFilter.getMatrix(),false,true); - +// MatrixUtils.flip(mFilter.getMatrix(),false,true); mFilter.setTextureId(textures[0]); } } - + public void setMatrix(Matrix matrix){ + mMatrix = matrix; + } @Override protected void onSizeChanged(int width, int height) { this.width=width; this.height=height; + /*GLES20.glEnable(GLES20.GL_BLEND); + GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA,GLES20.GL_ONE_MINUS_SRC_ALPHA);*/ mFilter.setSize(width,height); } - @Override - public void draw(){ - GLES20.glViewport(x,y,w == 0 ? mBitmap.getWidth():w,h==0?mBitmap.getHeight():h); - mFilter.draw(); - } public void setPosition(int x,int y,int width,int height){ this.x=x; this.y=y; diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/WaterMarkFilterO.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/WaterMarkFilterO.java new file mode 100644 index 0000000..5604e8b --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/WaterMarkFilterO.java @@ -0,0 +1,72 @@ +package com.aserbao.androidcustomcamera.whole.record.filters; + +import android.content.res.Resources; +import android.graphics.Bitmap; +import android.opengl.GLES20; +import android.opengl.GLUtils; + +import com.aserbao.androidcustomcamera.whole.record.utils.MatrixUtils; + +/** + * description: + * Created by aserbao on 2018/5/15. 
+ */ + + +public class WaterMarkFilterO extends NoneFilter{ + private NoneFilter mFilter; + private Bitmap mBitmap; + private int x,y,w,h; + private int width,height; + public WaterMarkFilterO(Resources mRes) { + super(mRes); + mFilter=new NoneFilter(mRes); + } + + @Override + protected void onCreate() { + mFilter.create(); + createTexture(); + } + + private int[] textures=new int[1]; + private void createTexture() { + if(mBitmap!=null){ + //生成纹理 + GLES20.glGenTextures(1,textures,0); + //生成纹理 + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,textures[0]); + //设置缩小过滤为使用纹理中坐标最接近的一个像素的颜色作为需要绘制的像素颜色 + GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST); + //设置放大过滤为使用纹理中坐标最接近的若干个颜色,通过加权平均算法得到需要绘制的像素颜色 + GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); + //设置环绕方向S,截取纹理坐标到[1/2n,1-1/2n]。将导致永远不会与border融合 + GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); + //设置环绕方向T,截取纹理坐标到[1/2n,1-1/2n]。将导致永远不会与border融合 + GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); + GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, mBitmap, 0); + //对画面进行矩阵旋转 +// MatrixUtils.flip(mFilter.getMatrix(),false,true); + + mFilter.setTextureId(textures[0]); + } + } + + @Override + protected void onSizeChanged(int width, int height) { + this.width=width; + this.height=height; + mFilter.setSize(width,height); + } + @Override + public void draw(){ + GLES20.glViewport(x,y,w == 0 ? 
mBitmap.getWidth():w,h==0?mBitmap.getHeight():h); + mFilter.draw(); + } + public void setPosition(int x,int y,int width,int height){ + this.x=x; + this.y=y; + this.w=width; + this.h=height; + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/gpuFilters/baseFilter/MagicBeautyFilter.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/gpuFilters/baseFilter/MagicBeautyFilter.java index bf48bcf..84bdbf1 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/gpuFilters/baseFilter/MagicBeautyFilter.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/gpuFilters/baseFilter/MagicBeautyFilter.java @@ -6,10 +6,6 @@ import com.aserbao.androidcustomcamera.whole.record.filters.gpuFilters.utils.OpenGlUtils; -/** - * Created by cj on 2017/5/22. - * 美白的filter - */ public class MagicBeautyFilter extends GPUImageFilter { private int mSingleStepOffsetLocation; private int mParamsLocation; diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/ui/CameraView.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/ui/CameraView.java index 9f8c52e..f34fd0c 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/ui/CameraView.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/ui/CameraView.java @@ -43,7 +43,6 @@ private void init() { setRenderMode(RENDERMODE_WHEN_DIRTY);//主动调用渲染 setPreserveEGLContextOnPause(true);//保存Context当pause时 setCameraDistance(100);//相机距离 - /**初始化Camera的绘制类*/ mCameraDrawer = new CameraDrawer(getResources()); /**初始化相机的管理类*/ diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/ui/CustomRecordImageView.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/ui/CustomRecordImageView.java new file mode 100644 index 0000000..a1d1a44 --- /dev/null +++ 
b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/ui/CustomRecordImageView.java @@ -0,0 +1,202 @@ +package com.aserbao.androidcustomcamera.whole.record.ui; + +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Paint; +import android.graphics.RectF; +import android.os.Handler; +import android.os.Message; +import android.support.annotation.Nullable; +import android.util.AttributeSet; +import android.util.Log; +import android.view.View; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.base.utils.StaticFinalValues; + +import java.lang.ref.WeakReference; + + +/** + * description: + * Created by aserbao on 2018/7/24. + */ + + +public class CustomRecordImageView extends View { + + private static final String TAG = "CustomRecordImageView"; + + public static final int START = 1; + public static final int STOP = 2; + public static final int PROCESS = 3; + public Paint mPaint; + private int radiu = 0; + private boolean isAdd = true; + private int isChangeNum = 0; + private int changeTime = 10;//中间改变需要几次 + private int mStartHWidth = 150; + private int mStopHWidth = 100; + private int mStartPaintWidth = 10; + private int mStopPaintWidth = 15; + private int mStopIntervalWidth = 10; + private int mCorner = 10;//圆角半径 + private boolean isRecording = false;//是否正在录制中 + private int cuurStatus = 1 ;//0表示正在录制,1表示暂停录制,2表示暂停到开始中间过程,3表示录制到暂停的中间过程 + + public CustomRecordImageView(Context context) { + this(context,null); + } + + public CustomRecordImageView(Context context, @Nullable AttributeSet attrs) { + this(context, attrs,0); + } + + public CustomRecordImageView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) { + super(context, attrs, defStyleAttr); + initView(); + } + + private void initView() { + mPaint = new Paint(); + mPaint.setStrokeWidth(5); + mPaint.setColor(Color.parseColor("#fc4253")); + mPaint.setAntiAlias(true); + } + @Override + protected 
void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + } + + @Override + protected void onDraw(Canvas canvas) { + super.onDraw(canvas); + switch (cuurStatus){ + case 0: + if (isAdd) { + if (radiu < 15) { + radiu++; + } else { + isAdd = false; + } + } else { + if (radiu > 0) { + radiu--; + } else { + isAdd = true; + } + } + mPaint.setStyle(Paint.Style.STROKE); + mPaint.setStrokeWidth(radiu + mStartPaintWidth); + RectF rectF = new RectF(getWidth() / 2 - mStartHWidth, getHeight() / 2 - mStartHWidth, getWidth() / 2 + mStartHWidth, getHeight() / 2 + mStartHWidth); + canvas.drawArc(rectF, 0, 360, false, mPaint); + int hald = mStartHWidth / 3; + RectF rectF1 = new RectF(getWidth() / 2 - hald, getHeight() / 2 - hald, getWidth() / 2 + hald, getHeight() / 2 + hald); + mPaint.setStyle(Paint.Style.FILL); + canvas.drawRoundRect(rectF1, mCorner + radiu, mCorner + radiu , mPaint); + invalidate(); + break; + case 1: + mPaint.setStyle(Paint.Style.STROKE); + mPaint.setStrokeWidth(mStopPaintWidth); + RectF rectF2 = new RectF(getWidth() / 2 - mStopHWidth, getHeight() / 2 - mStopHWidth, getWidth() / 2 + mStopHWidth, getHeight() / 2 + mStopHWidth); + canvas.drawArc(rectF2, 0, 360, false, mPaint); + mPaint.setStyle(Paint.Style.FILL); + canvas.drawCircle(getWidth() / 2,getHeight() / 2, mStopHWidth - mStopPaintWidth - mStopIntervalWidth , mPaint); + break; + case 2: + isChangeNum ++ ; + if(isChangeNum <= changeTime){ + mPaint.setStyle(Paint.Style.STROKE); + mPaint.setStrokeWidth(mStopPaintWidth); + if(isRecording) { + int hWidth = (mStartHWidth - mStopHWidth) / changeTime * isChangeNum + mStopHWidth; + RectF rectF3 = new RectF(getWidth() / 2 - hWidth, getHeight() / 2 - hWidth, getWidth() / 2 + hWidth, getHeight() / 2 + hWidth); + canvas.drawArc(rectF3, 0, 360, false, mPaint); + int hald2 = mStartHWidth / 3; + mPaint.setStyle(Paint.Style.FILL); + int middle = changeTime / 2; + if (isChangeNum > middle) { + RectF rectF22 = new 
RectF(getWidth() / 2 - hald2, getHeight() / 2 - hald2, getWidth() / 2 + hald2, getHeight() / 2 + hald2); + int cuurCorner = 50 - (50 - mCorner) / middle * (isChangeNum - middle); + canvas.drawRoundRect(rectF22, cuurCorner, cuurCorner, mPaint); + }else{ + int radius1 = mStopHWidth - mStopPaintWidth - mStopIntervalWidth; + int radius = radius1 - (radius1 - hald2) / middle * isChangeNum; + canvas.drawCircle(getWidth() / 2, getHeight() / 2, radius, mPaint); + } + }else{ + int hWidth =mStartHWidth - (mStartHWidth - mStopHWidth) / changeTime * isChangeNum ; + RectF rectF3 = new RectF(getWidth() / 2 - hWidth, getHeight() / 2 - hWidth, getWidth() / 2 + hWidth, getHeight() / 2 + hWidth); + canvas.drawArc(rectF3, 0, 360, false, mPaint); + mPaint.setStyle(Paint.Style.FILL); + int hald2 = mStartHWidth / 3; + int middle = changeTime / 2; + if (isChangeNum < middle) { + RectF rectF22 = new RectF(getWidth() / 2 - hald2, getHeight() / 2 - hald2, getWidth() / 2 + hald2, getHeight() / 2 + hald2); + int cuurCorner = mCorner + (50 - mCorner) / middle * isChangeNum ; + canvas.drawRoundRect(rectF22, cuurCorner, cuurCorner, mPaint); + }else{ + int radius1 = mStopHWidth - mStopPaintWidth - mStopIntervalWidth; + int radius = hald2 + (radius1 - hald2) / middle * (isChangeNum - middle); + canvas.drawCircle(getWidth() / 2, getHeight() / 2, radius, mPaint); + } + } + }else{ + isChangeNum = 0; + if(isRecording){ + cuurStatus = 0; + }else{ + cuurStatus = 1; + } + } + invalidate(); + Log.e(TAG, "onDraw: " ); + break; + } + } + + public boolean getRecordStatus(){ + return isRecording; + } + public void startRecord(){ + isRecording = true; + cuurStatus = 2; + invalidate(); + } + public void stopRecord(){ + isRecording = false; + cuurStatus = 2; + invalidate(); + } + + private MyHandler mMyHandler = new MyHandler(new WeakReference(this)); + + public class MyHandler extends Handler { + private WeakReference mCustomRecordImageViewWeakReference; + + public MyHandler(WeakReference 
customRecordImageViewWeakReference) { + mCustomRecordImageViewWeakReference = customRecordImageViewWeakReference; + } + + @Override + public void handleMessage(Message msg) { + super.handleMessage(msg); + CustomRecordImageView customRecordImageView = mCustomRecordImageViewWeakReference.get(); + if (customRecordImageView != null) { + switch (msg.what){ + case StaticFinalValues.EMPTY: + while(true) { + customRecordImageView.invalidate(); + } + case STOP: + break; + case START: + break; + } + } + } + } + +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/ui/ProgressView.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/ui/ProgressView.java index 9f09600..cb1786b 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/ui/ProgressView.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/ui/ProgressView.java @@ -102,7 +102,7 @@ public void dispatchMessage(Message msg) { case HANDLER_INVALIDATE_ACTIVE: if (!progressView.mStop) sendEmptyMessageDelayed(HANDLER_INVALIDATE_ACTIVE, 50); - progressView.invalidate(); + progressView.invalidate(); if(!progressView.mProgressChanged) { progressView.mActiveState = !progressView.mActiveState; }else{ diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/ui/SlideGpuFilterGroup.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/ui/SlideGpuFilterGroup.java index a33ce9a..57ac8bd 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/ui/SlideGpuFilterGroup.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/ui/SlideGpuFilterGroup.java @@ -39,7 +39,13 @@ public class SlideGpuFilterGroup { private int curIndex = 0; private Scroller scroller; private OnFilterChangeListener mListener; - + public void setFilter(int i){ + curIndex = i; + locked = true; + downX = -1; + needSwitch = true; + direction = -1; + } public SlideGpuFilterGroup() { initFilter(); scroller = new 
Scroller(MyApplication.getContext()); @@ -180,11 +186,11 @@ private void reCreateLeftFilter() { increaseCurIndex(); leftFilter.destroy(); leftFilter = curFilter; - curFilter = rightFilter; - rightFilter = getFilter(getRightIndex()); + rightFilter = getFilter(curIndex); rightFilter.init(); rightFilter.onDisplaySizeChanged(width, height); rightFilter.onInputSizeChanged(width, height); + curFilter = rightFilter; needSwitch = false; } diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/utils/MatrixUtils.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/utils/MatrixUtils.java index ca7f241..46ef8a7 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/utils/MatrixUtils.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/utils/MatrixUtils.java @@ -21,7 +21,7 @@ public class MatrixUtils { } /** - * use {@link #getMatrix} instead + * use {@link #getMatrix} instead``` */ public static void getShowMatrix(float[] matrix,int imgWidth,int imgHeight,int viewWidth,int viewHeight){ diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/selCover/SelCoverAdapter.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/selCover/SelCoverAdapter.java new file mode 100644 index 0000000..6bf3cd5 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/selCover/SelCoverAdapter.java @@ -0,0 +1,67 @@ +package com.aserbao.androidcustomcamera.whole.selCover; + +import android.content.Context; +import android.graphics.Bitmap; +import android.support.v7.widget.RecyclerView; +import android.view.LayoutInflater; +import android.view.View; +import android.view.ViewGroup; +import android.widget.ImageView; + +import com.aserbao.androidcustomcamera.R; + +import java.util.List; + +import butterknife.BindView; +import butterknife.ButterKnife; + +/** + * description: + * Created by aserbao on 2018/2/8. 
+ */ + + +public class SelCoverAdapter extends RecyclerView.Adapter { + + private Context mContext; + private List mBitmapList; + + public SelCoverAdapter(Context context) { + mContext = context; + + } + public void addBitmapList(List bitmapList){ + mBitmapList = bitmapList; + notifyDataSetChanged(); + } + @Override + public MyViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { + View view = LayoutInflater.from(mContext).inflate(R.layout.sel_cover_item, parent, false); + return new MyViewHolder(view); + } + + @Override + public void onBindViewHolder(MyViewHolder holder, int position) { + if(position < mBitmapList.size()) { + holder.mSelCoverIv.setImageBitmap(mBitmapList.get(position)); + } + } + + @Override + public int getItemCount() { + int ret = 0; + if (mBitmapList != null) { + ret = mBitmapList.size(); + } + return ret; + } + + class MyViewHolder extends RecyclerView.ViewHolder { + @BindView(R.id.sel_cover_iv) + ImageView mSelCoverIv; + public MyViewHolder(View itemView) { + super(itemView); + ButterKnife.bind(this, itemView); + } + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/selCover/SelCoverTimeActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/selCover/SelCoverTimeActivity.java new file mode 100644 index 0000000..d2307f6 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/selCover/SelCoverTimeActivity.java @@ -0,0 +1,232 @@ +package com.aserbao.androidcustomcamera.whole.selCover; + +import android.content.Context; +import android.content.ContextWrapper; +import android.content.Intent; +import android.graphics.Bitmap; +import android.media.MediaMetadataRetriever; +import android.net.Uri; +import android.os.AsyncTask; +import android.os.Bundle; +import android.os.Handler; +import android.os.Message; +import android.support.v7.app.AppCompatActivity; +import android.support.v7.widget.LinearLayoutManager; +import android.support.v7.widget.RecyclerView; +import 
android.util.Log; +import android.view.View; +import android.view.ViewGroup; +import android.widget.FrameLayout; +import android.widget.ImageView; +import android.widget.RelativeLayout; +import android.widget.TextView; +import android.widget.VideoView; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.base.utils.StaticFinalValues; +import com.aserbao.androidcustomcamera.base.utils.StatusBarUtil; +import com.aserbao.androidcustomcamera.whole.selCover.view.ThumbnailSelTimeView; + +import java.lang.ref.WeakReference; +import java.util.ArrayList; +import java.util.List; + +import butterknife.BindView; +import butterknife.ButterKnife; +import butterknife.OnClick; + +import static com.aserbao.androidcustomcamera.base.utils.StaticFinalValues.COMR_FROM_SEL_COVER_TIME_ACTIVITY; + +public class SelCoverTimeActivity extends AppCompatActivity { + + private static final int SEL_TIME = 0; + private static final int SUBMIT = 1; + private static final int SAVE_BITMAP = 2; + @BindView(R.id.iv_back) + ImageView mIvBack; + @BindView(R.id.cut_time_finish_tv) + TextView mCutTimeFinishTv; + @BindView(R.id.rl_title) + RelativeLayout mRlTitle; + @BindView(R.id.cut_recycler_view) + RecyclerView mCutRecyclerView; + @BindView(R.id.thumb_sel_time_view) + ThumbnailSelTimeView mThumbSelTimeView; + @BindView(R.id.sel_cover_video_view) + VideoView mSelCoverVideoView; + @BindView(R.id.sel_cover_tv) + TextView mSelCoverTv; + @BindView(R.id.sel_cover_frame_layout) + FrameLayout mSelCoverFrameLayout; + private List mBitmapList = new ArrayList<>(); + private String mVideoPath = "/storage/emulated/0/ych/321.mp4"; + public SelCoverAdapter mSelCoverAdapter; + private float mSelStartTime = 0.5f; + private boolean mIsSelTime;//是否点了完成按钮 + public String mVideoRotation; + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setContentView(R.layout.activity_sel_cover_time); + ButterKnife.bind(this); + mVideoPath = 
getIntent().getStringExtra(StaticFinalValues.VIDEOFILEPATH); + initThumbs(); + initSetParam(); + initView(); + initListener(); + StatusBarUtil.transparencyBar(this); + } + + private void initListener() { + mThumbSelTimeView.setOnScrollBorderListener(new ThumbnailSelTimeView.OnScrollBorderListener() { + @Override + public void OnScrollBorder(float start, float end) { + } + + @Override + public void onScrollStateChange() { + myHandler.removeMessages(SEL_TIME); + float rectLeft = mThumbSelTimeView.getRectLeft(); + mSelStartTime = (mVideoDuration * rectLeft) / 1000; + Log.e("Atest", "onScrollStateChange: " +mSelStartTime ); + mSelCoverVideoView.seekTo((int) mSelStartTime); + myHandler.sendEmptyMessage(SEL_TIME); + } + }); + } + + private void initSetParam() { + ViewGroup.LayoutParams layoutParams = mSelCoverVideoView.getLayoutParams(); + if(mVideoRotation.equals("0") && mVideoWidth > mVideoHeight) {//本地视频横屏 0表示竖屏 + layoutParams.width = 1120; + layoutParams.height = 630; + }else{ + layoutParams.width = 630; + layoutParams.height = 1120; + } + + mSelCoverVideoView.setLayoutParams(layoutParams); + mSelCoverVideoView.setVideoPath(mVideoPath); + mSelCoverVideoView.start(); + mSelCoverVideoView.getDuration(); + } + + private void initView() { + mSelCoverAdapter = new SelCoverAdapter(this); + LinearLayoutManager linearLayoutManager = new LinearLayoutManager(this, LinearLayoutManager.HORIZONTAL, false) { + @Override + public boolean canScrollHorizontally() { + return false; + } + }; + mCutRecyclerView.setLayoutManager(linearLayoutManager); + mCutRecyclerView.setAdapter(mSelCoverAdapter); + } + + public int mVideoHeight, mVideoWidth, mVideoDuration; + + private void initThumbs() { + final MediaMetadataRetriever mediaMetadata = new MediaMetadataRetriever(); + mediaMetadata.setDataSource(this, Uri.parse(mVideoPath)); + mVideoRotation = mediaMetadata.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION); + mVideoWidth = 
Integer.parseInt(mediaMetadata.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH)); + mVideoHeight = Integer.parseInt(mediaMetadata.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT)); + mVideoDuration = Integer.parseInt(mediaMetadata.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)); + final int frame = 10; + final int frameTime = mVideoDuration / frame * 1000; + new AsyncTask() { + @Override + protected Boolean doInBackground(Void... params) { + for (int x = 0; x < frame; x++) { + Bitmap bitmap = mediaMetadata.getFrameAtTime(frameTime * x, MediaMetadataRetriever.OPTION_CLOSEST_SYNC); + Message msg = myHandler.obtainMessage(); + msg.what = SAVE_BITMAP; + msg.obj = bitmap; + msg.arg1 = x; + myHandler.sendMessage(msg); + } + mediaMetadata.release(); + return true; + } + + @Override + protected void onPostExecute(Boolean result) { + myHandler.sendEmptyMessage(SUBMIT); + } + }.execute(); + } + + private Handler myHandler = new MyHandler(this); + private static class MyHandler extends Handler { + private WeakReference mActivityWeakReference; + + public MyHandler(SelCoverTimeActivity activityWeakReference) { + mActivityWeakReference = new WeakReference(activityWeakReference); + } + + @Override + public void handleMessage(Message msg) { + SelCoverTimeActivity activity = mActivityWeakReference.get(); + if (activity != null) { + switch (msg.what) { + case SEL_TIME: + activity.mSelCoverVideoView.seekTo((int) activity.mSelStartTime * 1000 ); + activity.mSelCoverVideoView.start(); + sendEmptyMessageDelayed(SEL_TIME,1000); + break; + case SAVE_BITMAP: + activity.mBitmapList.add(msg.arg1, (Bitmap) msg.obj); + break; + case SUBMIT: + activity.mSelCoverAdapter.addBitmapList(activity.mBitmapList); + sendEmptyMessageDelayed(SEL_TIME,1000); + break; + } + } + } + } + + + @OnClick({R.id.iv_back, R.id.cut_time_finish_tv}) + public void onViewClicked(View view) { + switch (view.getId()) { + case R.id.iv_back: + onBackPressed(); + break; + case 
R.id.cut_time_finish_tv: + mIsSelTime = true; + onBackPressed(); + break; + } + } + + @Override + public void onBackPressed() { + Intent intent = getIntent(); + if(mIsSelTime){ + if(mSelStartTime < 0.5f){ + mSelStartTime = 0.5f; + } + intent.putExtra(StaticFinalValues.CUT_TIME,mSelStartTime); + }else{ + intent.putExtra(StaticFinalValues.CUT_TIME,0.5f); + } + setResult(COMR_FROM_SEL_COVER_TIME_ACTIVITY,intent); + super.onBackPressed(); + } + + @Override + protected void attachBaseContext(Context newBase) { + super.attachBaseContext(new ContextWrapper(newBase) + { + @Override + public Object getSystemService(String name) + { + if (Context.AUDIO_SERVICE.equals(name)) + return getApplicationContext().getSystemService(name); + return super.getSystemService(name); + } + }); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/selCover/view/ThumbnailSelTimeView.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/selCover/view/ThumbnailSelTimeView.java new file mode 100644 index 0000000..be01439 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/selCover/view/ThumbnailSelTimeView.java @@ -0,0 +1,155 @@ +package com.aserbao.androidcustomcamera.whole.selCover.view; + +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Paint; +import android.graphics.RectF; +import android.support.annotation.Nullable; +import android.util.AttributeSet; +import android.view.MotionEvent; +import android.view.View; + +import com.aserbao.androidcustomcamera.R; + +/** + * Created by zhaoshuang on 17/8/22. 
+ */ + +public class ThumbnailSelTimeView extends View { + + private int mWidth; + private int mHeight; + private Paint mPaint; + private RectF rectF; + private int rectWidth = 100; + private OnScrollBorderListener onScrollBorderListener; + public int mDp2; + + public ThumbnailSelTimeView(Context context) { + this(context,null); + } + + public ThumbnailSelTimeView(Context context, @Nullable AttributeSet attrs) { + this(context, attrs,0); + } + + public ThumbnailSelTimeView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) { + super(context, attrs, defStyleAttr); + init(); + } + + private void init() { + mPaint = new Paint(); + mPaint.setAntiAlias(true); + mPaint.setStyle(Paint.Style.STROKE); + mDp2 = (int)getResources().getDimension(R.dimen.dp2); + rectWidth = (int)getResources().getDimension(R.dimen.dp48); + mPaint.setStrokeWidth(mDp2); + } + public interface OnScrollBorderListener{ + void OnScrollBorder(float start, float end); + void onScrollStateChange(); + } + public float getRectLeft(){ + return (float)rectF.right/(float)mWidth; + } + public void setOnScrollBorderListener(OnScrollBorderListener listener){ + this.onScrollBorderListener = listener; + } + @Override + protected void onLayout(boolean changed, int left, int top, int right, int bottom) { + super.onLayout(changed, left, top, right, bottom); + + if (mWidth == 0) { + mWidth = getWidth(); + mHeight = getHeight(); + + rectF = new RectF(); + rectF.left = 0; + rectF.top = 0; + rectF.right = rectWidth; + rectF.bottom = mHeight; + } + } + + private float downX; + private boolean scrollLeft; + private boolean scrollRight; + + @Override + public boolean onTouchEvent(MotionEvent event) { + + move(event); + return scrollLeft || scrollRight; + } + + boolean scrollChange; + private boolean move(MotionEvent event) { + switch (event.getAction()) { + case MotionEvent.ACTION_DOWN: + downX = event.getX(); + if (downX > rectF.left && downX < rectF.right) { + scrollLeft = true; + } + break; + case 
MotionEvent.ACTION_MOVE: + float moveX = event.getX(); + float scrollX = moveX - downX; + if (scrollLeft) { + rectF.left = rectF.left + scrollX; + rectF.right = rectF.left + rectWidth; + + if(rectF.left < 0){ + rectF.left = 0; + rectF.right = rectF.left + rectWidth; + } + if(rectF.right > mWidth){ + rectF.right = mWidth; + rectF.left = mWidth - rectWidth; + } + scrollChange = true; + invalidate(); + } + if(onScrollBorderListener != null){ + onScrollBorderListener.OnScrollBorder(rectF.left,rectF.right); + } + downX = moveX; + break; + case MotionEvent.ACTION_CANCEL: + case MotionEvent.ACTION_UP: + downX = 0; + scrollLeft = false; + scrollRight = false; + if(scrollChange && onScrollBorderListener != null){ + onScrollBorderListener.onScrollStateChange(); + } + scrollChange = false; + break; + } + return true; + } + + @Override + protected void onDraw(Canvas canvas) { + + mPaint.setColor(Color.WHITE); + mPaint.setStyle(Paint.Style.STROKE); + canvas.drawRect(rectF.left,rectF.top,rectF.left + rectWidth,rectF.bottom,mPaint); + mPaint.setColor(Color.parseColor("#99313133")); + mPaint.setStyle(Paint.Style.FILL); + RectF rectF3 = new RectF(); + rectF3.left = 0; + rectF3.top = 0; + rectF3.right = rectF.left - mDp2; + rectF3.bottom = mHeight; + canvas.drawRect(rectF3, mPaint); + + RectF rectF4 = new RectF(); + rectF4.left = rectF.right + mDp2; + rectF4.top = 0; + rectF4.right = mWidth; + rectF4.bottom = mHeight; + canvas.drawRect(rectF4, mPaint); + } +} \ No newline at end of file diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/videoPlayer/VideoPlayerActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/videoPlayer/VideoPlayerActivity.java index fe8712e..58a891b 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/whole/videoPlayer/VideoPlayerActivity.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/videoPlayer/VideoPlayerActivity.java @@ -1,52 +1,185 @@ package 
com.aserbao.androidcustomcamera.whole.videoPlayer; -import android.app.Activity; import android.content.Intent; -import android.text.TextUtils; -import android.widget.Toast; -import android.widget.VideoView; +import android.media.MediaPlayer; +import android.net.Uri; +import android.os.Bundle; +import android.os.Handler; +import android.os.Looper; +import android.os.SystemClock; +import android.provider.MediaStore; +import android.support.annotation.Nullable; +import android.support.v7.app.AppCompatActivity; +import android.util.Log; +import android.view.View; +import android.view.WindowManager; +import android.widget.Chronometer; +import android.widget.ImageView; +import android.widget.MediaController; import com.aserbao.androidcustomcamera.R; -import com.aserbao.androidcustomcamera.base.activity.BaseActivity; +import com.aserbao.androidcustomcamera.whole.editVideo.view.PopTopTipWindow; +import com.aserbao.androidcustomcamera.whole.videoPlayer.view.FullScreenVideoView; + +import java.io.File; import butterknife.BindView; +import butterknife.OnClick; + +public class VideoPlayerActivity extends AppCompatActivity { + + @BindView(R.id.picture_close) + ImageView pictureClose; + @BindView(R.id.meet_download) + ImageView meetDownload; + private String TAG = VideoPlayerActivity.class.getSimpleName(); + @BindView(R.id.video_view) + public FullScreenVideoView fullScreenVideoView; + + private String videoFilePath; + + @BindView(R.id.timer) + public Chronometer timer; + private PopTopTipWindow popTopTipWindow; + + + @Override + protected void onCreate(@Nullable Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); + setContentView(R.layout.activity_video_player); + initData(); + } + + + protected void initData() { + + /*** + * 将播放器关联上一个音频或者视频文件 + * videoView.setVideoURI(Uri uri) + * videoView.setVideoPath(String path) + * 以上两个方法都可以。 + */ + videoFilePath = 
getIntent().getStringExtra("videoFilePath"); + + Log.e(TAG, "videoFilePath=" + videoFilePath); + + fullScreenVideoView.setVideoPath(videoFilePath); + + /** + * w为其提供一个控制器,控制其暂停、播放……等功能 + */ + fullScreenVideoView.setMediaController(new MediaController(this)); + +// /** +// * 视频循环播放 +// */ +// fullScreenVideoView.setOnPreparedListener(new MediaPlayer.OnPreparedListener() { +// @Override +// public void onPrepared(MediaPlayer mp) { +// mp.start(); +// mp.setLooping(true); +// } +// }); + + /** + * 视频或者音频到结尾时触发的方法 + */ + fullScreenVideoView.setOnCompletionListener(new MediaPlayer.OnCompletionListener() { + @Override + public void onCompletion(MediaPlayer mp) { + Log.e("通知", "完成"); +// Toast.makeText(VideoPlayerActivity.this,"播放结束",Toast.LENGTH_LONG).show(); +// fullScreenVideoView.setVideoPath(videoFilePath); + timer.setBase(SystemClock.elapsedRealtime());//计时器清零 + int xiaoshi = (int) ((SystemClock.elapsedRealtime() - timer.getBase()) / 1000 / 60); + timer.setFormat("0"+ String.valueOf(xiaoshi)+":%s"); + timer.start(); + fullScreenVideoView.start(); + } + }); + -public class VideoPlayerActivity extends BaseActivity { + fullScreenVideoView.setOnErrorListener(new MediaPlayer.OnErrorListener() { + + @Override + public boolean onError(MediaPlayer mp, int what, int extra) { + Log.e("通知", "播放中出现错误"); + return false; + } + }); + + } - private static final String VIDEO_PATH = "video_path"; - @BindView(R.id.video_player_vv) - VideoView mVideoPlayerVv; @Override - protected int setLayoutId() { - return R.layout.activity_video_player; + protected void onResume() { + super.onResume(); + timer.setBase(SystemClock.elapsedRealtime());//计时器清零 + int xiaoshi = (int) ((SystemClock.elapsedRealtime() - timer.getBase()) / 1000 / 60); + timer.setFormat("0"+ String.valueOf(xiaoshi)+":%s"); + timer.start(); + fullScreenVideoView.start(); } + private int floatViewState = 4; + + + @Override protected void onPause() { super.onPause(); + fullScreenVideoView.pause(); } - @Override - protected void 
onDestroy() { - super.onDestroy(); - mVideoPlayerVv.resume(); - mVideoPlayerVv = null; + private boolean hasDownload = false; + + private Handler handler = new Handler(Looper.getMainLooper()); + + @OnClick({R.id.picture_close, R.id.meet_download}) + public void onClick(View view) { + switch (view.getId()) { + case R.id.picture_close: + finish(); + break; + case R.id.meet_download: + downloadVideoFile(); + break; + } } - public void initView(){ - String stringExtra = getIntent().getStringExtra(VIDEO_PATH); - if (TextUtils.isEmpty(stringExtra)) { - Toast.makeText(this, "文本路径错误", Toast.LENGTH_SHORT).show(); - }else { - mVideoPlayerVv.setVideoPath(stringExtra); - mVideoPlayerVv.start(); + private void downloadVideoFile() { + hasDownload = true; + popTopTipWindow = new PopTopTipWindow(VideoPlayerActivity.this, "保存成功"); + handler.postDelayed(new Runnable() { + @Override + public void run() { + popTopTipWindow.dimss(); + } + }, 2000); + // 把视频文件插入到系统图库 + File outFile = new File(videoFilePath); + try { + MediaStore.Images.Media.insertImage(getContentResolver(), outFile.getAbsolutePath(), outFile.getName(), ""); + } catch (Exception e) { + Log.e("FileUtils", "异常:" + e); } + // 最后通知图库更新 + sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE, Uri.parse("file://" + outFile.getAbsolutePath()))); } - public static void launch(Activity activity, String videoPath) { - Intent intent = new Intent(activity, VideoPlayerActivity.class); - intent.putExtra(VIDEO_PATH, videoPath); - activity.startActivity(intent); + + @Override + protected void onDestroy() { + super.onDestroy(); + if (!hasDownload) { + File outFile = new File(videoFilePath); + if (!outFile.exists()) { + Log.e(TAG, "文件不存在"); + } else { + outFile.delete(); + } + } } + } diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/videoPlayer/VideoPlayerActivity2.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/videoPlayer/VideoPlayerActivity2.java new file mode 100644 index 0000000..1f641d6 
--- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/videoPlayer/VideoPlayerActivity2.java @@ -0,0 +1,260 @@ +package com.aserbao.androidcustomcamera.whole.videoPlayer; + +import android.app.Activity; +import android.content.ContentResolver; +import android.content.ContentValues; +import android.content.Context; +import android.content.Intent; +import android.content.pm.ActivityInfo; +import android.content.res.Configuration; +import android.net.Uri; +import android.os.Build; +import android.os.Bundle; +import android.provider.MediaStore; +import android.support.v7.app.AppCompatActivity; +import android.view.MotionEvent; +import android.view.View; +import android.view.ViewGroup; +import android.view.Window; +import android.view.WindowManager; +import android.widget.FrameLayout; +import android.widget.RelativeLayout; +import android.widget.Toast; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.base.MyApplication; +import com.aserbao.androidcustomcamera.base.utils.DisplayUtil; +import com.aserbao.androidcustomcamera.base.utils.StaticFinalValues; +import com.aserbao.androidcustomcamera.base.utils.StatusBarUtil; +import com.aserbao.androidcustomcamera.whole.editVideo.VideoEditActivity; +import com.aserbao.androidcustomcamera.whole.jiaozivideo.JZVideoPlayer; +import com.aserbao.androidcustomcamera.whole.jiaozivideo.PublicVideoJZVideo; +import com.aserbao.androidcustomcamera.whole.selCover.SelCoverTimeActivity; + + +import java.io.File; + +import butterknife.BindView; +import butterknife.ButterKnife; +import butterknife.OnClick; + +import static com.aserbao.androidcustomcamera.base.utils.StaticFinalValues.COMR_FROM_SEL_COVER_TIME_ACTIVITY; +import static com.aserbao.androidcustomcamera.base.utils.StaticFinalValues.COMR_FROM_VIDEO_EDIT_TIME_ACTIVITY; + + +public class VideoPlayerActivity2 extends AppCompatActivity { + private static final String TAG = VideoPlayerActivity2.class.getSimpleName(); + 
@BindView(R.id.public_video_jz_video) + PublicVideoJZVideo mPublicVideoJZVideo; + + + + @BindView(R.id.pop_video_loading_fl) + FrameLayout mPopVideoLoadingFl; + + private RelativeLayout rlVideo; + private String videoFilePath = "/storage/emulated/0/ych/123.mp4", mOnLineVideoFilePath; + private Context mContext; + + public static void launch(Activity activity, String outputPath) { + Intent intent = new Intent(activity, VideoPlayerActivity2.class); + intent.putExtra(StaticFinalValues.VIDEOFILEPATH, outputPath); + activity.startActivity(intent); + } + + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + getWindow().addFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);//状态栏半透明 + requestWindowFeature(Window.FEATURE_NO_TITLE); + getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, + WindowManager.LayoutParams.FLAG_FULLSCREEN); + setContentView(R.layout.activity_video_player2); + ButterKnife.bind(this); + mContext = this; + initData(); + StatusBarUtil.transparencyBar(this); + } + + private void initData() { + videoFilePath = getIntent().getStringExtra(StaticFinalValues.VIDEOFILEPATH); + } + + + + private void playVideo() { + mPublicVideoJZVideo.setUp(videoFilePath, JZVideoPlayer.SCREEN_LAYOUT_NORMAL, ""); + mPublicVideoJZVideo.startVideo(); + } + + + @Override + protected void onResume() { + super.onResume(); + playVideo(); + } + + @Override + protected void onPause() { + super.onPause(); + JZVideoPlayer.releaseAllVideos(); + } + + @Override + protected void onStop() { + super.onStop(); + } + + @Override + protected void onDestroy() { + super.onDestroy(); + } + + // 当屏幕发生切换时调用 + @Override + public void onConfigurationChanged(Configuration newConfig) { + super.onConfigurationChanged(newConfig); + if (newConfig.orientation == Configuration.ORIENTATION_LANDSCAPE) {// 横屏 + setSystemUiHide();// 隐藏最上面那一栏 + setVideoViewScale(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);// 
设置为全屏 + + + // 强制移除半屏状态 + getWindow().clearFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN); + getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN); + } else if (newConfig.orientation == Configuration.ORIENTATION_PORTRAIT) {// 竖屏 + setSystemUiShow();// 显示最上面那一栏 + setVideoViewScale(ViewGroup.LayoutParams.MATCH_PARENT, DisplayUtil.dipToPx(this, 240)); + + + // 强制移除全屏状态 + getWindow().clearFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN); + getWindow().addFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN); + } + } + + // 设置VideoView的大小 + private void setVideoViewScale(int width, int height) { + rlVideo = (RelativeLayout) findViewById(R.id.video_layout); + ViewGroup.LayoutParams params = rlVideo.getLayoutParams(); + params.width = width; + params.height = height; + rlVideo.setLayoutParams(params); + } + + // 隐藏SystemUi + private void setSystemUiHide() { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) { + View decorView = getWindow().getDecorView(); + decorView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_STABLE + | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION + | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN + | View.SYSTEM_UI_FLAG_HIDE_NAVIGATION + | View.SYSTEM_UI_FLAG_FULLSCREEN + | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY); + } + } + + // 显示SystemUi + private void setSystemUiShow() { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) { + View decorView = getWindow().getDecorView(); + decorView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_VISIBLE); + } + } + + + + + @Override + public boolean dispatchTouchEvent(MotionEvent ev) { + if (mPopVideoLoadingFl!= null && mPopVideoLoadingFl.getVisibility() == View.VISIBLE) { + return true; + } else { + return super.dispatchTouchEvent(ev); + } + } + + @OnClick({R.id.video_player2_edit_video_tv, R.id.video_player2_sel_cover, R.id.back_iv, R.id.video_player_tv_storage, R.id.video_player_tv_public}) + public void onViewClicked(View view) { + + switch (view.getId()) { + + case 
R.id.video_player2_edit_video_tv: + Intent intent = new Intent(MyApplication.getContext(), VideoEditActivity.class); + intent.putExtra(StaticFinalValues.VIDEOFILEPATH,videoFilePath); + startActivityForResult(intent,COMR_FROM_VIDEO_EDIT_TIME_ACTIVITY); + break; + case R.id.video_player2_sel_cover: + Intent intent2 = new Intent(VideoPlayerActivity2.this, SelCoverTimeActivity.class); + intent2.putExtra(StaticFinalValues.VIDEOFILEPATH, videoFilePath); + startActivityForResult(intent2, COMR_FROM_SEL_COVER_TIME_ACTIVITY); + break; + case R.id.back_iv: + onBackPressed(); + break; + + case R.id.video_player_tv_storage: + break; + case R.id.video_player_tv_public: + storeToPhoto(videoFilePath); + break; + } + } + + // 返回事件 + @Override + public void onBackPressed() { + if (getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE) { + setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT); + } + if (mPopVideoLoadingFl!= null && mPopVideoLoadingFl.getVisibility() != View.VISIBLE) { + super.onBackPressed(); + } + } + + + @Override + protected void onActivityResult(int requestCode, int resultCode, Intent data) { + super.onActivityResult(requestCode, resultCode, data); + if (data == null) { + return; + } + switch (requestCode){ + case StaticFinalValues.COMR_FROM_VIDEO_EDIT_TIME_ACTIVITY: + videoFilePath = data.getStringExtra(StaticFinalValues.VIDEOFILEPATH); + playVideo(); + break; + case StaticFinalValues.COMR_FROM_SEL_COVER_TIME_ACTIVITY: + videoFilePath = data.getStringExtra(StaticFinalValues.VIDEOFILEPATH); + int selTime = data.getIntExtra(StaticFinalValues.CUT_TIME, 0); + Toast.makeText(mContext, String.valueOf(selTime), Toast.LENGTH_SHORT).show(); + playVideo(); + break; + } + } + + private void storeToPhoto(String path) { + ContentResolver localContentResolver = this.getContentResolver(); + /*String path = task.getPath(); + String filename = task.getFilename();*/ + ContentValues localContentValues = getVideoContentValues(this, new 
File(path), System.currentTimeMillis()); + Uri localUri = localContentResolver.insert(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, localContentValues); + Toast.makeText(mContext, "保存到相册成功,路径为"+ path, Toast.LENGTH_SHORT).show(); + } + + + public static ContentValues getVideoContentValues(Context paramContext, File paramFile, long paramLong) { + ContentValues localContentValues = new ContentValues(); + localContentValues.put("title", paramFile.getName()); + localContentValues.put("_display_name", paramFile.getName()); + localContentValues.put("mime_type", "video/3gp"); + localContentValues.put("datetaken", Long.valueOf(paramLong)); + localContentValues.put("date_modified", Long.valueOf(paramLong)); + localContentValues.put("date_added", Long.valueOf(paramLong)); + localContentValues.put("_data", paramFile.getAbsolutePath()); + localContentValues.put("_size", Long.valueOf(paramFile.length())); + return localContentValues; + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/videoPlayer/VideoViewPlayerActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/videoPlayer/VideoViewPlayerActivity.java new file mode 100644 index 0000000..875caea --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/videoPlayer/VideoViewPlayerActivity.java @@ -0,0 +1,54 @@ +package com.aserbao.androidcustomcamera.whole.videoPlayer; + +import android.app.Activity; +import android.content.Intent; +import android.text.TextUtils; +import android.widget.Toast; +import android.widget.VideoView; + +import com.aserbao.androidcustomcamera.R; +import com.aserbao.androidcustomcamera.base.activity.BaseActivity; + +import butterknife.BindView; + +import static com.aserbao.androidcustomcamera.blocks.mediaCodec.recordCamera.utils.FileUtils.VIDEO_PATH; + +public class VideoViewPlayerActivity extends BaseActivity { + + + @BindView(R.id.video_player_vv) + VideoView mVideoPlayerVv; + + @Override + protected int setLayoutId() { + return 
R.layout.activity_video_view_player; + } + + @Override + protected void onPause() { + super.onPause(); + } + + @Override + protected void onDestroy() { + super.onDestroy(); + mVideoPlayerVv.resume(); + mVideoPlayerVv = null; + } + + public void initView(){ + String stringExtra = getIntent().getStringExtra(VIDEO_PATH); + if (TextUtils.isEmpty(stringExtra)) { + Toast.makeText(this, "文本路径错误", Toast.LENGTH_SHORT).show(); + }else { + mVideoPlayerVv.setVideoPath(stringExtra); + mVideoPlayerVv.start(); + } + } + + public static void launch(Activity activity, String videoPath) { + Intent intent = new Intent(activity, VideoViewPlayerActivity.class); + intent.putExtra(VIDEO_PATH, videoPath); + activity.startActivity(intent); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/videoPlayer/view/CustomVideoView.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/videoPlayer/view/CustomVideoView.java new file mode 100644 index 0000000..19c409b --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/videoPlayer/view/CustomVideoView.java @@ -0,0 +1,38 @@ +package com.aserbao.androidcustomcamera.whole.videoPlayer.view; + +import android.content.Context; +import android.util.AttributeSet; +import android.widget.VideoView; + +/** + * date:2017/2/14 + * des:自定义VideoView,切换横屏时可以全屏播放 + * Create by suqi + */ + +public class CustomVideoView extends VideoView { + + private int defaultWidth; + private int defaultHeight; + + public CustomVideoView(Context context) { + this(context, null); + } + + public CustomVideoView(Context context, AttributeSet attrs) { + this(context, attrs, 0); + } + + public CustomVideoView(Context context, AttributeSet attrs, int defStyleAttr) { + super(context, attrs, defStyleAttr); + defaultWidth = context.getResources().getDisplayMetrics().widthPixels; + defaultHeight = context.getResources().getDisplayMetrics().heightPixels; + } + + @Override + protected void onMeasure(int widthMeasureSpec, int 
heightMeasureSpec) { + int width = getDefaultSize(defaultWidth, widthMeasureSpec); + int height = getDefaultSize(defaultHeight, heightMeasureSpec); + setMeasuredDimension(width, height); + } +} diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/videoPlayer/view/FullScreenVideoView.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/videoPlayer/view/FullScreenVideoView.java new file mode 100644 index 0000000..4537656 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/videoPlayer/view/FullScreenVideoView.java @@ -0,0 +1,37 @@ +package com.aserbao.androidcustomcamera.whole.videoPlayer.view; + +import android.content.Context; +import android.util.AttributeSet; +import android.view.MotionEvent; +import android.widget.VideoView; + +/** + * Created by Administrator on 2017/9/13. + */ + +public class FullScreenVideoView extends VideoView { + public FullScreenVideoView(Context context, AttributeSet attrs, int defStyle) { + super(context, attrs, defStyle); + } + + public FullScreenVideoView(Context context, AttributeSet attrs) { + super(context, attrs); + } + + public FullScreenVideoView(Context context) { + super(context); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {//这里重写onMeasure的方法 + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + int width = getDefaultSize(0, widthMeasureSpec);//得到默认的大小(0,宽度测量规范) + int height = getDefaultSize(0, heightMeasureSpec);//得到默认的大小(0,高度度测量规范) + setMeasuredDimension(width, height); //设置测量尺寸,将高和宽放进去 + } + + @Override + public boolean onTouchEvent(MotionEvent ev) { //不执行父类的toggleMediaControlsVisiblity();方法 弹出默认进度条 + return true; + } +} diff --git a/app/src/main/res/anim/top_enter.xml b/app/src/main/res/anim/top_enter.xml new file mode 100644 index 0000000..fcb9335 --- /dev/null +++ b/app/src/main/res/anim/top_enter.xml @@ -0,0 +1,8 @@ + + + + + diff --git a/app/src/main/res/anim/top_exit.xml b/app/src/main/res/anim/top_exit.xml new file 
mode 100644 index 0000000..ae0cf60 --- /dev/null +++ b/app/src/main/res/anim/top_exit.xml @@ -0,0 +1,9 @@ + + + + + + \ No newline at end of file diff --git a/app/src/main/res/drawable-hdpi/back_white.png b/app/src/main/res/drawable-hdpi/back_white.png new file mode 100644 index 0000000..f722354 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/back_white.png differ diff --git a/app/src/main/res/drawable-hdpi/bigicon_backwhite.png b/app/src/main/res/drawable-hdpi/bigicon_backwhite.png new file mode 100644 index 0000000..c46a9e2 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/bigicon_backwhite.png differ diff --git a/app/src/main/res/drawable-hdpi/bigicon_center.png b/app/src/main/res/drawable-hdpi/bigicon_center.png new file mode 100644 index 0000000..51573e5 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/bigicon_center.png differ diff --git a/app/src/main/res/drawable-hdpi/bigicon_timeout_small.png b/app/src/main/res/drawable-hdpi/bigicon_timeout_small.png new file mode 100644 index 0000000..c0a895c Binary files /dev/null and b/app/src/main/res/drawable-hdpi/bigicon_timeout_small.png differ diff --git a/app/src/main/res/drawable-hdpi/bigicon_video_rotate.png b/app/src/main/res/drawable-hdpi/bigicon_video_rotate.png new file mode 100644 index 0000000..c660b57 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/bigicon_video_rotate.png differ diff --git a/app/src/main/res/drawable-hdpi/bt_clip.png b/app/src/main/res/drawable-hdpi/bt_clip.png new file mode 100644 index 0000000..36932f7 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/bt_clip.png differ diff --git a/app/src/main/res/drawable-hdpi/bt_cover.png b/app/src/main/res/drawable-hdpi/bt_cover.png new file mode 100644 index 0000000..9668dee Binary files /dev/null and b/app/src/main/res/drawable-hdpi/bt_cover.png differ diff --git a/app/src/main/res/drawable-hdpi/bubble1.png b/app/src/main/res/drawable-hdpi/bubble1.png new file mode 100644 index 
0000000..8dd8752 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/bubble1.png differ diff --git a/app/src/main/res/drawable-hdpi/bubble2.png b/app/src/main/res/drawable-hdpi/bubble2.png new file mode 100644 index 0000000..cbb0e6b Binary files /dev/null and b/app/src/main/res/drawable-hdpi/bubble2.png differ diff --git a/app/src/main/res/drawable-hdpi/bubble3.png b/app/src/main/res/drawable-hdpi/bubble3.png new file mode 100644 index 0000000..7f8eae8 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/bubble3.png differ diff --git a/app/src/main/res/drawable-hdpi/bubble4.png b/app/src/main/res/drawable-hdpi/bubble4.png new file mode 100644 index 0000000..143ec82 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/bubble4.png differ diff --git a/app/src/main/res/drawable-hdpi/bubble5.png b/app/src/main/res/drawable-hdpi/bubble5.png new file mode 100644 index 0000000..8dd0d61 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/bubble5.png differ diff --git a/app/src/main/res/drawable-hdpi/bubble6.png b/app/src/main/res/drawable-hdpi/bubble6.png new file mode 100644 index 0000000..3999f9b Binary files /dev/null and b/app/src/main/res/drawable-hdpi/bubble6.png differ diff --git a/app/src/main/res/drawable-hdpi/bubble7.png b/app/src/main/res/drawable-hdpi/bubble7.png new file mode 100644 index 0000000..f8c77f0 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/bubble7.png differ diff --git a/app/src/main/res/drawable-hdpi/bubble8.png b/app/src/main/res/drawable-hdpi/bubble8.png new file mode 100644 index 0000000..2ccb552 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/bubble8.png differ diff --git a/app/src/main/res/drawable-hdpi/bubbleeight.png b/app/src/main/res/drawable-hdpi/bubbleeight.png new file mode 100644 index 0000000..3c6e60a Binary files /dev/null and b/app/src/main/res/drawable-hdpi/bubbleeight.png differ diff --git a/app/src/main/res/drawable-hdpi/bubblefive.png 
b/app/src/main/res/drawable-hdpi/bubblefive.png new file mode 100644 index 0000000..b9c7720 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/bubblefive.png differ diff --git a/app/src/main/res/drawable-hdpi/bubblefour.png b/app/src/main/res/drawable-hdpi/bubblefour.png new file mode 100644 index 0000000..ecc38e0 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/bubblefour.png differ diff --git a/app/src/main/res/drawable-hdpi/bubbleone.png b/app/src/main/res/drawable-hdpi/bubbleone.png new file mode 100644 index 0000000..5ea2c05 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/bubbleone.png differ diff --git a/app/src/main/res/drawable-hdpi/bubbleseven.png b/app/src/main/res/drawable-hdpi/bubbleseven.png new file mode 100644 index 0000000..d184c4d Binary files /dev/null and b/app/src/main/res/drawable-hdpi/bubbleseven.png differ diff --git a/app/src/main/res/drawable-hdpi/bubblesix.png b/app/src/main/res/drawable-hdpi/bubblesix.png new file mode 100644 index 0000000..173e294 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/bubblesix.png differ diff --git a/app/src/main/res/drawable-hdpi/bubblethree.png b/app/src/main/res/drawable-hdpi/bubblethree.png new file mode 100644 index 0000000..0683ae7 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/bubblethree.png differ diff --git a/app/src/main/res/drawable-hdpi/bubbletwo.png b/app/src/main/res/drawable-hdpi/bubbletwo.png new file mode 100644 index 0000000..1a22e67 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/bubbletwo.png differ diff --git a/app/src/main/res/drawable-hdpi/bufuhanzhe.png b/app/src/main/res/drawable-hdpi/bufuhanzhe.png new file mode 100644 index 0000000..042246b Binary files /dev/null and b/app/src/main/res/drawable-hdpi/bufuhanzhe.png differ diff --git a/app/src/main/res/drawable-hdpi/burangwo.png b/app/src/main/res/drawable-hdpi/burangwo.png new file mode 100644 index 0000000..8a90893 Binary files /dev/null and 
b/app/src/main/res/drawable-hdpi/burangwo.png differ diff --git a/app/src/main/res/drawable-hdpi/buyue.png b/app/src/main/res/drawable-hdpi/buyue.png new file mode 100644 index 0000000..440d615 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/buyue.png differ diff --git a/app/src/main/res/drawable-hdpi/camera_delete.png b/app/src/main/res/drawable-hdpi/camera_delete.png new file mode 100644 index 0000000..78ac744 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/camera_delete.png differ diff --git a/app/src/main/res/drawable-hdpi/camera_deletion.png b/app/src/main/res/drawable-hdpi/camera_deletion.png new file mode 100644 index 0000000..7f5eeab Binary files /dev/null and b/app/src/main/res/drawable-hdpi/camera_deletion.png differ diff --git a/app/src/main/res/drawable-hdpi/camera_filter1.png b/app/src/main/res/drawable-hdpi/camera_filter1.png new file mode 100644 index 0000000..711049a Binary files /dev/null and b/app/src/main/res/drawable-hdpi/camera_filter1.png differ diff --git a/app/src/main/res/drawable-hdpi/camera_filter2.png b/app/src/main/res/drawable-hdpi/camera_filter2.png new file mode 100644 index 0000000..8695a90 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/camera_filter2.png differ diff --git a/app/src/main/res/drawable-hdpi/camera_filter3.png b/app/src/main/res/drawable-hdpi/camera_filter3.png new file mode 100644 index 0000000..2338d60 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/camera_filter3.png differ diff --git a/app/src/main/res/drawable-hdpi/camera_filter4.png b/app/src/main/res/drawable-hdpi/camera_filter4.png new file mode 100644 index 0000000..fe54fb9 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/camera_filter4.png differ diff --git a/app/src/main/res/drawable-hdpi/camera_filter5.png b/app/src/main/res/drawable-hdpi/camera_filter5.png new file mode 100644 index 0000000..600d49b Binary files /dev/null and b/app/src/main/res/drawable-hdpi/camera_filter5.png differ diff --git 
a/app/src/main/res/drawable-hdpi/camera_filter6.png b/app/src/main/res/drawable-hdpi/camera_filter6.png new file mode 100644 index 0000000..b0bbd89 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/camera_filter6.png differ diff --git a/app/src/main/res/drawable-hdpi/camera_play.png b/app/src/main/res/drawable-hdpi/camera_play.png new file mode 100644 index 0000000..b9f24f3 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/camera_play.png differ diff --git a/app/src/main/res/drawable-hdpi/camera_rotate.png b/app/src/main/res/drawable-hdpi/camera_rotate.png new file mode 100644 index 0000000..ef334a8 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/camera_rotate.png differ diff --git a/app/src/main/res/drawable-hdpi/camera_scaling.png b/app/src/main/res/drawable-hdpi/camera_scaling.png new file mode 100644 index 0000000..1e90239 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/camera_scaling.png differ diff --git a/app/src/main/res/drawable-hdpi/camera_select_normal.png b/app/src/main/res/drawable-hdpi/camera_select_normal.png new file mode 100644 index 0000000..8899383 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/camera_select_normal.png differ diff --git a/app/src/main/res/drawable-hdpi/camera_sticker.png b/app/src/main/res/drawable-hdpi/camera_sticker.png new file mode 100644 index 0000000..11b813a Binary files /dev/null and b/app/src/main/res/drawable-hdpi/camera_sticker.png differ diff --git a/app/src/main/res/drawable-hdpi/camera_subtitle.png b/app/src/main/res/drawable-hdpi/camera_subtitle.png new file mode 100644 index 0000000..a1c1602 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/camera_subtitle.png differ diff --git a/app/src/main/res/drawable-hdpi/color_fc4253_radius_5_bg.xml b/app/src/main/res/drawable-hdpi/color_fc4253_radius_5_bg.xml new file mode 100644 index 0000000..19b1825 --- /dev/null +++ b/app/src/main/res/drawable-hdpi/color_fc4253_radius_5_bg.xml @@ -0,0 +1,5 @@ + + + + + \ 
No newline at end of file diff --git a/app/src/main/res/drawable-hdpi/find_bottom.png b/app/src/main/res/drawable-hdpi/find_bottom.png new file mode 100644 index 0000000..ed2e74c Binary files /dev/null and b/app/src/main/res/drawable-hdpi/find_bottom.png differ diff --git a/app/src/main/res/drawable-hdpi/global_back.png b/app/src/main/res/drawable-hdpi/global_back.png new file mode 100644 index 0000000..1e215ea Binary files /dev/null and b/app/src/main/res/drawable-hdpi/global_back.png differ diff --git a/app/src/main/res/drawable-hdpi/index_play.png b/app/src/main/res/drawable-hdpi/index_play.png new file mode 100644 index 0000000..34f36b7 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/index_play.png differ diff --git a/app/src/main/res/drawable-hdpi/jz_back_tiny_normal.png b/app/src/main/res/drawable-hdpi/jz_back_tiny_normal.png new file mode 100644 index 0000000..e6ca6f4 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/jz_back_tiny_normal.png differ diff --git a/app/src/main/res/drawable-hdpi/jz_back_tiny_pressed.png b/app/src/main/res/drawable-hdpi/jz_back_tiny_pressed.png new file mode 100644 index 0000000..2ca8252 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/jz_back_tiny_pressed.png differ diff --git a/app/src/main/res/drawable-hdpi/jz_loading.xml b/app/src/main/res/drawable-hdpi/jz_loading.xml new file mode 100644 index 0000000..63a4188 --- /dev/null +++ b/app/src/main/res/drawable-hdpi/jz_loading.xml @@ -0,0 +1,7 @@ + + diff --git a/app/src/main/res/drawable-hdpi/jz_loading_bg.png b/app/src/main/res/drawable-hdpi/jz_loading_bg.png new file mode 100644 index 0000000..2b3d375 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/jz_loading_bg.png differ diff --git a/app/src/main/res/drawable-hdpi/jz_seek_thumb_normal.xml b/app/src/main/res/drawable-hdpi/jz_seek_thumb_normal.xml new file mode 100644 index 0000000..e928712 --- /dev/null +++ b/app/src/main/res/drawable-hdpi/jz_seek_thumb_normal.xml @@ -0,0 +1,8 @@ + + 
+ + + diff --git a/app/src/main/res/drawable-hdpi/jz_seek_thumb_pressed.xml b/app/src/main/res/drawable-hdpi/jz_seek_thumb_pressed.xml new file mode 100644 index 0000000..14e409a --- /dev/null +++ b/app/src/main/res/drawable-hdpi/jz_seek_thumb_pressed.xml @@ -0,0 +1,8 @@ + + + + + diff --git a/app/src/main/res/drawable-hdpi/my_camera.png b/app/src/main/res/drawable-hdpi/my_camera.png new file mode 100644 index 0000000..d370bf8 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/my_camera.png differ diff --git a/app/src/main/res/drawable-hdpi/notice_select.png b/app/src/main/res/drawable-hdpi/notice_select.png new file mode 100644 index 0000000..7c0ecc7 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/notice_select.png differ diff --git a/app/src/main/res/drawable-hdpi/notice_unselect.png b/app/src/main/res/drawable-hdpi/notice_unselect.png new file mode 100644 index 0000000..1f089f8 Binary files /dev/null and b/app/src/main/res/drawable-hdpi/notice_unselect.png differ diff --git a/app/src/main/res/drawable-hdpi/tv_circle_white10_bg.xml b/app/src/main/res/drawable-hdpi/tv_circle_white10_bg.xml index adae916..55380df 100644 --- a/app/src/main/res/drawable-hdpi/tv_circle_white10_bg.xml +++ b/app/src/main/res/drawable-hdpi/tv_circle_white10_bg.xml @@ -1,5 +1,5 @@ - + \ No newline at end of file diff --git a/app/src/main/res/drawable-hdpi/tv_circle_white40_bg.xml b/app/src/main/res/drawable-hdpi/tv_circle_white40_bg.xml index 523001f..43b88c3 100644 --- a/app/src/main/res/drawable-hdpi/tv_circle_white40_bg.xml +++ b/app/src/main/res/drawable-hdpi/tv_circle_white40_bg.xml @@ -1,5 +1,5 @@ - + \ No newline at end of file diff --git a/app/src/main/res/drawable-v24/ic_launcher_foreground.xml b/app/src/main/res/drawable-v24/ic_launcher_foreground.xml index c3903ed..fc6d884 100644 --- a/app/src/main/res/drawable-v24/ic_launcher_foreground.xml +++ b/app/src/main/res/drawable-v24/ic_launcher_foreground.xml @@ -4,7 +4,7 @@ android:height="108dp" 
android:viewportHeight="108" android:viewportWidth="108"> - - - + + + + + diff --git a/app/src/main/res/drawable-xhdpi/jz_close_volume.png b/app/src/main/res/drawable-xhdpi/jz_close_volume.png new file mode 100644 index 0000000..1e08431 Binary files /dev/null and b/app/src/main/res/drawable-xhdpi/jz_close_volume.png differ diff --git a/app/src/main/res/drawable-xhdpi/jz_dialog_progress.xml b/app/src/main/res/drawable-xhdpi/jz_dialog_progress.xml new file mode 100644 index 0000000..35179bd --- /dev/null +++ b/app/src/main/res/drawable-xhdpi/jz_dialog_progress.xml @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + + + diff --git a/app/src/main/res/drawable-xhdpi/jz_error_normal.png b/app/src/main/res/drawable-xhdpi/jz_error_normal.png new file mode 100644 index 0000000..a09424c Binary files /dev/null and b/app/src/main/res/drawable-xhdpi/jz_error_normal.png differ diff --git a/app/src/main/res/drawable-xhdpi/jz_error_pressed.png b/app/src/main/res/drawable-xhdpi/jz_error_pressed.png new file mode 100644 index 0000000..4fc8372 Binary files /dev/null and b/app/src/main/res/drawable-xhdpi/jz_error_pressed.png differ diff --git a/app/src/main/res/drawable-xhdpi/jz_forward_icon.png b/app/src/main/res/drawable-xhdpi/jz_forward_icon.png new file mode 100644 index 0000000..f7e3188 Binary files /dev/null and b/app/src/main/res/drawable-xhdpi/jz_forward_icon.png differ diff --git a/app/src/main/res/drawable-xhdpi/jz_restart_normal.png b/app/src/main/res/drawable-xhdpi/jz_restart_normal.png new file mode 100644 index 0000000..618616e Binary files /dev/null and b/app/src/main/res/drawable-xhdpi/jz_restart_normal.png differ diff --git a/app/src/main/res/drawable-xhdpi/jz_restart_pressed.png b/app/src/main/res/drawable-xhdpi/jz_restart_pressed.png new file mode 100644 index 0000000..21d4632 Binary files /dev/null and b/app/src/main/res/drawable-xhdpi/jz_restart_pressed.png differ diff --git a/app/src/main/res/drawable-xhdpi/mudengkoudai.png 
b/app/src/main/res/drawable-xhdpi/mudengkoudai.png new file mode 100644 index 0000000..8e1ff21 Binary files /dev/null and b/app/src/main/res/drawable-xhdpi/mudengkoudai.png differ diff --git a/app/src/main/res/drawable-xhdpi/nizabushagntian.png b/app/src/main/res/drawable-xhdpi/nizabushagntian.png new file mode 100644 index 0000000..e7a684d Binary files /dev/null and b/app/src/main/res/drawable-xhdpi/nizabushagntian.png differ diff --git a/app/src/main/res/drawable-xhdpi/nizaidouwo.png b/app/src/main/res/drawable-xhdpi/nizaidouwo.png new file mode 100644 index 0000000..b3055f2 Binary files /dev/null and b/app/src/main/res/drawable-xhdpi/nizaidouwo.png differ diff --git a/app/src/main/res/drawable-xhdpi/video_player_tv_bg_gray.xml b/app/src/main/res/drawable-xhdpi/video_player_tv_bg_gray.xml new file mode 100644 index 0000000..2215967 --- /dev/null +++ b/app/src/main/res/drawable-xhdpi/video_player_tv_bg_gray.xml @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/app/src/main/res/drawable-xhdpi/xiase.png b/app/src/main/res/drawable-xhdpi/xiase.png new file mode 100644 index 0000000..9264d14 Binary files /dev/null and b/app/src/main/res/drawable-xhdpi/xiase.png differ diff --git a/app/src/main/res/drawable-xhdpi/zan.png b/app/src/main/res/drawable-xhdpi/zan.png new file mode 100644 index 0000000..662e0b2 Binary files /dev/null and b/app/src/main/res/drawable-xhdpi/zan.png differ diff --git a/app/src/main/res/drawable-xxhdpi/aini.png b/app/src/main/res/drawable-xxhdpi/aini.png new file mode 100644 index 0000000..ccddde5 Binary files /dev/null and b/app/src/main/res/drawable-xxhdpi/aini.png differ diff --git a/app/src/main/res/drawable-xxhdpi/back_white_no_shadow.png b/app/src/main/res/drawable-xxhdpi/back_white_no_shadow.png new file mode 100644 index 0000000..2134b7f Binary files /dev/null and b/app/src/main/res/drawable-xxhdpi/back_white_no_shadow.png differ diff --git a/app/src/main/res/drawable-xxhdpi/bigicon_backwhite.png 
b/app/src/main/res/drawable-xxhdpi/bigicon_backwhite.png new file mode 100644 index 0000000..c46a9e2 Binary files /dev/null and b/app/src/main/res/drawable-xxhdpi/bigicon_backwhite.png differ diff --git a/app/src/main/res/drawable-xxhdpi/btn_black50_r15_bg.xml b/app/src/main/res/drawable-xxhdpi/btn_black50_r15_bg.xml new file mode 100644 index 0000000..d2363da --- /dev/null +++ b/app/src/main/res/drawable-xxhdpi/btn_black50_r15_bg.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/app/src/main/res/drawable-xxhdpi/btn_edit_next_bg.xml b/app/src/main/res/drawable-xxhdpi/btn_edit_next_bg.xml new file mode 100644 index 0000000..b792d9b --- /dev/null +++ b/app/src/main/res/drawable-xxhdpi/btn_edit_next_bg.xml @@ -0,0 +1,11 @@ + + + + + + + \ No newline at end of file diff --git a/app/src/main/res/drawable-xxhdpi/camera_select_selected.png b/app/src/main/res/drawable-xxhdpi/camera_select_selected.png new file mode 100644 index 0000000..b72a11d Binary files /dev/null and b/app/src/main/res/drawable-xxhdpi/camera_select_selected.png differ diff --git a/app/src/main/res/drawable-xxhdpi/color_010101_radius_15_bg.xml b/app/src/main/res/drawable-xxhdpi/color_010101_radius_15_bg.xml new file mode 100644 index 0000000..943a272 --- /dev/null +++ b/app/src/main/res/drawable-xxhdpi/color_010101_radius_15_bg.xml @@ -0,0 +1,10 @@ + + + + + + \ No newline at end of file diff --git a/app/src/main/res/drawable-xxhdpi/color_050505_radiu_10_bg.xml b/app/src/main/res/drawable-xxhdpi/color_050505_radiu_10_bg.xml new file mode 100644 index 0000000..c19604f --- /dev/null +++ b/app/src/main/res/drawable-xxhdpi/color_050505_radiu_10_bg.xml @@ -0,0 +1,8 @@ + + + + + + \ No newline at end of file diff --git a/app/src/main/res/drawable-xxhdpi/color_333333_radius_14_bg.xml b/app/src/main/res/drawable-xxhdpi/color_333333_radius_14_bg.xml new file mode 100644 index 0000000..3c38e9f --- /dev/null +++ b/app/src/main/res/drawable-xxhdpi/color_333333_radius_14_bg.xml @@ -0,0 +1,6 @@ 
+ + + + + + \ No newline at end of file diff --git a/app/src/main/res/drawable-xxhdpi/color_aa000000_radius_50_bg.xml b/app/src/main/res/drawable-xxhdpi/color_aa000000_radius_50_bg.xml new file mode 100644 index 0000000..b50871e --- /dev/null +++ b/app/src/main/res/drawable-xxhdpi/color_aa000000_radius_50_bg.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/app/src/main/res/drawable-xxhdpi/color_cc000000_radius_6_bg.xml b/app/src/main/res/drawable-xxhdpi/color_cc000000_radius_6_bg.xml new file mode 100644 index 0000000..421a56c --- /dev/null +++ b/app/src/main/res/drawable-xxhdpi/color_cc000000_radius_6_bg.xml @@ -0,0 +1,9 @@ + + + + + \ No newline at end of file diff --git a/app/src/main/res/drawable-xxhdpi/color_efefef_radius_100_bg.xml b/app/src/main/res/drawable-xxhdpi/color_efefef_radius_100_bg.xml new file mode 100644 index 0000000..4b785af --- /dev/null +++ b/app/src/main/res/drawable-xxhdpi/color_efefef_radius_100_bg.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/app/src/main/res/drawable-xxhdpi/color_fc4253_radius_7_bg.xml b/app/src/main/res/drawable-xxhdpi/color_fc4253_radius_7_bg.xml new file mode 100644 index 0000000..7274dbd --- /dev/null +++ b/app/src/main/res/drawable-xxhdpi/color_fc4253_radius_7_bg.xml @@ -0,0 +1,11 @@ + + + + + + + \ No newline at end of file diff --git a/app/src/main/res/drawable-xxhdpi/filter_item_bg.xml b/app/src/main/res/drawable-xxhdpi/filter_item_bg.xml new file mode 100644 index 0000000..8878e3d --- /dev/null +++ b/app/src/main/res/drawable-xxhdpi/filter_item_bg.xml @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/app/src/main/res/drawable-xxhdpi/index_timeout.png b/app/src/main/res/drawable-xxhdpi/index_timeout.png new file mode 100644 index 0000000..b89ae3e Binary files /dev/null and b/app/src/main/res/drawable-xxhdpi/index_timeout.png differ diff --git a/app/src/main/res/drawable-xxhdpi/item_message_check_bg.xml 
b/app/src/main/res/drawable-xxhdpi/item_message_check_bg.xml new file mode 100644 index 0000000..ffdef3d --- /dev/null +++ b/app/src/main/res/drawable-xxhdpi/item_message_check_bg.xml @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/app/src/main/res/drawable-xxhdpi/jz_battery_level_10.png b/app/src/main/res/drawable-xxhdpi/jz_battery_level_10.png new file mode 100644 index 0000000..1d5b5f6 Binary files /dev/null and b/app/src/main/res/drawable-xxhdpi/jz_battery_level_10.png differ diff --git a/app/src/main/res/drawable-xxhdpi/jz_battery_level_100.png b/app/src/main/res/drawable-xxhdpi/jz_battery_level_100.png new file mode 100644 index 0000000..6f0cf51 Binary files /dev/null and b/app/src/main/res/drawable-xxhdpi/jz_battery_level_100.png differ diff --git a/app/src/main/res/drawable-xxhdpi/jz_battery_level_30.png b/app/src/main/res/drawable-xxhdpi/jz_battery_level_30.png new file mode 100644 index 0000000..947e905 Binary files /dev/null and b/app/src/main/res/drawable-xxhdpi/jz_battery_level_30.png differ diff --git a/app/src/main/res/drawable-xxhdpi/jz_battery_level_50.png b/app/src/main/res/drawable-xxhdpi/jz_battery_level_50.png new file mode 100644 index 0000000..e4c75ff Binary files /dev/null and b/app/src/main/res/drawable-xxhdpi/jz_battery_level_50.png differ diff --git a/app/src/main/res/drawable-xxhdpi/jz_battery_level_70.png b/app/src/main/res/drawable-xxhdpi/jz_battery_level_70.png new file mode 100644 index 0000000..25177bf Binary files /dev/null and b/app/src/main/res/drawable-xxhdpi/jz_battery_level_70.png differ diff --git a/app/src/main/res/drawable-xxhdpi/jz_battery_level_90.png b/app/src/main/res/drawable-xxhdpi/jz_battery_level_90.png new file mode 100644 index 0000000..6c681c5 Binary files /dev/null and b/app/src/main/res/drawable-xxhdpi/jz_battery_level_90.png differ diff --git a/app/src/main/res/drawable-xxhdpi/jz_bottom_progress.xml b/app/src/main/res/drawable-xxhdpi/jz_bottom_progress.xml new file mode 100644 index 
0000000..a825aab --- /dev/null +++ b/app/src/main/res/drawable-xxhdpi/jz_bottom_progress.xml @@ -0,0 +1,28 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/app/src/main/res/drawable-xxhdpi/jz_bottom_seek_progress.xml b/app/src/main/res/drawable-xxhdpi/jz_bottom_seek_progress.xml new file mode 100644 index 0000000..d9db9e1 --- /dev/null +++ b/app/src/main/res/drawable-xxhdpi/jz_bottom_seek_progress.xml @@ -0,0 +1,28 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/app/src/main/res/drawable-xxhdpi/jz_bottom_seek_thumb.xml b/app/src/main/res/drawable-xxhdpi/jz_bottom_seek_thumb.xml new file mode 100644 index 0000000..b68ee1e --- /dev/null +++ b/app/src/main/res/drawable-xxhdpi/jz_bottom_seek_thumb.xml @@ -0,0 +1,5 @@ + + + + + diff --git a/app/src/main/res/drawable-xxhdpi/jz_click_error_selector.xml b/app/src/main/res/drawable-xxhdpi/jz_click_error_selector.xml new file mode 100644 index 0000000..403021c --- /dev/null +++ b/app/src/main/res/drawable-xxhdpi/jz_click_error_selector.xml @@ -0,0 +1,5 @@ + + + + + diff --git a/app/src/main/res/drawable-xxhdpi/jz_click_pause_selector.xml b/app/src/main/res/drawable-xxhdpi/jz_click_pause_selector.xml new file mode 100644 index 0000000..fe59c97 --- /dev/null +++ b/app/src/main/res/drawable-xxhdpi/jz_click_pause_selector.xml @@ -0,0 +1,8 @@ + + + + + + + diff --git a/app/src/main/res/drawable-xxhdpi/jz_click_play_selector.xml b/app/src/main/res/drawable-xxhdpi/jz_click_play_selector.xml new file mode 100644 index 0000000..4465c0c --- /dev/null +++ b/app/src/main/res/drawable-xxhdpi/jz_click_play_selector.xml @@ -0,0 +1,7 @@ + + + + + + diff --git a/app/src/main/res/drawable-xxhdpi/jz_click_replay_selector.xml b/app/src/main/res/drawable-xxhdpi/jz_click_replay_selector.xml new file mode 100644 index 0000000..9b5ca8c --- /dev/null +++ b/app/src/main/res/drawable-xxhdpi/jz_click_replay_selector.xml @@ -0,0 +1,5 @@ + + + + + diff --git 
a/app/src/main/res/drawable-xxhdpi/jz_click_share_selector.xml b/app/src/main/res/drawable-xxhdpi/jz_click_share_selector.xml new file mode 100644 index 0000000..0de0b6f --- /dev/null +++ b/app/src/main/res/drawable-xxhdpi/jz_click_share_selector.xml @@ -0,0 +1,5 @@ + + + + + diff --git a/app/src/main/res/drawable-xxhdpi/jz_enlarge.png b/app/src/main/res/drawable-xxhdpi/jz_enlarge.png new file mode 100644 index 0000000..190fc88 Binary files /dev/null and b/app/src/main/res/drawable-xxhdpi/jz_enlarge.png differ diff --git a/app/src/main/res/drawable-xxhdpi/jz_share_normal.png b/app/src/main/res/drawable-xxhdpi/jz_share_normal.png new file mode 100644 index 0000000..73d62e1 Binary files /dev/null and b/app/src/main/res/drawable-xxhdpi/jz_share_normal.png differ diff --git a/app/src/main/res/drawable-xxhdpi/jz_share_pressed.png b/app/src/main/res/drawable-xxhdpi/jz_share_pressed.png new file mode 100644 index 0000000..e899314 Binary files /dev/null and b/app/src/main/res/drawable-xxhdpi/jz_share_pressed.png differ diff --git a/app/src/main/res/drawable-xxhdpi/jz_shrink.png b/app/src/main/res/drawable-xxhdpi/jz_shrink.png new file mode 100644 index 0000000..cfa8ced Binary files /dev/null and b/app/src/main/res/drawable-xxhdpi/jz_shrink.png differ diff --git a/app/src/main/res/drawable-xxhdpi/loading1.png b/app/src/main/res/drawable-xxhdpi/loading1.png new file mode 100644 index 0000000..76de601 Binary files /dev/null and b/app/src/main/res/drawable-xxhdpi/loading1.png differ diff --git a/app/src/main/res/drawable-xxhdpi/loading_video_progress.xml b/app/src/main/res/drawable-xxhdpi/loading_video_progress.xml new file mode 100644 index 0000000..cf035fd --- /dev/null +++ b/app/src/main/res/drawable-xxhdpi/loading_video_progress.xml @@ -0,0 +1,21 @@ + + + + + diff --git a/app/src/main/res/drawable-xxhdpi/vw_ic_arrow_down.png b/app/src/main/res/drawable-xxhdpi/vw_ic_arrow_down.png new file mode 100644 index 0000000..a978cf1 Binary files /dev/null and 
b/app/src/main/res/drawable-xxhdpi/vw_ic_arrow_down.png differ diff --git a/app/src/main/res/drawable-xxhdpi/vw_ic_back.png b/app/src/main/res/drawable-xxhdpi/vw_ic_back.png new file mode 100644 index 0000000..216b988 Binary files /dev/null and b/app/src/main/res/drawable-xxhdpi/vw_ic_back.png differ diff --git a/app/src/main/res/drawable-xxhdpi/vw_ic_camera.png b/app/src/main/res/drawable-xxhdpi/vw_ic_camera.png new file mode 100644 index 0000000..58fe265 Binary files /dev/null and b/app/src/main/res/drawable-xxhdpi/vw_ic_camera.png differ diff --git a/app/src/main/res/drawable-xxhdpi/vw_ic_checked.png b/app/src/main/res/drawable-xxhdpi/vw_ic_checked.png new file mode 100644 index 0000000..f3c2aa4 Binary files /dev/null and b/app/src/main/res/drawable-xxhdpi/vw_ic_checked.png differ diff --git a/app/src/main/res/drawable-xxhdpi/vw_ic_record_audio.png b/app/src/main/res/drawable-xxhdpi/vw_ic_record_audio.png new file mode 100644 index 0000000..2756836 Binary files /dev/null and b/app/src/main/res/drawable-xxhdpi/vw_ic_record_audio.png differ diff --git a/app/src/main/res/drawable-xxhdpi/vw_ic_uncheck.png b/app/src/main/res/drawable-xxhdpi/vw_ic_uncheck.png new file mode 100644 index 0000000..5ee0495 Binary files /dev/null and b/app/src/main/res/drawable-xxhdpi/vw_ic_uncheck.png differ diff --git a/app/src/main/res/drawable-xxhdpi/vw_selector_cbx.xml b/app/src/main/res/drawable-xxhdpi/vw_selector_cbx.xml new file mode 100644 index 0000000..e94fa61 --- /dev/null +++ b/app/src/main/res/drawable-xxhdpi/vw_selector_cbx.xml @@ -0,0 +1,7 @@ + + + + + \ No newline at end of file diff --git a/app/src/main/res/drawable-xxhdpi/welcome.jpg b/app/src/main/res/drawable-xxhdpi/welcome.jpg new file mode 100644 index 0000000..47c3b65 Binary files /dev/null and b/app/src/main/res/drawable-xxhdpi/welcome.jpg differ diff --git a/app/src/main/res/drawable/bg.png b/app/src/main/res/drawable/bg.png new file mode 100644 index 0000000..3e7b722 Binary files /dev/null and 
b/app/src/main/res/drawable/bg.png differ diff --git a/app/src/main/res/drawable/change_hue.xml b/app/src/main/res/drawable/change_hue.xml new file mode 100644 index 0000000..41f87c6 --- /dev/null +++ b/app/src/main/res/drawable/change_hue.xml @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/app/src/main/res/drawable/color_white60_radius_5_bg.xml b/app/src/main/res/drawable/color_white60_radius_5_bg.xml new file mode 100644 index 0000000..3e30079 --- /dev/null +++ b/app/src/main/res/drawable/color_white60_radius_5_bg.xml @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/app/src/main/res/drawable/ic_launcher_background.xml b/app/src/main/res/drawable/ic_launcher_background.xml index 5713f34..0459402 100644 --- a/app/src/main/res/drawable/ic_launcher_background.xml +++ b/app/src/main/res/drawable/ic_launcher_background.xml @@ -5,165 +5,165 @@ android:height="108dp" android:viewportHeight="108" android:viewportWidth="108"> - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + +