diff --git a/.DS_Store b/.DS_Store new file mode 100644 index 0000000..04f40f9 Binary files /dev/null and b/.DS_Store differ
diff --git a/.gitignore b/.gitignore deleted file mode 100644 index 2461c3b..0000000 --- a/.gitignore +++ /dev/null @@ -1,12 +0,0 @@
-*.iml
-.cxx
-.idea
-.gradle
-/local.properties
-/.idea/workspace.xml
-/.idea/libraries
-.DS_Store
-/build
-/capturess
-.externalNativeBuild
-/app/.cxx/
diff --git a/.idea/caches/build_file_checksums.ser b/.idea/caches/build_file_checksums.ser deleted file mode 100644 index 4c444d8..0000000 Binary files a/.idea/caches/build_file_checksums.ser and /dev/null differ
diff --git a/.idea/codeStyles/Project.xml b/.idea/codeStyles/Project.xml deleted file mode 100644 index 681f41a..0000000 --- a/.idea/codeStyles/Project.xml +++ /dev/null @@ -1,116 +0,0 @@
\ No newline at end of file
diff --git a/.idea/encodings.xml b/.idea/encodings.xml deleted file mode 100644 index 15a15b2..0000000 --- a/.idea/encodings.xml +++ /dev/null @@ -1,4 +0,0 @@
\ No newline at end of file
diff --git a/.idea/gradle.xml b/.idea/gradle.xml deleted file mode 100644 index 5cd135a..0000000 --- a/.idea/gradle.xml +++ /dev/null @@ -1,20 +0,0 @@
\ No newline at end of file
diff --git a/.idea/markdown-navigator/profiles_settings.xml b/.idea/markdown-navigator/profiles_settings.xml deleted file mode 100644 index 57927c5..0000000 --- a/.idea/markdown-navigator/profiles_settings.xml +++ /dev/null @@ -1,3 +0,0 @@
\ No newline at end of file
diff --git a/.idea/misc.xml b/.idea/misc.xml deleted file mode 100644 index 0f7266d..0000000 --- a/.idea/misc.xml +++ /dev/null @@ -1,133 +0,0 @@
\ No newline at end of file
diff --git a/.idea/modules.xml b/.idea/modules.xml deleted file mode 100644 index fee4f63..0000000 --- a/.idea/modules.xml +++ /dev/null @@ -1,9 +0,0 @@
\ No newline at end of file
diff --git a/.idea/runConfigurations.xml b/.idea/runConfigurations.xml deleted file mode 100644 index 7f68460..0000000 --- a/.idea/runConfigurations.xml +++ /dev/null @@ -1,12 +0,0 @@
\ No newline at end of file
diff --git a/.idea/vcs.xml b/.idea/vcs.xml deleted file mode 100644 index 94a25f7..0000000 --- a/.idea/vcs.xml +++ /dev/null @@ -1,6 +0,0 @@
\ No newline at end of file
diff --git a/README.md b/README.md index f990f8d..2c6cb9b 100644 --- a/README.md +++ b/README.md
@@ -2,6 +2,10 @@
# 作者简介
Mr. Zhu ,英文名aserbao! 从事Android开发多年,技术不高,用来工作刚刚好。对视频音视频处理,硬编码这一块有一定的研究。之前北漂,现在深漂。同名微信公众号aserbao的维护者,喜欢看书,摄影,交友,目前生活这样子。欢迎大家关注我的公众号和微信一起学习交流。
+时间宝贵,咨询技术问题有偿哦。
+
+If you have any questions about this repo, please be prepared to pay; support is not free.
+
# 学习资料
- [Android 零基础开发相机](https://gitbook.cn/gitchat/activity/5aeb03e3af08a333483d71c1)
- [Android openGl开发详解(一) - 简单图形的基本绘制](https://www.jianshu.com/p/92d02ac80611)
@@ -9,9 +13,9 @@ Mr. Zhu ,英文名aserbao! 从事Android开发多年,技术不高,用来
- [Android 自定义相机开发(三) —— 了解下EGL](https://www.jianshu.com/p/1e82021b10b4)
# 欢迎关注公众号一起学习交流Android技术
-|公众号(aserbao)|个人微信号(小老头)|交流群(若过期加个人微信拉群)|
-|--|--|--|
-|![](https://github.com/aserbao/AserbaosAndroid/blob/master/app/src/main/assets/images/weixin.jpg)|![](https://github.com/aserbao/AserbaosAndroid/blob/master/app/src/main/assets/images/we_chat.jpg)|![](https://github.com/aserbao/AserbaosAndroid/blob/master/app/src/main/assets/images/Android%E4%BA%A4%E6%B5%81%E7%BE%A4.jpg)|
+|公众号(aserbao)|
+|--|
+|![](https://github.com/aserbao/AserbaosAndroid/blob/master/app/src/main/assets/images/weixin.jpg)|
# 项目介绍
项目目前功能有:
@@ -53,4 +57,19 @@ Mr. Zhu ,英文名aserbao! 
从事Android开发多年,技术不高,用来
|--|--|
|![](https://github.com/aserbao/AserbaosAndroid/blob/master/app/src/main/assets/images/%E5%BE%AE%E4%BF%A1%E6%89%93%E8%B5%8F.jpeg)|![](https://github.com/aserbao/AserbaosAndroid/blob/master/app/src/main/assets/images/%E6%94%AF%E4%BB%98%E5%AE%9D%E6%89%93%E8%B5%8F.jpeg)|
+### 打赏记录
+|备注|金额|
+|--|--|
+|贺利军|66.66¥|
+|小个子|200¥|
+|小Qiao|66.66¥|
+|一路狂奔|20¥|
+|Passerby「路人‘」不欢|66¥|
+|有点小激动|20¥|
+|Orange| 30¥|
+|卡霾哈霾哈|66¥|
+> 说明:大家挣钱不易,如果项目对你有所帮助再打赏。如果有遇到小问题有时间也会免费帮忙解决。
+
+### Star History
+[![Star History Chart](https://api.star-history.com/svg?repos=aserbao/AndroidCamera&type=Date)](https://star-history.com/#aserbao/AndroidCamera&Date)
diff --git a/README_en.md b/README_en.md new file mode 100644 index 0000000..24214bd --- /dev/null +++ b/README_en.md @@ -0,0 +1,71 @@
+
+# About the author
+Mr. Zhu, English name aserbao! I have been doing Android development for many years; my skills are nothing special, but they are just right for the job. I have done a fair amount of research into audio/video processing and hardware encoding. I used to drift around Beijing, and now I drift around Shenzhen. I maintain the WeChat official account of the same name (aserbao), and I enjoy reading, photography, and making friends; that is life at the moment. You are welcome to follow my official account and WeChat so we can learn and exchange ideas together.
+
+My time is precious, so technical consulting is a paid service.
+
+If you have any questions about this repo, please be prepared to pay; support is not free.
+
+# Learning materials
+- [Android camera development from scratch](https://gitbook.cn/gitchat/activity/5aeb03e3af08a333483d71c1)
+- [Android OpenGL development explained (1) - basic drawing of simple shapes](https://www.jianshu.com/p/92d02ac80611)
+- [Android OpenGL development explained (2) - displaying the camera preview with SurfaceView, TextureView, and GLSurfaceView (with demo)](https://www.jianshu.com/p/db8ecba6037a)
+- [Android custom camera development (3) - getting to know EGL](https://www.jianshu.com/p/1e82021b10b4)
+
+# Follow the official account to learn and discuss Android together
+|WeChat official account (aserbao)|
+|--|
+|![](https://github.com/aserbao/AserbaosAndroid/blob/master/app/src/main/assets/images/weixin.jpg)|
+
+# Project Functions Introduction
+The project currently includes the following functions:
+- Segmented Record RecorderActivity
+- Multi-video synthesis Record RecorderActivity
+- Countdown Record RecorderActivity
+- Remove Rollback Video RecorderActivity
+- Add Filter RecorderActivity
+- Video cutting LocalVideoActivity
+- Video orientation Change LocalVideoActivity
+- Video Rotation LocalVideoActivity
+- Handle Video Frame SelCoverTimeActivity
+- Add watermark for Video VideoEditActivity
+- Add dynamic stickers for Video VideoEditActivity
+- Add dynamic subtitles for Video VideoEditActivity
+- Text to Video PrimaryMediaCodecActivity
+
+## Overall feature demos:
+![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/录制.gif)![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/摄像头切换.gif)
+![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/倒计时.gif)![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/回删功能.gif)
+![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/本地编辑.gif)![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/编辑界面.gif)
+![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/选封面.gif)![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/添加贴纸.gif)
+![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/添加字幕.gif)![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/保存到相册.gif)
+
+
+
+## Single-feature demos:
+These demos mainly cover the use of MediaCodec, AudioRecord, MediaExtractor, and MediaMuxer. The interfaces look like this:
+
+![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/Mediacodec的基本用法.gif)![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/EncodeDecode.gif)
+![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/ExtractDecode.gif)![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/DecodeEditEncode.gif)
+![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/Mediacodec录制随音乐改变.gif)![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/MediaExtractor.gif)
+![](https://github.com/aserbao/CommonSource/blob/master/AndroidCamera/images/MediaMuxer.gif)
+
+## Tipping
+Open source is not easy; if this project helps you, consider leaving a tip (please include your ID in the note).
+
+|WeChat Tipping | Alipay Tipping|
+|--|--|
+|![](https://github.com/aserbao/AserbaosAndroid/blob/master/app/src/main/assets/images/%E5%BE%AE%E4%BF%A1%E6%89%93%E8%B5%8F.jpeg)|![](https://github.com/aserbao/AserbaosAndroid/blob/master/app/src/main/assets/images/%E6%94%AF%E4%BB%98%E5%AE%9D%E6%89%93%E8%B5%8F.jpeg)|
+
+### Tipping Record
+| Remarks | Amount |
+|--|--|
+|贺利军|66.66¥|
+|小个子|200¥|
+|小Qiao|66.66¥|
+|一路狂奔|20¥|
+|Passerby「路人‘」不欢|66¥|
+|有点小激动|20¥|
+|Orange| 30¥|
+|卡霾哈霾哈|66¥|
+> Note: Everyone works hard for their money, so please tip only if the project has genuinely helped you.
diff --git a/app/.DS_Store b/app/.DS_Store new file mode 100644 index 0000000..78dcfbd Binary files /dev/null and b/app/.DS_Store differ
diff --git a/app/.gitignore b/app/.gitignore deleted file mode 100644 index 796b96d..0000000 --- a/app/.gitignore +++ /dev/null @@ -1 +0,0 @@
-/build
diff --git a/app/build.gradle b/app/build.gradle index c4683fe..a156792 100644 --- a/app/build.gradle +++ b/app/build.gradle
@@ -15,7 +15,7 @@ android {
        externalNativeBuild {
            cmake {
                cppFlags " "
-                abiFilters 'x86', 'x86_64', 'armeabi-v7a', 'arm64-v8a'
+                abiFilters 'armeabi-v7a'
            }
        }
    }
@@ -36,6 +36,15 @@ android {
            path "CMakeLists.txt"
        }
    }
+
+    testOptions {
+        unitTests.all {
+            // All the usual Gradle options.
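+            // Note: -XX:MaxPermSize only applies to Java 7 and earlier JVMs; Java 8
+            // removed the permanent generation and ignores this flag with a warning,
+            // so it is only useful here for older toolchains.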
+ jvmArgs '-XX:MaxPermSize=256m' + } + + unitTests.returnDefaultValues = true + } } dependencies { @@ -57,6 +66,10 @@ dependencies { implementation 'com.googlecode.mp4parser:isoparser:1.1.21' implementation files('libs/fmod.jar') implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version" + + + testCompile 'junit:junit:4.12' + testCompile "org.mockito:mockito-core:1.9.5" } repositories { mavenCentral() diff --git a/app/libs/arm64-v8a/libfmod.so b/app/libs/arm64-v8a/libfmod.so deleted file mode 100755 index fe85211..0000000 Binary files a/app/libs/arm64-v8a/libfmod.so and /dev/null differ diff --git a/app/libs/arm64-v8a/libfmodL.so b/app/libs/arm64-v8a/libfmodL.so deleted file mode 100755 index 5dbeb5c..0000000 Binary files a/app/libs/arm64-v8a/libfmodL.so and /dev/null differ diff --git a/app/libs/armeabi/libfmod.so b/app/libs/armeabi/libfmod.so deleted file mode 100755 index b97bfec..0000000 Binary files a/app/libs/armeabi/libfmod.so and /dev/null differ diff --git a/app/libs/armeabi/libfmodL.so b/app/libs/armeabi/libfmodL.so deleted file mode 100755 index 7b45160..0000000 Binary files a/app/libs/armeabi/libfmodL.so and /dev/null differ diff --git a/app/libs/x86/libfmod.so b/app/libs/x86/libfmod.so deleted file mode 100755 index 0c606ba..0000000 Binary files a/app/libs/x86/libfmod.so and /dev/null differ diff --git a/app/libs/x86/libfmodL.so b/app/libs/x86/libfmodL.so deleted file mode 100755 index 318d99e..0000000 Binary files a/app/libs/x86/libfmodL.so and /dev/null differ diff --git a/app/release/app-release.apk b/app/release/app-release.apk deleted file mode 100644 index d0da0cd..0000000 Binary files a/app/release/app-release.apk and /dev/null differ diff --git a/app/release/output.json b/app/release/output.json deleted file mode 100644 index c429e31..0000000 --- a/app/release/output.json +++ /dev/null @@ -1 +0,0 @@ -[{"outputType":{"type":"APK"},"apkData":{"type":"MAIN","splits":[],"versionCode":1,"versionName":"1.0","enabled":true,"outputFile":"app-release.apk","fullName":"release","baseName":"release"},"path":"app-release.apk","properties":{}}] \ No newline at end of file diff --git a/app/src/androidTest/java/com/aserbao/androidcustomcamera/ExampleInstrumentedTest.java b/app/src/androidTest/java/com/aserbao/androidcustomcamera/ExampleInstrumentedTest.java deleted file mode 100644 index e02b4a3..0000000 --- a/app/src/androidTest/java/com/aserbao/androidcustomcamera/ExampleInstrumentedTest.java +++ /dev/null @@ -1,26 +0,0 @@ -package com.aserbao.androidcustomcamera; - -import android.content.Context; -import android.support.test.InstrumentationRegistry; -import android.support.test.runner.AndroidJUnit4; - -import org.junit.Test; -import org.junit.runner.RunWith; - -import static org.junit.Assert.*; - -/** - * Instrumented test, which will execute on an Android device. - * - * @see Testing documentation - */ -@RunWith(AndroidJUnit4.class) -public class ExampleInstrumentedTest { - @Test - public void useAppContext() throws Exception { - // Context of the app under test. 
- Context appContext = InstrumentationRegistry.getTargetContext(); - - assertEquals("com.aserbao.androidcustomcamera", appContext.getPackageName()); - } -} diff --git a/app/src/main/AndroidManifest.xml b/app/src/main/AndroidManifest.xml index 10f28cf..cd50b9f 100644 --- a/app/src/main/AndroidManifest.xml +++ b/app/src/main/AndroidManifest.xml @@ -8,23 +8,25 @@ - - - + + + + + @@ -38,7 +40,9 @@ + android:screenOrientation="portrait"> + + @@ -52,13 +56,8 @@ - - - - - - - + + diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/WelcomeActivity.kt b/app/src/main/java/com/aserbao/androidcustomcamera/WelcomeActivity.kt new file mode 100644 index 0000000..0ef193a --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/WelcomeActivity.kt @@ -0,0 +1,48 @@ +package com.aserbao.androidcustomcamera + +import android.animation.Animator +import android.animation.AnimatorListenerAdapter +import android.animation.ObjectAnimator +import android.animation.PropertyValuesHolder +import android.content.Intent +import android.util.Log +import com.aserbao.androidcustomcamera.base.activity.BaseActivity +import com.aserbao.androidcustomcamera.utils.CheckPermissionUtil +import com.aserbao.androidcustomcamera.whole.record.RecorderActivity +import kotlinx.android.synthetic.main.activity_welcome.* + + +class WelcomeActivity : BaseActivity() { + + override fun setLayoutId(): Int { + return R.layout.activity_welcome + } + + override fun initView() { + super.initView() + } + + fun exectorAnimator(){ + val valuesHolder0 = PropertyValuesHolder.ofFloat("scaleX", 1.0f, 1.5f) + val valuesHolder1 = PropertyValuesHolder.ofFloat("scaleY", 1.0f, 1.5f) + val objectAnimator: ObjectAnimator = ObjectAnimator.ofPropertyValuesHolder(bgIV, valuesHolder0, valuesHolder1) + objectAnimator.addListener(object : AnimatorListenerAdapter() { + override fun onAnimationEnd(animation: Animator?) 
{
+                super.onAnimationEnd(animation)
+                startActivity(Intent(this@WelcomeActivity, RecorderActivity::class.java))
+                finish()
+            }
+        })
+        objectAnimator.setDuration(2000).start()
+    }
+
+    override fun onRequestPermissionsResult(requestCode: Int, permissions: Array<out String>, grantResults: IntArray) {
+        super.onRequestPermissionsResult(requestCode, permissions, grantResults)
+        Log.e("TAG", "onRequestPermissionsResult: $requestCode")
+        if(CheckPermissionUtil.isCameraGranted()) {
+            exectorAnimator()
+        }else{
+            startRequestPermission()
+        }
+    }
+}
\ No newline at end of file
diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/base/activity/BaseActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/base/activity/BaseActivity.java index 0f9a893..99f4259 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/base/activity/BaseActivity.java +++ b/app/src/main/java/com/aserbao/androidcustomcamera/base/activity/BaseActivity.java
@@ -28,10 +28,9 @@ public void onCreate(Bundle savedInstanceState) {
        ButterKnife.bind(this);
        startRequestPermission();
        initView();
-        getLifecycle()
    }
-    private void startRequestPermission() {
+    protected void startRequestPermission() {
        ActivityCompat.requestPermissions(this,BASIC_PERMISSIONS,123);
    }
    public void initView(){
diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/atestcases/EncodeDecodeTest.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/atestcases/EncodeDecodeTest.java new file mode 100644 index 0000000..c47c091 --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/atestcases/EncodeDecodeTest.java
@@ -0,0 +1,993 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.aserbao.androidcustomcamera.blocks.atestcases;
+import android.graphics.SurfaceTexture;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecList;
+import android.media.MediaFormat;
+import android.opengl.EGL14;
+import android.opengl.GLES20;
+import android.opengl.GLES11Ext;
+import android.opengl.GLSurfaceView;
+import android.opengl.Matrix;
+import android.os.Environment;
+import android.test.AndroidTestCase;
+import android.util.Log;
+import android.view.Surface;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+import java.util.Arrays;
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+import javax.microedition.khronos.egl.EGLSurface;
+import javax.microedition.khronos.opengles.GL;
+import javax.microedition.khronos.opengles.GL10;
+/**
+ * Generates a series of video frames, encodes them, decodes them, and tests for significant
+ * divergence from the original.
+ *
+ * <p>
+ * There are two ways to connect an encoder to a decoder. The first is to pass the output + * buffers from the encoder to the input buffers of the decoder, using ByteBuffer.put() to + * copy the bytes. With this approach, we need to watch for BUFFER_FLAG_CODEC_CONFIG, and + * if seen we use format.setByteBuffer("csd-0") followed by decoder.configure() to pass the + * meta-data through. + *
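+ * As a rough sketch of that first approach (illustrative only; the real logic
+ * lives in encodeDecodeVideoFromBuffer() below):
+ * <pre>
+ *     if ((info.flags &amp; MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
+ *         MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
+ *         format.setByteBuffer("csd-0", encodedData);  // SPS/PPS emitted by the encoder
+ *         decoder.configure(format, surface, null, 0); // surface may be null for ByteBuffer output
+ *         decoder.start();
+ *     }
+ * </pre>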
+ * <p>
+ * The second way is to write the buffers to a file and then stream it back in. With this + * approach it is necessary to use a MediaExtractor to retrieve the format info and skip past + * the meta-data. + *
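+ * That file-based approach is not exercised by this test, but a minimal sketch
+ * would look something like this (the path and track index are illustrative):
+ * <pre>
+ *     MediaExtractor extractor = new MediaExtractor();
+ *     extractor.setDataSource("/sdcard/test.mp4");
+ *     MediaFormat format = extractor.getTrackFormat(0); // carries csd-0/csd-1 for AVC
+ *     extractor.selectTrack(0);                         // assuming track 0 is the video track
+ *     decoder.configure(format, surface, null, 0);
+ *     decoder.start();
+ *     int sampleSize = extractor.readSampleData(inputBuf, 0); // queue to the decoder, then advance()
+ * </pre>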
+ * <p>
+ * The former can be done entirely in memory, but requires that the encoder and decoder + * operate simultaneously (the I/O buffers are owned by MediaCodec). The latter requires + * writing to disk, because MediaExtractor can only accept a file or URL as a source. + *
+ * <p>
+ * The direct encoder-to-decoder approach isn't currently tested elsewhere in this CTS
+ * package, so we use that here.
+ *
+ * @link https://android.googlesource.com/platform/cts/+/b04c81bfc2761b21293f9c095da38c757e570fd3/tests/tests/media/src/android/media/cts/EncodeDecodeTest.java
+ */
+public class EncodeDecodeTest extends AndroidTestCase {
+    private static final String TAG = "EncodeDecodeTest";
+    private static final boolean VERBOSE = false;           // lots of logging
+    private static final boolean DEBUG_SAVE_FILE = false;   // save copy of encoded movie
+    private static final String DEBUG_FILE_NAME_BASE = "/storage/emulated/0/";
+    // parameters for the encoder
+    private static final String MIME_TYPE = "video/avc";    // H.264 Advanced Video Coding
+    private static final int BIT_RATE = 1000000;            // 1Mbps
+    private static final int FRAME_RATE = 15;               // 15fps
+    private static final int IFRAME_INTERVAL = 10;          // 10 seconds between I-frames
+    // movie length, in frames
+    private static final int NUM_FRAMES = 30;               // two seconds of video
+    private static final int TEST_Y = 240;                  // YUV values for colored rect
+    private static final int TEST_U = 220;
+    private static final int TEST_V = 200;
+    private static final int TEST_R0 = 0;                   // RGB equivalent of {0,0,0}
+    private static final int TEST_G0 = 136;
+    private static final int TEST_B0 = 0;
+    private static final int TEST_R1 = 255;                 // RGB equivalent of {240,220,200}
+    private static final int TEST_G1 = 166;
+    private static final int TEST_B1 = 255;
+    // size of a frame, in pixels
+    private int mWidth = -1;
+    private int mHeight = -1;
+    /**
+     * Tests streaming of AVC video through the encoder and decoder. Data is encoded from
+     * a series of byte[] buffers and decoded into ByteBuffers. The output is checked for
+     * validity.
+     */
+    public void testEncodeDecodeVideoFromBufferToBufferQCIF() throws Exception {
+        setSize(176, 144);
+        testEncodeDecodeVideoFromBuffer(false);
+    }
+    public void testEncodeDecodeVideoFromBufferToBufferQVGA() throws Exception {
+        setSize(320, 240);
+        testEncodeDecodeVideoFromBuffer(false);
+    }
+    public void testEncodeDecodeVideoFromBufferToBuffer720p() throws Exception {
+        setSize(1280, 720);
+        testEncodeDecodeVideoFromBuffer(false);
+    }
+    /**
+     * Tests streaming of AVC video through the encoder and decoder. Data is encoded from
+     * a series of byte[] buffers and decoded into Surfaces. The output is checked for
+     * validity but some frames may be dropped.
+     *
+     * <p>
+     * Because of the way SurfaceTexture.OnFrameAvailableListener works, we need to run this
+     * test on a thread that doesn't have a Looper configured. If we don't, the test will
+     * pass, but we won't actually test the output because we'll never receive the "frame
+     * available" notifications. The CTS test framework seems to be configuring a Looper on
+     * the test thread, so we have to hand control off to a new thread for the duration of
+     * the test.
+     */
+    public void testEncodeDecodeVideoFromBufferToSurfaceQCIF() throws Throwable {
+        setSize(176, 144);
+        BufferToSurfaceWrapper.runTest(this);
+    }
+    public void testEncodeDecodeVideoFromBufferToSurfaceQVGA() throws Throwable {
+        setSize(320, 240);
+        BufferToSurfaceWrapper.runTest(this);
+    }
+    public void testEncodeDecodeVideoFromBufferToSurface720p() throws Throwable {
+        setSize(1280, 720);
+        BufferToSurfaceWrapper.runTest(this);
+    }
+    /** Wraps testEncodeDecodeVideoFromBuffer(true) */
+    private static class BufferToSurfaceWrapper implements Runnable {
+        private Throwable mThrowable;
+        private EncodeDecodeTest mTest;
+        private BufferToSurfaceWrapper(EncodeDecodeTest test) {
+            mTest = test;
+        }
+        public void run() {
+            try {
+                mTest.testEncodeDecodeVideoFromBuffer(true);
+            } catch (Throwable th) {
+                mThrowable = th;
+            }
+        }
+        /**
+         * Entry point.
+         */
+        public static void runTest(EncodeDecodeTest obj) throws Throwable {
+            BufferToSurfaceWrapper wrapper = new BufferToSurfaceWrapper(obj);
+            Thread th = new Thread(wrapper, "codec test");
+            th.start();
+            th.join();
+            if (wrapper.mThrowable != null) {
+                throw wrapper.mThrowable;
+            }
+        }
+    }
+    /**
+     * Sets the desired frame size.
+     */
+    private void setSize(int width, int height) {
+        if ((width % 16) != 0 || (height % 16) != 0) {
+            Log.w(TAG, "WARNING: width or height not multiple of 16");
+        }
+        mWidth = width;
+        mHeight = height;
+    }
+    /**
+     * Tests encoding and subsequently decoding video from frames generated into a buffer.
+     *
+     * <p>
+ * We encode several frames of a video test pattern using MediaCodec, then decode the + * output with MediaCodec and do some simple checks. + *
+     * <p>
+ * See http://b.android.com/37769 for a discussion of input format pitfalls. + */ + private void testEncodeDecodeVideoFromBuffer(boolean toSurface) throws Exception { + MediaCodecInfo codecInfo = selectCodec(MIME_TYPE); + if (codecInfo == null) { + // Don't fail CTS if they don't have an AVC codec (not here, anyway). + Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE); + return; + } + if (VERBOSE) Log.d(TAG, "found codec: " + codecInfo.getName()); + int colorFormat = selectColorFormat(codecInfo, MIME_TYPE); + if (VERBOSE) Log.d(TAG, "found colorFormat: " + colorFormat); + // We avoid the device-specific limitations on width and height by using values that + // are multiples of 16, which all tested devices seem to be able to handle. + MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight); + // Set some properties. Failing to specify some of these can cause the MediaCodec + // configure() call to throw an unhelpful exception. + format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat); + format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE); + format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE); + format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL); + if (VERBOSE) Log.d(TAG, "format: " + format); + // Create a MediaCodec for the desired codec, then configure it as an encoder with + // our desired properties. + MediaCodec encoder = MediaCodec.createByCodecName(codecInfo.getName()); + encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); + encoder.start(); + // Create a MediaCodec for the decoder, just based on the MIME type. The various + // format details will be passed through the csd-0 meta-data later on. + MediaCodec decoder = MediaCodec.createDecoderByType(MIME_TYPE); + try { + encodeDecodeVideoFromBuffer(encoder, colorFormat, decoder, toSurface); + } finally { + if (VERBOSE) Log.d(TAG, "releasing codecs"); + encoder.stop(); + decoder.stop(); + encoder.release(); + decoder.release(); + } + } + /** + * Returns the first codec capable of encoding the specified MIME type, or null if no + * match was found. + */ + private static MediaCodecInfo selectCodec(String mimeType) { + int numCodecs = MediaCodecList.getCodecCount(); + for (int i = 0; i < numCodecs; i++) { + MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i); + if (!codecInfo.isEncoder()) { + continue; + } + String[] types = codecInfo.getSupportedTypes(); + for (int j = 0; j < types.length; j++) { + if (types[j].equalsIgnoreCase(mimeType)) { + return codecInfo; + } + } + } + return null; + } + /** + * Returns a color format that is supported by the codec and by this test code. If no + * match is found, this throws a test failure -- the set of formats known to the test + * should be expanded for new platforms. 
+ */ + private static int selectColorFormat(MediaCodecInfo codecInfo, String mimeType) { + MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType); + for (int i = 0; i < capabilities.colorFormats.length; i++) { + int colorFormat = capabilities.colorFormats[i]; + switch (colorFormat) { + // these are the formats we know how to handle for this test + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar: + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar: + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar: + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar: + case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar: + return colorFormat; + default: + break; + } + } + fail("couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType); + return 0; // not reached + } + /** + * Does the actual work for encoding frames from buffers of byte[]. + */ + private void encodeDecodeVideoFromBuffer(MediaCodec encoder, int encoderColorFormat, + MediaCodec decoder, boolean toSurface) { + final int TIMEOUT_USEC = 10000; + ByteBuffer[] encoderInputBuffers = encoder.getInputBuffers(); + ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers(); + ByteBuffer[] decoderInputBuffers = null; + ByteBuffer[] decoderOutputBuffers = null; + MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); + int decoderColorFormat = -12345; // init to invalid value + int generateIndex = 0; + int checkIndex = 0; + boolean decoderConfigured = false; + SurfaceStuff surfaceStuff = null; + // The size of a frame of video data, in the formats we handle, is stride*sliceHeight + // for Y, and (stride/2)*(sliceHeight/2) for each of the Cb and Cr channels. Application + // of algebra and assuming that stride==width and sliceHeight==height yields: + byte[] frameData = new byte[mWidth * mHeight * 3 / 2]; + // Just out of curiosity. + long rawSize = 0; + long encodedSize = 0; + // Save a copy to disk. Useful for debugging the test. + FileOutputStream outputStream = null; + if (DEBUG_SAVE_FILE) { + String fileName = DEBUG_FILE_NAME_BASE + mWidth + "x" + mHeight + ".mp4"; + try { + outputStream = new FileOutputStream(fileName); + Log.d(TAG, "encoded output will be saved as " + fileName); + } catch (IOException ioe) { + Log.w(TAG, "Unable to create debug output file " + fileName); + throw new RuntimeException(ioe); + } + } + if (toSurface) { + surfaceStuff = new SurfaceStuff(mWidth, mHeight); + } + // Loop until the output side is done. + boolean inputDone = false; + boolean encoderDone = false; + boolean outputDone = false; + while (!outputDone) { + if (VERBOSE) Log.d(TAG, "loop"); + // If we're not done submitting frames, generate a new one and submit it. By + // doing this on every loop we're working to ensure that the encoder always has + // work to do. + // + // We don't really want a timeout here, but sometimes there's a delay opening + // the encoder device, so a short timeout can keep us from spinning hard. + if (!inputDone) { + int inputBufIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC); + if (VERBOSE) Log.d(TAG, "inputBufIndex=" + inputBufIndex); + if (inputBufIndex >= 0) { + long ptsUsec = generateIndex * 1000000 / FRAME_RATE; + if (generateIndex == NUM_FRAMES) { + // Send an empty frame with the end-of-stream flag set. If we set EOS + // on a frame with data, that frame data will be ignored, and the + // output will be short one frame. 
+ encoder.queueInputBuffer(inputBufIndex, 0, 0, ptsUsec, + MediaCodec.BUFFER_FLAG_END_OF_STREAM); + inputDone = true; + if (VERBOSE) Log.d(TAG, "sent input EOS (with zero-length frame)"); + } else { + generateFrame(generateIndex, encoderColorFormat, frameData); + ByteBuffer inputBuf = encoderInputBuffers[inputBufIndex]; + // the buffer should be sized to hold one full frame + assertTrue(inputBuf.capacity() >= frameData.length); + inputBuf.clear(); + inputBuf.put(frameData); + encoder.queueInputBuffer(inputBufIndex, 0, frameData.length, ptsUsec, 0); + if (VERBOSE) Log.d(TAG, "submitted frame " + generateIndex + " to enc"); + } + generateIndex++; + } else { + // either all in use, or we timed out during initial setup + if (VERBOSE) Log.d(TAG, "input buffer not available"); + } + } + // Check for output from the encoder. If there's no output yet, we either need to + // provide more input, or we need to wait for the encoder to work its magic. We + // can't actually tell which is the case, so if we can't get an output buffer right + // away we loop around and see if it wants more input. + // + // Once we get EOS from the encoder, we don't need to do this anymore. + if (!encoderDone) { + int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC); + if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) { + // no output available yet + if (VERBOSE) Log.d(TAG, "no output from encoder available"); + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { + // not expected for an encoder + encoderOutputBuffers = encoder.getOutputBuffers(); + if (VERBOSE) Log.d(TAG, "encoder output buffers changed"); + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { + // not expected for an encoder + MediaFormat newFormat = encoder.getOutputFormat(); + if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat); + } else if (encoderStatus < 0) { + fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus); + } else { // encoderStatus >= 0 + ByteBuffer encodedData = encoderOutputBuffers[encoderStatus]; + if (encodedData == null) { + fail("encoderOutputBuffer " + encoderStatus + " was null"); + } + // It's usually necessary to adjust the ByteBuffer values to match BufferInfo. + encodedData.position(info.offset); + encodedData.limit(info.offset + info.size); + encodedSize += info.size; + if (outputStream != null) { + byte[] data = new byte[info.size]; + encodedData.get(data); + encodedData.position(info.offset); + try { + outputStream.write(data); + } catch (IOException ioe) { + Log.w(TAG, "failed writing debug data to file"); + throw new RuntimeException(ioe); + } + } + if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { + // Codec config info. Only expected on first packet. + assertFalse(decoderConfigured); + MediaFormat format = + MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight); + format.setByteBuffer("csd-0", encodedData); + decoder.configure(format, toSurface ? surfaceStuff.getSurface() : null, + null, 0); + decoder.start(); + decoderInputBuffers = decoder.getInputBuffers(); + decoderOutputBuffers = decoder.getOutputBuffers(); + decoderConfigured = true; + if (VERBOSE) Log.d(TAG, "decoder configured (" + info.size + " bytes)"); + } else { + // Get a decoder input buffer, blocking until it's available. 
+                        assertTrue(decoderConfigured);
+                        int inputBufIndex = decoder.dequeueInputBuffer(-1);
+                        ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
+                        inputBuf.clear();
+                        inputBuf.put(encodedData);
+                        decoder.queueInputBuffer(inputBufIndex, 0, info.size, info.presentationTimeUs,
+                                info.flags);
+                        encoderDone = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
+                        if (VERBOSE) Log.d(TAG, "passed " + info.size + " bytes to decoder"
+                                + (encoderDone ? " (EOS)" : ""));
+                    }
+                    encoder.releaseOutputBuffer(encoderStatus, false);
+                }
+            }
+            // Check for output from the decoder. We want to do this on every loop to avoid
+            // the possibility of stalling the pipeline. We use a short timeout to avoid
+            // burning CPU if the decoder is hard at work but the next frame isn't quite ready.
+            //
+            // If we're decoding to a Surface, we'll get notified here as usual but the
+            // ByteBuffer references will be null. The data is sent to Surface instead.
+            if (decoderConfigured) {
+                int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
+                if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
+                    // no output available yet
+                    if (VERBOSE) Log.d(TAG, "no output from decoder available");
+                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
+                    if (VERBOSE) Log.d(TAG, "decoder output buffers changed");
+                    decoderOutputBuffers = decoder.getOutputBuffers();
+                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+                    // this happens before the first frame is returned
+                    MediaFormat decoderOutputFormat = decoder.getOutputFormat();
+                    decoderColorFormat =
+                            decoderOutputFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
+                    if (VERBOSE) Log.d(TAG, "decoder output format changed: "
+                            + decoderOutputFormat);
+                } else if (decoderStatus < 0) {
+                    fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
+                } else {  // decoderStatus >= 0
+                    if (!toSurface) {
+                        ByteBuffer outputFrame = decoderOutputBuffers[decoderStatus];
+                        outputFrame.position(info.offset);
+                        outputFrame.limit(info.offset + info.size);
+                        rawSize += info.size;
+                        if (info.size == 0) {
+                            if (VERBOSE) Log.d(TAG, "got empty frame");
+                        } else {
+                            if (VERBOSE) Log.d(TAG, "decoded, checking frame " + checkIndex);
+                            checkFrame(checkIndex++, decoderColorFormat, outputFrame);
+                        }
+                        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+                            if (VERBOSE) Log.d(TAG, "output EOS");
+                            outputDone = true;
+                        }
+                    } else {
+                        // Before we release+render this buffer, check to see if data from a
+                        // previous go-round has latched.
+                        surfaceStuff.checkNewImageIfAvailable();
+                        if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus
+                                + " (size=" + info.size + ")");
+                        rawSize += info.size;
+                        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+                            if (VERBOSE) Log.d(TAG, "output EOS");
+                            outputDone = true;
+                        }
+                    }
+                    // If output is going to a Surface, the second argument should be true.
+                    // If not, the value doesn't matter.
+                    //
+                    // If we are sending to a Surface, then some time after we call this the
+                    // data will be made available to SurfaceTexture, and the onFrameAvailable()
+                    // callback will fire.
+ decoder.releaseOutputBuffer(decoderStatus, true /*render*/); + } + } + } + if (VERBOSE) Log.d(TAG, "encoded " + NUM_FRAMES + " frames at " + + mWidth + "x" + mHeight + ": raw=" + rawSize + ", enc=" + encodedSize); + if (outputStream != null) { + try { + outputStream.close(); + } catch (IOException ioe) { + Log.w(TAG, "failed closing debug file"); + throw new RuntimeException(ioe); + } + } + } + /** + * Generates data for frame N into the supplied buffer. We have an 8-frame animation + * sequence that wraps around. It looks like this: + *
+     * <pre>
+     *   0 1 2 3
+     *   7 6 5 4
+     * </pre>
+ * We draw one of the eight rectangles and leave the rest set to the zero-fill color. + */ + private void generateFrame(int frameIndex, int colorFormat, byte[] frameData) { + final int HALF_WIDTH = mWidth / 2; + boolean semiPlanar = isSemiPlanarYUV(colorFormat); + // Set to zero. In YUV this is a dull green. + Arrays.fill(frameData, (byte) 0); + int startX, startY, countX, countY; + frameIndex %= 8; + //frameIndex = (frameIndex / 8) % 8; // use this instead for debug -- easier to see + if (frameIndex < 4) { + startX = frameIndex * (mWidth / 4); + startY = 0; + } else { + startX = (7 - frameIndex) * (mWidth / 4); + startY = mHeight / 2; + } + for (int y = startY + (mHeight/2) - 1; y >= startY; --y) { + for (int x = startX + (mWidth/4) - 1; x >= startX; --x) { + if (semiPlanar) { + // full-size Y, followed by CbCr pairs at half resolution + // e.g. Nexus 4 OMX.qcom.video.encoder.avc COLOR_FormatYUV420SemiPlanar + // e.g. Galaxy Nexus OMX.TI.DUCATI1.VIDEO.H264E + // OMX_TI_COLOR_FormatYUV420PackedSemiPlanar + frameData[y * mWidth + x] = (byte) TEST_Y; + if ((x & 0x01) == 0 && (y & 0x01) == 0) { + frameData[mWidth*mHeight + y * HALF_WIDTH + x] = (byte) TEST_U; + frameData[mWidth*mHeight + y * HALF_WIDTH + x + 1] = (byte) TEST_V; + } + } else { + // full-size Y, followed by quarter-size Cb and quarter-size Cr + // e.g. Nexus 10 OMX.Exynos.AVC.Encoder COLOR_FormatYUV420Planar + // e.g. Nexus 7 OMX.Nvidia.h264.encoder COLOR_FormatYUV420Planar + frameData[y * mWidth + x] = (byte) TEST_Y; + if ((x & 0x01) == 0 && (y & 0x01) == 0) { + frameData[mWidth*mHeight + (y/2) * HALF_WIDTH + (x/2)] = (byte) TEST_U; + frameData[mWidth*mHeight + HALF_WIDTH * (mHeight / 2) + + (y/2) * HALF_WIDTH + (x/2)] = (byte) TEST_V; + } + } + } + } + if (false) { + // make sure that generate and check agree + Log.d(TAG, "SPOT CHECK"); + checkFrame(frameIndex, colorFormat, ByteBuffer.wrap(frameData)); + Log.d(TAG, "SPOT CHECK DONE"); + } + } + /** + * Performs a simple check to see if the frame is more or less right. + *
+     * <p>
+ * See {@link generateFrame} for a description of the layout. The idea is to sample + * one pixel from the middle of the 8 regions, and verify that the correct one has + * the non-background color. We can't know exactly what the video encoder has done + * with our frames, so we just check to see if it looks like more or less the right thing. + *
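+     * As a worked example of the index math used below, assume QVGA (320x240), so
+     * HALF_WIDTH = 160. The first sample point (i=0) is (x=40, y=60); in a semi-planar
+     * frame its Y value sits at y*mWidth + x = 60*320 + 40 = 19240, its U value at
+     * mWidth*mHeight + 2*(y/2)*HALF_WIDTH + 2*(x/2) = 76800 + 9600 + 40 = 86440, and
+     * its V value immediately after that at 86441.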
+     * <p>
+ * Throws a failure if the frame looks wrong. + */ + private void checkFrame(int frameIndex, int colorFormat, ByteBuffer frameData) { + final int HALF_WIDTH = mWidth / 2; + boolean frameFailed = false; + if (colorFormat == 0x7FA30C03) { + // Nexus 4 decoder output OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka + Log.d(TAG, "unable to check frame contents for colorFormat=" + + Integer.toHexString(colorFormat)); + return; + } + boolean semiPlanar = isSemiPlanarYUV(colorFormat); + frameIndex %= 8; + for (int i = 0; i < 8; i++) { + int x, y; + if (i < 4) { + x = i * (mWidth / 4) + (mWidth / 8); + y = mHeight / 4; + } else { + x = (7 - i) * (mWidth / 4) + (mWidth / 8); + y = (mHeight * 3) / 4; + } + int testY, testU, testV; + if (semiPlanar) { + // Galaxy Nexus uses OMX_TI_COLOR_FormatYUV420PackedSemiPlanar + testY = frameData.get(y * mWidth + x) & 0xff; + testU = frameData.get(mWidth*mHeight + 2*(y/2) * HALF_WIDTH + 2*(x/2)) & 0xff; + testV = frameData.get(mWidth*mHeight + 2*(y/2) * HALF_WIDTH + 2*(x/2) + 1) & 0xff; + } else { + // Nexus 10, Nexus 7 use COLOR_FormatYUV420Planar + testY = frameData.get(y * mWidth + x) & 0xff; + testU = frameData.get(mWidth*mHeight + (y/2) * HALF_WIDTH + (x/2)) & 0xff; + testV = frameData.get(mWidth*mHeight + HALF_WIDTH * (mHeight / 2) + + (y/2) * HALF_WIDTH + (x/2)) & 0xff; + } + boolean failed = false; + if (i == frameIndex) { + failed = !isColorClose(testY, TEST_Y) || + !isColorClose(testU, TEST_U) || + !isColorClose(testV, TEST_V); + } else { + // should be our zeroed-out buffer + failed = !isColorClose(testY, 0) || + !isColorClose(testU, 0) || + !isColorClose(testV, 0); + } + if (failed) { + Log.w(TAG, "Bad frame " + frameIndex + " (r=" + i + ": Y=" + testY + + " U=" + testU + " V=" + testV + ")"); + frameFailed = true; + } + } + if (frameFailed) { + fail("bad frame (" + frameIndex + ")"); + } + } + /** + * Returns true if the actual color value is close to the expected color value. + */ + static boolean isColorClose(int actual, int expected) { + if (expected < 5) { + return actual < (expected + 5); + } else if (expected > 250) { + return actual > (expected - 5); + } else { + return actual > (expected - 5) && actual < (expected + 5); + } + } + /** + * Returns true if the specified color format is semi-planar YUV. Throws an exception + * if the color format is not recognized (e.g. not YUV). + */ + private static boolean isSemiPlanarYUV(int colorFormat) { + switch (colorFormat) { + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar: + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar: + return false; + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar: + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar: + case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar: + return true; + default: + throw new RuntimeException("unknown format " + colorFormat); + } + } + /** + * Holds state associated with a Surface used for output. + *
+     * <p>
+ * By default, the Surface will be using a BufferQueue in asynchronous mode, so we + * will likely miss a number of frames. + */ + private static class SurfaceStuff implements SurfaceTexture.OnFrameAvailableListener { + private static final int EGL_OPENGL_ES2_BIT = 4; + private EGL10 mEGL; + private EGLDisplay mEGLDisplay; + private EGLContext mEGLContext; + private EGLSurface mEGLSurface; + private SurfaceTexture mSurfaceTexture; + private Surface mSurface; + private boolean mFrameAvailable = false; // guarded by "this" + private int mWidth; + private int mHeight; + private VideoRender mVideoRender; + public SurfaceStuff(int width, int height) { + mWidth = width; + mHeight = height; + eglSetup(); + mVideoRender = new VideoRender(); + mVideoRender.onSurfaceCreated(); + // Even if we don't access the SurfaceTexture after the constructor returns, we + // still need to keep a reference to it. The Surface doesn't retain a reference + // at the Java level, so if we don't either then the object can get GCed, which + // causes the native finalizer to run. + if (VERBOSE) Log.d(TAG, "textureID=" + mVideoRender.getTextureId()); + mSurfaceTexture = new SurfaceTexture(mVideoRender.getTextureId()); + // This doesn't work if SurfaceStuff is created on the thread that CTS started for + // these test cases. + // + // The CTS-created thread has a Looper, and the SurfaceTexture constructor will + // create a Handler that uses it. The "frame available" message is delivered + // there, but since we're not a Looper-based thread we'll never see it. For + // this to do anything useful, SurfaceStuff must be created on a thread without + // a Looper, so that SurfaceTexture uses the main application Looper instead. + // + // Java language note: passing "this" out of a constructor is generally unwise, + // but we should be able to get away with it here. + mSurfaceTexture.setOnFrameAvailableListener(this); + mSurface = new Surface(mSurfaceTexture); + } + /** + * Prepares EGL. We want a GLES 2.0 context and a surface that supports pbuffer. + */ + private void eglSetup() { + mEGL = (EGL10)EGLContext.getEGL(); + mEGLDisplay = mEGL.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY); + if (!mEGL.eglInitialize(mEGLDisplay, null)) { + fail("unable to initialize EGL10"); + } + // Configure surface for pbuffer and OpenGL ES 2.0. We want enough RGB bits + // to be able to tell if the frame is reasonable. + int[] attribList = { + EGL10.EGL_RED_SIZE, 8, + EGL10.EGL_GREEN_SIZE, 8, + EGL10.EGL_BLUE_SIZE, 8, + EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT, + EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, + EGL10.EGL_NONE + }; + EGLConfig[] configs = new EGLConfig[1]; + int[] numConfigs = new int[1]; + if (!mEGL.eglChooseConfig(mEGLDisplay, attribList, configs, 1, numConfigs)) { + fail("unable to find RGB888+pbuffer EGL config"); + } + // Configure context for OpenGL ES 2.0. + int[] attrib_list = { + EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, + EGL10.EGL_NONE + }; + mEGLContext = mEGL.eglCreateContext(mEGLDisplay, configs[0], EGL10.EGL_NO_CONTEXT, + attrib_list); + checkEglError("eglCreateContext"); + assertNotNull(mEGLContext); + // Create a pbuffer surface. By using this for output, we can use glReadPixels + // to test values in the output. 
+ int[] surfaceAttribs = { + EGL10.EGL_WIDTH, mWidth, + EGL10.EGL_HEIGHT, mHeight, + EGL10.EGL_NONE + }; + mEGLSurface = mEGL.eglCreatePbufferSurface(mEGLDisplay, configs[0], surfaceAttribs); + checkEglError("eglCreatePbufferSurface"); + assertNotNull(mEGLSurface); + if (!mEGL.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) { + fail("eglMakeCurrent failed"); + } + } + /** + * Checks for EGL errors. + */ + private void checkEglError(String msg) { + boolean failed = false; + int error; + while ((error = mEGL.eglGetError()) != EGL10.EGL_SUCCESS) { + Log.e(TAG, msg + ": EGL error: 0x" + Integer.toHexString(error)); + failed = true; + } + if (failed) { + fail("EGL error encountered (see log)"); + } + } + /** + * Returns the Surface that the MediaCodec will draw onto. + */ + public Surface getSurface() { + return mSurface; + } + /** + * Latches the next buffer into the texture if one is available, and checks it for + * validity. Must be called from the thread that created the SurfaceStuff object. + */ + public void checkNewImageIfAvailable() { + boolean newStuff = false; + synchronized (this) { + if (mSurfaceTexture != null && mFrameAvailable) { + mFrameAvailable = false; + newStuff = true; + } + } + if (newStuff) { + mVideoRender.checkGlError("before updateTexImage"); + mSurfaceTexture.updateTexImage(); + mVideoRender.onDrawFrame(mSurfaceTexture); + checkSurfaceFrame(); + } + } + @Override + public void onFrameAvailable(SurfaceTexture st) { + if (VERBOSE) Log.d(TAG, "new frame available"); + synchronized (this) { + mFrameAvailable = true; + } + } + /** + * Attempts to check the frame for correctness. + *
+         * <p>
+ * Our definition of "correct" is based on knowing what the frame sequence number is, + * which we can't reliably get by counting frames since the underlying mechanism can + * drop frames. The alternative would be to use the presentation time stamp that + * we passed to the video encoder, but there's no way to get that from the texture. + *
+         * <p>
+ * All we can do is verify that it looks something like a frame we'd expect, i.e. + * green with exactly one pink rectangle. + */ + private void checkSurfaceFrame() { + ByteBuffer pixelBuf = ByteBuffer.allocateDirect(4); // TODO - reuse this + int numColoredRects = 0; + int rectPosn = -1; + for (int i = 0; i < 8; i++) { + // Note the coordinates are inverted on the Y-axis in GL. + int x, y; + if (i < 4) { + x = i * (mWidth / 4) + (mWidth / 8); + y = (mHeight * 3) / 4; + } else { + x = (7 - i) * (mWidth / 4) + (mWidth / 8); + y = mHeight / 4; + } + GLES20.glReadPixels(x, y, 1, 1, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, pixelBuf); + int r = pixelBuf.get(0) & 0xff; + int g = pixelBuf.get(1) & 0xff; + int b = pixelBuf.get(2) & 0xff; + if (isColorClose(r, TEST_R0) && + isColorClose(g, TEST_G0) && + isColorClose(b, TEST_B0)) { + // empty space + } else if (isColorClose(r, TEST_R1) && + isColorClose(g, TEST_G1) && + isColorClose(b, TEST_B1)) { + // colored rect + numColoredRects++; + rectPosn = i; + } else { + // wtf + Log.w(TAG, "found unexpected color r=" + r + " g=" + g + " b=" + b); + } + } + if (numColoredRects != 1) { + fail("Found surface with colored rects != 1 (" + numColoredRects + ")"); + } else { + if (VERBOSE) Log.d(TAG, "good surface, looks like index " + rectPosn); + } + } + } + /** + * GL code to fill a surface with a texture. This class was largely copied from + * VideoSurfaceView.VideoRender. + *
+     * <p>
+ * TODO: merge implementations + */ + private static class VideoRender { + private static final int FLOAT_SIZE_BYTES = 4; + private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES; + private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0; + private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3; + private final float[] mTriangleVerticesData = { + // X, Y, Z, U, V + -1.0f, -1.0f, 0, 0.f, 0.f, + 1.0f, -1.0f, 0, 1.f, 0.f, + -1.0f, 1.0f, 0, 0.f, 1.f, + 1.0f, 1.0f, 0, 1.f, 1.f, + }; + private FloatBuffer mTriangleVertices; + private final String mVertexShader = + "uniform mat4 uMVPMatrix;\n" + + "uniform mat4 uSTMatrix;\n" + + "attribute vec4 aPosition;\n" + + "attribute vec4 aTextureCoord;\n" + + "varying vec2 vTextureCoord;\n" + + "void main() {\n" + + " gl_Position = uMVPMatrix * aPosition;\n" + + " vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" + + "}\n"; + private final String mFragmentShader = + "#extension GL_OES_EGL_image_external : require\n" + + "precision mediump float;\n" + + "varying vec2 vTextureCoord;\n" + + "uniform samplerExternalOES sTexture;\n" + + "void main() {\n" + + " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" + + "}\n"; + private float[] mMVPMatrix = new float[16]; + private float[] mSTMatrix = new float[16]; + private int mProgram; + private int mTextureID = -12345; + private int muMVPMatrixHandle; + private int muSTMatrixHandle; + private int maPositionHandle; + private int maTextureHandle; + public VideoRender() { + mTriangleVertices = ByteBuffer.allocateDirect( + mTriangleVerticesData.length * FLOAT_SIZE_BYTES) + .order(ByteOrder.nativeOrder()).asFloatBuffer(); + mTriangleVertices.put(mTriangleVerticesData).position(0); + Matrix.setIdentityM(mSTMatrix, 0); + } + public int getTextureId() { + return mTextureID; + } + public void onDrawFrame(SurfaceTexture st) { + checkGlError("onDrawFrame start"); + st.getTransformMatrix(mSTMatrix); + GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f); + GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT); + GLES20.glUseProgram(mProgram); + checkGlError("glUseProgram"); + GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID); + mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET); + GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, + TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices); + checkGlError("glVertexAttribPointer maPosition"); + GLES20.glEnableVertexAttribArray(maPositionHandle); + checkGlError("glEnableVertexAttribArray maPositionHandle"); + mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET); + GLES20.glVertexAttribPointer(maTextureHandle, 3, GLES20.GL_FLOAT, false, + TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices); + checkGlError("glVertexAttribPointer maTextureHandle"); + GLES20.glEnableVertexAttribArray(maTextureHandle); + checkGlError("glEnableVertexAttribArray maTextureHandle"); + Matrix.setIdentityM(mMVPMatrix, 0); + GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0); + GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0); + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); + checkGlError("glDrawArrays"); + GLES20.glFinish(); + } + public void onSurfaceCreated() { + mProgram = createProgram(mVertexShader, mFragmentShader); + if (mProgram == 0) { + Log.e(TAG, "failed creating program"); + return; + } + maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition"); + 
checkGlError("glGetAttribLocation aPosition"); + if (maPositionHandle == -1) { + throw new RuntimeException("Could not get attrib location for aPosition"); + } + maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord"); + checkGlError("glGetAttribLocation aTextureCoord"); + if (maTextureHandle == -1) { + throw new RuntimeException("Could not get attrib location for aTextureCoord"); + } + muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix"); + checkGlError("glGetUniformLocation uMVPMatrix"); + if (muMVPMatrixHandle == -1) { + throw new RuntimeException("Could not get attrib location for uMVPMatrix"); + } + muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix"); + checkGlError("glGetUniformLocation uSTMatrix"); + if (muSTMatrixHandle == -1) { + throw new RuntimeException("Could not get attrib location for uSTMatrix"); + } + int[] textures = new int[1]; + GLES20.glGenTextures(1, textures, 0); + mTextureID = textures[0]; + GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID); + checkGlError("glBindTexture mTextureID"); + GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, + GLES20.GL_NEAREST); + GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, + GLES20.GL_LINEAR); + GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, + GLES20.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, + GLES20.GL_CLAMP_TO_EDGE); + checkGlError("glTexParameter"); + } + private int loadShader(int shaderType, String source) { + int shader = GLES20.glCreateShader(shaderType); + checkGlError("glCreateShader type=" + shaderType); + GLES20.glShaderSource(shader, source); + GLES20.glCompileShader(shader); + int[] compiled = new int[1]; + GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0); + if (compiled[0] == 0) { + Log.e(TAG, "Could not compile shader " + shaderType + ":"); + Log.e(TAG, GLES20.glGetShaderInfoLog(shader)); + GLES20.glDeleteShader(shader); + shader = 0; + } + return shader; + } + private int createProgram(String vertexSource, String fragmentSource) { + int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource); + if (vertexShader == 0) { + return 0; + } + int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource); + if (pixelShader == 0) { + return 0; + } + int program = GLES20.glCreateProgram(); + checkGlError("glCreateProgram"); + if (program == 0) { + Log.e(TAG, "Could not create program"); + } + GLES20.glAttachShader(program, vertexShader); + checkGlError("glAttachShader"); + GLES20.glAttachShader(program, pixelShader); + checkGlError("glAttachShader"); + GLES20.glLinkProgram(program); + int[] linkStatus = new int[1]; + GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0); + if (linkStatus[0] != GLES20.GL_TRUE) { + Log.e(TAG, "Could not link program: "); + Log.e(TAG, GLES20.glGetProgramInfoLog(program)); + GLES20.glDeleteProgram(program); + program = 0; + } + return program; + } + public void checkGlError(String op) { + int error; + while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) { + Log.e(TAG, op + ": glError " + error); + throw new RuntimeException(op + ": glError " + error); + } + } + } +} \ No newline at end of file diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/atestcases/TestCaseActivity.kt b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/atestcases/TestCaseActivity.kt new file 
mode 100644 index 0000000..085b90a --- /dev/null +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/atestcases/TestCaseActivity.kt @@ -0,0 +1,22 @@ +package com.aserbao.androidcustomcamera.blocks.atestcases + +import android.support.v7.app.AppCompatActivity +import android.os.Bundle +import android.view.View +import com.aserbao.androidcustomcamera.base.activity.RVBaseActivity +import com.aserbao.androidcustomcamera.base.beans.BaseRecyclerBean + +/** + * https://android.googlesource.com/platform/cts/+/b04c81bfc2761b21293f9c095da38c757e570fd3/tests/tests/media/src/android/media + */ +class TestCaseActivity : RVBaseActivity() { + override fun itemClickBack(view: View?, position: Int, isLongClick: Boolean, comeFrom: Int) { + when(position){ +// 0 -> EncodeDecodeTest + } + } + override fun initGetData() { + mBaseRecyclerBeen.add(BaseRecyclerBean("EncodeDecodeTest",0)) + } + +} \ No newline at end of file diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/ffmpeg/FFmpegActivity.kt b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/ffmpeg/FFmpegActivity.kt index d9fbfda..fc15dee 100644 --- a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/ffmpeg/FFmpegActivity.kt +++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/ffmpeg/FFmpegActivity.kt @@ -3,8 +3,10 @@ package com.aserbao.androidcustomcamera.blocks.ffmpeg import Jni.FFmpegCmd import VideoHandle.* import android.os.Environment +import android.support.annotation.MainThread import android.util.Log import android.view.View +import android.widget.Toast import com.aserbao.androidcustomcamera.base.activity.RVBaseActivity import com.aserbao.androidcustomcamera.base.beans.BaseRecyclerBean import com.aserbao.androidcustomcamera.blocks.ffmpeg.beans.WaterFilter @@ -18,12 +20,13 @@ var absolutePath = Environment.getExternalStorageDirectory().absolutePath class FFmpegActivity : RVBaseActivity(),OnEditorListener { override fun initGetData() { + mBaseRecyclerBeen.add(BaseRecyclerBean("取消", 100)) mBaseRecyclerBeen.add(BaseRecyclerBean("视频中抽取音频", 0)) mBaseRecyclerBeen.add(BaseRecyclerBean("视频添加水印", 1)) mBaseRecyclerBeen.add(BaseRecyclerBean("无损视频合并", 2)) mBaseRecyclerBeen.add(BaseRecyclerBean("多段视频合并", 3)) mBaseRecyclerBeen.add(BaseRecyclerBean("多段视频加水印并合成", 4)) - mBaseRecyclerBeen.add(BaseRecyclerBean("取消", 5)) + mBaseRecyclerBeen.add(BaseRecyclerBean("视频添加配乐并调整音量大小", 5)) mInputs.add(WaterFilter(videoPath1,png1)) mInputs.add(WaterFilter(videoPath2,png2)) @@ -53,12 +56,19 @@ class FFmpegActivity : RVBaseActivity(),OnEditorListener { override fun itemClickBack(view: View, position: Int, isLongClick: Boolean, comeFrom: Int) { mStartTime = System.currentTimeMillis() when(position){ + 100 ->{ +// FFmpegCmd.exit() + addMusicToVideo1() + } 0 ->{ FFmpegUtils.demuxer(videoPath1,outputMusicPath,EpEditor.Format.MP3,this) } 1 ->{ - var epVideo1 = EpVideo(videoPath1) - epVideo1.addDraw(EpDraw(png1,0,0,576f,1024f,false)) + var tempVideoPath = "/storage/emulated/0/Android/data/com.getremark.playground/files/Movies/15871817738614870009935443.mp4" + var tempBitmapPath = "/storage/emulated/0/playground/temp/123.png" + var epVideo1 = EpVideo(tempVideoPath) +// var epVideo1 = EpVideo(videoPath1) + epVideo1.addDraw(EpDraw(tempBitmapPath,0,0,576f,1024f,false)) val outputOption = EpEditor.OutputOption(outputPathMp4) EpEditor.exec(epVideo1, outputOption,this) } @@ -83,11 +93,35 @@ class FFmpegActivity : RVBaseActivity(),OnEditorListener { addWaterFilterOneLine() } 5 ->{ - FFmpegCmd.exit() + addMusicToVideo() } } } + fun 
addMusicToVideo(){
+ var inputVideo = absolutePath + "/5.mp4"
+// var inputVideo = absolutePath + "/temp.mp4"
+ var inputMusic = absolutePath + "/input.mp3"
+ var outputVideo = absolutePath + "/output.mp4"
+ var videoVolume = 0.5f
+ var musicVolume = 1f
+ FFmpegUtils.music(inputVideo,inputMusic,outputVideo,videoVolume,musicVolume,this)
+// FFmpegUtils.addMusicForMp4(inputVideo,inputMusic,videoVolume,musicVolume,outputVideo,this)
+ }
+ fun addMusicToVideo1(){
+// var inputVideo = absolutePath + "/5.mp4"
+// var inputVideo = absolutePath + "/temp.mp4"
+// var inputMusic = absolutePath + "/input.mp3"
+// var inputVideo = "/storage/emulated/0/playground/temp/.capture/.remark-1588920936552.mp4"
+ var inputVideo = absolutePath + "/test1.mp4"
+ var inputMusic = absolutePath +"/er.m4a"
+ var outputVideo = absolutePath + "/output.mp4"
+ var videoVolume = 1f
+ var musicVolume = 1f
+ FFmpegUtils.music(inputVideo,inputMusic,outputVideo,videoVolume,musicVolume,this)
+ }
+
+
 private fun addWaterFilterOneLine() {
 // ffmpeg -i 2.mp4 -i 3.mp4 -i img1.png -i img2.png -filter_complex "[0:v][2:v]overlay=0:0[in1];[1:v][3:v]overlay=0:10[in2];[in1][in2]concat" -y output.mp4
 //start processing
@@ -160,6 +194,7 @@ class FFmpegActivity : RVBaseActivity(),OnEditorListener {
 if(cuurIndex == 3){
 itemClickBack(mBaseRv,2,false,2)
 }*/
+ Log.e(TAG, ": onSuccess 耗时: " + (System.currentTimeMillis() - mStartTime) );
 }
@@ -170,6 +205,4 @@
 override fun onProgress(progress: Float) {
 Log.e(TAG, ": onProgress" + progress );
 }
-
-
 }
\ No newline at end of file
diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/ffmpeg/utils/FFmpegUtils.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/ffmpeg/utils/FFmpegUtils.java
index 2ce388f..db9787d 100644
--- a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/ffmpeg/utils/FFmpegUtils.java
+++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/ffmpeg/utils/FFmpegUtils.java
@@ -73,6 +73,35 @@ public static void music(String videoin, String audioin, String output, float vi
 execCmd(cmd, d, onEditorListener);
 }
+ /**
+ * Adds background music to a video.
+ * @param inputVideoPath
+ * @param inputMusicPath
+ * @param videoVolume 0~1
+ * @param musicVolume 0~1
+ * @param outputVideoPath
+ */
+ public static void addMusicForMp4(String inputVideoPath,String inputMusicPath,float videoVolume,float musicVolume,String outputVideoPath,final OnEditorListener onEditorListener){
+// ffmpeg -y -i 123.mp4 -i 5.aac -filter_complex "[0:a]aformat=sample_fmts=fltp:sample_rates=44100:channel_layouts=stereo,volume=1.0[a0];
+// [1:a]aformat=sample_fmts=fltp:sample_rates=44100:channel_layouts=stereo,volume=0.5[a1];[a0][a1]amix=inputs=2:duration=first[aout]" -map "[aout]" -ac 2 -c:v copy -map 0:v:0 output.mp4
+ CmdList cmd = new CmdList();
+ cmd.append("ffmpeg").append("-y").append("-i").append(inputVideoPath)
+ .append("-i").append(inputMusicPath)
+ .append("-filter_complex")
+ .append("[0:a]volume=" + videoVolume + "[a0];[1:a]volume=" + musicVolume + "[a1];[a0][a1]amix=inputs=2:duration=first[aout]")
+ .append("-map")
+ .append("[aout]")
+ .append("-ac")
+ .append("2")
+ /*.append("-c:v")
+ .append("-copy")*/
+ .append("-map")
+ .append("0:v:0")
+ .append(outputVideoPath);
+ long d = VideoUitls.getDuration(inputVideoPath);
+ execCmd(cmd, d, onEditorListener);
+ }
+
 /**
 * Demuxes the audio and video streams.
 *
@@ -186,7 +215,7 @@ public static void changePTS(String videoin, String out, float times, EpEditor.P
 * @param out output path
 * @param w output image width
 * @param h output image height
- * @param rate
number of images generated per second of video
+ * @param rate number of frames extracted per second of video
 * @param onEditorListener callback listener
 */
 public static void video2pic(String videoin, String out, int w, int h, float rate, OnEditorListener onEditorListener) {
@@ -398,4 +427,6 @@ public void onProgress(final float progress) {
 }
 });
 }
+
+
 }
diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/primary/PrimaryMediaCodecActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/primary/PrimaryMediaCodecActivity.java
index bcb6363..49b152a 100644
--- a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/primary/PrimaryMediaCodecActivity.java
+++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/primary/PrimaryMediaCodecActivity.java
@@ -35,9 +35,9 @@ public class PrimaryMediaCodecActivity extends BaseActivity {
 private static final String MIME_TYPE = "video/avc";
 private static final int WIDTH = 720;
 private static final int HEIGHT = 1280;
- private static final int BIT_RATE = 4000000;
- private static final int FRAMES_PER_SECOND = 4;
- private static final int IFRAME_INTERVAL = 5;
+ private static final int BIT_RATE = 3000000;
+ private static final int FRAMES_PER_SECOND = 30;
+ private static final int IFRAME_INTERVAL = 1;
 private static final int NUM_FRAMES = 4 * 100;
 private static final int START_RECORDING = 0;
@@ -71,8 +71,8 @@ public void onViewClicked(View view) {
 case R.id.btn_recording:
 if (mBtnRecording.getText().equals("开始录制")) {
 try {
-// mOutputFile = new File(Environment.getExternalStorageDirectory().getAbsolutePath(), System.currentTimeMillis() + ".mp4");
- mOutputFile = new File(FileUtils.getStorageMp4("PrimaryMediaCodecActivity"));
+ mOutputFile = new File(Environment.getExternalStorageDirectory().getAbsolutePath(), System.currentTimeMillis() + ".mp4");
+// mOutputFile = new File(FileUtils.getStorageMp4("PrimaryMediaCodecActivity"));
 startRecording(mOutputFile);
 mPrimaryMcTv.setText("文件保存路径为:" + mOutputFile.toString());
 mBtnRecording.setText("停止录制");
diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/show/MediaCodecShowOnGlSurfaceView.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/show/MediaCodecShowOnGlSurfaceView.java
index 148d2eb..8f5e21b 100644
--- a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/show/MediaCodecShowOnGlSurfaceView.java
+++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/show/MediaCodecShowOnGlSurfaceView.java
@@ -76,9 +76,8 @@ public void onViewClicked(View view) {
 startActivityForResult(intent2, StaticFinalValues.REQUEST_CODE_PICK_VIDEO);
 break;
 case R.id.decode_show_btn:
- /*MediaCodecUtil1 mediaCodecUtil1 = new MediaCodecUtil1(videoFileName, mHolder.getSurface());
- mediaCodecUtil1.start();*/
-
+ MediaCodecUtil1 mediaCodecUtil1 = new MediaCodecUtil1(videoFileName, mHolder.getSurface());
+ mediaCodecUtil1.start();
 break;
 case R.id.detail_video_btn:
 new Thread(new Runnable() {
diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/MediaExtractorActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/MediaExtractorActivity.java
index 8788670..0bdf1c0 100644
--- a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/MediaExtractorActivity.java
+++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/MediaExtractorActivity.java
@@ -12,6 +12,7 @@
 import com.aserbao.androidcustomcamera.base.interfaces.IDetailCallBackListener;
 import
com.aserbao.androidcustomcamera.blocks.interfaces.ICallBackListener;
 import com.aserbao.androidcustomcamera.blocks.mediaExtractor.combineTwoVideo.CombineTwoVideos;
+import com.aserbao.androidcustomcamera.blocks.mediaExtractor.combineTwoVideo.CombineVideoAndMusic;
 import com.aserbao.androidcustomcamera.blocks.mediaExtractor.primary.FrequencyView;
 import com.aserbao.androidcustomcamera.blocks.mediaExtractor.primary.TransAacHandlerPure;
 import com.aserbao.androidcustomcamera.blocks.mediaExtractor.primary.decoder.DecoderAudioAAC2PCMPlay;
@@ -101,7 +102,11 @@ public void cuurentFrequenty(int cuurentFrequenty, double volume) {
 // decoderAudioAndGetDb.start(audioMp3Path1, MIMETYPE_AUDIO_MPEG);
 break;
 case R.id.exchange_video_and_audio:
- CombineTwoVideos.combineTwoVideos(path + "/aserbao.mp4", 0, path + "/lan.mp4", new File(path + "/aserbao.mp3"), this);
+// CombineTwoVideos.combineTwoVideos(path + "/aserbao.mp4", 0, path + "/lan.mp4", new File(path + "/aserbao.mp3"), this);
+ String inputVideo = "/storage/emulated/0/douyin.mp4";
+ String outputVideo = "/storage/emulated/0/douyinOut.mp4";
+ String inputMusic = "/storage/emulated/0/pg/.bgm/40e613e5e3695ab44b4f31e25088d7ac";
+ CombineVideoAndMusic.combineTwoVideos(inputMusic, 0, inputVideo, new File(outputVideo), this);
 break;
 case R.id.decoder_aac_and_player:
 String audioPath = Environment.getExternalStorageDirectory().getAbsolutePath() + "/aac.aac";
diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/combineTwoVideo/CombineVideoAndMusic.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/combineTwoVideo/CombineVideoAndMusic.java
new file mode 100644
index 0000000..0ab90a5
--- /dev/null
+++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/combineTwoVideo/CombineVideoAndMusic.java
@@ -0,0 +1,152 @@
+package com.aserbao.androidcustomcamera.blocks.mediaExtractor.combineTwoVideo;
+
+import android.media.MediaCodec;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.media.MediaMuxer;
+import android.util.Log;
+
+import com.aserbao.androidcustomcamera.base.interfaces.IDetailCallBackListener;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+/**
+ * Function: combines the audio of one video with the frames of another.
+ * @author aserbao
+ * @date : On 2019/1/3 6:12 PM
+ * @email: 1142803753@qq.com
+ * @project:AndroidCamera
+ * @package:com.aserbao.androidcustomcamera.blocks.mediaCodec.primary.mp3TranslateAAC
+ * @Copyright: all rights reserved by the author
+ */
+public class CombineVideoAndMusic {
+ private static final String TAG = "CombineTwoVideos";
+ /**
+ * Muxes the audio of video 1 with the frames of video 2.
+ *
+ * @param audioVideoPath the video that provides the audio
+ * @param audioStartTime start time within the audio
+ * @param frameVideoPath the video that provides the frames
+ * @param combinedVideoOutFile the combined output file
+ */
+ public static void combineTwoVideos(String audioVideoPath,
+ long audioStartTime,
+ String frameVideoPath,
+ File combinedVideoOutFile,
+ IDetailCallBackListener iDetailCallBackListener) {
+ MediaExtractor audioVideoExtractor = new MediaExtractor();
+ int mainAudioExtractorTrackIndex = -1; //audio track of the audio-providing video (a bit of a mouthful)
+ int mainAudioMuxerTrackIndex = -1; //audio track of the muxed output
+ int mainAudioMaxInputSize = 0; //maximum size of an audio sample
+
+ MediaExtractor frameVideoExtractor = new MediaExtractor();
+ int frameExtractorTrackIndex = -1; //video track
+ int frameMuxerTrackIndex = -1; //video track of the muxed output
+ int frameMaxInputSize = 0; //maximum size of a video sample
+ int frameRate = 0; //frame rate of the video
+ long frameDuration = 0;
+
+ MediaMuxer muxer = null; //muxes the audio and video together
+
+ try {
+ muxer = new MediaMuxer(combinedVideoOutFile.getPath(),
MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
+
+ audioVideoExtractor.setDataSource(audioVideoPath); //set the data source
+ //audio track info
+ int audioTrackCount = audioVideoExtractor.getTrackCount(); //number of tracks in the source
+ //loop over the tracks to find the audio track we want
+ for (int i = 0; i < audioTrackCount; i++) {
+ MediaFormat format = audioVideoExtractor.getTrackFormat(i); //media format of the track at this index
+ String mimeType = format.getString(MediaFormat.KEY_MIME); //the format's MIME type
+ if (mimeType.startsWith("audio/")) { //found the audio track
+ mainAudioExtractorTrackIndex = i;
+ mainAudioMuxerTrackIndex = muxer.addTrack(format); //add the audio track to the MediaMuxer and get the new track index
+ mainAudioMaxInputSize = format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE); //maximum size of an audio sample
+// mainAudioDuration = format.getLong(MediaFormat.KEY_DURATION);
+ }
+ }
+
+ //video track info
+ frameVideoExtractor.setDataSource(frameVideoPath); //set the data source
+ int trackCount = frameVideoExtractor.getTrackCount(); //number of tracks in the source
+ //loop over the tracks to find the video track we want
+ for (int i = 0; i < trackCount; i++) {
+ MediaFormat format = frameVideoExtractor.getTrackFormat(i); //media format of the track at this index
+ String mimeType = format.getString(MediaFormat.KEY_MIME); //the format's MIME type
+ if (mimeType.startsWith("video/")) { //found the video track
+ frameExtractorTrackIndex = i;
+ frameMuxerTrackIndex = muxer.addTrack(format); //add the video track to the MediaMuxer and get the new track index
+ frameMaxInputSize = format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE); //maximum size of a video sample
+ frameRate = format.getInteger(MediaFormat.KEY_FRAME_RATE); //frame rate of the video
+ frameDuration = format.getLong(MediaFormat.KEY_DURATION); //video duration
+ }
+ }
+
+ muxer.start(); //start muxing
+
+ audioVideoExtractor.selectTrack(mainAudioExtractorTrackIndex); //select the audio track on the audio-providing extractor
+ MediaCodec.BufferInfo audioBufferInfo = new MediaCodec.BufferInfo();
+ ByteBuffer audioByteBuffer = ByteBuffer.allocate(mainAudioMaxInputSize);
+ while (true) {
+ int readSampleSize = audioVideoExtractor.readSampleData(audioByteBuffer, 0); //read the current encoded sample into the byte buffer
+ if (readSampleSize < 0) { //no more samples, leave the loop
+ audioVideoExtractor.unselectTrack(mainAudioExtractorTrackIndex);
+ break;
+ }
+
+ long sampleTime = audioVideoExtractor.getSampleTime(); //presentation time of the current sample (in microseconds)
+
+ if (sampleTime < audioStartTime) { //skip ahead while the sample time is before the desired start time
+ audioVideoExtractor.advance(); //advance to the next sample
+ continue;
+ }
+
+ if (sampleTime > audioStartTime + frameDuration) { //stop once the sample time passes start time + video duration
+ break;
+ }
+ //fill in the sample's buffer info
+ audioBufferInfo.size = readSampleSize;
+ audioBufferInfo.offset = 0;
+ audioBufferInfo.flags = audioVideoExtractor.getSampleFlags();
+ audioBufferInfo.presentationTimeUs = sampleTime - audioStartTime;
+
+ muxer.writeSampleData(mainAudioMuxerTrackIndex, audioByteBuffer, audioBufferInfo); //write the sample to the muxer
+ audioVideoExtractor.advance(); //advance to the next sample
+ }
+
+ frameVideoExtractor.selectTrack(frameExtractorTrackIndex); //select the video track on the frame-providing extractor
+ MediaCodec.BufferInfo videoBufferInfo = new MediaCodec.BufferInfo();
+ ByteBuffer videoByteBuffer = ByteBuffer.allocate(frameMaxInputSize);
+ while (true) {
+ int readSampleSize = frameVideoExtractor.readSampleData(videoByteBuffer, 0); //read the current encoded sample into the byte buffer
+ if (readSampleSize < 0) { //no more samples, leave the loop
+ frameVideoExtractor.unselectTrack(frameExtractorTrackIndex);
+ break;
+ }
+ //fill in the sample's buffer info
+ videoBufferInfo.size = readSampleSize;
+ videoBufferInfo.offset = 0;
+ videoBufferInfo.flags = frameVideoExtractor.getSampleFlags();
+ videoBufferInfo.presentationTimeUs += 1000 * 1000 / frameRate;
+
+ muxer.writeSampleData(frameMuxerTrackIndex, videoByteBuffer, videoBufferInfo); //write the sample to the muxer
+ frameVideoExtractor.advance(); //advance to the next sample
+ }
+ } catch (IOException e) {
+
iDetailCallBackListener.failed(e);
+ Log.e(TAG, "combineTwoVideos: ", e);
+ } finally {
+ //release resources
+ audioVideoExtractor.release();
+ frameVideoExtractor.release();
+ if (muxer != null) {
+ muxer.release();
+ }
+ iDetailCallBackListener.success();
+ Log.e(TAG, "combineTwoVideos: " );
+ }
+ }
+
+}
diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/utils/CheckPermissionUtil.java b/app/src/main/java/com/aserbao/androidcustomcamera/utils/CheckPermissionUtil.java
new file mode 100755
index 0000000..f4ccf50
--- /dev/null
+++ b/app/src/main/java/com/aserbao/androidcustomcamera/utils/CheckPermissionUtil.java
@@ -0,0 +1,259 @@
+package com.aserbao.androidcustomcamera.utils;
+
+import android.Manifest;
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.location.LocationManager;
+import android.media.AudioFormat;
+import android.media.AudioRecord;
+import android.media.MediaRecorder;
+import android.os.Build;
+import android.support.v4.content.ContextCompat;
+import android.util.Log;
+
+import static com.aserbao.androidcustomcamera.base.MyApplication.getContext;
+
+
+/**
+ */
+
+public class CheckPermissionUtil {
+
+ private static String TAG = "CheckPermissionUtil";
+
+ // audio source
+ public static int audioSource = MediaRecorder.AudioSource.MIC;
+ // audio sample rate: 44100 Hz is the current standard, but some devices still support 22050, 16000 and 11025
+ public static int sampleRateInHz = 44100;
+ // recording channel configuration: CHANNEL_IN_STEREO is stereo, CHANNEL_CONFIGURATION_MONO is mono
+ public static int channelConfig = AudioFormat.CHANNEL_IN_STEREO;
+ // audio data format: PCM 16 bits per sample, guaranteed to be supported; PCM 8 bits per sample is not necessarily supported
+ public static int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
+ // buffer size in bytes
+ public static int bufferSizeInBytes = 0;
+
+ /**
+ * @return true if the location permission is granted and location is switched on
+ */
+ public static boolean isLocationPermGrantedAndOpen(){
+ boolean result = false;
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ Log.i(TAG,"isLocationPermGrantedAndOpen()--- Build.VERSION.SDK_INT >= Build.VERSION_CODES.M ");
+ if (ContextCompat.checkSelfPermission(getContext(), Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED ||
+ ContextCompat.checkSelfPermission(getContext(), Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED )
+ {
+ Log.i(TAG,"isLocationPermGrantedAndOpen()--- result = false");
+ result = false;
+ } else {
+ if(isOPenGPS(getContext())){
+ Log.i(TAG,"isLocationPermGrantedAndOpen()--- result = true");
+ result = true;
+ }
+ }
+ } else {
+ Log.i(TAG,"isLocationPermGrantedAndOpen()--- Build.VERSION.SDK_INT < Build.VERSION_CODES.M ");
+ result = true;
+ }
+ return result;
+ }
+
+ /**
+ * @return true if the camera-related permissions are granted
+ */
+ public static boolean isCameraGranted(){
+ boolean result = false;
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ Log.i(TAG,"isCameraGranted()--- Build.VERSION.SDK_INT >= Build.VERSION_CODES.M ");
+ if (ContextCompat.checkSelfPermission(getContext(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED ||
+ ContextCompat.checkSelfPermission(getContext(), Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED ||
+ ContextCompat.checkSelfPermission(getContext(), Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED ||
+ ContextCompat.checkSelfPermission(getContext(), Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED)
+ {
+ Log.i(TAG,"isCameraGranted()--- result = false");
+ result = false;
+ } else {
+ result = true;
+ }
+ } else {
+ Log.i(TAG,"isCameraGranted()---
Build.VERSION.SDK_INT < Build.VERSION_CODES.M "); + result = true; + } + return result; + } + + /** + * 只用这个方法,一些华为手机的地理权限打开与否,不能准确判断出,需要上面的方法 isLocationPermGrantedAndOpen + * @return true 已经授权 获取地理位置权限 + */ + public static boolean isLocationPermGranted(){ + boolean result = false; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + if (ContextCompat.checkSelfPermission(getContext(), Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED || + ContextCompat.checkSelfPermission(getContext(), Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED ) + { + result = false; + } else { + result = true; + } + } else { + result = true; + } + return result; + } + + /** + * @return true + */ + public static boolean isStoragePermGranted(){ + boolean result = false; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + if (ContextCompat.checkSelfPermission(getContext(), Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED || + ContextCompat.checkSelfPermission(getContext(), Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED ) + { + result = false; + } else { + result = true; + } + } else { + result = true; + } + return result; + } + + /** + * Function:判断录音权限,兼容android6.0以下以及以上系统 + */ + + /** + * 判断是是否有录音权限 + */ + public static boolean isHasPermission(final Context context){ + bufferSizeInBytes = 0; + bufferSizeInBytes = AudioRecord.getMinBufferSize(sampleRateInHz, + channelConfig, audioFormat); + + AudioRecord audioRecord = null; + try { + // 美图手机这里会抛 IllegalArgumentException + // https://fabric.io/getremark/android/apps/com.getremark.spot/issues/5b719a816007d59fcdac62f0?time=last-seven-days + audioRecord = new AudioRecord(audioSource, sampleRateInHz, + channelConfig, audioFormat, bufferSizeInBytes); + } catch (Exception e) { + e.printStackTrace(); + return false; + } + + //开始录制音频 + try{ + // 防止某些手机崩溃,例如联想 + audioRecord.startRecording(); + }catch (IllegalStateException e){ + e.printStackTrace(); + } + /** + * 根据开始录音判断是否有录音权限 + */ + if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) { + return false; + } + audioRecord.stop(); + audioRecord.release(); + audioRecord = null; + + return true; + } + + /** + * @return true 已经授权 获取照相机权限 + */ + public static boolean isCameraPermissionGranted(){ + return isPermissionGranted(Manifest.permission.CAMERA); + } + + public static boolean isReadStoragePermissionsGranted(){ + return isPermissionGranted(Manifest.permission.READ_EXTERNAL_STORAGE); + } + + public static boolean isWriteStoragePermissionsGranted(){ + return isPermissionGranted(Manifest.permission.WRITE_EXTERNAL_STORAGE); + } + + public static boolean isPermissionGranted(String permission) { + boolean isRecorder = false; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + if (ContextCompat.checkSelfPermission(getContext(), permission) != PackageManager.PERMISSION_GRANTED) { + isRecorder = false; + } else { + isRecorder = true; + } + } else { + isRecorder = true; + } + return isRecorder; + } + + public static boolean isRecordAudioPermissionsGranted(){ + return isPermissionGranted(Manifest.permission.RECORD_AUDIO); + } + + public static boolean isWriteSettingPermissionsGranted(){ + boolean isRecorder = false; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + if (ContextCompat.checkSelfPermission(getContext(), Manifest.permission.WRITE_SETTINGS) != PackageManager.PERMISSION_GRANTED) { + isRecorder = false; + } else { + isRecorder = true; + } + } else { + 
isRecorder = true;
+ }
+ } else {
+ isRecorder = true;
+ }
+ return isRecorder;
+ }
+
+ public static boolean isContactsPermissionGranted() {
+ boolean result = false;
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ if (ContextCompat.checkSelfPermission(getContext(), Manifest.permission.READ_CONTACTS) != PackageManager.PERMISSION_GRANTED) {
+ result = false;
+ } else {
+ result = true;
+ }
+ } else {
+ result = true;
+ }
+ Log.i(TAG, "isContactsPermissionGranted()--- result = " + result);
+ return result;
+ }
+
+
+ public static boolean isReadSmsPermissionGranted() {
+ boolean result;
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ if (ContextCompat.checkSelfPermission(getContext(), Manifest.permission.READ_SMS) != PackageManager.PERMISSION_GRANTED) {
+ result = false;
+ } else {
+ result = true;
+ }
+ }else {
+ result = false;
+ }
+ return result;
+
+ }
+
+
+ public static boolean isOPenGPS(final Context context) {
+ LocationManager locationManager
+ = (LocationManager) context.getSystemService(Context.LOCATION_SERVICE);
+ // GPS satellite positioning: accurate down to street level (uses the 24-satellite constellation; accurate and fast outdoors and in open areas)
+ boolean gps = locationManager.isProviderEnabled(LocationManager.GPS_PROVIDER);
+ Log.i("isOPenGPS()","gps = "+gps);
+ // position from WLAN or the mobile network (3G/2G), a.k.a. AGPS (assisted GPS); mainly used indoors or where cover is dense (building clusters, thick forest, etc.)
+ boolean network = locationManager.isProviderEnabled(LocationManager.NETWORK_PROVIDER);
+ Log.i("isOPenGPS()","network = "+network);
+ if (gps || network) {
+ return true;
+ }
+ return false;
+ }
+}
diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/RecorderActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/RecorderActivity.java
index 8d9f3a7..2b6aa78 100644
--- a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/RecorderActivity.java
+++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/RecorderActivity.java
@@ -19,6 +19,7 @@
 import android.widget.Toast;
 import com.aserbao.androidcustomcamera.R;
+import com.aserbao.androidcustomcamera.WelcomeActivity;
 import com.aserbao.androidcustomcamera.base.MyApplication;
 import com.aserbao.androidcustomcamera.base.activity.BaseActivity;
 import com.aserbao.androidcustomcamera.base.pop.PopupManager;
@@ -36,6 +37,8 @@
 import com.aserbao.androidcustomcamera.whole.record.ui.SlideGpuFilterGroup;
 import com.aserbao.androidcustomcamera.whole.videoPlayer.VideoPlayerActivity2;
+import org.jetbrains.annotations.NotNull;
+
 import java.lang.ref.WeakReference;
 import java.util.ArrayList;
 import java.util.concurrent.ExecutorService;
@@ -371,6 +374,8 @@ public void run() {
 }
 });
 }
+
+
 private static class MyHandler extends Handler {
 private WeakReference mVideoRecordActivity;
diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/beans/MediaObject.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/beans/MediaObject.java
index 8f36614..5f3d382 100644
--- a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/beans/MediaObject.java
+++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/beans/MediaObject.java
@@ -103,8 +103,6 @@ public void stopRecord(Context context, MediaObject mediaObject){
 }
 public MediaPart getCurrentPart() {
- /*if (mMediaPart != null)
- return mMediaPart;*/
 if (mMediaList != null && mMediaList.size() > 0)
 mMediaPart = mMediaList.get(mMediaList.size() - 1);
 return mMediaPart;
diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/GifDecoder.java
b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/GifDecoder.java
index f6ab71c..bc672b0 100644
--- a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/GifDecoder.java
+++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/GifDecoder.java
@@ -19,7 +19,6 @@ public GifFrame(Bitmap im, int del) {
 image = im;
 delay = del;
 }
-
 }
 // to define some error type
diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/WaterMarkFilter.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/WaterMarkFilter.java
index daa60d1..94c48db 100644
--- a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/WaterMarkFilter.java
+++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/filters/WaterMarkFilter.java
@@ -125,7 +125,6 @@ private void createTexture() {
 }
 //rotate the frame with a matrix
 // MatrixUtils.flip(mFilter.getMatrix(),false,true);
-
 mFilter.setTextureId(textures[0]);
 }
 }
diff --git a/app/src/main/res/drawable-xxhdpi/welcome.jpg b/app/src/main/res/drawable-xxhdpi/welcome.jpg
new file mode 100644
index 0000000..47c3b65
Binary files /dev/null and b/app/src/main/res/drawable-xxhdpi/welcome.jpg differ
diff --git a/app/src/main/res/drawable/bg.png b/app/src/main/res/drawable/bg.png
new file mode 100644
index 0000000..3e7b722
Binary files /dev/null and b/app/src/main/res/drawable/bg.png differ
diff --git a/app/src/main/res/layout/activity_welcome.xml b/app/src/main/res/layout/activity_welcome.xml
new file mode 100644
index 0000000..c5b4602
--- /dev/null
+++ b/app/src/main/res/layout/activity_welcome.xml
@@ -0,0 +1,24 @@
+ + + + + + + \ No newline at end of file
diff --git a/app/src/main/res/mipmap-hdpi/ic_launcher.png b/app/src/main/res/mipmap-hdpi/ic_launcher.png
deleted file mode 100644
index 14cd7c6..0000000
Binary files a/app/src/main/res/mipmap-hdpi/ic_launcher.png and /dev/null differ
diff --git a/app/src/main/res/mipmap-mdpi/ic_launcher.png b/app/src/main/res/mipmap-mdpi/ic_launcher.png
deleted file mode 100644
index 7eafda5..0000000
Binary files a/app/src/main/res/mipmap-mdpi/ic_launcher.png and /dev/null differ
diff --git a/app/src/main/res/mipmap-xhdpi/ic_launcher.png b/app/src/main/res/mipmap-xhdpi/ic_launcher.png
deleted file mode 100644
index b1b8e74..0000000
Binary files a/app/src/main/res/mipmap-xhdpi/ic_launcher.png and /dev/null differ
diff --git a/app/src/main/res/mipmap-xxhdpi/ic_lanuch.png b/app/src/main/res/mipmap-xxhdpi/ic_lanuch.png
new file mode 100644
index 0000000..0a29b09
Binary files /dev/null and b/app/src/main/res/mipmap-xxhdpi/ic_lanuch.png differ
diff --git a/app/src/main/res/values/strings.xml b/app/src/main/res/values/strings.xml
index 82d618e..8dc6939 100644
--- a/app/src/main/res/values/strings.xml
+++ b/app/src/main/res/values/strings.xml
@@ -1,5 +1,5 @@
- AndroidCustomCamera
+ ACamera
 android.support.design.widget.AppBarLayout$ScrollingViewBehavior
 点击输入文字
diff --git a/app/src/test/java/com/aserbao/androidcustomcamera/ExampleUnitTest.java b/app/src/test/java/com/aserbao/androidcustomcamera/ExampleUnitTest.java
deleted file mode 100644
index 3d0c920..0000000
--- a/app/src/test/java/com/aserbao/androidcustomcamera/ExampleUnitTest.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package com.aserbao.androidcustomcamera;
-
-import org.junit.Test;
-
-import static org.junit.Assert.*;
-
-/**
- * Example local unit test, which will execute on the development machine (host).
- * - * @see Testing documentation - */ -public class ExampleUnitTest { - @Test - public void addition_isCorrect() throws Exception { - assertEquals(4, 2 + 2); - } -} \ No newline at end of file diff --git a/gradle.properties b/gradle.properties deleted file mode 100644 index 55cf1d4..0000000 --- a/gradle.properties +++ /dev/null @@ -1,19 +0,0 @@ -## Project-wide Gradle settings. -# -# For more details on how to configure your build environment visit -# http://www.gradle.org/docs/current/userguide/build_environment.html -# -# Specifies the JVM arguments used for the daemon process. -# The setting is particularly useful for tweaking memory settings. -# Default value: -Xmx1024m -XX:MaxPermSize=256m -# org.gradle.jvmargs=-Xmx2048m -XX:MaxPermSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8 -# -# When configured, Gradle will run in incubating parallel mode. -# This option should only be used with decoupled projects. More details, visit -# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects -# org.gradle.parallel=true -#Fri May 04 17:34:37 CST 2018 -systemProp.http.proxyHost=127.0.0.1 -systemProp.http.nonProxyHosts=localhost, 127.0.0.1 -org.gradle.jvmargs=-Xmx1536m -systemProp.http.proxyPort=8087 diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar deleted file mode 100644 index 13372ae..0000000 Binary files a/gradle/wrapper/gradle-wrapper.jar and /dev/null differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties deleted file mode 100644 index 83a47b1..0000000 --- a/gradle/wrapper/gradle-wrapper.properties +++ /dev/null @@ -1,6 +0,0 @@ -#Tue Jan 14 20:55:13 CST 2020 -distributionBase=GRADLE_USER_HOME -distributionPath=wrapper/dists -zipStoreBase=GRADLE_USER_HOME -zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-5.4.1-all.zip diff --git a/gradlew b/gradlew index 2f11c14..01a7181 100644 --- a/gradlew +++ b/gradlew @@ -8,7 +8,6 @@ # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. DEFAULT_JVM_OPTS="" - APP_NAME="Gradle" APP_BASE_NAME=`basename "$0"`
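For reference, the MediaExtractor → MediaMuxer pattern that CombineVideoAndMusic is built on reduces to the sketch below. This is a minimal illustration, not code from this repo: the class name TrackCopy and all paths are hypothetical, and unlike the video loop above (which synthesizes presentation times as 1000 * 1000 / frameRate increments), this version keeps the extractor's own sample timestamps, which stays correct for variable-frame-rate input.

```java
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaMuxer;

import java.nio.ByteBuffer;

public final class TrackCopy {
    // Copies the first track whose MIME type starts with `mimePrefix`
    // ("audio/" or "video/") from srcPath into dstPath without re-encoding.
    public static void copyTrack(String srcPath, String dstPath, String mimePrefix) throws Exception {
        MediaExtractor extractor = new MediaExtractor();
        MediaMuxer muxer = new MediaMuxer(dstPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        try {
            extractor.setDataSource(srcPath);
            int dstTrack = -1;
            int maxInputSize = 1 << 20; // 1 MiB fallback if KEY_MAX_INPUT_SIZE is absent
            for (int i = 0; i < extractor.getTrackCount(); i++) {
                MediaFormat format = extractor.getTrackFormat(i);
                if (format.getString(MediaFormat.KEY_MIME).startsWith(mimePrefix)) {
                    extractor.selectTrack(i);
                    dstTrack = muxer.addTrack(format);
                    if (format.containsKey(MediaFormat.KEY_MAX_INPUT_SIZE)) {
                        maxInputSize = format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
                    }
                    break;
                }
            }
            if (dstTrack < 0) throw new IllegalArgumentException("no " + mimePrefix + " track");
            muxer.start();
            ByteBuffer buffer = ByteBuffer.allocate(maxInputSize);
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            while (true) {
                info.size = extractor.readSampleData(buffer, 0);
                if (info.size < 0) break;                            // end of stream
                info.offset = 0;
                info.presentationTimeUs = extractor.getSampleTime(); // real timestamps, in microseconds
                info.flags = extractor.getSampleFlags();
                muxer.writeSampleData(dstTrack, buffer, info);
                extractor.advance();
            }
            muxer.stop();
        } finally {
            extractor.release();
            muxer.release();
        }
    }
}
```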
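The addMusicForMp4 helper added to FFmpegUtils builds an amix filter graph. A usage sketch with hypothetical paths follows, assuming EpMedia's OnEditorListener with its onSuccess/onFailure/onProgress callbacks (the same interface FFmpegActivity implements); the comment spells out the command line the CmdList roughly expands to:

```java
// Hypothetical paths, for illustration only.
String video = "/sdcard/in.mp4";
String music = "/sdcard/bgm.mp3";
String out = "/sdcard/out.mp4";

// addMusicForMp4(video, music, 0.6f, 1.0f, out, ...) expands to roughly:
// ffmpeg -y -i /sdcard/in.mp4 -i /sdcard/bgm.mp3
//        -filter_complex "[0:a]volume=0.6[a0];[1:a]volume=1.0[a1];[a0][a1]amix=inputs=2:duration=first[aout]"
//        -map [aout] -ac 2 -map 0:v:0 /sdcard/out.mp4
// duration=first keeps the output as long as the first input (the video). Note that the
// "-c:v copy" pair is commented out in the method, so the video stream gets re-encoded.
FFmpegUtils.addMusicForMp4(video, music, 0.6f, 1.0f, out, new OnEditorListener() {
    @Override public void onSuccess() { /* mixed file written to `out` */ }
    @Override public void onFailure() { /* command failed */ }
    @Override public void onProgress(float progress) { /* 0..1 */ }
});
```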
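The BIT_RATE / FRAMES_PER_SECOND / IFRAME_INTERVAL constants changed in PrimaryMediaCodecActivity typically feed a MediaFormat like the one below; lowering IFRAME_INTERVAL to 1 requests a key frame every second, which makes the stream easier to seek and clip at some cost in compression efficiency. This is a sketch under the assumption that the activity uses the usual Surface-input encoder path, not the activity's actual code:

```java
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.view.Surface;

import java.io.IOException;

public class EncoderSetup {
    // Mirrors the constants in PrimaryMediaCodecActivity after this change.
    private static final String MIME_TYPE = "video/avc";
    private static final int WIDTH = 720;
    private static final int HEIGHT = 1280;
    private static final int BIT_RATE = 3000000;     // 3 Mbps
    private static final int FRAMES_PER_SECOND = 30; // declared frame rate
    private static final int IFRAME_INTERVAL = 1;    // one key frame per second

    public static MediaCodec createEncoder() throws IOException {
        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, WIDTH, HEIGHT);
        // Surface input: frames are rendered into the encoder's input surface.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAMES_PER_SECOND);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);

        MediaCodec encoder = MediaCodec.createEncoderByType(MIME_TYPE);
        encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        Surface inputSurface = encoder.createInputSurface(); // draw each frame here, then drain the encoder
        encoder.start();
        return encoder;
    }
}
```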
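CheckPermissionUtil only *checks* permissions; on Android 6.0+ any missing ones still have to be requested from an Activity. A minimal companion sketch follows; the class name, method names and request code here are illustrative, not part of the repo:

```java
import android.Manifest;
import android.app.Activity;
import android.content.pm.PackageManager;
import android.support.v4.app.ActivityCompat;

public class PermissionRequester {
    // Arbitrary request code, chosen here for illustration.
    public static final int REQUEST_CAMERA_BUNDLE = 42;

    // Ask for everything isCameraGranted() checks, if anything is missing.
    public static void ensureCameraPermissions(Activity activity) {
        if (!CheckPermissionUtil.isCameraGranted()) {
            ActivityCompat.requestPermissions(activity, new String[]{
                    Manifest.permission.CAMERA,
                    Manifest.permission.RECORD_AUDIO,
                    Manifest.permission.WRITE_EXTERNAL_STORAGE,
                    Manifest.permission.READ_EXTERNAL_STORAGE
            }, REQUEST_CAMERA_BUNDLE);
        }
    }

    // Helper for the Activity's onRequestPermissionsResult override.
    public static boolean allGranted(int[] grantResults) {
        if (grantResults.length == 0) return false;
        for (int result : grantResults) {
            if (result != PackageManager.PERMISSION_GRANTED) return false;
        }
        return true;
    }
}
```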