+ * There are two ways to connect an encoder to a decoder. The first is to pass the output + * buffers from the encoder to the input buffers of the decoder, using ByteBuffer.put() to + * copy the bytes. With this approach, we need to watch for BUFFER_FLAG_CODEC_CONFIG, and + * if seen we use format.setByteBuffer("csd-0") followed by decoder.configure() to pass the + * meta-data through. + *
+ * The second way is to write the buffers to a file and then stream it back in. With this + * approach it is necessary to use a MediaExtractor to retrieve the format info and skip past + * the meta-data. + *
+ * The former can be done entirely in memory, but requires that the encoder and decoder + * operate simultaneously (the I/O buffers are owned by MediaCodec). The latter requires + * writing to disk, because MediaExtractor can only accept a file or URL as a source. + *
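+ * For reference, the file-based alternative would look roughly like this (a sketch
+ * only, not used by this test; encodedFilePath and surface are placeholders):
+ * <pre>
+ * MediaExtractor extractor = new MediaExtractor();
+ * extractor.setDataSource(encodedFilePath);
+ * MediaFormat format = extractor.getTrackFormat(0); // carries csd-0 for configure()
+ * extractor.selectTrack(0);
+ * decoder.configure(format, surface, null, 0);
+ * </pre>
+ *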
+ * The direct encoder-to-decoder approach isn't currently tested elsewhere in this CTS
+ * package, so we use that here.
+ *
+ * @see <a href="https://android.googlesource.com/platform/cts/+/b04c81bfc2761b21293f9c095da38c757e570fd3/tests/tests/media/src/android/media/cts/EncodeDecodeTest.java">the original CTS source</a>
+ */
+public class EncodeDecodeTest extends AndroidTestCase {
+ private static final String TAG = "EncodeDecodeTest";
+ private static final boolean VERBOSE = false; // lots of logging
+ private static final boolean DEBUG_SAVE_FILE = false; // save copy of encoded movie
+ private static final String DEBUG_FILE_NAME_BASE = "/storage/emulated/0/";
+ // parameters for the encoder
+ private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
+ private static final int BIT_RATE = 1000000; // 1Mbps
+ private static final int FRAME_RATE = 15; // 15fps
+ private static final int IFRAME_INTERVAL = 10; // 10 seconds between I-frames
+ // movie length, in frames
+ private static final int NUM_FRAMES = 30; // two seconds of video
+ private static final int TEST_Y = 240; // YUV values for colored rect
+ private static final int TEST_U = 220;
+ private static final int TEST_V = 200;
+ private static final int TEST_R0 = 0; // RGB equivalent of YUV {0,0,0}
+ private static final int TEST_G0 = 136;
+ private static final int TEST_B0 = 0;
+ private static final int TEST_R1 = 255; // RGB equivalent of YUV {240,220,200}
+ private static final int TEST_G1 = 166;
+ private static final int TEST_B1 = 255;
+ // size of a frame, in pixels
+ private int mWidth = -1;
+ private int mHeight = -1;
+ /**
+ * Tests streaming of AVC video through the encoder and decoder. Data is encoded from
+ * a series of byte[] buffers and decoded into ByteBuffers. The output is checked for
+ * validity.
+ */
+ public void testEncodeDecodeVideoFromBufferToBufferQCIF() throws Exception {
+ setSize(176, 144);
+ testEncodeDecodeVideoFromBuffer(false);
+ }
+ public void testEncodeDecodeVideoFromBufferToBufferQVGA() throws Exception {
+ setSize(320, 240);
+ testEncodeDecodeVideoFromBuffer(false);
+ }
+ public void testEncodeDecodeVideoFromBufferToBuffer720p() throws Exception {
+ setSize(1280, 720);
+ testEncodeDecodeVideoFromBuffer(false);
+ }
+ /**
+ * Tests streaming of AVC video through the encoder and decoder. Data is encoded from
+ * a series of byte[] buffers and decoded into Surfaces. The output is checked for
+ * validity but some frames may be dropped.
+ *
+ * Because of the way SurfaceTexture.OnFrameAvailableListener works, we need to run this
+ * test on a thread that doesn't have a Looper configured. If we don't, the test will
+ * pass, but we won't actually test the output because we'll never receive the "frame
+ * available" notifications. The CTS test framework seems to be configuring a Looper on
+ * the test thread, so we have to hand control off to a new thread for the duration of
+ * the test.
+ */
+ public void testEncodeDecodeVideoFromBufferToSurfaceQCIF() throws Throwable {
+ setSize(176, 144);
+ BufferToSurfaceWrapper.runTest(this);
+ }
+ public void testEncodeDecodeVideoFromBufferToSurfaceQVGA() throws Throwable {
+ setSize(320, 240);
+ BufferToSurfaceWrapper.runTest(this);
+ }
+ public void testEncodeDecodeVideoFromBufferToSurface720p() throws Throwable {
+ setSize(1280, 720);
+ BufferToSurfaceWrapper.runTest(this);
+ }
+ /** Wraps testEncodeDecodeVideoFromBuffer(true) */
+ private static class BufferToSurfaceWrapper implements Runnable {
+ private Throwable mThrowable;
+ private EncodeDecodeTest mTest;
+ private BufferToSurfaceWrapper(EncodeDecodeTest test) {
+ mTest = test;
+ }
+ public void run() {
+ try {
+ mTest.testEncodeDecodeVideoFromBuffer(true);
+ } catch (Throwable th) {
+ mThrowable = th;
+ }
+ }
+ /**
+ * Entry point.
+ */
+ public static void runTest(EncodeDecodeTest obj) throws Throwable {
+ BufferToSurfaceWrapper wrapper = new BufferToSurfaceWrapper(obj);
+ Thread th = new Thread(wrapper, "codec test");
+ th.start();
+ th.join();
+ if (wrapper.mThrowable != null) {
+ throw wrapper.mThrowable;
+ }
+ }
+ }
+ /**
+ * Sets the desired frame size.
+ */
+ private void setSize(int width, int height) {
+ if ((width % 16) != 0 || (height % 16) != 0) {
+ Log.w(TAG, "WARNING: width or height not multiple of 16");
+ }
+ mWidth = width;
+ mHeight = height;
+ }
+ /**
+ * Tests encoding and subsequently decoding video from frames generated into a buffer.
+ *
+ * We encode several frames of a video test pattern using MediaCodec, then decode the + * output with MediaCodec and do some simple checks. + *
+ * See http://b.android.com/37769 for a discussion of input format pitfalls. + */ + private void testEncodeDecodeVideoFromBuffer(boolean toSurface) throws Exception { + MediaCodecInfo codecInfo = selectCodec(MIME_TYPE); + if (codecInfo == null) { + // Don't fail CTS if they don't have an AVC codec (not here, anyway). + Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE); + return; + } + if (VERBOSE) Log.d(TAG, "found codec: " + codecInfo.getName()); + int colorFormat = selectColorFormat(codecInfo, MIME_TYPE); + if (VERBOSE) Log.d(TAG, "found colorFormat: " + colorFormat); + // We avoid the device-specific limitations on width and height by using values that + // are multiples of 16, which all tested devices seem to be able to handle. + MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight); + // Set some properties. Failing to specify some of these can cause the MediaCodec + // configure() call to throw an unhelpful exception. + format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat); + format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE); + format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE); + format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL); + if (VERBOSE) Log.d(TAG, "format: " + format); + // Create a MediaCodec for the desired codec, then configure it as an encoder with + // our desired properties. + MediaCodec encoder = MediaCodec.createByCodecName(codecInfo.getName()); + encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); + encoder.start(); + // Create a MediaCodec for the decoder, just based on the MIME type. The various + // format details will be passed through the csd-0 meta-data later on. + MediaCodec decoder = MediaCodec.createDecoderByType(MIME_TYPE); + try { + encodeDecodeVideoFromBuffer(encoder, colorFormat, decoder, toSurface); + } finally { + if (VERBOSE) Log.d(TAG, "releasing codecs"); + encoder.stop(); + decoder.stop(); + encoder.release(); + decoder.release(); + } + } + /** + * Returns the first codec capable of encoding the specified MIME type, or null if no + * match was found. + */ + private static MediaCodecInfo selectCodec(String mimeType) { + int numCodecs = MediaCodecList.getCodecCount(); + for (int i = 0; i < numCodecs; i++) { + MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i); + if (!codecInfo.isEncoder()) { + continue; + } + String[] types = codecInfo.getSupportedTypes(); + for (int j = 0; j < types.length; j++) { + if (types[j].equalsIgnoreCase(mimeType)) { + return codecInfo; + } + } + } + return null; + } + /** + * Returns a color format that is supported by the codec and by this test code. If no + * match is found, this throws a test failure -- the set of formats known to the test + * should be expanded for new platforms. 
+ */ + private static int selectColorFormat(MediaCodecInfo codecInfo, String mimeType) { + MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType); + for (int i = 0; i < capabilities.colorFormats.length; i++) { + int colorFormat = capabilities.colorFormats[i]; + switch (colorFormat) { + // these are the formats we know how to handle for this test + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar: + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar: + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar: + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar: + case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar: + return colorFormat; + default: + break; + } + } + fail("couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType); + return 0; // not reached + } + /** + * Does the actual work for encoding frames from buffers of byte[]. + */ + private void encodeDecodeVideoFromBuffer(MediaCodec encoder, int encoderColorFormat, + MediaCodec decoder, boolean toSurface) { + final int TIMEOUT_USEC = 10000; + ByteBuffer[] encoderInputBuffers = encoder.getInputBuffers(); + ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers(); + ByteBuffer[] decoderInputBuffers = null; + ByteBuffer[] decoderOutputBuffers = null; + MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); + int decoderColorFormat = -12345; // init to invalid value + int generateIndex = 0; + int checkIndex = 0; + boolean decoderConfigured = false; + SurfaceStuff surfaceStuff = null; + // The size of a frame of video data, in the formats we handle, is stride*sliceHeight + // for Y, and (stride/2)*(sliceHeight/2) for each of the Cb and Cr channels. Application + // of algebra and assuming that stride==width and sliceHeight==height yields: + byte[] frameData = new byte[mWidth * mHeight * 3 / 2]; + // Just out of curiosity. + long rawSize = 0; + long encodedSize = 0; + // Save a copy to disk. Useful for debugging the test. + FileOutputStream outputStream = null; + if (DEBUG_SAVE_FILE) { + String fileName = DEBUG_FILE_NAME_BASE + mWidth + "x" + mHeight + ".mp4"; + try { + outputStream = new FileOutputStream(fileName); + Log.d(TAG, "encoded output will be saved as " + fileName); + } catch (IOException ioe) { + Log.w(TAG, "Unable to create debug output file " + fileName); + throw new RuntimeException(ioe); + } + } + if (toSurface) { + surfaceStuff = new SurfaceStuff(mWidth, mHeight); + } + // Loop until the output side is done. + boolean inputDone = false; + boolean encoderDone = false; + boolean outputDone = false; + while (!outputDone) { + if (VERBOSE) Log.d(TAG, "loop"); + // If we're not done submitting frames, generate a new one and submit it. By + // doing this on every loop we're working to ensure that the encoder always has + // work to do. + // + // We don't really want a timeout here, but sometimes there's a delay opening + // the encoder device, so a short timeout can keep us from spinning hard. + if (!inputDone) { + int inputBufIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC); + if (VERBOSE) Log.d(TAG, "inputBufIndex=" + inputBufIndex); + if (inputBufIndex >= 0) { + long ptsUsec = generateIndex * 1000000 / FRAME_RATE; + if (generateIndex == NUM_FRAMES) { + // Send an empty frame with the end-of-stream flag set. If we set EOS + // on a frame with data, that frame data will be ignored, and the + // output will be short one frame. 
+ encoder.queueInputBuffer(inputBufIndex, 0, 0, ptsUsec, + MediaCodec.BUFFER_FLAG_END_OF_STREAM); + inputDone = true; + if (VERBOSE) Log.d(TAG, "sent input EOS (with zero-length frame)"); + } else { + generateFrame(generateIndex, encoderColorFormat, frameData); + ByteBuffer inputBuf = encoderInputBuffers[inputBufIndex]; + // the buffer should be sized to hold one full frame + assertTrue(inputBuf.capacity() >= frameData.length); + inputBuf.clear(); + inputBuf.put(frameData); + encoder.queueInputBuffer(inputBufIndex, 0, frameData.length, ptsUsec, 0); + if (VERBOSE) Log.d(TAG, "submitted frame " + generateIndex + " to enc"); + } + generateIndex++; + } else { + // either all in use, or we timed out during initial setup + if (VERBOSE) Log.d(TAG, "input buffer not available"); + } + } + // Check for output from the encoder. If there's no output yet, we either need to + // provide more input, or we need to wait for the encoder to work its magic. We + // can't actually tell which is the case, so if we can't get an output buffer right + // away we loop around and see if it wants more input. + // + // Once we get EOS from the encoder, we don't need to do this anymore. + if (!encoderDone) { + int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC); + if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) { + // no output available yet + if (VERBOSE) Log.d(TAG, "no output from encoder available"); + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { + // not expected for an encoder + encoderOutputBuffers = encoder.getOutputBuffers(); + if (VERBOSE) Log.d(TAG, "encoder output buffers changed"); + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { + // not expected for an encoder + MediaFormat newFormat = encoder.getOutputFormat(); + if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat); + } else if (encoderStatus < 0) { + fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus); + } else { // encoderStatus >= 0 + ByteBuffer encodedData = encoderOutputBuffers[encoderStatus]; + if (encodedData == null) { + fail("encoderOutputBuffer " + encoderStatus + " was null"); + } + // It's usually necessary to adjust the ByteBuffer values to match BufferInfo. + encodedData.position(info.offset); + encodedData.limit(info.offset + info.size); + encodedSize += info.size; + if (outputStream != null) { + byte[] data = new byte[info.size]; + encodedData.get(data); + encodedData.position(info.offset); + try { + outputStream.write(data); + } catch (IOException ioe) { + Log.w(TAG, "failed writing debug data to file"); + throw new RuntimeException(ioe); + } + } + if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { + // Codec config info. Only expected on first packet. + assertFalse(decoderConfigured); + MediaFormat format = + MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight); + format.setByteBuffer("csd-0", encodedData); + decoder.configure(format, toSurface ? surfaceStuff.getSurface() : null, + null, 0); + decoder.start(); + decoderInputBuffers = decoder.getInputBuffers(); + decoderOutputBuffers = decoder.getOutputBuffers(); + decoderConfigured = true; + if (VERBOSE) Log.d(TAG, "decoder configured (" + info.size + " bytes)"); + } else { + // Get a decoder input buffer, blocking until it's available. 
+ assertTrue(decoderConfigured);
+ int inputBufIndex = decoder.dequeueInputBuffer(-1);
+ ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
+ inputBuf.clear();
+ inputBuf.put(encodedData);
+ decoder.queueInputBuffer(inputBufIndex, 0, info.size, info.presentationTimeUs,
+ info.flags);
+ encoderDone = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
+ if (VERBOSE) Log.d(TAG, "passed " + info.size + " bytes to decoder"
+ + (encoderDone ? " (EOS)" : ""));
+ }
+ encoder.releaseOutputBuffer(encoderStatus, false);
+ }
+ }
+ // Check for output from the decoder. We want to do this on every loop to avoid
+ // the possibility of stalling the pipeline. We use a short timeout to avoid
+ // burning CPU if the decoder is hard at work but the next frame isn't quite ready.
+ //
+ // If we're decoding to a Surface, we'll get notified here as usual but the
+ // ByteBuffer references will be null. The data is sent to the Surface instead.
+ if (decoderConfigured) {
+ int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
+ if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
+ // no output available yet
+ if (VERBOSE) Log.d(TAG, "no output from decoder available");
+ } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
+ if (VERBOSE) Log.d(TAG, "decoder output buffers changed");
+ decoderOutputBuffers = decoder.getOutputBuffers();
+ } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+ // this happens before the first frame is returned
+ MediaFormat decoderOutputFormat = decoder.getOutputFormat();
+ decoderColorFormat =
+ decoderOutputFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
+ if (VERBOSE) Log.d(TAG, "decoder output format changed: "
+ + decoderOutputFormat);
+ } else if (decoderStatus < 0) {
+ fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
+ } else { // decoderStatus >= 0
+ if (!toSurface) {
+ ByteBuffer outputFrame = decoderOutputBuffers[decoderStatus];
+ outputFrame.position(info.offset);
+ outputFrame.limit(info.offset + info.size);
+ rawSize += info.size;
+ if (info.size == 0) {
+ if (VERBOSE) Log.d(TAG, "got empty frame");
+ } else {
+ if (VERBOSE) Log.d(TAG, "decoded, checking frame " + checkIndex);
+ checkFrame(checkIndex++, decoderColorFormat, outputFrame);
+ }
+ if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+ if (VERBOSE) Log.d(TAG, "output EOS");
+ outputDone = true;
+ }
+ } else {
+ // Before we release+render this buffer, check to see if data from a
+ // previous go-round has latched.
+ surfaceStuff.checkNewImageIfAvailable();
+ if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus +
+ " (size=" + info.size + ")");
+ rawSize += info.size;
+ if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+ if (VERBOSE) Log.d(TAG, "output EOS");
+ outputDone = true;
+ }
+ }
+ // If output is going to a Surface, the second argument should be true.
+ // If not, the value doesn't matter.
+ //
+ // If we are sending to a Surface, then some time after we call this the
+ // data will be made available to SurfaceTexture, and the onFrameAvailable()
+ // callback will fire.
+ decoder.releaseOutputBuffer(decoderStatus, true /*render*/); + } + } + } + if (VERBOSE) Log.d(TAG, "encoded " + NUM_FRAMES + " frames at " + + mWidth + "x" + mHeight + ": raw=" + rawSize + ", enc=" + encodedSize); + if (outputStream != null) { + try { + outputStream.close(); + } catch (IOException ioe) { + Log.w(TAG, "failed closing debug file"); + throw new RuntimeException(ioe); + } + } + } + /** + * Generates data for frame N into the supplied buffer. We have an 8-frame animation + * sequence that wraps around. It looks like this: + *
+ * <pre>
+ * 0 1 2 3
+ * 7 6 5 4
+ * </pre>
+ * We draw one of the eight rectangles and leave the rest set to the zero-fill color.
+ */
+ private void generateFrame(int frameIndex, int colorFormat, byte[] frameData) {
+ final int HALF_WIDTH = mWidth / 2;
+ boolean semiPlanar = isSemiPlanarYUV(colorFormat);
+ // Set to zero. In YUV this is a dull green.
+ Arrays.fill(frameData, (byte) 0);
+ int startX, startY;
+ frameIndex %= 8;
+ //frameIndex = (frameIndex / 8) % 8; // use this instead for debug -- easier to see
+ if (frameIndex < 4) {
+ startX = frameIndex * (mWidth / 4);
+ startY = 0;
+ } else {
+ startX = (7 - frameIndex) * (mWidth / 4);
+ startY = mHeight / 2;
+ }
+ for (int y = startY + (mHeight/2) - 1; y >= startY; --y) {
+ for (int x = startX + (mWidth/4) - 1; x >= startX; --x) {
+ if (semiPlanar) {
+ // full-size Y, followed by CbCr pairs at half resolution
+ // e.g. Nexus 4 OMX.qcom.video.encoder.avc COLOR_FormatYUV420SemiPlanar
+ // e.g. Galaxy Nexus OMX.TI.DUCATI1.VIDEO.H264E
+ // OMX_TI_COLOR_FormatYUV420PackedSemiPlanar
+ frameData[y * mWidth + x] = (byte) TEST_Y;
+ if ((x & 0x01) == 0 && (y & 0x01) == 0) {
+ frameData[mWidth*mHeight + y * HALF_WIDTH + x] = (byte) TEST_U;
+ frameData[mWidth*mHeight + y * HALF_WIDTH + x + 1] = (byte) TEST_V;
+ }
+ } else {
+ // full-size Y, followed by quarter-size Cb and quarter-size Cr
+ // e.g. Nexus 10 OMX.Exynos.AVC.Encoder COLOR_FormatYUV420Planar
+ // e.g. Nexus 7 OMX.Nvidia.h264.encoder COLOR_FormatYUV420Planar
+ frameData[y * mWidth + x] = (byte) TEST_Y;
+ if ((x & 0x01) == 0 && (y & 0x01) == 0) {
+ frameData[mWidth*mHeight + (y/2) * HALF_WIDTH + (x/2)] = (byte) TEST_U;
+ frameData[mWidth*mHeight + HALF_WIDTH * (mHeight / 2) +
+ (y/2) * HALF_WIDTH + (x/2)] = (byte) TEST_V;
+ }
+ }
+ }
+ }
+ if (false) {
+ // make sure that generate and check agree
+ Log.d(TAG, "SPOT CHECK");
+ checkFrame(frameIndex, colorFormat, ByteBuffer.wrap(frameData));
+ Log.d(TAG, "SPOT CHECK DONE");
+ }
+ }
+ /**
+ * Performs a simple check to see if the frame is more or less right.
+ *
+ * See {@link #generateFrame} for a description of the layout. The idea is to sample
+ * one pixel from the middle of each of the 8 regions, and verify that the correct one has
+ * the non-background color. We can't know exactly what the video encoder has done
+ * with our frames, so we just check whether it looks more or less like the right thing.
+ *
+ * Throws a failure if the frame looks wrong. + */ + private void checkFrame(int frameIndex, int colorFormat, ByteBuffer frameData) { + final int HALF_WIDTH = mWidth / 2; + boolean frameFailed = false; + if (colorFormat == 0x7FA30C03) { + // Nexus 4 decoder output OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka + Log.d(TAG, "unable to check frame contents for colorFormat=" + + Integer.toHexString(colorFormat)); + return; + } + boolean semiPlanar = isSemiPlanarYUV(colorFormat); + frameIndex %= 8; + for (int i = 0; i < 8; i++) { + int x, y; + if (i < 4) { + x = i * (mWidth / 4) + (mWidth / 8); + y = mHeight / 4; + } else { + x = (7 - i) * (mWidth / 4) + (mWidth / 8); + y = (mHeight * 3) / 4; + } + int testY, testU, testV; + if (semiPlanar) { + // Galaxy Nexus uses OMX_TI_COLOR_FormatYUV420PackedSemiPlanar + testY = frameData.get(y * mWidth + x) & 0xff; + testU = frameData.get(mWidth*mHeight + 2*(y/2) * HALF_WIDTH + 2*(x/2)) & 0xff; + testV = frameData.get(mWidth*mHeight + 2*(y/2) * HALF_WIDTH + 2*(x/2) + 1) & 0xff; + } else { + // Nexus 10, Nexus 7 use COLOR_FormatYUV420Planar + testY = frameData.get(y * mWidth + x) & 0xff; + testU = frameData.get(mWidth*mHeight + (y/2) * HALF_WIDTH + (x/2)) & 0xff; + testV = frameData.get(mWidth*mHeight + HALF_WIDTH * (mHeight / 2) + + (y/2) * HALF_WIDTH + (x/2)) & 0xff; + } + boolean failed = false; + if (i == frameIndex) { + failed = !isColorClose(testY, TEST_Y) || + !isColorClose(testU, TEST_U) || + !isColorClose(testV, TEST_V); + } else { + // should be our zeroed-out buffer + failed = !isColorClose(testY, 0) || + !isColorClose(testU, 0) || + !isColorClose(testV, 0); + } + if (failed) { + Log.w(TAG, "Bad frame " + frameIndex + " (r=" + i + ": Y=" + testY + + " U=" + testU + " V=" + testV + ")"); + frameFailed = true; + } + } + if (frameFailed) { + fail("bad frame (" + frameIndex + ")"); + } + } + /** + * Returns true if the actual color value is close to the expected color value. + */ + static boolean isColorClose(int actual, int expected) { + if (expected < 5) { + return actual < (expected + 5); + } else if (expected > 250) { + return actual > (expected - 5); + } else { + return actual > (expected - 5) && actual < (expected + 5); + } + } + /** + * Returns true if the specified color format is semi-planar YUV. Throws an exception + * if the color format is not recognized (e.g. not YUV). + */ + private static boolean isSemiPlanarYUV(int colorFormat) { + switch (colorFormat) { + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar: + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar: + return false; + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar: + case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar: + case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar: + return true; + default: + throw new RuntimeException("unknown format " + colorFormat); + } + } + /** + * Holds state associated with a Surface used for output. + *
+ * By default, the Surface will be using a BufferQueue in asynchronous mode, so we + * will likely miss a number of frames. + */ + private static class SurfaceStuff implements SurfaceTexture.OnFrameAvailableListener { + private static final int EGL_OPENGL_ES2_BIT = 4; + private EGL10 mEGL; + private EGLDisplay mEGLDisplay; + private EGLContext mEGLContext; + private EGLSurface mEGLSurface; + private SurfaceTexture mSurfaceTexture; + private Surface mSurface; + private boolean mFrameAvailable = false; // guarded by "this" + private int mWidth; + private int mHeight; + private VideoRender mVideoRender; + public SurfaceStuff(int width, int height) { + mWidth = width; + mHeight = height; + eglSetup(); + mVideoRender = new VideoRender(); + mVideoRender.onSurfaceCreated(); + // Even if we don't access the SurfaceTexture after the constructor returns, we + // still need to keep a reference to it. The Surface doesn't retain a reference + // at the Java level, so if we don't either then the object can get GCed, which + // causes the native finalizer to run. + if (VERBOSE) Log.d(TAG, "textureID=" + mVideoRender.getTextureId()); + mSurfaceTexture = new SurfaceTexture(mVideoRender.getTextureId()); + // This doesn't work if SurfaceStuff is created on the thread that CTS started for + // these test cases. + // + // The CTS-created thread has a Looper, and the SurfaceTexture constructor will + // create a Handler that uses it. The "frame available" message is delivered + // there, but since we're not a Looper-based thread we'll never see it. For + // this to do anything useful, SurfaceStuff must be created on a thread without + // a Looper, so that SurfaceTexture uses the main application Looper instead. + // + // Java language note: passing "this" out of a constructor is generally unwise, + // but we should be able to get away with it here. + mSurfaceTexture.setOnFrameAvailableListener(this); + mSurface = new Surface(mSurfaceTexture); + } + /** + * Prepares EGL. We want a GLES 2.0 context and a surface that supports pbuffer. + */ + private void eglSetup() { + mEGL = (EGL10)EGLContext.getEGL(); + mEGLDisplay = mEGL.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY); + if (!mEGL.eglInitialize(mEGLDisplay, null)) { + fail("unable to initialize EGL10"); + } + // Configure surface for pbuffer and OpenGL ES 2.0. We want enough RGB bits + // to be able to tell if the frame is reasonable. + int[] attribList = { + EGL10.EGL_RED_SIZE, 8, + EGL10.EGL_GREEN_SIZE, 8, + EGL10.EGL_BLUE_SIZE, 8, + EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT, + EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, + EGL10.EGL_NONE + }; + EGLConfig[] configs = new EGLConfig[1]; + int[] numConfigs = new int[1]; + if (!mEGL.eglChooseConfig(mEGLDisplay, attribList, configs, 1, numConfigs)) { + fail("unable to find RGB888+pbuffer EGL config"); + } + // Configure context for OpenGL ES 2.0. + int[] attrib_list = { + EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, + EGL10.EGL_NONE + }; + mEGLContext = mEGL.eglCreateContext(mEGLDisplay, configs[0], EGL10.EGL_NO_CONTEXT, + attrib_list); + checkEglError("eglCreateContext"); + assertNotNull(mEGLContext); + // Create a pbuffer surface. By using this for output, we can use glReadPixels + // to test values in the output. 
+ int[] surfaceAttribs = { + EGL10.EGL_WIDTH, mWidth, + EGL10.EGL_HEIGHT, mHeight, + EGL10.EGL_NONE + }; + mEGLSurface = mEGL.eglCreatePbufferSurface(mEGLDisplay, configs[0], surfaceAttribs); + checkEglError("eglCreatePbufferSurface"); + assertNotNull(mEGLSurface); + if (!mEGL.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) { + fail("eglMakeCurrent failed"); + } + } + /** + * Checks for EGL errors. + */ + private void checkEglError(String msg) { + boolean failed = false; + int error; + while ((error = mEGL.eglGetError()) != EGL10.EGL_SUCCESS) { + Log.e(TAG, msg + ": EGL error: 0x" + Integer.toHexString(error)); + failed = true; + } + if (failed) { + fail("EGL error encountered (see log)"); + } + } + /** + * Returns the Surface that the MediaCodec will draw onto. + */ + public Surface getSurface() { + return mSurface; + } + /** + * Latches the next buffer into the texture if one is available, and checks it for + * validity. Must be called from the thread that created the SurfaceStuff object. + */ + public void checkNewImageIfAvailable() { + boolean newStuff = false; + synchronized (this) { + if (mSurfaceTexture != null && mFrameAvailable) { + mFrameAvailable = false; + newStuff = true; + } + } + if (newStuff) { + mVideoRender.checkGlError("before updateTexImage"); + mSurfaceTexture.updateTexImage(); + mVideoRender.onDrawFrame(mSurfaceTexture); + checkSurfaceFrame(); + } + } + @Override + public void onFrameAvailable(SurfaceTexture st) { + if (VERBOSE) Log.d(TAG, "new frame available"); + synchronized (this) { + mFrameAvailable = true; + } + } + /** + * Attempts to check the frame for correctness. + *
+ * Our definition of "correct" is based on knowing what the frame sequence number is, + * which we can't reliably get by counting frames since the underlying mechanism can + * drop frames. The alternative would be to use the presentation time stamp that + * we passed to the video encoder, but there's no way to get that from the texture. + *
+ * All we can do is verify that it looks something like a frame we'd expect, i.e. + * green with exactly one pink rectangle. + */ + private void checkSurfaceFrame() { + ByteBuffer pixelBuf = ByteBuffer.allocateDirect(4); // TODO - reuse this + int numColoredRects = 0; + int rectPosn = -1; + for (int i = 0; i < 8; i++) { + // Note the coordinates are inverted on the Y-axis in GL. + int x, y; + if (i < 4) { + x = i * (mWidth / 4) + (mWidth / 8); + y = (mHeight * 3) / 4; + } else { + x = (7 - i) * (mWidth / 4) + (mWidth / 8); + y = mHeight / 4; + } + GLES20.glReadPixels(x, y, 1, 1, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, pixelBuf); + int r = pixelBuf.get(0) & 0xff; + int g = pixelBuf.get(1) & 0xff; + int b = pixelBuf.get(2) & 0xff; + if (isColorClose(r, TEST_R0) && + isColorClose(g, TEST_G0) && + isColorClose(b, TEST_B0)) { + // empty space + } else if (isColorClose(r, TEST_R1) && + isColorClose(g, TEST_G1) && + isColorClose(b, TEST_B1)) { + // colored rect + numColoredRects++; + rectPosn = i; + } else { + // wtf + Log.w(TAG, "found unexpected color r=" + r + " g=" + g + " b=" + b); + } + } + if (numColoredRects != 1) { + fail("Found surface with colored rects != 1 (" + numColoredRects + ")"); + } else { + if (VERBOSE) Log.d(TAG, "good surface, looks like index " + rectPosn); + } + } + } + /** + * GL code to fill a surface with a texture. This class was largely copied from + * VideoSurfaceView.VideoRender. + *
+ * TODO: merge implementations
+ */
+ private static class VideoRender {
+ private static final int FLOAT_SIZE_BYTES = 4;
+ private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
+ private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
+ private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
+ private final float[] mTriangleVerticesData = {
+ // X, Y, Z, U, V
+ -1.0f, -1.0f, 0, 0.f, 0.f,
+ 1.0f, -1.0f, 0, 1.f, 0.f,
+ -1.0f, 1.0f, 0, 0.f, 1.f,
+ 1.0f, 1.0f, 0, 1.f, 1.f,
+ };
+ private FloatBuffer mTriangleVertices;
+ private final String mVertexShader =
+ "uniform mat4 uMVPMatrix;\n" +
+ "uniform mat4 uSTMatrix;\n" +
+ "attribute vec4 aPosition;\n" +
+ "attribute vec4 aTextureCoord;\n" +
+ "varying vec2 vTextureCoord;\n" +
+ "void main() {\n" +
+ " gl_Position = uMVPMatrix * aPosition;\n" +
+ " vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
+ "}\n";
+ private final String mFragmentShader =
+ "#extension GL_OES_EGL_image_external : require\n" +
+ "precision mediump float;\n" +
+ "varying vec2 vTextureCoord;\n" +
+ "uniform samplerExternalOES sTexture;\n" +
+ "void main() {\n" +
+ " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
+ "}\n";
+ private float[] mMVPMatrix = new float[16];
+ private float[] mSTMatrix = new float[16];
+ private int mProgram;
+ private int mTextureID = -12345;
+ private int muMVPMatrixHandle;
+ private int muSTMatrixHandle;
+ private int maPositionHandle;
+ private int maTextureHandle;
+ public VideoRender() {
+ mTriangleVertices = ByteBuffer.allocateDirect(
+ mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
+ .order(ByteOrder.nativeOrder()).asFloatBuffer();
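+ // native byte order is required for buffers handed to OpenGL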
+ mTriangleVertices.put(mTriangleVerticesData).position(0);
+ Matrix.setIdentityM(mSTMatrix, 0);
+ }
+ public int getTextureId() {
+ return mTextureID;
+ }
+ public void onDrawFrame(SurfaceTexture st) {
+ checkGlError("onDrawFrame start");
+ st.getTransformMatrix(mSTMatrix);
+ GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
+ GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
+ GLES20.glUseProgram(mProgram);
+ checkGlError("glUseProgram");
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
+ mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
+ GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
+ TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
+ checkGlError("glVertexAttribPointer maPosition");
+ GLES20.glEnableVertexAttribArray(maPositionHandle);
+ checkGlError("glEnableVertexAttribArray maPositionHandle");
+ mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
+ GLES20.glVertexAttribPointer(maTextureHandle, 3, GLES20.GL_FLOAT, false,
+ TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
+ checkGlError("glVertexAttribPointer maTextureHandle");
+ GLES20.glEnableVertexAttribArray(maTextureHandle);
+ checkGlError("glEnableVertexAttribArray maTextureHandle");
+ Matrix.setIdentityM(mMVPMatrix, 0);
+ GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
+ GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ checkGlError("glDrawArrays");
+ GLES20.glFinish();
+ }
+ public void onSurfaceCreated() {
+ mProgram = createProgram(mVertexShader, mFragmentShader);
+ if (mProgram == 0) {
+ Log.e(TAG, "failed creating program");
+ return;
+ }
+ maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
+ checkGlError("glGetAttribLocation aPosition");
+ if (maPositionHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for aPosition");
+ }
+ maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
+ checkGlError("glGetAttribLocation aTextureCoord");
+ if (maTextureHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for aTextureCoord");
+ }
+ muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
+ checkGlError("glGetUniformLocation uMVPMatrix");
+ if (muMVPMatrixHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for uMVPMatrix");
+ }
+ muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
+ checkGlError("glGetUniformLocation uSTMatrix");
+ if (muSTMatrixHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for uSTMatrix");
+ }
+ int[] textures = new int[1];
+ GLES20.glGenTextures(1, textures, 0);
+ mTextureID = textures[0];
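+ // decoded frames will arrive in this GL_TEXTURE_EXTERNAL_OES texture via SurfaceTexture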
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
+ checkGlError("glBindTexture mTextureID");
+ GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
+ GLES20.GL_NEAREST);
+ GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
+ GLES20.GL_LINEAR);
+ GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
+ GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
+ GLES20.GL_CLAMP_TO_EDGE);
+ checkGlError("glTexParameter");
+ }
+ private int loadShader(int shaderType, String source) {
+ int shader = GLES20.glCreateShader(shaderType);
+ checkGlError("glCreateShader type=" + shaderType);
+ GLES20.glShaderSource(shader, source);
+ GLES20.glCompileShader(shader);
+ int[] compiled = new int[1];
+ GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
+ if (compiled[0] == 0) {
+ Log.e(TAG, "Could not compile shader " + shaderType + ":");
+ Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
+ GLES20.glDeleteShader(shader);
+ shader = 0;
+ }
+ return shader;
+ }
+ private int createProgram(String vertexSource, String fragmentSource) {
+ int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+ if (vertexShader == 0) {
+ return 0;
+ }
+ int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+ if (pixelShader == 0) {
+ return 0;
+ }
+ int program = GLES20.glCreateProgram();
+ checkGlError("glCreateProgram");
+ if (program == 0) {
+ Log.e(TAG, "Could not create program");
+ }
+ GLES20.glAttachShader(program, vertexShader);
+ checkGlError("glAttachShader");
+ GLES20.glAttachShader(program, pixelShader);
+ checkGlError("glAttachShader");
+ GLES20.glLinkProgram(program);
+ int[] linkStatus = new int[1];
+ GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+ if (linkStatus[0] != GLES20.GL_TRUE) {
+ Log.e(TAG, "Could not link program: ");
+ Log.e(TAG, GLES20.glGetProgramInfoLog(program));
+ GLES20.glDeleteProgram(program);
+ program = 0;
+ }
+ return program;
+ }
+ public void checkGlError(String op) {
+ int error;
+ while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
+ Log.e(TAG, op + ": glError " + error);
+ throw new RuntimeException(op + ": glError " + error);
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/atestcases/TestCaseActivity.kt b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/atestcases/TestCaseActivity.kt
new file mode 100644
index 0000000..085b90a
--- /dev/null
+++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/atestcases/TestCaseActivity.kt
@@ -0,0 +1,22 @@
+package com.aserbao.androidcustomcamera.blocks.atestcases
+
+import android.support.v7.app.AppCompatActivity
+import android.os.Bundle
+import android.view.View
+import com.aserbao.androidcustomcamera.base.activity.RVBaseActivity
+import com.aserbao.androidcustomcamera.base.beans.BaseRecyclerBean
+
+/**
+ * https://android.googlesource.com/platform/cts/+/b04c81bfc2761b21293f9c095da38c757e570fd3/tests/tests/media/src/android/media
+ */
+class TestCaseActivity : RVBaseActivity() {
+ override fun itemClickBack(view: View?, position: Int, isLongClick: Boolean, comeFrom: Int) {
+ when(position){
+// 0 -> EncodeDecodeTest
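+// A hypothetical sketch of driving it from here: EncodeDecodeTest is an AndroidTestCase,
+// not an Activity, so it would have to run on a worker thread, e.g.
+// 0 -> Thread { EncodeDecodeTest().testEncodeDecodeVideoFromBufferToBufferQVGA() }.start()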
+ }
+ }
+ override fun initGetData() {
+ mBaseRecyclerBeen.add(BaseRecyclerBean("EncodeDecodeTest",0))
+ }
+
+}
\ No newline at end of file
diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/ffmpeg/FFmpegActivity.kt b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/ffmpeg/FFmpegActivity.kt
index d9fbfda..fc15dee 100644
--- a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/ffmpeg/FFmpegActivity.kt
+++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/ffmpeg/FFmpegActivity.kt
@@ -3,8 +3,10 @@ package com.aserbao.androidcustomcamera.blocks.ffmpeg
import Jni.FFmpegCmd
import VideoHandle.*
import android.os.Environment
+import android.support.annotation.MainThread
import android.util.Log
import android.view.View
+import android.widget.Toast
import com.aserbao.androidcustomcamera.base.activity.RVBaseActivity
import com.aserbao.androidcustomcamera.base.beans.BaseRecyclerBean
import com.aserbao.androidcustomcamera.blocks.ffmpeg.beans.WaterFilter
@@ -18,12 +20,13 @@ var absolutePath = Environment.getExternalStorageDirectory().absolutePath
class FFmpegActivity : RVBaseActivity(),OnEditorListener {
override fun initGetData() {
+ mBaseRecyclerBeen.add(BaseRecyclerBean("取消", 100))
mBaseRecyclerBeen.add(BaseRecyclerBean("视频中抽取音频", 0))
mBaseRecyclerBeen.add(BaseRecyclerBean("视频添加水印", 1))
mBaseRecyclerBeen.add(BaseRecyclerBean("无损视频合并", 2))
mBaseRecyclerBeen.add(BaseRecyclerBean("多段视频合并", 3))
mBaseRecyclerBeen.add(BaseRecyclerBean("多段视频加水印并合成", 4))
- mBaseRecyclerBeen.add(BaseRecyclerBean("取消", 5))
+ mBaseRecyclerBeen.add(BaseRecyclerBean("视频添加配乐并调整音量大小", 5))
mInputs.add(WaterFilter(videoPath1,png1))
mInputs.add(WaterFilter(videoPath2,png2))
@@ -53,12 +56,19 @@ class FFmpegActivity : RVBaseActivity(),OnEditorListener {
override fun itemClickBack(view: View, position: Int, isLongClick: Boolean, comeFrom: Int) {
mStartTime = System.currentTimeMillis()
when(position){
+ 100 ->{
+// FFmpegCmd.exit()
+ addMusicToVideo1()
+ }
0 ->{
FFmpegUtils.demuxer(videoPath1,outputMusicPath,EpEditor.Format.MP3,this)
}
1 ->{
- var epVideo1 = EpVideo(videoPath1)
- epVideo1.addDraw(EpDraw(png1,0,0,576f,1024f,false))
+ var tempVideoPath = "/storage/emulated/0/Android/data/com.getremark.playground/files/Movies/15871817738614870009935443.mp4"
+ var tempBitmapPath = "/storage/emulated/0/playground/temp/123.png"
+ var epVideo1 = EpVideo(tempVideoPath)
+// var epVideo1 = EpVideo(videoPath1)
+ epVideo1.addDraw(EpDraw(tempBitmapPath,0,0,576f,1024f,false))
val outputOption = EpEditor.OutputOption(outputPathMp4)
EpEditor.exec(epVideo1, outputOption,this)
}
@@ -83,11 +93,35 @@ class FFmpegActivity : RVBaseActivity(),OnEditorListener {
addWaterFilterOneLine()
}
5 ->{
- FFmpegCmd.exit()
+ addMusicToVideo()
}
}
}
+ fun addMusicToVideo(){
+ var inputVideo = absolutePath + "/5.mp4"
+// var inputVideo = absolutePath + "/temp.mp4"
+ var inputMusic = absolutePath + "/input.mp3"
+ var outputVideo = absolutePath + "/output.mp4"
+ var videoVolume = 0.5f
+ var musicVolume = 1f
+ FFmpegUtils.music(inputVideo,inputVideo,outputVideo,videoVolume,musicVolume,this)
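+// note: the second argument above is inputVideo rather than inputMusic, so the video's
+// own audio track is what gets mixed back in; pass inputMusic to use the external track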
+// FFmpegUtils.addMusicForMp4(inputVideo,inputMusic,videoVolume,musicVolume,outputVideo,this)
+ }
+ fun addMusicToVideo1(){
+// var inputVideo = absolutePath + "/5.mp4"
+// var inputVideo = absolutePath + "/temp.mp4"
+// var inputMusic = absolutePath + "/input.mp3"
+// var inputVideo = "/storage/emulated/0/playground/temp/.capture/.remark-1588920936552.mp4"
+ var inputVideo = absolutePath + "/test1.mp4"
+ var inputMusic = absolutePath +"/er.m4a"
+ var outputVideo = absolutePath + "/output.mp4"
+ var videoVolume = 1f
+ var musicVolume = 1f
+ FFmpegUtils.music(inputVideo,inputMusic,outputVideo,videoVolume,musicVolume,this)
+ }
+
+
private fun addWaterFilterOneLine() {
// ffmpeg -i 2.mp4 -i 3.mp4 -i img1.png -i img2.png -filter_complex "[0:v][2:v]overlay=0:0[in1];[1:v][3:v]overlay=0:10[in2];[in1][in2]concat" -y output.mp4
//开始处理
@@ -160,6 +194,7 @@ class FFmpegActivity : RVBaseActivity(),OnEditorListener {
if(cuurIndex == 3){
itemClickBack(mBaseRv,2,false,2)
}*/
+
Log.e(TAG, ": onSuccess 耗时: " + (System.currentTimeMillis() - mStartTime) );
}
@@ -170,6 +205,4 @@ class FFmpegActivity : RVBaseActivity(),OnEditorListener {
override fun onProgress(progress: Float) {
Log.e(TAG, ": onProgress" + progress );
}
-
-
}
\ No newline at end of file
diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/ffmpeg/utils/FFmpegUtils.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/ffmpeg/utils/FFmpegUtils.java
index 2ce388f..db9787d 100644
--- a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/ffmpeg/utils/FFmpegUtils.java
+++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/ffmpeg/utils/FFmpegUtils.java
@@ -73,6 +73,35 @@ public static void music(String videoin, String audioin, String output, float vi
execCmd(cmd, d, onEditorListener);
}
+ /**
+ * Adds a music track to a video, mixing it with the video's own audio.
+ * @param inputVideoPath path of the source video
+ * @param inputMusicPath path of the music file to mix in
+ * @param videoVolume 0~1
+ * @param musicVolume 0~1
+ * @param outputVideoPath path of the muxed output file
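+ * <p>Example (paths illustrative; {@code listener} is an {@link OnEditorListener}):
+ * <pre>
+ * FFmpegUtils.addMusicForMp4("/sdcard/in.mp4", "/sdcard/bgm.mp3",
+ * 1f, 0.5f, "/sdcard/out.mp4", listener);
+ * </pre>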
+ */
+ public static void addMusicForMp4(String inputVideoPath,String inputMusicPath,float videoVolume,float musicVolume,String outputVideoPath,final OnEditorListener onEditorListener){
+// ffmpeg -y -i 123.mp4 -i 5.aac -filter_complex "[0:a]aformat=sample_fmts=fltp:sample_rates=44100:channel_layouts=stereo,volume=1.0[a0];
+// [1:a]aformat=sample_fmts=fltp:sample_rates=44100:channel_layouts=stereo,volume=0.5[a1];[a0][a1]amix=inputs=2:duration=first[aout]" -map "[aout]" -ac 2 -c:v copy -map 0:v:0 output.mp4
+ CmdList cmd = new CmdList();
+ cmd.append("ffmpeg").append("-y").append("-i").append(inputVideoPath)
+ .append("-i").append(inputMusicPath)
+ .append("-filter_complex")
+ .append("[0:a]volume=" + videoVolume + "[a0];[1:a]volume=" + musicVolume + "[a1];[a0][a1]amix=inputs=2:duration=first[aout]")
+ .append("-map")
+ .append("[aout]")
+ .append("-ac")
+ .append("2")
+ /*.append("-c:v")
+ .append("-copy")*/
+ .append("-map")
+ .append("0:v:0")
+ .append(outputVideoPath);
+ long d = VideoUitls.getDuration(inputVideoPath);
+ execCmd(cmd, d, onEditorListener);
+ }
+
/**
* 音视频分离
*
@@ -186,7 +215,7 @@ public static void changePTS(String videoin, String out, float times, EpEditor.P
* @param out 输出路径
* @param w 输出图片宽度
* @param h 输出图片高度
- * @param rate 每秒视频生成图片数
+ * @param rate number of images generated per second of video
* @param onEditorListener 回调接口
*/
public static void video2pic(String videoin, String out, int w, int h, float rate, OnEditorListener onEditorListener) {
@@ -398,4 +427,6 @@ public void onProgress(final float progress) {
}
});
}
+
+
}
diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/primary/PrimaryMediaCodecActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/primary/PrimaryMediaCodecActivity.java
index bcb6363..49b152a 100644
--- a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/primary/PrimaryMediaCodecActivity.java
+++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/primary/PrimaryMediaCodecActivity.java
@@ -35,9 +35,9 @@ public class PrimaryMediaCodecActivity extends BaseActivity {
private static final String MIME_TYPE = "video/avc";
private static final int WIDTH = 720;
private static final int HEIGHT = 1280;
- private static final int BIT_RATE = 4000000;
- private static final int FRAMES_PER_SECOND = 4;
- private static final int IFRAME_INTERVAL = 5;
+ private static final int BIT_RATE = 3000000;
+ private static final int FRAMES_PER_SECOND = 30;
+ private static final int IFRAME_INTERVAL = 1;
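+ // note: 30 fps with an I-frame every second; shorter GOPs seek and trim more
+ // cleanly, at a small compression cost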
private static final int NUM_FRAMES = 4 * 100;
private static final int START_RECORDING = 0;
@@ -71,8 +71,8 @@ public void onViewClicked(View view) {
case R.id.btn_recording:
if (mBtnRecording.getText().equals("开始录制")) {
try {
-// mOutputFile = new File(Environment.getExternalStorageDirectory().getAbsolutePath(), System.currentTimeMillis() + ".mp4");
- mOutputFile = new File(FileUtils.getStorageMp4("PrimaryMediaCodecActivity"));
+ mOutputFile = new File(Environment.getExternalStorageDirectory().getAbsolutePath(), System.currentTimeMillis() + ".mp4");
+// mOutputFile = new File(FileUtils.getStorageMp4("PrimaryMediaCodecActivity"));
startRecording(mOutputFile);
mPrimaryMcTv.setText("文件保存路径为:" + mOutputFile.toString());
mBtnRecording.setText("停止录制");
diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/show/MediaCodecShowOnGlSurfaceView.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/show/MediaCodecShowOnGlSurfaceView.java
index 148d2eb..8f5e21b 100644
--- a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/show/MediaCodecShowOnGlSurfaceView.java
+++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaCodec/show/MediaCodecShowOnGlSurfaceView.java
@@ -76,9 +76,8 @@ public void onViewClicked(View view) {
startActivityForResult(intent2, StaticFinalValues.REQUEST_CODE_PICK_VIDEO);
break;
case R.id.decode_show_btn:
- /*MediaCodecUtil1 mediaCodecUtil1 = new MediaCodecUtil1(videoFileName, mHolder.getSurface());
- mediaCodecUtil1.start();*/
-
+ MediaCodecUtil1 mediaCodecUtil1 = new MediaCodecUtil1(videoFileName, mHolder.getSurface());
+ mediaCodecUtil1.start();
break;
case R.id.detail_video_btn:
new Thread(new Runnable() {
diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/MediaExtractorActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/MediaExtractorActivity.java
index 8788670..0bdf1c0 100644
--- a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/MediaExtractorActivity.java
+++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/MediaExtractorActivity.java
@@ -12,6 +12,7 @@
import com.aserbao.androidcustomcamera.base.interfaces.IDetailCallBackListener;
import com.aserbao.androidcustomcamera.blocks.interfaces.ICallBackListener;
import com.aserbao.androidcustomcamera.blocks.mediaExtractor.combineTwoVideo.CombineTwoVideos;
+import com.aserbao.androidcustomcamera.blocks.mediaExtractor.combineTwoVideo.CombineVideoAndMusic;
import com.aserbao.androidcustomcamera.blocks.mediaExtractor.primary.FrequencyView;
import com.aserbao.androidcustomcamera.blocks.mediaExtractor.primary.TransAacHandlerPure;
import com.aserbao.androidcustomcamera.blocks.mediaExtractor.primary.decoder.DecoderAudioAAC2PCMPlay;
@@ -101,7 +102,11 @@ public void cuurentFrequenty(int cuurentFrequenty, double volume) {
// decoderAudioAndGetDb.start(audioMp3Path1, MIMETYPE_AUDIO_MPEG);
break;
case R.id.exchange_video_and_audio:
- CombineTwoVideos.combineTwoVideos(path + "/aserbao.mp4", 0, path + "/lan.mp4", new File(path + "/aserbao.mp3"), this);
+// CombineTwoVideos.combineTwoVideos(path + "/aserbao.mp4", 0, path + "/lan.mp4", new File(path + "/aserbao.mp3"), this);
+ String inputVideo = "/storage/emulated/0/douyin.mp4";
+ String outputVideo = "/storage/emulated/0/douyinOut.mp4";
+ String inputMusic = "/storage/emulated/0/pg/.bgm/40e613e5e3695ab44b4f31e25088d7ac";
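+ // note: combineTwoVideos takes the audio source as its first argument and the
+ // frame (video) source as its third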
+ CombineVideoAndMusic.combineTwoVideos(inputMusic, 0, inputVideo, new File(outputVideo), this);
break;
case R.id.decoder_aac_and_player:
String audioPath = Environment.getExternalStorageDirectory().getAbsolutePath() + "/aac.aac";
diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/combineTwoVideo/CombineVideoAndMusic.java b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/combineTwoVideo/CombineVideoAndMusic.java
new file mode 100644
index 0000000..0ab90a5
--- /dev/null
+++ b/app/src/main/java/com/aserbao/androidcustomcamera/blocks/mediaExtractor/combineTwoVideo/CombineVideoAndMusic.java
@@ -0,0 +1,152 @@
+package com.aserbao.androidcustomcamera.blocks.mediaExtractor.combineTwoVideo;
+
+import android.media.MediaCodec;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.media.MediaMuxer;
+import android.util.Log;
+
+import com.aserbao.androidcustomcamera.base.interfaces.IDetailCallBackListener;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+/**
+ * Function: muxes the audio track of one video with the video track of another
+ * @author aserbao
+ * @date : On 2019/1/3 6:12 PM
+ * @email: 1142803753@qq.com
+ * @project:AndroidCamera
+ * @package:com.aserbao.androidcustomcamera.blocks.mediaCodec.primary.mp3TranslateAAC
+ * @Copyright: personal copyright, all rights reserved
+ */
+public class CombineVideoAndMusic {
+ private static final String TAG = "CombineTwoVideos";
+ /**
+ * Combines the audio of video 1 with the frames of video 2.
+ *
+ * @param audioVideoPath the video that provides the audio track
+ * @param audioStartTime start offset into the audio, in microseconds
+ * @param frameVideoPath the video that provides the frames
+ * @param combinedVideoOutFile the combined output file
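+ * <p>Example (paths illustrative; listener is an IDetailCallBackListener):
+ * <pre>
+ * CombineVideoAndMusic.combineTwoVideos("/sdcard/music.mp4", 0,
+ * "/sdcard/frames.mp4", new File("/sdcard/out.mp4"), listener);
+ * </pre>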
+ */
+ public static void combineTwoVideos(String audioVideoPath,
+ long audioStartTime,
+ String frameVideoPath,
+ File combinedVideoOutFile,
+ IDetailCallBackListener iDetailCallBackListener) {
+ MediaExtractor audioVideoExtractor = new MediaExtractor();
+ int mainAudioExtractorTrackIndex = -1; // audio track index within the audio-source video
+ int mainAudioMuxerTrackIndex = -1; // audio track index within the muxed output
+ int mainAudioMaxInputSize = 0; // largest audio sample we may read
+
+ MediaExtractor frameVideoExtractor = new MediaExtractor();
+ int frameExtractorTrackIndex = -1; // video track index within the frame-source video
+ int frameMuxerTrackIndex = -1; // video track index within the muxed output
+ int frameMaxInputSize = 0; // largest video sample we may read
+ int frameRate = 0; // frame rate of the video
+ long frameDuration = 0;
+
+ MediaMuxer muxer = null; // muxes the audio and video together
+
+ try {
+ muxer = new MediaMuxer(combinedVideoOutFile.getPath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
+
+ audioVideoExtractor.setDataSource(audioVideoPath); // set the audio-source video
+ // audio track info
+ int audioTrackCount = audioVideoExtractor.getTrackCount(); // number of tracks in the data source
+ // loop over the tracks to find the audio track we want
+ for (int i = 0; i < audioTrackCount; i++) {
+ MediaFormat format = audioVideoExtractor.getTrackFormat(i); // format of the track at this index
+ String mimeType = format.getString(MediaFormat.KEY_MIME); // the format's MIME type
+ if (mimeType.startsWith("audio/")) { // found the audio track
+ mainAudioExtractorTrackIndex = i;
+ mainAudioMuxerTrackIndex = muxer.addTrack(format); // add the audio track to the MediaMuxer; returns the new track index
+ mainAudioMaxInputSize = format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE); // largest audio sample we may read
+// mainAudioDuration = format.getLong(MediaFormat.KEY_DURATION);
+ }
+ }
+
+ // frame (video) track info
+ frameVideoExtractor.setDataSource(frameVideoPath); // set the frame-source video
+ int trackCount = frameVideoExtractor.getTrackCount(); // number of tracks in the data source
+ // loop over the tracks to find the video track we want
+ for (int i = 0; i < trackCount; i++) {
+ MediaFormat format = frameVideoExtractor.getTrackFormat(i); // format of the track at this index
+ String mimeType = format.getString(MediaFormat.KEY_MIME); // the format's MIME type
+ if (mimeType.startsWith("video/")) { // found the video track
+ frameExtractorTrackIndex = i;
+ frameMuxerTrackIndex = muxer.addTrack(format); // add the video track to the MediaMuxer; returns the new track index
+ frameMaxInputSize = format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE); // largest video sample we may read
+ frameRate = format.getInteger(MediaFormat.KEY_FRAME_RATE); // frame rate of the video
+ frameDuration = format.getLong(MediaFormat.KEY_DURATION); // duration of the video, in microseconds
+ }
+ }
+
+ muxer.start(); // start muxing
+
+ audioVideoExtractor.selectTrack(mainAudioExtractorTrackIndex); // select the audio track on the audio-source extractor
+ MediaCodec.BufferInfo audioBufferInfo = new MediaCodec.BufferInfo();
+ ByteBuffer audioByteBuffer = ByteBuffer.allocate(mainAudioMaxInputSize);
+ while (true) {
+ int readSampleSize = audioVideoExtractor.readSampleData(audioByteBuffer, 0); // read the current encoded sample into the byte buffer
+ if (readSampleSize < 0) { // no more samples: exit the loop
+ audioVideoExtractor.unselectTrack(mainAudioExtractorTrackIndex);
+ break;
+ }
+
+ long sampleTime = audioVideoExtractor.getSampleTime(); // presentation time of the current sample, in microseconds
+
+ if (sampleTime < audioStartTime) { // before the desired start time: skip ahead
+ audioVideoExtractor.advance(); // advance to the next sample
+ continue;
+ }
+
+ if (sampleTime > audioStartTime + frameDuration) { // past start time + video duration: stop copying audio
+ break;
+ }
+ // fill in the sample metadata
+ audioBufferInfo.size = readSampleSize;
+ audioBufferInfo.offset = 0;
+ audioBufferInfo.flags = audioVideoExtractor.getSampleFlags();
+ audioBufferInfo.presentationTimeUs = sampleTime - audioStartTime;
+
+ muxer.writeSampleData(mainAudioMuxerTrackIndex, audioByteBuffer, audioBufferInfo); // write the sample to the muxer
+ audioVideoExtractor.advance(); // advance to the next sample
+ }
+
+ frameVideoExtractor.selectTrack(frameExtractorTrackIndex); // select the video track on the frame-source extractor
+ MediaCodec.BufferInfo videoBufferInfo = new MediaCodec.BufferInfo();
+ ByteBuffer videoByteBuffer = ByteBuffer.allocate(frameMaxInputSize);
+ while (true) {
+ int readSampleSize = frameVideoExtractor.readSampleData(videoByteBuffer, 0); // read the current encoded sample into the byte buffer
+ if (readSampleSize < 0) { // no more samples: exit the loop
+ frameVideoExtractor.unselectTrack(frameExtractorTrackIndex);
+ break;
+ }
+ // fill in the sample metadata
+ videoBufferInfo.size = readSampleSize;
+ videoBufferInfo.offset = 0;
+ videoBufferInfo.flags = frameVideoExtractor.getSampleFlags();
+ videoBufferInfo.presentationTimeUs += 1000 * 1000 / frameRate;
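+ // note: this assumes a constant frame rate; for variable-frame-rate input,
+ // frameVideoExtractor.getSampleTime() would give the real timestamps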
+
+ muxer.writeSampleData(frameMuxerTrackIndex, videoByteBuffer, videoBufferInfo); // write the sample to the muxer
+ frameVideoExtractor.advance(); // advance to the next sample
+ }
+ } catch (IOException e) {
+ iDetailCallBackListener.failed(e);
+ Log.e(TAG, "combineTwoVideos: ", e);
+ return; // don't report success after a failure
+ } finally {
+ // release resources
+ audioVideoExtractor.release();
+ frameVideoExtractor.release();
+ if (muxer != null) {
+ muxer.release();
+ }
+ }
+ iDetailCallBackListener.success();
+ Log.d(TAG, "combineTwoVideos: done");
+ }
+
+}
diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/utils/CheckPermissionUtil.java b/app/src/main/java/com/aserbao/androidcustomcamera/utils/CheckPermissionUtil.java
new file mode 100755
index 0000000..f4ccf50
--- /dev/null
+++ b/app/src/main/java/com/aserbao/androidcustomcamera/utils/CheckPermissionUtil.java
@@ -0,0 +1,259 @@
+package com.aserbao.androidcustomcamera.utils;
+
+import android.Manifest;
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.location.LocationManager;
+import android.media.AudioFormat;
+import android.media.AudioRecord;
+import android.media.MediaRecorder;
+import android.os.Build;
+import android.support.v4.content.ContextCompat;
+import android.util.Log;
+
+import static com.aserbao.androidcustomcamera.base.MyApplication.getContext;
+
+
+/**
+ */
+
+public class CheckPermissionUtil {
+
+ private static String TAG = "CheckPermissionUtil";
+
+ // audio capture source
+ public static int audioSource = MediaRecorder.AudioSource.MIC;
+ // 设置音频采样率,44100是目前的标准,但是某些设备仍然支持22050,16000,11025
+ public static int sampleRateInHz = 44100;
+ // 设置音频的录制的声道CHANNEL_IN_STEREO为双声道,CHANNEL_CONFIGURATION_MONO为单声道
+ public static int channelConfig = AudioFormat.CHANNEL_IN_STEREO;
+ // 音频数据格式:PCM 16位每个样本。保证设备支持。PCM 8位每个样本。不一定能得到设备支持。
+ public static int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
+ // 缓冲区字节大小
+ public static int bufferSizeInBytes = 0;
+
+ /**
+ * @return true if location permission is granted and a location provider is enabled
+ */
+ public static boolean isLocationPermGrantedAndOpen(){
+ boolean result = false;
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ Log.i(TAG,"isLocationPermGrantedAndOpen()--- Build.VERSION.SDK_INT >= Build.VERSION_CODES.M ");
+ if (ContextCompat.checkSelfPermission(getContext(), Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED ||
+ ContextCompat.checkSelfPermission(getContext(), Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED )
+ {
+ Log.i(TAG,"isLocationPermGrantedAndOpen()--- result = false");
+ result = false;
+ } else {
+ if(isOPenGPS(getContext())){
+ Log.i(TAG,"isLocationPermGrantedAndOpen()--- result = true");
+ result = true;
+ }
+ }
+ } else {
+ Log.i(TAG,"isLocationPermGrantedAndOpen()--- Build.VERSION.SDK_INT < Build.VERSION_CODES.M ");
+ result = true;
+ }
+ return result;
+ }
+
+ /**
+ * @return true if camera, microphone and storage permissions are all granted
+ */
+ public static boolean isCameraGranted(){
+ boolean result = false;
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ Log.i(TAG,"isCameraGranted()--- Build.VERSION.SDK_INT >= Build.VERSION_CODES.M ");
+ if (ContextCompat.checkSelfPermission(getContext(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED ||
+ ContextCompat.checkSelfPermission(getContext(), Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED ||
+ ContextCompat.checkSelfPermission(getContext(), Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED ||
+ ContextCompat.checkSelfPermission(getContext(), Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED)
+ {
+ Log.i(TAG,"isCameraGranted()--- result = false");
+ result = false;
+ } else {
+ result = true;
+ }
+ } else {
+ Log.i(TAG,"isCameraGranted()--- Build.VERSION.SDK_INT < Build.VERSION_CODES.M ");
+ result = true;
+ }
+ return result;
+ }
+
+ /**
+ * On some Huawei phones this check alone cannot tell whether location is actually
+ * available; use isLocationPermGrantedAndOpen above for that.
+ * @return true if location permission is granted
+ */
+ public static boolean isLocationPermGranted(){
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
+ return true; // runtime permissions do not exist before M
+ }
+ return ContextCompat.checkSelfPermission(getContext(), Manifest.permission.ACCESS_FINE_LOCATION) == PackageManager.PERMISSION_GRANTED &&
+ ContextCompat.checkSelfPermission(getContext(), Manifest.permission.ACCESS_COARSE_LOCATION) == PackageManager.PERMISSION_GRANTED;
+ }
+
+ /**
+ * @return true if read and write external storage permissions are granted
+ */
+ public static boolean isStoragePermGranted(){
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
+ return true;
+ }
+ return ContextCompat.checkSelfPermission(getContext(), Manifest.permission.WRITE_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED &&
+ ContextCompat.checkSelfPermission(getContext(), Manifest.permission.READ_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED;
+ }
+
+ /**
+ * Checks the record-audio permission in a way that works both below and above
+ * Android 6.0, by actually trying to record a short burst of audio.
+ */
+ public static boolean isHasPermission(final Context context){
+ bufferSizeInBytes = AudioRecord.getMinBufferSize(sampleRateInHz,
+ channelConfig, audioFormat);
+
+ AudioRecord audioRecord = null;
+ try {
+ // Some Meitu phones throw IllegalArgumentException here:
+ // https://fabric.io/getremark/android/apps/com.getremark.spot/issues/5b719a816007d59fcdac62f0?time=last-seven-days
+ audioRecord = new AudioRecord(audioSource, sampleRateInHz,
+ channelConfig, audioFormat, bufferSizeInBytes);
+ } catch (Exception e) {
+ e.printStackTrace();
+ return false;
+ }
+
+ // start recording; guard against crashes on some phones, e.g. Lenovo
+ try{
+ audioRecord.startRecording();
+ }catch (IllegalStateException e){
+ e.printStackTrace();
+ }
+ // if recording did not actually start, the permission was denied
+ if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
+ audioRecord.release(); // don't leak the AudioRecord on the failure path
+ return false;
+ }
+ audioRecord.stop();
+ audioRecord.release();
+
+ return true;
+ }
+
+ /**
+ * @return true if camera permission is granted
+ */
+ public static boolean isCameraPermissionGranted(){
+ return isPermissionGranted(Manifest.permission.CAMERA);
+ }
+
+ public static boolean isReadStoragePermissionsGranted(){
+ return isPermissionGranted(Manifest.permission.READ_EXTERNAL_STORAGE);
+ }
+
+ public static boolean isWriteStoragePermissionsGranted(){
+ return isPermissionGranted(Manifest.permission.WRITE_EXTERNAL_STORAGE);
+ }
+
+ public static boolean isPermissionGranted(String permission) {
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
+ return true; // permissions are granted at install time before M
+ }
+ return ContextCompat.checkSelfPermission(getContext(), permission) == PackageManager.PERMISSION_GRANTED;
+ }
+
+ public static boolean isRecordAudioPermissionsGranted(){
+ return isPermissionGranted(Manifest.permission.RECORD_AUDIO);
+ }
+
+ public static boolean isWriteSettingPermissionsGranted(){
+ // Note: on API 23+ WRITE_SETTINGS is a special permission and
+ // Settings.System.canWrite() is the authoritative check;
+ // checkSelfPermission() may not reflect the real state.
+ return isPermissionGranted(Manifest.permission.WRITE_SETTINGS);
+ }
+
+ public static boolean isContactsPermissionGranted() {
+ boolean result = isPermissionGranted(Manifest.permission.READ_CONTACTS);
+ Log.i(TAG, "isContactsPermissionGranted()--- result = " + result);
+ return result;
+ }
+
+
+ public static boolean isReadSmsPermissionGranted() {
+ // before M, permissions are granted at install time
+ return isPermissionGranted(Manifest.permission.READ_SMS);
+ }
+
+
+ public static boolean isOPenGPS(final Context context) {
+ LocationManager locationManager
+ = (LocationManager) context.getSystemService(Context.LOCATION_SERVICE);
+ // GPS satellite positioning: street-level accuracy, fast and precise outdoors and in open areas
+ boolean gps = locationManager.isProviderEnabled(LocationManager.GPS_PROVIDER);
+ Log.i("isOPenGPS()","gps = "+gps);
+ // network positioning via Wi-Fi or mobile network (3G/2G), a.k.a. AGPS (assisted GPS);
+ // mainly useful indoors or under heavy cover (dense buildings, forests, etc.)
+ boolean network = locationManager.isProviderEnabled(LocationManager.NETWORK_PROVIDER);
+ Log.i("isOPenGPS()","network = "+network);
+ return gps || network;
+ }
+}
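CheckPermissionUtil only reports state; it never requests anything. Below is a small sketch of how an Activity might combine it with ActivityCompat.requestPermissions before opening the recorder. The RecorderLauncher class and the request code are made up for illustration; only the CheckPermissionUtil methods and the support-library call are taken as given.

import android.Manifest;
import android.app.Activity;
import android.support.v4.app.ActivityCompat;

import com.aserbao.androidcustomcamera.utils.CheckPermissionUtil;

public class RecorderLauncher {
    // hypothetical request code, echoed back in onRequestPermissionsResult()
    private static final int REQ_CAMERA_BUNDLE = 1001;

    /** Requests the camera permission bundle only when something is missing. */
    public static void launchRecorder(Activity activity) {
        if (CheckPermissionUtil.isCameraGranted() && CheckPermissionUtil.isHasPermission(activity)) {
            // all required permissions are in place; start the recorder here
            return;
        }
        ActivityCompat.requestPermissions(activity, new String[] {
                Manifest.permission.CAMERA,
                Manifest.permission.RECORD_AUDIO,
                Manifest.permission.WRITE_EXTERNAL_STORAGE,
                Manifest.permission.READ_EXTERNAL_STORAGE,
        }, REQ_CAMERA_BUNDLE);
        // handle the user's choice in the Activity's onRequestPermissionsResult()
    }
}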
diff --git a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/RecorderActivity.java b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/RecorderActivity.java
index 8d9f3a7..2b6aa78 100644
--- a/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/RecorderActivity.java
+++ b/app/src/main/java/com/aserbao/androidcustomcamera/whole/record/RecorderActivity.java
@@ -19,6 +19,7 @@
import android.widget.Toast;
import com.aserbao.androidcustomcamera.R;
+import com.aserbao.androidcustomcamera.WelcomeActivity;
import com.aserbao.androidcustomcamera.base.MyApplication;
import com.aserbao.androidcustomcamera.base.activity.BaseActivity;
import com.aserbao.androidcustomcamera.base.pop.PopupManager;
@@ -36,6 +37,8 @@
import com.aserbao.androidcustomcamera.whole.record.ui.SlideGpuFilterGroup;
import com.aserbao.androidcustomcamera.whole.videoPlayer.VideoPlayerActivity2;
+import org.jetbrains.annotations.NotNull;
+
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.concurrent.ExecutorService;
@@ -371,6 +374,8 @@ public void run() {
}
});
}
+
+
private static class MyHandler extends Handler {
private WeakReference