Encoding ARGB_8888 bitmaps to a video file with Android MediaCodec fails on a Pixel 3a device

Problem description

I am using Android MediaCodec to decode and re-encode video files. With the code below, decoding and encoding work correctly on every device except the Pixel 3a. For encoding, the encoder builds the video file from a set of bitmaps, but on the Pixel 3a alone the bitmap encoding fails and produces a distorted video file.

Device details: Name: Pixel 3a, Android version: 11

public class ImageProcessor implements Runnable {
    private static final String VIDEO = "video/";
    private static final String TAG = "VideoDecoder";
    private static final long DEFAULT_TIMEOUT_US = 0;
    private final String inputFile;
    private final String outputFile;
    private MediaCodec mDecoder;

    private MediaExtractor mExtractor;
    private RenderScript rs;
    private ScriptIntrinsicYuvToRGB yuvToRgbIntrinsic;
    private int width;
    private int height;

    private MediaCodec mEncoder;
    private MediaMuxer mediamuxer;
    private int mTrackIndex;
    private ScriptC_rotators rotateScript;
    private int newWidth = 0,newHeight = 0;

    private int preRotateHeight;
    private int preRotateWidth;
    private Allocation fromrotateallocation;
    private Allocation toRotateallocation;
    private int frameIndex;
    private int deviceOrientation;
    private int sensorOrientation;
    private final Handler handler;

    boolean sawOutputEOS = false;
    boolean sawInputEOS = false;

    private static final int SENSOR_ORIENTATION_DEFAULT_DEGREES = 90;
    private FrameObject defaultObject;
    private int faceBlurCount;
    private long startTime;
    private float frameRate;
    private int generateIndex;

    public ImageProcessor(Handler handler,String inputFile,String outputFile) {
        this.inputFile = inputFile;
        this.outputFile = outputFile;
        this.handler = handler;
    }

    public void setDeviceOrientation(int deviceOrientation) {
        this.deviceOrientation = deviceOrientation;
    }

    public void setSensorOrientation(int sensorOrientation) {
        this.sensorOrientation = sensorOrientation;
    }

    public void setDefaultObject(FrameObject frameObject) {
        this.defaultObject = frameObject;
    }

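    // Sets up the MediaExtractor/decoder for the input file and the encoder/MediaMuxer for the
    // output file. The encoder format reuses the source bit rate and frame rate, swaps width and
    // height according to the device orientation, and requests YUV420 semi-planar input.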
    private void init() {
        try {
            mExtractor = new MediaExtractor();
            mExtractor.setDataSource(inputFile);
            
            MediaMetadataRetriever retriever = new MediaMetadataRetriever();
            retriever.setDataSource(inputFile);

            FFmpegMediaMetadataRetriever MetadataRetriever = new FFmpegMediaMetadataRetriever();
            MetadataRetriever.setDataSource(inputFile);

            rs = RenderScript.create(Globals.getAppContext());
            yuvToRgbIntrinsic = ScriptIntrinsicYuvToRGB.create(rs,Element.U8_4(rs));
            rotateScript = new ScriptC_rotators(rs);

            for (int i = 0; i < mExtractor.getTrackCount(); i++) {
                MediaFormat format = mExtractor.getTrackFormat(i);

                String mimeType = format.getString(MediaFormat.KEY_MIME);

                width = format.getInteger(MediaFormat.KEY_WIDTH);
                height = format.getInteger(MediaFormat.KEY_HEIGHT);

                frameRate = Float.parseFloat(MetadataRetriever.extractMetadata(
                        FFmpegMediaMetadataRetriever.METADATA_KEY_FRAMERATE));

                int bitRate = Integer.parseInt(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_BITRATE));
                if (mimeType != null && mimeType.startsWith(VIDEO)) {
                    mExtractor.selectTrack(i);
                    mDecoder = MediaCodec.createDecoderByType(mimeType);
                    mDecoder.configure(format,null,null,0 /* Decoder */);
                    mDecoder.start();

                    MediaCodecInfo mediaCodecInfo = selectCodec(mimeType);
                    if (mediaCodecInfo == null) {
                        throw new RuntimeException("Failed to initialise codec");
                    }

                    switch (deviceOrientation) {
                        case Surface.ROTATION_0:
                        case Surface.ROTATION_180:
                            newWidth = height;
                            newHeight = width;
                            break;

                        case Surface.ROTATION_90:
                        case Surface.ROTATION_270:
                            newWidth = width;
                            newHeight = height;
                            break;
                    }
                    MediaFormat mediaFormat = MediaFormat.createVideoFormat(mimeType,newWidth,newHeight);
                    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE,bitRate);
                    mediaFormat.setFloat(MediaFormat.KEY_FRAME_RATE,frameRate);
                    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
                    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL,1);
                    mEncoder.configure(mediaFormat,null,null,MediaCodec.CONFIGURE_FLAG_ENCODE);
                    mEncoder.start();

                    mediamuxer = new MediaMuxer(outputFile,MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
                    break;
                }
            }
        } catch (IOException e) {
            throw new RuntimeException("Failed to initialise codec");
        }
    }


    /**
     * Returns the first codec capable of encoding the specified MIME type,or null if no
     * match was found.
     */
    private MediaCodecInfo selectCodec(String mimeType) throws IOException {
        MediaCodecList list = new MediaCodecList(MediaCodecList.ALL_CODECS);
        MediaCodecInfo[] codecInfos = list.getCodecInfos();
        for (MediaCodecInfo info : codecInfos) {
            if (info.isEncoder()) {
                mEncoder = MediaCodec.createByCodecName(info.getName());
                String[] types = info.getSupportedTypes();
                for (String type : types) {
                    if (type.equalsIgnoreCase(mimeType)) {
                        return info;
                    }
                }
            }
        }
        return null;
    }

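    // Main processing loop: feeds compressed samples from the extractor to the decoder, converts
    // each decoded Image to an ARGB_8888 Bitmap with the RenderScript YUV-to-RGB intrinsic, and
    // passes the bitmap to encodeBitmaps() until the decoder reports end of stream.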
    public void startProcessing() {
        init();

        MediaCodec.BufferInfo decoderBufferInfo = new MediaCodec.BufferInfo();
        MediaCodec.BufferInfo encoderBufferInfo = new MediaCodec.BufferInfo();
        startTime = System.currentTimeMillis();
        while (!sawOutputEOS) {
            Log.d(TAG,"startProcessing: " + frameIndex);
            if (!sawInputEOS && mDecoder != null) {
                int inputBufferId = mDecoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
                if (inputBufferId >= 0) {
                    ByteBuffer inputBuffer = mDecoder.getInputBuffer(inputBufferId);
                    int sampleSize = mExtractor.readSampleData(inputBuffer,0);
                    if (sampleSize < 0) {
                        mDecoder.queueInputBuffer(inputBufferId,0,0,0L,MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        sawInputEOS = true;
                    } else {
                        if (mExtractor != null) {
                            long presentationTimeUs = mExtractor.getSampleTime();
                            mDecoder.queueInputBuffer(inputBufferId,0,sampleSize,presentationTimeUs,0);
                            mExtractor.advance();
                        }
                    }
                }
            }

            int outputBufferId = mDecoder.dequeueOutputBuffer(decoderBufferInfo,DEFAULT_TIMEOUT_US);
            if (outputBufferId >= 0) {
                if ((decoderBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    sawOutputEOS = true;
                    Log.d(TAG,"endProcessing: " + TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - startTime));
                }

                boolean doRender = (decoderBufferInfo.size != 0);
                if (doRender && mDecoder != null) {
                    Image image = mDecoder.getOutputImage(outputBufferId);
                    if (image != null) {
                        try {
                            frameIndex++;
                            byte[] frameData = quarterNV21(convertYUV420888ToNV21(image),image.getWidth(),image.getHeight());
                            byte[] data = getDataFromImage(image);

                            Type.Builder yuvType = new Type.Builder(rs,Element.U8(rs)).setX(data.length);
                            Allocation in = Allocation.createTyped(rs,yuvType.create(),Allocation.USAGE_SCRIPT);

                            Type.Builder rgbaType = new Type.Builder(rs,Element.RGBA_8888(rs)).setX(width).setY(height);
                            Allocation out = Allocation.createTyped(rs,rgbaType.create(),Allocation.USAGE_SCRIPT);

                            Bitmap bitmap = Bitmap.createBitmap(width,height,Bitmap.Config.ARGB_8888);

                            in.copyFromUnchecked(data);

                            yuvToRgbIntrinsic.setInput(in);
                            yuvToRgbIntrinsic.forEach(out);
                            out.copyTo(bitmap);
                            image.close();
                            encodeBitmaps(bitmap,encoderBufferInfo);
                        } catch (Exception e) {
                            Log.d(TAG,"startProcessing: " + e.getMessage());
                        }
                    }
                    if (mDecoder != null) {
                        mDecoder.releaseOutputBuffer(outputBufferId,false);
                    }
                }
            }
        }
    }

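    // Derives a microsecond presentation timestamp for the generated frame index from the source
    // frame rate.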
    private long computePresentationTime(int frameIndex) {
        return 132 + frameIndex * 1000000 / (int)frameRate;
    }

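    // Concatenates the Y plane and the V plane buffers of a YUV_420_888 Image; on devices with
    // interleaved chroma planes this approximates an NV21 buffer. Its result is only passed to
    // quarterNV21(), whose output is not used further.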
    private byte[] convertYUV420888ToNV21(Image image) {
        byte[] data;
        ByteBuffer buffer0 = image.getPlanes()[0].getBuffer();
        ByteBuffer buffer2 = image.getPlanes()[2].getBuffer();
        int buffer0_size = buffer0.remaining();
        int buffer2_size = buffer2.remaining();
        data = new byte[buffer0_size + buffer2_size];
        buffer0.get(data,0,buffer0_size);
        buffer2.get(data,buffer0_size,buffer2_size);
        return data;
    }

    private byte[] quarterNV21(byte[] data,int iWidth,int iHeight) {
        byte[] yuv = new byte[iWidth * iHeight * 3 / 2];
        // copy the luma (Y) plane only
        int i = 0;
        for (int y = 0; y < iHeight; y++) {
            for (int x = 0; x < iWidth; x++) {
                yuv[i] = data[y * iWidth + x];
                i++;
            }
        }
        return yuv;
    }

    private void release() {
        try {
            if (mExtractor != null) {
                mExtractor.release();
                mExtractor = null;
            }
            if (mDecoder != null) {
                mDecoder.stop();
                mDecoder.release();
                mDecoder = null;
            }

            if (mEncoder != null) {
                mEncoder.stop();
                mEncoder.release();
                mEncoder = null;
            }

            if (mediamuxer != null) {
                mediamuxer.stop();
                mediamuxer.release();
                mediamuxer = null;
            }

        } catch (Exception e) {
            Log.d(TAG,"imageprocessor release: " + e.fillInStackTrace());
        }
        Message message = handler.obtainMessage();
        Bundle bundle = new Bundle();
        bundle.putString(FrameUtil.COMPUTATION_SUCCESS_KEY,this.outputFile);
        bundle.putInt(FrameUtil.FACE_BLUR_COUNT,faceBlurCount);
        message.setData(bundle);
        handler.sendMessage(message);
    }

    // encode the bitmap to a new video file
    private void encodeBitmaps(Bitmap bitmap,MediaCodec.BufferInfo encoderBufferInfo) {
        Bitmap rotatedBitmap = null;
        switch (deviceOrientation) {
            case Surface.ROTATION_0:
                if (sensorOrientation == SENSOR_ORIENTATION_DEFAULT_DEGREES) {
                    rotatedBitmap = rotateBitmap(bitmap,270);
                } else {
                    rotatedBitmap = rotateBitmap(bitmap,90);
                }
                break;

            case Surface.ROTATION_90:
                Bitmap newBitmap = rotateBitmap(bitmap,90);
                bitmap.recycle();
                rotatedBitmap = rotateBitmap(newBitmap,90);
                break;

            default:
                rotatedBitmap = bitmap;
        }

        byte[] bytes = getNV21(rotatedBitmap.getWidth(),rotatedBitmap.getHeight(),rotatedBitmap);
        int inputBufIndex = mEncoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
        long ptsUsec = computePresentationTime(generateIndex);
        if (inputBufIndex >= 0) {
            ByteBuffer inputBuffer = mEncoder.getInputBuffer(inputBufIndex);
            if (inputBuffer != null) {
                inputBuffer.clear();
                inputBuffer.put(bytes);
                mEncoder.queueInputBuffer(inputBufIndex,0,bytes.length,ptsUsec,0);
                generateIndex++;
            }
        }
        int encoderStatus = mEncoder.dequeueOutputBuffer(encoderBufferInfo,DEFAULT_TIMEOUT_US);
        if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat newFormat = mEncoder.getOutputFormat();
            mTrackIndex = mediamuxer.addTrack(newFormat);
            mediamuxer.start();
        } else if (encoderBufferInfo.size != 0) {
            ByteBuffer outputBuffer = mEncoder.getOutputBuffer(encoderStatus);
            if (outputBuffer != null) {
                outputBuffer.position(encoderBufferInfo.offset);
                outputBuffer.limit(encoderBufferInfo.offset + encoderBufferInfo.size);
                mediamuxer.writeSampleData(mTrackIndex,outputBuffer,encoderBufferInfo);
                mEncoder.releaseOutputBuffer(encoderStatus,false);
            }
            if ((encoderBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                mEncoder.signalEndOfInputStream();
            }
        }
    }
    
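    // The two helpers below cache the rotation script's input/output Allocations so they are only
    // recreated when the frame dimensions change.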
    private Allocation getFromrotateallocation(Bitmap bitmap) {
        int targetHeight = bitmap.getWidth();
        int targetWidth = bitmap.getHeight();
        if (targetHeight != preRotateHeight || targetWidth != preRotateWidth) {
            preRotateHeight = targetHeight;
            preRotateWidth = targetWidth;
            fromrotateallocation = Allocation.createFromBitmap(rs,bitmap,Allocation.MipmapControl.MIPMAP_NONE,Allocation.USAGE_SCRIPT);
        }
        return fromrotateallocation;
    }

    private Allocation getToRotateallocation(Bitmap bitmap) {
        int targetHeight = bitmap.getWidth();
        int targetWidth = bitmap.getHeight();
        if (targetHeight != preRotateHeight || targetWidth != preRotateWidth) {
            toRotateallocation = Allocation.createFromBitmap(rs,bitmap,Allocation.MipmapControl.MIPMAP_NONE,Allocation.USAGE_SCRIPT);
        }
        return toRotateallocation;
    }


    private Bitmap rotateBitmap(Bitmap bitmap,int angle) {
        Bitmap.Config config = bitmap.getConfig();
        int targetHeight = bitmap.getWidth();
        int targetWidth = bitmap.getHeight();

        rotateScript.set_inWidth(bitmap.getWidth());
        rotateScript.set_inHeight(bitmap.getHeight());

        Allocation sourceAllocation = getFromrotateallocation(bitmap);
        sourceAllocation.copyFrom(bitmap);
        rotateScript.set_inImage(sourceAllocation);

        Bitmap target = Bitmap.createBitmap(targetWidth,targetHeight,config);
        final Allocation targetAllocation = getToRotateallocation(target);
        if (angle == 90) {
            rotateScript.forEach_rotate_90_clockwise(targetAllocation,targetAllocation);
        } else {
            rotateScript.forEach_rotate_270_clockwise(targetAllocation,targetAllocation);
        }

        targetAllocation.copyTo(target);

        return target;
    }

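    // Converts an ARGB_8888 bitmap into a YUV420 semi-planar byte array (Y plane followed by
    // interleaved chroma) for the encoder, then recycles the bitmap.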
    private byte[] getNV21(int inputWidth,int inputHeight,Bitmap bitmap) {
        int[] argb = new int[inputWidth * inputHeight];
        bitmap.getPixels(argb,0,inputWidth,0,0,inputWidth,inputHeight);
        byte[] yuv = new byte[inputWidth * inputHeight * 3 / 2];
        encodeYUV420SP(yuv,argb,inputWidth,inputHeight);
        bitmap.recycle();
        return yuv;
    }

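    // RGB to YUV conversion with BT.601 integer coefficients: writes a full-resolution Y plane,
    // then one interleaved chroma pair per 2x2 pixel block (4:2:0 subsampling).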
    private void encodeYUV420SP(byte[] yuv420sp,int[] rgb,int width,int height) {

        final int frameSize = width * height;

        int yIndex = 0;
        int uvIndex = frameSize;

        int R,G,B,Y,U,V;
        int index = 0;
        for (int j = 0; j < height; j++) {
            for (int i = 0; i < width; i++) {

                //a = (aRGB[index] & 0xff000000) >> 24; //not using it right Now
                R = (rgb[index] & 0xff0000) >> 16;
                G = (rgb[index] & 0xff00) >> 8;
                B = (rgb[index] & 0xff);


                Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
                U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
                V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;

                yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : (Math.min(Y,255)));
                if (j % 2 == 0 && index % 2 == 0) {
                    yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0 : (Math.min(U,255)));
                    yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0 : (Math.min(V,255)));

                }
                index++;
            }
        }
    }

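    // Copies the planes of a YUV_420_888 Image into one contiguous YUV420 buffer, honouring each
    // plane's row and pixel stride and interleaving the two chroma planes after the Y plane.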
    private static byte[] getDataFromImage(Image image) {
        Rect crop = image.getCropRect();
        int format = image.getFormat();
        int width = crop.width();
        int height = crop.height();
        Image.Plane[] planes = image.getPlanes();
        byte[] data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
        byte[] rowData = new byte[planes[0].getRowStride()];

        int channelOffset = 0;
        int outputStride = 1;
        for (int i = 0; i < planes.length; i++) {
            switch (i) {
                case 0:
                    channelOffset = 0;
                    outputStride = 1;
                    break;
                case 1:
                    channelOffset = width * height + 1;
                    outputStride = 2;
                    break;
                case 2:
                    channelOffset = width * height;
                    outputStride = 2;
                    break;
            }

            ByteBuffer buffer = planes[i].getBuffer();
            int rowStride = planes[i].getRowStride();
            int pixelStride = planes[i].getPixelStride();

            int shift = (i == 0) ? 0 : 1;
            int w = width >> shift;
            int h = height >> shift;
            buffer.position(rowStride * (crop.top >> shift) + pixelStride * (crop.left >> shift));
            for (int row = 0; row < h; row++) {
                int length;
                if (pixelStride == 1 && outputStride == 1) {
                    length = w;
                    buffer.get(data,channelOffset,length);
                    channelOffset += length;
                } else {
                    length = (w - 1) * pixelStride + 1;
                    buffer.get(rowData,0,length);
                    for (int col = 0; col < w; col++) {
                        data[channelOffset] = rowData[col * pixelStride];
                        channelOffset += outputStride;
                    }
                }
                if (row < h - 1) {
                    buffer.position(buffer.position() + rowStride - length);
                }
            }
        }
        return data;
    }
    
    @Override
    public void run() {
        try {
            startProcessing();
        } catch (Exception ex) {
            Log.d(TAG,"run: " + ex.getCause());
        } finally {
            release();
        }
    }

    public void stopProcessing() {
        sawOutputEOS = true;
    }
}

Please review the code and tell me what I am doing wrong.

[Image: distorted video frames]

Solution

No working solution for this problem has been found yet.
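One avenue worth checking (a suggestion only, not a verified fix): the code hard-codes COLOR_FormatYUV420SemiPlanar as the encoder input format and hand-packs the chroma bytes, but hardware encoders on different devices can expect a different YUV layout or row alignment, which would show up as a distorted picture on one device only. Below is a minimal sketch of querying the selected encoder for the raw color formats it actually supports; codecInfo and mimeType stand in for the MediaCodecInfo and MIME type already obtained in init().

    // Sketch (assumption: codecInfo and mimeType come from selectCodec()/init() above).
    // Lists the raw input color formats the encoder reports instead of assuming
    // COLOR_FormatYUV420SemiPlanar.
    MediaCodecInfo.CodecCapabilities caps = codecInfo.getCapabilitiesForType(mimeType);
    for (int colorFormat : caps.colorFormats) {
        Log.d(TAG, "Encoder supports color format: " + colorFormat);
    }
    // COLOR_FormatYUV420Flexible is the device-independent option on API 21+; if it is listed,
    // the encoder can be configured with it and fed through the Image returned by
    // MediaCodec.getInputImage() rather than a hand-packed semi-planar byte array.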
