BebopVideoView to Mat


#1

Hello.
I have run into a big problem.
I'm trying to convert BebopVideoView frames to a Mat (BebopVideoView is from the Parrot drone sample code), but I have been failing for several days.
Here is the code.

package com.hyeonjung.dronecontroll.view;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.os.Environment;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

import com.parrot.arsdk.arcontroller.ARCONTROLLER_STREAM_CODEC_TYPE_ENUM;

import com.parrot.arsdk.arcontroller.ARControllerCodec;
import com.parrot.arsdk.arcontroller.ARFrame;

import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

public class BebopVideoView extends SurfaceView implements SurfaceHolder.Callback {

    private static final String TAG = "BebopVideoView";
    private static final String VIDEO_MIME_TYPE = "video/avc";
    private static final int VIDEO_DEQUEUE_TIMEOUT = 33000;

    private MediaCodec mMediaCodec;
    private Lock mReadyLock;

    private boolean mIsCodecConfigured = false;

    private ByteBuffer mSpsBuffer;
    private ByteBuffer mPpsBuffer;

    private ByteBuffer[] mBuffers;

    private static final int VIDEO_WIDTH = 640;
    private static final int VIDEO_HEIGHT = 368;

    public byte[] a;
    public Mat k;


    public BebopVideoView(Context context) {
        super(context);
        customInit();
    }

    public BebopVideoView(Context context, AttributeSet attrs) {
        super(context, attrs);
        customInit();
    }

    public BebopVideoView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        customInit();
    }

    private void customInit() {
        mReadyLock = new ReentrantLock();
        getHolder().addCallback(this);
    }

    public void displayFrame(ARFrame frame) {
        mReadyLock.lock();

        if ((mMediaCodec != null)) {
            if (mIsCodecConfigured) {
                // Here we have either a good PFrame, or an IFrame
                int index = -1;

                try {
                    index = mMediaCodec.dequeueInputBuffer(VIDEO_DEQUEUE_TIMEOUT);
                } catch (IllegalStateException e) {
                    Log.e(TAG, "Error while dequeue input buffer");
                }
                if (index >= 0) {
                    ByteBuffer b;
                    if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
                        b = mMediaCodec.getInputBuffer(index); // fill inputBuffer with valid data
                    }
                    else {
                        b = mBuffers[index]; // fill inputBuffer with valid data
                        b.clear();
                    }

                    if (b != null) {
                        b.put(frame.getByteData(), 0, frame.getDataSize()); // copy the encoded frame into the input buffer
                        getMat(frame);
                        saveMat(k);
                    }

                    try {
                        mMediaCodec.queueInputBuffer(index, 0, frame.getDataSize(), 0, 0); // presentationTimeUs = 0, no flags
                    } catch (IllegalStateException e) {
                        Log.e(TAG, "Error while queue input buffer");
                    }
                }
            }

            // Try to display previous frame
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            int outIndex;
            try {
                outIndex = mMediaCodec.dequeueOutputBuffer(info, 0);

                while (outIndex >= 0) {
                    mMediaCodec.releaseOutputBuffer(outIndex, true);
                    outIndex = mMediaCodec.dequeueOutputBuffer(info, 0);
                }
            } catch (IllegalStateException e) {
                Log.e(TAG, "Error while dequeue input buffer (outIndex)");
            }
        }


        mReadyLock.unlock();
    }

    public void configureDecoder(ARControllerCodec codec) {
        mReadyLock.lock();

        if (codec.getType() == ARCONTROLLER_STREAM_CODEC_TYPE_ENUM.ARCONTROLLER_STREAM_CODEC_TYPE_H264) {
            ARControllerCodec.H264 codecH264 = codec.getAsH264();

            mSpsBuffer = ByteBuffer.wrap(codecH264.getSps().getByteData());
            mPpsBuffer = ByteBuffer.wrap(codecH264.getPps().getByteData());
        }

        if ((mMediaCodec != null) && (mSpsBuffer != null)) {
            configureMediaCodec();
        }

        mReadyLock.unlock();
    }

    private void configureMediaCodec() {
        MediaFormat format = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, VIDEO_WIDTH, VIDEO_HEIGHT);
        format.setByteBuffer("csd-0", mSpsBuffer);
        format.setByteBuffer("csd-1", mPpsBuffer);

        mMediaCodec.configure(format, getHolder().getSurface(), null, 0);
        mMediaCodec.start();

        if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.LOLLIPOP) {
            mBuffers = mMediaCodec.getInputBuffers();
        }

        mIsCodecConfigured = true;
    }

    private void initMediaCodec(String type) {
        try {
            mMediaCodec = MediaCodec.createDecoderByType(type);
        } catch (IOException e) {
            Log.e(TAG, "Exception", e);
        }

        if ((mMediaCodec != null) && (mSpsBuffer != null)) {
            configureMediaCodec();
        }
    }

    private void releaseMediaCodec() {
        if (mMediaCodec != null) {
            if (mIsCodecConfigured) {
                mMediaCodec.stop();
                mMediaCodec.release();
            }
            mIsCodecConfigured = false;
            mMediaCodec = null;
        }
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        mReadyLock.lock();
        initMediaCodec(VIDEO_MIME_TYPE);
        mReadyLock.unlock();
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {}

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        mReadyLock.lock();
        releaseMediaCodec();
        mReadyLock.unlock();
    }

    public void getMat(ARFrame frame) {
        k = new Mat(); // note: this Mat is empty (no rows/cols allocated)

        k.get(150, 150, frame.getByteData());
        k.put(150, 150, frame.getByteData());

        // or:
        // byte[] a = new byte[b.remaining()];
        // b.get(a);
        // k.get(150, 150, a);
        // k.put(150, 150, a);
    }

    public void saveMat (Mat mat) {
        Mat mIntermediateMat = new Mat(150, 150, CvType.CV_8UC1);
        Imgproc.cvtColor(mat, mIntermediateMat, Imgproc.COLOR_GRAY2BGR);

        File path = new File(Environment.getExternalStorageDirectory() + "/data");
        path.mkdirs();
        File file = new File(path, "image.png");
        String filename = file.toString();
        Boolean bool = Imgcodecs.imwrite(filename, mIntermediateMat);

        if (bool)
            Log.i(TAG, "SUCCESS writing image to external storage");
        else
            Log.i(TAG, "Fail writing image to external storage");
    }

}

I think I should be able to get image data from ByteBuffer b or frame.getByteData().

I inspected ByteBuffer b and frame.getByteData(): both contain byte values in the range -128 to 127.

But when I checked the result of getMat and saveMat, the Mat k was NULL.

What is wrong?

Please help me T.T


#2

Hi,

The data you get from the frame.getByteData() method is an H.264 encoded frame, not a decoded picture. We tell the decoder to render the frame directly onto a Surface, so in our code you never have access to the decoded picture.
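
A minimal sketch of the alternative (not Parrot's code; it assumes you configure the decoder without a Surface and that it outputs NV21-compatible YUV420 at VIDEO_WIDTH x VIDEO_HEIGHT — the real color format must be read from mMediaCodec.getOutputFormat()):

mMediaCodec.configure(format, null /* no Surface */, null, 0);
mMediaCodec.start();

// After queueing an encoded ARFrame as input, drain the decoded output:
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int outIndex = mMediaCodec.dequeueOutputBuffer(info, 0);
while (outIndex >= 0) {
    ByteBuffer out = mMediaCodec.getOutputBuffer(outIndex); // API 21+
    byte[] yuv = new byte[info.size];
    out.get(yuv);

    // Wrap the YUV bytes in a Mat and convert to BGR for OpenCV.
    Mat yuvMat = new Mat(VIDEO_HEIGHT + VIDEO_HEIGHT / 2, VIDEO_WIDTH, CvType.CV_8UC1);
    yuvMat.put(0, 0, yuv);
    Mat bgr = new Mat();
    Imgproc.cvtColor(yuvMat, bgr, Imgproc.COLOR_YUV2BGR_NV21);

    mMediaCodec.releaseOutputBuffer(outIndex, false); // false: nothing rendered
    outIndex = mMediaCodec.dequeueOutputBuffer(info, 0);
}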

Here is a Stack Overflow post discussing a similar subject (getting output from the video decoder) which might help you.

Regards,
Nicolas.


#3

Thank you so much for your answer.
OMG…
So, I got the image below.

I’ll try them.
Thank you.


#4

Hi minsoo, were you able to convert the ARFrame info to Mat?


#5

Hello.
I just used getMat() to obtain the frame information (getMat() is in the code above).
However, that approach converts the encoded frame data to a Mat, so it failed.

If you want to get a complete Mat image from the frame data, you will have to decode the encoded frame first.


#6

Thanks minsoo, and how did you decode the encoded frame?
I am trying to do it, but I don't know how to convert each frame to a bitmap or any useful format that I can use with OpenCV.


#7

Here’s my solution:


#8

Thanks Synman, I modified the code according to your suggestion and I got the following:

package com.i3rivale.droneapp;

import android.graphics.Bitmap;
import android.media.MediaExtractor;
import android.content.Context;
import android.graphics.BitmapFactory;
import android.graphics.SurfaceTexture;
import android.opengl.EGLSurface;
import android.media.Image;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.TextureView;

import com.parrot.arsdk.arcontroller.ARCONTROLLER_STREAM_CODEC_TYPE_ENUM;
import com.parrot.arsdk.arcontroller.ARControllerCodec;
import com.parrot.arsdk.arcontroller.ARFrame;

import org.opencv.core.Mat;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
 * Created by Rivale on 23/11/2016.
 */

public class VideoView extends TextureView implements TextureView.SurfaceTextureListener{

    private static final String TAG = "BebopVideoView";
    private static final String VIDEO_MIME_TYPE = "video/avc";
    private static final int VIDEO_DEQUEUE_TIMEOUT = 33000;

    private MediaCodec mMediaCodec;

    private Lock mReadyLock;

    private boolean mIsCodecConfigured = false;

    private ByteBuffer mSpsBuffer;
    private ByteBuffer mPpsBuffer;

    private ByteBuffer[] mBuffers;
    Surface surface;
    
    private static final int VIDEO_WIDTH = 640;
    private static final int VIDEO_HEIGHT = 368;

    public byte[] a;
    public Mat k;

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height)
    {
        this.surface= new Surface(surface);

        mReadyLock.lock();
        initMediaCodec(VIDEO_MIME_TYPE);

        mReadyLock.unlock();
        //surfaceCreated=true;
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {

    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        mReadyLock.lock();
        releaseMediaCodec();
        mReadyLock.unlock();
        return false;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {

    }

    public VideoView(Context context) {
        super(context);
        customInit();
    }

    public VideoView(Context context, AttributeSet attrs) {
        super(context, attrs);
        customInit();
    }

    public VideoView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        customInit();
    }

    private void customInit() {
        mReadyLock = new ReentrantLock();
        //getHolder().addCallback(this);
    }

    public void displayFrame(ARFrame frame) {
        mReadyLock.lock();
        if ((mMediaCodec != null)) {
            if (mIsCodecConfigured) {
                // Here we have either a good PFrame, or an IFrame
                int index = -1;

                try {
                    index = mMediaCodec.dequeueInputBuffer(VIDEO_DEQUEUE_TIMEOUT);
                } catch (IllegalStateException e) {
                    Log.e(TAG, "Error while dequeue input buffer");
                }
                if (index >= 0) {
                    ByteBuffer b;
                    if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
                        b = mMediaCodec.getInputBuffer(index);
                    } else {
                        b = mBuffers[index];
                        b.clear();
                    }

                    if (b != null) {

                        b.put(frame.getByteData(), 0, frame.getDataSize());
                    }

                    try {
                        mMediaCodec.queueInputBuffer(index, 0, frame.getDataSize(), 0, 0);
                    } catch (IllegalStateException e) {
                        Log.e(TAG, "Error while queue input buffer");
                    }
                }
            }

            // Try to display previous frame
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            int outIndex;
            try {

                outIndex = mMediaCodec.dequeueOutputBuffer(info, 0);

                while (outIndex >= 0) {
                    mMediaCodec.releaseOutputBuffer(outIndex, true);
                    outIndex = mMediaCodec.dequeueOutputBuffer(info, 0);
                }


            } catch (IllegalStateException e) {
                Log.e(TAG, "Error while dequeue input buffer (outIndex)");
            }
        }


        mReadyLock.unlock();
    }


    public void configureDecoder(ARControllerCodec codec) {
        mReadyLock.lock();


        if (codec.getType() == ARCONTROLLER_STREAM_CODEC_TYPE_ENUM.ARCONTROLLER_STREAM_CODEC_TYPE_H264) {
            ARControllerCodec.H264 codecH264 = codec.getAsH264();

            mSpsBuffer = ByteBuffer.wrap(codecH264.getSps().getByteData());
            mPpsBuffer = ByteBuffer.wrap(codecH264.getPps().getByteData());

        }


        if ((mMediaCodec != null) && (mSpsBuffer != null)) {
            configureMediaCodec();
        }

        mReadyLock.unlock();
    }
    /*
    12-26 16:15:26.013 29726-30224/com.i3rivale.droneapp D/BebopVideoView:
    outputimage={mime=video/raw, crop-top=0, crop-right=855, slice-height=480, color-format=2141391876,
    height=480, width=864, what=1869968451, crop-bottom=479, crop-left=0, stride=896}

    12-26 17:20:58.729 22819-25389/com.i3rivale.droneapp D/BebopVideoView:
    inputimage={height=368, width=640, mime=video/avc, adaptive-playback=1}
    * */

    private void configureMediaCodec() {
        mMediaCodec.stop();//?
        MediaFormat format = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, VIDEO_WIDTH, VIDEO_HEIGHT);
        format.setByteBuffer("csd-0", mSpsBuffer);
        format.setByteBuffer("csd-1", mPpsBuffer);

        mMediaCodec.configure(format, surface, null, 0);
        mMediaCodec.start();

        if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.LOLLIPOP) {
            mBuffers = mMediaCodec.getInputBuffers();
        }

        mIsCodecConfigured = true;
    }

    private void initMediaCodec(String type) {
        try {
            mMediaCodec = MediaCodec.createDecoderByType(type);
        } catch (IOException e) {
            Log.e(TAG, "Exception", e);
        }

        if ((mMediaCodec != null) && (mSpsBuffer != null)) {
            configureMediaCodec();
        }
    }

    private void releaseMediaCodec() {
        if (mMediaCodec != null) {
            if (mIsCodecConfigured) {
                mMediaCodec.stop();
                mMediaCodec.release();
            }
            mIsCodecConfigured = false;
            mMediaCodec = null;
        }
    }

}

but with that modification I don't get any image displayed in the TextureView.
I create the surface variable (initialized in onSurfaceTextureAvailable), but nothing is ever displayed.

Also, I noticed that "onSurfaceTextureAvailable" is never entered, so the MediaCodec never gets configured.

Thanks in advance


#9

I managed to view the camera feed from the Bebop using a TextureView as @synman suggested. However, when I try to get the bitmap from the TextureView, I always get an empty (black) bitmap.
Also, when I take a screenshot, the view corresponding to the TextureView is black.

Any idea how to solve this?

Thanks in advance


#10

I grab the bitmap in a thread/handler with no problems whatsoever.

Using a lock for synchronization, I then render my detection results in onDraw.
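
For reference, a minimal sketch of that thread/handler pattern (hypothetical names: bebopVideoView is the TextureView subclass from above, overlayView is whatever view draws the results):

// Background thread polls the TextureView's bitmap and converts it to a Mat;
// results are handed back to the UI thread through a handler.
final Handler uiHandler = new Handler(Looper.getMainLooper());

new Thread(new Runnable() {
    @Override
    public void run() {
        final Mat mat = new Mat();
        while (!Thread.currentThread().isInterrupted()) {
            final Bitmap bmp = bebopVideoView.getBitmap();
            if (bmp != null) {
                Utils.bitmapToMat(bmp, mat);
                // ... run detection on mat, store results under a lock ...
                uiHandler.post(new Runnable() {
                    @Override
                    public void run() {
                        overlayView.invalidate(); // results drawn in onDraw
                    }
                });
            }
            try {
                Thread.sleep(200);
            } catch (InterruptedException e) {
                break;
            }
        }
    }
}).start();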


#11

Could you share a sample of the whole code, so I can see how to do the getBitmap?
This is what I have so far:

public class VideoView extends TextureView implements TextureView.SurfaceTextureListener{

    private static final String TAG = "Video";
    private static final String VIDEO_MIME_TYPE = "video/avc";
    private static final int VIDEO_DEQUEUE_TIMEOUT = 33000;

    private MediaCodec mMediaCodec;

    private Lock mReadyLock;

    private boolean mIsCodecConfigured = false;

    private ByteBuffer mSpsBuffer;
    private ByteBuffer mPpsBuffer;

    private ByteBuffer[] mBuffers;
    Surface surface;
    SurfaceTexture surfaceTexture;
    TextureView Tv;

    private static final int VIDEO_WIDTH = 640;
    private static final int VIDEO_HEIGHT = 368;

    public byte[] a;
    public Mat k;

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height)
    {
        //Log.d(TAG,"onsurfaceavailable");

        surfaceTexture=surface;
        this.surface= new Surface(surfaceTexture);


        mReadyLock.lock();
        initMediaCodec(VIDEO_MIME_TYPE);

        mReadyLock.unlock();
        //surfaceCreated=true;

    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {

    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        mReadyLock.lock();
        releaseMediaCodec();
        mReadyLock.unlock();
        return false;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {

    }

    //Deprecated
   /* public VideoView(Context context) {
        super(context);
        customInit();
    }

    public VideoView(Context context, AttributeSet attrs) {
        super(context, attrs);
        customInit();
    }

    public VideoView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        customInit();
    }*/
    // The TextureView is passed in so this class can register itself as the SurfaceTextureListener.
    public VideoView(Context context, TextureView TV) {
        super(context);
        Tv=TV;
        Tv.setSurfaceTextureListener(this);
        customInit();
    }

    private void customInit() {
        mReadyLock = new ReentrantLock();
        //getHolder().addCallback(this);
    }

    public void displayFrame(ARFrame frame) {
        //Log.d(TAG, "frame1");
        mReadyLock.lock();
        if ((mMediaCodec != null)) {
            //Log.d(TAG, "not null");
            if (mIsCodecConfigured) {
                // Here we have either a good PFrame, or an IFrame
                int index = -1;
                //Log.d(TAG, "configured");

                try {
                    index = mMediaCodec.dequeueInputBuffer(VIDEO_DEQUEUE_TIMEOUT);
                } catch (IllegalStateException e) {
                    Log.d(TAG, "Error while dequeue input buffer");
                }
                if (index >= 0) {
                    ByteBuffer b;
                    if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
                        b = mMediaCodec.getInputBuffer(index);
                    } else {
                        b = mBuffers[index];
                        b.clear();
                    }

                    if (b != null) {

                        b.put(frame.getByteData(), 0, frame.getDataSize());
                    }

                    try {
                        mMediaCodec.queueInputBuffer(index, 0, frame.getDataSize(), 0, 0);
                    } catch (IllegalStateException e) {
                        //Log.e(TAG, "Error while queue input buffer");
                    }
                }
            }

            //Log.d(TAG, "display");
            // Try to display previous frame
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            int outIndex;
            try {

                outIndex = mMediaCodec.dequeueOutputBuffer(info, 0);

                while (outIndex >= 0) {
                    mMediaCodec.releaseOutputBuffer(outIndex, true);
                    outIndex = mMediaCodec.dequeueOutputBuffer(info, 0);
                }
                //Log.d(TAG, "save");
                //DroneVideo.SaveImage(getBitmap());


            } catch (IllegalStateException e) {
                Log.e(TAG, "Error while dequeue input buffer (outIndex)");
            }
        }
        Bitmap bmp = Bitmap.createBitmap(640, 368, Bitmap.Config.ARGB_8888);
        this.getBitmap(bmp);
        DroneVideo.SaveImage(bmp);
        mReadyLock.unlock();
    }


    public void configureDecoder(ARControllerCodec codec) {
        mReadyLock.lock();


        if (codec.getType() == ARCONTROLLER_STREAM_CODEC_TYPE_ENUM.ARCONTROLLER_STREAM_CODEC_TYPE_H264) {
            ARControllerCodec.H264 codecH264 = codec.getAsH264();

            mSpsBuffer = ByteBuffer.wrap(codecH264.getSps().getByteData());
            mPpsBuffer = ByteBuffer.wrap(codecH264.getPps().getByteData());

        }


        if ((mMediaCodec != null) && (mSpsBuffer != null)) {
            configureMediaCodec();
        }

        mReadyLock.unlock();
    }
  

    private void configureMediaCodec() {
        mMediaCodec.stop();//?
        MediaFormat format = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, VIDEO_WIDTH, VIDEO_HEIGHT);
        format.setByteBuffer("csd-0", mSpsBuffer);
        format.setByteBuffer("csd-1", mPpsBuffer);

        mMediaCodec.configure(format, surface, null, 0);
        mMediaCodec.start();

        if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.LOLLIPOP) {
            mBuffers = mMediaCodec.getInputBuffers();
        }

        mIsCodecConfigured = true;
    }

    private void initMediaCodec(String type) {
        try {
            mMediaCodec = MediaCodec.createDecoderByType(type);
        } catch (IOException e) {
            Log.e(TAG, "Exception", e);
        }

        if ((mMediaCodec != null) && (mSpsBuffer != null)) {
            configureMediaCodec();
        }
    }

    private void releaseMediaCodec() {
        if (mMediaCodec != null) {
            if (mIsCodecConfigured) {
                mMediaCodec.stop();
                mMediaCodec.release();
            }
            mIsCodecConfigured = false;
            mMediaCodec = null;
        }
    }

}

#12

If I have time tonight once I get home, I'll post my whole implementation.


#13

Wow…
A lot happened while I was sleeping.
I asked the question on Stack Overflow, but I only got a black image like you.
So I thought I needed a decoding step to solve the black-image problem.
I am currently writing code that performs the decoding, but it is not easy.
I hope this problem will be solved soon.
:cry:


#14

Wow… I really appreciate your answer.
Thank you.
If you post the whole code, I will be really happy. :grinning:


#15

So here are some guidelines / assumptions.

You'll see I pass in the SPS/PPS buffers as part of my displayFrame method. I do this because I have a service managing the underlying deviceController connection.

I overlay a custom view (CVClassifierView) on top of the texture view, where I do my OpenCV work. I had previously mentioned that I did this within BebopVideoView, but I was mistaken.

CVClassifierView is wired into my activity's onResume and onPause events (take a look at its resume and pause methods for more details, and the lifecycle sketch below).

This is a very basic OpenCV classifier example. I'm sure you'll want to add some blur and other options… or go with a completely different detection method (I've experimented with about five vastly different approaches / targets so far).
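
A minimal sketch of that lifecycle wiring (a hypothetical hosting activity; the field names are assumptions, not part of the code below):

// In the hosting activity: give the classifier view its sources on resume,
// and stop its worker thread on pause.
@Override
protected void onResume() {
    super.onResume();
    cvClassifierView.resume(bebopVideoView, cvPreviewView);
}

@Override
protected void onPause() {
    cvClassifierView.pause();
    super.onPause();
}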

BebopVideoView

package com.shellware.arpro3.views;

import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.util.AttributeSet;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;

import com.parrot.arsdk.arcontroller.ARFrame;
import com.shellware.arpro3.ARPro3Application;

import java.nio.ByteBuffer;

public class BebopVideoView extends TextureView implements TextureView.SurfaceTextureListener {

    private static final String CLASS_NAME = BebopVideoView.class.getSimpleName();

    private static final String VIDEO_MIME_TYPE = "video/avc";
    private static final int VIDEO_DEQUEUE_TIMEOUT = 33000;
    private static final int VIDEO_WIDTH = 640;
    private static final int VIDEO_HEIGHT = 368;
    private Surface surface;
    private MediaCodec mediaCodec;
    private boolean surfaceCreated = false;
    private boolean codecConfigured = false;
    private ByteBuffer[] buffers;

    public BebopVideoView(Context context) {
        this(context, null);
    }

    public BebopVideoView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public BebopVideoView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
    }

    public void displayFrame(final ByteBuffer spsBuffer, final ByteBuffer ppsBuffer, ARFrame frame) {
        if (!surfaceCreated || spsBuffer == null) {
            return;
        }

        if (!codecConfigured) {
            configureMediaCodec(spsBuffer, ppsBuffer);
        }

        // Here we have either a good PFrame, or an IFrame
        int index = -1;

        try {
            index = mediaCodec.dequeueInputBuffer(VIDEO_DEQUEUE_TIMEOUT);
        } catch (IllegalStateException e) {
            Log.e(CLASS_NAME, "Error while dequeue input buffer");
        }
        if (index >= 0) {
            ByteBuffer b;
            if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
                b = mediaCodec.getInputBuffer(index);
            } else {
                b = buffers[index];
                b.clear();
            }

            if (b != null) {
                b.put(frame.getByteData(), 0, frame.getDataSize());
            }

            try {
                mediaCodec.queueInputBuffer(index, 0, frame.getDataSize(), 0, 0);
            } catch (IllegalStateException e) {
                Log.e(CLASS_NAME, "Error while queue input buffer");
            }
        }

        // Try to display previous frame
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int outIndex;
        try {
            outIndex = mediaCodec.dequeueOutputBuffer(info, 0);

            while (outIndex >= 0) {
                mediaCodec.releaseOutputBuffer(outIndex, true);
                outIndex = mediaCodec.dequeueOutputBuffer(info, 0);
            }

            if (ARPro3Application.DEBUG) Log.v(CLASS_NAME, "dequeueOutputBuffer");

        } catch (IllegalStateException e) {
            Log.e(CLASS_NAME, "Error while dequeue input buffer (outIndex)");
        }
    }

    private void configureMediaCodec(final ByteBuffer spsBuffer, final ByteBuffer ppsBuffer) {
        try {
            final MediaFormat format = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, VIDEO_WIDTH, VIDEO_HEIGHT);
            format.setByteBuffer("csd-0", spsBuffer);
            format.setByteBuffer("csd-1", ppsBuffer);

            mediaCodec = MediaCodec.createDecoderByType(VIDEO_MIME_TYPE);
            mediaCodec.configure(format, surface, null, 0);
            mediaCodec.start();

            if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.LOLLIPOP) {
                buffers = mediaCodec.getInputBuffers();
            }

            codecConfigured = true;
        } catch (Exception e) {
            Log.e(CLASS_NAME, "configureMediaCodec", e);
        }

        if (ARPro3Application.DEBUG) Log.v(CLASS_NAME, "configureMediaCodec");
    }

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        this.surface = new Surface(surface);
        surfaceCreated = true;

        if (ARPro3Application.DEBUG) Log.i(CLASS_NAME, "onSurfaceTextureAvailable");
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        if (mediaCodec != null) {
            if (codecConfigured) {
                mediaCodec.stop();
                mediaCodec.release();
            }
            codecConfigured = false;
            mediaCodec = null;
        }

        if (surface != null) surface.release();
        if (this.surface != null) this.surface.release();

        if (ARPro3Application.DEBUG) Log.i(CLASS_NAME, "onSurfaceTextureDestroyed");
        return true;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {
    }
}

CVClassifierView

package com.shellware.arpro3.views;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.os.Handler;
import android.util.AttributeSet;
import android.util.Log;
import android.view.View;
import android.widget.ImageView;

import com.shellware.arpro3.R;

import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfInt;
import org.opencv.core.MatOfPoint;
import org.opencv.core.MatOfRect;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import org.opencv.imgproc.Moments;
import org.opencv.objdetect.CascadeClassifier;

import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;

import static org.opencv.core.CvType.CV_8U;

/**
 * Shell M. Shrader - 12/1/16
 *
 * All Rights Reserved
 */

public class CVClassifierView extends View {

    private final static String CLASS_NAME = CVClassifierView.class.getSimpleName();
    private final Context ctx;

    private CascadeClassifier faceClassifier;
    private CascadeClassifier palmClassifier;
    private CascadeClassifier fistClassifier;

    private Handler openCVHandler = new Handler();
    private Thread openCVThread = null;

    private BebopVideoView bebopVideoView = null;
    private ImageView cvPreviewView = null;

    private Rect[] facesArray = null;

    private Paint paint;

    private final Object lock = new Object();

    public CVClassifierView(Context context) {
        this(context, null);
    }

    public CVClassifierView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public CVClassifierView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);

        ctx = context;

        // initialize our opencv cascade classifiers
        faceClassifier = new CascadeClassifier(cascadeFile(R.raw.haarcascade_upperbody));

        // initialize our canvas paint object
        paint = new Paint();
        paint.setAntiAlias(true);
        paint.setColor(Color.GREEN);
        paint.setStyle(Paint.Style.STROKE);
        paint.setStrokeWidth(4f);
    }

    private String cascadeFile(final int id) {
        final InputStream is = getResources().openRawResource(id);

        final File cascadeDir = ctx.getDir("cascade", Context.MODE_PRIVATE);
        final File cascadeFile = new File(cascadeDir, String.format(Locale.US, "%d.xml", id));

        try {
            final FileOutputStream os = new FileOutputStream(cascadeFile);
            final byte[] buffer = new byte[4096];

            int bytesRead;

            while ((bytesRead = is.read(buffer)) != -1) {
                os.write(buffer, 0, bytesRead);
            }

            is.close();
            os.close();
        } catch (Exception e) {
            Log.e(CLASS_NAME, "unable to open cascade file: " + cascadeFile.getName(), e);
            return null;
        }

        return cascadeFile.getAbsolutePath();
    }

    public void resume(final BebopVideoView bebopVideoView, final ImageView cvPreviewView) {
        if (getVisibility() == View.VISIBLE) {
            this.bebopVideoView = bebopVideoView;
            this.cvPreviewView = cvPreviewView;

            openCVThread = new CascadingThread(ctx);
            openCVThread.start();
        }
    }

    public void pause() {
        if (getVisibility() == View.VISIBLE) {
            openCVThread.interrupt();

            try {
                openCVThread.join();
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }

    private float mX = 0;
    private float mY = 0;

    private class CascadingThread extends Thread {
        private final Handler handler;
        boolean interrupted = false;

        private CascadingThread(final Context ctx) {
            handler = new Handler(ctx.getMainLooper());
        }

        public void interrupt() {
            interrupted = true;
        }

        @Override
        public void run() {
            Log.d(CLASS_NAME, "cascadeRunnable");

            final Mat firstMat = new Mat();
            final Mat mat = new Mat();

            while (!interrupted) {
                final Bitmap source = bebopVideoView.getBitmap();

                if (source != null) {
                    Utils.bitmapToMat(source, firstMat);
                    firstMat.assignTo(mat);

                    Imgproc.cvtColor(mat, mat, Imgproc.COLOR_RGBA2GRAY);

                    final int minRows = Math.round(mat.rows() * .12f);

                    final Size minSize = new Size(minRows, minRows);
                    final Size maxSize = new Size(0, 0);

                    final MatOfRect faces = new MatOfRect();

                    faceClassifier.detectMultiScale(mat, faces);

                    synchronized (lock) {
                        facesArray = faces.toArray();

                        mX = firstMat.width() / mat.width();
                        mY = firstMat.height() / mat.height();

                        faces.release();

                        runOnUiThread(new Runnable() {
                            @Override
                            public void run() {
                                invalidate();
                            }
                        });
                    }
                }

                try {
                    sleep(200);
                } catch (InterruptedException e) {
                    interrupted = true;
                }
            }

            firstMat.release();
            mat.release();
        }

        private void runOnUiThread(Runnable r) {
            handler.post(r);
        }
    }


    @Override
    protected void onDraw(Canvas canvas) {
//        Log.d(CLASS_NAME, "onDraw");

        synchronized(lock) {
            if (facesArray != null && facesArray.length > 0) {
                for (Rect target : facesArray) {
                    Log.i(CLASS_NAME, "found face size=" + target.area());
                    paint.setColor(Color.RED);
                    canvas.drawRect((float) target.tl().x * mX, (float) target.tl().y * mY, (float) target.br().x * mX, (float) target.br().y * mY, paint);
                }
            }
        }

        super.onDraw(canvas);
    }
}

#16

Hello. Synman.
I’m very grateful for your reply.
I’ll try to apply your code in my project.
I’m truly grateful.


#17

It’s no problem at all


#18

Thanks for your help, now I am able to get the bitmap from the TextureView.


#19

The answers from synman and Rivale were really helpful.
If anyone wants to get a Mat from BebopVideoView, see the code above (and the short recap below).

Thank you so much. synman and Rivale!! :laughing:
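
For anyone skimming, the core of the working approach as a short sketch (names taken from the posts above):

// Decode with MediaCodec onto the TextureView (BebopVideoView), then read the
// rendered frame back as a Bitmap and convert it to a Mat for OpenCV.
Bitmap source = bebopVideoView.getBitmap();
if (source != null) {
    Mat mat = new Mat();
    Utils.bitmapToMat(source, mat);                      // RGBA Mat
    Imgproc.cvtColor(mat, mat, Imgproc.COLOR_RGBA2GRAY); // e.g. for a classifier
}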


#20

Thank you Synman for sharing your code.
It works fine for me: front face detection with "haarcascade_frontalface_alt" in green.
I was also able to add profile face detection in blue.

But I don't understand why you pass "cvPreviewView" into the "resume" method.
Did I misunderstand something?