BebopVideoView to Mat

So here are some guidelines / assumptions.

You’ll see I pass the SPS/PPS buffers into my displayFrame method. I do this because I have a service managing the underlying deviceController connection.
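If it helps, here’s a minimal sketch of that hookup, assuming the service registers an ARSDK3 ARDeviceControllerStreamListener (the names videoView / spsBuffer / ppsBuffer are illustrative, not lifted from my project):

private final ARDeviceControllerStreamListener streamListener = new ARDeviceControllerStreamListener() {
    @Override
    public ARCONTROLLER_ERROR_ENUM configureDecoder(ARDeviceController deviceController, ARControllerCodec codec) {
        // capture the sps/pps buffers here -- see further down this thread
        return ARCONTROLLER_ERROR_ENUM.ARCONTROLLER_OK;
    }

    @Override
    public ARCONTROLLER_ERROR_ENUM onFrameReceived(ARDeviceController deviceController, ARFrame frame) {
        // forward each frame, along with the captured codec config, to the view
        videoView.displayFrame(spsBuffer, ppsBuffer, frame);
        return ARCONTROLLER_ERROR_ENUM.ARCONTROLLER_OK;
    }

    @Override
    public void onFrameTimeout(ARDeviceController deviceController) {
        // no-op
    }
};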

I overlay a custom view (CVClassifierView) on top of the texture view; that overlay is where I do my OpenCV work. I had previously mentioned that I did this within BebopVideoView, but I was mistaken.

CVClassifierView is wired into my activity’s onResume and onPause events (take a look at its resume and pause methods for more details).
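The wiring itself is just a couple of lines in the activity; a minimal sketch, assuming the activity already holds references to the views:

@Override
protected void onResume() {
    super.onResume();
    cvClassifierView.resume(bebopVideoView, cvPreviewView);
}

@Override
protected void onPause() {
    cvClassifierView.pause();
    super.onPause();
}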

This is a very basic OpenCV classifier example. I’m sure you’ll want to add some blur and other options… or go with a completely different detection method (I’ve experimented with about five vastly different approaches / targets so far).
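For example, a little smoothing and contrast normalization on the grayscale Mat before running the classifier can help; a sketch only, tune it or drop it for your own target:

Imgproc.cvtColor(mat, mat, Imgproc.COLOR_RGBA2GRAY);
Imgproc.GaussianBlur(mat, mat, new Size(5, 5), 0);   // mild blur to suppress sensor noise
Imgproc.equalizeHist(mat, mat);                      // normalize contrast
faceClassifier.detectMultiScale(mat, faces);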

BebopVideoView

package com.shellware.arpro3.views;

import android.content.Context;
import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Surface;
import android.view.TextureView;

import com.parrot.arsdk.arcontroller.ARFrame;
import com.shellware.arpro3.ARPro3Application;

import java.nio.ByteBuffer;

public class BebopVideoView extends TextureView implements TextureView.SurfaceTextureListener {

    private static final String CLASS_NAME = BebopVideoView.class.getSimpleName();

    private static final String VIDEO_MIME_TYPE = "video/avc";
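    // ~ one frame at 30 fps, expressed in microseconds for dequeueInputBuffer()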
    private static final int VIDEO_DEQUEUE_TIMEOUT = 33000;
    private static final int VIDEO_WIDTH = 640;
    private static final int VIDEO_HEIGHT = 368;
    private Surface surface;
    private MediaCodec mediaCodec;
    private boolean surfaceCreated = false;
    private boolean codecConfigured = false;
    private ByteBuffer[] buffers;

    public BebopVideoView(Context context) {
        this(context, null);
    }

    public BebopVideoView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public BebopVideoView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);

        // without this the surface callbacks never fire and getBitmap()
        // returns a black image (see the fix discussed later in this thread)
        setSurfaceTextureListener(this);
    }

    public void displayFrame(final ByteBuffer spsBuffer, final ByteBuffer ppsBuffer, ARFrame frame) {
        if (!surfaceCreated || spsBuffer == null) {
            return;
        }

        if (!codecConfigured) {
            configureMediaCodec(spsBuffer, ppsBuffer);
        }

        // Here we have either a good PFrame, or an IFrame
        int index = -1;

        try {
            index = mediaCodec.dequeueInputBuffer(VIDEO_DEQUEUE_TIMEOUT);
        } catch (IllegalStateException e) {
            Log.e(CLASS_NAME, "Error while dequeue input buffer");
        }
        if (index >= 0) {
            ByteBuffer b;
            if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
                b = mediaCodec.getInputBuffer(index);
            } else {
                b = buffers[index];
                b.clear();
            }

            if (b != null) {
                b.put(frame.getByteData(), 0, frame.getDataSize());
            }

            try {
                mediaCodec.queueInputBuffer(index, 0, frame.getDataSize(), 0, 0);
            } catch (IllegalStateException e) {
                Log.e(CLASS_NAME, "Error while queue input buffer");
            }
        }

        // Try to display previous frame
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int outIndex;
        try {
            outIndex = mediaCodec.dequeueOutputBuffer(info, 0);

            while (outIndex >= 0) {
                mediaCodec.releaseOutputBuffer(outIndex, true);
                outIndex = mediaCodec.dequeueOutputBuffer(info, 0);
            }

            if (ARPro3Application.DEBUG) Log.v(CLASS_NAME, "dequeueOutputBuffer");

        } catch (IllegalStateException e) {
            Log.e(CLASS_NAME, "Error while dequeue input buffer (outIndex)");
        }
    }

    private void configureMediaCodec(final ByteBuffer spsBuffer, final ByteBuffer ppsBuffer) {
        try {
            final MediaFormat format = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, VIDEO_WIDTH, VIDEO_HEIGHT);
            format.setByteBuffer("csd-0", spsBuffer);
            format.setByteBuffer("csd-1", ppsBuffer);

            mediaCodec = MediaCodec.createDecoderByType(VIDEO_MIME_TYPE);
            mediaCodec.configure(format, surface, null, 0);
            mediaCodec.start();

            if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.LOLLIPOP) {
                buffers = mediaCodec.getInputBuffers();
            }

            codecConfigured = true;
        } catch (Exception e) {
            Log.e(CLASS_NAME, "configureMediaCodec", e);
        }

        if (ARPro3Application.DEBUG) Log.v(CLASS_NAME, "configureMediaCodec");
    }

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        this.surface = new Surface(surface);
        surfaceCreated = true;

        if (ARPro3Application.DEBUG) Log.i(CLASS_NAME, "onSurfaceTextureAvailable");
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        if (mediaCodec != null) {
            if (codecConfigured) {
                mediaCodec.stop();
                mediaCodec.release();
            }
            codecConfigured = false;
            mediaCodec = null;
        }

        surfaceCreated = false;

        // returning true tells the TextureView to release the SurfaceTexture,
        // so only our Surface wrapper needs an explicit release here
        if (this.surface != null) this.surface.release();

        if (ARPro3Application.DEBUG) Log.i(CLASS_NAME, "onSurfaceTextureDestroyed");
        return true;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {
    }
}

CVClassifierView

package com.shellware.arpro3.views;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.os.Handler;
import android.util.AttributeSet;
import android.util.Log;
import android.view.View;
import android.widget.ImageView;

import com.shellware.arpro3.R;

import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;

import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.util.Locale;

/**
 * Shell M. Shrader - 12/1/16
 *
 * All Rights Reserved
 */

public class CVClassifierView extends View {

    private final static String CLASS_NAME = CVClassifierView.class.getSimpleName();
    private final Context ctx;

    private CascadeClassifier faceClassifier;

    // spares for other cascades (palm / fist) I’ve experimented with
    private CascadeClassifier palmClassifier;
    private CascadeClassifier fistClassifier;

    private Handler openCVHandler = new Handler();
    private Thread openCVThread = null;

    private BebopVideoView bebopVideoView = null;
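    // passed in via resume(); kept for experimentation, not read in this excerpt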
    private ImageView cvPreviewView = null;

    private Rect[] facesArray = null;

    private Paint paint;

    private final Object lock = new Object();

    public CVClassifierView(Context context) {
        this(context, null);
    }

    public CVClassifierView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public CVClassifierView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);

        ctx = context;

        // initialize our opencv cascade classifier (upper body by default here)
        faceClassifier = new CascadeClassifier(cascadeFile(R.raw.haarcascade_upperbody));

        // initialize our canvas paint object
        paint = new Paint();
        paint.setAntiAlias(true);
        paint.setColor(Color.GREEN);
        paint.setStyle(Paint.Style.STROKE);
        paint.setStrokeWidth(4f);
    }

    private String cascadeFile(final int id) {
        final InputStream is = getResources().openRawResource(id);

        final File cascadeDir = ctx.getDir("cascade", Context.MODE_PRIVATE);
        final File cascadeFile = new File(cascadeDir, String.format(Locale.US, "%d.xml", id));

        try {
            final FileOutputStream os = new FileOutputStream(cascadeFile);
            final byte[] buffer = new byte[4096];

            int bytesRead;

            while ((bytesRead = is.read(buffer)) != -1) {
                os.write(buffer, 0, bytesRead);
            }

            is.close();
            os.close();
        } catch (Exception e) {
            Log.e(CLASS_NAME, "unable to open cascade file: " + cascadeFile.getName(), e);
            return null;
        }

        return cascadeFile.getAbsolutePath();
    }

    public void resume(final BebopVideoView bebopVideoView, final ImageView cvPreviewView) {
        if (getVisibility() == View.VISIBLE) {
            this.bebopVideoView = bebopVideoView;
            this.cvPreviewView = cvPreviewView;

            openCVThread = new CascadingThread(ctx);
            openCVThread.start();
        }
    }

    public void pause() {
        // guard on the thread rather than visibility -- the view may not have
        // been visible when resume() ran, leaving openCVThread null
        if (openCVThread != null) {
            openCVThread.interrupt();

            try {
                openCVThread.join();
            } catch (InterruptedException e) {
                e.printStackTrace();
            }

            openCVThread = null;
        }
    }

    private float mX = 0;
    private float mY = 0;

    private class CascadingThread extends Thread {
        private final Handler handler;
        private volatile boolean interrupted = false;   // volatile: set from the UI thread, read here

        private CascadingThread(final Context ctx) {
            handler = new Handler(ctx.getMainLooper());
        }

        public void interrupt() {
            interrupted = true;
        }

        @Override
        public void run() {
            Log.d(CLASS_NAME, "cascadeRunnable");

            final Mat firstMat = new Mat();
            final Mat mat = new Mat();

            while (!interrupted) {
                final Bitmap source = bebopVideoView.getBitmap();

                if (source != null) {
                    Utils.bitmapToMat(source, firstMat);
                    firstMat.assignTo(mat);

                    Imgproc.cvtColor(mat, mat, Imgproc.COLOR_RGBA2GRAY);

                    final int minRows = Math.round(mat.rows() * .12f);

                    final Size minSize = new Size(minRows, minRows);
                    final Size maxSize = new Size(0, 0);

                    final MatOfRect faces = new MatOfRect();

                    // constrain detection to targets at least ~12% of the frame height
                    faceClassifier.detectMultiScale(mat, faces, 1.1, 3, 0, minSize, maxSize);

                    synchronized (lock) {
                        facesArray = faces.toArray();

                        mX = (float) firstMat.width() / mat.width();
                        mY = (float) firstMat.height() / mat.height();

                        faces.release();

                        runOnUiThread(new Runnable() {
                            @Override
                            public void run() {
                                invalidate();
                            }
                        });
                    }
                }

                try {
                    sleep(200);
                } catch (InterruptedException e) {
                    interrupted = true;
                }
            }

            firstMat.release();
            mat.release();
        }

        private void runOnUiThread(Runnable r) {
            handler.post(r);
        }
    }


    @Override
    protected void onDraw(Canvas canvas) {
//        Log.d(CLASS_NAME, "onDraw");

        synchronized(lock) {
            if (facesArray != null && facesArray.length > 0) {
                for (Rect target : facesArray) {
                    Log.i(CLASS_NAME, "found face size=" + target.area());
                    paint.setColor(Color.RED);
                    canvas.drawRect((float) target.tl().x * mX, (float) target.tl().y * mY, (float) target.br().x * mX, (float) target.br().y * mY, paint);
                }
            }
        }

        super.onDraw(canvas);
    }
}

Hello, synman.
I’m very grateful for your reply.
I’ll try to apply your code in my project.
I’m truly grateful.

It’s no problem at all

Thanks for your help, now I am able to get the bitmap from the TextureView.

The answers from synman and Rivale were really helpful.
If anyone wants to go from BebopVideoView to Mat, see the code above.
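For quick reference, the conversion itself boils down to a few lines (a condensed sketch of what CascadingThread above does):

final Bitmap source = bebopVideoView.getBitmap();   // current TextureView frame

if (source != null) {
    final Mat mat = new Mat();
    Utils.bitmapToMat(source, mat);                 // mat is now RGBA
    Imgproc.cvtColor(mat, mat, Imgproc.COLOR_RGBA2GRAY);
}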

Thank you so much, synman and Rivale!! :laughing:

Thank you, synman, for sharing your code.
It works fine for me: front face detection with “haarcascade_frontalface_alt” in green.
I could also add profile face detection in blue.

But I don’t understand why you pass “cvPreviewView” into the “resume” method.
Did I misunderstand something?

Thread management… you don’t want to leave the CV processing thread running while the activity is not active.

resume starts the thread and pause tears it down

I am working on an image processing app for the Bebop drone and I am facing the same problem, i.e., the captured image is black :( My template is the Parrot sample project. At first, I changed BebopVideoView into a TextureView. Then, when I try to get an image from BebopActivity as

final Bitmap bit = mVideoView.getBitmap();
ImageView im = (ImageView) findViewById(R.id.imageView);
im.setImageBitmap(bit);

the image is black. So I tried to create a class inside the original BebopActivity class, where the new class extends Thread. The process of taking the bitmap is then run in its own thread, as seen in synman’s example, but again the image is black :(

Can someone please post working BebopActivity where working getBitmap() is shown? Thank you

I also took CVClassifierView and created my own view from this class. In BebopActivity I look the view up by its id and call resume. In CVClassifierView I try to paint the bitmap, but again, the output is black :(

Solved :) I needed to append "this.setSurfaceTextureListener(this);" in the BebopVideoView.

@hmattolou

hello.
I guess @mastemace’s comment means the code below.


[BebopActivity.java]

private void initIHM() {
      mVideoView = (BebopVideoView) findViewById(R.id.videoView);
      mVideoView.setSurfaceTextureListener(mVideoView);
 ...
}

[BebopVideoView.java]

public BebopVideoView(Context context) {
    this(context, null);
    ...
    setSurfaceTextureListener(this);
}

public BebopVideoView(Context context, AttributeSet attrs) {
    this(context, attrs, 0);
    ...
    setSurfaceTextureListener(this);
}

public BebopVideoView(Context context, AttributeSet attrs, int defStyleAttr) {
    super(context, attrs, defStyleAttr);
    ...
    setSurfaceTextureListener(this);
}

BebopVideoView is a TextureView.


I think you can solve the problem by writing code like the above. (Since the constructors chain to each other, setting the listener once in the three-argument constructor is actually enough.)
Thanks.

Hi synman!!!
I’m Mingyu, a student from South Korea.
I have a final graduation project with the Bebop drone: a selfie drone.
I looked at your BebopVideoView & CVClassifierView files, but I don’t understand the threading.
If you don’t mind, could you send me the whole code of your project? (I want to understand the procedure.)

My email address is ‘darkpmg@gmail.com’.

Thank you synman.
Regards

Hi, mastemace!
I have a problem. If you don’t mind, could you send me BebopActivity.java and activity_bebop.xml?

my email is darkpmg@gmail.com

thank you

Hello. Can you show how to set the sps/pps buffer arguments? Any help will be appreciated!

Take a look at the ARSDK3 video stream events. You’ll find that the SDK provides them as part of setup.
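For reference, the Parrot sample’s H264VideoView pulls them out of the ARControllerCodec delivered to the stream listener’s configureDecoder callback, roughly like this (a sketch assuming an H.264 stream):

public void configureDecoder(ARControllerCodec codec) {
    if (codec.getType() == ARCONTROLLER_STREAM_CODEC_TYPE_ENUM.ARCONTROLLER_STREAM_CODEC_TYPE_H264) {
        final ARControllerCodec.H264 codecH264 = codec.getAsH264();

        final ByteBuffer spsBuffer = ByteBuffer.wrap(codecH264.getSps().getByteData());
        final ByteBuffer ppsBuffer = ByteBuffer.wrap(codecH264.getPps().getByteData());

        // hand these to displayFrame() along with each ARFrame
    }
}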

Thanks, synman! Could you send me the whole project that gets a Mat from the frame? I’ve tried for several days, but the bitmap I save is a black picture. I don’t know how to deal with it… Any help will be appreciated! Thank you!

I cannot give you my entire project as it is a retail application. If you have a specific question I am more than happy to help. The examples provided here, and linked to over at Stack Overflow, work well with the Parrot-provided Android sample.

Thank you. The Android sample provides H264VideoView.java, which I modified with the BebopVideoView.java you provided above. In the sample’s BebopActivity.java, the function configureDecoder(ARControllerCodec codec) calls mVideoView.configureDecoder(codec) to initialize the view, but configureDecoder isn’t included in your BebopVideoView.

My BebopActivity.java:
package com.parrot.sdksample.activity;

import android.annotation.TargetApi;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.media.Image;
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;

import com.parrot.arsdk.arcommands.ARCOMMANDS_ARDRONE3_MEDIARECORDEVENT_PICTUREEVENTCHANGED_ERROR_ENUM;
import com.parrot.arsdk.arcommands.ARCOMMANDS_ARDRONE3_PILOTINGSTATE_FLYINGSTATECHANGED_STATE_ENUM;
import com.parrot.arsdk.arcontroller.ARCONTROLLER_DEVICE_STATE_ENUM;
import com.parrot.arsdk.arcontroller.ARControllerCodec;
import com.parrot.arsdk.arcontroller.ARFrame;
import com.parrot.arsdk.ardiscovery.ARDiscoveryDeviceService;
import com.parrot.sdksample.R;
import com.parrot.sdksample.drone.BebopDrone;
import com.parrot.sdksample.view.H264VideoView;

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

import java.io.ByteArrayOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

public class BebopActivity extends AppCompatActivity {
    // Used to load the 'native-lib' library on application startup.
    static {
        System.loadLibrary("bebop_video_decoder-lib");
    }
    // public native String arframeToRGB();

    private static final String TAG = "BebopActivity";
    private BebopDrone mBebopDrone;

    private ProgressDialog mConnectionProgressDialog;
    private ProgressDialog mDownloadProgressDialog;

    private H264VideoView mVideoView;

    private TextView mBatteryLabel;
    private Button mTakeOffLandBt;
    private Button mDownloadBt;

    private int mNbMaxDownload;
    private int mCurrentDownloadIndex;
    private int pic_count = 0;
    public boolean b_openCV_load = false;
    public boolean b_first_init = true;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_bebop);

        initIHM();

        Intent intent = getIntent();
        ARDiscoveryDeviceService service = intent.getParcelableExtra(DeviceListActivity.EXTRA_DEVICE_SERVICE);
        mBebopDrone = new BebopDrone(this, service);
        mBebopDrone.addListener(mBebopListener);
    }

    @Override
    protected void onStart() {
        super.onStart();

        // show a loading view while the bebop drone is connecting
        if ((mBebopDrone != null) && !(ARCONTROLLER_DEVICE_STATE_ENUM.ARCONTROLLER_DEVICE_STATE_RUNNING.equals(mBebopDrone.getConnectionState()))) {
            mConnectionProgressDialog = new ProgressDialog(this, R.style.AppCompatAlertDialogStyle);
            mConnectionProgressDialog.setIndeterminate(true);
            mConnectionProgressDialog.setMessage("Connecting ...");
            mConnectionProgressDialog.setCancelable(false);
            mConnectionProgressDialog.show();

            // if the connection to the Bebop fails, finish the activity
            if (!mBebopDrone.connect()) {
                finish();
            }
        }
    }

    @Override
    public void onBackPressed() {
        if (mBebopDrone != null) {
            mConnectionProgressDialog = new ProgressDialog(this, R.style.AppCompatAlertDialogStyle);
            mConnectionProgressDialog.setIndeterminate(true);
            mConnectionProgressDialog.setMessage("Disconnecting ...");
            mConnectionProgressDialog.setCancelable(false);
            mConnectionProgressDialog.show();

            if (!mBebopDrone.disconnect()) {
                finish();
            }
        }
    }

    @Override
    public void onDestroy() {
        mBebopDrone.dispose();
        super.onDestroy();
    }

    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
                case LoaderCallbackInterface.SUCCESS: {
                    Log.i(TAG, "OpenCV loaded successfully");
                    b_openCV_load = true;

                    // mOpenCvCameraView.enableView();
                    // mOpenCvCameraView.setOnTouchListener(ColorBlobDetectionActivity.this);
                }
                break;
                default: {
                    super.onManagerConnected(status);
                    Log.i(TAG, "fail");
                }
                break;
            }
        }
    };

    @Override
    protected void onResume() {
        super.onResume();
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_4_0, this, mLoaderCallback);

        new Thread(new Runnable() {
            @Override
            public void run() {
                final Mat firstmat = new Mat();
                final Mat image = new Mat();

                while (true) {
                    try {
                        Thread.sleep(1000);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }

                    // pic_count++;
                    if (b_openCV_load == true && pic_count % 50 == 0) {
                        if (pic_count >= 500)
                            pic_count = 0;

                        b_first_init = false;

                        final Bitmap mybitmap = mVideoView.getBitmap();

                        if (mybitmap != null) {
                            // convert the grabbed bitmap first, then copy it into the working Mat
                            Utils.bitmapToMat(mybitmap, firstmat);
                            firstmat.assignTo(image);
                            Imgproc.cvtColor(image, image, Imgproc.COLOR_RGBA2GRAY);

                            // String pic_name = getApplicationContext().getFilesDir().getAbsolutePath() + "/kkkk" + pic_count + ".png";
                            String pic_name = getExternalFilesDir(Environment.DIRECTORY_MOVIES).toString() + "/kkkk" + pic_count + ".jpg";

                            if (Imgcodecs.imwrite(pic_name, image))
                                Log.d("onFrameReceived", "save " + pic_name + "  Success!");
                            else
                                Log.d("onFrameReceived", "save " + pic_name + "  Fail!!");
                        }
                    }
                }
            }
        }).start();
    }

    private void initIHM() {
        mVideoView = (H264VideoView) findViewById(R.id.videoView);

        findViewById(R.id.emergencyBt).setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                mBebopDrone.emergency();
            }
        });

        // ... (remaining UI wiring omitted)
    }

    private final BebopDrone.Listener mBebopListener = new BebopDrone.Listener() {
        // ... (other listener callbacks omitted)

        @Override
        public void onFrameReceived(ARFrame frame) {
            //mVideoView.displayFrame(frame);
        }
    };
}

I can get the Bitmap saved, but it is always a black picture. I changed the Surface to a TextureView in H264VideoView.java.

Mat image = new Mat();
Mat firstMat = new Mat();
Bitmap mybitmap = mVideoView.getBitmap();

if (mybitmap != null) {
    Utils.bitmapToMat(mybitmap, firstMat);
    firstMat.assignTo(image);

    Imgproc.cvtColor(image, image, Imgproc.COLOR_RGBA2GRAY /*Imgproc.COLOR_YUV2BGR*/);
}

Are you seeing a good video stream in the actual TextureView?

You need to reformat your code above so that it is legible. I’ll try to look at it when I have time to decipher the wall of text you pasted :slight_smile: