How to make a Cardboard app using the Google Cardboard Android SDK


#1

Product: [Bebop2]
Product version: [3.3.0]
SDK version: [3.10.0]
Use of libARController: [YES/NO] (Only for ARSDK)
SDK platform: [iOS/Android/Unix/Python…]
Reproducible with the official app: [YES/NO/Not tried]

I am developing a mobile application (Android) to control a Bebop drone, and I receive the video stream in a SurfaceView. I would like to use a VrVideoView (Google VR) instead of this SurfaceView, but I have no idea how to do that (what I understand of the VrVideoView API is at the end of this post).

Is there any solution or good idea?

This is the code that receives the video from the drone and shows it in the SurfaceView:

public class BebopVideoView extends SurfaceView implements SurfaceHolder.Callback {

private static final String TAG = "BebopVideoView";
private static final String VIDEO_MIME_TYPE = "video/avc";
private static final int VIDEO_DEQUEUE_TIMEOUT = 33000;

private MediaCodec mMediaCodec;
private Lock mReadyLock;

private boolean mIsCodecConfigured = false;

private ByteBuffer mSpsBuffer;
private ByteBuffer mPpsBuffer;

private ByteBuffer[] mBuffers;

private static final int VIDEO_WIDTH = 640;
private static final int VIDEO_HEIGHT = 368;

public BebopVideoView(Context context) {
    super(context);
    customInit();
}

public BebopVideoView(Context context, AttributeSet attrs) {
    super(context, attrs);
    customInit();
}

public BebopVideoView(Context context, AttributeSet attrs, int defStyleAttr) {
    super(context, attrs, defStyleAttr);
    customInit();
}

private void customInit() {
    mReadyLock = new ReentrantLock();
    getHolder().addCallback(this);
}

public void displayFrame(ARFrame frame) {
    mReadyLock.lock();

    if ((mMediaCodec != null)) {
        if (mIsCodecConfigured) {
            // Here we have either a good PFrame, or an IFrame
            int index = -1;

            try {
                index = mMediaCodec.dequeueInputBuffer(VIDEO_DEQUEUE_TIMEOUT);
            } catch (IllegalStateException e) {
                Log.e(TAG, "Error while dequeue input buffer");
            }
            if (index >= 0) {
                ByteBuffer b;
                if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
                    b = mMediaCodec.getInputBuffer(index);
                } else {
                    b = mBuffers[index];
                    b.clear();
                }

                if (b != null) {
                    b.put(frame.getByteData(), 0, frame.getDataSize());
                }

                try {
                    mMediaCodec.queueInputBuffer(index, 0, frame.getDataSize(), 0, 0);
                } catch (IllegalStateException e) {
                    Log.e(TAG, "Error while queue input buffer");
                }
            }
        }

        // Try to display previous frame
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int outIndex;
        try {
            outIndex = mMediaCodec.dequeueOutputBuffer(info, 0);

            while (outIndex >= 0) {
                mMediaCodec.releaseOutputBuffer(outIndex, true);
                outIndex = mMediaCodec.dequeueOutputBuffer(info, 0);
            }
        } catch (IllegalStateException e) {
            Log.e(TAG, "Error while dequeue input buffer (outIndex)");
        }
    }


    mReadyLock.unlock();
}

public void configureDecoder(ARControllerCodec codec) {
    mReadyLock.lock();

    if (codec.getType() == ARCONTROLLER_STREAM_CODEC_TYPE_ENUM.ARCONTROLLER_STREAM_CODEC_TYPE_H264) {
        ARControllerCodec.H264 codecH264 = codec.getAsH264();

        mSpsBuffer = ByteBuffer.wrap(codecH264.getSps().getByteData());
        mPpsBuffer = ByteBuffer.wrap(codecH264.getPps().getByteData());
    }

    if ((mMediaCodec != null) && (mSpsBuffer != null)) {
        configureMediaCodec();
    }

    mReadyLock.unlock();
}

private void configureMediaCodec() {
    mMediaCodec.stop();
    MediaFormat format = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, VIDEO_WIDTH, VIDEO_HEIGHT);
    format.setByteBuffer("csd-0", mSpsBuffer);
    format.setByteBuffer("csd-1", mPpsBuffer);

    mMediaCodec.configure(format, getHolder().getSurface(), null, 0);
    mMediaCodec.start();

    if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.LOLLIPOP) {
        mBuffers = mMediaCodec.getInputBuffers();
    }

    mIsCodecConfigured = true;
}

private void initMediaCodec(String type) {
    try {
        mMediaCodec = MediaCodec.createDecoderByType(type);
    } catch (IOException e) {
        Log.e(TAG, "Exception", e);
    }

    if ((mMediaCodec != null) && (mSpsBuffer != null)) {
        configureMediaCodec();
    }
}

private void releaseMediaCodec() {
    if (mMediaCodec != null) {
        if (mIsCodecConfigured) {
            mMediaCodec.stop();
            mMediaCodec.release();
        }
        mIsCodecConfigured = false;
        mMediaCodec = null;
    }
}

@Override
public void surfaceCreated(SurfaceHolder holder) {
    mReadyLock.lock();
    initMediaCodec(VIDEO_MIME_TYPE);
    mReadyLock.unlock();
}

@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {

}

@Override
public void surfaceDestroyed(SurfaceHolder holder) {
    mReadyLock.lock();
    releaseMediaCodec();
    mReadyLock.unlock();
}
}

And in the main class I use:

private void initIHM() {
    mVideoView = (BebopVideoView) findViewById(R.id.videoView);
}

@Override
public ARCONTROLLER_ERROR_ENUM configureDecoder(ARDeviceController deviceController, ARControllerCodec codec) {
    mVideoView.configureDecoder(codec);
    return ARCONTROLLER_ERROR_ENUM.ARCONTROLLER_OK;
}

@Override
public ARCONTROLLER_ERROR_ENUM onFrameReceived(ARDeviceController deviceController, ARFrame frame) {
    mVideoView.displayFrame(frame);
    return ARCONTROLLER_ERROR_ENUM.ARCONTROLLER_OK;
}

@Override
public void onFrameTimeout(ARDeviceController deviceController) {

}
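
For reference, this is what I understand of the VrVideoView API from the GVR video-widget sample (R.id.vr_video_view and "video.mp4" are just placeholder names, not from my project): it loads a complete 360° video from an asset or a Uri, so I don't see where a live MediaCodec stream would be plugged in.

// Untested fragment, based on my reading of the GVR video-widget sample; placeholder names only.
// import com.google.vr.sdk.widgets.video.VrVideoView;
// import java.io.IOException;

VrVideoView vrVideoView = (VrVideoView) findViewById(R.id.vr_video_view);
VrVideoView.Options options = new VrVideoView.Options();
options.inputType = VrVideoView.Options.TYPE_MONO;
try {
    // Expects a video file/asset (or a Uri via loadVideo), not a decoder Surface.
    vrVideoView.loadVideoFromAsset("video.mp4", options);
} catch (IOException e) {
    Log.e(TAG, "Could not load video", e);
}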

#2

Hi,

From what I see, the VrVideoView does not seem to be the appropriate class.
You might try to look at GvrView.
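I have not tried it myself, but from the GVR documentation the basic setup looks roughly like the sketch below (CardboardStreamActivity, MyStereoRenderer, R.layout.activity_cardboard_stream and R.id.gvr_view are placeholder names, not from your code). Note that a GvrView is drawn by a GvrView.StereoRenderer through OpenGL, not through a SurfaceHolder, so the decoded video would have to end up in a GL texture.

// Rough, untested sketch of hosting a GvrView in an activity.
import android.os.Bundle;
import com.google.vr.sdk.base.GvrActivity;
import com.google.vr.sdk.base.GvrView;

public class CardboardStreamActivity extends GvrActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_cardboard_stream);

        GvrView gvrView = (GvrView) findViewById(R.id.gvr_view);
        gvrView.setEGLConfigChooser(8, 8, 8, 8, 16, 8);  // RGBA8888 + depth + stencil
        gvrView.setRenderer(new MyStereoRenderer());      // your GvrView.StereoRenderer
        setGvrView(gvrView);
    }
}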


#3

Thanks for your response. I replaced SurfaceView with GvrView:

public class BebopVideoView extends GvrView implements SurfaceHolder.Callback

but now I get an error that the method “getHolder” cannot be resolved, both here:

getHolder().addCallback(this);

and in:

mMediaCodec.configure(format, getHolder().getSurface(), null, 0);
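
If I understand correctly, GvrView is not a SurfaceView, so there is no SurfaceHolder to take the Surface from. Is the right approach to give MediaCodec a Surface built from a SurfaceTexture instead? Something like this untested sketch (mSurfaceTexture and mDecoderSurface are new fields I made up, and oesTextureId would be an OpenGL ES external texture created by the renderer on the GL thread):

// android.graphics.SurfaceTexture / android.view.Surface
private SurfaceTexture mSurfaceTexture;
private Surface mDecoderSurface;

private void prepareDecoderSurface(int oesTextureId) {
    // Wrap the GL external texture in a SurfaceTexture, and that in a Surface for the decoder.
    mSurfaceTexture = new SurfaceTexture(oesTextureId);
    mDecoderSurface = new Surface(mSurfaceTexture);
}

private void configureMediaCodec() {
    MediaFormat format = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, VIDEO_WIDTH, VIDEO_HEIGHT);
    format.setByteBuffer("csd-0", mSpsBuffer);
    format.setByteBuffer("csd-1", mPpsBuffer);

    // was: mMediaCodec.configure(format, getHolder().getSurface(), null, 0);
    mMediaCodec.configure(format, mDecoderSurface, null, 0);
    mMediaCodec.start();

    mIsCodecConfigured = true;
}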


#4

Can you please help me? :confused:

I changed BebopVideoView to the class below, but I don't know how to define the methods (the StereoRenderer callbacks are still empty; my rough guess at what they need is at the end of this post):

public class BebopVideoView extends GLSurfaceView implements SurfaceHolder.Callback,GvrView.StereoRenderer {

    private static final String TAG = "BebopVideoView";
    private static final String VIDEO_MIME_TYPE = "video/avc";
    private static final int VIDEO_DEQUEUE_TIMEOUT = 33000;

    private MediaCodec mMediaCodec;
    private Lock mReadyLock;

    private boolean mIsCodecConfigured = false;

    private ByteBuffer mSpsBuffer;
    private ByteBuffer mPpsBuffer;

    private ByteBuffer[] mBuffers;

    private static final int VIDEO_WIDTH = 640;
    private static final int VIDEO_HEIGHT = 368;

    public BebopVideoView(Context context) {
        super(context);
        customInit();
    }

    public BebopVideoView(Context context, AttributeSet attrs) {
        super(context, attrs);
        customInit();
    }


    private void customInit() {
        mReadyLock = new ReentrantLock();
        getHolder().addCallback(this);
    }

    public void displayFrame(ARFrame frame) {
        mReadyLock.lock();

        if ((mMediaCodec != null)) {
            if (mIsCodecConfigured) {
                // Here we have either a good PFrame, or an IFrame
                int index = -1;

                try {
                    index = mMediaCodec.dequeueInputBuffer(VIDEO_DEQUEUE_TIMEOUT);
                } catch (IllegalStateException e) {
                    Log.e(TAG, "Error while dequeue input buffer");
                }
                if (index >= 0) {
                    ByteBuffer b;
                    if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
                        b = mMediaCodec.getInputBuffer(index);
                    } else {
                        b = mBuffers[index];
                        b.clear();
                    }

                    if (b != null) {
                        b.put(frame.getByteData(), 0, frame.getDataSize());
                    }

                    try {
                        mMediaCodec.queueInputBuffer(index, 0, frame.getDataSize(), 0, 0);
                    } catch (IllegalStateException e) {
                        Log.e(TAG, "Error while queue input buffer");
                    }
                }
            }

            // Try to display previous frame
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            int outIndex;
            try {
                outIndex = mMediaCodec.dequeueOutputBuffer(info, 0);

                while (outIndex >= 0) {
                    mMediaCodec.releaseOutputBuffer(outIndex, true);
                    outIndex = mMediaCodec.dequeueOutputBuffer(info, 0);
                }
            } catch (IllegalStateException e) {
                Log.e(TAG, "Error while dequeue input buffer (outIndex)");
            }
        }


        mReadyLock.unlock();
    }

    public void configureDecoder(ARControllerCodec codec) {
        mReadyLock.lock();

        if (codec.getType() == ARCONTROLLER_STREAM_CODEC_TYPE_ENUM.ARCONTROLLER_STREAM_CODEC_TYPE_H264) {
            ARControllerCodec.H264 codecH264 = codec.getAsH264();

            mSpsBuffer = ByteBuffer.wrap(codecH264.getSps().getByteData());
            mPpsBuffer = ByteBuffer.wrap(codecH264.getPps().getByteData());
        }

        if ((mMediaCodec != null) && (mSpsBuffer != null)) {
            configureMediaCodec();
        }

        mReadyLock.unlock();
    }

    private void configureMediaCodec() {
        mMediaCodec.stop();
        MediaFormat format = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, VIDEO_WIDTH, VIDEO_HEIGHT);
        format.setByteBuffer("csd-0", mSpsBuffer);
        format.setByteBuffer("csd-1", mPpsBuffer);

        mMediaCodec.configure(format, getHolder().getSurface(), null, 0);
        mMediaCodec.start();

        if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.LOLLIPOP) {
            mBuffers = mMediaCodec.getInputBuffers();
        }

        mIsCodecConfigured = true;
    }

    private void initMediaCodec(String type) {
        try {
            mMediaCodec = MediaCodec.createDecoderByType(type);
        } catch (IOException e) {
            Log.e(TAG, "Exception", e);
        }

        if ((mMediaCodec != null) && (mSpsBuffer != null)) {
            configureMediaCodec();
        }
    }

    private void releaseMediaCodec() {
        if (mMediaCodec != null) {
            if (mIsCodecConfigured) {
                mMediaCodec.stop();
                mMediaCodec.release();
            }
            mIsCodecConfigured = false;
            mMediaCodec = null;
        }
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        mReadyLock.lock();
        initMediaCodec(VIDEO_MIME_TYPE);
        mReadyLock.unlock();
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {

    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        mReadyLock.lock();
        releaseMediaCodec();
        mReadyLock.unlock();
    }

    @Override
    public void onNewFrame(HeadTransform headTransform) {

    }

    @Override
    public void onDrawEye(Eye eye) {

    }

    @Override
    public void onFinishFrame(Viewport viewport) {

    }

    @Override
    public void onSurfaceChanged(int i, int i1) {

    }

    @Override
    public void onSurfaceCreated(EGLConfig eglConfig) {

    }

    @Override
    public void onRendererShutdown() {

    }
}
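
Based on MediaCodec-to-OpenGL examples I found, my rough and untested guess for the empty callbacks is below, but I don't know if this is the right direction. The fields are new ones I would add to the class, and drawOesQuad() is only a placeholder for the full-screen quad / shader code I still have to write:

// Imports this sketch would need at the top of the file:
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.view.Surface;
import com.google.vr.sdk.base.Eye;
import com.google.vr.sdk.base.HeadTransform;
import javax.microedition.khronos.egl.EGLConfig;

// New fields on BebopVideoView:
private int mOesTextureId;
private SurfaceTexture mSurfaceTexture;
private Surface mDecoderSurface;

@Override
public void onSurfaceCreated(EGLConfig eglConfig) {
    // Create an external OES texture on the GL thread and wrap it in a SurfaceTexture.
    int[] tex = new int[1];
    GLES20.glGenTextures(1, tex, 0);
    mOesTextureId = tex[0];
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mOesTextureId);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
            GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
            GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);

    mSurfaceTexture = new SurfaceTexture(mOesTextureId);
    mDecoderSurface = new Surface(mSurfaceTexture);
    // configureMediaCodec() would then pass mDecoderSurface to mMediaCodec.configure()
    // instead of getHolder().getSurface().
}

@Override
public void onNewFrame(HeadTransform headTransform) {
    // Pull the most recently decoded frame into the external texture (GL thread).
    if (mSurfaceTexture != null) {
        mSurfaceTexture.updateTexImage();
    }
}

@Override
public void onDrawEye(Eye eye) {
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
    // Draw a full-screen quad sampling the external texture for this eye.
    drawOesQuad(mOesTextureId, eye);  // placeholder helper, shader/geometry not written yet
}

Does that sound like the right way to connect the decoder output to the Cardboard rendering?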

#5

Sorry, but I don't know anything about the Cardboard SDK.
However, you can try asking on the Cardboard support forum/website how to display an H.264 video stream.