Hey. So I am able to use the following code (Streaming.py, with some changes for a physical drone connection) on a Linux laptop:
import time
import csv
import cv2
import math
import os
import shlex
import subprocess
import tempfile
import olympe
import threading
from concurrent.futures import ThreadPoolExecutor
import olympe_deps as od
from olympe.messages.ardrone3.Piloting import TakeOff, Landing, PCMD
from olympe.messages.ardrone3.Piloting import moveBy
from olympe.messages.ardrone3.PilotingState import FlyingStateChanged
from olympe.messages.ardrone3.PilotingSettings import MaxTilt
from olympe.messages.ardrone3.GPSSettingsState import GPSFixStateChanged
from olympe.messages.ardrone3.GPSState import NumberOfSatelliteChanged
from olympe.messages.ardrone3.GPSSettingsState import HomeChanged
from olympe.messages.ardrone3.PilotingState import PositionChanged
from olympe.messages.ardrone3.GPSSettingsState import GPSFixStateChanged
from olympe.messages.ardrone3.PilotingState import FlyingStateChanged
from olympe.messages.ardrone3.Piloting import Emergency
from olympe.messages.ardrone3.Piloting import NavigateHome
from olympe.messages.rth import return_to_home
class ParrotDrone:
    """Connect to a Parrot Anafi via Olympe, record/stream its H.264 video
    feed, and expose simple GPS/telemetry polling helpers.

    Recorded output (H.264 data + metadata, plus a per-frame fps/bitrate CSV)
    goes to a fresh temporary directory created in ``__init__``.
    """

    def __init__(self):
        # Create the olympe.Drone object from its IP address
        # (192.168.53.1 is the SkyController's USB address).
        self.drone = olympe.Drone(
            "192.168.53.1",
            mpp=True,
            drone_type=od.ARSDK_DEVICE_TYPE_ANAFI4K,
            loglevel=2,
        )
        self.tempd = tempfile.mkdtemp(prefix="olympe_streaming_test_")
        print("Olympe streaming example output dir: {}".format(self.tempd))
        # Rolling ~1 second window of (timestamp, frame_size) tuples used to
        # derive FPS and bitrate in h264_frame_cb.
        self.h264_frame_stats = []
        self.h264_stats_file = open(
            os.path.join(self.tempd, 'h264_stats.csv'), 'w+')
        self.h264_stats_writer = csv.DictWriter(
            self.h264_stats_file, ['fps', 'bitrate'])
        self.h264_stats_writer.writeheader()

    def start(self):
        """Connect to the drone, configure recording/callbacks, start streaming."""
        # Connect to the drone
        self.drone.connection()
        self.drone.set_streaming_output_files(
            h264_data_file=os.path.join(self.tempd, 'h264_data.264'),
            h264_meta_file=os.path.join(self.tempd, 'h264_metadata.json'),
            # Here, we don't record the (huge) raw YUV video stream
            # raw_data_file=os.path.join(self.tempd,'raw_data.bin'),
            # raw_meta_file=os.path.join(self.tempd,'raw_metadata.json'),
        )
        # Setup your callback functions to do some live video processing
        self.drone.set_streaming_callbacks(
            raw_cb=self.yuv_frame_cb,
            h264_cb=self.h264_frame_cb
        )
        # Start video streaming
        self.drone.start_video_streaming()

    def yuv_frame_cb(self, yuv_frame):
        """
        This function will be called by Olympe for each decoded YUV frame.

        :type yuv_frame: olympe.VideoFrame
        """
        # the VideoFrame.info() dictionary contains some useful informations
        # such as the video resolution
        info = yuv_frame.info()
        height, width = info["yuv"]["height"], info["yuv"]["width"]
        # convert pdraw YUV flag to OpenCV YUV flag
        cv2_cvt_color_flag = {
            olympe.PDRAW_YUV_FORMAT_I420: cv2.COLOR_YUV2BGR_I420,
            olympe.PDRAW_YUV_FORMAT_NV12: cv2.COLOR_YUV2BGR_NV12,
        }[info["yuv"]["format"]]
        # yuv_frame.as_ndarray() is a 2D numpy array with the proper "shape"
        # i.e (3 * height / 2, width) because it's a YUV I420 or NV12 frame
        # Use OpenCV to convert the yuv frame to RGB
        cv2frame = cv2.cvtColor(yuv_frame.as_ndarray(), cv2_cvt_color_flag)
        # Use OpenCV to show this frame
        cv2.imshow("Olympe Streaming Example", cv2frame)
        cv2.waitKey(1)  # please OpenCV for 1 ms...

    def h264_frame_cb(self, h264_frame):
        """
        This function will be called by Olympe for each new h264 frame.

        :type h264_frame: olympe.VideoFrame
        """
        # Get a ctypes pointer and size for this h264 frame
        frame_pointer, frame_size = h264_frame.as_ctypes_pointer()
        # For this example we will just compute some basic video stream stats
        # (bitrate and FPS) but we could choose to resend it over an another
        # interface or to decode it with our preferred hardware decoder..
        # Compute some stats and dump them in a csv file
        info = h264_frame.info()
        frame_ts = info["ntp_raw_timestamp"]
        if not bool(info["h264"]["is_sync"]):
            # Drop window entries older than 1 second (timestamps are in µs),
            # so len(window) == frames-per-second and sum(sizes) == bytes/s.
            if len(self.h264_frame_stats) > 0:
                while True:
                    start_ts, _ = self.h264_frame_stats[0]
                    if (start_ts + 1e6) < frame_ts:
                        self.h264_frame_stats.pop(0)
                    else:
                        break
            self.h264_frame_stats.append((frame_ts, frame_size))
            h264_fps = len(self.h264_frame_stats)
            h264_bitrate = (
                8 * sum(map(lambda t: t[1], self.h264_frame_stats)))
            self.h264_stats_writer.writerow(
                {'fps': h264_fps, 'bitrate': h264_bitrate})

    def stop(self):
        # Properly stop the video stream and disconnect.
        # FIX: the original only disconnected, despite this comment — it never
        # stopped the stream and leaked the stats CSV opened in __init__.
        self.drone.stop_video_streaming()
        self.h264_stats_file.close()
        self.drone.disconnection()

    def getgpsfix(self):
        """Poll and print position / GPS-fix / flying-state telemetry every 3 s, forever."""
        print("\n\n GET GPS FUNC CALLED \n\n")
        i = 0
        while True:
            print("\n\n THE LOOP IS STARTING \n\n")
            print("Is GPS Fixed:")
            # Hoist the state lookup: one get_state call instead of three.
            position = self.drone.get_state(PositionChanged)
            print("Latitude:", position["latitude"])
            print("Longitude:", position["longitude"])
            print("Altitude:", position["altitude"])
            print("GPS Fix:", self.drone.get_state(GPSFixStateChanged)["fixed"])
            print("Current Drone Mode:", self.drone.get_state(FlyingStateChanged)["state"])
            time.sleep(3)
            # FIX: the counter was never incremented, so it always printed 1.
            i += 1
            print("\n\n Loop Number {} \n\n".format(i))
            print("\n\n THE LOOP IS ENDING \n\n")

    def gpslock(self):
        """Continuously check the GPS fix; intended to trigger a drop/emergency on loss."""
        while True:
            gpslock = self.drone.get_state(GPSFixStateChanged)["fixed"]
            if gpslock == 0:
                print("\n\n DRONE DROP INITIALISING \n\n")
                # self.drone(Emergency(_timeout=5))
                print("\n\n DRONE DROPPED \n\n")
            else:
                print("\n\n GeoFence Pass \n\n")
            # FIX: throttle the loop — the original busy-waited at 100% CPU,
            # which matters a lot on a Raspberry Pi 3B+.
            time.sleep(1)
if __name__ == "__main__":
    streaming_example = ParrotDrone()
    # creating thread: connection/streaming runs off the main thread so the
    # main thread stays free to supervise.
    startdrone = threading.Thread(target=streaming_example.start)
    print("TASK 2 STARTED \n\n\n")
    gps = threading.Thread(target=streaming_example.getgpsfix)
    print("TASK 2 OVER \n\n\n")
    # ------------------- Main Function Execution ------------------------------------------
    print("MAIN :: Drone Connection Starting \n\n\n")
    startdrone.start()
    print("MAIN :: Drone Connection Successfully \n\n\n")
    # Give the connection and video stream a moment to come up before polling.
    time.sleep(5)
    print("MAIN :: GPS Starting \n\n\n")
    gps.start()
    # streaming_example.gpslock()
    try:
        while True:
            time.sleep(0.1)
    except KeyboardInterrupt:
        # FIX: the original looped forever with no way to shut down cleanly;
        # on Ctrl-C, stop the stream and disconnect instead of dying mid-stream.
        streaming_example.stop()
But I am not able to run the same code on the Raspberry Pi 3B+; instead I get the following output:
W pdraw_dmxstrm: failed to get an input buffer (-11)
W pdraw_dmxstrm: failed to get an input buffer (-11)
I pdraw_decavc: frame input: flush pending, discard frame
I pdraw_decavc: frame input: flush pending, discard frame
[h264 @ 0x6a247540] Reinit context to 1280x720, pix_fmt: yuv420p
W pdraw_dmxstrm: failed to get an input buffer (-11)
I pdraw_decavc: frame input: flush pending, discard frame
[h264 @ 0x6a247540] Reinit context to 1280x720, pix_fmt: yuv420p
W pdraw_dmxstrm: failed to get an input buffer (-11)
I pdraw_decavc: frame input: flush pending, discard frame
[h264 @ 0x6a247540] Reinit context to 1280x720, pix_fmt: yuv420p
I have connected the Raspberry Pi 3B+ to the SkyController 3 via the USB-C port. Olympe is running on the Pi.
I built Olympe on the Raspberry Pi 3B+ by following this thread.
Also, @ndessart, if you have any suggestion or code snippet for getting the Parrot Anafi (physical drone) video feed on a Raspberry Pi, please share it with me.