-
-
Save CasiaFan/684ec8c36624fb5ff61360c71ee9e4ec to your computer and use it in GitHub Desktop.
import subprocess as sp

import numpy as np

# Frame geometry of the decoded stream requested from the pipeline.
WIDTH = 1920
HEIGHT = 1080
FRAME_BYTES = WIDTH * HEIGHT * 4  # RGBA: 4 bytes per pixel

# Number of bytes gst-launch-1.0 prints to stdout before the raw frame data.
# Including it causes frame drift, so it must be skipped first.
# NOTE(review): this header is produced by the camera vendor and its length
# differs between cameras; 528 was found by repeated trial and visualization —
# confirm for your own camera.
HEAD_LENGTH = 528


def build_gst_cmd(location, latency=100):
    """Return a gst-launch-1.0 argv that decodes an RTSP H.264 stream to
    raw RGBA frames written to stdout.

    Parameters
    ----------
    location : str
        RTSP URL of the camera stream.
    latency : int
        rtspsrc jitter-buffer latency in milliseconds.
    """
    return [
        "gst-launch-1.0",
        "rtspsrc", "location={}".format(location), "latency={}".format(latency), "!",
        "queue", "!",
        "rtph264depay", "!",
        "h264parse", "!",
        "nvv4l2decoder", "drop-frame-interval=2", "!",
        "nvvideoconvert", "!",
        "video/x-raw,width={},height={},format=RGBA".format(WIDTH, HEIGHT), "!",
        "queue", "!",
        "filesink", "location=/dev/stdout",
    ]


def main():
    # Imported here so the module can be imported without OpenCV installed.
    import cv2

    gst1 = sp.Popen(
        build_gst_cmd("rtsp://admin:[email protected]/Streaming/Channels/1"),
        stdout=sp.PIPE, bufsize=10)
    gst2 = sp.Popen(
        build_gst_cmd("rtsp://admin:[email protected]/Streaming/Channels/1"),
        stdout=sp.PIPE, bufsize=10)

    # Throw away the vendor stream header so frames do not drift.
    gst1.stdout.read(HEAD_LENGTH)
    gst2.stdout.read(HEAD_LENGTH)

    while True:
        # np.frombuffer replaces the deprecated np.fromstring.
        x = np.frombuffer(gst1.stdout.read(FRAME_BYTES),
                          dtype=np.uint8).reshape((HEIGHT, WIDTH, 4))
        y = np.frombuffer(gst2.stdout.read(FRAME_BYTES),
                          dtype=np.uint8).reshape((HEIGHT, WIDTH, 4))
        # Drop the alpha channel before display.
        cv2.imshow("im1", x[:, :, 0:3])
        cv2.imshow("im2", y[:, :, 0:3])
        if cv2.waitKey(20) & 0xFF == ord('q'):
            break


if __name__ == "__main__":
    main()
Hi,
I want to use a GStreamer pipe with OpenCV on the Jetson Nano. What do I do?
gstream_elemets = (
'rtspsrc location={} latency={} !'
'rtph264depay ! h264parse ! omxh264dec enable-max-performance=1 enable-low-outbuffer={} !'
'nvvidconv ! '
'video/x-raw , format=(string)BGRx !'
'videoconvert ! '
'appsink').format(self.url, self.latency, self.enable_low_outbuffer)
cv2.VideoCapture(gstream_elemets, cv2.CAP_GSTREAMER)
I use GStreamer elements in OpenCV, but I want to use video/x-raw(memory:NVMM) instead of video/x-raw, which is a more efficient way of decoding. But when I use this element I get an error; OpenCV only supports CPU buffers, whereas (memory:NVMM) uses GPU buffers.
gst-launch-1.0 rtspsrc location=rtsp://admin:[email protected]/cam/realmonitor?channel=1&subtype=0 caps = "application/x-rtp, media=(string)video, encoding-name=(string)H264, payload=(int)96" ! rtph265depay ! h265parse ! omxh265dec ! nvvidconv ! 'video/x-raw(memory:NVMM), format=(string)BGRx' ! videoconvert ! fakesink
The above shell command also works correctly in a terminal. I want to run the above command in a pipe and then use it in OpenCV as:
pipe = sp.Popen(gstreamer_elemets, stdout=sp.PIPE, )
cv2.VideoCapture( 'pipe:0'pipe.stdout.fileno() )
I also got an FFmpeg pipe working with OpenCV like this, and it works correctly:
ffmpeg_elemets = ["ffmpeg", # Input stream "-i", rtsp, # Keeps the video in raw video format "-c:v", "rawvideo", # Frame rate "-r", str(20), # Skip audio stream channel # Setting container, it is needed for opencv "-f", "avi", # Piping the output into a name pipe "pipe:"]
and I want to work gstreamer like ffmpeg.
I got green video too (RTSP). How did you solve this problem?
After using format=RGBA I got a greenish image because the second channel of the image is always 255,
so when cvtColor converts it to RGB the green channel is always at its maximum. :(
. . . After struggling for a while . . .
I use this gist plus some workaround and it is working now
my code (roughly)
RTSP_PATH = "rtsp://blabla"
# gst-launch pipeline that decodes RTSP H.264 and writes raw I420 frames to stdout.
GSTREAMER_COMMAND = [
"gst-launch-1.0",
"rtspsrc" ,
"location={}".format(RTSP_PATH), "latency=200", "!",
"queue", "!",
"rtph264depay", "!",
"h264parse", "!",
"nvv4l2decoder", "drop-frame-interval=2", "!",
"nvvideoconvert", "!",
"video/x-raw,format=(string)I420", "!",
"queue", "!",
"filesink", "location=/dev/stdout"
]
# Video Config
width = 1280
height = 720
# I420 is planar YUV 4:2:0 — 1.5 bytes per pixel, laid out as an
# (height*3//2, width) uint8 image for cv2.cvtColor.
receive_height = (height * 3) //2
receive_width = width
# NOTE(review): BUFFSIZE is defined elsewhere in the full implementation.
gst_stream = subprocess.Popen(
GSTREAMER_COMMAND,
stdout=subprocess.PIPE,
bufsize=BUFFSIZE
)
# Throw away the init log / stream header before reading frames.
# NOTE(review): 528+26 was found empirically and may differ per camera.
gst_stream.stdout.read(528+26)
# Received Thread
image = gst_stream.stdout.read(int(receive_height*receive_width))
# np.frombuffer replaces the deprecated np.fromstring.
image = np.frombuffer(image, dtype=np.uint8).reshape((receive_height, receive_width))
image = cv2.cvtColor(image, cv2.COLOR_YUV2BGR_I420)
Full Implementation Here : GIST
Use gst-launch-1.0 --quiet to suppress messages to stdout.
My video is green. How do I resolve this?