<noinclude>
The Image Stitching examples have been reorganized; most of the contents of this page can now be found at https://developer.ridgerun.com/wiki/index.php?title=Image_Stitching_for_NVIDIA_Jetson/Examples/Other_pipelines
{{Image_Stitching_for_NVIDIA_Jetson/Head|previous=Getting_Started/Building_Image_Stitching_for_NVIDIA_Jetson|next=Performance|Image Stitching, CUDA, Stitcher, OpenCV, Panorama}}
 
</noinclude>
 
 
 
<br>
 
<br>
 
<table>
 
<tr>
 
<td><div style="clear:both; float:right">__TOC__</div></td>
 
<td valign=top>
 
{{GStreamer debug}}
 
</td>
 
</table>
 
 
 
This page gives a usage example of the '''cudastitcher''' element.
 
 
 
The perf element can be downloaded from [https://github.com/RidgeRun/gst-perf this repository]; otherwise, it can be removed from the pipelines without any issues. If you run into performance problems, consider executing the '''/usr/bin/jetson_clocks''' binary.
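
For example, the following locks the Jetson clocks at their maximum frequencies before running any of the pipelines on this page:

<syntaxhighlight lang=bash>
# Lock the CPU, GPU and memory clocks at their maximum frequencies (requires root)
sudo /usr/bin/jetson_clocks
</syntaxhighlight>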
 
 
 
== Example pipelines stitching cameras ==
 
 
 
== Two Camera Stitching ==
 
 
 
In the two-image case, the left image is always kept static and the homography transformation is applied to the right image.
 
 
 
=== Displaying a stitch ===
 
<syntaxhighlight lang=bash>
 
HOMOGRAPHY="{\"h00\":7.38511630e-01, \"h01\":1.04317351e-01, \"h02\":1.43471832e+03, \"h10\":-1.07952893e-01, \"h11\":9.89148056e-01, \"h12\":-9.39168804e+00, \"h20\":-2.34496984e-04, \"h21\":3.32061513e-05, \"h22\":1}"
 
 
 
#TODO
 
</syntaxhighlight>
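
The display pipeline is left as a TODO above; the following is a minimal sketch, assuming two 1920x1080 nvarguscamerasrc captures and an nvoverlaysink display (swap in nveglglessink or another video sink if your setup differs):

<syntaxhighlight lang=bash>
gst-launch-1.0 -e cudastitcher name=cuda right-center-homography="$HOMOGRAPHY" \
nvarguscamerasrc maxperf=true sensor-id=0 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_0 \
nvarguscamerasrc maxperf=true sensor-id=1 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_1 \
cuda. ! perf print-arm-load=true ! queue ! nvvidconv ! nvoverlaysink
</syntaxhighlight>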
 
 
 
=== Stitching from two cameras to a TS file ===
 
 
 
<syntaxhighlight lang=bash>
 
HOMOGRAPHY="{\"h00\":7.38511630e-01, \"h01\":1.04317351e-01, \"h02\":1.43471832e+03, \"h10\":-1.07952893e-01, \"h11\":9.89148056e-01, \"h12\":-9.39168804e+00, \"h20\":-2.34496984e-04, \"h21\":3.32061513e-05, \"h22\":1}"
 
OUTVIDEO=/tmp/stitching_result.ts
 
 
 
gst-launch-1.0 -e cudastitcher name=cuda right-center-homography="$HOMOGRAPHY" \
 
nvarguscamerasrc maxperf=true sensor-id=0 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_0 \
 
nvarguscamerasrc maxperf=true sensor-id=1 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_1 \
 
cuda. ! perf print-arm-load=true ! queue ! nvvidconv ! nvv4l2h264enc bitrate=20000000 ! h264parse ! mpegtsmux ! filesink location=$OUTVIDEO
 
</syntaxhighlight>
 
 
 
=== Generating an MP4 stitch from 2 cameras ===
 
 
 
<syntaxhighlight lang=bash>
 
HOMOGRAPHY="{\"h00\":7.38511630e-01, \"h01\":1.04317351e-01, \"h02\":1.43471832e+03, \"h10\":-1.07952893e-01, \"h11\":9.89148056e-01, \"h12\":-9.39168804e+00, \"h20\":-2.34496984e-04, \"h21\":3.32061513e-05, \"h22\":1}"
 
OUTVIDEO=/tmp/stitching_result.mp4
 
 
 
 
 
gst-launch-1.0 -e cudastitcher name=cuda right-center-homography="$HOMOGRAPHY" \
 
nvarguscamerasrc maxperf=true sensor-id=0 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_0 \
 
nvarguscamerasrc maxperf=true sensor-id=1 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_1 \
 
cuda. ! perf print-arm-load=true ! queue ! nvvidconv ! nvv4l2h264enc bitrate=20000000 ! h264parse ! mp4mux ! filesink location=$OUTVIDEO
 
</syntaxhighlight>
 
 
 
=== Streaming a stitch from two cameras via UDP+RTP ===
 
 
 
<syntaxhighlight lang=bash>
 
HOMOGRAPHY="{\"h00\":7.38511630e-01, \"h01\":1.04317351e-01, \"h02\":1.43471832e+03, \"h10\":-1.07952893e-01, \"h11\":9.89148056e-01, \"h12\":-9.39168804e+00, \"h20\":-2.34496984e-04, \"h21\":3.32061513e-05, \"h22\":1}"
 
HOST=127.0.0.1
 
PORT=12345
 
 
 
# Sender
 
gst-launch-1.0 -e cudastitcher name=cuda right-center-homography="$HOMOGRAPHY" \
 
nvarguscamerasrc maxperf=true sensor-id=0 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_0 \
 
nvarguscamerasrc maxperf=true sensor-id=1 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_1 \
 
cuda. ! perf print-arm-load=true ! nvvidconv ! nvv4l2h264enc ! rtph264pay config-interval=10  ! queue ! udpsink host=$HOST port=$PORT
 
</syntaxhighlight>
 
 
 
<syntaxhighlight lang=bash>
 
# Receiver
 
gst-launch-1.0 udpsrc port=$PORT ! 'application/x-rtp, media=(string)video, encoding-name=(string)H264' !  queue ! rtph264depay ! avdec_h264 ! videoconvert ! xvimagesink
 
</syntaxhighlight>
 
 
 
== Stitching from three cameras ==
 
=== Displaying a stitch ===
 
<syntaxhighlight lang=bash>
 
RC_HOMOGRAPHY="{\"h00\":2.47795806e-01, \"h01\":1.83125651e-01, \"h02\":1511, \"h10\":-9.58951851e-02, \"h11\":1.00466096e+00, \"h12\":3.43046193e+01, \"h20\":-1.84997102e-04, \"h21\":4.48703017e-05, \"h22\":1}"
 
LC_HOMOGRAPHY="{\"h00\":1.07033034e+00, \"h01\":-8.91500609e-02, \"h02\":-1504, \"h10\":2.06014232e-02, \"h11\":1.06485384e+00, \"h12\":-2.68451772e+01, \"h20\":3.98503995e-06, \"h21\":4.20860985e-05, \"h22\":1}"
 
 
 
#TODO
 
</syntaxhighlight>
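
The display pipeline is left as a TODO above; the following is a minimal sketch, assuming three 1920x1080 nvarguscamerasrc captures, a stitched output resized to 1920x360 and an nvoverlaysink display:

<syntaxhighlight lang=bash>
gst-launch-1.0 -e cudastitcher name=cuda left-center-homography="$LC_HOMOGRAPHY" right-center-homography="$RC_HOMOGRAPHY" \
nvarguscamerasrc maxperf=true sensor-id=0 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_0 \
nvarguscamerasrc maxperf=true sensor-id=1 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_1 \
nvarguscamerasrc maxperf=true sensor-id=2 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_2 \
cuda. ! perf print-arm-load=true ! queue ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=360" ! nvoverlaysink
</syntaxhighlight>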
 
 
 
=== Stitching from 3 cameras to a TS file ===
 
 
 
<syntaxhighlight lang=bash>
 
RC_HOMOGRAPHY="{\"h00\":2.47795806e-01, \"h01\":1.83125651e-01, \"h02\":1511, \"h10\":-9.58951851e-02, \"h11\":1.00466096e+00, \"h12\":3.43046193e+01, \"h20\":-1.84997102e-04, \"h21\":4.48703017e-05, \"h22\":1}"
 
LC_HOMOGRAPHY="{\"h00\":1.07033034e+00, \"h01\":-8.91500609e-02, \"h02\":-1504, \"h10\":2.06014232e-02, \"h11\":1.06485384e+00, \"h12\":-2.68451772e+01, \"h20\":3.98503995e-06, \"h21\":4.20860985e-05, \"h22\":1}"
 
OUTVIDEO=/tmp/stitching_result.ts
 
 
 
gst-launch-1.0 -e cudastitcher name=cuda left-center-homography="$LC_HOMOGRAPHY" right-center-homography="$RC_HOMOGRAPHY" \
 
nvarguscamerasrc maxperf=true sensor-id=0 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_0 \
 
nvarguscamerasrc maxperf=true sensor-id=1 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_1 \
 
nvarguscamerasrc maxperf=true sensor-id=2 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_2 \
 
cuda. ! perf print-arm-load=true ! queue ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=360" ! nvv4l2h264enc bitrate=20000000 ! h264parse ! mpegtsmux ! filesink location=$OUTVIDEO
 
</syntaxhighlight>
 
 
 
=== Generating an MP4 stitch from 3 cameras ===
 
 
 
<syntaxhighlight lang=bash>
 
RC_HOMOGRAPHY="{\"h00\":2.47795806e-01, \"h01\":1.83125651e-01, \"h02\":1511, \"h10\":-9.58951851e-02, \"h11\":1.00466096e+00, \"h12\":3.43046193e+01, \"h20\":-1.84997102e-04, \"h21\":4.48703017e-05, \"h22\":1}"
 
LC_HOMOGRAPHY="{\"h00\":1.07033034e+00, \"h01\":-8.91500609e-02, \"h02\":-1504, \"h10\":2.06014232e-02, \"h11\":1.06485384e+00, \"h12\":-2.68451772e+01, \"h20\":3.98503995e-06, \"h21\":4.20860985e-05, \"h22\":1}"
 
OUTVIDEO=/tmp/stitching_result.mp4
 
 
 
gst-launch-1.0 -e cudastitcher name=cuda left-center-homography="$LC_HOMOGRAPHY" right-center-homography="$RC_HOMOGRAPHY" \
 
nvarguscamerasrc maxperf=true sensor-id=0 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_0 \
 
nvarguscamerasrc maxperf=true sensor-id=1 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_1 \
 
nvarguscamerasrc maxperf=true sensor-id=2 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_2 \
 
cuda. ! perf print-arm-load=true ! queue ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=360" ! nvv4l2h264enc bitrate=20000000 ! h264parse ! mp4mux ! filesink location=$OUTVIDEO
 
</syntaxhighlight>
 
 
 
=== Generating an MP4 stitch from 3 GRAY8 cameras ===
 
 
 
<syntaxhighlight lang=bash>
 
RC_HOMOGRAPHY="{\"h00\":2.47795806e-01, \"h01\":1.83125651e-01, \"h02\":1511, \"h10\":-9.58951851e-02, \"h11\":1.00466096e+00, \"h12\":3.43046193e+01, \"h20\":-1.84997102e-04, \"h21\":4.48703017e-05, \"h22\":1}"
 
LC_HOMOGRAPHY="{\"h00\":1.07033034e+00, \"h01\":-8.91500609e-02, \"h02\":-1504, \"h10\":2.06014232e-02, \"h11\":1.06485384e+00, \"h12\":-2.68451772e+01, \"h20\":3.98503995e-06, \"h21\":4.20860985e-05, \"h22\":1}"
 
OUTVIDEO=/tmp/stitching_result.mp4
 
 
 
gst-launch-1.0 -e cudastitcher name=cuda left-center-homography="$LC_HOMOGRAPHY" right-center-homography="$RC_HOMOGRAPHY" \
 
nvarguscamerasrc maxperf=true sensor-id=0 ! nvvidconv ! "video/x-raw, width=1920, height=1080,format=GRAY8" ! queue ! cuda.sink_0 \
 
nvarguscamerasrc maxperf=true sensor-id=1 ! nvvidconv ! "video/x-raw, width=1920, height=1080,format=GRAY8" ! queue ! cuda.sink_1 \
 
nvarguscamerasrc maxperf=true sensor-id=2 ! nvvidconv ! "video/x-raw, width=1920, height=1080,format=GRAY8" ! queue ! cuda.sink_2 \
 
cuda. ! perf print-arm-load=true ! queue ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=360" ! nvv4l2h264enc bitrate=20000000 ! h264parse ! mp4mux ! filesink location=$OUTVIDEO
 
</syntaxhighlight>
 
 
 
=== Streaming a stitch from 3 cameras via UDP+RTP ===
 
In the three-camera case, the center image is kept static and the left and right images are transformed towards it using the left-center and right-center homographies.
 
 
 
<syntaxhighlight lang=bash>
 
RC_HOMOGRAPHY="{\"h00\":2.47795806e-01, \"h01\":1.83125651e-01, \"h02\":1511, \"h10\":-9.58951851e-02, \"h11\":1.00466096e+00, \"h12\":3.43046193e+01, \"h20\":-1.84997102e-04, \"h21\":4.48703017e-05, \"h22\":1}"
 
LC_HOMOGRAPHY="{\"h00\":1.07033034e+00, \"h01\":-8.91500609e-02, \"h02\":-1504, \"h10\":2.06014232e-02, \"h11\":1.06485384e+00, \"h12\":-2.68451772e+01, \"h20\":3.98503995e-06, \"h21\":4.20860985e-05, \"h22\":1}"
 
HOST=127.0.0.1
 
PORT=12345
 
 
 
# Sender
 
gst-launch-1.0 -e cudastitcher name=cuda left-center-homography="$LC_HOMOGRAPHY" right-center-homography="$RC_HOMOGRAPHY" \
 
nvarguscamerasrc maxperf=true sensor-id=0 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_0 \
 
nvarguscamerasrc maxperf=true sensor-id=1 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_1 \
 
nvarguscamerasrc maxperf=true sensor-id=2 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_2 \
 
cuda. ! perf print-arm-load=true ! nvvidconv ! nvv4l2h264enc ! rtph264pay config-interval=10  ! queue ! udpsink host=$HOST port=$PORT
 
</syntaxhighlight>
 
 
 
<syntaxhighlight lang=bash>
 
# Receiver
 
gst-launch-1.0 udpsrc port=$PORT ! 'application/x-rtp, media=(string)video, encoding-name=(string)H264' !  queue ! rtph264depay ! avdec_h264 ! videoconvert ! xvimagesink
 
</syntaxhighlight>
 
 
 
== Example pipelines stitching images ==
 
=== Displaying a stitch from two JPEG images ===
 
<syntaxhighlight lang=bash>
 
HOMOGRAPHY="{\"h00\":7.38511630e-01, \"h01\":1.04317351e-01, \"h02\":1.43471832e+03, \"h10\":-1.07952893e-01, \"h11\":9.89148056e-01, \"h12\":-9.39168804e+00, \"h20\":-2.34496984e-04, \"h21\":3.32061513e-05, \"h22\":1}"
 
LEFTIMG=left.jpg
 
RIGHTIMG=right.jpg
 
 
 
#TODO
 
</syntaxhighlight>
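
A minimal display sketch, assuming the JPEGs can be decoded with the standard jpegparse/jpegdec elements and that imagefreeze turns each decoded picture into a continuous stream; the 1920x1080 scaling is an assumption, adjust it to your images, and you may need to force an explicit framerate after imagefreeze:

<syntaxhighlight lang=bash>
gst-launch-1.0 cudastitcher name=cuda right-center-homography="$HOMOGRAPHY" \
filesrc location=$LEFTIMG ! jpegparse ! jpegdec ! imagefreeze ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_0 \
filesrc location=$RIGHTIMG ! jpegparse ! jpegdec ! imagefreeze ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_1 \
cuda. ! queue ! nvvidconv ! nvoverlaysink
</syntaxhighlight>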
 
 
 
=== Saving a stitch from two JPEG images ===
 
<syntaxhighlight lang=bash>
 
HOMOGRAPHY="{\"h00\":7.38511630e-01, \"h01\":1.04317351e-01, \"h02\":1.43471832e+03, \"h10\":-1.07952893e-01, \"h11\":9.89148056e-01, \"h12\":-9.39168804e+00, \"h20\":-2.34496984e-04, \"h21\":3.32061513e-05, \"h22\":1}"
 
LEFTIMG=left.jpg
 
RIGHTIMG=right.jpg
 
OUTIMG=/tmp/stitching_result.jpg
 
 
 
#TODO
 
</syntaxhighlight>
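
A minimal sketch with the same decoding assumptions as above, encoding the stitched frame with nvjpegenc. Each branch pushes a single frame followed by EOS, so with the -e flag the result should be flushed to the output file; if the pipeline stalls, insert imagefreeze after each decoder and stop it with Ctrl+C instead:

<syntaxhighlight lang=bash>
gst-launch-1.0 -e cudastitcher name=cuda right-center-homography="$HOMOGRAPHY" \
filesrc location=$LEFTIMG ! jpegparse ! jpegdec ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_0 \
filesrc location=$RIGHTIMG ! jpegparse ! jpegdec ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_1 \
cuda. ! queue ! nvvidconv ! nvjpegenc ! filesink location=$OUTIMG
</syntaxhighlight>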
 
 
 
== Example pipelines stitching videos ==
 
=== Displaying a stitch from two MP4 videos ===
 
<syntaxhighlight lang=bash>
 
HOMOGRAPHY="{\"h00\":7.38511630e-01, \"h01\":1.04317351e-01, \"h02\":1.43471832e+03, \"h10\":-1.07952893e-01, \"h11\":9.89148056e-01, \"h12\":-9.39168804e+00, \"h20\":-2.34496984e-04, \"h21\":3.32061513e-05, \"h22\":1}"
 
LEFTVID=left.mp4
 
RIGHTVID=right.mp4
 
 
 
#TODO
 
</syntaxhighlight>
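
A minimal display sketch, assuming both MP4 files contain 1080p H.264 video decoded with the nvv4l2decoder hardware decoder (use omxh264dec on older JetPack releases) and an nvoverlaysink display:

<syntaxhighlight lang=bash>
gst-launch-1.0 cudastitcher name=cuda right-center-homography="$HOMOGRAPHY" \
filesrc location=$LEFTVID ! qtdemux ! h264parse ! nvv4l2decoder ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_0 \
filesrc location=$RIGHTVID ! qtdemux ! h264parse ! nvv4l2decoder ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_1 \
cuda. ! queue ! nvvidconv ! nvoverlaysink
</syntaxhighlight>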
 
 
 
=== Saving a stitch from two MP4 videos ===
 
<syntaxhighlight lang=bash>
 
HOMOGRAPHY="{\"h00\":7.38511630e-01, \"h01\":1.04317351e-01, \"h02\":1.43471832e+03, \"h10\":-1.07952893e-01, \"h11\":9.89148056e-01, \"h12\":-9.39168804e+00, \"h20\":-2.34496984e-04, \"h21\":3.32061513e-05, \"h22\":1}"
 
LEFTVID=left.mp4
 
RIGHTVID=right.mp4
 
OUTVID=/tmp/stitching_result.mp4
 
 
 
#TODO
 
</syntaxhighlight>
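
A minimal sketch with the same decoding assumptions as above, re-encoding the stitched result with nvv4l2h264enc into an MP4 file:

<syntaxhighlight lang=bash>
gst-launch-1.0 -e cudastitcher name=cuda right-center-homography="$HOMOGRAPHY" \
filesrc location=$LEFTVID ! qtdemux ! h264parse ! nvv4l2decoder ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_0 \
filesrc location=$RIGHTVID ! qtdemux ! h264parse ! nvv4l2decoder ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_1 \
cuda. ! queue ! nvvidconv ! nvv4l2h264enc bitrate=20000000 ! h264parse ! mp4mux ! filesink location=$OUTVID
</syntaxhighlight>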
 
 
 
== Undistortion Pipelines ==
 
 
 
=== Capture to Display ===
 
 
 
<syntaxhighlight lang=bash>
 
CAMERA_MATRIX="{\"fx\":2.8472876737532920e+03, \"fy\":2.8608529052506838e+03, \"cx\":9.7983673800322515e+02, \"cy\":5.0423299551699932e+02}"
 
DISTORTION_PARAMETERS="{\"k1\":-6.7260720359999060e-01, \"k2\":2.5160831522455513e+00, \"p1\":5.4007310542765141e-02, \"p2\":-1.1365265232659062e-02, \"k3\":-1.2760075297700798e+01 }"
 
 
 
gst-launch-1.0 nvarguscamerasrc ! cudaundistort distortion-model=brown-conrady camera-matrix="$CAMERA_MATRIX" distortion-parameters="$DISTORTION_PARAMETERS" ! nvoverlaysink
 
</syntaxhighlight>
 
 
 
=== 3 cameras stitching with undistort element ===
 
 
 
This example combines both undistortion and stitching in a single pipeline.
 
 
 
<syntaxhighlight lang=bash>
 
OUTVIDEO=/tmp/stitching_result.mp4
 
RC_HOMOGRAPHY="{\"h00\":0.18846164727524536,\"h01\":0.3094433487541917, \"h02\":471.7246223741918, \"h10\":-0.1970199049349592, \"h11\":0.9946393933468606, \"h12\":26.75293460418449, \"h20\":-0.000722663093587326, \"h21\":0.00021396368386953087, \"h22\":1}"
 
LC_HOMOGRAPHY="{\"h00\":1.1217838548058883,\"h01\":0.07335969882181002, \"h02\":-453.68800318355966, \"h10\":0.023170492179664638, \"h11\":1.2173787645317036, \"h12\":-9.14050414930091, \"h20\":0.0002687772494613081, \"h21\":0.00016446099177990192, \"h22\":1}"
 
 
 
CAMERA_MATRIX="{\"fx\":2.8472876737532920e+03, \"fy\":2.8608529052506838e+03, \"cx\":9.7983673800322515e+02, \"cy\":5.0423299551699932e+02}"
 
DISTORTION_PARAMETERS="{\"k1\":-6.7260720359999060e-01, \"k2\":2.5160831522455513e+00, \"p1\":5.4007310542765141e-02, \"p2\":-1.1365265232659062e-02, \"k3\":-1.2760075297700798e+01 }"
 
 
 
gst-launch-1.0 -e cudastitcher name=cuda left-center-homography="$LC_HOMOGRAPHY" right-center-homography="$RC_HOMOGRAPHY" \
 
nvarguscamerasrc maxperf=true sensor-id=0 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! cudaundistort distortion-model=brown-conrady camera-matrix="$CAMERA_MATRIX" distortion-parameters="$DISTORTION_PARAMETERS" ! nvvidconv ! "video/x-raw(memory:NVMM), width=640, height=360" ! queue ! cuda.sink_0 \
 
nvarguscamerasrc maxperf=true sensor-id=1 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! cudaundistort distortion-model=brown-conrady camera-matrix="$CAMERA_MATRIX" distortion-parameters="$DISTORTION_PARAMETERS" ! nvvidconv ! "video/x-raw(memory:NVMM), width=640, height=360" ! queue ! cuda.sink_1 \
 
nvarguscamerasrc maxperf=true sensor-id=2 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! cudaundistort distortion-model=brown-conrady camera-matrix="$CAMERA_MATRIX" distortion-parameters="$DISTORTION_PARAMETERS" ! nvvidconv ! "video/x-raw(memory:NVMM), width=640, height=360" ! queue ! cuda.sink_2 \
 
cuda. ! perf print-arm-load=true ! queue ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=360" ! nvv4l2h264enc bitrate=20000000 ! h264parse ! mpegtsmux ! filesink location=$OUTVIDEO
 
</syntaxhighlight>
 
 
 
 
 
== Running using GSTD ==
 
 
 
[[GStreamer_Daemon]] is gst-launch on steroids: you can create a GStreamer pipeline, play, pause, change speed, skip around, and even change element parameter settings while the pipeline is active.
 
 
 
=== Running with bash GSTD client ===
 
 
 
The main difference in the pipeline syntax is that the homographies now need to be surrounded with escaped single quotes:
 
<syntaxhighlight lang=bash>
 
gstd  # Launch the gstd daemon; it detaches and runs in its own process
 
 
 
gstd-client pipeline_create stitcher_pipeline \
 
cudastitcher name=cuda \
 
left-center-homography=\'{\"h00\":1.07033034e+00, \"h01\":-8.91500609e-02, \"h02\":-1504, \"h10\":2.06014232e-02, \"h11\":1.06485384e+00, \"h12\":-2.68451772e+01, \"h20\":3.98503995e-06, \"h21\":4.20860985e-05, \"h22\":1}\' \

right-center-homography=\'{\"h00\":2.47795806e-01, \"h01\":1.83125651e-01, \"h02\":1511, \"h10\":-9.58951851e-02, \"h11\":1.00466096e+00, \"h12\":3.43046193e+01, \"h20\":-1.84997102e-04, \"h21\":4.48703017e-05, \"h22\":1}\' \
 
nvarguscamerasrc maxperf=true sensor-id=0 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_0 \
 
nvarguscamerasrc maxperf=true sensor-id=1 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_1 \
 
nvarguscamerasrc maxperf=true sensor-id=2 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_2 \
 
cuda. ! perf print-arm-load=true ! queue ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=360" ! nvoverlaysink
 
 
 
 
 
gstd-client pipeline_play stitcher_pipeline
 
</syntaxhighlight>
 
 
 
== Example pipelines for debugging ==
 
=== Dumping output to fakesink ===
 
<syntaxhighlight lang=bash>
 
HOMOGRAPHY="{\"h00\":7.38511630e-01, \"h01\":1.04317351e-01, \"h02\":1.43471832e+03, \"h10\":-1.07952893e-01, \"h11\":9.89148056e-01, \"h12\":-9.39168804e+00, \"h20\":-2.34496984e-04, \"h21\":3.32061513e-05, \"h22\":1}"
 
 
 
gst-launch-1.0 -e cudastitcher name=cuda right-center-homography="$HOMOGRAPHY" nvarguscamerasrc maxperf=true sensor-id=0 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_0 nvarguscamerasrc maxperf=true sensor-id=1 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_1 cuda. ! perf print-arm-load=true ! fakesink
 
</syntaxhighlight>
 
 
 
=== Generating raw video frames ===
 
 
 
<syntaxhighlight lang=bash>
 
HOMOGRAPHY="{\"h00\":7.38511630e-01, \"h01\":1.04317351e-01, \"h02\":1.43471832e+03, \"h10\":-1.07952893e-01, \"h11\":9.89148056e-01, \"h12\":-9.39168804e+00, \"h20\":-2.34496984e-04, \"h21\":3.32061513e-05, \"h22\":1}"
 
OUTVIDEO=/tmp/stitching_result.raw
 
 
 
gst-launch-1.0 -e cudastitcher name=cuda right-center-homography="$HOMOGRAPHY" nvarguscamerasrc maxperf=true sensor-id=0 num-buffers=30 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_0 nvarguscamerasrc maxperf=true sensor-id=1 num-buffers=30 ! nvvidconv ! "video/x-raw(memory:NVMM), width=1920, height=1080" ! queue ! cuda.sink_1 cuda. ! perf print-arm-load=true ! filesink location=$OUTVIDEO
 
</syntaxhighlight>
 
 
 
<noinclude>
 
{{Image_Stitching_for_NVIDIA_Jetson/Foot|Getting_Started/Building_Image_Stitching_for_NVIDIA_Jetson|Contact_Us}}
 
</noinclude>
 
