Difference between revisions of "GstInference/Example pipelines with hierarchical metadata/PC"
Line 241: | Line 241: | ||
</div> | </div> | ||
<div class="col-25"> | <div class="col-25"> | ||
− | <input type="text" id="thickness" placeholder=" | + | <input type="text" id="thickness" placeholder="thickness" disabled="true"> |
</div> | </div> | ||
<div class="col-25"> | <div class="col-25"> | ||
− | <input type="text" id="fontscale" placeholder=" | + | <input type="text" id="fontscale" placeholder="fontscale" disabled="true"> |
</div> | </div> | ||
<div class="col-25"> | <div class="col-25"> | ||
Line 266: | Line 266: | ||
<div class="row"> | <div class="row"> | ||
<button type="button" class="button" onclick="reset_all()">Reset</button> | <button type="button" class="button" onclick="reset_all()">Reset</button> | ||
− | <button type="button" class="button" onclick=" | + | <button type="button" class="button" onclick="print()">Generate!</button> |
</div> | </div> | ||
</form> | </form> | ||
</div> | </div> | ||
− | <!-- ********************** | + | <!-- ****************************************************** --> |
<script> | <script> | ||
Line 331: | Line 331: | ||
facenetv1: "imagenet_labels.txt" | facenetv1: "imagenet_labels.txt" | ||
}; | }; | ||
− | |||
− | |||
function reset_all() { | function reset_all() { | ||
document.getElementById("gen_form").reset(); | document.getElementById("gen_form").reset(); | ||
− | src = ""; | + | var src = ""; |
− | model = ""; | + | var model = ""; |
− | model_props = ""; | + | var model_props = ""; |
− | tee = ""; | + | var tee = ""; |
− | filter = ""; | + | var filter = ""; |
− | crop = ""; | + | var crop = ""; |
− | overlay = ""; | + | var overlay = ""; |
− | sink = "" | + | var sink = ""; |
− | |||
− | |||
− | |||
− | |||
− | |||
− | |||
− | |||
− | |||
− | |||
− | |||
} | } | ||
− | |||
function dynamic_backend_dropdown(platform) { | function dynamic_backend_dropdown(platform) { | ||
Line 377: | Line 364: | ||
option_ty3.value = "tinyyolov3"; | option_ty3.value = "tinyyolov3"; | ||
− | |||
− | |||
switch (backend) | switch (backend) | ||
{ | { | ||
Line 393: | Line 378: | ||
break; | break; | ||
case "tflite" : | case "tflite" : | ||
− | document.getElementById("model"). | + | document.getElementById("model").removeChild("tinyyolov3"); |
− | |||
document.getElementById("inputlayer").disabled=true; | document.getElementById("inputlayer").disabled=true; | ||
document.getElementById("inputlayer").value=null; | document.getElementById("inputlayer").value=null; | ||
Line 400: | Line 384: | ||
document.getElementById("outputlayer").value=null; | document.getElementById("outputlayer").value=null; | ||
break; | break; | ||
− | |||
− | |||
− | |||
− | |||
− | |||
− | |||
− | |||
− | |||
} | } | ||
+ | |||
+ | |||
} | } | ||
Line 432: | Line 410: | ||
overlay = " inferenceoverlay"; | overlay = " inferenceoverlay"; | ||
} else { | } else { | ||
− | + | document.getElementById("thickness").disabled=true; | |
− | + | document.getElementById("style").disabled=true; | |
− | + | document.getElementById("fontscale").disabled=true; | |
+ | document.getElementById("thickness").value = null; | ||
+ | document.getElementById("style").value = null; | ||
+ | document.getElementById("fontscale").value = null; | ||
overlay = ""; | overlay = ""; | ||
} | } | ||
Line 441: | Line 422: | ||
function enable_filter() { | function enable_filter() { | ||
var checkbox = document.getElementById('en_inferencefilter'); | var checkbox = document.getElementById('en_inferencefilter'); | ||
− | |||
if(checkbox.checked == true) { | if(checkbox.checked == true) { | ||
− | + | document.getElementById("filter_class_id").disabled=false; | |
filter = " inferencefilter"; | filter = " inferencefilter"; | ||
} else { | } else { | ||
− | + | document.getElementById("filter_class_id").disabled=true; | |
filter = ""; | filter = ""; | ||
+ | document.getElementById("filter_class_id").value = null; | ||
} | } | ||
} | } | ||
Line 463: | Line 444: | ||
} | } | ||
− | function | + | function print() { |
model = document.getElementById("model").value; | model = document.getElementById("model").value; | ||
src = document.getElementById("source").value; | src = document.getElementById("source").value; | ||
Line 492: | Line 473: | ||
if(overlay != "") { | if(overlay != "") { | ||
− | + | overlay = " inferenceoverlay"; | |
var thickness= document.getElementById("thickness").value; | var thickness= document.getElementById("thickness").value; | ||
var fontscale=document.getElementById("fontscale").value; | var fontscale=document.getElementById("fontscale").value; | ||
Line 507: | Line 488: | ||
} | } | ||
overlay = overlay + " !"; | overlay = overlay + " !"; | ||
− | } | + | } |
if(filter != "") { | if(filter != "") { | ||
Line 516: | Line 497: | ||
filter = filter + " !"; | filter = filter + " !"; | ||
} | } | ||
− | } | + | } |
if(crop != "") { | if(crop != "") { | ||
Line 525: | Line 506: | ||
crop = crop + " !"; | crop = crop + " !"; | ||
} | } | ||
− | } | + | } |
+ | |||
document.getElementById("new_pipeline").value = "gst-launch-1.0 " + model + model_props + src + tee + filter + crop + overlay + sink; | document.getElementById("new_pipeline").value = "gst-launch-1.0 " + model + model_props + src + tee + filter + crop + overlay + sink; | ||
} | } |
Revision as of 12:26, 11 March 2020
![]() | Make sure you also check GstInference's companion project: R2Inference |
Sample pipelines
The following section contains a tool for generating simple GStreamer pipelines with one model of a selected architecture using our hierarchical inference metadata. If you are using an older version, you can check the legacy pipelines section. Please make sure to check the documentation to understand the property usage for each element.
The required elements are:
- Backend
- Model
- Model location
- Labels
- Source
- Sink
The optional elements include:
- inferencefilter
- inferencecrop
- inferenceoverlay
Pipeline generator
The following tool will provide simple pipelines according to the selected elements.
Advanced pipelines