Skip to content
This repository has been archived by the owner on Jul 10, 2023. It is now read-only.

Commit

Permalink
Merge pull request from dlstreamer/v0.7.2
Browse files Browse the repository at this point in the history
v0.7.2 release
  • Loading branch information
tobiasmo1 authored Jun 10, 2022
2 parents 87de4c9 + 34afe4f commit c2ece52
Show file tree
Hide file tree
Showing 143 changed files with 5,484 additions and 1,424 deletions.
6 changes: 6 additions & 0 deletions .dockerignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# Avoid issues with container-specific assignment of user permissions
samples/webrtc/grafana/grafana-storage/**/*
samples/webrtc/grafana/grafana-storage
samples/webrtc/webserver/www/js-client/**/*
samples/webrtc/webserver/www/js-client

130 changes: 61 additions & 69 deletions README.md

Large diffs are not rendered by default.

112 changes: 57 additions & 55 deletions vaclient/README.md → client/README.md

Large diffs are not rendered by default.

File renamed without changes.
21 changes: 11 additions & 10 deletions vaclient/arguments.py → client/arguments.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@
import sys
import json
import argparse
import vaclient
import pipeline_client


def get_typed_value(value):
Expand All @@ -53,6 +53,7 @@ def add_request_arguments(parser):
parser.add_argument('--destination', action='append', nargs=2, metavar=('key', 'value'), type=str, \
help='Update destination information with key and value pair')
parser.add_argument('--rtsp-path', type=str, help='RTSP endpoint path')
parser.add_argument('--webrtc-peer-id', type=str, help='WebRTC server side peer id')
parser.add_argument('--parameter', action='append', nargs=2, metavar=('key', 'value'), type=get_typed_value, \
dest='parameters', help='Update request parameter with key and value pair')
parser.add_argument('--parameter-file', type=str, dest='parameter_file', help='Update request parameter \
Expand All @@ -64,7 +65,7 @@ def add_request_arguments(parser):
parser.add_argument('--number-of-streams', type=int, default=1, dest="streams", help='Set number of streams')
parser.add_argument("--status-only", action='store_true', help='Only show status')

def parse_args(program_name="Intel(R) DL Streamer Pipeline Server Client"):
def parse_args(program_name="Pipeline Client"):
"""Process command line options"""
#pylint: disable=too-many-statements
parser = argparse.ArgumentParser(
Expand All @@ -75,40 +76,40 @@ def parse_args(program_name="Intel(R) DL Streamer Pipeline Server Client"):

parser_run = subparsers.add_parser('run', help='Start specified pipeline with specified source. \
Meta-data will be displayed as pipeline runs. Once pipeline ends the average fps is displayed')
parser_run.set_defaults(command=vaclient.run)
parser_run.set_defaults(command=pipeline_client.run)
add_request_arguments(parser_run)
add_common_arguments(parser_run)

parser_start = subparsers.add_parser('start', help='start specified pipeline')
parser_start.set_defaults(command=vaclient.start)
parser_start.set_defaults(command=pipeline_client.start)
add_request_arguments(parser_start)
add_common_arguments(parser_start)

parser_status = subparsers.add_parser('status', help='Print status of specified pipeline')
parser_status.set_defaults(command=vaclient.status)
parser_status.set_defaults(command=pipeline_client.status)
add_instance_arguments(parser_status)
add_common_arguments(parser_status)

parser_wait = subparsers.add_parser('wait', help='Connect to a running pipeline and wait until completion')
parser_wait.set_defaults(command=vaclient.wait)
parser_wait.set_defaults(command=pipeline_client.wait)
add_instance_arguments(parser_wait)
add_common_arguments(parser_wait)

parser_stop = subparsers.add_parser('stop', help='Stop a specified pipeline')
parser_stop.set_defaults(command=vaclient.stop)
parser_stop.set_defaults(command=pipeline_client.stop)
add_instance_arguments(parser_stop)
add_common_arguments(parser_stop)

parser_list_pipelines = subparsers.add_parser('list-pipelines', help='List loaded pipelines')
parser_list_pipelines.set_defaults(command=vaclient.list_pipelines)
parser_list_pipelines.set_defaults(command=pipeline_client.list_pipelines)
add_common_arguments(parser_list_pipelines)

parser_list_models = subparsers.add_parser('list-models', help='List loaded models')
parser_list_models.set_defaults(command=vaclient.list_models)
parser_list_models.set_defaults(command=pipeline_client.list_models)
add_common_arguments(parser_list_models)

parser_list_instances = subparsers.add_parser('list-instances', help='List active pipeline instances')
parser_list_instances.set_defaults(command=vaclient.list_instances)
parser_list_instances.set_defaults(command=pipeline_client.list_instances)
add_common_arguments(parser_list_instances)

parser.add_argument("--quiet", action="store_false",
Expand Down
42 changes: 32 additions & 10 deletions vaclient/vaclient.py → client/pipeline_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
from html.parser import HTMLParser
import requests
import results_watcher
from vaserving.pipeline import Pipeline
from server.pipeline import Pipeline

RESPONSE_SUCCESS = 200  # HTTP 200 OK returned by the pipeline server REST API
TIMEOUT = 30  # request timeout — presumably seconds; TODO confirm against usage
Expand All @@ -40,6 +40,13 @@
"path": ""
}
}
# Destination template for streaming output frames to a WebRTC peer; the empty
# "peer-id" is filled in from the --webrtc-peer-id argument before the request
# is merged into the pipeline's 'destination' section.
WEBRTC_TEMPLATE = {
    "frame": {
        "type": "webrtc",
        "peer-id": ""
    }
}

SERVER_CONNECTION_FAILURE_MESSAGE = "Unable to connect to server, check if the pipeline-server microservice is running"

def html_handle_data(self, data):
Expand Down Expand Up @@ -131,7 +138,7 @@ def start(args):

def stop(args):
    """Stop the pipeline instance given by args and print its final avg FPS.

    print_fps expects a list of status dicts, hence the single-element list.
    """
    if stop_pipeline(args.server_address, args.instance, args.show_request):
        print_fps([get_pipeline_status(args.server_address, args.instance)])
print_fps([get_pipeline_status(args.server_address, args.instance)])

def wait(args):
try:
Expand All @@ -140,11 +147,11 @@ def wait(args):
print(pipeline_status["state"])
else:
print("Unable to fetch status")
print_fps(wait_for_pipeline_completion(args.server_address, args.instance))
print_fps([wait_for_pipeline_completion(args.server_address, args.instance)])
except KeyboardInterrupt:
print()
stop_pipeline(args.pipeline, args.instance)
print_fps(wait_for_pipeline_completion(args.server_address, args.instance))
print_fps([wait_for_pipeline_completion(args.server_address, args.instance)])

def status(args):
pipeline_status = get_pipeline_status(args.server_address, args.instance, args.show_request)
Expand Down Expand Up @@ -202,6 +209,10 @@ def update_request_options(request,
rtsp_template = RTSP_TEMPLATE
rtsp_template['frame']['path'] = args.rtsp_path
request['destination'].update(rtsp_template)
if hasattr(args, 'webrtc_peer_id') and args.webrtc_peer_id:
webrtc_template = WEBRTC_TEMPLATE
webrtc_template['frame']['peer-id'] = args.webrtc_peer_id
request['destination'].update(webrtc_template)
if hasattr(args, 'request_file') and args.request_file:
with open(args.request_file, 'r') as request_file:
request.update(json.load(request_file))
Expand Down Expand Up @@ -256,13 +267,15 @@ def wait_for_pipeline_running(server_address,
timeout_count = 0
while status and not Pipeline.State[status["state"]] == Pipeline.State.RUNNING:
status = get_pipeline_status(server_address, instance_id)
if not status or status["state"] == "ERROR":
raise ValueError("Error in pipeline, please check pipeline-server log messages")
if not status or Pipeline.State[status["state"]].stopped():
break
time.sleep(SLEEP_FOR_STATUS)
timeout_count += 1
if timeout_count * SLEEP_FOR_STATUS >= timeout_sec:
print("Timed out waiting for RUNNING status")
break
if not status or status["state"] == "ERROR":
raise ValueError("Error in pipeline, please check pipeline-server log messages")
return Pipeline.State[status["state"]] == Pipeline.State.RUNNING

def wait_for_pipeline_completion(server_address, instance_id):
Expand All @@ -277,6 +290,7 @@ def wait_for_pipeline_completion(server_address, instance_id):

def wait_for_all_pipeline_completions(server_address, instance_ids, status_only=False):
status = {"state" : "RUNNING"}
status_list = []
stopped = False
num_streams = len(instance_ids)
if num_streams == 0:
Expand All @@ -295,15 +309,17 @@ def wait_for_all_pipeline_completions(server_address, instance_ids, status_only=
instance_id, status["state"], round(status["avg_fps"])))
if not Pipeline.State[status["state"]].stopped():
all_streams_stopped = False
status_list.append(status)
first_pipeline = False
stopped = all_streams_stopped
else:
time.sleep(SLEEP_FOR_STATUS)
status = get_pipeline_status(server_address, instance_ids[0])
stopped = Pipeline.State[status["state"]].stopped()
status_list.append(status)
if status and status["state"] == "ERROR":
raise ValueError("Error in pipeline, please check pipeline-server log messages")
return status
return status_list

def get_pipeline_status(server_address, instance_id, show_request=False):
status_url = urljoin(server_address,
Expand Down Expand Up @@ -361,9 +377,15 @@ def delete(url, show_request=False):
raise ConnectionError(SERVER_CONNECTION_FAILURE_MESSAGE) from error
return None

def print_fps(status):
if status and 'avg_fps' in status:
print('avg_fps: {:.2f}'.format(status['avg_fps']))
def print_fps(status_list):
    """Print the mean avg_fps over every status that reports a positive value.

    Entries that are falsy, lack an 'avg_fps' key, or report a non-positive
    fps are ignored. Nothing is printed when no entry qualifies.
    """
    fps_values = [
        entry['avg_fps']
        for entry in status_list
        if entry and 'avg_fps' in entry and entry['avg_fps'] > 0
    ]
    if fps_values:
        print('avg_fps: {:.2f}'.format(sum(fps_values) / len(fps_values)))

def print_list(item_list):
for item in item_list:
Expand Down
8 changes: 4 additions & 4 deletions vaclient/vaclient.sh → client/pipeline_client.sh
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,10 @@

VOLUME_MOUNT="-v /tmp:/tmp "
IMAGE="dlstreamer-pipeline-server-gstreamer"
VASERVING_ROOT=/home/pipeline-server
PIPELINE_SERVER_ROOT=/home/pipeline-server
ENTRYPOINT="python3"
ENTRYPOINT_ARGS="$VASERVING_ROOT/vaclient $@"
LOCAL_VACLIENT_DIR=$(dirname $(readlink -f "$0"))
ROOT_DIR=$(dirname $LOCAL_VACLIENT_DIR)
ENTRYPOINT_ARGS="$PIPELINE_SERVER_ROOT/client $@"
LOCAL_CLIENT_DIR=$(dirname $(readlink -f "$0"))
ROOT_DIR=$(dirname $LOCAL_CLIENT_DIR)

"$ROOT_DIR/docker/run.sh" $INTERACTIVE --name \"\" --network host --image $IMAGE $VOLUME_MOUNT --entrypoint $ENTRYPOINT --entrypoint-args "$ENTRYPOINT_ARGS"
File renamed without changes.
88 changes: 53 additions & 35 deletions docker/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -24,29 +24,16 @@ USER root

# Dependencies for OpenVINO
ARG BASE=dlstreamer-pipeline-server-gstreamer-base
ENV VA_SERVING_BASE=${BASE}
ENV PIPELINE_SERVER_BASE=${BASE}
SHELL ["/bin/bash", "-c"]

# Creating user pipeline-server and adding it to groups "video" and "users" to use GPU and VPU
ARG USER=pipeline-server
RUN useradd -ms /bin/bash -G video,audio,users ${USER} -d /home/pipeline-server && \
chown ${USER} -R /home/pipeline-server /root

RUN if [ -f /opt/intel/openvino/install_dependencies/install_NEO_OCL_driver.sh ]; then \
/opt/intel/openvino/install_dependencies/install_NEO_OCL_driver.sh -y ; exit 0; \
fi

RUN if [[ ${VA_SERVING_BASE} == *"openvino/ubuntu20_data_runtime:2021.2" ]]; then \
DEBIAN_FRONTEND=noninteractive apt-get update && \
apt-get install -y -q --no-install-recommends \
intel-media-va-driver-non-free \
gstreamer1.0-tools && \
apt-get clean && \
rm -rf /var/lib/apt/lists/* ;\
fi

# Install boost library required for HDDL plugin
RUN if [[ ${VA_SERVING_BASE} == *"openvino/ubuntu20_data_runtime"* ]]; then \
RUN if [[ ${PIPELINE_SERVER_BASE} == *"openvino/ubuntu20_data_runtime"* || ${PIPELINE_SERVER_BASE} == *"intel/dlstreamer"* ]]; then \
DEBIAN_FRONTEND=noninteractive apt-get update && \
apt-get install -y -q --no-install-recommends \
libboost-program-options1.71.0 && \
Expand All @@ -55,29 +42,46 @@ RUN if [[ ${VA_SERVING_BASE} == *"openvino/ubuntu20_data_runtime"* ]]; then \
fi

RUN DEBIAN_FRONTEND=noninteractive apt-get update && \
apt-get upgrade -y -q && \
apt-get dist-upgrade -y -q && \
apt-get install -y -q --no-install-recommends \
gstreamer1.0-nice \
python3-pip && \
apt-get clean && \
rm -rf /var/lib/apt/lists/* ;

RUN if [[ ${VA_SERVING_BASE} == *"openvisualcloud/xeone3-ubuntu1804-analytics-ffmpeg"* ]]; then \
RUN DEBIAN_FRONTEND=noninteractive apt-get update && apt-get install -y libjemalloc-dev

# Install GStreamer packages not present in the dlstreamer base image
RUN if [[ ${PIPELINE_SERVER_BASE} == *"dlstreamer"* ]]; then \
DEBIAN_FRONTEND=noninteractive apt-get update && \
apt-get install -y -q --no-install-recommends \
python3 \
python3-setuptools \
python3-pip && \
gstreamer1.0-plugins-good \
gstreamer1.0-alsa \
gstreamer1.0-libav \
gstreamer1.0-plugins-bad \
gstreamer1.0-plugins-ugly \
gstreamer1.0-tools \
gstreamer1.0-vaapi \
gstreamer1.0-x \
libgstreamer-plugins-bad1.0-0 \
libgstreamer-plugins-base1.0-dev \
libgstreamer1.0-dev && \
apt-get clean && \
rm -rf /var/lib/apt/lists/* ;\
fi

RUN DEBIAN_FRONTEND=noninteractive apt-get update && \
apt-get upgrade -y -q && \
apt-get dist-upgrade -y -q && \
apt-get clean && \
rm -rf /var/lib/apt/lists/* ;

COPY ./requirements.txt /
RUN pip3 install --upgrade pip --no-cache-dir -r /requirements.txt
RUN rm -f /requirements.txt

# Intel(R) DL Streamer Pipeline Server Python Modules
COPY ./vaserving /home/pipeline-server/vaserving
COPY ./vaclient /home/pipeline-server/vaclient
COPY --chown=pipeline-server ./tools /home/pipeline-server/tools
# Pipeline Server Python Modules
COPY ./server /home/pipeline-server/server
COPY ./client /home/pipeline-server/client

# Copy GVA Python extensions
COPY ./extensions /home/pipeline-server/extensions
Expand All @@ -97,9 +101,9 @@ FROM dlstreamer-pipeline-server as do_not_copy_models

# Creates a stage that copies models from the build context
FROM dlstreamer-pipeline-server as copy_models
ONBUILD ARG MODELS_PATH
ONBUILD ENV MODELS_PATH=${MODELS_PATH}
ONBUILD COPY ${MODELS_PATH} /home/pipeline-server/models
ONBUILD ARG PS_MODELS_PATH
ONBUILD ENV PS_MODELS_PATH=${PS_MODELS_PATH}
ONBUILD COPY $PS_MODELS_PATH /home/pipeline-server/models

# Stage that is used is controlled via MODELS_COMMAND build argument
FROM ${MODELS_COMMAND} as dlstreamer-pipeline-server-with-models
Expand All @@ -115,9 +119,9 @@ FROM dlstreamer-pipeline-server-with-models as do_not_copy_pipelines

# Creates a stage that copies pipelines from the build context
FROM dlstreamer-pipeline-server-with-models as copy_pipelines
ONBUILD ARG PIPELINES_PATH
ONBUILD ENV PIPELINES_PATH=${PIPELINES_PATH}
ONBUILD COPY ${PIPELINES_PATH} /home/pipeline-server/pipelines
ONBUILD ARG PS_PIPELINES_PATH
ONBUILD ENV PS_PIPELINES_PATH=${PS_PIPELINES_PATH}
ONBUILD COPY ${PS_PIPELINES_PATH} /home/pipeline-server/pipelines

# Stage that is used is controlled via PIPELINES_COMMAND build argument
FROM ${PIPELINES_COMMAND} as dlstreamer-pipeline-server-with-models-and-pipelines
Expand All @@ -128,22 +132,36 @@ FROM ${PIPELINES_COMMAND} as dlstreamer-pipeline-server-with-models-and-pipeline
# Final stage is controlled by the FINAL_STAGE build argument.

FROM dlstreamer-pipeline-server-with-models-and-pipelines as dlstreamer-pipeline-server-library
ONBUILD RUN rm -rf /home/pipeline-server/vaserving/__main__.py
ONBUILD RUN rm -rf /home/pipeline-server/vaserving/rest_api
ONBUILD RUN rm -rf /home/pipeline-server/server/__main__.py
ONBUILD RUN rm -rf /home/pipeline-server/server/rest_api

FROM dlstreamer-pipeline-server-with-models-and-pipelines as dlstreamer-pipeline-server-service

# Dependencies installed via pip
ONBUILD COPY ./requirements.service.txt /
ONBUILD RUN pip3 install --no-cache-dir -r /requirements.service.txt
ONBUILD RUN rm -f /requirements.service.txt
ONBUILD ENTRYPOINT ["python3", "-m", "vaserving"]

# WebRTC specific dependencies installed via pip
ONBUILD COPY ./requirements.webrtc.txt /
ONBUILD RUN if [[ ${FRAMEWORK} == "gstreamer" ]]; then \
pip3 install --no-cache-dir -r /requirements.webrtc.txt; \
fi
ONBUILD RUN rm -f /requirements.webrtc.txt

ONBUILD ENTRYPOINT ["python3", "-m", "server"]

FROM ${FINAL_STAGE} as deploy

ARG USER=pipeline-server

ENV PYTHONPATH=$PYTHONPATH:/home/pipeline-server
ENV HOME=/home/pipeline-server
ENV PYTHONPATH=/home/pipeline-server:$PYTHONPATH
ENV GST_PLUGIN_PATH=$GST_PLUGIN_PATH:/usr/lib/x86_64-linux-gnu/gstreamer-1.0/
ENV LD_PRELOAD=libjemalloc.so

ENV cl_cache_dir=/home/.cl_cache
RUN mkdir -p -m g+s $cl_cache_dir && chown ${USER}:users $cl_cache_dir

# Prepare XDG_RUNTIME_DIR
ENV XDG_RUNTIME_DIR=/home/.xdg_runtime_dir
Expand Down
Loading

0 comments on commit c2ece52

Please sign in to comment.