diff --git a/analytics/common/runva.py b/analytics/common/runva.py index bbf867f3c..90512512f 100755 --- a/analytics/common/runva.py +++ b/analytics/common/runva.py @@ -31,76 +31,39 @@ def stop(self): GLib.timeout_add(10,self._noop) self._stop=True - def loop(self, sensor, location, uri, topic, algorithm, algorithmName, resolution={}, zonemap=[]): - if algorithmName=="crowd-counting": - pid,msg=PipelineManager.create_instance(self._pipeline,self._version,{ - "source": { - "uri": uri, - "type":"uri" - }, - "destination": { - "type": "mqtt", - "host": mqtthost, - "clientid": algorithm, - "topic": topic - }, - "tags": { - "sensor": sensor, - "location": location, - "algorithm": algorithm, - "office": { - "lat": office[0], - "lon": office[1], - }, - }, - "destination": { - "type": "mqtt", - "host": mqtthost, - "clientid": algorithm, - "topic": topic, - }, - "parameters": { - "crowd_count": { - "width": resolution["width"], - "height": resolution["height"], - "zonemap": zonemap - }, - "every-nth-frame": every_nth_frame, - "recording_prefix": "/tmp/" + sensor, - } - }) - else: - pid,msg=PipelineManager.create_instance(self._pipeline,self._version,{ - "source": { - "uri": uri, - "type":"uri" - }, - "destination": { - "type": "mqtt", - "host": mqtthost, - "clientid": algorithm, - "topic": topic - }, - "tags": { - "sensor": sensor, - "location": location, - "algorithm": algorithm, - "office": { - "lat": office[0], - "lon": office[1], - }, - }, - "destination": { - "type": "mqtt", - "host": mqtthost, - "clientid": algorithm, - "topic": topic, + def loop(self, sensor, location, uri, algorithm, algorithmName, resolution={"width":0,"height":0}, zonemap=[], topic="analytics"): + + pid,msg=PipelineManager.create_instance(self._pipeline,self._version,{ + "source": { + "uri": uri, + "type":"uri" + }, + "destination": { + "type": "mqtt", + "host": mqtthost, + "clientid": algorithm, + "topic": topic, + }, + "tags": { + "sensor": sensor, + "location": location, + "algorithm": algorithm, + "office": { + "lat": office[0], + "lon": office[1], }, - "parameters": { - "every-nth-frame": every_nth_frame, - "recording_prefix": "/tmp/" + sensor, + }, + "parameters": { + "crowd_count": { # crowd-counting only + "width": resolution["width"], + "height": resolution["height"], + "zonemap": zonemap }, - }) + "every-nth-frame": every_nth_frame, + "recording_prefix": "/tmp/" + sensor, + }, + }) + if pid is None: print("Exception: "+str(msg), flush=True) return diff --git a/analytics/crowd/build.sh b/analytics/crowd/build.sh index eb1a30c75..0018d93cf 100755 --- a/analytics/crowd/build.sh +++ b/analytics/crowd/build.sh @@ -2,7 +2,17 @@ DIR=$(dirname $(readlink -f "$0")) SCENARIO="${2:-stadium}" +FRAMEWORK="${6:-gst}" +case "$FRAMEWORK" in + gst) + ;; + *) + echo "Not Implemented" + exit -1 + ;; +esac + case "$SCENARIO" in *stadium*) . 
"$DIR/../../script/build.sh" diff --git a/analytics/crowd/count-crowd.py b/analytics/crowd/count-crowd.py index 5cb9bfbec..dd6cf3960 100755 --- a/analytics/crowd/count-crowd.py +++ b/analytics/crowd/count-crowd.py @@ -4,7 +4,6 @@ from db_query import DBQuery from signal import signal, SIGTERM from concurrent.futures import ThreadPoolExecutor -from mqtt2db import MQTT2DB from rec2db import Rec2DB from runva import RunVA import os @@ -19,30 +18,25 @@ dbhost = os.environ["DBHOST"] every_nth_frame = int(os.environ["EVERY_NTH_FRAME"]) -mqtt2db=None rec2db=None runva=None stop=False def connect(sensor, location, uri, algorithm, algorithmName, resolution, zonemap): - global mqtt2db, rec2db, runva + global rec2db, runva try: - mqtt2db=MQTT2DB(algorithm) # this waits for mqtt rec2db=Rec2DB(sensor) runva=RunVA("crowd_counting") - topic=str(uuid.uuid4()) # topic must be different as camera may reconnect with ThreadPoolExecutor(2) as e: - e.submit(mqtt2db.loop, topic) e.submit(rec2db.loop) # any VA exit indicates a camera disconnect with ThreadPoolExecutor(1) as e1: - e1.submit(runva.loop, sensor, location, uri, topic, algorithm, algorithmName, resolution, zonemap) + e1.submit(runva.loop, sensor, location, uri, algorithm, algorithmName, resolution, zonemap) if not stop: - mqtt2db.stop() rec2db.stop() raise Exception("VA exited. This should not happen.") diff --git a/analytics/crowd/mqtt2db.py b/analytics/crowd/mqtt2db.py deleted file mode 100644 index df75740bd..000000000 --- a/analytics/crowd/mqtt2db.py +++ /dev/null @@ -1,78 +0,0 @@ -#!/usr/bin/python3 - -from db_ingest import DBIngest -import paho.mqtt.client as mqtt -from threading import Thread, Condition -import json -import time -import sys -import os - -mqtthost = os.environ["MQTTHOST"] -scenario = os.environ["SCENARIO"] -dbhost = os.environ["DBHOST"] -office = list(map(float, os.environ["OFFICE"].split(","))) - -class MQTT2DB(object): - def __init__(self, algorithm): - super(MQTT2DB,self).__init__() - self._mqtt=mqtt.Client("feeder_" + algorithm) - self._db=DBIngest(host=dbhost, index="analytics", office=office) - self._cache=[] - self._cond=Condition() - - def loop(self, topic): - self._stop=False - Thread(target=self.todb).start() - - while True: - try: - self._mqtt.connect(mqtthost) - break - except Exception as e: - print("Exception: "+str(e), flush=True) - time.sleep(10) - - self._mqtt.on_message = self.on_message - self._mqtt.subscribe(topic) - self._mqtt.loop_forever() - - def _add1(self, item=None): - self._cond.acquire() - if item: self._cache.append(item) - self._cond.notify() - self._cond.release() - - def stop(self): - self._mqtt.disconnect() - self._stop=True - self._add1() - - def on_message(self, client, userdata, message): - try: - r=json.loads(str(message.payload.decode("utf-8", "ignore"))) - r.update(r["tags"]) - del r["tags"] - if "real_base" not in r: r["real_base"]=0 - r["time"]=int((r["real_base"]+r["timestamp"])/1000000) - - if "objects" in r and scenario == "traffic": r["nobjects"]=int(len(r["objects"])) - if "objects" in r and scenario == "stadium": r["count"]={"people":len(r["objects"])} - except Exception as e: - print("Exception: "+str(e), flush=True) - - self._add1(r) - - def todb(self): - while not self._stop: - self._cond.acquire() - self._cond.wait() - bulk=self._cache - self._cache=[] - self._cond.release() - - try: - self._db.ingest_bulk(bulk) - except Exception as e: - print("Exception: "+str(e), flush=True) - diff --git a/analytics/entrance/VCAC-A/gst/Dockerfile b/analytics/entrance/VCAC-A/gst/Dockerfile 
index f08711df2..6a2129e42 100644 --- a/analytics/entrance/VCAC-A/gst/Dockerfile +++ b/analytics/entrance/VCAC-A/gst/Dockerfile @@ -7,9 +7,9 @@ RUN apt-get update -qq && apt-get install -qq python3-paho-mqtt python3-ply pyt COPY --from=smtc_common /home/*.py /home/ COPY *.py /home/ COPY models /home/models -COPY VCAC-A/gst/pipeline /home/pipelines/people_counting +COPY VCAC-A/gst/pipeline /home/pipelines/entrance_counting COPY custom_transforms /home/custom_transforms -CMD ["/home/count-people.py"] +CMD ["/home/count-entrance.py"] ENV PATH=${PATH}:/home/custom_transforms #### diff --git a/analytics/entrance/Xeon/gst/Dockerfile b/analytics/entrance/Xeon/gst/Dockerfile index 8ac231172..f1b5217d6 100644 --- a/analytics/entrance/Xeon/gst/Dockerfile +++ b/analytics/entrance/Xeon/gst/Dockerfile @@ -6,9 +6,9 @@ RUN apt-get update -qq && apt-get install -qq python3-paho-mqtt python3-ply pyt COPY --from=smtc_common /home/*.py /home/ COPY *.py /home/ COPY models /home/models -COPY Xeon/gst/pipeline /home/pipelines/people_counting +COPY Xeon/gst/pipeline /home/pipelines/entrance_counting COPY custom_transforms /home/custom_transforms -CMD ["/home/count-people.py"] +CMD ["/home/count-entrance.py"] ENV PATH=${PATH}:/home/custom_transforms #### diff --git a/analytics/entrance/build.sh b/analytics/entrance/build.sh index eb1a30c75..f188161fe 100755 --- a/analytics/entrance/build.sh +++ b/analytics/entrance/build.sh @@ -2,6 +2,16 @@ DIR=$(dirname $(readlink -f "$0")) SCENARIO="${2:-stadium}" +FRAMEWORK="${6:-gst}" + +case "$FRAMEWORK" in + gst) + ;; + *) + echo "Not Implemented" + exit -1 + ;; +esac case "$SCENARIO" in *stadium*) diff --git a/analytics/entrance/count-people.py b/analytics/entrance/count-entrance.py similarity index 75% rename from analytics/entrance/count-people.py rename to analytics/entrance/count-entrance.py index 5e7508779..82ce6e25a 100755 --- a/analytics/entrance/count-people.py +++ b/analytics/entrance/count-entrance.py @@ -4,7 +4,6 @@ from db_query import DBQuery from signal import signal, SIGTERM from concurrent.futures import ThreadPoolExecutor -from mqtt2db import MQTT2DB from rec2db import Rec2DB from runva import RunVA import os @@ -15,30 +14,25 @@ dbhost = os.environ["DBHOST"] every_nth_frame = int(os.environ["EVERY_NTH_FRAME"]) -mqtt2db=None rec2db=None runva=None stop=False def connect(sensor, location, uri, algorithm, algorithmName): - global mqtt2db, rec2db, runva + global rec2db, runva try: - mqtt2db=MQTT2DB(algorithm) # this waits for mqtt rec2db=Rec2DB(sensor) - runva=RunVA("people_counting") + runva=RunVA("entrance_counting") - topic=str(uuid.uuid4()) # topic must be different as camera may reconnect with ThreadPoolExecutor(2) as e: - e.submit(mqtt2db.loop, topic) e.submit(rec2db.loop) # any VA exit indicates a camera disconnect with ThreadPoolExecutor(1) as e1: - e1.submit(runva.loop, sensor, location, uri, topic, algorithm, algorithmName) + e1.submit(runva.loop, sensor, location, uri, algorithm, algorithmName) if not stop: - mqtt2db.stop() rec2db.stop() raise Exception("VA exited. 
This should not happen.") @@ -58,7 +52,7 @@ def quit_service(signum, sigframe): while not stop: try: algorithm=dba.ingest({ - "name": "people-counting", + "name": "entrance-counting", "office": { "lat": office[0], "lon": office[1], @@ -68,14 +62,14 @@ def quit_service(signum, sigframe): })["_id"] break except Exception as e: - print("Exception in count-people register algorithm: "+str(e), flush=True) + print("Exception in count-entrance register algorithm: "+str(e), flush=True) time.sleep(10) # compete for a sensor connection while not stop: try: print("Searching...", flush=True) - for sensor in dbs.search("sensor:'camera' and status:'idle' and algorithm='people-counting' and office:["+str(office[0])+","+str(office[1])+"]"): + for sensor in dbs.search("sensor:'camera' and status:'idle' and algorithm='entrance-counting' and office:["+str(office[0])+","+str(office[1])+"]"): try: # compete (with other va instances) for a sensor r=dbs.update(sensor["_id"],{"status":"streaming"},seq_no=sensor["_seq_no"],primary_term=sensor["_primary_term"]) @@ -89,10 +83,10 @@ def quit_service(signum, sigframe): if stop: break except Exception as e: - print("Exception in count-people search sensor: "+str(e), flush=True) + print("Exception in count-entrance search sensor: "+str(e), flush=True) except Exception as e: - print("Exception in count-people sensor connection: "+str(e), flush=True) + print("Exception in count-entrance sensor connection: "+str(e), flush=True) time.sleep(10) diff --git a/analytics/entrance/mqtt2db.py b/analytics/entrance/mqtt2db.py deleted file mode 100755 index df75740bd..000000000 --- a/analytics/entrance/mqtt2db.py +++ /dev/null @@ -1,78 +0,0 @@ -#!/usr/bin/python3 - -from db_ingest import DBIngest -import paho.mqtt.client as mqtt -from threading import Thread, Condition -import json -import time -import sys -import os - -mqtthost = os.environ["MQTTHOST"] -scenario = os.environ["SCENARIO"] -dbhost = os.environ["DBHOST"] -office = list(map(float, os.environ["OFFICE"].split(","))) - -class MQTT2DB(object): - def __init__(self, algorithm): - super(MQTT2DB,self).__init__() - self._mqtt=mqtt.Client("feeder_" + algorithm) - self._db=DBIngest(host=dbhost, index="analytics", office=office) - self._cache=[] - self._cond=Condition() - - def loop(self, topic): - self._stop=False - Thread(target=self.todb).start() - - while True: - try: - self._mqtt.connect(mqtthost) - break - except Exception as e: - print("Exception: "+str(e), flush=True) - time.sleep(10) - - self._mqtt.on_message = self.on_message - self._mqtt.subscribe(topic) - self._mqtt.loop_forever() - - def _add1(self, item=None): - self._cond.acquire() - if item: self._cache.append(item) - self._cond.notify() - self._cond.release() - - def stop(self): - self._mqtt.disconnect() - self._stop=True - self._add1() - - def on_message(self, client, userdata, message): - try: - r=json.loads(str(message.payload.decode("utf-8", "ignore"))) - r.update(r["tags"]) - del r["tags"] - if "real_base" not in r: r["real_base"]=0 - r["time"]=int((r["real_base"]+r["timestamp"])/1000000) - - if "objects" in r and scenario == "traffic": r["nobjects"]=int(len(r["objects"])) - if "objects" in r and scenario == "stadium": r["count"]={"people":len(r["objects"])} - except Exception as e: - print("Exception: "+str(e), flush=True) - - self._add1(r) - - def todb(self): - while not self._stop: - self._cond.acquire() - self._cond.wait() - bulk=self._cache - self._cache=[] - self._cond.release() - - try: - self._db.ingest_bulk(bulk) - except Exception as e: - 
print("Exception: "+str(e), flush=True) - diff --git a/analytics/mqtt2db/CMakeLists.txt b/analytics/mqtt2db/CMakeLists.txt new file mode 100644 index 000000000..71c51a46a --- /dev/null +++ b/analytics/mqtt2db/CMakeLists.txt @@ -0,0 +1,3 @@ +set(service "smtc_mqtt2db") +include("${CMAKE_SOURCE_DIR}/script/service.cmake") +add_dependencies(build_${service} build_smtc_common) diff --git a/analytics/mqtt2db/Dockerfile b/analytics/mqtt2db/Dockerfile new file mode 100644 index 000000000..81e63b75c --- /dev/null +++ b/analytics/mqtt2db/Dockerfile @@ -0,0 +1,17 @@ + +FROM smtc_common +RUN pip3 install paho-mqtt==1.3.0 +COPY *.py /home/ +CMD ["/home/mqtt2db.py"] + +#### +ARG USER +ARG GROUP +ARG UID +ARG GID +## must use ; here to ignore user exist status code +RUN [ ${GID} -gt 0 ] && groupadd -f -g ${GID} ${GROUP}; \ + [ ${UID} -gt 0 ] && useradd -d /home/${USER} -g ${GID} -K UID_MAX=${UID} -K UID_MIN=${UID} ${USER}; \ + echo +USER ${UID} +#### diff --git a/analytics/mqtt2db/build.sh b/analytics/mqtt2db/build.sh new file mode 100755 index 000000000..13044725e --- /dev/null +++ b/analytics/mqtt2db/build.sh @@ -0,0 +1,6 @@ +#!/bin/bash -e + +IMAGE="smtc_mqtt2db" +DIR=$(dirname $(readlink -f "$0")) + +. "$DIR/../../script/build.sh" diff --git a/analytics/object/mqtt2db.py b/analytics/mqtt2db/mqtt2db.py similarity index 64% rename from analytics/object/mqtt2db.py rename to analytics/mqtt2db/mqtt2db.py index 09c47710e..3b7d91909 100755 --- a/analytics/object/mqtt2db.py +++ b/analytics/mqtt2db/mqtt2db.py @@ -2,7 +2,9 @@ from db_ingest import DBIngest import paho.mqtt.client as mqtt -from threading import Thread, Condition +from threading import Thread, Condition, Timer +from signal import signal, SIGTERM +import traceback import json import time import sys @@ -14,29 +16,40 @@ office = list(map(float, os.environ["OFFICE"].split(","))) class MQTT2DB(object): - def __init__(self, algorithm): + def __init__(self): super(MQTT2DB,self).__init__() - self._mqtt=mqtt.Client("feeder_" + algorithm) + self._db=DBIngest(host=dbhost, index="analytics", office=office) self._cache=[] self._cond=Condition() - def loop(self, topic): - self._stop=False - Thread(target=self.todb).start() + self._mqtt=mqtt.Client() + self._mqtt.on_message = self.on_message + self._mqtt.on_disconnect = self.on_disconnect + def loop(self, topic="analytics"): + print("connecting mqtt", flush=True) + timer=Timer(10,self._connect_watchdog) + timer.start() while True: try: self._mqtt.connect(mqtthost) break - except Exception as e: - print("Exception: "+str(e), flush=True) - time.sleep(10) + except: + print(trackback.format_exc(),flush=True) + timer.cancel() + print("mqtt connected", flush=True) + + self._stop=False + Thread(target=self.todb).start() - self._mqtt.on_message = self.on_message self._mqtt.subscribe(topic) self._mqtt.loop_forever() + def _connect_watchdog(self): + print("quit due to mqtt timeout", flush=True) + exit(-1) + def _add1(self, item=None): self._cond.acquire() if item: self._cache.append(item) @@ -45,6 +58,8 @@ def _add1(self, item=None): def stop(self): self._mqtt.disconnect() + + def on_disconnect(self, client, userdata, rc): self._stop=True self._add1() @@ -57,9 +72,9 @@ def on_message(self, client, userdata, message): r["time"]=int((r["real_base"]+r["timestamp"])/1000000) if "objects" in r and scenario == "traffic": r["nobjects"]=int(len(r["objects"])) - if "objects" in r and scenario == "stadium": r["count"]={"queue":len(r["objects"])} - except Exception as e: - print("Exception: "+str(e), flush=True) + if "objects" in 
r and scenario == "stadium": r["count"]={"people":len(r["objects"])} + except: + print(trackback.format_exc(),flush=True) self._add1(r) @@ -73,5 +88,13 @@ def todb(self): try: self._db.ingest_bulk(bulk) - except Exception as e: - print("Exception: "+str(e), flush=True) + except: + print(trackback.format_exc(),flush=True) + +mqtt2db=MQTT2DB() + +def quit_service(signum, sigframe): + mqtt2db.stop() + +signal(SIGTERM, quit_service) +mqtt2db.loop() diff --git a/analytics/mqtt2db/shell.sh b/analytics/mqtt2db/shell.sh new file mode 100755 index 000000000..4d7a2897a --- /dev/null +++ b/analytics/mqtt2db/shell.sh @@ -0,0 +1,6 @@ +#!/bin/bash -e + +IMAGE="smtc_mqtt2db" +DIR=$(dirname $(readlink -f "$0")) + +. "$DIR/../../script/shell.sh" diff --git a/analytics/object/detect-object.py b/analytics/object/detect-object.py index f95c36a28..896714f0f 100755 --- a/analytics/object/detect-object.py +++ b/analytics/object/detect-object.py @@ -4,7 +4,6 @@ from db_query import DBQuery from signal import signal, SIGTERM from concurrent.futures import ThreadPoolExecutor -from mqtt2db import MQTT2DB from rec2db import Rec2DB from runva import RunVA import os @@ -16,7 +15,6 @@ dbhost = os.environ["DBHOST"] every_nth_frame = int(os.environ["EVERY_NTH_FRAME"]) -mqtt2db=None rec2db=None runva=None stop=False @@ -24,24 +22,20 @@ version=0 def connect(sensor, location, uri, algorithm, algorithmName): - global mqtt2db, rec2db, runva + global rec2db, runva try: - mqtt2db=MQTT2DB(algorithm) # this waits for mqtt rec2db=Rec2DB(sensor) runva=RunVA("object_detection", version) - topic=str(uuid.uuid4()) # topic must be different as camera may reconnect with ThreadPoolExecutor(2) as e: - e.submit(mqtt2db.loop, topic) e.submit(rec2db.loop) # any VA exit indicates a camera disconnect with ThreadPoolExecutor(1) as e1: - e1.submit(runva.loop, sensor, location, uri, topic, algorithm, algorithmName) + e1.submit(runva.loop, sensor, location, uri, algorithm, algorithmName) if not stop: - mqtt2db.stop() rec2db.stop() raise Exception("VA exited. 
This should not happen.") @@ -62,7 +56,7 @@ def quit_service(signum, sigframe): myAlgorithm="object-detection" if scenario=="stadium": version = 2 - myAlgorithm="queue-counting" + myAlgorithm="svcq-counting" # register algorithm (while waiting for db to startup) while True: diff --git a/cloud/html/js/preview.js b/cloud/html/js/preview.js index 642b6050a..bb17e5fc9 100644 --- a/cloud/html/js/preview.js +++ b/cloud/html/js/preview.js @@ -16,7 +16,10 @@ var preview={ sensorctx.marker.closeTooltip(); preview.play(div,sensor); + var offset={x:0,y:0}; div.attr('draggable','true').bind('dragstart',function (e) { + var divoffset=$(this).offset(); + offset={x:e.pageX-divoffset.left,y:e.pageY-divoffset.top}; e.originalEvent.dataTransfer.setData("application/json",JSON.stringify(sensor)); page.find("#mapCanvas").unbind('dragover').on('dragover', function (e) { e.preventDefault(); @@ -24,7 +27,8 @@ var preview={ e.preventDefault(); var div=$("[template] [preview-template]").clone().addClass("max-size"); var icon=L.divIcon({html:div[0],iconSize:[300,200],iconAnchor:[0,0]}); - var marker=L.marker(map.mouseEventToLatLng(e),{icon:icon,draggable:true}).addTo(page.data('preview').layer); + var e1={clientX:e.clientX-offset.x,clientY:e.clientY-offset.y}; + var marker=L.marker(map.mouseEventToLatLng(e1),{icon:icon,draggable:true}).addTo(page.data('preview').layer); marker._zoomargs={zoom:map.getZoom(),width:300,height:200}; $(marker._icon).addClass("page-home-preview-screen"); var sensor1=JSON.parse(e.originalEvent.dataTransfer.getData("application/json")); diff --git a/cloud/html/js/scenario.js b/cloud/html/js/scenario.js index fc3f287e6..eb697d141 100644 --- a/cloud/html/js/scenario.js +++ b/cloud/html/js/scenario.js @@ -225,14 +225,9 @@ var scenarios={ var fields=[], iconloc=null; if (sensor._source.algorithm=="object-detection") { fields.push("nobjects"); - } - if (sensor._source.algorithm=="people-counting") { + } else if (sensor._source.algorithm=="entrance-counting" || sensor._source.algorithm=="svcq-counting") { fields.push("count.people"); - } - if (sensor._source.algorithm=="queue-counting") { - fields.push("count.queue"); - } - if (sensor._source.algorithm=="crowd-counting") { + } else if (sensor._source.algorithm=="crowd-counting") { iconloc=sensorctx.zonemap.getBounds().getCenter(); $.each(sensor._source.zones,function (x,v) { fields.push("count.zone"+v); diff --git a/cloud/html/js/stats.js b/cloud/html/js/stats.js index dbf222c96..d4c987999 100644 --- a/cloud/html/js/stats.js +++ b/cloud/html/js/stats.js @@ -70,7 +70,10 @@ var stats={ }, create: function (sensorctx, sensor, page, map, create_chart_icon) { sensorctx.text=L.tooltip({permanent:true,direction:'center',className:'tooltip_text'}); + var offset={x:0,y:0}; var div=$('
').on('dragstart', function (e) { + var divoffset=$(this).offset(); + offset={x:e.pageX-divoffset.left,y:e.pageY-divoffset.top}; e.originalEvent.dataTransfer.setData('application/json',JSON.stringify(sensor)); page.find("#mapCanvas").unbind('dragover').on('dragover', function (e) { e.preventDefault(); @@ -78,7 +81,8 @@ var stats={ e.preventDefault(); var div1=div.clone().removeAttr('draggable').css({width:'100%',height:'100%'}); var icon1=L.divIcon({html:div1[0],iconSize:[350,200],iconAnchor:[0,0]}); - var marker1=L.marker(map.mouseEventToLatLng(e),{icon:icon1,draggable:true}).addTo(page.data('stat').layer); + var e1={clientX:e.clientX-offset.x,clientY:e.clientY-offset.y}; + var marker1=L.marker(map.mouseEventToLatLng(e1),{icon:icon1,draggable:true}).addTo(page.data('stat').layer); marker1._sensor=JSON.parse(e.originalEvent.dataTransfer.getData('application/json')); marker1._chart=stats.create_chart(div1.find('canvas')); marker1._zoomargs={zoom:map.getZoom(),width:350,height:200}; diff --git a/common/shell.sh b/common/shell.sh new file mode 100755 index 000000000..b2e906694 --- /dev/null +++ b/common/shell.sh @@ -0,0 +1,6 @@ +#!/bin/bash -e + +IMAGE="smtc_common" +DIR=$(dirname $(readlink -f "$0")) + +. "$DIR/../script/shell.sh" diff --git a/deployment/docker-swarm/analytics.VCAC-A.m4 b/deployment/docker-swarm/analytics.VCAC-A.m4 index e60d0278f..297c53bb5 100644 --- a/deployment/docker-swarm/analytics.VCAC-A.m4 +++ b/deployment/docker-swarm/analytics.VCAC-A.m4 @@ -59,7 +59,7 @@ ifelse(defn(`SCENARIO_NAME'),`stadium',` VCAC_OFFICE: "defn(`OFFICE_LOCATION')" VCAC_DBHOST: "http://ifelse(eval(defn(`NOFFICES')>1),1,defn(`OFFICE_NAME')_db,db):9200" VCAC_MQTTHOST: "defn(`OFFICE_NAME')_mqtt" - VCAC_EVERY_NTH_FRAME: 6 + VCAC_EVERY_NTH_FRAME: 30 VCAC_SCENARIO: "defn(`SCENARIO')" VCAC_STHOST: "http://defn(`OFFICE_NAME')_storage:8080/api/upload" VCAC_NO_PROXY: "*" @@ -72,7 +72,7 @@ ifelse(defn(`SCENARIO_NAME'),`stadium',` constraints: - node.labels.vcac_zone==yes - defn(`OFFICE_NAME')_analytics_queue: + defn(`OFFICE_NAME')_analytics_svcq: image: vcac-container-launcher:latest volumes: - /var/run/docker.sock:/var/run/docker.sock diff --git a/deployment/docker-swarm/analytics.Xeon.m4 b/deployment/docker-swarm/analytics.Xeon.m4 index 9f0bac1fe..7b4efccfd 100644 --- a/deployment/docker-swarm/analytics.Xeon.m4 +++ b/deployment/docker-swarm/analytics.Xeon.m4 @@ -51,7 +51,7 @@ ifelse(defn(`SCENARIO_NAME'),`stadium',` DBHOST: "http://ifelse(eval(defn(`NOFFICES')>1),1,defn(`OFFICE_NAME')_db,db):9200" MQTTHOST: "defn(`OFFICE_NAME')_mqtt" STHOST: "http://defn(`OFFICE_NAME')_storage:8080/api/upload" - EVERY_NTH_FRAME: 6 + EVERY_NTH_FRAME: 30 `SCENARIO': "defn(`SCENARIO')" NO_PROXY: "*" no_proxy: "*" @@ -65,7 +65,7 @@ ifelse(defn(`SCENARIO_NAME'),`stadium',` constraints: - node.labels.vcac_zone!=yes - defn(`OFFICE_NAME')_analytics_queue: + defn(`OFFICE_NAME')_analytics_svcq: `image: smtc_analytics_object_xeon_'defn(`FRAMEWORK'):latest environment: OFFICE: "defn(`OFFICE_LOCATION')" diff --git a/deployment/docker-swarm/camera.m4 b/deployment/docker-swarm/camera.m4 index 7bccdbc5c..9a8ee5856 100644 --- a/deployment/docker-swarm/camera.m4 +++ b/deployment/docker-swarm/camera.m4 @@ -3,10 +3,10 @@ image: smtc_sensor_simulation:latest environment: ifelse(defn(`SCENARIO_NAME'),`traffic',`dnl - FILES: "traffic.mp4$$" + FILES: "_traffic.mp4$$" ')dnl ifelse(defn(`SCENARIO_NAME'),`stadium',`dnl - FILES: "people.mp4$$" + FILES: "_svcq.mp4$$" ')dnl `NCAMERAS': "defn(`NCAMERAS')" RTSP_PORT: "defn(`CAMERA_RTSP_PORT')" @@ -39,10 
+39,10 @@ ifelse(defn(`SCENARIO_NAME'),`stadium',` constraints: - node.labels.vcac_zone!=yes - defn(`OFFICE_NAME')_simulated_cameras_queue: + defn(`OFFICE_NAME')_simulated_cameras_entrance: image: smtc_sensor_simulation:latest environment: - FILES: "queue.mp4$$" + FILES: "_entrance.mp4$$" `NCAMERAS': "defn(`NCAMERAS3')" RTSP_PORT: "defn(`CAMERA_RTSP_PORT')" RTP_PORT: "defn(`CAMERA_RTP_PORT')" diff --git a/deployment/docker-swarm/discovery.m4 b/deployment/docker-swarm/discovery.m4 index 6c5796f8b..dfc0a1981 100644 --- a/deployment/docker-swarm/discovery.m4 +++ b/deployment/docker-swarm/discovery.m4 @@ -42,10 +42,10 @@ ifelse(defn(`SCENARIO_NAME'),`stadium',` constraints: - node.labels.vcac_zone!=yes - defn(`OFFICE_NAME')_camera_discovery_queue: + defn(`OFFICE_NAME')_camera_discovery_entrance: image: smtc_onvif_discovery:latest environment: - PORT_SCAN: "-p T:defn(`CAMERA_RTSP_PORT')-eval(defn(`CAMERA_RTSP_PORT')+defn(`NCAMERAS3')*defn(`CAMERA_PORT_STEP')) defn(`OFFICE_NAME')_simulated_cameras_queue" + PORT_SCAN: "-p T:defn(`CAMERA_RTSP_PORT')-eval(defn(`CAMERA_RTSP_PORT')+defn(`NCAMERAS3')*defn(`CAMERA_PORT_STEP')) defn(`OFFICE_NAME')_simulated_cameras_entrance" SIM_PORT: ifelse(eval(defn(`NCAMERAS3')>0),1,"loop(`CAMERAIDX',1,defn(`NCAMERAS3'),`eval(defn(`CAMERA_RTSP_PORT')+defn(`CAMERAIDX')*defn(`CAMERA_PORT_STEP')-defn(`CAMERA_PORT_STEP'))/')","0") SIM_PREFIX: "`cams'defn(`SCENARIOIDX')`o'defn(`OFFICEIDX')q" OFFICE: "defn(`OFFICE_LOCATION')" diff --git a/deployment/docker-swarm/docker-compose.yml.m4 b/deployment/docker-swarm/docker-compose.yml.m4 index a5620539d..6f79292bd 100644 --- a/deployment/docker-swarm/docker-compose.yml.m4 +++ b/deployment/docker-swarm/docker-compose.yml.m4 @@ -21,6 +21,7 @@ loop(`OFFICEIDX',1,defn(`NOFFICES'),` include(smart-upload.m4) include(analytics.defn(`PLATFORM').m4) include(mqtt.m4) + include(mqtt2db.m4) ') ')') include(secret.m4) diff --git a/deployment/docker-swarm/mqtt2db.m4 b/deployment/docker-swarm/mqtt2db.m4 new file mode 100644 index 000000000..76e4bb19e --- /dev/null +++ b/deployment/docker-swarm/mqtt2db.m4 @@ -0,0 +1,18 @@ + + defn(`OFFICE_NAME')_mqtt2db: + image: smtc_mqtt2db:latest + volumes: + - /etc/localtime:/etc/localtime:ro + environment: + OFFICE: "defn(`OFFICE_LOCATION')" + DBHOST: "http://ifelse(eval(defn(`NOFFICES')>1),1,defn(`OFFICE_NAME')_db,db):9200" + MQTTHOST: "defn(`OFFICE_NAME')_mqtt" + `SCENARIO': "defn(`SCENARIO')" + NO_PROXY: "*" + no_proxy: "*" + networks: + - appnet + deploy: + placement: + constraints: + - node.labels.vcac_zone!=yes diff --git a/deployment/kubernetes/analytics.yaml.m4 b/deployment/kubernetes/analytics.yaml.m4 index 934a6467e..2df05fb0c 100644 --- a/deployment/kubernetes/analytics.yaml.m4 +++ b/deployment/kubernetes/analytics.yaml.m4 @@ -151,7 +151,7 @@ ifelse(defn(`DISCOVER_IP_CAMERA'),`true',`dnl - name: STHOST value: "http://defn(`OFFICE_NAME')-storage-service:8080/api/upload" - name: EVERY_NTH_FRAME - value: "6" + value: "30" - name: `SCENARIO' value: "defn(`SCENARIO')" - name: NO_PROXY @@ -176,18 +176,18 @@ PLATFORM_NODE_SELECTOR(`VCAC-A')dnl apiVersion: apps/v1 kind: Deployment metadata: - name: defn(`OFFICE_NAME')-analytics-queue + name: defn(`OFFICE_NAME')-analytics-svcq labels: - app: defn(`OFFICE_NAME')-analytics-queue + app: defn(`OFFICE_NAME')-analytics-svcq spec: replicas: defn(`NANALYTICS3') selector: matchLabels: - app: defn(`OFFICE_NAME')-analytics-queue + app: defn(`OFFICE_NAME')-analytics-svcq template: metadata: labels: - app: defn(`OFFICE_NAME')-analytics-queue + app: 
defn(`OFFICE_NAME')-analytics-svcq spec: enableServiceLinks: false ifelse(defn(`DISCOVER_IP_CAMERA'),`true',`dnl @@ -195,7 +195,7 @@ ifelse(defn(`DISCOVER_IP_CAMERA'),`true',`dnl dnsPolicy: ClusterFirstWithHostNet ')dnl containers: - - name: defn(`OFFICE_NAME')-analytics-queue + - name: defn(`OFFICE_NAME')-analytics-svcq image: `smtc_analytics_object_'defn(`PLATFORM_SUFFIX')`_'defn(`FRAMEWORK'):latest imagePullPolicy: IfNotPresent env: diff --git a/deployment/kubernetes/camera.yaml.m4 b/deployment/kubernetes/camera.yaml.m4 index b3dfce31d..7d26cd974 100644 --- a/deployment/kubernetes/camera.yaml.m4 +++ b/deployment/kubernetes/camera.yaml.m4 @@ -50,11 +50,11 @@ loop(`CAMERAIDX',1,defn(`NCAMERAS'),`dnl env: ifelse(defn(`SCENARIO_NAME'),`traffic',`dnl - name: FILES - value: "traffic.mp4$$" + value: "_traffic.mp4$$" ')dnl ifelse(defn(`SCENARIO_NAME'),`stadium',`dnl - name: FILES - value: "people.mp4$$" + value: "_svcq.mp4$$" ')dnl - name: `NCAMERAS' value: "defn(`NCAMERAS')" @@ -152,9 +152,9 @@ ifelse(eval(defn(`NCAMERAS3')>0),1,`dnl apiVersion: v1 kind: Service metadata: - name: defn(`OFFICE_NAME')-cameras-queue-service + name: defn(`OFFICE_NAME')-cameras-entrance-service labels: - app: defn(`OFFICE_NAME')-cameras-queue + app: defn(`OFFICE_NAME')-cameras-entrance spec: ports: loop(`CAMERAIDX',1,defn(`NCAMERAS3'),`dnl @@ -163,28 +163,28 @@ loop(`CAMERAIDX',1,defn(`NCAMERAS3'),`dnl name: `rtsp'defn(`CAMERAIDX') ')dnl selector: - app: defn(`OFFICE_NAME')-cameras-queue + app: defn(`OFFICE_NAME')-cameras-entrance --- apiVersion: apps/v1 kind: Deployment metadata: - name: defn(`OFFICE_NAME')-cameras-queue + name: defn(`OFFICE_NAME')-cameras-entrance labels: - app: defn(`OFFICE_NAME')-cameras-queue + app: defn(`OFFICE_NAME')-cameras-entrance spec: replicas: 1 selector: matchLabels: - app: defn(`OFFICE_NAME')-cameras-queue + app: defn(`OFFICE_NAME')-cameras-entrance template: metadata: labels: - app: defn(`OFFICE_NAME')-cameras-queue + app: defn(`OFFICE_NAME')-cameras-entrance spec: containers: - - name: defn(`OFFICE_NAME')-cameras-queue + - name: defn(`OFFICE_NAME')-cameras-entrance image: smtc_sensor_simulation:latest imagePullPolicy: IfNotPresent ports: @@ -194,7 +194,7 @@ loop(`CAMERAIDX',1,defn(`NCAMERAS3'),`dnl ')dnl env: - name: FILES - value: "queue.mp4$$" + value: "_entrance.mp4$$" - name: `NCAMERAS' value: "defn(`NCAMERAS3')" - name: RTSP_PORT diff --git a/deployment/kubernetes/discovery.yaml.m4 b/deployment/kubernetes/discovery.yaml.m4 index 7dfb86f8c..33d42aa8a 100644 --- a/deployment/kubernetes/discovery.yaml.m4 +++ b/deployment/kubernetes/discovery.yaml.m4 @@ -108,26 +108,26 @@ PLATFORM_NODE_SELECTOR(`Xeon')dnl apiVersion: apps/v1 kind: Deployment metadata: - name: defn(`OFFICE_NAME')-camera-discovery-queue + name: defn(`OFFICE_NAME')-camera-discovery-entrance labels: - app: defn(`OFFICE_NAME')-camera-discovery-queue + app: defn(`OFFICE_NAME')-camera-discovery-entrance spec: replicas: 1 selector: matchLabels: - app: defn(`OFFICE_NAME')-camera-discovery-queue + app: defn(`OFFICE_NAME')-camera-discovery-entrance template: metadata: labels: - app: defn(`OFFICE_NAME')-camera-discovery-queue + app: defn(`OFFICE_NAME')-camera-discovery-entrance spec: containers: - - name: defn(`OFFICE_NAME')-camera-discovery-queue + - name: defn(`OFFICE_NAME')-camera-discovery-entrance image: smtc_onvif_discovery:latest imagePullPolicy: IfNotPresent env: - name: PORT_SCAN - value: "-p T:defn(`CAMERA_RTSP_PORT')-eval(defn(`CAMERA_RTSP_PORT')+defn(`NCAMERAS3')*defn(`CAMERA_PORT_STEP')) 
defn(`OFFICE_NAME')-cameras-queue-service -Pn" + value: "-p T:defn(`CAMERA_RTSP_PORT')-eval(defn(`CAMERA_RTSP_PORT')+defn(`NCAMERAS3')*defn(`CAMERA_PORT_STEP')) defn(`OFFICE_NAME')-cameras-entrance-service -Pn" - name: SIM_PORT value: ifelse(eval(defn(`NCAMERAS3')>0),1,"loop(`CAMERAIDX',1,defn(`NCAMERAS3'),`eval(defn(`CAMERA_RTSP_PORT')+defn(`CAMERAIDX')*defn(`CAMERA_PORT_STEP')-defn(`CAMERA_PORT_STEP'))/')","0") - name: SIM_PREFIX diff --git a/deployment/kubernetes/mqtt2db.yaml.m4 b/deployment/kubernetes/mqtt2db.yaml.m4 new file mode 100644 index 000000000..c6c127cfd --- /dev/null +++ b/deployment/kubernetes/mqtt2db.yaml.m4 @@ -0,0 +1,47 @@ +include(office.m4) +include(platform.m4) + +apiVersion: apps/v1 +kind: Deployment +metadata: + name: defn(`OFFICE_NAME')-mqtt2db + labels: + app: defn(`OFFICE_NAME')-mqtt2db +spec: + replicas: 1 + selector: + matchLabels: + app: defn(`OFFICE_NAME')-mqtt2db + template: + metadata: + labels: + app: defn(`OFFICE_NAME')-mqtt2db + spec: + enableServiceLinks: false + containers: + - name: defn(`OFFICE_NAME')-mqtt2db + image: smtc_mqtt2db:latest + imagePullPolicy: IfNotPresent + env: + - name: OFFICE + value: "defn(`OFFICE_LOCATION')" + - name: DBHOST + value: "http://ifelse(eval(defn(`NOFFICES')>1),1,defn(`OFFICE_NAME')-db,db)-service:9200" + - name: MQTTHOST + value: "defn(`OFFICE_NAME')-mqtt-service" + - name: `SCENARIO' + value: "defn(`SCENARIO')" + - name: NO_PROXY + value: "*" + - name: no_proxy + value: "*" + volumeMounts: + - mountPath: /etc/localtime + name: timezone + readOnly: true + volumes: + - name: timezone + hostPath: + path: /etc/localtime + type: File +PLATFORM_NODE_SELECTOR(`Xeon')dnl diff --git a/doc/extend.md b/doc/extend.md index 7b4cf9e95..a2450e41b 100644 --- a/doc/extend.md +++ b/doc/extend.md @@ -74,7 +74,7 @@ Rebuild the sample. You are good to go. ### Stadium Scenario -The stadium scenario includes the following modes: entrance people counting and seating-area crowd counting. The following sub-sections describe extension possibilities. +The stadium scenario includes the following modes: entrance and service-point people counting and seating-area crowd counting. The following sub-sections describe extension possibilities. 
#### Extending Offices @@ -102,7 +102,7 @@ The sensor provisioning information is described in [sensor-info.json](../mainte "lat": 37.38813, # Sensor location "lon": -121.94370 # Sensor location }, - "algorithm": "people-counting", # Analytic algorithm to associate with + "algorithm": "entrance-counting", # Analytic algorithm to associate with "theta": 90.0, # The rotation angle with 0 degree facing North "simsn": "cams2o1c0" # Camera identifier },{ diff --git a/maintenance/db-init/sensor-info.json b/maintenance/db-init/sensor-info.json index 44b87bd36..23093e143 100644 --- a/maintenance/db-init/sensor-info.json +++ b/maintenance/db-init/sensor-info.json @@ -380,7 +380,7 @@ "lat": 37.38813, "lon": -121.94370 }, - "algorithm": "people-counting", + "algorithm": "entrance-counting", "theta": 90.0, "simsn": "cams2o1c0" },{ @@ -389,7 +389,7 @@ "lat": 37.40008, "lon": -121.95925 }, - "algorithm": "people-counting", + "algorithm": "entrance-counting", "theta": 180, "simsn": "cams2o1c1" },{ @@ -398,7 +398,7 @@ "lat": 37.38679, "lon": -121.98048 }, - "algorithm": "people-counting", + "algorithm": "entrance-counting", "theta": 270.0, "simsn": "cams2o1c2" },{ @@ -407,7 +407,7 @@ "lat": 37.37610, "lon": -121.96242 }, - "algorithm": "people-counting", + "algorithm": "entrance-counting", "theta": 0.0, "simsn": "cams2o1c3" },{ @@ -416,7 +416,7 @@ "lat": 37.38062, "lon": -121.95048 }, - "algorithm": "people-counting", + "algorithm": "entrance-counting", "theta": 45.0, "simsn": "cams2o1c4" },{ @@ -425,7 +425,7 @@ "lat": 37.39628, "lon": -121.95067 }, - "algorithm": "people-counting", + "algorithm": "entrance-counting", "theta": 135.0, "simsn": "cams2o1c5" },{ @@ -434,7 +434,7 @@ "lat": 37.39628, "lon": -121.97461 }, - "algorithm": "people-counting", + "algorithm": "entrance-counting", "theta": 225.0, "simsn": "cams2o1c6" },{ @@ -443,7 +443,7 @@ "lat": 37.38077, "lon": -121.97388 }, - "algorithm": "people-counting", + "algorithm": "entrance-counting", "theta": 315.0, "simsn": "cams2o1c7" },{ @@ -564,7 +564,7 @@ "lat": 37.39821, "lon": -121.96969 }, - "algorithm": "queue-counting", + "algorithm": "svcq-counting", "theta": 0.0, "simsn": "cams2o1q0" },{ @@ -573,7 +573,7 @@ "lat": 37.39804, "lon": -121.95195 }, - "algorithm": "queue-counting", + "algorithm": "svcq-counting", "theta": 0.0, "simsn": "cams2o1q1" },{ @@ -582,7 +582,7 @@ "lat": 37.39245, "lon": -121.94811 }, - "algorithm": "queue-counting", + "algorithm": "svcq-counting", "theta": 270.0, "simsn": "cams2o1q2" },{ @@ -591,7 +591,7 @@ "lat": 37.37813, "lon": -121.96965 }, - "algorithm": "queue-counting", + "algorithm": "svcq-counting", "theta": 180.0, "simsn": "cams2o1q3" },{ @@ -600,7 +600,7 @@ "lat": 37.38134, "lon": -121.97670 }, - "algorithm": "queue-counting", + "algorithm": "svcq-counting", "theta": 90.0, "simsn": "cams2o1q4" },{ @@ -609,7 +609,7 @@ "lat": 37.39528, "lon": -121.94820 }, - "algorithm": "queue-counting", + "algorithm": "svcq-counting", "theta": 270.0, "simsn": "cams2o1q5" },{ @@ -618,7 +618,7 @@ "lat": 37.37800, "lon": -121.95116 }, - "algorithm": "queue-counting", + "algorithm": "svcq-counting", "theta": 180.0, "simsn": "cams2o1q6" },{ @@ -627,7 +627,7 @@ "lat": 37.39124, "lon": -121.97741 }, - "algorithm": "queue-counting", + "algorithm": "svcq-counting", "theta": 90.0, "simsn": "cams2o1q7" }] diff --git a/sensor/simulation/download.sh b/sensor/simulation/download.sh index 620ec0286..216107a03 100755 --- a/sensor/simulation/download.sh +++ b/sensor/simulation/download.sh @@ -25,7 +25,7 @@ for scenario in ${SCENARIOS[@]}; do echo 
"Downloading..." tmp="tmp_$clip_name" wget -q -U "XXX YYY" -O "$DIR/archive/$tmp" "$url" - docker run --rm -u $(id -u):$(id -g) -v "$DIR/archive:/mnt:rw" -it ${FFMPEG_IMAGE} ffmpeg -i /mnt/$tmp -c:v libx264 -profile:v baseline -x264-params keyint=30:bframes=0 -c:a aac -ss 0 -t 00:00:10.0 /mnt/$clip_mp4 + docker run --rm -u $(id -u):$(id -g) -v "$DIR/archive:/mnt:rw" -it ${FFMPEG_IMAGE} ffmpeg -i /mnt/$tmp -vf scale=1280:720 -pix_fmt yuv420p -c:v libx264 -profile:v baseline -x264-params keyint=30:bframes=0 -c:a aac -f mp4 /mnt/$clip_mp4 rm -f "$DIR/archive/$tmp" else echo "Skipping..." diff --git a/sensor/simulation/streamlist.txt b/sensor/simulation/streamlist.txt index d3bb1934a..c65d725cf 100755 --- a/sensor/simulation/streamlist.txt +++ b/sensor/simulation/streamlist.txt @@ -2,8 +2,8 @@ https://www.pexels.com/video/1388383/download/?w=1280&h=720,1388383_traffic.mp4, https://www.pexels.com/video/854614/download/?w=1280&h=720,854614_traffic.mp4,https://www.pexels.com/photo-license https://www.pexels.com/video/1721294/download/?w=1280&h=720,1721294_traffic.mp4,https://www.pexels.com/photo-license https://www.pexels.com/video/2252223/download/?w=1280&h=720,2252223_traffic.mp4,https://www.pexels.com/photo-license -https://www.pexels.com/video/853889/download/?w=1280&h=720,853889_people.mp4,https://www.pexels.com/photo-license -https://www.pexels.com/video/992599/download/?w=1280&h=720,992599_people.mp4,https://www.pexels.com/photo-license +https://www.pexels.com/video/853889/download/?w=1280&h=720,853889_svcq.mp4,https://www.pexels.com/photo-license +https://www.pexels.com/video/992599/download/?w=1280&h=720,992599_svcq.mp4,https://www.pexels.com/photo-license https://www.pexels.com/video/1677252/download/?w=1280&h=720,1677252_crowd.mp4,https://www.pexels.com/photo-license -https://www.pexels.com/video/4122971/download/?w=1280&h=720,4122971_queue.mp4,https://www.pexels.com/photo-license -https://www.pexels.com/video/4000563/download/?w=1280&h=720,4000563_queue.mp4,https://www.pexels.com/photo-license +https://www.pexels.com/video/4122971/download/?w=1280&h=720,4122971_entrance.mp4,https://www.pexels.com/photo-license +https://www.pexels.com/video/4000563/download/?w=1280&h=720,4000563_entrance.mp4,https://www.pexels.com/photo-license