1
0
Fork 0
mirror of https://gitlab.com/Shinobi-Systems/ShinobiCE.git synced 2025-03-09 15:40:15 +00:00

Coy Cobra

This commit is contained in:
Moe 2018-09-01 13:16:57 -07:00
parent 157bf6feb0
commit c8b67c57b4
45 changed files with 3076 additions and 144 deletions

View file

@ -27,6 +27,7 @@ if [ ! -e "./super.json" ]; then
fi
echo "Shinobi - Run yum update"
sudo yum update -y
sudo yum install make -y
echo "============="
echo "Shinobi - Do you want to Install FFMPEG?"
echo "(y)es or (N)o"
@ -44,7 +45,7 @@ if [ "$ffmpeginstall" = "y" ] || [ "$ffmpeginstall" = "Y" ]; then
sudo rpm -Uvh http://li.nux.ro/download/nux/dextop/el7/x86_64/nux-dextop-release-0-1.el7.nux.noarch.rpm
sudo yum install ffmpeg ffmpeg-devel -y
else
sudo npm install ffmpeg-static
sudo npm install ffmpeg-static@2.2.1
fi
fi
echo "Shinobi - Do you want to Install Node.js?"
@ -132,7 +133,9 @@ else
fi
echo "============="
echo "Shinobi - Install NPM Libraries"
sudo npm install
sudo npm i npm -g
sudo npm install --unsafe-perm
sudo npm audit fix --unsafe-perm
echo "============="
echo "Shinobi - Install PM2"
sudo npm install pm2 -g

29
INSTALL/cuda.sh Normal file
View file

@ -0,0 +1,29 @@
#!/bin/sh
# Installs the NVIDIA CUDA 9.2 toolkit and cuDNN 7.2 for Ubuntu 17.10 from
# Shinobi's CDN mirrors, then offers an optional reboot.
# NOTE(review): packages are pinned to ubuntu1710/CUDA 9.2 -- confirm the
# target machine matches before running.
echo "------------------------------------------"
echo "-- Installing CUDA Toolkit and CUDA DNN --"
echo "------------------------------------------"
# Install CUDA Drivers and Toolkit
wget https://cdn.shinobi.video/installers/cuda-repo-ubuntu1710_9.2.148-1_amd64.deb -O cuda.deb
sudo dpkg -i cuda.deb
# Trust NVIDIA's repository signing key so apt will accept the CUDA packages.
sudo apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1710/x86_64/7fa2af80.pub
sudo apt-get update -y
# --force-overwrite: CUDA packages are known to ship files that collide with
# already-installed driver packages; overwrite instead of aborting.
sudo apt-get -o Dpkg::Options::="--force-overwrite" install cuda -y
sudo apt-get -o Dpkg::Options::="--force-overwrite" install --fix-broken -y
# Install CUDA DNN (runtime library plus the dev headers)
wget https://cdn.shinobi.video/installers/libcudnn7_7.2.1.38-1+cuda9.2_amd64.deb -O cuda-dnn.deb
sudo dpkg -i cuda-dnn.deb
wget https://cdn.shinobi.video/installers/libcudnn7-dev_7.2.1.38-1+cuda9.2_amd64.deb -O cuda-dnn-dev.deb
sudo dpkg -i cuda-dnn-dev.deb
echo "-- Cleaning Up --"
# Cleanup: remove the downloaded .deb archives
sudo rm cuda.deb
sudo rm cuda-dnn.deb
sudo rm cuda-dnn-dev.deb
echo "------------------------------"
echo "Reboot is required. Do it now?"
echo "------------------------------"
echo "(y)es or (N)o. Default is No."
# Any answer other than y/Y (including just pressing Enter) skips the reboot.
read rebootTheMachineHomie
if [ "$rebootTheMachineHomie" = "y" ] || [ "$rebootTheMachineHomie" = "Y" ]; then
sudo reboot
fi

View file

@ -41,7 +41,9 @@ if ( $mysqlagreeData == "y" ) then
endif
echo "============="
echo "Shinobi - Install NPM Libraries"
npm install
npm i npm -g
npm install --unsafe-perm
npm audit fix --unsafe-perm
echo "============="
echo "Shinobi - Install PM2"
npm install pm2 -g

View file

@ -49,7 +49,9 @@ if [ "$mysqlagreeData" = "y" ]; then
fi
echo "============="
echo "Shinobi - Install NPM Libraries"
sudo npm install
sudo npm i npm -g
sudo npm install --unsafe-perm
sudo npm audit fix --unsafe-perm
echo "============="
echo "Shinobi - Install PM2"
sudo npm install pm2 -g

View file

@ -1,4 +1,4 @@
#!/bin/bash
#!/bin/sh
# OpenCV CUDA
if [ $(dpkg-query -W -f='${Status}' git 2>/dev/null | grep -c "ok installed") -eq 0 ]; then
echo "Installing Git..."

View file

@ -3,7 +3,6 @@ if [ -e "INSTALL/installed.txt" ]; then
echo "Starting Shinobi"
pm2 start camera.js
pm2 start cron.js
pm2 logs
fi
if [ ! -e "INSTALL/installed.txt" ]; then
chmod +x INSTALL/now.sh&&INSTALL/now.sh

View file

@ -147,7 +147,9 @@ fi
# Install NPM Libraries
echo "============="
echo "Shinobi - Install NPM Libraries"
npm install
sudo npm i npm -g
sudo npm install --unsafe-perm
sudo npm audit fix --unsafe-perm
echo "============="
#Install PM2

View file

@ -31,6 +31,7 @@ if [ "$nodejsinstall" = "y" ] || [ "$nodejsinstall" = "Y" ]; then
./setup_8.x
sudo apt install nodejs -y
fi
sudo apt install make -y
echo "============="
echo "Shinobi - Do you want to Install FFMPEG?"
echo "(y)es or (N)o"
@ -62,7 +63,7 @@ if [ "$ffmpeginstall" = "y" ] || [ "$ffmpeginstall" = "Y" ]; then
echo "============="
fi
else
sudo npm install ffmpeg-static
sudo npm install ffmpeg-static@2.2.1
fi
fi
echo "============="
@ -146,7 +147,9 @@ else
fi
echo "============="
echo "Shinobi - Install NPM Libraries"
sudo npm install
sudo npm i npm -g
sudo npm install --unsafe-perm
sudo npm audit fix --unsafe-perm
echo "============="
echo "Shinobi - Install PM2"
sudo npm install pm2 -g

211
camera.js
View file

@ -816,8 +816,6 @@ s.init=function(x,e,k,fn){
s.group[e.ke].init={}
}
if(!s.group[e.ke].fileBin){s.group[e.ke].fileBin={}};
if(!s.group[e.ke].sizeChangeQueue){s.group[e.ke].sizeChangeQueue=[]}
if(!s.group[e.ke].sizePurgeQueue){s.group[e.ke].sizePurgeQueue=[]}
if(!s.group[e.ke].users){s.group[e.ke].users={}}
if(!s.group[e.ke].dashcamUsers){s.group[e.ke].dashcamUsers={}}
if(!e.limit||e.limit===''){e.limit=10000}else{e.limit=parseFloat(e.limit)}
@ -892,6 +890,65 @@ s.init=function(x,e,k,fn){
})
s.group[e.ke].discordBot.login(ar.discordbot_token)
}
//disk Used Emitter
if(!s.group[e.ke].diskUsedEmitter){
s.group[e.ke].diskUsedEmitter = new events.EventEmitter()
s.group[e.ke].diskUsedEmitter.on('data',function(currentChange){
//validate current values
if(!s.group[e.ke].usedSpace){
s.group[e.ke].usedSpace=0
}else{
s.group[e.ke].usedSpace=parseFloat(s.group[e.ke].usedSpace)
}
if(s.group[e.ke].usedSpace<0||isNaN(s.group[e.ke].usedSpace)){
s.group[e.ke].usedSpace=0
}
//change global size value
s.group[e.ke].usedSpace=s.group[e.ke].usedSpace+currentChange
//remove value just used from queue
s.init('diskUsedEmit',e)
})
s.group[e.ke].diskPurgedEmitter = new events.EventEmitter()
s.group[e.ke].diskPurgedEmitter.on('data',function(currentPurge){
s.init('diskUsedSet',e,currentPurge.filesizeMB)
if(config.cron.deleteOverMax===true){
//set queue processor
var finish=function(){
s.init('diskUsedEmit',e)
}
var deleteVideos = function(){
//run purge command
if(s.group[e.ke].usedSpace>(s.group[e.ke].sizeLimit*config.cron.deleteOverMaxOffset)){
s.sqlQuery('SELECT * FROM Videos WHERE status != 0 AND details NOT LIKE \'%"archived":"1"%\' AND ke=? ORDER BY `time` ASC LIMIT 2',[e.ke],function(err,evs){
k.del=[];k.ar=[e.ke];
if(!evs)return console.log(err)
evs.forEach(function(ev){
ev.dir=s.video('getDir',ev)+s.formattedTime(ev.time)+'.'+ev.ext;
k.del.push('(mid=? AND `time`=?)');
k.ar.push(ev.mid),k.ar.push(ev.time);
s.file('delete',ev.dir);
s.init('diskUsedSet',e,-(ev.size/1000000))
s.tx({f:'video_delete',ff:'over_max',filename:s.formattedTime(ev.time)+'.'+ev.ext,mid:ev.mid,ke:ev.ke,time:ev.time,end:s.formattedTime(new Date,'YYYY-MM-DD HH:mm:ss')},'GRP_'+e.ke);
});
if(k.del.length>0){
k.qu=k.del.join(' OR ');
s.sqlQuery('DELETE FROM Videos WHERE ke =? AND ('+k.qu+')',k.ar,function(){
deleteVideos()
})
}else{
finish()
}
})
}else{
finish()
}
}
deleteVideos()
}else{
s.init('diskUsedEmit',e)
}
})
}
Object.keys(ar).forEach(function(v){
s.group[e.ke].init[v]=ar[v]
})
@ -943,37 +1000,7 @@ s.init=function(x,e,k,fn){
break;
case'diskUsedSet':
//`k` will be used as the value to add or substract
s.group[e.ke].sizeChangeQueue.push(k)
if(s.group[e.ke].sizeChanging!==true){
//lock this function
s.group[e.ke].sizeChanging=true
//validate current values
if(!s.group[e.ke].usedSpace){
s.group[e.ke].usedSpace=0
}else{
s.group[e.ke].usedSpace=parseFloat(s.group[e.ke].usedSpace)
}
if(s.group[e.ke].usedSpace<0||isNaN(s.group[e.ke].usedSpace)){
s.group[e.ke].usedSpace=0
}
//set queue processor
var checkQueue=function(){
//get first in queue
var currentChange = s.group[e.ke].sizeChangeQueue[0]
//change global size value
s.group[e.ke].usedSpace=s.group[e.ke].usedSpace+currentChange
//remove value just used from queue
s.group[e.ke].sizeChangeQueue = s.group[e.ke].sizeChangeQueue.splice(1,s.group[e.ke].sizeChangeQueue.length+10)
//do next one
if(s.group[e.ke].sizeChangeQueue.length>0){
checkQueue()
}else{
s.group[e.ke].sizeChanging=false
s.init('diskUsedEmit',e)
}
}
checkQueue()
}
s.group[e.ke].diskUsedEmitter.emit('data',k)
break;
case'monitorStatus':
// s.discordMsg({
@ -1243,64 +1270,9 @@ s.video=function(x,e,k){
v.details = details
})
break;
case'diskUseUpdate':
case'diskUseUpdate'://sizePurgeQueue
if(s.group[e.ke].init){
s.init('diskUsedSet',e,e.filesizeMB)
if(config.cron.deleteOverMax===true){
//check space
s.group[e.ke].sizePurgeQueue.push(1)
if(s.group[e.ke].sizePurging!==true){
//lock this function
s.group[e.ke].sizePurging=true
//set queue processor
var finish=function(){
//remove value just used from queue
s.group[e.ke].sizePurgeQueue = s.group[e.ke].sizePurgeQueue.splice(1,s.group[e.ke].sizePurgeQueue.length+10)
//do next one
if(s.group[e.ke].sizePurgeQueue.length>0){
checkQueue()
}else{
s.group[e.ke].sizePurging=false
s.init('diskUsedEmit',e)
}
}
var checkQueue=function(){
//get first in queue
var currentPurge = s.group[e.ke].sizePurgeQueue[0]
var deleteVideos = function(){
//run purge command
if(s.group[e.ke].usedSpace>(s.group[e.ke].sizeLimit*config.cron.deleteOverMaxOffset)){
s.sqlQuery('SELECT * FROM Videos WHERE status != 0 AND details NOT LIKE \'%"archived":"1"%\' AND ke=? ORDER BY `time` ASC LIMIT 2',[e.ke],function(err,evs){
k.del=[];k.ar=[e.ke];
if(!evs)return console.log(err)
evs.forEach(function(ev){
ev.dir=s.video('getDir',ev)+s.formattedTime(ev.time)+'.'+ev.ext;
k.del.push('(mid=? AND `time`=?)');
k.ar.push(ev.mid),k.ar.push(ev.time);
s.file('delete',ev.dir);
s.init('diskUsedSet',e,-(ev.size/1000000))
s.tx({f:'video_delete',ff:'over_max',filename:s.formattedTime(ev.time)+'.'+ev.ext,mid:ev.mid,ke:ev.ke,time:ev.time,end:s.formattedTime(new Date,'YYYY-MM-DD HH:mm:ss')},'GRP_'+e.ke);
});
if(k.del.length>0){
k.qu=k.del.join(' OR ');
s.sqlQuery('DELETE FROM Videos WHERE ke =? AND ('+k.qu+')',k.ar,function(){
deleteVideos()
})
}else{
finish()
}
})
}else{
finish()
}
}
deleteVideos()
}
checkQueue()
}
}else{
s.init('diskUsedEmit',e)
}
s.group[e.ke].diskPurgedEmitter.emit('data',k)
}
break;
case'insertCompleted':
@ -1325,16 +1297,16 @@ s.video=function(x,e,k){
if(k.fileExists===true){
//close video row
k.stat = fs.statSync(k.dir+k.file)
e.filesize = k.stat.size
e.filesizeMB = parseFloat((e.filesize/1000000).toFixed(2))
k.filesize = k.stat.size
k.filesizeMB = parseFloat((k.filesize/1000000).toFixed(2))
e.startTime = new Date(s.nameToTime(k.file))
e.endTime = new Date(k.stat.mtime)
k.startTime = new Date(s.nameToTime(k.file))
k.endTime = new Date(k.stat.mtime)
if(config.useUTC === true){
fs.rename(k.dir+k.file, k.dir+s.formattedTime(e.startTime)+'.'+e.ext, (err) => {
fs.rename(k.dir+k.file, k.dir+s.formattedTime(k.startTime)+'.'+e.ext, (err) => {
if (err) return console.error(err);
});
k.filename = s.formattedTime(e.startTime)+'.'+e.ext
k.filename = s.formattedTime(k.startTime)+'.'+e.ext
}else{
k.filename = k.file
}
@ -1351,8 +1323,8 @@ s.video=function(x,e,k){
filename:k.filename,
d:s.init('noReference',e),
filesize:e.filesize,
time:s.timeObject(e.startTime).format(),
end:s.timeObject(e.endTime).format()
time:s.timeObject(k.startTime).format(),
end:s.timeObject(k.endTime).format()
})
})
.on('close',function(){
@ -1364,9 +1336,9 @@ s.video=function(x,e,k){
ke:e.ke,
filename:k.filename,
d:s.init('noReference',e),
filesize:e.filesize,
time:s.timeObject(e.startTime).format(),
end:s.timeObject(e.endTime).format()
filesize:k.filesize,
time:s.timeObject(k.startTime).format(),
end:s.timeObject(k.endTime).format()
})
});
}else{
@ -1378,9 +1350,9 @@ s.video=function(x,e,k){
filename:k.filename,
mid:e.mid,
ke:e.ke,
time:e.startTime,
size:e.filesize,
end:e.endTime
time:k.startTime,
size:k.filesize,
end:k.endTime
},'GRP_'+e.ke,'video_view');
}
//cloud auto savers
@ -1431,11 +1403,11 @@ s.video=function(x,e,k){
var save = [
e.mid,
e.ke,
e.startTime,
k.startTime,
0,
'{}',
e.filesize,
e.endTime,
k.filesize,
k.endTime,
data.Location
]
s.sqlQuery('INSERT INTO `Cloud Videos` (mid,ke,time,status,details,size,end,href) VALUES (?,?,?,?,?,?,?,?)',save)
@ -1450,12 +1422,12 @@ s.video=function(x,e,k){
var save = [
e.mid,
e.ke,
e.startTime,
k.startTime,
e.ext,
1,
s.s(k.details),
e.filesize,
e.endTime,
k.filesize,
k.endTime,
]
s.sqlQuery('INSERT INTO Videos (mid,ke,time,ext,status,details,size,end) VALUES (?,?,?,?,?,?,?,?)',save)
//send new diskUsage values
@ -2113,7 +2085,6 @@ s.ffmpeg = function(e){
if(e.details.cust_stream&&e.details.cust_stream!==''){x.cust_stream=' '+e.details.cust_stream}else{x.cust_stream=''}
//stream - preset
if(e.details.stream_type !== 'h265' && e.details.preset_stream && e.details.preset_stream !== ''){x.preset_stream=' -preset '+e.details.preset_stream;}else{x.preset_stream=''}
//stream - quality
//hardware acceleration
if(e.details.accelerator && e.details.accelerator==='1' && e.isStreamer === false){
if(e.details.hwaccel&&e.details.hwaccel!==''){
@ -2283,6 +2254,9 @@ s.ffmpeg = function(e){
}
if(!e.details.detector_buffer_acodec||e.details.detector_buffer_acodec===''||e.details.detector_buffer_acodec==='auto'){
switch(e.type){
case'mjpeg':case'jpeg':case'socket':
e.details.detector_buffer_acodec = 'no'
break;
case'h264':case'hls':case'mp4':
e.details.detector_buffer_acodec = 'copy'
break;
@ -3693,18 +3667,6 @@ s.camera=function(x,e,cn,tx){
s.group[e.ke].mon[e.id].emitter.emit('data',d);
}
break;
// case'pam':
// s.group[e.ke].mon[e.id].p2pStream = new P2P();
// s.group[e.ke].mon[e.id].spawn.stdout.pipe(s.group[e.ke].mon[e.id].p2pStream)
// s.group[e.ke].mon[e.id].p2pStream.on('pam',function(d){
// resetStreamCheck()
// s.tx({f:'pam_frame',ke:e.ke,id:e.id,imageData:{
// data : d.pixels,
// height : d.height,
// width : d.width
// }},'MON_STREAM_'+e.id);
// })
// break;
case'b64':case undefined:case null:case'':
var buffer
e.frame_to_stream=function(d){
@ -3978,6 +3940,7 @@ s.pluginInitiatorSuccess=function(mode,d,cn){
s.api[d.plug]={pluginEngine:d.plug,permissions:{},details:{},ip:'0.0.0.0'};
}
s.pluginInitiatorFail=function(mode,d,cn){
if(s.connectedPlugins[d.plug])s.connectedPlugins[d.plug].plugged=false
if(mode==='client'){
//is in client mode (camera.js is client)
cn.disconnect()
@ -4050,7 +4013,7 @@ var tx;
}
}
})
//unique Base64 socket stream
//unique h265 socket stream
cn.on('h265',function(d){
if(!s.group[d.ke]||!s.group[d.ke].mon||!s.group[d.ke].mon[d.id]){
cn.disconnect();return;

View file

@ -20,10 +20,5 @@
"cron":{
"key":"change_this_to_something_very_random__just_anything_other_than_this"
},
"pluginKeys":{
"Motion":"change_this_to_something_very_random____make_sure_to_match__/plugins/motion/conf.json",
"OpenCV":"change_this_to_something_very_random____make_sure_to_match__/plugins/opencv/conf.json",
"OpenALPR":"SomeOpenALPRkeySoPeopleDontMessWithYourShinobi",
"PythonYolo":"SomeOpenALPRkeySoPeopleDontMessWithYourShinobi"
}
"pluginKeys":{}
}

2
plugins/dnnCoco/.gitignore vendored Normal file
View file

@ -0,0 +1,2 @@
conf.json
data

View file

@ -0,0 +1,19 @@
#!/bin/bash
# Fetches the pre-trained SSD-COCO model data for the dnnCoco plugin and
# installs its Node.js dependencies, optionally letting the opencv4nodejs
# package download and build its own copy of OpenCV.
mkdir data
chmod -R 777 data
wget https://cdn.shinobi.video/weights/dnnCocoData.zip -O dnnCocoData.zip
unzip dnnCocoData.zip -d data
# NOTE(review): dpkg-query takes a package name; "opencv_version" looks like
# the OpenCV CLI binary rather than a package -- verify this check matches an
# actually-installed package on the target distro.
if [ $(dpkg-query -W -f='${Status}' opencv_version 2>/dev/null | grep -c "ok installed") -eq 0 ]; then
	# Bug fix: the original wrapped opencv4nodejs in backticks inside double
	# quotes, which made the shell try to EXECUTE it as a command. Use plain
	# quotes in the message instead. Also fixes the "ypu" typo.
	echo "Shinobi - Do you want to let the 'opencv4nodejs' npm package install OpenCV? "
	echo "Only do this if you do not have OpenCV already or will not use a GPU (Hardware Acceleration)."
	echo "(y)es or (N)o"
	read nodejsinstall
	if [ "$nodejsinstall" = "y" ] || [ "$nodejsinstall" = "Y" ]; then
		# 0 = allow opencv4nodejs to download and compile its own OpenCV build
		export OPENCV4NODEJS_DISABLE_AUTOBUILD=0
	else
		export OPENCV4NODEJS_DISABLE_AUTOBUILD=1
	fi
else
	# OpenCV already present: never auto-build a second copy.
	export OPENCV4NODEJS_DISABLE_AUTOBUILD=1
fi
npm install opencv4nodejs moment express canvas@1.6 --unsafe-perm

View file

@ -0,0 +1,9 @@
{
"plug":"Coco",
"host":"localhost",
"port":8080,
"hostPort":8082,
"key":"change_this_to_something_very_random____make_sure_to_match__/plugins/opencv/conf.json",
"mode":"client",
"type":"detector"
}

View file

@ -0,0 +1,83 @@
// COCO class labels for the SSD 300x300 Caffe model, indexed by the numeric
// class id the network emits in its output blob. Index 0 is the reserved
// 'background' class; the order must not be changed or detections will be
// tagged with the wrong label.
module.exports = [
'background',
'person',
'bicycle',
'car',
'motorcycle',
'airplane',
'bus',
'train',
'truck',
'boat',
'traffic light',
'fire hydrant',
'stop sign',
'parking meter',
'bench',
'bird',
'cat',
'dog',
'horse',
'sheep',
'cow',
'elephant',
'bear',
'zebra',
'giraffe',
'backpack',
'umbrella',
'handbag',
'tie',
'suitcase',
'frisbee',
'skis',
'snowboard',
'sports ball',
'kite',
'baseball bat',
'baseball glove',
'skateboard',
'surfboard',
'tennis racket',
'bottle',
'wine glass',
'cup',
'fork',
'knife',
'spoon',
'bowl',
'banana',
'apple',
'sandwich',
'orange',
'broccoli',
'carrot',
'hot dog',
'pizza',
'donut',
'cake',
'chair',
'couch',
'potted plant',
'bed',
'dining table',
'toilet',
'tv',
'laptop',
'mouse',
'remote',
'keyboard',
'cell phone',
'microwave',
'oven',
'toaster',
'sink',
'refrigerator',
'book',
'clock',
'vase',
'scissors',
'teddy bear',
'hair drier',
'toothbrush'
];

View file

@ -0,0 +1,94 @@
; Specify the path to the runtime data directory
runtime_dir = ${CMAKE_INSTALL_PREFIX}/share/openalpr/runtime_data
ocr_img_size_percent = 1.33333333
state_id_img_size_percent = 2.0
; Calibrating your camera improves detection accuracy in cases where vehicle plates are captured at a steep angle
; Use the openalpr-utils-calibrate utility to calibrate your fixed camera to adjust for an angle
; Once done, update the prewarp config with the values obtained from the tool
prewarp =
; detection will ignore plates that are too large. This is a good efficiency technique to use if the
; plates are going to be a fixed distance away from the camera (e.g., you will never see plates that fill
; up the entire image
max_plate_width_percent = 100
max_plate_height_percent = 100
; detection_iteration_increase is the percentage that the LBP frame increases each iteration.
; It must be greater than 1.0. A value of 1.01 means increase by 1%, 1.10 increases it by 10% each time.
; So a 1% increase would be ~10x slower than 10% to process, but it has a higher chance of landing
; directly on the plate and getting a strong detection
detection_iteration_increase = 1.1
; The minimum detection strength determines how sure the detection algorithm must be before signaling that
; a plate region exists. Technically this corresponds to LBP nearest neighbors (e.g., how many detections
; are clustered around the same area). For example, 2 = very lenient, 9 = very strict.
detection_strictness = 3
; The detection doesn't necessarily need an extremely high resolution image in order to detect plates
; Using a smaller input image should still find the plates and will do it faster
; Tweaking the max_detection_input values will resize the input image if it is larger than these sizes
; max_detection_input_width/height are specified in pixels
max_detection_input_width = 1280
max_detection_input_height = 720
; detector is the technique used to find license plate regions in an image. Value can be set to
; lbpcpu - default LBP-based detector uses the system CPU
; lbpgpu - LBP-based detector that uses Nvidia GPU to increase recognition speed.
; lbpopencl - LBP-based detector that uses OpenCL GPU to increase recognition speed. Requires OpenCV 3.0
; morphcpu - Experimental detector that detects white rectangles in an image. Does not require training.
detector = lbpgpu
; If set to true, all results must match a postprocess text pattern if a pattern is available.
; If not, the result is disqualified.
must_match_pattern = 0
; Bypasses plate detection. If this is set to 1, the library assumes that each region provided is a likely plate area.
skip_detection = 0
; Specifies the full path to an image file that constrains the detection area. Only the plate regions allowed through the mask
; will be analyzed. The mask image must match the resolution of your image to be analyzed. The mask is black and white.
; Black areas will be ignored, white areas will be searched. An empty value means no mask (scan the entire image)
detection_mask_image =
; OpenALPR can scan the same image multiple times with different randomization. Setting this to a value larger than
; 1 may increase accuracy, but will increase processing time linearly (e.g., analysis_count = 3 is 3x slower)
analysis_count = 1
; OpenALPR detects high-contrast plate crops and uses an alternative edge detection technique. Setting this to 0.0
; would classify ALL images as high-contrast, setting it to 1.0 would classify no images as high-contrast.
contrast_detection_threshold = 0.3
max_plate_angle_degrees = 15
ocr_min_font_point = 6
; Minimum OCR confidence percent to consider.
postprocess_min_confidence = 65
; Any OCR character lower than this will also add an equally likely
; chance that the character is incorrect and will be skipped. Value is a confidence percent
postprocess_confidence_skip_level = 80
debug_general = 0
debug_timing = 0
debug_detector = 0
debug_prewarp = 0
debug_state_id = 0
debug_plate_lines = 0
debug_plate_corners = 0
debug_char_segment = 0
debug_char_analysis = 0
debug_color_filter = 0
debug_ocr = 0
debug_postprocess = 0
debug_show_images = 0
debug_pause_on_frame = 0

View file

@ -0,0 +1,525 @@
//
// Shinobi - OpenCV Plugin
// Copyright (C) 2016-2025 Moe Alam, moeiscool
//
// # Donate
//
// If you like what I am doing here and want me to continue please consider donating :)
// PayPal : paypal@m03.ca
//
process.on('uncaughtException', function (err) {
console.error('uncaughtException',err);
});
var fs=require('fs');
var cv=require('opencv4nodejs');
var exec = require('child_process').exec;
var moment = require('moment');
var Canvas = require('canvas');
var express = require('express');
const path = require('path');
var http = require('http'),
app = express(),
server = http.createServer(app);
var config=require('./conf.json');
if(!config.port){config.port=8080}
if(!config.hostPort){config.hostPort=8082}
if(config.systemLog===undefined){config.systemLog=true}
if(config.cascadesDir===undefined){config.cascadesDir=__dirname+'/cascades/'}
if(config.alprConfig===undefined){config.alprConfig=__dirname+'/openalpr.conf'}
const classNames = require(__dirname+'/dnnCocoClassNames.js');
// Convert the raw SSD output matrix into plain detection records of the
// shape { classLabel, confidence, rect }, with the normalized (0..1)
// corner coordinates scaled to the source image's pixel dimensions.
const extractResults = function (outputBlob, imgDimensions) {
	const detections = [];
	for (let row = 0; row < outputBlob.rows; row += 1) {
		const classLabel = outputBlob.at(row, 1);
		const confidence = outputBlob.at(row, 2);
		// Columns 3..6 hold the normalized box corners; scale to pixels.
		const bottomLeft = new cv.Point(
			outputBlob.at(row, 3) * imgDimensions.cols,
			outputBlob.at(row, 6) * imgDimensions.rows
		);
		const topRight = new cv.Point(
			outputBlob.at(row, 5) * imgDimensions.cols,
			outputBlob.at(row, 4) * imgDimensions.rows
		);
		const rect = new cv.Rect(
			bottomLeft.x,
			topRight.y,
			topRight.x - bottomLeft.x,
			bottomLeft.y - topRight.y
		);
		detections.push({
			classLabel,
			confidence,
			rect
		});
	}
	return detections;
};
// Path where INSTALL.sh unzipped the SSD-COCO Caffe model data.
const ssdcocoModelPath = __dirname+'/data';
const prototxt = path.resolve(ssdcocoModelPath, 'deploy.prototxt');
const modelFile = path.resolve(ssdcocoModelPath, 'VGG_coco_SSD_300x300_iter_400000.caffemodel');
// Fail fast at startup when the model files are missing -- the plugin is
// useless without them and the download URL tells the operator how to fix it.
if (!fs.existsSync(prototxt) || !fs.existsSync(modelFile)) {
console.log('could not find ssdcoco model');
console.log('download the model from: https://cdn.shinobi.video/weights/dnnCocoData.zip');
throw new Error('exiting: could not find ssdcoco model');
}
// initialize the SSD-COCO network from the prototxt and caffemodel
const net = cv.readNetFromCaffe(prototxt, modelFile);
// Run one decoded frame through the SSD-COCO network and return the
// detections as flat objects: the rect fields merged with the prediction's
// { confidence, tag } where tag is the human-readable COCO class name.
function classifyImg(img) {
	// The SSD-COCO caffemodel is trained on 300x300 inputs.
	const resized = img.resize(300, 300);
	// The network consumes blobs, not raw Mats.
	net.setInput(cv.blobFromImage(resized));
	// forward() returns a 1x1xNxM Mat; flatten it to the NxM result matrix.
	const rawOutput = net.forward();
	const resultMat = rawOutput.flattenFloat(rawOutput.sizes[2], rawOutput.sizes[3]);
	const detections = extractResults(resultMat, img);
	return detections.map(function (det) {
		const record = Object.assign({}, det.rect);
		record.confidence = det.confidence;
		record.tag = classNames[det.classLabel];
		return record;
	});
}
// Build a per-prediction "draw" helper. Despite the name, the returned
// function only console.logs the matching predictions; drawImg, getColor
// and thickness are currently unused.
// NOTE(review): it filters on p.classLabel, but classifyImg() returns
// records carrying `tag` (not classLabel), so the filter would never match
// for those -- confirm whether this helper is dead code or fed from a
// different prediction shape.
const makeDrawClassDetections = predictions => (drawImg, className, getColor, thickness = 2) => {
	predictions
		.filter(p => classNames[p.classLabel] === className)
		.forEach(p => console.log(p));
};
// Global plugin state container: per-group detection state, working
// directories, and platform flags.
// NOTE(review): assigned without var/let/const, so `s` becomes an implicit
// global -- apparently intentional, since every helper below attaches to it.
s={
	group:{},
	dir:{
		cascades : config.cascadesDir
	},
	isWin:(process.platform==='win32'),
	foundCascades : {
	}
}
//default stream folder check
if(!config.streamDir){
	if(s.isWin===false){
		// prefer shared memory on *nix so temp frames never touch disk
		config.streamDir='/dev/shm'
	}else{
		config.streamDir=config.windowsTempDir
	}
	if(!fs.existsSync(config.streamDir)){
		// fall back to a plugin-local folder when the preferred path is absent
		config.streamDir=__dirname+'/streams/'
	}else{
		config.streamDir+='/streams/'
	}
}
s.dir.streams=config.streamDir;
// ensure the streams dir exists
if(!fs.existsSync(s.dir.streams)){
	fs.mkdirSync(s.dir.streams);
}
// ensure the cascades dir exists
if(!fs.existsSync(s.dir.cascades)){
	fs.mkdirSync(s.dir.cascades);
}
// Generate a random alphanumeric id of length `x` (defaults to 10).
// Not cryptographically secure -- used only for throwaway temp-file names.
s.gid = function (x) {
	const length = x || 10;
	const alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
	let id = "";
	for (let i = 0; i < length; i += 1) {
		id += alphabet.charAt(Math.floor(Math.random() * alphabet.length));
	}
	return id;
};
// Scan the cascades directory for Haar cascade XML files, cache the list
// (file names minus the .xml extension) on s.cascadesInDir, and pass it to
// `callback`. The callback always runs, receiving [] when nothing is found.
s.findCascades=function(callback){
	var tmp={};
	tmp.foundCascades=[];
	fs.readdir(s.dir.cascades,function(err,files){
		if(err){
			// Bug fix: the readdir error used to be ignored, so an unreadable
			// directory crashed on `files.forEach`. Log and report an empty list.
			s.systemLog(err)
		}else{
			files.forEach(function(cascade){
				if(cascade.indexOf('.xml')>-1){
					tmp.foundCascades.push(cascade.replace('.xml',''))
				}
			})
		}
		s.cascadesInDir=tmp.foundCascades;
		callback(tmp.foundCascades)
	})
}
s.findCascades(function(){
//get cascades
})
// Run OpenALPR over a jpeg frame `buffer` for monitor `d`, and emit a
// 'trigger' event through `tx` carrying the plate matrices when any plate
// is found. The frame is written to a temp file because the alpr CLI reads
// from disk; the file is removed after the scan.
s.detectLicensePlate=function(buffer,d,tx){
	// Default to US plate templates when no country is configured.
	if(!d.mon.detector_lisence_plate_country||d.mon.detector_lisence_plate_country===''){
		d.mon.detector_lisence_plate_country='us'
	}
	d.tmpFile=s.gid(5)+'.jpg'
	if(!fs.existsSync(s.dir.streams)){
		fs.mkdirSync(s.dir.streams);
	}
	d.dir=s.dir.streams+d.ke+'/'
	if(!fs.existsSync(d.dir)){
		fs.mkdirSync(d.dir);
	}
	d.dir=s.dir.streams+d.ke+'/'+d.id+'/'
	if(!fs.existsSync(d.dir)){
		fs.mkdirSync(d.dir);
	}
	fs.writeFile(d.dir+d.tmpFile,buffer,function(err){
		if(err) return s.systemLog(err);
		// NOTE(review): the country code comes from monitor config and is
		// interpolated into a shell command -- confirm it is validated upstream.
		exec('alpr -j --config '+config.alprConfig+' -c '+d.mon.detector_lisence_plate_country+' '+d.dir+d.tmpFile,{encoding:'utf8'},(err, scan, stderr) => {
			if(err){
				s.systemLog(err);
			}else{
				try{
					// alpr may prefix its JSON with a CUDA loader notice; strip it.
					scan=JSON.parse(scan.replace('--(!)Loaded CUDA classifier','').trim())
				}catch(err){
					if(!scan||!scan.results){
						// NOTE(review): this early return skips the temp-file
						// cleanup below, matching the original flow -- consider
						// moving cleanup ahead of parsing.
						return s.systemLog(scan,err);
					}
				}
				// Robustness fix: guard `results` so malformed alpr output
				// (parsed JSON without a results array) cannot throw here.
				if(scan.results&&scan.results.length>0){
					scan.plates=[]
					scan.mats=[]
					scan.results.forEach(function(v){
						v.candidates.forEach(function(g,n){
							if(v.candidates[n].matches_template)
								delete(v.candidates[n].matches_template)
						})
						scan.plates.push({coordinates:v.coordinates,candidates:v.candidates,confidence:v.confidence,plate:v.plate})
						// Plate corners can be rotated; derive width/height from
						// the euclidean distance between adjacent corners.
						var width = Math.sqrt( Math.pow(v.coordinates[1].x - v.coordinates[0].x, 2) + Math.pow(v.coordinates[1].y - v.coordinates[0].y, 2));
						var height = Math.sqrt( Math.pow(v.coordinates[2].x - v.coordinates[1].x, 2) + Math.pow(v.coordinates[2].y - v.coordinates[1].y, 2))
						scan.mats.push({
							x:v.coordinates[0].x,
							y:v.coordinates[0].y,
							width:width,
							height:height,
							tag:v.plate
						})
					})
					tx({f:'trigger',id:d.id,ke:d.ke,details:{split:true,plug:config.plug,name:'licensePlate',reason:'object',matrices:scan.mats,imgHeight:d.mon.detector_scale_y,imgWidth:d.mon.detector_scale_x,frame:d.base64}})
				}
			}
			// Bug fix: remove the temp frame directly instead of shelling out to
			// `rm -rf`, which spawned a shell per frame and would have been unsafe
			// if a path ever contained shell metacharacters.
			fs.unlink(d.dir+d.tmpFile,function(){})
		})
	})
}
// Detect objects in a jpeg frame `buffer` for monitor `d`: optionally runs
// the license-plate detector first, then classifies the frame with the
// SSD-COCO network and emits a 'trigger' when any prediction clears the
// confidence floor.
s.detectObject=function(buffer,d,tx){
	// run the license-plate detector as well when the monitor asks for it
	if(d.mon.detector_lisence_plate==="1"){
		s.detectLicensePlate(buffer,d,tx)
	}
	cv.imdecodeAsync(buffer,(err,im) => {
		if(err){
			console.log(err)
			return
		}
		// Bail out hard if the local OpenCV build lacks the dnn module.
		if (!cv.xmodules.dnn) {
			throw new Error('exiting: opencv4nodejs compiled without dnn module');
		}
		// Only report predictions above a fixed 50% confidence floor.
		const minConfidence = 0.5;
		const predictions = classifyImg(im).filter(res => res.confidence > minConfidence);
		// console.log(predictions)
		if(predictions.length > 0) {
			// NOTE(review): this sends through s.cx while the plate detector
			// path uses the `tx` callback -- confirm both routes are intended.
			s.cx({
				f:'trigger',
				id:d.id,
				ke:d.ke,
				name:'coco',
				details:{
					plug:'coco',
					name:'coco',
					reason:'object',
					matrices : predictions
					// confidence:d.average
				},
				imgHeight:d.mon.detector_scale_y,
				imgWidth:d.mon.detector_scale_x
			})
		}
	})
}
// Timestamped console logger. Up to three values may be passed; the second
// and third default to '' when falsy. Completely silenced unless
// config.systemLog is exactly true.
s.systemLog = function (q, w, e) {
	var second = w ? w : ''
	var third = e ? e : ''
	if (config.systemLog === true) {
		return console.log(moment().format(), q, second, third)
	}
}
// Motion-detect one region (`cord`) of monitor `d` by frame differencing:
// draw the current frame onto the region's clipped canvas, diff it against
// the previous frame, and when the average pixel change exceeds the
// region's sensitivity either run object detection on the frame or emit a
// plain 'motion' trigger through `tx`.
s.blenderRegion=function(d,cord,tx){
	d.width = d.image.width;
	d.height = d.image.height;
	// Lazily create the per-region canvas the first time this region is seen.
	if(!s.group[d.ke][d.id].canvas[cord.name]){
		// fall back to the monitor-wide sensitivity when the region has none
		if(!cord.sensitivity||isNaN(cord.sensitivity)){
			cord.sensitivity=d.mon.detector_sensitivity;
		}
		s.group[d.ke][d.id].canvas[cord.name] = new Canvas(d.width,d.height);
		s.group[d.ke][d.id].canvasContext[cord.name] = s.group[d.ke][d.id].canvas[cord.name].getContext('2d');
		s.group[d.ke][d.id].canvasContext[cord.name].fillStyle = '#000';
		s.group[d.ke][d.id].canvasContext[cord.name].fillRect( 0, 0,d.width,d.height);
		// Clip the canvas to the region polygon so pixels outside it are ignored.
		if(cord.points&&cord.points.length>0){
			s.group[d.ke][d.id].canvasContext[cord.name].beginPath();
			for (var b = 0; b < cord.points.length; b++){
				cord.points[b][0]=parseFloat(cord.points[b][0]);
				cord.points[b][1]=parseFloat(cord.points[b][1]);
				if(b===0){
					s.group[d.ke][d.id].canvasContext[cord.name].moveTo(cord.points[b][0],cord.points[b][1]);
				}else{
					s.group[d.ke][d.id].canvasContext[cord.name].lineTo(cord.points[b][0],cord.points[b][1]);
				}
			}
			s.group[d.ke][d.id].canvasContext[cord.name].clip();
		}
	}
	if(!s.group[d.ke][d.id].canvasContext[cord.name]){
		return
	}
	s.group[d.ke][d.id].canvasContext[cord.name].drawImage(d.image, 0, 0, d.width, d.height);
	// Lazily create the blend-result canvas for this region.
	if(!s.group[d.ke][d.id].blendRegion[cord.name]){
		s.group[d.ke][d.id].blendRegion[cord.name] = new Canvas(d.width, d.height);
		s.group[d.ke][d.id].blendRegionContext[cord.name] = s.group[d.ke][d.id].blendRegion[cord.name].getContext('2d');
	}
	var sourceData = s.group[d.ke][d.id].canvasContext[cord.name].getImageData(0, 0, d.width, d.height);
	// create an image if the previous image doesn't exist
	if (!s.group[d.ke][d.id].lastRegionImageData[cord.name]) s.group[d.ke][d.id].lastRegionImageData[cord.name] = s.group[d.ke][d.id].canvasContext[cord.name].getImageData(0, 0, d.width, d.height);
	// create a ImageData instance to receive the blended result
	var blendedData = s.group[d.ke][d.id].canvasContext[cord.name].createImageData(d.width, d.height);
	// blend the 2 images (thresholded difference mask, see s.differenceAccuracy)
	s.differenceAccuracy(blendedData.data,sourceData.data,s.group[d.ke][d.id].lastRegionImageData[cord.name].data);
	// draw the result in a canvas
	s.group[d.ke][d.id].blendRegionContext[cord.name].putImageData(blendedData, 0, 0);
	// store the current frame as the baseline for the next comparison
	s.group[d.ke][d.id].lastRegionImageData[cord.name] = sourceData;
	blendedData = s.group[d.ke][d.id].blendRegionContext[cord.name].getImageData(0, 0, d.width, d.height);
	// Average the changed-pixel intensity; i walks one RGBA pixel at a time.
	var i = 0;
	d.average = 0;
	while (i < (blendedData.data.length * 0.25)) {
		d.average += (blendedData.data[i * 4] + blendedData.data[i * 4 + 1] + blendedData.data[i * 4 + 2]);
		++i;
	}
	// NOTE(review): the *10 scale factor appears to be an empirically chosen
	// sensitivity multiplier -- confirm against the dashboard's slider range.
	d.average = (d.average / (blendedData.data.length * 0.25))*10;
	if (d.average > parseFloat(cord.sensitivity)){
		if(d.mon.detector_use_detect_object==="1"&&d.mon.detector_second!=='1'){
			// hand the frame to the object detector instead of triggering directly
			var buffer=s.group[d.ke][d.id].canvas[cord.name].toBuffer();
			s.detectObject(buffer,d,tx)
		}else{
			tx({f:'trigger',id:d.id,ke:d.ke,details:{split:true,plug:config.plug,name:cord.name,reason:'motion',confidence:d.average,frame:d.base64}})
		}
	}
	// Reset both canvases for the next frame.
	s.group[d.ke][d.id].canvasContext[cord.name].clearRect(0, 0, d.width, d.height);
	s.group[d.ke][d.id].blendRegionContext[cord.name].clearRect(0, 0, d.width, d.height);
}
// Read a Blob fully and hand its bytes to `cb` as a Node Buffer.
// cb is invoked as cb(err) on read failure or cb(null, buffer) on success.
// Throws synchronously when the arguments are of the wrong type.
function blobToBuffer (blob, cb) {
	if (typeof Blob === 'undefined' || !(blob instanceof Blob)) {
		throw new Error('first argument must be a Blob')
	}
	if (typeof cb !== 'function') {
		throw new Error('second argument must be a function')
	}
	var reader = new FileReader()
	var onLoadEnd = function (event) {
		// one-shot listener: detach before reporting the result
		reader.removeEventListener('loadend', onLoadEnd, false)
		if (event.error) {
			cb(event.error)
		} else {
			cb(null, Buffer.from(reader.result))
		}
	}
	reader.addEventListener('loadend', onLoadEnd, false)
	reader.readAsArrayBuffer(blob)
}
// Branch-free absolute value for 32-bit integers: `value >> 31` yields a
// mask of all ones for negatives (zero otherwise), so XOR-and-subtract
// flips the two's-complement sign without a conditional.
function fastAbs(value) {
	var signMask = value >> 31;
	return (value ^ signMask) - signMask;
}
// Binarize a pixel-difference value: anything above 21 (0x15) becomes full
// white (0xFF), everything else black (0).
function threshold(value) {
	if (value > 0x15) {
		return 0xFF;
	}
	return 0;
}
// Fill `target` with a black/white difference mask of two equal-length
// RGBA pixel buffers: each pixel becomes 0xFF where the grayscale averages
// of data1 and data2 differ beyond the threshold, 0 otherwise, with alpha
// forced opaque. Returns null when the buffer lengths differ.
s.differenceAccuracy = function (target, data1, data2) {
	if (data1.length != data2.length) return null;
	// length * 0.25 = pixel count, since each pixel spans 4 RGBA bytes
	var pixelCount = data1.length * 0.25;
	for (var px = 0; px < pixelCount; px++) {
		var base = 4 * px;
		var gray1 = (data1[base] + data1[base + 1] + data1[base + 2]) / 3;
		var gray2 = (data2[base] + data2[base + 1] + data2[base + 2]) / 3;
		var diff = threshold(fastAbs(gray1 - gray2));
		target[base] = diff;
		target[base + 1] = diff;
		target[base + 2] = diff;
		target[base + 3] = 0xFF;
	}
}
// Run region-based motion detection for one monitor frame.
// d : frame/monitor data (d.image must be loaded), tx : transmitter back to Shinobi.
s.checkAreas=function(d,tx){
    // build the region list once per monitor from the saved coordinates
    if(!s.group[d.ke][d.id].cords){
        if(!d.mon.cords){d.mon.cords={}}
        s.group[d.ke][d.id].cords=Object.values(d.mon.cords);
    }
    // optionally add a synthetic region covering the entire frame
    if(d.mon.detector_frame==='1'){
        d.mon.cords.frame={name:'FULL_FRAME',s:d.mon.detector_sensitivity,points:[[0,0],[0,d.image.height],[d.image.width,d.image.height],[d.image.width,0]]};
        s.group[d.ke][d.id].cords.push(d.mon.cords.frame);
    }
    for (var b = 0; b < s.group[d.ke][d.id].cords.length; b++){
        // NOTE(review): a falsy entry aborts the whole loop, not just this region
        if(!s.group[d.ke][d.id].cords[b]){return}
        s.blenderRegion(d,s.group[d.ke][d.id].cords[b],tx)
    }
    // frame image no longer needed once every region has been blended
    delete(d.image)
}
// Central dispatcher for messages arriving from Shinobi.
// d : parsed message, cn : raw socket connection (host mode only, may be null),
// tx : function used to send responses back to the sender.
s.MainEventController=function(d,cn,tx){
    switch(d.f){
        case'refreshPlugins':
            // rescan the cascade folder and broadcast the fresh list to the group
            s.findCascades(function(cascades){
                s.cx({f:'s.tx',data:{f:'detector_cascade_list',cascades:cascades},to:'GRP_'+d.ke})
            })
        break;
        case'readPlugins':
            // broadcast the cached cascade list without rescanning
            s.cx({f:'s.tx',data:{f:'detector_cascade_list',cascades:s.cascadesInDir},to:'GRP_'+d.ke})
        break;
        case'init_plugin_as_host':
            // a client is connecting to this plugin running in host mode;
            // validate its key before accepting frames
            if(!cn){
                console.log('No CN',d)
                return
            }
            if(d.key!==config.key){
                console.log(new Date(),'Plugin Key Mismatch',cn.request.connection.remoteAddress,d)
                cn.emit('init',{ok:false})
                cn.disconnect()
            }else{
                console.log(new Date(),'Plugin Connected to Client',cn.request.connection.remoteAddress)
                cn.emit('init',{ok:true,plug:config.plug,notice:config.notice,type:config.type})
            }
        break;
        case'init_monitor':
            // reset all per-monitor detection state (canvases, regions, frame buffer)
            if(s.group[d.ke]&&s.group[d.ke][d.id]){
                s.group[d.ke][d.id].canvas={}
                s.group[d.ke][d.id].canvasContext={}
                s.group[d.ke][d.id].blendRegion={}
                s.group[d.ke][d.id].blendRegionContext={}
                s.group[d.ke][d.id].lastRegionImageData={}
                s.group[d.ke][d.id].numberOfTriggers=0
                delete(s.group[d.ke][d.id].cords)
                delete(s.group[d.ke][d.id].buffer)
            }
        break;
        case'init_aws_push':
            // prepare an upload-tracking object for this monitor
            // console.log('init_aws')
            s.group[d.ke][d.id].aws={links:[],complete:0,total:d.total,videos:[],tx:tx}
        break;
        case'frame':
            // a chunk of a JPEG frame arrived; accumulate chunks until the
            // JPEG end-of-image marker (0xFF 0xD9) is seen, then run detection
            try{
                if(!s.group[d.ke]){
                    s.group[d.ke]={}
                }
                if(!s.group[d.ke][d.id]){
                    s.group[d.ke][d.id]={
                        canvas:{},
                        canvasContext:{},
                        lastRegionImageData:{},
                        blendRegion:{},
                        blendRegionContext:{},
                    }
                }
                if(!s.group[d.ke][d.id].buffer){
                    s.group[d.ke][d.id].buffer=[d.frame];
                }else{
                    s.group[d.ke][d.id].buffer.push(d.frame)
                }
                if(d.frame[d.frame.length-2] === 0xFF && d.frame[d.frame.length-1] === 0xD9){
                    s.group[d.ke][d.id].buffer=Buffer.concat(s.group[d.ke][d.id].buffer);
                    try{
                        d.mon.detector_cascades=JSON.parse(d.mon.detector_cascades)
                    }catch(err){
                    }
                    if(d.mon.detector_frame_save==="1"){
                        // keep a base64 copy of the full frame for event storage
                        d.base64=s.group[d.ke][d.id].buffer.toString('base64')
                    }
                    if(d.mon.detector_second==='1'&&d.objectOnly===true){
                        // second-pass mode: skip motion and go straight to object detection
                        s.detectObject(s.group[d.ke][d.id].buffer,d,tx)
                    }else{
                        if((d.mon.detector_pam !== '1' && d.mon.detector_use_motion === "1") || d.mon.detector_use_detect_object !== "1"){
                            // pixel-difference motion detection path
                            if((typeof d.mon.cords ==='string')&&d.mon.cords.trim()===''){
                                d.mon.cords=[]
                            }else{
                                try{
                                    d.mon.cords=JSON.parse(d.mon.cords)
                                }catch(err){
                                    // console.log('d.mon.cords',err,d)
                                }
                            }
                            s.group[d.ke][d.id].cords=Object.values(d.mon.cords);
                            d.mon.cords=d.mon.cords; // NOTE(review): self-assignment, has no effect
                            d.image = new Canvas.Image;
                            if(d.mon.detector_scale_x===''||d.mon.detector_scale_y===''){
                                s.systemLog('Must set detector image size')
                                return
                            }else{
                                d.image.width=d.mon.detector_scale_x;
                                d.image.height=d.mon.detector_scale_y;
                            }
                            d.width=d.image.width;
                            d.height=d.image.height;
                            // detection runs once the JPEG is decoded into the canvas image
                            d.image.onload = function() {
                                s.checkAreas(d,tx);
                            }
                            d.image.src = s.group[d.ke][d.id].buffer;
                        }else{
                            // object-detection-only path
                            s.detectObject(s.group[d.ke][d.id].buffer,d,tx)
                        }
                    }
                    // frame handed off; reset the chunk accumulator
                    s.group[d.ke][d.id].buffer=null;
                }
            }catch(err){
                if(err){
                    s.systemLog(err)
                    delete(s.group[d.ke][d.id].buffer)
                }
            }
        break;
    }
}
server.listen(config.hostPort);
//web pages and plugin api
app.get('/', function (req, res) {
    res.end('<b>'+config.plug+'</b> for Shinobi is running')
});
//Connector to Shinobi: host mode accepts inbound clients, client mode dials out
if(config.mode==='host'){
    //start plugin as host
    var io = require('socket.io')(server);
    io.attach(server);
    s.connectedClients={};
    io.on('connection', function (cn) {
        // track each client and give it a transmitter stamped with this plugin's identity
        s.connectedClients[cn.id]={id:cn.id}
        s.connectedClients[cn.id].tx = function(data){
            data.pluginKey=config.key;data.plug=config.plug;
            return io.to(cn.id).emit('ocv',data);
        }
        cn.on('f',function(d){
            s.MainEventController(d,cn,s.connectedClients[cn.id].tx)
        });
        cn.on('disconnect',function(d){
            delete(s.connectedClients[cn.id])
        })
    });
}else{
    //start plugin as client
    if(!config.host){config.host='localhost'}
    var io = require('socket.io-client')('ws://'+config.host+':'+config.port);//connect to master
    // s.cx : send a message to Shinobi stamped with this plugin's identity
    s.cx=function(x){x.pluginKey=config.key;x.plug=config.plug;return io.emit('ocv',x)}
    io.on('connect',function(d){
        s.cx({f:'init',plug:config.plug,notice:config.notice,type:config.type});
    })
    io.on('disconnect',function(d){
        // immediately attempt to re-establish the connection
        io.connect();
    })
    io.on('f',function(d){
        s.MainEventController(d,null,s.cx)
    })
}

View file

@ -34,13 +34,13 @@ nano plugins/openalpr/conf.json
Start the plugin.
```
node plugins/openalpr/shinobi-motion.js
node plugins/openalpr/shinobi-openalpr.js
```
Or to daemonize with PM2.
```
pm2 start plugins/openalpr/shinobi-motion.js
pm2 start plugins/openalpr/shinobi-openalpr.js
```
Doing this will reveal options in the monitor configuration. Shinobi does not need to be restarted when a plugin is initiated or stopped.

3
plugins/python-dlib/.gitignore vendored Normal file
View file

@ -0,0 +1,3 @@
conf.json
faces
data

View file

@ -0,0 +1,66 @@
#!/bin/bash
echo "-----------------------------------------------"
echo "-- Installing Python Dlib Plugin for Shinobi --"
echo "-----------------------------------------------"
echo "-----------------------------------"
# Create the plugin configuration from the sample on first run.
if [ ! -e "./conf.json" ]; then
    echo "Creating conf.json"
    sudo cp conf.sample.json conf.json
else
    echo "conf.json already exists..."
fi
echo "-----------------------------------"
sudo apt update -y
echo "Installing python3"
sudo apt install python3 python3-dev python3-pip -y
echo "-----------------------------------"
# dlib requires a recent compiler toolchain.
sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
sudo apt update
sudo apt-get install gcc-6 g++-6 -y && sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-6 60 --slave /usr/bin/g++ g++ /usr/bin/g++-6
echo "-----------------------------------"
# NVIDIA drivers, the CUDA toolkit and a CUDA build of OpenCV are hard requirements.
if ! [ -x "$(command -v nvidia-smi)" ]; then
    echo "You need to install NVIDIA Drivers to use this."
    echo "inside the Shinobi directory run the following :"
    echo "sh INSTALL/cuda9-part1.sh"
    exit 1
else
    echo "NVIDIA Drivers found..."
    echo "$(nvidia-smi |grep 'Driver Version')"
fi
echo "-----------------------------------"
if [ ! -d "/usr/local/cuda" ]; then
    echo "You need to install CUDA Toolkit to use this."
    echo "inside the Shinobi directory run the following :"
    echo "sh INSTALL/cuda9-part2-after-reboot.sh"
    exit 1
else
    echo "CUDA Toolkit found..."
fi
echo "-----------------------------------"
if ! [ -x "$(command -v opencv_version)" ]; then
    echo "You need to install OpenCV with CUDA first."
    echo "inside the Shinobi directory run the following :"
    echo "sh INSTALL/opencv-cuda.sh"
    exit 1
else
    echo "OpenCV found... : $(opencv_version)"
fi
echo "-----------------------------------"
echo "Getting new pip..."
pip3 install --upgrade pip
pip install --user --upgrade pip
echo "Smoking pips..."
pip3 install flask_socketio
pip3 install flask
pip3 install numpy
pip3 install gevent gevent-websocket
export PATH=/usr/local/cuda/bin:$PATH
echo "Installing Dlib..."
cd /opt
git clone https://github.com/davisking/dlib.git dlib
cd dlib
# cmake needs a source-directory argument; configure the checkout in place,
# then build and install the C++ library (sudo: installs under /usr/local)
# before installing the Python bindings.
cmake . -DCUDA_CUDART_LIBRARY=/usr/local/cuda/lib64/libcudart.so
make
sudo make install
pip3 install dlib
echo "Start the plugin with pm2 like so :"
echo "pm2 start shinobi-python-dlib.js"

View file

@ -0,0 +1,72 @@
# Python DLIB
> This plugin requires the use of port `7990` by default. You can specify a different port by adding `pythonPort` to your plugin's conf.json.
**Ubuntu and Debian only**
Go to the Shinobi directory. **/home/Shinobi** is the default directory.
```
cd /home/Shinobi/plugins/python-dlib
```
Copy the config file.
```
sh INSTALL.sh
```
Start the plugin.
```
pm2 start shinobi-python-dlib.js
```
Doing this will reveal options in the monitor configuration. Shinobi does not need to be restarted when a plugin is initiated or stopped.
## Run the plugin as a Host
> The main app (Shinobi) will be the client and the plugin will be the host. The purpose of allowing this method is so that you can use one plugin for multiple Shinobi instances. Allowing you to easily manage connections without starting multiple processes.
Edit your plugin's configuration file. Set the `hostPort` **to be different** from the `listening port for camera.js`.
```
nano conf.json
```
Here is a sample of a Host configuration for the plugin.
- `plug` is the name of the plugin corresponding in the main configuration file.
- `https` choose if you want to use SSL or not. Default is `false`.
- `hostPort` can be any available port number. **Don't make this the same port number as Shinobi.** Default is `8082`.
- `type` tells the main application (Shinobi) what kind of plugin it is. In this case it is a detector.
```
{
"plug":"PythonDlib",
"hostPort":8082,
"key":"SomeOpenALPRkeySoPeopleDontMessWithYourShinobi",
"mode":"host",
"type":"detector"
}
```
Now modify the **main configuration file** located in the main directory of Shinobi. *Where you currently should be.*
```
nano conf.json
```
Add the `plugins` array if you don't already have it. Add the following *object inside the array*.
```
"plugins":[
{
"id" : "PythonDlib",
"https" : false,
"host" : "localhost",
"port" : 8082,
"key" : "SomeOpenALPRkeySoPeopleDontMessWithYourShinobi",
"mode" : "host",
"type" : "detector"
}
],
```

View file

@ -0,0 +1 @@
python3 -u $@

View file

@ -0,0 +1,10 @@
{
"plug":"PythonDlib",
"host":"localhost",
"port":8080,
"pythonPort":7990,
"hostPort":8082,
"key":"SomeOpenALPRkeySoPeopleDontMessWithYourShinobi",
"mode":"client",
"type":"detector"
}

View file

@ -0,0 +1,18 @@
{
"name": "shinobi-python-dlib",
"version": "1.0.0",
"description": "DLIB plugin for Shinobi that uses Python functions for detection.",
"main": "shinobi-python-dlib.js",
"dependencies": {
"socket.io-client": "^1.7.4",
"express": "^4.16.2",
"moment": "^2.19.2",
"socket.io": "^2.0.4"
},
"devDependencies": {},
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "Moe Alam",
"license": "ISC"
}

View file

@ -0,0 +1,113 @@
from flask import Flask, request, jsonify, render_template
from flask_socketio import SocketIO, emit
from PIL import Image
import face_recognition
import cv2
import os
import json
import numpy as np
import sys
dirname = sys.argv[1]
# Read the plugin configuration and pick the Python listener port.
# Falls back to port 7990 when conf.json is missing, unreadable, or has
# no "pythonPort" entry (the original nested NameError check was dead code:
# a missing key raised KeyError and misreported the file as absent).
httpPort = 7990
try:
    with open("{}/conf.json".format(dirname)) as json_file:
        config = json.load(json_file)
    httpPort = config.get('pythonPort', httpPort)
except Exception:
    print("conf.json not found.")
# Load Flask
app = Flask("DLIB for Shinobi (Pumpkin Pie)")
socketio = SocketIO(app)
# Silence Flask
# import logging
# log = logging.getLogger('werkzeug')
# log.setLevel(logging.ERROR)
#check for faces dir
facesDir = "{}/faces/".format(dirname)
if not os.path.exists(facesDir):
os.makedirs(facesDir)
# load faces
included_extensions = ['jpg','jpeg', 'bmp', 'png', 'gif']
file_names = [fn for fn in os.listdir(facesDir)
if any(fn.endswith(ext) for ext in included_extensions)]
known_faces = []
face_locations = []
face_encodings = []
face_names = []
for faceFile in file_names:
face = face_recognition.load_image_file(facesDir+faceFile)
face_encoding = face_recognition.face_encodings(face)[0]
known_faces.append(face_encoding)
# detection function
# detection function
def spark(filepath):
    """Detect faces in the image stored at ``filepath``.

    Returns a list of dicts, one per detected face. Each dict carries the
    four bounding-box corner "coordinates"; when the face matches one of the
    preloaded ``known_faces`` it also carries a "tag" with the matching
    file name. Returns an error string when no path was given and an empty
    list when the image cannot be read.
    """
    if not filepath:
        return "File path not found."
    img = cv2.imread(filepath)
    if img is None:
        # missing or unreadable image file — nothing to detect
        return []
    returnData = []
    # reverse the channel order: OpenCV loads BGR, face_recognition expects RGB
    frame = img[:, :, ::-1]
    # Find all the faces and face encodings in the current frame of video
    face_locations = face_recognition.face_locations(frame)
    face_encodings = face_recognition.face_encodings(frame, face_locations)
    for (top, right, bottom, left), face_encoding in zip(face_locations, face_encodings):
        matrix = {
            "coordinates": [
                {"x": left, "y": top},
                {"x": right, "y": top},
                {"x": right, "y": bottom},
                {"x": left, "y": bottom},
            ]
        }
        # See if the face is a match for the known face(s)
        match = face_recognition.compare_faces(known_faces, face_encoding, tolerance=0.50)
        if True in match:
            first_match_index = match.index(True)
            matrix["tag"] = file_names[first_match_index]
        # every detected face is reported, tagged or not
        returnData.append(matrix)
    return returnData
# bake the image data by a file path
# POST body contains the "img" variable. The value should be to a local image path.
# Example : /dev/shm/streams/[GROUP_KEY]/[MONITOR_ID]/s.jpg
@app.route('/', methods=['GET'])
def index():
return "Pumpkin.py is running. This web interface should NEVER be accessible remotely. The Node.js plugin that runs this script should only be allowed accessible remotely."
# bake the image data by a file path
# POST body contains the "img" variable. The value should be to a local image path.
# Example : /dev/shm/streams/[GROUP_KEY]/[MONITOR_ID]/s.jpg
@app.route('/post', methods=['POST'])
def post():
filepath = request.form['img']
return jsonify(spark(filepath))
# bake the image data by a file path
# GET string contains the "img" variable. The value should be to a local image path.
# Example : /dev/shm/streams/[GROUP_KEY]/[MONITOR_ID]/s.jpg
@app.route('/get', methods=['GET'])
def get():
filepath = request.args.get('img')
return jsonify(spark(filepath))
@socketio.on('f')
def receiveMessage(message):
emit('f',{'id':message.get("id"),'data':spark(message.get("path"))})
# quick-and-dirty start
if __name__ == '__main__':
socketio.run(app, port=httpPort)

View file

@ -0,0 +1,292 @@
//
// Shinobi - Python DLIB Plugin
// Copyright (C) 2016-2025 Moe Alam, moeiscool
//
// # Donate
//
// If you like what I am doing here and want me to continue please consider donating :)
// PayPal : paypal@m03.ca
//
process.on('uncaughtException', function (err) {
console.error('uncaughtException',err);
});
//main vars
var fs=require('fs');
var exec = require('child_process').exec;
var spawn = require('child_process').spawn;
var moment = require('moment');
var http = require('http');
var express = require('express');
var socketIoClient = require('socket.io-client');
var config = require('./conf.json');
var http = require('http'),
app = express(),
server = http.createServer(app);
s={
group:{},
dir:{},
isWin:(process.platform==='win32'),
s:function(json){return JSON.stringify(json,null,3)}
}
// Ensure a directory path ends with "/" and expand the __DIR__ placeholder
// to this plugin's directory.
s.checkCorrectPathEnding=function(x){
    if(x.charAt(x.length-1)!=='/'){
        x+='/'
    }
    return x.replace('__DIR__',__dirname)
}
s.debugLog = function(){
if(config.debugLog === true){
console.log(new Date(),arguments)
if(config.debugLogVerbose === true){
console.log(new Error())
}
}
}
if(!config.port){config.port=8080}
if(!config.pythonScript){config.pythonScript=__dirname+'/pumpkin.py'}
if(!config.pythonPort){config.pythonPort=7990}
if(!config.hostPort){config.hostPort=8082}
if(config.systemLog===undefined){config.systemLog=true}
//default stream folder check
if(!config.streamDir){
if(s.isWin===false){
config.streamDir='/dev/shm'
}else{
config.streamDir=config.windowsTempDir
}
if(!fs.existsSync(config.streamDir)){
config.streamDir=__dirname+'/streams/'
}else{
config.streamDir+='/streams/'
}
}
s.dir.streams=config.streamDir;
//streams dir
if(!fs.existsSync(s.dir.streams)){
fs.mkdirSync(s.dir.streams);
}
// Generate a random alphanumeric identifier, x characters long (default 10).
s.gid=function(x){
    x=x||10
    var pool="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
    var out=""
    while(out.length<x){
        out+=pool.charAt(Math.floor(Math.random()*pool.length))
    }
    return out
};
s.getRequest = function(url,callback){
return http.get(url, function(res){
var body = '';
res.on('data', function(chunk){
body += chunk;
});
res.on('end',function(){
try{body = JSON.parse(body)}catch(err){}
callback(body)
});
}).on('error', function(e){
// s.systemLog("Get Snapshot Error", e);
});
}
s.multiplerHeight = 1
s.multiplerWidth = 1
// Send one complete JPEG frame to the Python face-recognition daemon and
// forward any detections to Shinobi as a "trigger" event.
// buffer : full JPEG image, d : monitor/frame data, tx : transmitter.
s.detectObject=function(buffer,d,tx){
    d.tmpFile=s.gid(5)+'.jpg'
    if(!fs.existsSync(s.dir.streams)){
        fs.mkdirSync(s.dir.streams);
    }
    // build <streams>/<group key>/<monitor id>/ one level at a time
    d.dir=s.dir.streams+d.ke+'/'
    if(!fs.existsSync(d.dir)){
        fs.mkdirSync(d.dir);
    }
    d.dir=s.dir.streams+d.ke+'/'+d.id+'/'
    if(!fs.existsSync(d.dir)){
        fs.mkdirSync(d.dir);
    }
    // write the frame to disk; the Python side reads it back by path
    fs.writeFile(d.dir+d.tmpFile,buffer,function(err){
        if(err) return s.systemLog(err);
        if(s.isPythonRunning === false){
            return console.log('Python Script is not Running.')
        }
        var callbackId = s.gid(10)
        s.group[d.ke][d.id].sendToPython({path:d.dir+d.tmpFile,id:callbackId},function(data){
            console.log('returned',data)
            if(data.length > 0){
                // convert the corner coordinates returned by Python into the
                // x/y/width/height matrices Shinobi expects
                var mats=[]
                data.forEach(function(v){
                    var width = Math.sqrt( Math.pow(v.coordinates[1].x - v.coordinates[0].x, 2) + Math.pow(v.coordinates[1].y - v.coordinates[0].y, 2));
                    var height = Math.sqrt( Math.pow(v.coordinates[2].x - v.coordinates[1].x, 2) + Math.pow(v.coordinates[2].y - v.coordinates[1].y, 2))
                    mats.push({
                        x:v.coordinates[0].x,
                        y:v.coordinates[0].y,
                        width: width,
                        height: height,
                        confidence:v.confidence,
                        tag:v.tag
                    })
                })
                tx({
                    f:'trigger',
                    id:d.id,
                    ke:d.ke,
                    details:{
                        plug:config.plug,
                        name:'dlib',
                        reason:'object',
                        matrices:mats,
                        imgHeight:d.mon.detector_scale_y,
                        imgWidth:d.mon.detector_scale_x
                    }
                })
            }
            delete(s.callbacks[callbackId])
            // remove the temporary frame file
            exec('rm -rf '+d.dir+d.tmpFile,{encoding:'utf8'})
        })
    })
}
// Timestamped logger. Silently ignores calls with fewer than two arguments
// and does nothing when config.systemLog is disabled.
s.systemLog=function(q,w,e){
    if(w===undefined)return
    w=w||''
    e=e||''
    if(config.systemLog!==true)return
    return console.log(moment().format(),q,w,e)
}
// Dispatch a control message arriving from Shinobi (or from a connected
// client when running in host mode).
// d : parsed message, cn : raw socket (host mode only, may be null),
// tx : function used to send responses back.
s.MainEventController=function(d,cn,tx){
    switch(d.f){
        case'init_plugin_as_host':
            // validate the connecting client's key before accepting frames
            if(!cn){
                console.log('No CN',d)
                return
            }
            if(d.key!==config.key){
                console.log(new Date(),'Plugin Key Mismatch',cn.request.connection.remoteAddress,d)
                cn.emit('init',{ok:false})
                cn.disconnect()
            }else{
                console.log(new Date(),'Plugin Connected to Client',cn.request.connection.remoteAddress)
                cn.emit('init',{ok:true,plug:config.plug,notice:config.notice,type:config.type})
            }
        break;
        case'init_monitor':
            // reset the partial-frame buffer for this monitor
            if(s.group[d.ke]&&s.group[d.ke][d.id]){
                delete(s.group[d.ke][d.id].buffer)
            }
        break;
        case'frame':
            // accumulate JPEG chunks; once the end-of-image marker
            // (0xFF 0xD9) is seen, hand the full frame to the Python detector
            try{
                if(!s.group[d.ke]){
                    s.group[d.ke]={}
                }
                if(!s.group[d.ke][d.id]){
                    // first frame for this monitor: open a socket to the Python daemon
                    s.group[d.ke][d.id]={
                        sendToPython : s.createCameraBridgeToPython(d.ke+d.id)
                    }
                }
                if(!s.group[d.ke][d.id].buffer){
                    s.group[d.ke][d.id].buffer=[d.frame];
                }else{
                    s.group[d.ke][d.id].buffer.push(d.frame)
                }
                if(d.frame[d.frame.length-2] === 0xFF && d.frame[d.frame.length-1] === 0xD9){
                    s.detectObject(Buffer.concat(s.group[d.ke][d.id].buffer),d,tx)
                    s.group[d.ke][d.id].buffer=null;
                }
            }catch(err){
                if(err){
                    s.systemLog(err)
                    delete(s.group[d.ke][d.id].buffer)
                }
            }
        break;
    }
}
server.listen(config.hostPort);
//web pages and plugin api
app.get('/', function (req, res) {
res.end('<b>'+config.plug+'</b> for Shinobi is running')
});
//Conector to Shinobi
if(config.mode==='host'){
//start plugin as host
var io = require('socket.io')(server);
io.attach(server);
s.connectedClients={};
io.on('connection', function (cn) {
s.connectedClients[cn.id]={id:cn.id}
s.connectedClients[cn.id].tx = function(data){
data.pluginKey=config.key;data.plug=config.plug;
return io.to(cn.id).emit('ocv',data);
}
cn.on('f',function(d){
s.MainEventController(d,cn,s.connectedClients[cn.id].tx)
});
cn.on('disconnect',function(d){
delete(s.connectedClients[cn.id])
})
});
}else{
//start plugin as client
if(!config.host){config.host='localhost'}
var io = socketIoClient('ws://'+config.host+':'+config.port);//connect to master
s.cx=function(x){x.pluginKey=config.key;x.plug=config.plug;return io.emit('ocv',x)}
io.on('connect',function(d){
s.cx({f:'init',plug:config.plug,notice:config.notice,type:config.type});
})
io.on('disconnect',function(d){
io.connect();
})
io.on('f',function(d){
s.MainEventController(d,null,s.cx)
})
}
//Start Python Controller
s.callbacks = {}
s.createCameraBridgeToPython = function(uniqueId){
var pythonIo = socketIoClient('ws://localhost:'+config.pythonPort,{transports : ['websocket']});
var sendToPython = function(data,callback){
s.callbacks[data.id] = callback
pythonIo.emit('f',data)
}
pythonIo.on('connect',function(d){
s.debugLog(uniqueId+' is Connected from Python')
})
pythonIo.on('disconnect',function(d){
s.debugLog(uniqueId+' is Disconnected from Python')
setTimeout(function(){
pythonIo.connect();
s.debugLog(uniqueId+' is Attempting to Reconect to Python')
},3000)
})
pythonIo.on('f',function(d){
if(s.callbacks[d.id]){
s.callbacks[d.id](d.data)
delete(s.callbacks[d.id])
}
})
return sendToPython
}
//Start Python Daemon
process.env.PYTHONUNBUFFERED = 1;
// Spawn the pumpkin.py detector through bootPy.sh and track its state.
// s.isPythonRunning guards s.detectObject from writing frames to a dead process.
s.createPythonProcess = function(){
    s.isPythonRunning = false
    s.pythonScript = spawn('sh',[__dirname+'/bootPy.sh',config.pythonScript,__dirname]);
    // both stdout and stderr go to the debug log
    var logOutput = function(data){
        s.debugLog(data.toString())
    }
    // assume the Flask/socket.io side is ready after a short warm-up
    setTimeout(function(){
        s.isPythonRunning = true
    },5000)
    s.pythonScript.stderr.on('data',logOutput);
    s.pythonScript.stdout.on('data',logOutput);
    s.pythonScript.on('close', function () {
        // fix: flag the daemon as down so frames stop being sent to a dead
        // process (the flag previously stayed true forever)
        s.isPythonRunning = false
        s.debugLog('Python CLOSED')
    });
}
s.createPythonProcess()

5
plugins/python-yolo/.gitignore vendored Normal file
View file

@ -0,0 +1,5 @@
conf.json
cascades
cfg
weights
data

View file

@ -0,0 +1,109 @@
#!/bin/bash
echo "-----------------------------------------------"
echo "-- Installing Python Yolo Plugin for Shinobi --"
echo "-----------------------------------------------"
echo "-----------------------------------"
if [ ! -d "weights" ]; then
echo "Downloading yolov3 weights..."
mkdir weights
wget -O weights/yolov3.weights https://pjreddie.com/media/files/yolov3.weights
else
echo "yolov3 weights found..."
fi
echo "-----------------------------------"
if [ ! -d "cfg" ]; then
echo "Downloading yolov3 cfg"
mkdir cfg
wget -O cfg/coco.data https://raw.githubusercontent.com/pjreddie/darknet/master/cfg/coco.data
wget -O cfg/yolov3.cfg https://raw.githubusercontent.com/pjreddie/darknet/master/cfg/yolov3.cfg
else
echo "yolov3 cfg found..."
fi
echo "-----------------------------------"
if [ ! -d "data" ]; then
echo "Downloading yolov3 data"
mkdir data
wget -O data/coco.names https://raw.githubusercontent.com/pjreddie/darknet/master/data/coco.names
else
echo "yolov3 data found..."
fi
echo "-----------------------------------"
if [ ! -e "./conf.json" ]; then
echo "Creating conf.json"
sudo cp conf.sample.json conf.json
else
echo "conf.json already exists..."
fi
echo "-----------------------------------"
sudo apt update -y
sudo apt-get install libxml2-dev libxslt-dev libxslt1-dev zlib1g-dev -y
echo "Installing python3"
sudo apt install python3 python3-dev python3-pip -y
sudo apt install python3-lxml libxml2-dev -y
echo "-----------------------------------"
if ! [ -x "$(command -v gcc-6)" ]; then
echo "Installing gcc-6 and g++-6"
sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
sudo apt-get install gcc-6 g++-6 -y && sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-6 60 --slave /usr/bin/g++ g++ /usr/bin/g++-6
else
echo "gcc-6 and g++-6 found..."
fi
echo "-----------------------------------"
if ! [ -x "$(command -v nvidia-smi)" ]; then
echo "You need to install NVIDIA Drivers to use this."
echo "inside the Shinobi directory run the following :"
echo "sh INSTALL/cuda9-part1.sh"
exit 1
else
echo "NVIDIA Drivers found..."
echo "$(nvidia-smi |grep 'Driver Version')"
fi
echo "-----------------------------------"
if [ ! -d "/usr/local/cuda" ]; then
echo "You need to install CUDA Toolkit to use this."
echo "inside the Shinobi directory run the following :"
echo "sh INSTALL/cuda9-part2-after-reboot.sh"
exit 1
else
echo "CUDA Toolkit found..."
fi
echo "-----------------------------------"
if ! [ -x "$(command -v opencv_version)" ]; then
echo "You need to install OpenCV with CUDA first."
echo "inside the Shinobi directory run the following :"
echo "sh INSTALL/opencv-cuda.sh"
exit 1
else
echo "OpenCV found... : $(opencv_version)"
fi
echo "-----------------------------------"
echo "Getting new pip..."
pip3 install --upgrade pip
pip install --user --upgrade pip
echo "Smoking pips..."
pip3 install flask_socketio
pip3 install flask
pip3 install numpy
pip3 install gevent gevent-websocket
export PATH=/usr/local/cuda/bin:$PATH
pip3 install lxml
pip3 install numpy
pip3 install cython
echo "Installing Darknet..."
cd /opt
git clone https://github.com/pjreddie/darknet.git darknet
cd darknet
make
cd ..
echo "Installing YOLO3-4-Py"
echo "Learn more about this wrapper here : https://github.com/madhawav/YOLO3-4-Py"
git clone https://github.com/madhawav/YOLO3-4-Py.git YOLO3-4-Py
cd YOLO3-4-Py
export GPU=1
export OPENCV=1
pip3 install .
apt remove libpython-all-dev python-all python-all-dev python-asn1crypto python-cffi-backend python-crypto python-cryptography python-dbus python-enum34 python-gi python-idna python-ipaddress python-keyring python-keyrings.alt python-pkg-resources python-secretstorage python-setuptools python-six python-wheel python-xdg -y
echo "Done Installing Darknet..."
export PATH=/opt/darknet:$PATH
echo "Start the plugin with pm2 like so :"
echo "pm2 start shinobi-python-yolo.js"

View file

@ -0,0 +1,72 @@
# Python Yolo
> This plugin requires the use of port `7990` by default. You can specify a different port by adding `pythonPort` to your plugin's conf.json.
**Ubuntu and Debian only**
Go to the Shinobi directory. **/home/Shinobi** is the default directory.
```
cd /home/Shinobi/plugins/python-yolo
```
Copy the config file.
```
sh INSTALL.sh
```
Start the plugin.
```
pm2 start shinobi-python-yolo.js
```
Doing this will reveal options in the monitor configuration. Shinobi does not need to be restarted when a plugin is initiated or stopped.
## Run the plugin as a Host
> The main app (Shinobi) will be the client and the plugin will be the host. The purpose of allowing this method is so that you can use one plugin for multiple Shinobi instances. Allowing you to easily manage connections without starting multiple processes.
Edit your plugin's configuration file. Set the `hostPort` **to be different** from the `listening port for camera.js`.
```
nano conf.json
```
Here is a sample of a Host configuration for the plugin.
- `plug` is the name of the plugin corresponding in the main configuration file.
- `https` choose if you want to use SSL or not. Default is `false`.
- `hostPort` can be any available port number. **Don't make this the same port number as Shinobi.** Default is `8082`.
- `type` tells the main application (Shinobi) what kind of plugin it is. In this case it is a detector.
```
{
"plug":"PythonYolo",
"hostPort":8082,
"key":"SomeOpenALPRkeySoPeopleDontMessWithYourShinobi",
"mode":"host",
"type":"detector"
}
```
Now modify the **main configuration file** located in the main directory of Shinobi. *Where you currently should be.*
```
nano conf.json
```
Add the `plugins` array if you don't already have it. Add the following *object inside the array*.
```
"plugins":[
{
"id" : "PythonYolo",
"https" : false,
"host" : "localhost",
"port" : 8082,
"key" : "SomeOpenALPRkeySoPeopleDontMessWithYourShinobi",
"mode" : "host",
"type" : "detector"
}
],
```

View file

@ -0,0 +1 @@
python3 -u $@

View file

@ -0,0 +1,10 @@
{
"plug":"PythonYolo",
"host":"localhost",
"port":8080,
"pythonPort":7990,
"hostPort":8082,
"key":"SomeOpenALPRkeySoPeopleDontMessWithYourShinobi",
"mode":"client",
"type":"detector"
}

View file

@ -0,0 +1,14 @@
#!/usr/bin/env bash
echo "Downloading config files..."
mkdir cfg
wget -O cfg/coco.data https://raw.githubusercontent.com/pjreddie/darknet/master/cfg/coco.data
wget -O cfg/yolov3.cfg https://raw.githubusercontent.com/pjreddie/darknet/master/cfg/yolov3.cfg
mkdir data
wget -O data/coco.names https://raw.githubusercontent.com/pjreddie/darknet/master/data/coco.names
echo "Downloading yolov3 weights"
mkdir weights
wget -O weights/yolov3.weights https://pjreddie.com/media/files/yolov3.weights

View file

@ -0,0 +1,18 @@
{
"name": "shinobi-python-yolo",
"version": "1.0.0",
"description": "YOLOv3 plugin for Shinobi that uses Python functions for detection.",
"main": "shinobi-python-yolo.js",
"dependencies": {
"socket.io-client": "^1.7.4",
"express": "^4.16.2",
"moment": "^2.19.2",
"socket.io": "^2.0.4"
},
"devDependencies": {},
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "Moe Alam",
"license": "ISC"
}

View file

@ -0,0 +1,103 @@
from flask import Flask, request, jsonify, render_template
from flask_socketio import SocketIO, emit
from pydarknet import Detector, Image
import cv2
import os
import json
import numpy as np
import sys
dirname = sys.argv[1]
# Read the plugin configuration and pick the Python listener port.
# Falls back to port 7990 when conf.json is missing, unreadable, or has
# no "pythonPort" entry (the original nested NameError check was dead code:
# a missing key raised KeyError and misreported the file as absent).
httpPort = 7990
try:
    with open("{}/conf.json".format(dirname)) as json_file:
        config = json.load(json_file)
    httpPort = config.get('pythonPort', httpPort)
except Exception:
    print("conf.json not found.")
# Load Flask
app = Flask("YOLOv3 for Shinobi (Pumpkin Pie)")
socketio = SocketIO(app)
# Silence Flask
import logging
log = logging.getLogger('werkzeug')
log.setLevel(logging.ERROR)
# Load Darknet
net = Detector(bytes("{}/cfg/yolov3.cfg".format(dirname), encoding="utf-8"), bytes("{}/weights/yolov3.weights".format(dirname), encoding="utf-8"), 0, bytes("{}/cfg/coco.data".format(dirname),encoding="utf-8"))
def spark(filepath):
    """Run YOLOv3 detection over the image stored at ``filepath``.

    Returns a list of dicts with "tag" (class name), "confidence" and
    "points" (the bounding-box tuple as produced by darknet — assumed
    centre-x/centre-y/width/height, TODO confirm). Returns an error string
    when no path was given, an empty list when the image cannot be read,
    and a printable dump of the raw results if they cannot be converted.
    """
    if not filepath:
        return "File path not found."
    img = cv2.imread(filepath)
    if img is None:
        # missing or unreadable image file — nothing to detect
        return []
    results = net.detect(Image(img))
    try:
        detections = []
        # each darknet result is a (name_bytes, score, box) tuple
        for tag, confidence, points in results:
            detections.append({
                'tag': tag.decode('utf-8'),
                'confidence': confidence,
                'points': list(points),
            })
        return detections
    except Exception:
        # unexpected result shape; fall back to a human-readable dump
        return ',\n'.join(map(str, results))
# bake the image data by a file path
# POST body contains the "img" variable. The value should be to a local image path.
# Example : /dev/shm/streams/[GROUP_KEY]/[MONITOR_ID]/s.jpg
@app.route('/', methods=['GET'])
def index():
return "Pumpkin.py is running. This web interface should NEVER be accessible remotely. The Node.js plugin that runs this script should only be allowed accessible remotely."
# bake the image data by a file path
# POST body contains the "img" variable. The value should be to a local image path.
# Example : /dev/shm/streams/[GROUP_KEY]/[MONITOR_ID]/s.jpg
@app.route('/post', methods=['POST'])
def post():
filepath = request.form['img']
return jsonify(spark(filepath))
# bake the image data by a file path
# GET string contains the "img" variable. The value should be to a local image path.
# Example : /dev/shm/streams/[GROUP_KEY]/[MONITOR_ID]/s.jpg
@app.route('/get', methods=['GET'])
def get():
filepath = request.args.get('img')
return jsonify(spark(filepath))
@socketio.on('f')
def receiveMessage(message):
emit('f',{'id':message.get("id"),'data':spark(message.get("path"))})
# quick-and-dirty start
if __name__ == '__main__':
socketio.run(app, port=httpPort)

View file

@ -0,0 +1,296 @@
//
// Shinobi - Python YOLOv3 Plugin
// Copyright (C) 2016-2025 Moe Alam, moeiscool
//
// # Donate
//
// If you like what I am doing here and want me to continue please consider donating :)
// PayPal : paypal@m03.ca
//
process.on('uncaughtException', function (err) {
console.error('uncaughtException',err);
});
//main vars
var fs=require('fs');
var exec = require('child_process').exec;
var spawn = require('child_process').spawn;
var moment = require('moment');
var http = require('http');
var express = require('express');
var socketIoClient = require('socket.io-client');
var config = require('./conf.json');
var http = require('http'),
app = express(),
server = http.createServer(app);
s={
group:{},
dir:{},
isWin:(process.platform==='win32'),
s:function(json){return JSON.stringify(json,null,3)}
}
// Guarantee a trailing slash on a directory path, then expand the
// __DIR__ placeholder to this plugin's directory.
s.checkCorrectPathEnding=function(x){
    if(!x.endsWith('/')){
        x += '/'
    }
    return x.replace('__DIR__',__dirname)
}
// Timestamped debug logger, active only when conf.json enables debugLog.
// debugLogVerbose additionally prints a stack trace for each call site.
s.debugLog = function(){
    if(config.debugLog !== true){return}
    console.log(new Date(),arguments)
    if(config.debugLogVerbose === true){
        console.log(new Error())
    }
}
// Fill in safe defaults for any settings missing from conf.json.
if(!config.port){config.port=8080}
if(!config.pythonScript){config.pythonScript=__dirname+'/pumpkin.py'}
if(!config.pythonPort){config.pythonPort=7990}
if(!config.hostPort){config.hostPort=8082}
if(config.systemLog===undefined){config.systemLog=true}
if(config.alprConfig===undefined){config.alprConfig=__dirname+'/openalpr.conf'}
//default stream folder check
// Prefer the RAM-backed /dev/shm on non-Windows systems; fall back to a
// local ./streams directory when the chosen location does not exist.
if(!config.streamDir){
    if(s.isWin===false){
        config.streamDir='/dev/shm'
    }else{
        config.streamDir=config.windowsTempDir
    }
    if(!fs.existsSync(config.streamDir)){
        config.streamDir=__dirname+'/streams/'
    }else{
        config.streamDir+='/streams/'
    }
}
s.dir.streams=config.streamDir;
//streams dir
// Create the stream directory on first run.
if(!fs.existsSync(s.dir.streams)){
    fs.mkdirSync(s.dir.streams);
}
// Generate a random alphanumeric token of length x (default 10).
s.gid=function(x){
    var charSet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
    var length = x || 10;
    var token = "";
    while(token.length < length){
        token += charSet.charAt(Math.floor(Math.random() * charSet.length));
    }
    return token;
};
// Minimal HTTP GET helper: buffers the response body, JSON-parses it when
// possible (falls back to the raw string), and passes the result to
// `callback`. Returns the underlying http.ClientRequest.
// NOTE(review): network errors are swallowed silently and `callback` is
// never invoked on failure -- confirm callers tolerate that.
s.getRequest = function(url,callback){
    return http.get(url, function(res){
        var body = '';
        res.on('data', function(chunk){
            body += chunk;
        });
        res.on('end',function(){
            try{body = JSON.parse(body)}catch(err){}
            callback(body)
        });
    }).on('error', function(e){
        // s.systemLog("Get Snapshot Error", e);
    });
}
// Empirical scale factors applied to the bounding boxes returned by the
// Python detector before they are reported to Shinobi.
s.multiplerHeight = 0.75
s.multiplerWidth = 0.96
// Write the assembled JPEG into the shared stream directory, hand the
// file path to the Python YOLO daemon over its socket bridge, and forward
// any detections to Shinobi as a "trigger" event.
// buffer : complete JPEG bytes; d : frame/monitor descriptor (ke, id,
// mon); tx : transmit function toward Shinobi.
s.detectObject=function(buffer,d,tx){
    d.tmpFile=s.gid(5)+'.jpg'
    if(!fs.existsSync(s.dir.streams)){
        fs.mkdirSync(s.dir.streams);
    }
    d.dir=s.dir.streams+d.ke+'/'
    if(!fs.existsSync(d.dir)){
        fs.mkdirSync(d.dir);
    }
    d.dir=s.dir.streams+d.ke+'/'+d.id+'/'
    if(!fs.existsSync(d.dir)){
        fs.mkdirSync(d.dir);
    }
    fs.writeFile(d.dir+d.tmpFile,buffer,function(err){
        if(err) return s.systemLog(err);
        if(s.isPythonRunning === false){
            return console.log('Python Script is not Running.')
        }
        var callbackId = s.gid(10)
        s.group[d.ke][d.id].sendToPython({path:d.dir+d.tmpFile,id:callbackId},function(data){
            if(data.length > 0){
                // convert the raw [x,y,w,h] points into Shinobi matrices
                var mats=[]
                data.forEach(function(v){
                    mats.push({
                        x:v.points[0] * s.multiplerWidth,
                        y:v.points[1] * s.multiplerHeight,
                        width:v.points[2],
                        height:v.points[3],
                        confidence:v.confidence,
                        tag:v.tag
                    })
                })
                tx({
                    f:'trigger',
                    id:d.id,
                    ke:d.ke,
                    details:{
                        plug:config.plug,
                        name:'yolo',
                        reason:'object',
                        matrices:mats,
                        imgHeight:d.mon.detector_scale_y,
                        imgWidth:d.mon.detector_scale_x
                    }
                })
            }
            delete(s.callbacks[callbackId])
            // Was exec('rm -rf ' + path): spawning a shell with an
            // interpolated path is slower and unsafe if ke/id ever carry
            // shell metacharacters; remove the temp file directly instead.
            fs.unlink(d.dir+d.tmpFile,function(){})
        })
    })
}
// Timestamped system logger. Calls with no second argument are ignored;
// logging happens only when conf.json enables systemLog.
s.systemLog=function(q,w,e){
    if(w===undefined){return}
    if(config.systemLog===true){
        return console.log(moment().format(),q,w||'',e||'')
    }
}
// Central dispatcher for messages arriving from Shinobi on the "f"
// channel. d = payload, cn = socket connection (host mode only),
// tx = transmit function back toward Shinobi.
s.MainEventController=function(d,cn,tx){
    switch(d.f){
        // host mode: validate the shared key before serving a client
        case'init_plugin_as_host':
            if(!cn){
                console.log('No CN',d)
                return
            }
            if(d.key!==config.key){
                console.log(new Date(),'Plugin Key Mismatch',cn.request.connection.remoteAddress,d)
                cn.emit('init',{ok:false})
                cn.disconnect()
            }else{
                console.log(new Date(),'Plugin Connected to Client',cn.request.connection.remoteAddress)
                cn.emit('init',{ok:true,plug:config.plug,notice:config.notice,type:config.type})
            }
            break;
        // drop any partially assembled frame for this monitor
        case'init_monitor':
            if(s.group[d.ke]&&s.group[d.ke][d.id]){
                delete(s.group[d.ke][d.id].buffer)
            }
            break;
        // accumulate JPEG chunks; a chunk ending in FF D9 (JPEG EOI
        // marker) completes the frame, which is then sent for detection
        case'frame':
            try{
                if(!s.group[d.ke]){
                    s.group[d.ke]={}
                }
                if(!s.group[d.ke][d.id]){
                    s.group[d.ke][d.id]={
                        sendToPython : s.createCameraBridgeToPython(d.ke+d.id)
                    }
                }
                if(!s.group[d.ke][d.id].buffer){
                    s.group[d.ke][d.id].buffer=[d.frame];
                }else{
                    s.group[d.ke][d.id].buffer.push(d.frame)
                }
                if(d.frame[d.frame.length-2] === 0xFF && d.frame[d.frame.length-1] === 0xD9){
                    s.detectObject(Buffer.concat(s.group[d.ke][d.id].buffer),d,tx)
                    s.group[d.ke][d.id].buffer=null;
                }
            }catch(err){
                if(err){
                    s.systemLog(err)
                    delete(s.group[d.ke][d.id].buffer)
                }
            }
            break;
    }
}
server.listen(config.hostPort);
//web pages and plugin api
app.get('/', function (req, res) {
    res.end('<b>'+config.plug+'</b> for Shinobi is running')
});
//Connector to Shinobi
// "host" mode: this plugin accepts socket.io connections from Shinobi.
// Otherwise it runs as a client and dials out to the configured master.
if(config.mode==='host'){
    //start plugin as host
    var io = require('socket.io')(server);
    io.attach(server);
    s.connectedClients={};
    io.on('connection', function (cn) {
        s.connectedClients[cn.id]={id:cn.id}
        // tx() stamps each outbound frame with this plugin's key and name
        s.connectedClients[cn.id].tx = function(data){
            data.pluginKey=config.key;data.plug=config.plug;
            return io.to(cn.id).emit('ocv',data);
        }
        cn.on('f',function(d){
            s.MainEventController(d,cn,s.connectedClients[cn.id].tx)
        });
        cn.on('disconnect',function(d){
            delete(s.connectedClients[cn.id])
        })
    });
}else{
    //start plugin as client
    if(!config.host){config.host='localhost'}
    var io = socketIoClient('ws://'+config.host+':'+config.port);//connect to master
    // s.cx() stamps each outbound frame with this plugin's key and name
    s.cx=function(x){x.pluginKey=config.key;x.plug=config.plug;return io.emit('ocv',x)}
    io.on('connect',function(d){
        s.cx({f:'init',plug:config.plug,notice:config.notice,type:config.type});
    })
    io.on('disconnect',function(d){
        io.connect();
    })
    io.on('f',function(d){
        s.MainEventController(d,null,s.cx)
    })
}
//Start Python Controller
// Pending detection callbacks keyed by request id; resolved when the
// Python daemon echoes the id back on its "f" channel.
s.callbacks = {}
// Open a dedicated socket.io connection to the local Python daemon for
// one monitor (uniqueId = groupKey + monitorId). Returns a send function
// that registers the supplied callback under data.id before emitting.
s.createCameraBridgeToPython = function(uniqueId){
    var pythonIo = socketIoClient('ws://localhost:'+config.pythonPort,{transports : ['websocket']});
    var sendToPython = function(data,callback){
        s.callbacks[data.id] = callback
        pythonIo.emit('f',data)
    }
    pythonIo.on('connect',function(d){
        s.debugLog(uniqueId+' is Connected from Python')
    })
    pythonIo.on('disconnect',function(d){
        s.debugLog(uniqueId+' is Disconnected from Python')
        // retry the connection after a short backoff
        setTimeout(function(){
            pythonIo.connect();
            s.debugLog(uniqueId+' is Attempting to Reconect to Python')
        },3000)
    })
    // route each reply to its registered one-shot callback
    pythonIo.on('f',function(d){
        if(s.callbacks[d.id]){
            s.callbacks[d.id](d.data)
            delete(s.callbacks[d.id])
        }
    })
    return sendToPython
}
//Start Python Daemon
process.env.PYTHONUNBUFFERED = 1;
// Spawn the YOLO Python daemon via bootPy.sh and watch its output.
// s.isPythonRunning flips to true once the daemon prints "Done!" on
// stderr, after which frames may be forwarded to it.
s.createPythonProcess = function(){
    s.isPythonRunning = false
    s.pythonScript = spawn('sh',[__dirname+'/bootPy.sh',config.pythonScript,__dirname]);
    var onStdErr = function (data) {
        s.debugLog('Python ERR')
        data = data.toString()
        s.debugLog(data)
        if(data.indexOf('Done!') > -1){
            console.log('PYTHON READY')
            s.isPythonRunning = true
            // after startup, stderr only needs plain debug logging
            onStdErr = function(data){
                s.debugLog(data.toString())
            }
        }
    }
    // Bug fix: the original registered `onStdErr` directly, so the
    // reassignment above never took effect (the emitter keeps the first
    // function reference). Dispatching through a wrapper makes the
    // handler swap work as intended.
    s.pythonScript.stderr.on('data',function(data){
        onStdErr(data)
    });
    s.pythonScript.stdout.on('data', function (data) {
        s.debugLog('Python OUT')
        s.debugLog(data.toString())
    });
    s.pythonScript.on('close', function () {
        s.debugLog('Python CLOSED')
    });
}
s.createPythonProcess()

2
plugins/tensorflow/.gitignore vendored Normal file
View file

@ -0,0 +1,2 @@
conf.json
cascades

View file

@ -0,0 +1,20 @@
#!/bin/bash
# Shinobi Tensorflow plugin installer: downloads the Inception5h model
# and installs the plugin's npm dependencies.
mkdir -p data/inception
chmod -R 777 data
wget https://cdn.shinobi.video/weights/inception5h.zip -O inception5h.zip
unzip inception5h.zip -d data/inception
# NOTE(review): this queries dpkg for "opencv_version", which looks like a
# binary name rather than a package name -- confirm the intended package.
if [ $(dpkg-query -W -f='${Status}' opencv_version 2>/dev/null | grep -c "ok installed") -eq 0 ]; then
    # Backticks must be escaped inside double quotes, otherwise the shell
    # performs command substitution on `opencv4nodejs`.
    echo "Shinobi - Do you want to let the \`opencv4nodejs\` npm package install OpenCV?"
    echo "Only do this if you do not have OpenCV already or will not use a GPU (Hardware Acceleration)."
    echo "(y)es or (N)o"
    read nodejsinstall
    if [ "$nodejsinstall" = "y" ] || [ "$nodejsinstall" = "Y" ]; then
        export OPENCV4NODEJS_DISABLE_AUTOBUILD=0
    else
        export OPENCV4NODEJS_DISABLE_AUTOBUILD=1
    fi
else
    export OPENCV4NODEJS_DISABLE_AUTOBUILD=1
fi
npm install opencv4nodejs moment express canvas@1.6 --unsafe-perm

View file

@ -0,0 +1,9 @@
{
"plug":"Tensorflow",
"host":"localhost",
"port":8080,
"hostPort":8082,
"key":"change_this_to_something_very_random____make_sure_to_match__/plugins/opencv/conf.json",
"mode":"client",
"type":"detector"
}

View file

@ -0,0 +1,94 @@
; Specify the path to the runtime data directory
runtime_dir = ${CMAKE_INSTALL_PREFIX}/share/openalpr/runtime_data
ocr_img_size_percent = 1.33333333
state_id_img_size_percent = 2.0
; Calibrating your camera improves detection accuracy in cases where vehicle plates are captured at a steep angle
; Use the openalpr-utils-calibrate utility to calibrate your fixed camera to adjust for an angle
; Once done, update the prewarp config with the values obtained from the tool
prewarp =
; detection will ignore plates that are too large. This is a good efficiency technique to use if the
; plates are going to be a fixed distance away from the camera (e.g., you will never see plates that fill
; up the entire image)
max_plate_width_percent = 100
max_plate_height_percent = 100
; detection_iteration_increase is the percentage that the LBP frame increases each iteration.
; It must be greater than 1.0. A value of 1.01 means increase by 1%, 1.10 increases it by 10% each time.
; So a 1% increase would be ~10x slower than 10% to process, but it has a higher chance of landing
; directly on the plate and getting a strong detection
detection_iteration_increase = 1.1
; The minimum detection strength determines how sure the detection algorithm must be before signaling that
; a plate region exists. Technically this corresponds to LBP nearest neighbors (e.g., how many detections
; are clustered around the same area). For example, 2 = very lenient, 9 = very strict.
detection_strictness = 3
; The detection doesn't necessarily need an extremely high resolution image in order to detect plates
; Using a smaller input image should still find the plates and will do it faster
; Tweaking the max_detection_input values will resize the input image if it is larger than these sizes
; max_detection_input_width/height are specified in pixels
max_detection_input_width = 1280
max_detection_input_height = 720
; detector is the technique used to find license plate regions in an image. Value can be set to
; lbpcpu - default LBP-based detector uses the system CPU
; lbpgpu - LBP-based detector that uses Nvidia GPU to increase recognition speed.
; lbpopencl - LBP-based detector that uses OpenCL GPU to increase recognition speed. Requires OpenCV 3.0
; morphcpu - Experimental detector that detects white rectangles in an image. Does not require training.
detector = lbpgpu
; If set to true, all results must match a postprocess text pattern if a pattern is available.
; If not, the result is disqualified.
must_match_pattern = 0
; Bypasses plate detection. If this is set to 1, the library assumes that each region provided is a likely plate area.
skip_detection = 0
; Specifies the full path to an image file that constrains the detection area. Only the plate regions allowed through the mask
; will be analyzed. The mask image must match the resolution of your image to be analyzed. The mask is black and white.
; Black areas will be ignored, white areas will be searched. An empty value means no mask (scan the entire image)
detection_mask_image =
; OpenALPR can scan the same image multiple times with different randomization. Setting this to a value larger than
; 1 may increase accuracy, but will increase processing time linearly (e.g., analysis_count = 3 is 3x slower)
analysis_count = 1
; OpenALPR detects high-contrast plate crops and uses an alternative edge detection technique. Setting this to 0.0
; would classify ALL images as high-contrast, setting it to 1.0 would classify no images as high-contrast.
contrast_detection_threshold = 0.3
max_plate_angle_degrees = 15
ocr_min_font_point = 6
; Minimum OCR confidence percent to consider.
postprocess_min_confidence = 65
; Any OCR character lower than this will also add an equally likely
; chance that the character is incorrect and will be skipped. Value is a confidence percent
postprocess_confidence_skip_level = 80
debug_general = 0
debug_timing = 0
debug_detector = 0
debug_prewarp = 0
debug_state_id = 0
debug_plate_lines = 0
debug_plate_corners = 0
debug_char_segment = 0
debug_char_analysis = 0
debug_color_filter = 0
debug_ocr = 0
debug_postprocess = 0
debug_show_images = 0
debug_pause_on_frame = 0

View file

@ -0,0 +1,502 @@
//
// Shinobi - OpenCV Plugin
// Copyright (C) 2016-2025 Moe Alam, moeiscool
//
// # Donate
//
// If you like what I am doing here and want me to continue please consider donating :)
// PayPal : paypal@m03.ca
//
// Last-resort guard: log unexpected errors instead of letting the plugin
// process die.
process.on('uncaughtException', function (err) {
    console.error('uncaughtException',err);
});
var fs=require('fs');
var cv=require('opencv4nodejs');
var exec = require('child_process').exec;
var moment = require('moment');
var Canvas = require('canvas');
var express = require('express');
const path = require('path');
var http = require('http'),
app = express(),
server = http.createServer(app);
var config=require('./conf.json');
// Fill in safe defaults for any settings missing from conf.json.
if(!config.port){config.port=8080}
if(!config.hostPort){config.hostPort=8082}
if(config.systemLog===undefined){config.systemLog=true}
if(config.cascadesDir===undefined){config.cascadesDir=__dirname+'/cascades/'}
if(config.alprConfig===undefined){config.alprConfig=__dirname+'/openalpr.conf'}
// Global plugin state: per-group/monitor working data, resolved
// directories, a Windows flag, and the discovered cascade cache.
s={
    group:{},
    dir:{
        cascades : config.cascadesDir
    },
    isWin:(process.platform==='win32'),
    foundCascades : {
    }
}
//default stream folder check
// Prefer the RAM-backed /dev/shm on non-Windows systems; fall back to a
// local ./streams directory when the chosen location does not exist.
if(!config.streamDir){
    if(s.isWin===false){
        config.streamDir='/dev/shm'
    }else{
        config.streamDir=config.windowsTempDir
    }
    if(!fs.existsSync(config.streamDir)){
        config.streamDir=__dirname+'/streams/'
    }else{
        config.streamDir+='/streams/'
    }
}
s.dir.streams=config.streamDir;
//streams dir
if(!fs.existsSync(s.dir.streams)){
    fs.mkdirSync(s.dir.streams);
}
//cascades dir
if(!fs.existsSync(s.dir.cascades)){
    fs.mkdirSync(s.dir.cascades);
}
// Generate a random alphanumeric token of length x (default 10).
s.gid=function(x){
    var charSet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
    var length = x || 10;
    var token = "";
    while(token.length < length){
        token += charSet.charAt(Math.floor(Math.random() * charSet.length));
    }
    return token;
};
// Scan the cascades directory for *.xml files, cache the list of names
// (extension stripped) on s.cascadesInDir, and pass it to `callback`.
s.findCascades=function(callback){
    fs.readdir(s.dir.cascades,function(err,files){
        // Bug fix: the original ignored `err`, so a failed readdir left
        // `files` undefined and the forEach below threw. Log and fall
        // back to an empty listing instead.
        if(err){
            s.systemLog(err)
            files=[]
        }
        var foundCascades=[]
        files.forEach(function(cascade){
            if(cascade.indexOf('.xml')>-1){
                foundCascades.push(cascade.replace('.xml',''))
            }
        })
        s.cascadesInDir=foundCascades;
        callback(foundCascades)
    })
}
// warm the cascade cache at boot
s.findCascades(function(){
    //get cascades
})
// Run OpenALPR on the supplied JPEG and report any recognized plates to
// Shinobi as a "trigger" event. The image is written to the shared stream
// directory, scanned with the `alpr` CLI, then removed.
// buffer : complete JPEG bytes; d : frame/monitor descriptor; tx :
// transmit function toward Shinobi.
s.detectLicensePlate=function(buffer,d,tx){
    // country code for ALPR; defaults to 'us' when unset
    if(!d.mon.detector_lisence_plate_country||d.mon.detector_lisence_plate_country===''){
        d.mon.detector_lisence_plate_country='us'
    }
    d.tmpFile=s.gid(5)+'.jpg'
    if(!fs.existsSync(s.dir.streams)){
        fs.mkdirSync(s.dir.streams);
    }
    d.dir=s.dir.streams+d.ke+'/'
    if(!fs.existsSync(d.dir)){
        fs.mkdirSync(d.dir);
    }
    d.dir=s.dir.streams+d.ke+'/'+d.id+'/'
    if(!fs.existsSync(d.dir)){
        fs.mkdirSync(d.dir);
    }
    fs.writeFile(d.dir+d.tmpFile,buffer,function(err){
        if(err) return s.systemLog(err);
        exec('alpr -j --config '+config.alprConfig+' -c '+d.mon.detector_lisence_plate_country+' '+d.dir+d.tmpFile,{encoding:'utf8'},(err, scan, stderr) => {
            if(err){
                s.systemLog(err);
            }else{
                // alpr may prefix its JSON with a CUDA banner; strip it
                try{
                    scan=JSON.parse(scan.replace('--(!)Loaded CUDA classifier','').trim())
                }catch(err){
                    if(!scan||!scan.results){
                        return s.systemLog(scan,err);
                    }
                }
                if(scan.results.length>0){
                    scan.plates=[]
                    scan.mats=[]
                    scan.results.forEach(function(v){
                        v.candidates.forEach(function(g,n){
                            if(v.candidates[n].matches_template)
                                delete(v.candidates[n].matches_template)
                        })
                        scan.plates.push({coordinates:v.coordinates,candidates:v.candidates,confidence:v.confidence,plate:v.plate})
                        // derive box width/height from the quad's corner distances
                        var width = Math.sqrt( Math.pow(v.coordinates[1].x - v.coordinates[0].x, 2) + Math.pow(v.coordinates[1].y - v.coordinates[0].y, 2));
                        var height = Math.sqrt( Math.pow(v.coordinates[2].x - v.coordinates[1].x, 2) + Math.pow(v.coordinates[2].y - v.coordinates[1].y, 2))
                        scan.mats.push({
                            x:v.coordinates[0].x,
                            y:v.coordinates[0].y,
                            width:width,
                            height:height,
                            tag:v.plate
                        })
                    })
                    tx({f:'trigger',id:d.id,ke:d.ke,details:{split:true,plug:config.plug,name:'licensePlate',reason:'object',matrices:scan.mats,imgHeight:d.mon.detector_scale_y,imgWidth:d.mon.detector_scale_x,frame:d.base64}})
                }
            }
            // clean up the temporary snapshot
            exec('rm -rf '+d.dir+d.tmpFile,{encoding:'utf8'})
        })
    })
}
// Classify the supplied JPEG with the TensorFlow Inception model through
// OpenCV's DNN module and report labels above the confidence floor to
// Shinobi. Optionally runs the license-plate pass first.
// buffer : complete JPEG bytes; d : frame/monitor descriptor; tx :
// transmit function toward Shinobi.
s.detectObject=function(buffer,d,tx){
    //detect license plate?
    if(d.mon.detector_lisence_plate==="1"){
        s.detectLicensePlate(buffer,d,tx)
    }
    cv.imdecodeAsync(buffer,(err,im) => {
        if(err){
            console.log(err)
            return
        }
        if (!cv.xmodules.dnn) {
            throw new Error('exiting: opencv4nodejs compiled without dnn module');
        }
        // Perf fix: load the network and label list once and cache them;
        // the original re-read both files from disk and rebuilt the net
        // on every single frame.
        if(!s.inception){
            // replace with path where you unzipped inception model
            const inceptionModelPath = __dirname+'/data/inception';
            const modelFile = path.resolve(inceptionModelPath, 'tensorflow_inception_graph.pb');
            const classNamesFile = path.resolve(inceptionModelPath, 'imagenet_comp_graph_label_strings.txt');
            if (!fs.existsSync(modelFile) || !fs.existsSync(classNamesFile)) {
                console.log('could not find inception model');
                console.log('download the model from: https://cdn.shinobi.video/weights/inception5h.zip');
                throw new Error('exiting');
            }
            s.inception = {
                // class labels, one per line
                classNames : fs.readFileSync(classNamesFile).toString().split('\n'),
                // tensorflow inception model
                net : cv.readNetFromTensorflow(modelFile)
            };
        }
        const classNames = s.inception.classNames;
        const net = s.inception.net;
        // inception model works with 224 x 224 images, so we resize
        // our input images and pad the image with white pixels to
        // make the images have the same width and height
        const maxImgDim = 224;
        const white = new cv.Vec(255, 255, 255);
        const imgResized = im.resizeToMax(maxImgDim).padToSquare(white);
        // network accepts blobs as input
        const inputBlob = cv.blobFromImage(imgResized);
        net.setInput(inputBlob);
        // forward pass input through entire network, will return
        // classification result as 1xN Mat with confidences of each class
        const outputBlob = net.forward();
        // find all labels with a minimum confidence
        const minConfidence = 0.05;
        const locations =
            outputBlob
            .threshold(minConfidence, 1, cv.THRESH_BINARY)
            .convertTo(cv.CV_8U)
            .findNonZero();
        const result =
            locations.map(pt => ({
                confidence: parseInt(outputBlob.at(0, pt.x) * 100) / 100,
                className: classNames[pt.x]
            }))
            // sort result by confidence
            .sort((r0, r1) => r1.confidence - r0.confidence)
            .map(res => `${res.className} (${res.confidence})`);
        console.log(result)
        if(result.length > 0) {
            // Bug fix: the original emitted through s.cx, which is only
            // defined in client mode (host mode crashed). `tx` is the
            // correct per-connection transmit function and equals s.cx
            // in client mode.
            tx({
                f:'trigger',
                id:d.id,
                ke:d.ke,
                name:'tensorflow',
                details:{
                    plug:'tensorflow',
                    name:'tensorflow',
                    reason:'object',
                    matrices : result
                    // confidence:d.average
                },
                imgHeight:d.mon.detector_scale_y,
                imgWidth:d.mon.detector_scale_x
            })
        }
    })
}
// Timestamped system logger; active only when conf.json enables
// systemLog. Missing trailing arguments are logged as empty strings.
s.systemLog=function(q,w,e){
    if(config.systemLog!==true){return}
    return console.log(moment().format(),q,w||'',e||'')
}
// Frame-difference motion detection for one configured region (`cord`)
// of monitor d. Maintains per-region canvases: the current frame is drawn,
// blended against the previous frame, and the mean brightness of the
// difference mask is compared to the region's sensitivity. On motion it
// either forwards the frame to object detection or emits a trigger.
s.blenderRegion=function(d,cord,tx){
    d.width = d.image.width;
    d.height = d.image.height;
    // lazily build the region's canvas, clipped to its polygon
    if(!s.group[d.ke][d.id].canvas[cord.name]){
        if(!cord.sensitivity||isNaN(cord.sensitivity)){
            cord.sensitivity=d.mon.detector_sensitivity;
        }
        s.group[d.ke][d.id].canvas[cord.name] = new Canvas(d.width,d.height);
        s.group[d.ke][d.id].canvasContext[cord.name] = s.group[d.ke][d.id].canvas[cord.name].getContext('2d');
        s.group[d.ke][d.id].canvasContext[cord.name].fillStyle = '#000';
        s.group[d.ke][d.id].canvasContext[cord.name].fillRect( 0, 0,d.width,d.height);
        if(cord.points&&cord.points.length>0){
            s.group[d.ke][d.id].canvasContext[cord.name].beginPath();
            for (var b = 0; b < cord.points.length; b++){
                cord.points[b][0]=parseFloat(cord.points[b][0]);
                cord.points[b][1]=parseFloat(cord.points[b][1]);
                if(b===0){
                    s.group[d.ke][d.id].canvasContext[cord.name].moveTo(cord.points[b][0],cord.points[b][1]);
                }else{
                    s.group[d.ke][d.id].canvasContext[cord.name].lineTo(cord.points[b][0],cord.points[b][1]);
                }
            }
            s.group[d.ke][d.id].canvasContext[cord.name].clip();
        }
    }
    if(!s.group[d.ke][d.id].canvasContext[cord.name]){
        return
    }
    s.group[d.ke][d.id].canvasContext[cord.name].drawImage(d.image, 0, 0, d.width, d.height);
    if(!s.group[d.ke][d.id].blendRegion[cord.name]){
        s.group[d.ke][d.id].blendRegion[cord.name] = new Canvas(d.width, d.height);
        s.group[d.ke][d.id].blendRegionContext[cord.name] = s.group[d.ke][d.id].blendRegion[cord.name].getContext('2d');
    }
    var sourceData = s.group[d.ke][d.id].canvasContext[cord.name].getImageData(0, 0, d.width, d.height);
    // create an image if the previous image doesn't exist
    if (!s.group[d.ke][d.id].lastRegionImageData[cord.name]) s.group[d.ke][d.id].lastRegionImageData[cord.name] = s.group[d.ke][d.id].canvasContext[cord.name].getImageData(0, 0, d.width, d.height);
    // create a ImageData instance to receive the blended result
    var blendedData = s.group[d.ke][d.id].canvasContext[cord.name].createImageData(d.width, d.height);
    // blend the 2 images
    s.differenceAccuracy(blendedData.data,sourceData.data,s.group[d.ke][d.id].lastRegionImageData[cord.name].data);
    // draw the result in a canvas
    s.group[d.ke][d.id].blendRegionContext[cord.name].putImageData(blendedData, 0, 0);
    // store the current webcam image
    s.group[d.ke][d.id].lastRegionImageData[cord.name] = sourceData;
    blendedData = s.group[d.ke][d.id].blendRegionContext[cord.name].getImageData(0, 0, d.width, d.height);
    // average the RGB channels across all pixels of the difference mask
    var i = 0;
    d.average = 0;
    while (i < (blendedData.data.length * 0.25)) {
        d.average += (blendedData.data[i * 4] + blendedData.data[i * 4 + 1] + blendedData.data[i * 4 + 2]);
        ++i;
    }
    d.average = (d.average / (blendedData.data.length * 0.25))*10;
    if (d.average > parseFloat(cord.sensitivity)){
        // motion detected: escalate to object detection when configured,
        // otherwise report the motion event directly
        if(d.mon.detector_use_detect_object==="1"&&d.mon.detector_second!=='1'){
            var buffer=s.group[d.ke][d.id].canvas[cord.name].toBuffer();
            s.detectObject(buffer,d,tx)
        }else{
            tx({f:'trigger',id:d.id,ke:d.ke,details:{split:true,plug:config.plug,name:cord.name,reason:'motion',confidence:d.average,frame:d.base64}})
        }
    }
    s.group[d.ke][d.id].canvasContext[cord.name].clearRect(0, 0, d.width, d.height);
    s.group[d.ke][d.id].blendRegionContext[cord.name].clearRect(0, 0, d.width, d.height);
}
// Convert a Blob into a Node Buffer using FileReader, delivering the
// result via the node-style callback cb(err, buffer).
// NOTE(review): Blob/FileReader are browser APIs; under plain Node
// FileReader is undefined, so this helper looks unreachable here --
// confirm whether it is still used.
function blobToBuffer (blob, cb) {
    if (typeof Blob === 'undefined' || !(blob instanceof Blob)) {
        throw new Error('first argument must be a Blob')
    }
    if (typeof cb !== 'function') {
        throw new Error('second argument must be a function')
    }
    var reader = new FileReader()
    function onLoadEnd (e) {
        reader.removeEventListener('loadend', onLoadEnd, false)
        if (e.error) cb(e.error)
        else cb(null, Buffer.from(reader.result))
    }
    reader.addEventListener('loadend', onLoadEnd, false)
    reader.readAsArrayBuffer(blob)
}
// Branch-free absolute value over the 32-bit integer interpretation of
// `value` (the bitwise operators truncate fractional inputs to int32).
function fastAbs(value) {
    var sign = value >> 31;
    return (value ^ sign) - sign;
}
// Binarize a pixel difference: values above 0x15 (21) become full white
// (0xFF), everything else black (0).
function threshold(value) {
    if (value > 0x15) {
        return 0xFF;
    }
    return 0;
}
// Per-pixel frame differencing: writes a black/white motion mask into
// `target`. Each pixel's grayscale delta between the two RGBA buffers is
// thresholded to 0xFF or 0; the alpha channel is forced opaque.
// Returns null when the buffers differ in length (nothing is written).
s.differenceAccuracy=function(target, data1, data2) {
    // use strict comparison (original used loose !=) and hoist the bound
    if (data1.length !== data2.length) return null;
    var pixelCount = data1.length * 0.25;
    for (var i = 0; i < pixelCount; ++i) {
        var average1 = (data1[4 * i] + data1[4 * i + 1] + data1[4 * i + 2]) / 3;
        var average2 = (data2[4 * i] + data2[4 * i + 1] + data2[4 * i + 2]) / 3;
        var diff = threshold(fastAbs(average1 - average2));
        target[4 * i] = diff;
        target[4 * i + 1] = diff;
        target[4 * i + 2] = diff;
        target[4 * i + 3] = 0xFF;
    }
}
// Run motion detection over every configured region of monitor d, plus a
// synthetic FULL_FRAME region when detector_frame is enabled. Drops the
// decoded image reference afterwards to free memory.
s.checkAreas=function(d,tx){
    if(!s.group[d.ke][d.id].cords){
        if(!d.mon.cords){d.mon.cords={}}
        s.group[d.ke][d.id].cords=Object.values(d.mon.cords);
    }
    if(d.mon.detector_frame==='1'){
        // full-frame rectangle covering the whole detector image
        d.mon.cords.frame={name:'FULL_FRAME',s:d.mon.detector_sensitivity,points:[[0,0],[0,d.image.height],[d.image.width,d.image.height],[d.image.width,0]]};
        s.group[d.ke][d.id].cords.push(d.mon.cords.frame);
    }
    for (var b = 0; b < s.group[d.ke][d.id].cords.length; b++){
        // bail out entirely on a missing region entry
        if(!s.group[d.ke][d.id].cords[b]){return}
        s.blenderRegion(d,s.group[d.ke][d.id].cords[b],tx)
    }
    delete(d.image)
}
// Central dispatcher for messages arriving from Shinobi on the "f"
// channel. d = payload, cn = socket connection (host mode only),
// tx = transmit function back toward Shinobi.
s.MainEventController=function(d,cn,tx){
    switch(d.f){
        // re-scan the cascades directory and report the list to the group
        case'refreshPlugins':
            s.findCascades(function(cascades){
                s.cx({f:'s.tx',data:{f:'detector_cascade_list',cascades:cascades},to:'GRP_'+d.ke})
            })
            break;
        // report the cached cascade list without re-scanning
        case'readPlugins':
            s.cx({f:'s.tx',data:{f:'detector_cascade_list',cascades:s.cascadesInDir},to:'GRP_'+d.ke})
            break;
        // host mode: validate the shared key before serving a client
        case'init_plugin_as_host':
            if(!cn){
                console.log('No CN',d)
                return
            }
            if(d.key!==config.key){
                console.log(new Date(),'Plugin Key Mismatch',cn.request.connection.remoteAddress,d)
                cn.emit('init',{ok:false})
                cn.disconnect()
            }else{
                console.log(new Date(),'Plugin Connected to Client',cn.request.connection.remoteAddress)
                cn.emit('init',{ok:true,plug:config.plug,notice:config.notice,type:config.type})
            }
            break;
        // reset all per-monitor detection state
        case'init_monitor':
            if(s.group[d.ke]&&s.group[d.ke][d.id]){
                s.group[d.ke][d.id].canvas={}
                s.group[d.ke][d.id].canvasContext={}
                s.group[d.ke][d.id].blendRegion={}
                s.group[d.ke][d.id].blendRegionContext={}
                s.group[d.ke][d.id].lastRegionImageData={}
                s.group[d.ke][d.id].numberOfTriggers=0
                delete(s.group[d.ke][d.id].cords)
                delete(s.group[d.ke][d.id].buffer)
            }
            break;
        case'init_aws_push':
            // console.log('init_aws')
            s.group[d.ke][d.id].aws={links:[],complete:0,total:d.total,videos:[],tx:tx}
            break;
        // accumulate JPEG chunks; a chunk ending in FF D9 (JPEG EOI
        // marker) completes the frame, which is then analyzed
        case'frame':
            try{
                if(!s.group[d.ke]){
                    s.group[d.ke]={}
                }
                if(!s.group[d.ke][d.id]){
                    s.group[d.ke][d.id]={
                        canvas:{},
                        canvasContext:{},
                        lastRegionImageData:{},
                        blendRegion:{},
                        blendRegionContext:{},
                    }
                }
                if(!s.group[d.ke][d.id].buffer){
                    s.group[d.ke][d.id].buffer=[d.frame];
                }else{
                    s.group[d.ke][d.id].buffer.push(d.frame)
                }
                if(d.frame[d.frame.length-2] === 0xFF && d.frame[d.frame.length-1] === 0xD9){
                    s.group[d.ke][d.id].buffer=Buffer.concat(s.group[d.ke][d.id].buffer);
                    try{
                        d.mon.detector_cascades=JSON.parse(d.mon.detector_cascades)
                    }catch(err){
                    }
                    if(d.mon.detector_frame_save==="1"){
                        d.base64=s.group[d.ke][d.id].buffer.toString('base64')
                    }
                    // secondary pass: object detection only
                    if(d.mon.detector_second==='1'&&d.objectOnly===true){
                        s.detectObject(s.group[d.ke][d.id].buffer,d,tx)
                    }else{
                        // motion-first path: build the region canvases and
                        // let checkAreas() decide whether to trigger
                        if((d.mon.detector_pam !== '1' && d.mon.detector_use_motion === "1") || d.mon.detector_use_detect_object !== "1"){
                            if((typeof d.mon.cords ==='string')&&d.mon.cords.trim()===''){
                                d.mon.cords=[]
                            }else{
                                try{
                                    d.mon.cords=JSON.parse(d.mon.cords)
                                }catch(err){
                                    // console.log('d.mon.cords',err,d)
                                }
                            }
                            s.group[d.ke][d.id].cords=Object.values(d.mon.cords);
                            d.mon.cords=d.mon.cords;
                            d.image = new Canvas.Image;
                            if(d.mon.detector_scale_x===''||d.mon.detector_scale_y===''){
                                s.systemLog('Must set detector image size')
                                return
                            }else{
                                d.image.width=d.mon.detector_scale_x;
                                d.image.height=d.mon.detector_scale_y;
                            }
                            d.width=d.image.width;
                            d.height=d.image.height;
                            d.image.onload = function() {
                                s.checkAreas(d,tx);
                            }
                            d.image.src = s.group[d.ke][d.id].buffer;
                        }else{
                            // object-detection-only path
                            s.detectObject(s.group[d.ke][d.id].buffer,d,tx)
                        }
                    }
                    s.group[d.ke][d.id].buffer=null;
                }
            }catch(err){
                if(err){
                    s.systemLog(err)
                    delete(s.group[d.ke][d.id].buffer)
                }
            }
            break;
    }
}
server.listen(config.hostPort);
//web pages and plugin api
app.get('/', function (req, res) {
    res.end('<b>'+config.plug+'</b> for Shinobi is running')
});
//Connector to Shinobi
// "host" mode: this plugin accepts socket.io connections from Shinobi.
// Otherwise it runs as a client and dials out to the configured master.
if(config.mode==='host'){
    //start plugin as host
    var io = require('socket.io')(server);
    io.attach(server);
    s.connectedClients={};
    io.on('connection', function (cn) {
        s.connectedClients[cn.id]={id:cn.id}
        // tx() stamps each outbound frame with this plugin's key and name
        s.connectedClients[cn.id].tx = function(data){
            data.pluginKey=config.key;data.plug=config.plug;
            return io.to(cn.id).emit('ocv',data);
        }
        cn.on('f',function(d){
            s.MainEventController(d,cn,s.connectedClients[cn.id].tx)
        });
        cn.on('disconnect',function(d){
            delete(s.connectedClients[cn.id])
        })
    });
}else{
    //start plugin as client
    if(!config.host){config.host='localhost'}
    var io = require('socket.io-client')('ws://'+config.host+':'+config.port);//connect to master
    // s.cx() stamps each outbound frame with this plugin's key and name
    s.cx=function(x){x.pluginKey=config.key;x.plug=config.plug;return io.emit('ocv',x)}
    io.on('connect',function(d){
        s.cx({f:'init',plug:config.plug,notice:config.notice,type:config.type});
    })
    io.on('disconnect',function(d){
        io.connect();
    })
    io.on('f',function(d){
        s.MainEventController(d,null,s.cx)
    })
}

119
web/libs/css/slick.css Normal file
View file

@ -0,0 +1,119 @@
/* Slider */
/* Core structural styles for the Slick carousel: the outer slider
   container, the clipping list, the sliding track, and the slides. */
.slick-slider
{
    position: relative;
    display: block;
    box-sizing: border-box;
    -webkit-user-select: none;
    -moz-user-select: none;
    -ms-user-select: none;
    user-select: none;
    -webkit-touch-callout: none;
    -khtml-user-select: none;
    -ms-touch-action: pan-y;
    touch-action: pan-y;
    -webkit-tap-highlight-color: transparent;
}
/* The viewport that clips the sliding track. */
.slick-list
{
    position: relative;
    display: block;
    overflow: hidden;
    margin: 0;
    padding: 0;
}
.slick-list:focus
{
    outline: none;
}
.slick-list.dragging
{
    cursor: pointer;
    cursor: hand;
}
/* Promote track and list to their own compositing layer. */
.slick-slider .slick-track,
.slick-slider .slick-list
{
    -webkit-transform: translate3d(0, 0, 0);
    -moz-transform: translate3d(0, 0, 0);
    -ms-transform: translate3d(0, 0, 0);
    -o-transform: translate3d(0, 0, 0);
    transform: translate3d(0, 0, 0);
}
/* The strip that holds every slide and is translated to animate. */
.slick-track
{
    position: relative;
    top: 0;
    left: 0;
    display: block;
    margin-left: auto;
    margin-right: auto;
}
.slick-track:before,
.slick-track:after
{
    display: table;
    content: '';
}
.slick-track:after
{
    clear: both;
}
.slick-loading .slick-track
{
    visibility: hidden;
}
/* Individual slides: hidden until the carousel is initialized. */
.slick-slide
{
    display: none;
    float: left;
    height: 100%;
    min-height: 1px;
}
[dir='rtl'] .slick-slide
{
    float: right;
}
.slick-slide img
{
    display: block;
}
.slick-slide.slick-loading img
{
    display: none;
}
.slick-slide.dragging img
{
    pointer-events: none;
}
.slick-initialized .slick-slide
{
    display: block;
}
.slick-loading .slick-slide
{
    visibility: hidden;
}
.slick-vertical .slick-slide
{
    display: block;
    height: auto;
    border: 1px solid transparent;
}
.slick-arrow.slick-hidden {
    display: none;
}

1
web/libs/js/libde265.min.js vendored Normal file

File diff suppressed because one or more lines are too long

1
web/libs/js/slick.min.js vendored Normal file

File diff suppressed because one or more lines are too long

100
web/libs/less/slick.less Normal file
View file

@ -0,0 +1,100 @@
/* Slider */
/* LESS source for the Slick carousel's core structural styles. */
.slick-slider {
    position: relative;
    display: block;
    box-sizing: border-box;
    -webkit-touch-callout: none;
    -webkit-user-select: none;
    -khtml-user-select: none;
    -moz-user-select: none;
    -ms-user-select: none;
    user-select: none;
    -ms-touch-action: pan-y;
    touch-action: pan-y;
    -webkit-tap-highlight-color: transparent;
}
/* The viewport that clips the sliding track. */
.slick-list {
    position: relative;
    overflow: hidden;
    display: block;
    margin: 0;
    padding: 0;
    &:focus {
        outline: none;
    }
    &.dragging {
        cursor: pointer;
        cursor: hand;
    }
}
/* Promote track and list to their own compositing layer. */
.slick-slider .slick-track,
.slick-slider .slick-list {
    -webkit-transform: translate3d(0, 0, 0);
    -moz-transform: translate3d(0, 0, 0);
    -ms-transform: translate3d(0, 0, 0);
    -o-transform: translate3d(0, 0, 0);
    transform: translate3d(0, 0, 0);
}
/* The strip that holds every slide and is translated to animate. */
.slick-track {
    position: relative;
    left: 0;
    top: 0;
    display: block;
    margin-left: auto;
    margin-right: auto;
    &:before,
    &:after {
        content: "";
        display: table;
    }
    &:after {
        clear: both;
    }
    .slick-loading & {
        visibility: hidden;
    }
}
/* Individual slides: hidden until the carousel is initialized. */
.slick-slide {
    float: left;
    height: 100%;
    min-height: 1px;
    [dir="rtl"] & {
        float: right;
    }
    img {
        display: block;
    }
    &.slick-loading img {
        display: none;
    }
    display: none;
    &.dragging img {
        pointer-events: none;
    }
    .slick-initialized & {
        display: block;
    }
    .slick-loading & {
        visibility: hidden;
    }
    .slick-vertical & {
        display: block;
        height: auto;
        border: 1px solid transparent;
    }
}
.slick-arrow.slick-hidden {
    display: none;
}

100
web/libs/scss/slick.scss Normal file
View file

@ -0,0 +1,100 @@
/* Slider */
/* SCSS source for the Slick carousel's core structural styles. */
.slick-slider {
    position: relative;
    display: block;
    box-sizing: border-box;
    -webkit-touch-callout: none;
    -webkit-user-select: none;
    -khtml-user-select: none;
    -moz-user-select: none;
    -ms-user-select: none;
    user-select: none;
    -ms-touch-action: pan-y;
    touch-action: pan-y;
    -webkit-tap-highlight-color: transparent;
}
/* The viewport that clips the sliding track. */
.slick-list {
    position: relative;
    overflow: hidden;
    display: block;
    margin: 0;
    padding: 0;
    &:focus {
        outline: none;
    }
    &.dragging {
        cursor: pointer;
        cursor: hand;
    }
}
/* Promote track and list to their own compositing layer. */
.slick-slider .slick-track,
.slick-slider .slick-list {
    -webkit-transform: translate3d(0, 0, 0);
    -moz-transform: translate3d(0, 0, 0);
    -ms-transform: translate3d(0, 0, 0);
    -o-transform: translate3d(0, 0, 0);
    transform: translate3d(0, 0, 0);
}
/* The strip that holds every slide and is translated to animate. */
.slick-track {
    position: relative;
    left: 0;
    top: 0;
    display: block;
    margin-left: auto;
    margin-right: auto;
    &:before,
    &:after {
        content: "";
        display: table;
    }
    &:after {
        clear: both;
    }
    .slick-loading & {
        visibility: hidden;
    }
}
/* Individual slides: hidden until the carousel is initialized. */
.slick-slide {
    float: left;
    height: 100%;
    min-height: 1px;
    [dir="rtl"] & {
        float: right;
    }
    img {
        display: block;
    }
    &.slick-loading img {
        display: none;
    }
    display: none;
    &.dragging img {
        pointer-events: none;
    }
    .slick-initialized & {
        display: block;
    }
    .slick-loading & {
        visibility: hidden;
    }
    .slick-vertical & {
        display: block;
        height: auto;
        border: 1px solid transparent;
    }
}
.slick-arrow.slick-hidden {
    display: none;
}

51
web/pages/cycle.ejs Normal file
View file

@ -0,0 +1,51 @@
<%
// Pick the externally reachable port: the SSL port when the page was
// served over https, the plain HTTP port otherwise.
if(config.ssl&&config.ssl.port&&data.protocol==='https'){
    data.port=config.ssl.port
}else{
    data.port=config.port
}
// Build the base URL for assets; standard ports (80/443) are omitted.
if(!data.port||data.port===''||data.port==80||data.port==443){data.url=baseUrl}else{data.url=baseUrl+':'+data.port}
// "relative" addon mode strips the host so assets load same-origin.
if(data.addon && data.addon.indexOf('relative')>-1){
    data.url=''
}
%>
<script>
// Expose the server-side view data to the page; '$' is the default group.
var data = <%- JSON.stringify(data) %>
if(!data.group || data.group === ''){
    data.group = '$'
}
</script>
<link rel="stylesheet" href="<%=data.url%>/libs/css/font-awesome.min.css">
<link rel="stylesheet" type="text/css" href="<%=data.url%>/libs/css/slick.css"/>
<style>
.cycle-item {
}
</style>
<script src="<%=data.url%>/libs/js/jquery.min.js"></script>
<script type="text/javascript" src="<%=data.url%>/libs/js/slick.min.js"></script>
<div id="slick"></div>
<script type="text/javascript">
$.cycle = {};
$(function () {
    // Carousel tuning comes from the query string with sane defaults.
    // NOTE(review): query values are interpolated unescaped (<%%- %>) into
    // this script, and monitor fields are concatenated into HTML below --
    // looks injectable; confirm upstream sanitization before exposure.
    var show = <%-query.show || 1 %>;
    var scroll = <%-query.scroll || 1 %>;
    var playSpeed = <%-query.playSpeed || 3000 %>;
    var cycleEl = $('#slick')
    var tmp = ''
    // One embedded fullscreen monitor stream per slide.
    $.each(<%- JSON.stringify(monitors) %>,function(n,monitor){
        tmp += '<iframe auth="'+data.auth+'" ke="'+data.ke+'" mid="'+monitor.mid+'" class="cycle-item" src="'+data.url+'/'+data.auth+'/embed/'+monitor.ke+'/'+monitor.mid+'/fullscreen|jquery|relative"></iframe>'
    })
    cycleEl.html(tmp)
    cycleEl.slick({
        slidesToShow: show,
        slidesToScroll: scroll,
        autoplay: true,
        autoplaySpeed: playSpeed,
    })
    .on('beforeChange', function(event, slick, currentSlide, nextSlide){
        console.log(nextSlide)
    })
});
</script>