mirror of
https://gitlab.com/Shinobi-Systems/ShinobiCE.git
synced 2025-03-09 15:40:15 +00:00
Shinobi CE officially lands on Gitlab
This commit is contained in:
commit
f1406d4eec
431 changed files with 118157 additions and 0 deletions
4
plugins/.gitignore
vendored
Normal file
4
plugins/.gitignore
vendored
Normal file
|
@ -0,0 +1,4 @@
|
|||
opencv-python
|
||||
ccv
|
||||
stemkoski
|
||||
variantai
|
3
plugins/child/.gitignore
vendored
Normal file
3
plugins/child/.gitignore
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
conf.json
|
||||
events
|
||||
frames
|
392
plugins/child/child.js
Normal file
392
plugins/child/child.js
Normal file
|
@ -0,0 +1,392 @@
|
|||
var os = require('os');
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
var mysql = require('mysql');
|
||||
var moment = require('moment');
|
||||
var request = require("request");
|
||||
var spawn = require('child_process').spawn;
|
||||
var exec = require('child_process').exec;
|
||||
var execSync = require('child_process').execSync;
|
||||
var connectionTester = require('connection-tester');
|
||||
var config = require('./conf.json');
|
||||
|
||||
exec("ps aux | grep -ie ffmpeg | awk '{print $2}' | xargs kill -9");//kill any ffmpeg running
|
||||
process.on('uncaughtException', function (err) {
|
||||
console.error('uncaughtException',err);
|
||||
});
|
||||
s={connected:false,child_node:true,platform:os.platform(),group:{}};
|
||||
|
||||
//connect to master
|
||||
io = require('socket.io-client')('ws://'+config.ws);
|
||||
//spawn conatiner
|
||||
s.spawns={};
|
||||
//emulate master sql query
|
||||
sql={
|
||||
query:function(x,y,z){
|
||||
io.emit('c',{f:'sql',query:x,values:y});if(typeof z==='function'){z();}
|
||||
}
|
||||
}
|
||||
//get this nodes cpu usage
|
||||
s.cpuUsage=function(e){
|
||||
k={}
|
||||
switch(s.platform){
|
||||
case'win32':
|
||||
k.cmd="@for /f \"skip=1\" %p in ('wmic cpu get loadpercentage') do @echo %p%"
|
||||
break;
|
||||
case'darwin':
|
||||
k.cmd="ps -A -o %cpu | awk '{s+=$1} END {print s}'";
|
||||
break;
|
||||
case'linux':
|
||||
k.cmd='LANG=C top -b -n 2 | grep "^'+config.cpuUsageMarker+'" | awk \'{print $2}\' | tail -n1';
|
||||
break;
|
||||
}
|
||||
if(config.customCpuCommand){
|
||||
exec(config.customCpuCommand,{encoding:'utf8',detached: true},function(err,d){
|
||||
if(s.isWin===true) {
|
||||
d = d.replace(/(\r\n|\n|\r)/gm, "").replace(/%/g, "")
|
||||
}
|
||||
e(d)
|
||||
});
|
||||
} else if(k.cmd){
|
||||
exec(k.cmd,{encoding:'utf8',detached: true},function(err,d){
|
||||
if(s.isWin===true){
|
||||
d=d.replace(/(\r\n|\n|\r)/gm,"").replace(/%/g,"")
|
||||
}
|
||||
e(d)
|
||||
});
|
||||
} else{
|
||||
e(0)
|
||||
}
|
||||
}
|
||||
setInterval(function(){
|
||||
s.cpuUsage(function(cpu){
|
||||
io.emit('c',{f:'cpu',cpu:parseFloat(cpu)})
|
||||
})
|
||||
},2000);
|
||||
//interact with server functions
|
||||
s.cx=function(x){io.emit('c',x)}
|
||||
//emulate master socket emitter
|
||||
s.tx=function(x,y){s.cx({f:'s.tx',data:x,to:y})}
|
||||
//emulate master logger
|
||||
s.log=function(x,y){console.log(y);s.cx({f:'s.log',data:s.init('clean',x),to:y})}
|
||||
//emulate master camera function
|
||||
s.camera=function(x,y){s.cx({f:'camera',mode:x,data:y})}
|
||||
|
||||
//load camera controller vars
|
||||
s.nameToTime=function(x){x=x.split('.')[0].split('T'),x[1]=x[1].replace(/-/g,':');x=x.join(' ');return x;}
|
||||
s.ratio=function(width,height,ratio){ratio = width / height;return ( Math.abs( ratio - 4 / 3 ) < Math.abs( ratio - 16 / 9 ) ) ? '4:3' : '16:9';}
|
||||
s.gid=function(x){
|
||||
if(!x){x=10};var t = "";var p = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
|
||||
for( var i=0; i < x; i++ )
|
||||
t += p.charAt(Math.floor(Math.random() * p.length));
|
||||
return t;
|
||||
};
|
||||
s.moment=function(e,x){if(!e){e=new Date};if(!x){x='YYYY-MM-DDTHH-mm-ss'};return moment(e).utcOffset('-0800').format(x)}
|
||||
s.kill=function(x,e,p){
|
||||
if(e&&s.group[e.ke].mon[e.id].record){
|
||||
clearTimeout(s.group[e.ke].mon[e.id].record.capturing);
|
||||
if(s.group[e.ke].mon[e.id].record.request&&s.group[e.ke].mon[e.id].record.request.abort){s.group[e.ke].mon[e.id].record.request.abort();delete(s.group[e.ke].mon[e.id].record.request);}
|
||||
};
|
||||
if(!x||x===1){return};if(!x.stdin){return};p=x.pid;x.stdin.pause();setTimeout(function(){x.kill('SIGTERM');delete(x);setTimeout(function(){exec('kill -9 '+p)},1000)},1000)
|
||||
}
|
||||
s.cameraVals=function(e){
|
||||
e.t=Object.keys(s.group[e.ke].mon[e.id]);e.a={};
|
||||
e.t.forEach(function(n){
|
||||
if(s.group[e.ke].mon[e.id][n] instanceof Object){e.a[n]=s.group[e.ke].mon[e.id][n]};
|
||||
});
|
||||
return e.a;
|
||||
}
|
||||
//directories
|
||||
s.group={};
|
||||
s.dir={videos:__dirname+'/videos/',frames:__dirname+'/frames/'};
|
||||
if (!fs.existsSync(s.dir.frames)){
|
||||
fs.mkdirSync(s.dir.frames);
|
||||
}
|
||||
if (!fs.existsSync(s.dir.videos)){
|
||||
fs.mkdirSync(s.dir.videos);
|
||||
}
|
||||
////Camera Controller
|
||||
s.init=function(x,e){
|
||||
switch(x){
|
||||
case 0://camera
|
||||
if(!s.group[e.ke]){s.group[e.ke]={}};
|
||||
if(!s.group[e.ke].mon){s.group[e.ke].mon={}}
|
||||
if(!s.group[e.ke].mon[e.mid]){s.group[e.ke].mon[e.mid]={}}
|
||||
if(!s.group[e.ke].mon[e.mid].watch){s.group[e.ke].mon[e.mid].watch={}};
|
||||
if(e.type==='record'){e.record=1}else{e.record=0}
|
||||
if(!s.group[e.ke].mon[e.mid].record){s.group[e.ke].mon[e.mid].record={yes:e.record}};
|
||||
if(!s.group[e.ke].mon[e.mid].started){s.group[e.ke].mon[e.mid].started={}};
|
||||
if(!s.group[e.ke].mon[e.mid].running){s.group[e.ke].mon[e.mid].running={}};
|
||||
break;
|
||||
case'clean':
|
||||
if(e instanceof Object){
|
||||
x={keys:Object.keys(e),ar:{}};
|
||||
x.keys.forEach(function(v){
|
||||
if(v!=='record'&&v!=='spawn'&&v!=='running'&&(typeof e[v]!=='function')){x.ar[v]=e[v];}
|
||||
});
|
||||
return x.ar;
|
||||
}
|
||||
break;
|
||||
case'url':
|
||||
//build a complete url from pieces
|
||||
e.authd='';
|
||||
if(e.details.muser&&e.details.muser!==''&&e.host.indexOf('@')===-1) {
|
||||
e.authd=e.details.muser+':'+e.details.mpass+'@';
|
||||
}
|
||||
if(e.port==80&&e.details.port_force!=='1'){e.porty=''}else{e.porty=':'+e.port}
|
||||
e.url=e.protocol+'://'+e.authd+e.host+e.porty+e.path;return e.url;
|
||||
break;
|
||||
case'url_no_path':
|
||||
e.authd='';
|
||||
if(!e.details.muser){e.details.muser=''}
|
||||
if(!e.details.mpass){e.details.mpass=''}
|
||||
if(e.details.muser!==''&&e.host.indexOf('@')===-1) {
|
||||
e.authd=e.details.muser+':'+e.details.mpass+'@';
|
||||
}
|
||||
if(e.port==80&&e.details.port_force!=='1'){e.porty=''}else{e.porty=':'+e.port}
|
||||
e.url=e.protocol+'://'+e.authd+e.host+e.porty;return e.url;
|
||||
break;
|
||||
}
|
||||
if(typeof e.callback==='function'){setTimeout(function(){e.callback();delete(e.callback);},2000);}
|
||||
}
|
||||
s.video=function(x,e){
|
||||
if(!e){e={}};
|
||||
if(e.mid){e.id=e.mid};
|
||||
switch(x){
|
||||
case'delete':
|
||||
e.dir=s.dir.videos+e.ke+'/'+e.id+'/';
|
||||
e.save=[e.id,e.ke,s.nameToTime(e.filename),0];
|
||||
sql.query('DELETE FROM Videos WHERE `mid`=? AND `ke`=? AND `time`=? AND `status`=?',e.save)
|
||||
s.tx({f:'video_delete',reason:'Camera Error',filename:e.filename+'.'+e.ext,mid:e.id,ke:e.ke,time:s.nameToTime(e.filename),end:moment().format('YYYY-MM-DD HH:mm:ss')},'GRP_'+e.ke);
|
||||
if(fs.existsSync(e.dir+e.filename+'.'+e.ext)){
|
||||
return fs.unlink(e.dir+e.filename+'.'+e.ext);
|
||||
}
|
||||
break;
|
||||
case'close':
|
||||
e.dir=s.dir.videos+e.ke+'/'+e.id+'/';
|
||||
console.log(e.dir+e.filename+'.'+e.ext)
|
||||
if(fs.existsSync(e.dir+e.filename+'.'+e.ext)){
|
||||
e.filesize=fs.statSync(e.dir+e.filename+'.'+e.ext)["size"];
|
||||
if((e.filesize/100000).toFixed(2)>0.25){
|
||||
e.save=[e.filesize,e.frames,1,e.id,e.ke,s.nameToTime(e.filename)];
|
||||
sql.query('UPDATE Videos SET `size`=?,`frames`=?,`status`=? WHERE `mid`=? AND `ke`=? AND `time`=?',e.save)
|
||||
fs.readFile(e.dir+e.filename+'.'+e.ext,function (err,data) {
|
||||
s.cx({f:'created_file',mid:e.id,ke:e.ke,created_file:data,filename:e.filename+'.'+e.ext,d:s.init('clean',e)});
|
||||
s.tx({f:'video_build_success',filename:e.filename+'.'+e.ext,mid:e.id,ke:e.ke,time:s.nameToTime(e.filename),size:e.filesize,end:s.moment(new Date,'YYYY-MM-DD HH:mm:ss')},'GRP_'+e.ke);
|
||||
});
|
||||
}else{
|
||||
s.video('delete',e);
|
||||
s.log(e,{type:'File Corrupt',msg:{ffmpeg:s.group[e.ke].mon[e.mid].ffmpeg,filesize:(e.filesize/100000).toFixed(2)}})
|
||||
}
|
||||
}else{
|
||||
s.video('delete',e);
|
||||
s.log(e,{type:'File Not Exist',msg:'Cannot save non existant file. Something went wrong.',ffmpeg:s.group[e.ke].mon[e.id].ffmpeg})
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
s.ffmpeg=function(e,x){
|
||||
if(!x){x={tmp:''}}
|
||||
// if(!e.details.cutoff||e.details.cutoff===''){x.cutoff=15}else{x.cutoff=parseFloat(e.details.cutoff)};if(isNaN(x.cutoff)===true){x.cutoff=15}
|
||||
// x.segment=' -f segment -strftime 1 -segment_time '+(60*x.cutoff)+' -segment_format '+e.ext
|
||||
if(!e.details.timestamp||e.details.timestamp==1){x.time=' -vf drawtext=fontfile=/usr/share/fonts/truetype/freefont/FreeSans.ttf:text=\'%{localtime}\':x=(w-tw)/2:y=0:fontcolor=white:box=1:boxcolor=0x00000000@1:fontsize=10';}else{x.time=''}
|
||||
switch(e.ext){
|
||||
case'mp4':
|
||||
x.vcodec='libx265';x.acodec='libfaac';
|
||||
if(e.details.vcodec&&e.details.vcodec!==''){x.vcodec=e.details.vcodec}
|
||||
break;
|
||||
case'webm':
|
||||
x.acodec='libvorbis',x.vcodec='libvpx';
|
||||
break;
|
||||
}
|
||||
if(e.details.acodec&&e.details.acodec!==''){x.acodec=e.details.acodec}
|
||||
if(x.acodec==='none'){x.acodec=''}else{x.acodec=' -acodec '+x.acodec}
|
||||
if(x.vcodec!=='none'){x.vcodec=' -vcodec '+x.vcodec}
|
||||
if(e.fps&&e.fps!==''){x.framerate=' -r '+e.fps}else{x.framerate=''}
|
||||
if(e.details.vf&&e.details.vf!==''){
|
||||
if(x.time===''){x.vf=' -vf '}else{x.vf=','}
|
||||
x.vf+=e.details.vf;
|
||||
x.time+=x.vf;
|
||||
}
|
||||
if(e.details.svf&&e.details.svf!==''){x.svf=' -vf '+e.details.svf;}else{x.svf='';}
|
||||
// if(e.details.svf){'-vf "rotate=45*(PI/180)'}
|
||||
switch(e.type){
|
||||
case'socket':case'jpeg':case'pipe':
|
||||
if(!x.vf||x.vf===','){x.vf=''}
|
||||
x.tmp='-loglevel warning -pattern_type glob -f image2pipe'+x.framerate+' -vcodec mjpeg -i -'+x.vcodec+x.time+x.framerate+' -use_wallclock_as_timestamps 1 -q:v 1'+x.vf+' '+e.dir+e.filename+'.'+e.ext;
|
||||
break;
|
||||
case'mjpeg':
|
||||
if(e.mode=='record'){
|
||||
x.watch=x.vcodec+x.time+' -r 10 -s '+e.width+'x'+e.height+' -use_wallclock_as_timestamps 1 -q:v 1 '+e.dir+e.filename+'.'+e.ext+''
|
||||
}else{
|
||||
x.watch='';
|
||||
};
|
||||
x.tmp='-loglevel warning -reconnect 1 -f mjpeg -i '+e.url+''+x.watch+' -f image2pipe'+x.svf+' -s '+e.ratio+' pipe:1';
|
||||
break;
|
||||
case'h264':
|
||||
if(!x.vf||x.vf===','){x.vf=''}
|
||||
if(e.mode=='record'){
|
||||
x.watch=x.vcodec+x.framerate+x.acodec+' -movflags frag_keyframe+empty_moov -s '+e.width+'x'+e.height+' -use_wallclock_as_timestamps 1 -q:v 1'+x.vf+' '+e.dir+e.filename+'.'+e.ext
|
||||
}else{
|
||||
x.watch='';
|
||||
};
|
||||
x.tmp='-loglevel warning -i '+e.url+' -stimeout 2000'+x.watch+' -f image2pipe'+x.svf+' -s '+e.ratio+' pipe:1';
|
||||
break;
|
||||
case'local':
|
||||
if(e.mode=='record'){
|
||||
x.watch=x.vcodec+x.time+x.framerate+x.acodec+' -movflags frag_keyframe+empty_moov -s '+e.width+'x'+e.height+' -use_wallclock_as_timestamps 1 '+e.dir+e.filename+'.'+e.ext
|
||||
}else{
|
||||
x.watch='';
|
||||
};
|
||||
x.tmp='-loglevel warning -i '+e.path+''+x.watch+' -f image2pipe'+x.svf+' -s '+e.ratio+' pipe:1';
|
||||
break;
|
||||
}
|
||||
s.group[e.ke].mon[e.mid].ffmpeg=x.tmp;
|
||||
return spawn('ffmpeg',x.tmp.split(' '));
|
||||
}
|
||||
|
||||
//child functions
|
||||
var cn={};
|
||||
io.on('connect', function(d){
|
||||
console.log('connected');
|
||||
io.emit('c',{f:'init',socketKey:config.key,u:{name:config.name}})
|
||||
});
|
||||
io.on('c',function(d){
|
||||
console.log(d.f);
|
||||
switch(d.f){
|
||||
case'init_success':
|
||||
s.connected=true;
|
||||
s.other_helpers=d.child_helpers;
|
||||
break;
|
||||
case'kill':
|
||||
s.init(0,d.d);
|
||||
s.kill(s.group[d.d.ke].mon[d.d.id].spawn,d.d)
|
||||
break;
|
||||
case'sync':
|
||||
s.init(0,d.sync);
|
||||
Object.keys(d.sync).forEach(function(v){
|
||||
s.group[d.sync.ke].mon[d.sync.mid][v]=d.sync[v];
|
||||
});
|
||||
break;
|
||||
case'delete_file'://delete video
|
||||
d.dir=s.dir.videos+d.ke+'/'+d.mid+'/'+d.file;
|
||||
if(fs.existsSync(d.dir)){
|
||||
fs.unlink(d.dir);
|
||||
}
|
||||
break;
|
||||
case'close'://close video
|
||||
s.video('close',d.d);
|
||||
break;
|
||||
case'spawn'://start video
|
||||
s.init(0,d.d);
|
||||
s.group[d.d.ke].mon[d.d.id]=d.mon;
|
||||
if(!s.group[d.d.ke].mon_conf){s.group[d.d.ke].mon_conf={}}
|
||||
if(!s.group[d.d.ke].mon_conf[d.d.id]){s.group[d.d.ke].mon_conf[d.d.id]=s.init('clean',d.d);}
|
||||
if(s.group[d.d.ke].mon[d.d.id].spawn&&s.group[d.d.ke].mon[d.d.id].spawn.stdin){return}
|
||||
if(d.d.mode==='record'){
|
||||
console.log(s.group[d.d.ke].mon[d.d.id])
|
||||
s.group[d.d.ke].mon[d.d.id].record.yes=1;
|
||||
d.d.dir=s.dir.videos+d.d.ke+'/';
|
||||
if (!fs.existsSync(d.d.dir)){
|
||||
fs.mkdirSync(d.d.dir);
|
||||
}
|
||||
d.d.dir=s.dir.videos+d.d.ke+'/'+d.d.id+'/';
|
||||
if (!fs.existsSync(d.d.dir)){
|
||||
fs.mkdirSync(d.d.dir);
|
||||
}
|
||||
}else{
|
||||
s.group[d.d.ke].mon[d.d.mid].record.yes=0;
|
||||
}
|
||||
if(d.d.mode==='record'||d.d.type==='mjpeg'||d.d.type==='h264'||d.d.type==='local'){
|
||||
s.group[d.d.ke].mon[d.d.id].spawn = s.ffmpeg(d.d);
|
||||
s.log(d.d,{type:'FFMPEG Process Starting',msg:{cmd:s.group[d.d.ke].mon[d.d.id].ffmpeg}});
|
||||
}
|
||||
d.d.frames=0;
|
||||
switch(d.d.type){
|
||||
case'jpeg':
|
||||
if(!d.d.details.sfps||d.d.details.sfps===''){
|
||||
d.d.details.sfps=parseFloat(d.d.details.sfps);
|
||||
if(isNaN(d.d.details.sfps)){d.d.details.sfps=1}
|
||||
}
|
||||
d.d.captureOne=function(f){
|
||||
s.group[d.d.ke].mon[d.d.id].record.request=request({url:d.d.url,method:'GET',encoding: null,timeout:3000},function(er,data){
|
||||
++d.d.frames; if(s.group[d.d.ke].mon[d.d.id].spawn&&s.group[d.d.ke].mon[d.d.id].spawn.stdin){
|
||||
if(er){
|
||||
++d.d.error_count;
|
||||
s.log(d.d,{type:'Snapshot Error',msg:er});
|
||||
return;
|
||||
}
|
||||
if(d.d.mode==='record'&&s.group[d.d.ke].mon[d.d.id].spawn&&s.group[d.d.ke].mon[d.d.id].spawn.stdin){
|
||||
s.group[d.d.ke].mon[d.d.id].spawn.stdin.write(data.body);
|
||||
}
|
||||
if(s.group[d.d.ke].mon[d.d.id].watch&&Object.keys(s.group[d.d.ke].mon[d.d.id].watch).length>0){
|
||||
s.tx({f:'monitor_frame',ke:d.d.ke,id:d.d.id,time:s.moment(),frame:data.body.toString('base64'),frame_format:'b64'},'MON_'+d.d.id);
|
||||
}
|
||||
s.group[d.d.ke].mon[d.d.id].record.capturing=setTimeout(function(){d.d.captureOne()},1000/d.d.details.sfps);
|
||||
clearTimeout(d.d.timeOut),d.d.timeOut=setTimeout(function(){d.d.error_count=0;},3000)
|
||||
}
|
||||
}).on('error', function(err){
|
||||
// if(s.group[d.d.ke]&&s.group[d.d.ke].mon[d.d.id]&&s.group[d.d.ke].mon[d.d.id].record&&s.group[d.d.ke].mon[d.d.id].record.request){s.group[d.d.ke].mon[d.d.id].record.request.abort();}
|
||||
clearTimeout(s.group[d.d.ke].mon[d.d.id].record.capturing);
|
||||
if(d.d.error_count>4){d.d.fn();return}
|
||||
d.d.captureOne();
|
||||
});
|
||||
}
|
||||
d.d.captureOne()
|
||||
break;
|
||||
case'mjpeg':case'h264'://case'socket':case'local':
|
||||
if(!s.group[d.d.ke]||!s.group[d.d.ke].mon[d.d.id]){s.init(0,d.d)}
|
||||
if(s.group[d.d.ke].mon[d.d.id].spawn){
|
||||
s.group[d.d.ke].mon[d.d.id].spawn.on('error',function(er){d.d.error({type:'Spawn Error',msg:er})})
|
||||
s.group[d.d.ke].mon[d.d.id].spawn.stdout.on('data',function(de){
|
||||
s.tx({f:'monitor_frame',ke:d.d.ke,id:d.d.id,time:s.moment(),frame:de.toString('base64'),frame_format:'b64'},'MON_'+d.d.id);
|
||||
});
|
||||
s.group[d.d.ke].mon[d.d.id].spawn.stderr.on('data',function(de){
|
||||
de=de.toString();
|
||||
d.d.chk=function(x){return de.indexOf(x)>-1;}
|
||||
switch(true){
|
||||
// case d.d.chk('av_interleaved_write_frame'):
|
||||
case d.d.chk('Connection timed out'):
|
||||
setTimeout(function(){s.log(d.d,{type:"Can't Connect",msg:'Retrying...'});d.d.error_fatal();},1000)//restart
|
||||
break;
|
||||
case d.d.chk('No pixel format specified'):
|
||||
s.log(d.d,{type:"FFMPEG STDERR",msg:{ffmpeg:s.group[d.d.ke].mon[d.d.id].ffmpeg,msg:de}})
|
||||
break;
|
||||
case d.d.chk('RTP: missed'):
|
||||
case d.d.chk('deprecated pixel format used, make sure you did set range correctly'):
|
||||
return
|
||||
break;
|
||||
case d.d.chk('No such file or directory'):
|
||||
case d.d.chk('Unable to open RTSP for listening'):
|
||||
case d.d.chk('timed out'):
|
||||
case d.d.chk('Invalid data found when processing input'):
|
||||
case d.d.chk('Immediate exit requested'):
|
||||
case d.d.chk('reset by peer'):
|
||||
if(d.d.frames===0&&x==='record'){s.video('delete',d.d)};
|
||||
break;
|
||||
}
|
||||
s.log(d.d,{type:"FFMPEG STDERR",msg:de})
|
||||
});
|
||||
}
|
||||
break;
|
||||
}
|
||||
break;
|
||||
case'video':
|
||||
s.video(d.d[0],d.d[1]);
|
||||
break;
|
||||
}
|
||||
});
|
||||
io.on('disconnect',function(d){
|
||||
s.connected=false;
|
||||
});
|
||||
|
||||
//web server
|
||||
if(config.port===undefined)config.port = 8080;
|
||||
if(config.ip===undefined||config.ip===''||config.ip.indexOf('0.0.0.0')>-1){config.ip='localhost'}else{config.bindip=config.ip};
|
||||
var childNodeHTTP = express();
|
||||
var childNodeServer = http.createServer(app);
|
||||
var childNodeWebsocket = new (require('socket.io'))()
|
||||
childNodeServer.listen(config.port,config.bindip,function(){
|
||||
console.log('SHINOBI CHILD NODE RUNNING ON PORT : '+config.port);
|
||||
});
|
||||
childNodeWebsocket.attach(childNodeServer);
|
6
plugins/child/conf.sample.json
Normal file
6
plugins/child/conf.sample.json
Normal file
|
@ -0,0 +1,6 @@
|
|||
{
|
||||
"name":"childMachine1",
|
||||
"ws":"192.168.1.48",
|
||||
"port":8080,
|
||||
"key":"3123asdasdf1dtj1hjk23sdfaasd12asdasddfdbtnkkfgvesra3asdsd3123afdsfqw345",
|
||||
}
|
13
plugins/darknet/INSTALL.sh
Normal file
13
plugins/darknet/INSTALL.sh
Normal file
|
@ -0,0 +1,13 @@
|
|||
#!/bin/bash
|
||||
ln -s /usr/local/cuda/targets/x86_64-linux/lib/libcurand.so /usr/local/lib/libcurand.so
|
||||
ln -s /usr/local/cuda/targets/x86_64-linux/lib/libcublas.so /usr/local/lib/libcublas.so
|
||||
ln -s /usr/local/cuda/targets/x86_64-linux/lib/libcudart.so /usr/local/lib/libcudart.so
|
||||
|
||||
git clone https://github.com/OrKoN/darknet
|
||||
rm darknet/Makefile
|
||||
cp modifiedMakefile darknet/Makefile
|
||||
cd darknet
|
||||
make OPENCV=1 GPU=1
|
||||
make install
|
||||
|
||||
npm install @moovel/yolo --unsafe-perm
|
9
plugins/darknet/conf.sample.json
Normal file
9
plugins/darknet/conf.sample.json
Normal file
|
@ -0,0 +1,9 @@
|
|||
{
|
||||
"plug":"OpenCV",
|
||||
"host":"localhost",
|
||||
"port":8080,
|
||||
"hostPort":8082,
|
||||
"key":"change_this_to_something_very_random____make_sure_to_match__/plugins/opencv/conf.json",
|
||||
"mode":"client",
|
||||
"type":"detector"
|
||||
}
|
119
plugins/darknet/modifiedMakefile
Normal file
119
plugins/darknet/modifiedMakefile
Normal file
|
@ -0,0 +1,119 @@
|
|||
GPU=0
|
||||
CUDNN=0
|
||||
OPENCV=1
|
||||
OPENMP=0
|
||||
DEBUG=0
|
||||
|
||||
PREFIX = /usr/local
|
||||
|
||||
ARCH= #-gencode arch=compute_20,code=[sm_20,sm_21] \
|
||||
-gencode arch=compute_30,code=sm_30 \
|
||||
-gencode arch=compute_35,code=sm_35 \
|
||||
-gencode arch=compute_50,code=[sm_50,compute_50] \
|
||||
-gencode arch=compute_52,code=[sm_52,compute_52]
|
||||
|
||||
# This is what I use, uncomment if you know your arch and want to specify
|
||||
# ARCH= -gencode arch=compute_52,code=compute_52
|
||||
|
||||
VPATH=./src/:./examples
|
||||
SLIB=libdarknet.so
|
||||
ALIB=libdarknet.a
|
||||
EXEC=darknet
|
||||
OBJDIR=./obj/
|
||||
|
||||
CC=gcc
|
||||
NVCC=nvcc
|
||||
AR=ar
|
||||
ARFLAGS=rcs
|
||||
OPTS=-Ofast
|
||||
LDFLAGS= -lm -pthread
|
||||
COMMON= -Iinclude/ -Isrc/
|
||||
CFLAGS=-Wall -Wno-unknown-pragmas -Wfatal-errors -fPIC
|
||||
|
||||
ifeq ($(OPENMP), 1)
|
||||
CFLAGS+= -fopenmp
|
||||
endif
|
||||
|
||||
ifeq ($(DEBUG), 1)
|
||||
OPTS=-O0 -g
|
||||
endif
|
||||
|
||||
CFLAGS+=$(OPTS)
|
||||
|
||||
ifeq ($(OPENCV), 1)
|
||||
COMMON+= -DOPENCV
|
||||
CFLAGS+= -DOPENCV
|
||||
LDFLAGS+= `pkg-config --libs opencv`
|
||||
COMMON+= `pkg-config --cflags opencv`
|
||||
endif
|
||||
|
||||
ifeq ($(GPU), 1)
|
||||
COMMON+= -DGPU -I/usr/local/cuda/include/
|
||||
CFLAGS+= -DGPU
|
||||
LDFLAGS+= -L/usr/local/cuda/lib64 -lcuda -lcudart -lcublas -lcurand
|
||||
endif
|
||||
|
||||
ifeq ($(CUDNN), 1)
|
||||
COMMON+= -DCUDNN
|
||||
CFLAGS+= -DCUDNN
|
||||
LDFLAGS+= -lcudnn
|
||||
endif
|
||||
|
||||
OBJ=gemm.o utils.o cuda.o deconvolutional_layer.o convolutional_layer.o list.o image.o activations.o im2col.o col2im.o blas.o crop_layer.o dropout_layer.o maxpool_layer.o softmax_layer.o data.o matrix.o network.o connected_layer.o cost_layer.o parser.o option_list.o detection_layer.o route_layer.o box.o normalization_layer.o avgpool_layer.o layer.o local_layer.o shortcut_layer.o activation_layer.o rnn_layer.o gru_layer.o crnn_layer.o demo.o batchnorm_layer.o region_layer.o reorg_layer.o tree.o lstm_layer.o
|
||||
EXECOBJA=captcha.o lsd.o super.o voxel.o art.o tag.o cifar.o go.o rnn.o rnn_vid.o compare.o segmenter.o regressor.o classifier.o coco.o dice.o yolo.o detector.o writing.o nightmare.o swag.o darknet.o
|
||||
ifeq ($(GPU), 1)
|
||||
LDFLAGS+= -lstdc++
|
||||
OBJ+=convolutional_kernels.o deconvolutional_kernels.o activation_kernels.o im2col_kernels.o col2im_kernels.o blas_kernels.o crop_layer_kernels.o dropout_layer_kernels.o maxpool_layer_kernels.o network_kernels.o avgpool_layer_kernels.o
|
||||
endif
|
||||
|
||||
EXECOBJ = $(addprefix $(OBJDIR), $(EXECOBJA))
|
||||
OBJS = $(addprefix $(OBJDIR), $(OBJ))
|
||||
DEPS = $(wildcard src/*.h) Makefile include/darknet.h
|
||||
HEADERS = $(wildcard src/*.h)
|
||||
|
||||
#all: obj backup results $(SLIB) $(ALIB) $(EXEC)
|
||||
all: obj results $(SLIB) $(ALIB) $(EXEC)
|
||||
|
||||
|
||||
$(EXEC): $(EXECOBJ) $(ALIB)
|
||||
$(CC) $(COMMON) $(CFLAGS) $^ -o $@ $(LDFLAGS) $(ALIB)
|
||||
|
||||
$(ALIB): $(OBJS)
|
||||
$(AR) $(ARFLAGS) $@ $^
|
||||
|
||||
$(SLIB): $(OBJS)
|
||||
$(CC) $(CFLAGS) -shared $^ -o $@ $(LDFLAGS)
|
||||
|
||||
$(OBJDIR)%.o: %.c $(DEPS)
|
||||
$(CC) $(COMMON) $(CFLAGS) -c $< -o $@
|
||||
|
||||
$(OBJDIR)%.o: %.cu $(DEPS)
|
||||
$(NVCC) $(ARCH) $(COMMON) --compiler-options "$(CFLAGS)" -c $< -o $@
|
||||
|
||||
darknet.a: $(OBJS)
|
||||
ar rcs $@ $^
|
||||
|
||||
obj:
|
||||
mkdir -p obj
|
||||
backup:
|
||||
mkdir -p backup
|
||||
results:
|
||||
mkdir -p results
|
||||
|
||||
.PHONY: clean
|
||||
|
||||
clean:
|
||||
rm -rf $(OBJS) $(SLIB) $(ALIB) $(EXEC) $(EXECOBJ) darknet.a
|
||||
|
||||
.PHONY: install
|
||||
|
||||
install:
|
||||
mkdir -p $(PREFIX)/include/darknet
|
||||
cp libdarknet.a $(PREFIX)/lib/libdarknet.a
|
||||
cp ${HEADERS} include/darknet.h $(PREFIX)/include/darknet
|
||||
|
||||
.PHONY: uninstall
|
||||
|
||||
uninstall:
|
||||
rm -f $(PREFIX)/lib/libdarknet.a
|
||||
rm -rf $(PREFIX)/include/darknet
|
456
plugins/darknet/shinobi-darknet.js
Normal file
456
plugins/darknet/shinobi-darknet.js
Normal file
|
@ -0,0 +1,456 @@
|
|||
//
|
||||
// Shinobi - OpenCV Plugin
|
||||
// Copyright (C) 2016-2025 Moe Alam, moeiscool
|
||||
//
|
||||
// # Donate
|
||||
//
|
||||
// If you like what I am doing here and want me to continue please consider donating :)
|
||||
// PayPal : paypal@m03.ca
|
||||
//
|
||||
process.on('uncaughtException', function (err) {
|
||||
console.error('uncaughtException',err);
|
||||
});
|
||||
var fs=require('fs');
|
||||
var cv=require('opencv4nodejs');
|
||||
var exec = require('child_process').exec;
|
||||
var moment = require('moment');
|
||||
var Canvas = require('canvas');
|
||||
var express = require('express');
|
||||
var http = require('http'),
|
||||
app = express(),
|
||||
server = http.createServer(app);
|
||||
var config=require('./conf.json');
|
||||
if(!config.port){config.port=8080}
|
||||
if(!config.hostPort){config.hostPort=8082}
|
||||
if(config.systemLog===undefined){config.systemLog=true}
|
||||
if(config.cascadesDir===undefined){config.cascadesDir=__dirname+'/cascades/'}
|
||||
if(config.alprConfig===undefined){config.alprConfig=__dirname+'/openalpr.conf'}
|
||||
s={
|
||||
group:{},
|
||||
dir:{
|
||||
cascades : config.cascadesDir
|
||||
},
|
||||
isWin:(process.platform==='win32')
|
||||
}
|
||||
//default stream folder check
|
||||
if(!config.streamDir){
|
||||
if(s.isWin===false){
|
||||
config.streamDir='/dev/shm'
|
||||
}else{
|
||||
config.streamDir=config.windowsTempDir
|
||||
}
|
||||
if(!fs.existsSync(config.streamDir)){
|
||||
config.streamDir=__dirname+'/streams/'
|
||||
}else{
|
||||
config.streamDir+='/streams/'
|
||||
}
|
||||
}
|
||||
s.dir.streams=config.streamDir;
|
||||
//streams dir
|
||||
if(!fs.existsSync(s.dir.streams)){
|
||||
fs.mkdirSync(s.dir.streams);
|
||||
}
|
||||
s.gid=function(x){
|
||||
if(!x){x=10};var t = "";var p = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
|
||||
for( var i=0; i < x; i++ )
|
||||
t += p.charAt(Math.floor(Math.random() * p.length));
|
||||
return t;
|
||||
};
|
||||
s.findCascades=function(callback){
|
||||
var tmp={};
|
||||
tmp.foundCascades=[];
|
||||
fs.readdir(s.dir.cascades,function(err,files){
|
||||
files.forEach(function(cascade,n){
|
||||
if(cascade.indexOf('.xml')>-1){
|
||||
tmp.foundCascades.push(cascade.replace('.xml',''))
|
||||
}
|
||||
})
|
||||
s.cascadesInDir=tmp.foundCascades;
|
||||
callback(tmp.foundCascades)
|
||||
})
|
||||
}
|
||||
s.findCascades(function(){
|
||||
//get cascades
|
||||
})
|
||||
s.detectLicensePlate=function(buffer,d,tx){
|
||||
if(!d.mon.detector_lisence_plate_country||d.mon.detector_lisence_plate_country===''){
|
||||
d.mon.detector_lisence_plate_country='us'
|
||||
}
|
||||
d.tmpFile=s.gid(5)+'.jpg'
|
||||
if(!fs.existsSync(s.dir.streams)){
|
||||
fs.mkdirSync(s.dir.streams);
|
||||
}
|
||||
d.dir=s.dir.streams+d.ke+'/'
|
||||
if(!fs.existsSync(d.dir)){
|
||||
fs.mkdirSync(d.dir);
|
||||
}
|
||||
d.dir=s.dir.streams+d.ke+'/'+d.id+'/'
|
||||
if(!fs.existsSync(d.dir)){
|
||||
fs.mkdirSync(d.dir);
|
||||
}
|
||||
fs.writeFile(d.dir+d.tmpFile,buffer,function(err){
|
||||
if(err) return s.systemLog(err);
|
||||
exec('alpr -j --config '+config.alprConfig+' -c '+d.mon.detector_lisence_plate_country+' '+d.dir+d.tmpFile,{encoding:'utf8'},(err, scan, stderr) => {
|
||||
if(err){
|
||||
s.systemLog(err);
|
||||
}else{
|
||||
try{
|
||||
try{
|
||||
scan=JSON.parse(scan.replace('--(!)Loaded CUDA classifier','').trim())
|
||||
}catch(err){
|
||||
if(!scan||!scan.results){
|
||||
return s.systemLog(scan,err);
|
||||
}
|
||||
}
|
||||
// console.log('scan',scan)
|
||||
if(scan.results.length>0){
|
||||
scan.plates=[]
|
||||
scan.mats=[]
|
||||
scan.results.forEach(function(v){
|
||||
v.candidates.forEach(function(g,n){
|
||||
if(v.candidates[n].matches_template)
|
||||
delete(v.candidates[n].matches_template)
|
||||
})
|
||||
scan.plates.push({coordinates:v.coordinates,candidates:v.candidates,confidence:v.confidence,plate:v.plate})
|
||||
var width = Math.sqrt( Math.pow(v.coordinates[1].x - v.coordinates[0].x, 2) + Math.pow(v.coordinates[1].y - v.coordinates[0].y, 2));
|
||||
var height = Math.sqrt( Math.pow(v.coordinates[2].x - v.coordinates[1].x, 2) + Math.pow(v.coordinates[2].y - v.coordinates[1].y, 2))
|
||||
scan.mats.push({
|
||||
x:v.coordinates[0].x,
|
||||
y:v.coordinates[0].y,
|
||||
width:width,
|
||||
height:height,
|
||||
tag:v.plate
|
||||
})
|
||||
})
|
||||
tx({f:'trigger',id:d.id,ke:d.ke,details:{split:true,plug:config.plug,name:'licensePlate',reason:'object',matrices:scan.mats,imgHeight:d.mon.detector_scale_y,imgWidth:d.mon.detector_scale_x,frame:d.base64}})
|
||||
}
|
||||
}catch(err){
|
||||
s.systemLog(scan,err);
|
||||
}
|
||||
}
|
||||
exec('rm -rf '+d.dir+d.tmpFile,{encoding:'utf8'})
|
||||
})
|
||||
})
|
||||
}
|
||||
s.detectObject=function(buffer,d,tx){
|
||||
//detect license plate?
|
||||
if(d.mon.detector_lisence_plate==="1"){
|
||||
s.detectLicensePlate(buffer,d,tx)
|
||||
}
|
||||
//check selected opencv cascades
|
||||
var selectedCascades = Object.keys(d.mon.detector_cascades);
|
||||
if(selectedCascades.length > 0){
|
||||
cv.imdecodeAsync(buffer,(err,im) => {
|
||||
if(err){
|
||||
console.log(err)
|
||||
return
|
||||
}
|
||||
selectedCascades.forEach(function(cascade){
|
||||
var classifier = new cv.CascadeClassifier(s.dir.cascades+cascade+'.xml')
|
||||
var matrices = classifier.detectMultiScaleGpu(im).objects
|
||||
if(matrices.length > 0){
|
||||
matrices.forEach(function(v,n){
|
||||
v.centerX=v.width/2
|
||||
v.centerY=v.height/2
|
||||
v.centerXnoParent=v.x+(v.width/2)
|
||||
v.centerYnoParent=v.y+(v.height/2)
|
||||
})
|
||||
s.cx({
|
||||
f:'trigger',
|
||||
id:d.id,
|
||||
ke:d.ke,
|
||||
name:cascade,
|
||||
details:{
|
||||
plug:'built-in-opencv',
|
||||
name:cascade,
|
||||
reason:'object',
|
||||
matrices : matrices,
|
||||
confidence:d.average
|
||||
},
|
||||
imgHeight:d.mon.detector_scale_y,
|
||||
imgWidth:d.mon.detector_scale_x
|
||||
})
|
||||
}
|
||||
})
|
||||
});
|
||||
}
|
||||
}
|
||||
s.systemLog=function(q,w,e){
|
||||
if(!w){w=''}
|
||||
if(!e){e=''}
|
||||
if(config.systemLog===true){
|
||||
return console.log(moment().format(),q,w,e)
|
||||
}
|
||||
}
|
||||
|
||||
s.blenderRegion=function(d,cord,tx){
|
||||
d.width = d.image.width;
|
||||
d.height = d.image.height;
|
||||
if(!s.group[d.ke][d.id].canvas[cord.name]){
|
||||
if(!cord.sensitivity||isNaN(cord.sensitivity)){
|
||||
cord.sensitivity=d.mon.detector_sensitivity;
|
||||
}
|
||||
s.group[d.ke][d.id].canvas[cord.name] = new Canvas(d.width,d.height);
|
||||
s.group[d.ke][d.id].canvasContext[cord.name] = s.group[d.ke][d.id].canvas[cord.name].getContext('2d');
|
||||
s.group[d.ke][d.id].canvasContext[cord.name].fillStyle = '#000';
|
||||
s.group[d.ke][d.id].canvasContext[cord.name].fillRect( 0, 0,d.width,d.height);
|
||||
if(cord.points&&cord.points.length>0){
|
||||
s.group[d.ke][d.id].canvasContext[cord.name].beginPath();
|
||||
for (var b = 0; b < cord.points.length; b++){
|
||||
cord.points[b][0]=parseFloat(cord.points[b][0]);
|
||||
cord.points[b][1]=parseFloat(cord.points[b][1]);
|
||||
if(b===0){
|
||||
s.group[d.ke][d.id].canvasContext[cord.name].moveTo(cord.points[b][0],cord.points[b][1]);
|
||||
}else{
|
||||
s.group[d.ke][d.id].canvasContext[cord.name].lineTo(cord.points[b][0],cord.points[b][1]);
|
||||
}
|
||||
}
|
||||
s.group[d.ke][d.id].canvasContext[cord.name].clip();
|
||||
}
|
||||
}
|
||||
if(!s.group[d.ke][d.id].canvasContext[cord.name]){
|
||||
return
|
||||
}
|
||||
s.group[d.ke][d.id].canvasContext[cord.name].drawImage(d.image, 0, 0, d.width, d.height);
|
||||
if(!s.group[d.ke][d.id].blendRegion[cord.name]){
|
||||
s.group[d.ke][d.id].blendRegion[cord.name] = new Canvas(d.width, d.height);
|
||||
s.group[d.ke][d.id].blendRegionContext[cord.name] = s.group[d.ke][d.id].blendRegion[cord.name].getContext('2d');
|
||||
}
|
||||
var sourceData = s.group[d.ke][d.id].canvasContext[cord.name].getImageData(0, 0, d.width, d.height);
|
||||
// create an image if the previous image doesn<73>t exist
|
||||
if (!s.group[d.ke][d.id].lastRegionImageData[cord.name]) s.group[d.ke][d.id].lastRegionImageData[cord.name] = s.group[d.ke][d.id].canvasContext[cord.name].getImageData(0, 0, d.width, d.height);
|
||||
// create a ImageData instance to receive the blended result
|
||||
var blendedData = s.group[d.ke][d.id].canvasContext[cord.name].createImageData(d.width, d.height);
|
||||
// blend the 2 images
|
||||
s.differenceAccuracy(blendedData.data,sourceData.data,s.group[d.ke][d.id].lastRegionImageData[cord.name].data);
|
||||
// draw the result in a canvas
|
||||
s.group[d.ke][d.id].blendRegionContext[cord.name].putImageData(blendedData, 0, 0);
|
||||
// store the current webcam image
|
||||
s.group[d.ke][d.id].lastRegionImageData[cord.name] = sourceData;
|
||||
blendedData = s.group[d.ke][d.id].blendRegionContext[cord.name].getImageData(0, 0, d.width, d.height);
|
||||
var i = 0;
|
||||
d.average = 0;
|
||||
while (i < (blendedData.data.length * 0.25)) {
|
||||
d.average += (blendedData.data[i * 4] + blendedData.data[i * 4 + 1] + blendedData.data[i * 4 + 2]);
|
||||
++i;
|
||||
}
|
||||
d.average = (d.average / (blendedData.data.length * 0.25))*10;
|
||||
if (d.average > parseFloat(cord.sensitivity)){
|
||||
if(d.mon.detector_use_detect_object==="1"&&d.mon.detector_second!=='1'){
|
||||
var buffer=s.group[d.ke][d.id].canvas[cord.name].toBuffer();
|
||||
s.detectObject(buffer,d,tx)
|
||||
}else{
|
||||
tx({f:'trigger',id:d.id,ke:d.ke,details:{split:true,plug:config.plug,name:cord.name,reason:'motion',confidence:d.average,frame:d.base64}})
|
||||
}
|
||||
}
|
||||
s.group[d.ke][d.id].canvasContext[cord.name].clearRect(0, 0, d.width, d.height);
|
||||
s.group[d.ke][d.id].blendRegionContext[cord.name].clearRect(0, 0, d.width, d.height);
|
||||
}
|
||||
/**
 * Convert a Blob into a Node Buffer using the browser-style FileReader API.
 * @param {Blob} blob - source data; must be a Blob instance.
 * @param {function(Error=, Buffer=)} cb - node-style callback receiving the Buffer.
 * @throws {Error} when blob is not a Blob or cb is not a function.
 */
function blobToBuffer (blob, cb) {
  const isBlob = typeof Blob !== 'undefined' && blob instanceof Blob
  if (!isBlob) {
    throw new Error('first argument must be a Blob')
  }
  if (typeof cb !== 'function') {
    throw new Error('second argument must be a function')
  }

  const reader = new FileReader()

  const onLoadEnd = function (e) {
    // One-shot listener: detach before delivering the result.
    reader.removeEventListener('loadend', onLoadEnd, false)
    if (e.error) {
      cb(e.error)
    } else {
      cb(null, Buffer.from(reader.result))
    }
  }

  reader.addEventListener('loadend', onLoadEnd, false)
  reader.readAsArrayBuffer(blob)
}
|
||||
// Branch-free absolute value for 32-bit integers (sign-mask bit trick).
function fastAbs(value) {
  const mask = value >> 31;
  return (value ^ mask) - mask;
}
|
||||
|
||||
// Binarize a pixel-difference value: above 0x15 (21) -> white, else black.
function threshold(value) {
  if (value > 0x15) {
    return 0xFF;
  }
  return 0;
}
|
||||
// Write a binary difference mask of two RGBA pixel buffers into `target`.
// Each pixel's gray average is compared; matching pixels become black and
// differing pixels become white (alpha forced opaque). Only length * 0.25
// entries are walked, i.e. one iteration per RGBA pixel.
s.differenceAccuracy=function(target, data1, data2) {
    if (data1.length != data2.length) return null;
    var pixelCount = data1.length * 0.25;
    for (var p = 0; p < pixelCount; p++) {
        var base = 4 * p;
        var grayA = (data1[base] + data1[base + 1] + data1[base + 2]) / 3;
        var grayB = (data2[base] + data2[base + 1] + data2[base + 2]) / 3;
        var mask = threshold(fastAbs(grayA - grayB));
        target[base] = mask;
        target[base + 1] = mask;
        target[base + 2] = mask;
        target[base + 3] = 0xFF;
    }
}
|
||||
// Run motion detection over every configured region of this monitor,
// optionally adding a synthetic FULL_FRAME region, then release the image.
s.checkAreas=function(d,tx){
    var mon = s.group[d.ke][d.id];
    if(!mon.cords){
        if(!d.mon.cords){ d.mon.cords = {} }
        mon.cords = Object.values(d.mon.cords);
    }
    if(d.mon.detector_frame === '1'){
        // Whole-frame region covering the entire scaled image.
        d.mon.cords.frame = {
            name: 'FULL_FRAME',
            s: d.mon.detector_sensitivity,
            points: [[0,0],[0,d.image.height],[d.image.width,d.image.height],[d.image.width,0]]
        };
        mon.cords.push(d.mon.cords.frame);
    }
    for(var i = 0; i < mon.cords.length; i++){
        // Bail out entirely on a missing region entry.
        if(!mon.cords[i]){ return }
        s.blenderRegion(d, mon.cords[i], tx)
    }
    delete(d.image)
}
|
||||
|
||||
// Primary router for every event received from the Shinobi core.
// d  : event payload; d.f selects the action, d.ke/d.id address a monitor.
// cn : socket connection (only provided when running in host mode).
// tx : transmit function used to send results back to the core.
s.MainEventController=function(d,cn,tx){
    switch(d.f){
        case'refreshPlugins':
            // Re-scan the cascade directory and broadcast the list to the group.
            s.findCascades(function(cascades){
                s.cx({f:'s.tx',data:{f:'detector_cascade_list',cascades:cascades},to:'GRP_'+d.ke})
            })
        break;
        case'readPlugins':
            // Broadcast the cached cascade list without re-scanning.
            s.cx({f:'s.tx',data:{f:'detector_cascade_list',cascades:s.cascadesInDir},to:'GRP_'+d.ke})
        break;
        case'init_plugin_as_host':
            // Handshake for host mode: validate the shared plugin key.
            if(!cn){
                console.log('No CN',d)
                return
            }
            if(d.key!==config.key){
                console.log(new Date(),'Plugin Key Mismatch',cn.request.connection.remoteAddress,d)
                cn.emit('init',{ok:false})
                cn.disconnect()
            }else{
                console.log(new Date(),'Plugin Connected to Client',cn.request.connection.remoteAddress)
                cn.emit('init',{ok:true,plug:config.plug,notice:config.notice,type:config.type})
            }
        break;
        case'init_monitor':
            // Reset all per-monitor detection state (canvases, diff buffers).
            if(s.group[d.ke]&&s.group[d.ke][d.id]){
                s.group[d.ke][d.id].canvas={}
                s.group[d.ke][d.id].canvasContext={}
                s.group[d.ke][d.id].blendRegion={}
                s.group[d.ke][d.id].blendRegionContext={}
                s.group[d.ke][d.id].lastRegionImageData={}
                s.group[d.ke][d.id].numberOfTriggers=0
                delete(s.group[d.ke][d.id].cords)
                delete(s.group[d.ke][d.id].buffer)
            }
        break;
        case'init_aws_push':
            // Prepare an upload-tracking object for a batch of d.total videos.
            // NOTE(review): assumes init_monitor already created s.group[d.ke][d.id] — confirm.
            s.group[d.ke][d.id].aws={links:[],complete:0,total:d.total,videos:[],tx:tx}
        break;
        case'frame':
            // Accumulate JPEG chunks; a chunk ending in FF D9 (JPEG EOI)
            // completes the frame and triggers detection.
            try{
                if(!s.group[d.ke]){
                    s.group[d.ke]={}
                }
                if(!s.group[d.ke][d.id]){
                    s.group[d.ke][d.id]={
                        canvas:{},
                        canvasContext:{},
                        lastRegionImageData:{},
                        blendRegion:{},
                        blendRegionContext:{},
                    }
                }
                if(!s.group[d.ke][d.id].buffer){
                    s.group[d.ke][d.id].buffer=[d.frame];
                }else{
                    s.group[d.ke][d.id].buffer.push(d.frame)
                }
                if(d.frame[d.frame.length-2] === 0xFF && d.frame[d.frame.length-1] === 0xD9){
                    // Full JPEG received: flatten the chunk list into one Buffer.
                    s.group[d.ke][d.id].buffer=Buffer.concat(s.group[d.ke][d.id].buffer);
                    try{
                        d.mon.detector_cascades=JSON.parse(d.mon.detector_cascades)
                    }catch(err){
                        // Already parsed or not valid JSON; keep as-is.
                    }
                    if(d.mon.detector_frame_save==="1"){
                        // Attach the frame as base64 so triggers can include it.
                        d.base64=s.group[d.ke][d.id].buffer.toString('base64')
                    }
                    if(d.mon.detector_second==='1'&&d.objectOnly===true){
                        // Second-pass mode: go straight to object detection.
                        s.detectObject(s.group[d.ke][d.id].buffer,d,tx)
                    }else{
                        if((d.mon.detector_pam !== '1' && d.mon.detector_use_motion === "1") || d.mon.detector_use_detect_object !== "1"){
                            // Pixel-motion path: normalize region coordinates first.
                            if((typeof d.mon.cords ==='string')&&d.mon.cords.trim()===''){
                                d.mon.cords=[]
                            }else{
                                try{
                                    d.mon.cords=JSON.parse(d.mon.cords)
                                }catch(err){
                                    // console.log('d.mon.cords',err,d)
                                }
                            }
                            s.group[d.ke][d.id].cords=Object.values(d.mon.cords);
                            d.mon.cords=d.mon.cords;
                            d.image = new Canvas.Image;
                            if(d.mon.detector_scale_x===''||d.mon.detector_scale_y===''){
                                s.systemLog('Must set detector image size')
                                return
                            }else{
                                d.image.width=d.mon.detector_scale_x;
                                d.image.height=d.mon.detector_scale_y;
                            }
                            d.width=d.image.width;
                            d.height=d.image.height;
                            // Detection runs once node-canvas finishes decoding.
                            d.image.onload = function() {
                                s.checkAreas(d,tx);
                            }
                            d.image.src = s.group[d.ke][d.id].buffer;
                        }else{
                            // Object-detection-only path.
                            s.detectObject(s.group[d.ke][d.id].buffer,d,tx)
                        }
                    }
                    // Frame consumed; start a fresh accumulation buffer.
                    s.group[d.ke][d.id].buffer=null;
                }
            }catch(err){
                if(err){
                    s.systemLog(err)
                    delete(s.group[d.ke][d.id].buffer)
                }
            }
        break;
    }
}
|
||||
server.listen(config.hostPort);
//web pages and plugin api
app.get('/', function (req, res) {
    // Simple liveness page.
    res.end('<b>'+config.plug+'</b> for Shinobi is running')
});
//Connector to Shinobi
if(config.mode==='host'){
    //start plugin as host: Shinobi connects to this plugin
    var io = require('socket.io')(server);
    io.attach(server);
    s.connectedClients={};
    io.on('connection', function (cn) {
        // Track each connected Shinobi instance and give it a tagged
        // transmit function that stamps the plugin key and name.
        s.connectedClients[cn.id]={id:cn.id}
        s.connectedClients[cn.id].tx = function(data){
            data.pluginKey=config.key;data.plug=config.plug;
            return io.to(cn.id).emit('ocv',data);
        }
        cn.on('f',function(d){
            s.MainEventController(d,cn,s.connectedClients[cn.id].tx)
        });
        cn.on('disconnect',function(d){
            delete(s.connectedClients[cn.id])
        })
    });
}else{
    //start plugin as client: this plugin connects to Shinobi
    if(!config.host){config.host='localhost'}
    var io = require('socket.io-client')('ws://'+config.host+':'+config.port);//connect to master
    // Stamp outgoing events with the plugin key and name.
    s.cx=function(x){x.pluginKey=config.key;x.plug=config.plug;return io.emit('ocv',x)}
    io.on('connect',function(d){
        s.cx({f:'init',plug:config.plug,notice:config.notice,type:config.type});
    })
    io.on('disconnect',function(d){
        // Reconnect immediately on drop.
        io.connect();
    })
    io.on('f',function(d){
        s.MainEventController(d,null,s.cx)
    })
}
|
29
plugins/microsoft/conf.sample.json
Normal file
29
plugins/microsoft/conf.sample.json
Normal file
|
@ -0,0 +1,29 @@
|
|||
{
|
||||
"plug":"ComputerVision",
|
||||
"host":"localhost",
|
||||
"port":8080,
|
||||
"key":"UNIQUE KEY HERE",
|
||||
"computerVision":{
|
||||
"apiKey":"YOUR_KEY",
|
||||
"endpoint":"http://YOUR_ENDPOINT/analyze",
|
||||
"params":{
|
||||
"visualFeatures": "Categories,Description,Color",
|
||||
"details": "",
|
||||
"language": "en"
|
||||
}
|
||||
},
|
||||
"EmotionAPI":{
|
||||
"apiKey":"YOUR_KEY",
|
||||
"endpoint":"http://YOUR_ENDPOINT/recognize",
|
||||
"params":{}
|
||||
},
|
||||
"FaceAPI":{
|
||||
"apiKey":"YOUR_KEY",
|
||||
"endpoint":"http://YOUR_ENDPOINT/detect",
|
||||
"params":{
|
||||
"returnFaceId": "true",
|
||||
"returnFaceLandmarks": "false",
|
||||
"returnFaceAttributes": "age,gender,headPose,smile,facialHair,glasses,emotion,hair,makeup,occlusion,accessories,blur,exposure,noise"
|
||||
}
|
||||
}
|
||||
}
|
170
plugins/microsoft/shinobi-ms-vision.js
Normal file
170
plugins/microsoft/shinobi-ms-vision.js
Normal file
|
@ -0,0 +1,170 @@
|
|||
//
|
||||
// Shinobi - Microsoft Computer Vision Plugin
|
||||
// Copyright (C) 2016-2025 Moe Alam, moeiscool
|
||||
//
|
||||
process.on('uncaughtException', function (err) {
|
||||
console.error('uncaughtException',err);
|
||||
});
|
||||
var fs=require('fs');
|
||||
var exec = require('child_process').exec;
|
||||
//var http = require('http');
|
||||
var request = require('request');
|
||||
var moment = require('moment');
|
||||
var cognitive = require('cognitive-services');
|
||||
var config=require('./conf.json');
|
||||
if(config.systemLog===undefined){config.systemLog=true}
|
||||
s={
|
||||
group:{},
|
||||
dir:{
|
||||
cascades:__dirname+'/cascades/'
|
||||
},
|
||||
isWin:(process.platform==='win32')
|
||||
}
|
||||
//default stream folder check
|
||||
if(!config.streamDir){
|
||||
if(s.isWin===false){
|
||||
config.streamDir='/dev/shm'
|
||||
}else{
|
||||
config.streamDir=config.windowsTempDir
|
||||
}
|
||||
if(!fs.existsSync(config.streamDir)){
|
||||
config.streamDir=__dirname+'/streams/'
|
||||
}else{
|
||||
config.streamDir+='/streams/'
|
||||
}
|
||||
}
|
||||
s.dir.streams=config.streamDir;
|
||||
//streams dir
|
||||
if(!fs.existsSync(s.dir.streams)){
|
||||
fs.mkdirSync(s.dir.streams);
|
||||
}
|
||||
// Generate a random alphanumeric identifier of length x (default 10).
s.gid=function(x){
    var length = x || 10;
    var alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
    var out = "";
    for(var i = 0; i < length; i++){
        out += alphabet.charAt(Math.floor(Math.random() * alphabet.length));
    }
    return out;
};
|
||||
// Timestamped console logging, gated by config.systemLog.
s.systemLog=function(q,w,e){
    w = w || ''
    e = e || ''
    if(config.systemLog !== true){ return }
    return console.log(moment().format(), q, w, e)
}
|
||||
// Serialize a flat object into a URL query string ("k=v&k2=v2").
// Values are URI-encoded; keys are used verbatim.
s.objectToParameter = function(obj){
    var pairs = [];
    Object.keys(obj).forEach(function(key) {
        pairs.push(key + '=' + encodeURIComponent(obj[key]));
    });
    return pairs.join('&');
}
|
||||
// POST a publicly reachable image URL to a Microsoft Cognitive Services
// endpoint. `API` is one of the {endpoint, apiKey, params} objects from
// conf.json; `callback` is a standard request callback (err, resp, body).
s.sendImageToMS=function(sourceImageUrl,API,callback){
    // Query string is built from the configured per-API parameters.
    var URL = API.endpoint+'?'+s.objectToParameter(API.params)
    request(URL,{
        method: 'POST',
        headers:{
            // Azure subscription key header expected by Cognitive Services.
            "Ocp-Apim-Subscription-Key":API.apiKey
        },
        json: {
            url:sourceImageUrl
        }
    }, callback)
}
|
||||
// Send the current frame's public JPEG snapshot URL through the configured
// Microsoft APIs (Computer Vision -> Face -> Emotion) sequentially and log
// the combined responses.
// NOTE(review): `buffer` is unused — the image is fetched by Microsoft via
// the snapshot URL, whose host is hard-coded to an IP address; presumably
// this should point at the local Shinobi API host — confirm.
s.detectObject=function(buffer,d){
    var sourceImageUrl = 'http://184.105.6.43/'+s.api_key+'/jpeg/'+d.ke+'/'+d.id+'/s.jpg'
    var responses = {}
    s.sendImageToMS(sourceImageUrl,config.computerVision,function(err,resp,body1){
        responses.computerVisionURL = body1
        s.sendImageToMS(sourceImageUrl,config.FaceAPI,function(err,resp,body2){
            responses.faceApiURL = body2
            s.sendImageToMS(sourceImageUrl,config.EmotionAPI,function(err,resp,body3){
                responses.EmotionAPI = body3
                // NOTE(review): results are only logged, never forwarded to
                // Shinobi as a trigger — confirm whether that is intended.
                console.log('responses',JSON.stringify(responses,null,3))
            })
        })
    })
}
|
||||
// Ensure the nested group/monitor state containers exist for this event.
s.makeMonitorObject=function(d){
    if(!s.group[d.ke]){
        s.group[d.ke] = {}
    }
    if(s.group[d.ke][d.id]){ return }
    s.group[d.ke][d.id] = {
        port: null,
        countStarted: new Date()
    }
}
|
||||
io = require('socket.io-client')('ws://'+config.host+':'+config.port);//connect to master
// Stamp outgoing events with the plugin key and name.
s.cx=function(x){x.pluginKey=config.key;x.plug=config.plug;return io.emit('ocv',x)}
io.on('connect',function(d){
    s.cx({f:'init',plug:config.plug});
})
io.on('disconnect',function(d){
    // Reconnect immediately on drop.
    io.connect()
})
// Event router: api_key handshake, per-monitor resets, and frame intake.
io.on('f',function(d){
    switch(d.f){
        case'api_key':
            // API key used to build the public snapshot URL.
            s.api_key=d.key
        break;
        case'init_monitor':
            if(s.group[d.ke]&&s.group[d.ke][d.id]){
                s.group[d.ke][d.id].buffer=null
                s.group[d.ke][d.id].countStarted=new Date()
            }
            s.makeMonitorObject(d)
        break;
        case'frame':
            d.details={}
            try{
                s.makeMonitorObject(d)
                // Accumulate JPEG chunks until the EOI marker (FF D9) arrives.
                if(!s.group[d.ke][d.id].buffer){
                    s.group[d.ke][d.id].buffer=[d.frame];
                }else{
                    s.group[d.ke][d.id].buffer.push(d.frame)
                }
                if(d.frame[d.frame.length-2] === 0xFF && d.frame[d.frame.length-1] === 0xD9){
                    // BUGFIX: concatenate the chunk list ONCE before use. The
                    // previous code called .toString('base64') on the raw
                    // Array of Buffers, producing comma-joined bytes instead
                    // of base64 of the JPEG.
                    var fullFrame = Buffer.concat(s.group[d.ke][d.id].buffer)
                    if(d.mon.detector_frame_save==="1"){
                        d.base64=fullFrame.toString('base64')
                    }
                    if(d.mon.detector_scale_x&&d.mon.detector_scale_x!==''&&d.mon.detector_scale_y&&d.mon.detector_scale_y!==''){
                        d.width=d.mon.detector_scale_x;
                        d.height=d.mon.detector_scale_y;
                    }else{
                        // Default detector frame size when no scale is set.
                        d.width=640
                        d.height=480
                    }
                    s.detectObject(fullFrame,d)
                    s.group[d.ke][d.id].buffer=null;
                }
            } catch(err){
                console.error(err)
            }
        break;
    }
})
|
1
plugins/motion/.gitignore
vendored
Normal file
1
plugins/motion/.gitignore
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
conf.json
|
5
plugins/motion/INSTALL.sh
Normal file
5
plugins/motion/INSTALL.sh
Normal file
|
@ -0,0 +1,5 @@
|
|||
# Install native build dependencies for node-canvas (Debian/Ubuntu).
apt-get install libcairo2-dev libjpeg-dev libpango1.0-dev libgif-dev build-essential g++
# Build the Node.js canvas bindings used for pixel analysis.
npm install canvas
cd plugins/motion
# Create a working config from the shipped sample.
cp conf.sample.json conf.json
# Daemonize the motion detector with PM2.
pm2 start shinobi-motion.js
56
plugins/motion/README.md
Normal file
56
plugins/motion/README.md
Normal file
|
@ -0,0 +1,56 @@
|
|||
# Shinobi Motion Detector
|
||||
|
||||
Install required libraries.
|
||||
|
||||
**Ubuntu and Debian only**
|
||||
|
||||
```
|
||||
sudo apt-get install libcairo2-dev libjpeg-dev libpango1.0-dev libgif-dev build-essential g++
|
||||
```
|
||||
|
||||
**CentOS only**
|
||||
|
||||
```
|
||||
su -c 'yum install cairo cairo-devel cairomm-devel libjpeg-turbo-devel pango pango-devel pangomm pangomm-devel giflib-devel'
|
||||
yum search arial
|
||||
yum install liberation-sans-fonts.noarch
|
||||
```
|
||||
|
||||
**Install the Node.js Canvas engine**
|
||||
|
||||
```
|
||||
sudo npm install canvas
|
||||
```
|
||||
|
||||
Go to the Shinobi directory. **Below is an example.**
|
||||
|
||||
```
|
||||
cd /home/Shinobi
|
||||
```
|
||||
|
||||
Copy the config file.
|
||||
|
||||
```
|
||||
cp plugins/motion/conf.sample.json plugins/motion/conf.json
|
||||
```
|
||||
|
||||
Edit the new file. Host should be `localhost` and the port should match the `listening port for camera.js`.
|
||||
|
||||
```
|
||||
nano plugins/motion/conf.json
|
||||
```
|
||||
|
||||
Start the plugin.
|
||||
|
||||
```
|
||||
node plugins/motion/shinobi-motion.js
|
||||
```
|
||||
|
||||
Or to daemonize with PM2.
|
||||
|
||||
```
|
||||
pm2 start plugins/motion/shinobi-motion.js
|
||||
```
|
||||
|
||||
Doing this will reveal options in the monitor configuration. Shinobi does not need to be restarted when a plugin is initiated or stopped.
|
||||
|
7
plugins/motion/conf.sample.json
Normal file
7
plugins/motion/conf.sample.json
Normal file
|
@ -0,0 +1,7 @@
|
|||
{
|
||||
"plug":"Motion",
|
||||
"host":"localhost",
|
||||
"port":8080,
|
||||
"key":"change_this_to_something_very_random____make_sure_to_match__/plugins/motion/conf.json",
|
||||
"notice":"Looks like you have the Motion plugin running. Don't forget to enable <b>Send Frames</b> to start pushing frames to be read."
|
||||
}
|
115
plugins/motion/libs/clusterPoints.js
Normal file
115
plugins/motion/libs/clusterPoints.js
Normal file
|
@ -0,0 +1,115 @@
|
|||
'use strict'

// Lightweight k-means clustering helper: configure data/k/iterations via
// the getter-setter properties, then call clusters().
module.exports = {

  // Training data: an array of equal-length numeric vectors. The validator
  // accepts an assignment only when every row matches the first row's length.
  data: getterSetter([], function(arrayOfArrays) {
    var n = arrayOfArrays[0].length;
    return (arrayOfArrays.map(function(array) {
      return array.length == n;
    }).reduce(function(boolA, boolB) { return (boolA & boolB) }, true));
  }),

  // Run k-means and return one {centroid, points} entry per cluster.
  clusters: function() {
    var pointsAndCentroids = kmeans(this.data(), {k: this.k(), iterations: this.iterations() });
    var points = pointsAndCentroids.points;
    var centroids = pointsAndCentroids.centroids;

    return centroids.map(function(centroid) {
      return {
        centroid: centroid.location(),
        points: points.filter(function(point) { return point.label() == centroid.label() }).map(function(point) { return point.location() }),
      };
    });
  },

  // Number of clusters; must be a positive integer (unset -> kmeans default).
  k: getterSetter(undefined, function(value) { return ((value % 1 == 0) & (value > 0)) }),

  // Iteration count; positive integer, defaults to 1000.
  iterations: getterSetter(Math.pow(10, 3), function(value) { return ((value % 1 == 0) & (value > 0)) }),

};
|
||||
|
||||
// Run Lloyd's k-means over `data` (array of numeric vectors) for a fixed
// number of iterations and return the labeled points plus centroids.
function kmeans(data, config) {
  // Default k follows the sqrt(n/2) rule of thumb.
  var k = config.k || Math.round(Math.sqrt(data.length / 2));
  var iterations = config.iterations;

  // Wrap raw vectors in Point objects.
  var points = data.map(function(vector) { return new Point(vector) });

  // Seed centroids deterministically from the first points (wrapping).
  var centroids = [];
  for (var c = 0; c < k; c++) {
    centroids.push(new Centroid(points[c % points.length].location(), c));
  }

  // Alternate label assignment and centroid relocation.
  for (var step = 0; step < iterations; step++) {
    points.forEach(function(point) { point.updateLabel(centroids) });
    centroids.forEach(function(centroid) { centroid.updateLocation(points) });
  }

  return {
    points: points,
    centroids: centroids
  };
};
|
||||
|
||||
// objects
|
||||
// A data point with a mutable location and a cluster label. The label is
// updated to the index of the nearest centroid (squared Euclidean distance).
function Point(location) {
  var self = this;
  this.location = getterSetter(location);
  this.label = getterSetter();
  this.updateLabel = function(centroids) {
    var squaredDistances = centroids.map(function(centroid) {
      return sumOfSquareDiffs(self.location(), centroid.location());
    });
    self.label(mindex(squaredDistances));
  };
};
|
||||
|
||||
// A cluster centroid: relocates to the mean of the points carrying its
// label (location unchanged when no points are assigned).
function Centroid(initialLocation, label) {
  var self = this;
  this.location = getterSetter(initialLocation);
  this.label = getterSetter(label);
  this.updateLocation = function(points) {
    var assigned = points.filter(function(point) {
      return point.label() == self.label();
    });
    if (assigned.length > 0) {
      self.location(averageLocation(assigned));
    }
  };
};
|
||||
|
||||
// convenience functions
|
||||
// Build a combined getter/setter closure. Calling with no argument reads
// the stored value; calling with an argument stores it only when the
// optional validator accepts it.
function getterSetter(initialValue, validator) {
  var stored = initialValue;
  var accepts = validator || function() { return true };
  return function(candidate) {
    if (typeof candidate === 'undefined') return stored;
    if (accepts(candidate)) stored = candidate;
  };
};
|
||||
|
||||
// Squared Euclidean distance between two equal-length numeric vectors.
function sumOfSquareDiffs(oneVector, anotherVector) {
  var total = 0;
  for (var i = 0; i < oneVector.length; i++) {
    var diff = oneVector[i] - anotherVector[i];
    total += diff * diff;
  }
  return total;
};
|
||||
|
||||
// Index of the smallest element of a non-empty numeric array.
function mindex(array) {
  var smallest = array.reduce(function(a, b) {
    return Math.min(a, b);
  });
  return array.indexOf(smallest);
};
|
||||
|
||||
// Element-wise sum of two equal-length numeric vectors.
function sumVectors(a, b) {
  var result = [];
  for (var i = 0; i < a.length; i++) {
    result.push(a[i] + b[i]);
  }
  return result;
};
|
||||
|
||||
// Mean location of a non-empty list of Point-like objects (each exposing a
// location() getter that returns a numeric vector).
function averageLocation(points) {
  var dims = points[0].location().length;
  var totals = [];
  for (var i = 0; i < dims; i++) totals.push(0);
  points.forEach(function(point) {
    var loc = point.location();
    for (var i = 0; i < dims; i++) totals[i] += loc[i];
  });
  return totals.map(function(total) { return total / points.length });
};
|
245
plugins/motion/shinobi-motion-pixel.js
Normal file
245
plugins/motion/shinobi-motion-pixel.js
Normal file
|
@ -0,0 +1,245 @@
|
|||
//
|
||||
// Shinobi - Motion Plugin
|
||||
// Copyright (C) 2016-2025 Moe Alam, moeiscool
|
||||
//
|
||||
// # Donate
|
||||
//
|
||||
// If you like what I am doing here and want me to continue please consider donating :)
|
||||
// PayPal : paypal@m03.ca
|
||||
//
|
||||
process.on('uncaughtException', function (err) {
|
||||
console.error('uncaughtException',err);
|
||||
});
|
||||
var fs = require('fs');
|
||||
var moment = require('moment');
|
||||
var Canvas = require('canvas');
|
||||
var Cluster = require('./libs/clusterPoints.js');
|
||||
var config=require('./conf.json');
|
||||
if(process.argv[2]&&process.argv[3]){
|
||||
config.host=process.argv[2]
|
||||
config.port=process.argv[3]
|
||||
config.key=process.argv[4]
|
||||
}
|
||||
if(config.systemLog===undefined){config.systemLog=true}
|
||||
s={
|
||||
group:{},
|
||||
}
|
||||
// Timestamped console logging, gated by config.systemLog.
s.systemLog=function(q,w,e){
    w = w || ''
    e = e || ''
    if(config.systemLog !== true){ return }
    return console.log(moment().format(), q, w, e)
}
|
||||
// Pixel-motion check for one region (`cord`) of a monitor's frame.
// Blends the current frame with the previous one on a clipped canvas,
// counts bright (changed) pixels, optionally clusters them into bounding
// matrices, and emits a 'trigger' event when the average exceeds the
// region's sensitivity.
s.checkRegion=function(d,cord){
    d.width = d.image.width;
    d.height = d.image.height;
    // Lazily create the region's canvas, painted and clipped to the polygon.
    if(!s.group[d.ke][d.id].canvas[cord.name]){
        if(!cord.sensitivity||isNaN(cord.sensitivity)){
            cord.sensitivity=d.mon.detector_sensitivity;
        }
        s.group[d.ke][d.id].canvas[cord.name] = new Canvas(d.width,d.height);
        s.group[d.ke][d.id].canvasContext[cord.name] = s.group[d.ke][d.id].canvas[cord.name].getContext('2d');
        s.group[d.ke][d.id].canvasContext[cord.name].fillStyle = '#005337';
        s.group[d.ke][d.id].canvasContext[cord.name].fillRect( 0, 0,d.width,d.height);
        if(cord.points&&cord.points.length>0){
            // Trace the region polygon and clip all later drawing to it.
            s.group[d.ke][d.id].canvasContext[cord.name].beginPath();
            for (var b = 0; b < cord.points.length; b++){
                cord.points[b][0]=parseFloat(cord.points[b][0]);
                cord.points[b][1]=parseFloat(cord.points[b][1]);
                if(b===0){
                    s.group[d.ke][d.id].canvasContext[cord.name].moveTo(cord.points[b][0],cord.points[b][1]);
                }else{
                    s.group[d.ke][d.id].canvasContext[cord.name].lineTo(cord.points[b][0],cord.points[b][1]);
                }
            }
            s.group[d.ke][d.id].canvasContext[cord.name].clip();
        }
    }
    s.group[d.ke][d.id].canvasContext[cord.name].drawImage(d.image, 0, 0, d.width, d.height);
    var blenderCanvas = s.group[d.ke][d.id].canvas[cord.name];
    var blenderCanvasContext = s.group[d.ke][d.id].canvasContext[cord.name];
    // Double-buffer: keep the last two frames and flip frameNumber (0/1)
    // so the blend below compares against the previous capture.
    s.group[d.ke][d.id].frameSelected[s.group[d.ke][d.id].frameNumber] = blenderCanvasContext.getImageData(0, 0, blenderCanvas.width, blenderCanvas.height);
    s.group[d.ke][d.id].frameNumber = 0 == s.group[d.ke][d.id].frameNumber ? 1 : 0;
    s.group[d.ke][d.id].lastRegionImageData = blenderCanvasContext.getImageData(0, 0, blenderCanvas.width, blenderCanvas.height);
    if(!s.group[d.ke][d.id].lastRegionImageData){return}
    var foundPixels = [];
    var average = 0;
    var currentImageLength = s.group[d.ke][d.id].lastRegionImageData.data.length * 0.25;
    // NOTE(review): the loop advances b by 4 while also multiplying by 4
    // for the channel offset, so only every 4th pixel is sampled — confirm
    // whether this stride is intentional.
    for (b = 0; b < currentImageLength;){
        var pos = b * 4
        // Blend current and previous frame (inverted-average mix).
        s.group[d.ke][d.id].lastRegionImageData.data[pos] = .5 * (255 - s.group[d.ke][d.id].lastRegionImageData.data[pos]) + .5 * s.group[d.ke][d.id].frameSelected[s.group[d.ke][d.id].frameNumber].data[pos];
        s.group[d.ke][d.id].lastRegionImageData.data[pos + 1] = .5 * (255 - s.group[d.ke][d.id].lastRegionImageData.data[pos + 1]) + .5 * s.group[d.ke][d.id].frameSelected[s.group[d.ke][d.id].frameNumber].data[pos + 1];
        s.group[d.ke][d.id].lastRegionImageData.data[pos + 2] = .5 * (255 - s.group[d.ke][d.id].lastRegionImageData.data[pos + 2]) + .5 * s.group[d.ke][d.id].frameSelected[s.group[d.ke][d.id].frameNumber].data[pos + 2];
        s.group[d.ke][d.id].lastRegionImageData.data[pos + 3] = 255;
        // A gray score above 170 marks the pixel as changed.
        var score = (s.group[d.ke][d.id].lastRegionImageData.data[pos] + s.group[d.ke][d.id].lastRegionImageData.data[pos + 1] + s.group[d.ke][d.id].lastRegionImageData.data[pos + 2]) / 3;
        if(score>170){
            var x = (pos / 4) % d.width;
            var y = Math.floor((pos / 4) / d.width);
            foundPixels.push([x,y])
        }

        average += (s.group[d.ke][d.id].lastRegionImageData.data[b * 4] + s.group[d.ke][d.id].lastRegionImageData.data[b * 4 + 1] + s.group[d.ke][d.id].lastRegionImageData.data[b * 4 + 2]);

        b += 4;
    }
    // console.log(foundPixels)
    var matrices
    // Optionally cluster changed pixels into axis-aligned bounding boxes.
    if(d.mon.detector_region_of_interest==='1'&&foundPixels.length>0){
        var groupedPoints = Object.assign({},Cluster);
        groupedPoints.iterations(25);
        groupedPoints.data(foundPixels);
        var groupedPoints = groupedPoints.clusters()
        var matrices=[]
        var mostHeight = 0;
        var mostWidth = 0;
        var mostWithMotion = null;
        groupedPoints.forEach(function(v,n){
            // Shrink-wrap a matrix around each cluster's points.
            var matrix = {
                topLeft:[d.width,d.height],
                topRight:[0,d.height],
                bottomRight:[0,0],
                bottomLeft:[d.width,0],
            }
            v.points.forEach(function(b){
                var x = b[0]
                var y = b[1]
                if(x<matrix.topLeft[0])matrix.topLeft[0]=x;
                if(y<matrix.topLeft[1])matrix.topLeft[1]=y;
                //Top Right point
                if(x>matrix.topRight[0])matrix.topRight[0]=x;
                if(y<matrix.topRight[1])matrix.topRight[1]=y;
                //Bottom Right point
                if(x>matrix.bottomRight[0])matrix.bottomRight[0]=x;
                if(y>matrix.bottomRight[1])matrix.bottomRight[1]=y;
                //Bottom Left point
                if(x<matrix.bottomLeft[0])matrix.bottomLeft[0]=x;
                if(y>matrix.bottomLeft[1])matrix.bottomLeft[1]=y;
            })
            matrix.x = matrix.topLeft[0];
            matrix.y = matrix.topLeft[1];
            matrix.width = matrix.topRight[0] - matrix.topLeft[0]
            matrix.height = matrix.bottomLeft[1] - matrix.topLeft[1]

            // Track the largest cluster seen (currently unused afterwards).
            if(matrix.width>mostWidth&&matrix.height>mostHeight){
                mostWidth = matrix.width;
                mostHeight = matrix.height;
                mostWithMotion = matrix;
            }

            matrices.push(matrix)
        })
    }
    average = (average / (currentImageLength));
    if (average > parseFloat(cord.sensitivity)){
        s.cx({f:'trigger',id:d.id,ke:d.ke,details:{plug:config.plug,name:cord.name,reason:'motion',confidence:average,matrices:matrices}})
    }
    s.group[d.ke][d.id].canvasContext[cord.name].clearRect(0, 0, d.width, d.height);
}
|
||||
// Run the pixel-motion check over every configured region of this monitor,
// optionally adding a synthetic whole-frame region, then release the image.
s.checkAreas=function(d){
    var mon = s.group[d.ke][d.id];
    if(!mon.cords){
        if(!d.mon.cords){ d.mon.cords = {} }
        mon.cords = Object.values(d.mon.cords);
    }
    if(d.mon.detector_frame === '1'){
        // Whole-frame region covering the entire scaled image.
        d.mon.cords.frame = {
            name: 'frame',
            s: d.mon.detector_sensitivity,
            points: [[0,0],[0,d.image.height],[d.image.width,d.image.height],[d.image.width,0]]
        };
        mon.cords.push(d.mon.cords.frame);
    }
    for(var i = 0; i < mon.cords.length; i++){
        // Bail out entirely on a missing region entry.
        if(!mon.cords[i]){ return }
        s.checkRegion(d, mon.cords[i])
    }
    delete(d.image)
}
|
||||
|
||||
io = require('socket.io-client')('ws://'+config.host+':'+config.port);//connect to master
// Stamp outgoing events with the plugin key and name.
s.cx=function(x){x.pluginKey=config.key;x.plug=config.plug;return io.emit('ocv',x)}
io.on('connect',function(d){
    s.cx({f:'init',plug:config.plug,notice:config.notice});
})
io.on('disconnect',function(d){
    // Reconnect immediately on drop.
    io.connect();
})
// Event router: per-monitor resets and JPEG frame intake.
io.on('f',function(d){
    switch(d.f){
        case'init_monitor':
            // Reset all per-monitor detection state.
            if(s.group[d.ke]&&s.group[d.ke][d.id]){
                s.group[d.ke][d.id].canvas={}
                s.group[d.ke][d.id].canvasContext={}
                s.group[d.ke][d.id].lastRegionImageData=undefined
                s.group[d.ke][d.id].frameNumber=0
                s.group[d.ke][d.id].frameSelected=[]
                delete(s.group[d.ke][d.id].cords)
                delete(s.group[d.ke][d.id].buffer)
            }
        break;
        case'frame':
            // Accumulate JPEG chunks; a chunk ending in FF D9 (JPEG EOI)
            // completes the frame and triggers detection.
            try{
                if(!s.group[d.ke]){
                    s.group[d.ke]={}
                }
                if(!s.group[d.ke][d.id]){
                    s.group[d.ke][d.id]={
                        canvas:{},
                        canvasContext:{},
                        lastRegionImageData:undefined,
                        frameNumber:0,
                        frameSelected:[],
                    }
                }
                if(!s.group[d.ke][d.id].buffer){
                    s.group[d.ke][d.id].buffer=[d.frame];
                }else{
                    s.group[d.ke][d.id].buffer.push(d.frame)
                }
                if(d.frame[d.frame.length-2] === 0xFF && d.frame[d.frame.length-1] === 0xD9){
                    // Rate-limit: while motion_lock is active, skip detection.
                    // NOTE(review): this return leaves the chunk buffer in
                    // place, so later chunks keep appending to it — confirm
                    // whether skipped frames should be dropped instead.
                    if(s.group[d.ke][d.id].motion_lock){
                        return
                    }else{
                        if(!d.mon.detector_lock_timeout||d.mon.detector_lock_timeout===''||d.mon.detector_lock_timeout==0){
                            d.mon.detector_lock_timeout=2000
                        }else{
                            d.mon.detector_lock_timeout=parseFloat(d.mon.detector_lock_timeout)
                        }
                        // Lock clears itself after the configured timeout.
                        s.group[d.ke][d.id].motion_lock=setTimeout(function(){
                            clearTimeout(s.group[d.ke][d.id].motion_lock);
                            delete(s.group[d.ke][d.id].motion_lock);
                        },d.mon.detector_lock_timeout)
                    }
                    // Full JPEG received: flatten the chunk list into one Buffer.
                    s.group[d.ke][d.id].buffer=Buffer.concat(s.group[d.ke][d.id].buffer);
                    // Normalize region coordinates (stringified JSON or object).
                    if((typeof d.mon.cords ==='string')&&d.mon.cords.trim()===''){
                        d.mon.cords=[]
                    }else{
                        try{
                            d.mon.cords=JSON.parse(d.mon.cords)
                        }catch(err){
                        }
                    }
                    if(d.mon.detector_frame_save==="1"){
                        // Attach the frame as base64 so triggers can include it.
                        d.base64=s.group[d.ke][d.id].buffer.toString('base64')
                    }
                    s.group[d.ke][d.id].cords=Object.values(d.mon.cords);
                    d.mon.cords=d.mon.cords;
                    d.image = new Canvas.Image;
                    if(d.mon.detector_scale_x===''||d.mon.detector_scale_y===''){
                        s.systemLog('Must set detector image size')
                        return
                    }else{
                        d.image.width=d.mon.detector_scale_x;
                        d.image.height=d.mon.detector_scale_y;
                    }
                    // Detection runs once node-canvas finishes decoding.
                    d.image.onload = function() {
                        s.checkAreas(d);
                    }
                    d.image.src = s.group[d.ke][d.id].buffer;
                    s.group[d.ke][d.id].buffer=null;
                }
            }catch(err){
                if(err){
                    s.systemLog(err)
                    delete(s.group[d.ke][d.id].buffer)
                }
            }
        break;
    }
})
|
233
plugins/motion/shinobi-motion.js
Normal file
233
plugins/motion/shinobi-motion.js
Normal file
|
@ -0,0 +1,233 @@
|
|||
//
|
||||
// Shinobi - Motion Plugin
|
||||
// Copyright (C) 2016-2025 Moe Alam, moeiscool
|
||||
//
|
||||
// # Donate
|
||||
//
|
||||
// If you like what I am doing here and want me to continue please consider donating :)
|
||||
// PayPal : paypal@m03.ca
|
||||
//
|
||||
process.on('uncaughtException', function (err) {
|
||||
console.error('uncaughtException',err);
|
||||
});
|
||||
var fs = require('fs');
|
||||
var moment = require('moment');
|
||||
var Canvas = require('canvas');
|
||||
var config=require('./conf.json');
|
||||
// Optional CLI overrides: node shinobi-motion.js [host] [port] [key]
if(process.argv[2]&&process.argv[3]){
    config.host=process.argv[2]
    config.port=process.argv[3]
    config.key=process.argv[4]
}
// Logging defaults to enabled unless the config explicitly sets it.
if(config.systemLog===undefined){config.systemLog=true}
// Global plugin state. `group` holds per-API-key detection state,
// keyed as group[ke][monitorId].
s={
    group:{},
}
|
||||
// Timestamped console logger; fully silenced when config.systemLog is
// not strictly true. Accepts up to three values; missing trailing
// values are printed as empty strings.
s.systemLog = function (q, w, e) {
    if (config.systemLog !== true) {
        return;
    }
    return console.log(moment().format(), q, w || '', e || '');
};
|
||||
// Run motion detection for one region ("cord") of monitor d.id.
// Draws the current frame onto a per-region canvas (clipped to the
// region polygon), diffs it against the previous frame's pixels, and
// emits a 'trigger' to the master when the average difference exceeds
// the region's sensitivity.
s.blenderRegion=function(d,cord){
    d.width = d.image.width;
    d.height = d.image.height;
    // Lazily build the per-region canvas the first time this region is seen.
    if(!s.group[d.ke][d.id].canvas[cord.name]){
        // Fall back to the monitor-wide sensitivity when the region has none.
        if(!cord.sensitivity||isNaN(cord.sensitivity)){
            cord.sensitivity=d.mon.detector_sensitivity;
        }
        s.group[d.ke][d.id].canvas[cord.name] = new Canvas(d.width,d.height);
        s.group[d.ke][d.id].canvasContext[cord.name] = s.group[d.ke][d.id].canvas[cord.name].getContext('2d');
        s.group[d.ke][d.id].canvasContext[cord.name].fillStyle = '#005337';
        s.group[d.ke][d.id].canvasContext[cord.name].fillRect( 0, 0,d.width,d.height);
        // Clip all future drawing to the region polygon, if one is defined.
        if(cord.points&&cord.points.length>0){
            s.group[d.ke][d.id].canvasContext[cord.name].beginPath();
            for (var b = 0; b < cord.points.length; b++){
                cord.points[b][0]=parseFloat(cord.points[b][0]);
                cord.points[b][1]=parseFloat(cord.points[b][1]);
                if(b===0){
                    s.group[d.ke][d.id].canvasContext[cord.name].moveTo(cord.points[b][0],cord.points[b][1]);
                }else{
                    s.group[d.ke][d.id].canvasContext[cord.name].lineTo(cord.points[b][0],cord.points[b][1]);
                }
            }
            s.group[d.ke][d.id].canvasContext[cord.name].clip();
        }
    }
    if(!s.group[d.ke][d.id].canvasContext[cord.name]){
        return
    }
    s.group[d.ke][d.id].canvasContext[cord.name].drawImage(d.image, 0, 0, d.width, d.height);
    // Secondary canvas that receives the blended (diff) image.
    if(!s.group[d.ke][d.id].blendRegion[cord.name]){
        s.group[d.ke][d.id].blendRegion[cord.name] = new Canvas(d.width, d.height);
        s.group[d.ke][d.id].blendRegionContext[cord.name] = s.group[d.ke][d.id].blendRegion[cord.name].getContext('2d');
    }
    var sourceData = s.group[d.ke][d.id].canvasContext[cord.name].getImageData(0, 0, d.width, d.height);
    // create an image if the previous image doesn't exist
    if (!s.group[d.ke][d.id].lastRegionImageData[cord.name]) s.group[d.ke][d.id].lastRegionImageData[cord.name] = s.group[d.ke][d.id].canvasContext[cord.name].getImageData(0, 0, d.width, d.height);
    // create a ImageData instance to receive the blended result
    var blendedData = s.group[d.ke][d.id].canvasContext[cord.name].createImageData(d.width, d.height);
    // blend the 2 images
    s.differenceAccuracy(blendedData.data,sourceData.data,s.group[d.ke][d.id].lastRegionImageData[cord.name].data);
    // draw the result in a canvas
    s.group[d.ke][d.id].blendRegionContext[cord.name].putImageData(blendedData, 0, 0);
    // store the current webcam image
    s.group[d.ke][d.id].lastRegionImageData[cord.name] = sourceData;
    blendedData = s.group[d.ke][d.id].blendRegionContext[cord.name].getImageData(0, 0, d.width, d.height);
    // Average the thresholded RGB values over every pixel; the *10
    // scale maps the result onto the user-facing sensitivity range.
    var i = 0;
    var average = 0;
    while (i < (blendedData.data.length * 0.25)) {
        average += (blendedData.data[i * 4] + blendedData.data[i * 4 + 1] + blendedData.data[i * 4 + 2]);
        ++i;
    }
    average = (average / (blendedData.data.length * 0.25))*10;
    if (average > parseFloat(cord.sensitivity)){
        // Notify the master that motion was detected in this region.
        s.cx({f:'trigger',id:d.id,ke:d.ke,details:{plug:config.plug,name:cord.name,reason:'motion',confidence:average}})

    }
    // Clear both canvases so the next frame starts clean.
    s.group[d.ke][d.id].canvasContext[cord.name].clearRect(0, 0, d.width, d.height);
    s.group[d.ke][d.id].blendRegionContext[cord.name].clearRect(0, 0, d.width, d.height);
}
|
||||
// Branch-free absolute value for 32-bit integers: XOR with the
// sign-extended sign bit, then subtract it.
function fastAbs(value) {
    var mask = value >> 31;
    return (value ^ mask) - mask;
}
|
||||
|
||||
// Binarize a pixel difference: values above 0x15 (21) become full
// white (0xFF), everything else black (0). Suppresses sensor noise.
function threshold(value) {
    if (value > 0x15) {
        return 0xFF;
    }
    return 0;
}
|
||||
|
||||
// Per-pixel "blend difference" of two equal-length RGBA buffers.
// Each RGB channel of `target` becomes |data1 - data2| (or 0 when the
// data1 channel is already 0); alpha is forced fully opaque.
// Returns null when the buffers differ in length.
function difference(target, data1, data2) {
    if (data1.length != data2.length) return null;
    var pixelCount = data1.length * 0.25; // 4 bytes per RGBA pixel
    for (var p = 0; p < pixelCount; ++p) {
        var base = 4 * p;
        for (var c = 0; c < 3; ++c) {
            var src = data1[base + c];
            target[base + c] = src == 0 ? 0 : Math.abs(src - data2[base + c]);
        }
        target[base + 3] = 0xFF;
    }
}
|
||||
// Blend two equal-length RGBA pixel buffers into `target`: a pixel
// becomes white (0xFF) when the grayscale averages of data1 and data2
// differ by more than the noise threshold, black otherwise; alpha is
// forced opaque. Returns null when the buffers differ in length.
s.differenceAccuracy=function(target, data1, data2) {
    if (data1.length != data2.length) return null;
    var i = 0;
    // length * 0.25 === pixel count (4 bytes per RGBA pixel).
    while (i < (data1.length * 0.25)) {
        var average1 = (data1[4 * i] + data1[4 * i + 1] + data1[4 * i + 2]) / 3;
        var average2 = (data2[4 * i] + data2[4 * i + 1] + data2[4 * i + 2]) / 3;
        var diff = threshold(fastAbs(average1 - average2));
        target[4 * i] = diff;
        target[4 * i + 1] = diff;
        target[4 * i + 2] = diff;
        target[4 * i + 3] = 0xFF;
        ++i;
    }
}
|
||||
|
||||
// Run detection over every configured region of the monitor, plus a
// synthetic full-frame region when detector_frame is enabled.
// Releases d.image when done.
s.checkAreas=function(d){
    if(!s.group[d.ke][d.id].cords){
        if(!d.mon.cords){d.mon.cords={}}
        s.group[d.ke][d.id].cords=Object.values(d.mon.cords);
    }
    if(d.mon.detector_frame==='1'){
        // Rectangle covering the entire image.
        d.mon.cords.frame={name:'frame',s:d.mon.detector_sensitivity,points:[[0,0],[0,d.image.height],[d.image.width,d.image.height],[d.image.width,0]]};
        s.group[d.ke][d.id].cords.push(d.mon.cords.frame);
    }
    for (var b = 0; b < s.group[d.ke][d.id].cords.length; b++){
        // NOTE(review): a falsy entry aborts the whole loop (return),
        // not just that one region - confirm this is intended.
        if(!s.group[d.ke][d.id].cords[b]){return}
        s.blenderRegion(d,s.group[d.ke][d.id].cords[b])
    }
    delete(d.image)
}
|
||||
|
||||
// Connect to the Shinobi master over websocket.
io = require('socket.io-client')('ws://'+config.host+':'+config.port);//connect to master
// Send a message to the master, stamped with this plugin's key and name.
s.cx=function(x){x.pluginKey=config.key;x.plug=config.plug;return io.emit('ocv',x)}
io.on('connect',function(d){
    // Identify this plugin to the master on every (re)connect.
    s.cx({f:'init',plug:config.plug,notice:config.notice});
})
io.on('disconnect',function(d){
    // Attempt to re-establish the connection immediately.
    io.connect();
})
|
||||
// Handle messages pushed from the master over the websocket.
io.on('f',function(d){
    switch(d.f){
        case'init_monitor':
            // Reset all per-monitor detection state so the next frame
            // starts from a clean slate.
            if(s.group[d.ke]&&s.group[d.ke][d.id]){
                s.group[d.ke][d.id].canvas={}
                s.group[d.ke][d.id].canvasContext={}
                s.group[d.ke][d.id].blendRegion={}
                s.group[d.ke][d.id].blendRegionContext={}
                s.group[d.ke][d.id].lastRegionImageData={}
                delete(s.group[d.ke][d.id].cords)
                delete(s.group[d.ke][d.id].buffer)
            }
        break;
        case'frame':
            try{
                // Lazily create the per-group / per-monitor state containers.
                if(!s.group[d.ke]){
                    s.group[d.ke]={}
                }
                if(!s.group[d.ke][d.id]){
                    s.group[d.ke][d.id]={
                        canvas:{},
                        canvasContext:{},
                        lastRegionImageData:{},
                        blendRegion:{},
                        blendRegionContext:{},
                    }
                }
                // Frames arrive as chunks; accumulate until the JPEG
                // end-of-image marker (0xFF 0xD9) is seen.
                if(!s.group[d.ke][d.id].buffer){
                    s.group[d.ke][d.id].buffer=[d.frame];
                }else{
                    s.group[d.ke][d.id].buffer.push(d.frame)
                }
                if(d.frame[d.frame.length-2] === 0xFF && d.frame[d.frame.length-1] === 0xD9){
                    // Debounce: while the motion lock is held, skip detection.
                    // NOTE(review): returning here leaves the accumulated
                    // chunks in `buffer`, which keeps growing until the
                    // lock expires - confirm this is intended.
                    if(s.group[d.ke][d.id].motion_lock){
                        return
                    }else{
                        // Lock window defaults to 2000ms when unset or zero.
                        if(!d.mon.detector_lock_timeout||d.mon.detector_lock_timeout===''||d.mon.detector_lock_timeout==0){
                            d.mon.detector_lock_timeout=2000
                        }else{
                            d.mon.detector_lock_timeout=parseFloat(d.mon.detector_lock_timeout)
                        }
                        s.group[d.ke][d.id].motion_lock=setTimeout(function(){
                            clearTimeout(s.group[d.ke][d.id].motion_lock);
                            delete(s.group[d.ke][d.id].motion_lock);
                        },d.mon.detector_lock_timeout)
                    }
                    s.group[d.ke][d.id].buffer=Buffer.concat(s.group[d.ke][d.id].buffer);
                    // Detection regions ("cords") may arrive as a JSON string.
                    if((typeof d.mon.cords ==='string')&&d.mon.cords.trim()===''){
                        d.mon.cords=[]
                    }else{
                        try{
                            d.mon.cords=JSON.parse(d.mon.cords)
                        }catch(err){
                        }
                    }
                    if(d.mon.detector_frame_save==="1"){
                        // Keep a base64 copy of the frame for event storage.
                        d.base64=s.group[d.ke][d.id].buffer.toString('base64')
                    }
                    s.group[d.ke][d.id].cords=Object.values(d.mon.cords);
                    // No-op self-assignment, retained as-is.
                    d.mon.cords=d.mon.cords;
                    d.image = new Canvas.Image;
                    if(d.mon.detector_scale_x===''||d.mon.detector_scale_y===''){
                        s.systemLog('Must set detector image size')
                        return
                    }else{
                        d.image.width=d.mon.detector_scale_x;
                        d.image.height=d.mon.detector_scale_y;
                    }
                    // Detection runs once the JPEG has been decoded.
                    d.image.onload = function() {
                        s.checkAreas(d);
                    }
                    d.image.src = s.group[d.ke][d.id].buffer;
                    s.group[d.ke][d.id].buffer=null;
                }
            }catch(err){
                // On any failure, log and drop the partial frame buffer.
                if(err){
                    s.systemLog(err)
                    delete(s.group[d.ke][d.id].buffer)
                }
            }
        break;
    }
})
|
2
plugins/openalpr/.gitignore
vendored
Normal file
2
plugins/openalpr/.gitignore
vendored
Normal file
|
@ -0,0 +1,2 @@
|
|||
conf.json
|
||||
cascades
|
93
plugins/openalpr/README.md
Normal file
93
plugins/openalpr/README.md
Normal file
|
@ -0,0 +1,93 @@
|
|||
# OpenALPR and Motion Detector
|
||||
|
||||
Install required libraries.
|
||||
|
||||
**Ubuntu and Debian only**
|
||||
|
||||
```
|
||||
sudo apt update && sudo apt install libcairo2-dev libjpeg-dev libpango1.0-dev libgif-dev build-essential g++ openalpr openalpr-daemon openalpr-utils libopenalpr-dev -y
|
||||
```
|
||||
|
||||
**Install the Node.js Canvas engine**
|
||||
|
||||
```
|
||||
sudo npm install canvas@1.6
|
||||
```
|
||||
Go to the Shinobi directory. **Below is an example.**
|
||||
|
||||
```
|
||||
cd /home/Shinobi
|
||||
```
|
||||
|
||||
Copy the config file.
|
||||
|
||||
```
|
||||
cp plugins/openalpr/conf.sample.json plugins/openalpr/conf.json
|
||||
```
|
||||
|
||||
Edit the new file. Host should be `localhost` and the port should match the `listening port for camera.js`.
|
||||
|
||||
```
|
||||
nano plugins/openalpr/conf.json
|
||||
```
|
||||
|
||||
Start the plugin.
|
||||
|
||||
```
|
||||
node plugins/openalpr/shinobi-openalpr.js
|
||||
```
|
||||
|
||||
Or to daemonize with PM2.
|
||||
|
||||
```
|
||||
pm2 start plugins/openalpr/shinobi-openalpr.js
|
||||
```
|
||||
|
||||
Doing this will reveal options in the monitor configuration. Shinobi does not need to be restarted when a plugin is initiated or stopped.
|
||||
|
||||
## Run the plugin as a Host
|
||||
> The main app (Shinobi) will be the client and the plugin will be the host. The purpose of allowing this method is so that you can use one plugin for multiple Shinobi instances. Allowing you to easily manage connections without starting multiple processes.
|
||||
|
||||
Edit your plugins configuration file. Set the `hostPort` **to be different** than the `listening port for camera.js`.
|
||||
|
||||
```
|
||||
nano plugins/openalpr/conf.json
|
||||
```
|
||||
|
||||
Here is a sample of a Host configuration for the plugin.
|
||||
- `plug` is the name of the plugin corresponding in the main configuration file.
|
||||
- `https` choose if you want to use SSL or not. Default is `false`.
|
||||
- `hostPort` can be any available port number. **Don't make this the same port number as Shinobi.** Default is `8082`.
|
||||
- `type` tells the main application (Shinobi) what kind of plugin it is. In this case it is a detector.
|
||||
|
||||
```
|
||||
{
|
||||
"plug":"OpenALPR",
|
||||
"hostPort":8082,
|
||||
"key":"SomeOpenALPRkeySoPeopleDontMessWithYourShinobi",
|
||||
"mode":"host",
|
||||
"type":"detector"
|
||||
}
|
||||
```
|
||||
|
||||
Now modify the **main configuration file** located in the main directory of Shinobi. *Where you currently should be.*
|
||||
|
||||
```
|
||||
nano conf.json
|
||||
```
|
||||
|
||||
Add the `plugins` array if you don't already have it. Add the following *object inside the array*.
|
||||
|
||||
```
|
||||
"plugins":[
|
||||
{
|
||||
"id" : "OpenALPR",
|
||||
"https" : false,
|
||||
"host" : "localhost",
|
||||
"port" : 8082,
|
||||
"key" : "SomeOpenALPRkeySoPeopleDontMessWithYourShinobi",
|
||||
"mode" : "host",
|
||||
"type" : "detector"
|
||||
}
|
||||
],
|
||||
```
|
9
plugins/openalpr/conf.sample.json
Normal file
9
plugins/openalpr/conf.sample.json
Normal file
|
@ -0,0 +1,9 @@
|
|||
{
|
||||
"plug":"OpenALPR",
|
||||
"host":"localhost",
|
||||
"port":8080,
|
||||
"hostPort":8082,
|
||||
"key":"SomeOpenALPRkeySoPeopleDontMessWithYourShinobi",
|
||||
"mode":"client",
|
||||
"type":"detector"
|
||||
}
|
94
plugins/openalpr/openalpr.conf
Normal file
94
plugins/openalpr/openalpr.conf
Normal file
|
@ -0,0 +1,94 @@
|
|||
|
||||
; Specify the path to the runtime data directory
|
||||
runtime_dir = ${CMAKE_INSTALL_PREFIX}/share/openalpr/runtime_data
|
||||
|
||||
|
||||
ocr_img_size_percent = 1.33333333
|
||||
state_id_img_size_percent = 2.0
|
||||
|
||||
; Calibrating your camera improves detection accuracy in cases where vehicle plates are captured at a steep angle
|
||||
; Use the openalpr-utils-calibrate utility to calibrate your fixed camera to adjust for an angle
|
||||
; Once done, update the prewarp config with the values obtained from the tool
|
||||
prewarp =
|
||||
|
||||
; detection will ignore plates that are too large. This is a good efficiency technique to use if the
|
||||
; plates are going to be a fixed distance away from the camera (e.g., you will never see plates that fill
|
||||
; up the entire image
|
||||
max_plate_width_percent = 100
|
||||
max_plate_height_percent = 100
|
||||
|
||||
; detection_iteration_increase is the percentage that the LBP frame increases each iteration.
|
||||
; It must be greater than 1.0. A value of 1.01 means increase by 1%, 1.10 increases it by 10% each time.
|
||||
; So a 1% increase would be ~10x slower than 10% to process, but it has a higher chance of landing
|
||||
; directly on the plate and getting a strong detection
|
||||
detection_iteration_increase = 1.1
|
||||
|
||||
; The minimum detection strength determines how sure the detection algorithm must be before signaling that
|
||||
; a plate region exists. Technically this corresponds to LBP nearest neighbors (e.g., how many detections
|
||||
; are clustered around the same area). For example, 2 = very lenient, 9 = very strict.
|
||||
detection_strictness = 3
|
||||
|
||||
; The detection doesn't necessarily need an extremely high resolution image in order to detect plates
|
||||
; Using a smaller input image should still find the plates and will do it faster
|
||||
; Tweaking the max_detection_input values will resize the input image if it is larger than these sizes
|
||||
; max_detection_input_width/height are specified in pixels
|
||||
max_detection_input_width = 1280
|
||||
max_detection_input_height = 720
|
||||
|
||||
; detector is the technique used to find license plate regions in an image. Value can be set to
|
||||
; lbpcpu - default LBP-based detector uses the system CPU
|
||||
; lbpgpu - LBP-based detector that uses Nvidia GPU to increase recognition speed.
|
||||
; lbpopencl - LBP-based detector that uses OpenCL GPU to increase recognition speed. Requires OpenCV 3.0
|
||||
; morphcpu - Experimental detector that detects white rectangles in an image. Does not require training.
|
||||
detector = lbpgpu
|
||||
|
||||
; If set to true, all results must match a postprocess text pattern if a pattern is available.
|
||||
; If not, the result is disqualified.
|
||||
must_match_pattern = 0
|
||||
|
||||
; Bypasses plate detection. If this is set to 1, the library assumes that each region provided is a likely plate area.
|
||||
skip_detection = 0
|
||||
|
||||
; Specifies the full path to an image file that constrains the detection area. Only the plate regions allowed through the mask
|
||||
; will be analyzed. The mask image must match the resolution of your image to be analyzed. The mask is black and white.
|
||||
; Black areas will be ignored, white areas will be searched. An empty value means no mask (scan the entire image)
|
||||
detection_mask_image =
|
||||
|
||||
; OpenALPR can scan the same image multiple times with different randomization. Setting this to a value larger than
|
||||
; 1 may increase accuracy, but will increase processing time linearly (e.g., analysis_count = 3 is 3x slower)
|
||||
analysis_count = 1
|
||||
|
||||
; OpenALPR detects high-contrast plate crops and uses an alternative edge detection technique. Setting this to 0.0
|
||||
; would classify ALL images as high-contrast, setting it to 1.0 would classify no images as high-contrast.
|
||||
contrast_detection_threshold = 0.3
|
||||
|
||||
max_plate_angle_degrees = 15
|
||||
|
||||
ocr_min_font_point = 6
|
||||
|
||||
; Minimum OCR confidence percent to consider.
|
||||
postprocess_min_confidence = 65
|
||||
|
||||
; Any OCR character lower than this will also add an equally likely
|
||||
; chance that the character is incorrect and will be skipped. Value is a confidence percent
|
||||
postprocess_confidence_skip_level = 80
|
||||
|
||||
|
||||
debug_general = 0
|
||||
debug_timing = 0
|
||||
debug_detector = 0
|
||||
debug_prewarp = 0
|
||||
debug_state_id = 0
|
||||
debug_plate_lines = 0
|
||||
debug_plate_corners = 0
|
||||
debug_char_segment = 0
|
||||
debug_char_analysis = 0
|
||||
debug_color_filter = 0
|
||||
debug_ocr = 0
|
||||
debug_postprocess = 0
|
||||
debug_show_images = 0
|
||||
debug_pause_on_frame = 0
|
||||
|
||||
|
||||
|
||||
|
18
plugins/openalpr/package.json
Normal file
18
plugins/openalpr/package.json
Normal file
|
@ -0,0 +1,18 @@
|
|||
{
|
||||
"name": "shinobi-openalpr",
|
||||
"version": "1.0.0",
|
||||
"description": "OpenALPR plugin for Shinobi",
|
||||
"main": "shinobi-openalpr.js",
|
||||
"dependencies": {
|
||||
"canvas": "^1.6.7",
|
||||
"express": "^4.16.2",
|
||||
"moment": "^2.19.2",
|
||||
"socket.io": "^2.0.4"
|
||||
},
|
||||
"devDependencies": {},
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"author": "Moe Alam",
|
||||
"license": "ISC"
|
||||
}
|
413
plugins/openalpr/shinobi-openalpr.js
Normal file
413
plugins/openalpr/shinobi-openalpr.js
Normal file
|
@ -0,0 +1,413 @@
|
|||
//
|
||||
// Shinobi - OpenALPR Plugin
|
||||
// Copyright (C) 2016-2025 Moe Alam, moeiscool
|
||||
//
|
||||
// # Donate
|
||||
//
|
||||
// If you like what I am doing here and want me to continue please consider donating :)
|
||||
// PayPal : paypal@m03.ca
|
||||
//
|
||||
process.on('uncaughtException', function (err) {
|
||||
console.error('uncaughtException',err);
|
||||
});
|
||||
//main vars
|
||||
var fs=require('fs');
|
||||
var exec = require('child_process').exec;
|
||||
var moment = require('moment');
|
||||
var Canvas = require('canvas');
|
||||
var express = require('express');
|
||||
var config=require('./conf.json');
|
||||
var http = require('http'),
|
||||
app = express(),
|
||||
server = http.createServer(app);
|
||||
s={
|
||||
group:{},
|
||||
dir:{
|
||||
cascades:__dirname+'/cascades/'
|
||||
},
|
||||
isWin:(process.platform==='win32'),
|
||||
s:function(json){return JSON.stringify(json,null,3)}
|
||||
}
|
||||
// Ensure a directory path ends with '/' and expand the literal token
// "__DIR__" (first occurrence only) to this plugin's directory.
s.checkCorrectPathEnding = function (x) {
    var normalized = x.endsWith('/') ? x : x + '/';
    return normalized.replace('__DIR__', __dirname);
};
|
||||
if(!config.port){config.port=8080}
|
||||
if(!config.hostPort){config.hostPort=8082}
|
||||
if(config.systemLog===undefined){config.systemLog=true}
|
||||
if(config.alprConfig===undefined){config.alprConfig=__dirname+'/openalpr.conf'}
|
||||
//default stream folder check
|
||||
if(!config.streamDir){
|
||||
if(s.isWin===false){
|
||||
config.streamDir='/dev/shm'
|
||||
}else{
|
||||
config.streamDir=config.windowsTempDir
|
||||
}
|
||||
if(!fs.existsSync(config.streamDir)){
|
||||
config.streamDir=__dirname+'/streams/'
|
||||
}else{
|
||||
config.streamDir+='/streams/'
|
||||
}
|
||||
}
|
||||
s.dir.streams=config.streamDir;
|
||||
//streams dir
|
||||
if(!fs.existsSync(s.dir.streams)){
|
||||
fs.mkdirSync(s.dir.streams);
|
||||
}
|
||||
// Generate a pseudo-random alphanumeric ID of `x` characters
// (default 10). Not cryptographically secure - used for temp file names.
s.gid = function (x) {
    if (!x) { x = 10; }
    var alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
    var out = "";
    for (var i = 0; i < x; i++) {
        out += alphabet.charAt(Math.floor(Math.random() * alphabet.length));
    }
    return out;
};
|
||||
// Run OpenALPR license-plate recognition on a JPEG frame.
// buffer - complete JPEG bytes; d - frame event (ke/id/mon...);
// tx - callback used to send a 'trigger' back to Shinobi.
// Writes the frame to a temp file, shells out to `alpr -j`, parses the
// JSON result and emits one trigger containing all detected plates.
s.detectObject=function(buffer,d,tx){
    // NOTE(review): `keys` is unused below; the call also implicitly
    // throws if detector_cascades is missing - confirm before removing.
    var keys = Object.keys(d.mon.detector_cascades);
    if(d.mon.detector_lisence_plate==="1"){
        // Default to US plates when no country is configured.
        if(!d.mon.detector_lisence_plate_country||d.mon.detector_lisence_plate_country===''){
            d.mon.detector_lisence_plate_country='us'
        }
        d.tmpFile=s.gid(5)+'.jpg'
        // Build <streams>/<ke>/<id>/ for the temp frame, creating each level.
        if(!fs.existsSync(s.dir.streams)){
            fs.mkdirSync(s.dir.streams);
        }
        d.dir=s.dir.streams+d.ke+'/'
        if(!fs.existsSync(d.dir)){
            fs.mkdirSync(d.dir);
        }
        d.dir=s.dir.streams+d.ke+'/'+d.id+'/'
        if(!fs.existsSync(d.dir)){
            fs.mkdirSync(d.dir);
        }
        fs.writeFile(d.dir+d.tmpFile,buffer,function(err){
            if(err) return s.systemLog(err);
            // Remove the temp frame. Fix: was exec('rm -rf '+path), which
            // spawned a shell per frame (injection surface, not portable)
            // and was skipped entirely on the early-return path below.
            var removeTempFile=function(){
                fs.unlink(d.dir+d.tmpFile,function(err){
                    if(err)s.systemLog(err);
                })
            }
            // NOTE(review): the alpr command line is built by string
            // concatenation; ke/id originate from the network - confirm
            // they are sanitized upstream.
            exec('alpr -j --config '+config.alprConfig+' -c '+d.mon.detector_lisence_plate_country+' '+d.dir+d.tmpFile,{encoding:'utf8'},(err, scan, stderr) => {
                if(err){
                    s.systemLog(err);
                }else{
                    try{
                        try{
                            // alpr prepends a CUDA banner line; strip it first.
                            scan=JSON.parse(scan.replace('--(!)Loaded CUDA classifier','').trim())
                        }catch(err){
                            if(!scan||!scan.results){
                                s.systemLog(scan,err);
                                removeTempFile()
                                return
                            }
                        }
                        if(scan.results.length>0){
                            if(s.isNumberOfTriggersMet(d,2)){
                                scan.plates=[]
                                scan.mats=[]
                                scan.results.forEach(function(v){
                                    // Drop bulky per-candidate template flags.
                                    v.candidates.forEach(function(g,n){
                                        if(v.candidates[n].matches_template)
                                            delete(v.candidates[n].matches_template)
                                    })
                                    scan.plates.push({coordinates:v.coordinates,candidates:v.candidates,confidence:v.confidence,plate:v.plate})
                                    // Plate box size from the corner coordinates.
                                    var width = Math.sqrt( Math.pow(v.coordinates[1].x - v.coordinates[0].x, 2) + Math.pow(v.coordinates[1].y - v.coordinates[0].y, 2));
                                    var height = Math.sqrt( Math.pow(v.coordinates[2].x - v.coordinates[1].x, 2) + Math.pow(v.coordinates[2].y - v.coordinates[1].y, 2))
                                    scan.mats.push({
                                        x:v.coordinates[0].x,
                                        y:v.coordinates[0].y,
                                        width:width,
                                        height:height,
                                        tag:v.plate
                                    })
                                })
                                tx({f:'trigger',id:d.id,ke:d.ke,details:{split:true,plug:config.plug,name:'licensePlate',reason:'object',matrices:scan.mats,imgHeight:d.mon.detector_scale_y,imgWidth:d.mon.detector_scale_x,frame:d.base64}})
                            }
                        }
                    }catch(err){
                        s.systemLog(scan,err);
                    }
                }
                removeTempFile()
            })
        })
    }
}
|
||||
// Timestamped console logger, gated by config.systemLog.
//
// Bug fix: the original began with `if(w===undefined){return}`, which
// silently discarded every single-argument call - including
// s.systemLog('Must set detector image size') and most error logging
// in this plugin. The sibling motion plugin's systemLog has no such
// guard; this version matches it.
s.systemLog=function(q,w,e){
    if(!w){w=''}
    if(!e){e=''}
    if(config.systemLog===true){
        return console.log(moment().format(),q,w,e)
    }
}
|
||||
// Run motion detection for one region ("cord") of monitor d.id.
// Draws the current frame onto a per-region canvas (clipped to the
// region polygon), diffs it against the previous frame, and on motion
// either hands the region image to OpenALPR or emits a trigger via tx.
s.blenderRegion=function(d,cord,tx){
    d.width = d.image.width;
    d.height = d.image.height;
    // Lazily build the per-region canvas the first time this region is seen.
    if(!s.group[d.ke][d.id].canvas[cord.name]){
        // Fall back to the monitor-wide sensitivity when the region has none.
        if(!cord.sensitivity||isNaN(cord.sensitivity)){
            cord.sensitivity=d.mon.detector_sensitivity;
        }
        s.group[d.ke][d.id].canvas[cord.name] = new Canvas(d.width,d.height);
        s.group[d.ke][d.id].canvasContext[cord.name] = s.group[d.ke][d.id].canvas[cord.name].getContext('2d');
        s.group[d.ke][d.id].canvasContext[cord.name].fillStyle = '#000';
        s.group[d.ke][d.id].canvasContext[cord.name].fillRect( 0, 0,d.width,d.height);
        // Clip all future drawing to the region polygon, if one is defined.
        if(cord.points&&cord.points.length>0){
            s.group[d.ke][d.id].canvasContext[cord.name].beginPath();
            for (var b = 0; b < cord.points.length; b++){
                cord.points[b][0]=parseFloat(cord.points[b][0]);
                cord.points[b][1]=parseFloat(cord.points[b][1]);
                if(b===0){
                    s.group[d.ke][d.id].canvasContext[cord.name].moveTo(cord.points[b][0],cord.points[b][1]);
                }else{
                    s.group[d.ke][d.id].canvasContext[cord.name].lineTo(cord.points[b][0],cord.points[b][1]);
                }
            }
            s.group[d.ke][d.id].canvasContext[cord.name].clip();
        }
    }
    if(!s.group[d.ke][d.id].canvasContext[cord.name]){
        return
    }
    s.group[d.ke][d.id].canvasContext[cord.name].drawImage(d.image, 0, 0, d.width, d.height);
    // Secondary canvas that receives the blended (diff) image.
    if(!s.group[d.ke][d.id].blendRegion[cord.name]){
        s.group[d.ke][d.id].blendRegion[cord.name] = new Canvas(d.width, d.height);
        s.group[d.ke][d.id].blendRegionContext[cord.name] = s.group[d.ke][d.id].blendRegion[cord.name].getContext('2d');
    }
    var sourceData = s.group[d.ke][d.id].canvasContext[cord.name].getImageData(0, 0, d.width, d.height);
    // create an image if the previous image doesn't exist
    if (!s.group[d.ke][d.id].lastRegionImageData[cord.name]) s.group[d.ke][d.id].lastRegionImageData[cord.name] = s.group[d.ke][d.id].canvasContext[cord.name].getImageData(0, 0, d.width, d.height);
    // create a ImageData instance to receive the blended result
    var blendedData = s.group[d.ke][d.id].canvasContext[cord.name].createImageData(d.width, d.height);
    // blend the 2 images
    s.differenceAccuracy(blendedData.data,sourceData.data,s.group[d.ke][d.id].lastRegionImageData[cord.name].data);
    // draw the result in a canvas
    s.group[d.ke][d.id].blendRegionContext[cord.name].putImageData(blendedData, 0, 0);
    // store the current webcam image
    s.group[d.ke][d.id].lastRegionImageData[cord.name] = sourceData;
    blendedData = s.group[d.ke][d.id].blendRegionContext[cord.name].getImageData(0, 0, d.width, d.height);
    // Average the thresholded RGB values over every pixel; the *10
    // scale maps the result onto the user-facing sensitivity range.
    var i = 0;
    d.average = 0;
    while (i < (blendedData.data.length * 0.25)) {
        d.average += (blendedData.data[i * 4] + blendedData.data[i * 4 + 1] + blendedData.data[i * 4 + 2]);
        ++i;
    }
    d.average = (d.average / (blendedData.data.length * 0.25))*10;
    if (d.average > parseFloat(cord.sensitivity)){
        if(s.isNumberOfTriggersMet(d,2)){
            if(d.mon.detector_use_detect_object==="1"&&d.mon.detector_second!=='1'){
                // Motion found: hand the region image to plate recognition.
                var buffer=s.group[d.ke][d.id].canvas[cord.name].toBuffer();
                s.detectObject(buffer,d,tx)
            }else{
                // Plain motion trigger back to Shinobi.
                tx({f:'trigger',id:d.id,ke:d.ke,details:{split:true,plug:config.plug,name:cord.name,reason:'motion',confidence:d.average,frame:d.base64}})
            }
        }
    }
    // Clear both canvases so the next frame starts clean.
    s.group[d.ke][d.id].canvasContext[cord.name].clearRect(0, 0, d.width, d.height);
    s.group[d.ke][d.id].blendRegionContext[cord.name].clearRect(0, 0, d.width, d.height);
}
|
||||
// Convert a Blob to a Node Buffer via FileReader, invoking cb(err, buf).
// NOTE(review): Blob and FileReader are browser APIs and are not
// defined in a plain Node.js process; nothing in this file calls this
// function - confirm whether it is dead code before relying on it.
function blobToBuffer (blob, cb) {
    if (typeof Blob === 'undefined' || !(blob instanceof Blob)) {
        throw new Error('first argument must be a Blob')
    }
    if (typeof cb !== 'function') {
        throw new Error('second argument must be a function')
    }

    var reader = new FileReader()

    function onLoadEnd (e) {
        reader.removeEventListener('loadend', onLoadEnd, false)
        if (e.error) cb(e.error)
        else cb(null, Buffer.from(reader.result))
    }

    reader.addEventListener('loadend', onLoadEnd, false)
    reader.readAsArrayBuffer(blob)
}
|
||||
// Branch-free absolute value for 32-bit integers: XOR with the
// sign-extended sign bit, then subtract it.
function fastAbs(value) {
    var mask = value >> 31;
    return (value ^ mask) - mask;
}
|
||||
|
||||
// Binarize a pixel difference: values above 0x15 (21) become full
// white (0xFF), everything else black (0). Suppresses sensor noise.
function threshold(value) {
    if (value > 0x15) {
        return 0xFF;
    }
    return 0;
}
|
||||
// Blend two equal-length RGBA pixel buffers into `target`: a pixel
// becomes white (0xFF) when the grayscale averages of data1 and data2
// differ by more than the noise threshold, black otherwise; alpha is
// forced opaque. Returns null when the buffers differ in length.
s.differenceAccuracy=function(target, data1, data2) {
    if (data1.length != data2.length) return null;
    var i = 0;
    // length * 0.25 === pixel count (4 bytes per RGBA pixel).
    while (i < (data1.length * 0.25)) {
        var average1 = (data1[4 * i] + data1[4 * i + 1] + data1[4 * i + 2]) / 3;
        var average2 = (data2[4 * i] + data2[4 * i + 1] + data2[4 * i + 2]) / 3;
        var diff = threshold(fastAbs(average1 - average2));
        target[4 * i] = diff;
        target[4 * i + 1] = diff;
        target[4 * i + 2] = diff;
        target[4 * i + 3] = 0xFF;
        ++i;
    }
}
|
||||
// Run detection over every configured region of the monitor, plus a
// synthetic FULL_FRAME region when detector_frame is enabled.
// Releases d.image when done.
s.checkAreas=function(d,tx){
    if(!s.group[d.ke][d.id].cords){
        if(!d.mon.cords){d.mon.cords={}}
        s.group[d.ke][d.id].cords=Object.values(d.mon.cords);
    }
    if(d.mon.detector_frame==='1'){
        // Rectangle covering the entire image.
        d.mon.cords.frame={name:'FULL_FRAME',s:d.mon.detector_sensitivity,points:[[0,0],[0,d.image.height],[d.image.width,d.image.height],[d.image.width,0]]};
        s.group[d.ke][d.id].cords.push(d.mon.cords.frame);
    }
    for (var b = 0; b < s.group[d.ke][d.id].cords.length; b++){
        // NOTE(review): a falsy entry aborts the whole loop (return),
        // not just that one region - confirm this is intended.
        if(!s.group[d.ke][d.id].cords[b]){return}
        s.blenderRegion(d,s.group[d.ke][d.id].cords[b],tx)
    }
    delete(d.image)
}
|
||||
// Debounce hook for detection triggers. A counting implementation
// (count triggers per monitor, reset after 10s, require more than
// `max`) was previously stubbed out here; the current behavior is to
// allow every trigger unconditionally.
s.isNumberOfTriggersMet = function (d, max) {
    return true;
};
|
||||
// Central message dispatcher shared by host and client modes.
// d - payload from Shinobi; cn - socket connection (host mode only);
// tx - function used to send replies back to Shinobi.
s.MainEventController=function(d,cn,tx){
    switch(d.f){
        case'init_plugin_as_host':
            // Host-mode handshake: validate the shared plugin key.
            if(!cn){
                console.log('No CN',d)
                return
            }
            if(d.key!==config.key){
                console.log(new Date(),'Plugin Key Mismatch',cn.request.connection.remoteAddress,d)
                cn.emit('init',{ok:false})
                cn.disconnect()
            }else{
                console.log(new Date(),'Plugin Connected to Client',cn.request.connection.remoteAddress)
                cn.emit('init',{ok:true,plug:config.plug,notice:config.notice,type:config.type})
            }
        break;
        case'init_monitor':
            // Reset all per-monitor detection state.
            if(s.group[d.ke]&&s.group[d.ke][d.id]){
                s.group[d.ke][d.id].canvas={}
                s.group[d.ke][d.id].canvasContext={}
                s.group[d.ke][d.id].blendRegion={}
                s.group[d.ke][d.id].blendRegionContext={}
                s.group[d.ke][d.id].lastRegionImageData={}
                s.group[d.ke][d.id].numberOfTriggers=0
                delete(s.group[d.ke][d.id].cords)
                delete(s.group[d.ke][d.id].buffer)
            }
        break;
        case'init_aws_push':
            // Prepare the accumulator used for AWS upload pushes.
            // console.log('init_aws')
            s.group[d.ke][d.id].aws={links:[],complete:0,total:d.total,videos:[],tx:tx}
        break;
        case'frame':
            try{
                // Lazily create the per-group / per-monitor state containers.
                if(!s.group[d.ke]){
                    s.group[d.ke]={}
                }
                if(!s.group[d.ke][d.id]){
                    s.group[d.ke][d.id]={
                        canvas:{},
                        canvasContext:{},
                        lastRegionImageData:{},
                        blendRegion:{},
                        blendRegionContext:{},
                    }
                }
                // Frames arrive as chunks; accumulate until the JPEG
                // end-of-image marker (0xFF 0xD9) is seen.
                if(!s.group[d.ke][d.id].buffer){
                    s.group[d.ke][d.id].buffer=[d.frame];
                }else{
                    s.group[d.ke][d.id].buffer.push(d.frame)
                }
                if(d.frame[d.frame.length-2] === 0xFF && d.frame[d.frame.length-1] === 0xD9){
                    s.group[d.ke][d.id].buffer=Buffer.concat(s.group[d.ke][d.id].buffer);
                    // Cascade list may arrive as a JSON string.
                    try{
                        d.mon.detector_cascades=JSON.parse(d.mon.detector_cascades)
                    }catch(err){

                    }
                    if(d.mon.detector_frame_save==="1"){
                        // Keep a base64 copy of the frame for event storage.
                        d.base64=s.group[d.ke][d.id].buffer.toString('base64')
                    }
                    if(d.mon.detector_second==='1'&&d.objectOnly===true){
                        // Second-pass mode: go straight to plate recognition.
                        s.detectObject(s.group[d.ke][d.id].buffer,d,tx)
                    }else{
                        if(d.mon.detector_use_motion==="1"||d.mon.detector_use_detect_object!=="1"){
                            // Motion-first path: parse detection regions and
                            // run the pixel-diff detector after decode.
                            if((typeof d.mon.cords ==='string')&&d.mon.cords.trim()===''){
                                d.mon.cords=[]
                            }else{
                                try{
                                    d.mon.cords=JSON.parse(d.mon.cords)
                                }catch(err){
                                    // console.log('d.mon.cords',err,d)
                                }
                            }
                            s.group[d.ke][d.id].cords=Object.values(d.mon.cords);
                            // No-op self-assignment, retained as-is.
                            d.mon.cords=d.mon.cords;
                            d.image = new Canvas.Image;
                            if(d.mon.detector_scale_x===''||d.mon.detector_scale_y===''){
                                s.systemLog('Must set detector image size')
                                return
                            }else{
                                d.image.width=d.mon.detector_scale_x;
                                d.image.height=d.mon.detector_scale_y;
                            }
                            d.width=d.image.width;
                            d.height=d.image.height;
                            // Detection runs once the JPEG has been decoded.
                            d.image.onload = function() {
                                s.checkAreas(d,tx);
                            }
                            d.image.src = s.group[d.ke][d.id].buffer;
                        }else{
                            // Object-detection-only path.
                            s.detectObject(s.group[d.ke][d.id].buffer,d,tx)
                        }
                    }
                    s.group[d.ke][d.id].buffer=null;
                }
            }catch(err){
                // On any failure, log and drop the partial frame buffer.
                if(err){
                    s.systemLog(err)
                    delete(s.group[d.ke][d.id].buffer)
                }
            }
        break;
    }
}
|
||||
// Start the plugin's own HTTP server (status page below).
server.listen(config.hostPort);
//web pages and plugin api
// Simple health-check page so an admin can confirm the plugin is up.
app.get('/', function (req, res) {
    res.end('<b>'+config.plug+'</b> for Shinobi is running')
});
//Connector to Shinobi
if(config.mode==='host'){
    //start plugin as host : Shinobi connects to this plugin over socket.io
    var io = require('socket.io')(server);
    io.attach(server);
    // per-connection registry; each entry carries its own tx emitter
    s.connectedClients={};
    io.on('connection', function (cn) {
        s.connectedClients[cn.id]={id:cn.id}
        // tx stamps every outbound message with this plugin's key and name
        s.connectedClients[cn.id].tx = function(data){
            data.pluginKey=config.key;data.plug=config.plug;
            return io.to(cn.id).emit('ocv',data);
        }
        cn.on('f',function(d){
            s.MainEventController(d,cn,s.connectedClients[cn.id].tx)
        });
        cn.on('disconnect',function(d){
            delete(s.connectedClients[cn.id])
        })
    });
}else{
    //start plugin as client : this plugin dials out to the Shinobi master
    if(!config.host){config.host='localhost'}
    var io = require('socket.io-client')('ws://'+config.host+':'+config.port);//connect to master
    // s.cx only exists in client mode; it stamps and emits over the single socket
    s.cx=function(x){x.pluginKey=config.key;x.plug=config.plug;return io.emit('ocv',x)}
    io.on('connect',function(d){
        // announce this detector plugin to the master on (re)connect
        s.cx({f:'init',plug:config.plug,notice:config.notice,type:config.type});
    })
    io.on('disconnect',function(d){
        // immediately retry the connection when dropped
        io.connect();
    })
    io.on('f',function(d){
        // no cn (socket handle) in client mode; s.cx is the reply channel
        s.MainEventController(d,null,s.cx)
    })
}
|
15
plugins/opencv/INSTALL.sh
Normal file
15
plugins/opencv/INSTALL.sh
Normal file
|
@ -0,0 +1,15 @@
|
|||
#!/bin/bash
# Shinobi OpenCV plugin installer.
# Decides whether the opencv4nodejs npm package should auto-build OpenCV,
# then installs the plugin's node dependencies.
if [ $(dpkg-query -W -f='${Status}' opencv_version 2>/dev/null | grep -c "ok installed") -eq 0 ]; then
    # fix: the prompt previously wrapped opencv4nodejs in backticks inside a
    # double-quoted string, which made the shell try to RUN it as a command
    # (command substitution) instead of printing the name. Also fixes "ypu".
    echo "Shinobi - Do you want to let the 'opencv4nodejs' npm package install OpenCV?"
    echo "Only do this if you do not have OpenCV already or will not use a GPU (Hardware Acceleration)."
    echo "(y)es or (N)o"
    read nodejsinstall
    if [ "$nodejsinstall" = "y" ] || [ "$nodejsinstall" = "Y" ]; then
        export OPENCV4NODEJS_DISABLE_AUTOBUILD=0
    else
        export OPENCV4NODEJS_DISABLE_AUTOBUILD=1
    fi
else
    # OpenCV is already installed system-wide; skip the npm autobuild.
    export OPENCV4NODEJS_DISABLE_AUTOBUILD=1
fi
npm install opencv4nodejs moment express canvas@1.6 --unsafe-perm
|
9
plugins/opencv/conf.sample.json
Normal file
9
plugins/opencv/conf.sample.json
Normal file
|
@ -0,0 +1,9 @@
|
|||
{
|
||||
"plug":"OpenCV",
|
||||
"host":"localhost",
|
||||
"port":8080,
|
||||
"hostPort":8082,
|
||||
"key":"change_this_to_something_very_random____make_sure_to_match__/plugins/opencv/conf.json",
|
||||
"mode":"client",
|
||||
"type":"detector"
|
||||
}
|
94
plugins/opencv/openalpr.conf
Normal file
94
plugins/opencv/openalpr.conf
Normal file
|
@ -0,0 +1,94 @@
|
|||
|
||||
; Specify the path to the runtime data directory
|
||||
runtime_dir = ${CMAKE_INSTALL_PREFIX}/share/openalpr/runtime_data
|
||||
|
||||
|
||||
ocr_img_size_percent = 1.33333333
|
||||
state_id_img_size_percent = 2.0
|
||||
|
||||
; Calibrating your camera improves detection accuracy in cases where vehicle plates are captured at a steep angle
|
||||
; Use the openalpr-utils-calibrate utility to calibrate your fixed camera to adjust for an angle
|
||||
; Once done, update the prewarp config with the values obtained from the tool
|
||||
prewarp =
|
||||
|
||||
; detection will ignore plates that are too large. This is a good efficiency technique to use if the
|
||||
; plates are going to be a fixed distance away from the camera (e.g., you will never see plates that fill
|
||||
; up the entire image)
|
||||
max_plate_width_percent = 100
|
||||
max_plate_height_percent = 100
|
||||
|
||||
; detection_iteration_increase is the percentage that the LBP frame increases each iteration.
|
||||
; It must be greater than 1.0. A value of 1.01 means increase by 1%, 1.10 increases it by 10% each time.
|
||||
; So a 1% increase would be ~10x slower than 10% to process, but it has a higher chance of landing
|
||||
; directly on the plate and getting a strong detection
|
||||
detection_iteration_increase = 1.1
|
||||
|
||||
; The minimum detection strength determines how sure the detection algorithm must be before signaling that
|
||||
; a plate region exists. Technically this corresponds to LBP nearest neighbors (e.g., how many detections
|
||||
; are clustered around the same area). For example, 2 = very lenient, 9 = very strict.
|
||||
detection_strictness = 3
|
||||
|
||||
; The detection doesn't necessarily need an extremely high resolution image in order to detect plates
|
||||
; Using a smaller input image should still find the plates and will do it faster
|
||||
; Tweaking the max_detection_input values will resize the input image if it is larger than these sizes
|
||||
; max_detection_input_width/height are specified in pixels
|
||||
max_detection_input_width = 1280
|
||||
max_detection_input_height = 720
|
||||
|
||||
; detector is the technique used to find license plate regions in an image. Value can be set to
|
||||
; lbpcpu - default LBP-based detector uses the system CPU
|
||||
; lbpgpu - LBP-based detector that uses Nvidia GPU to increase recognition speed.
|
||||
; lbpopencl - LBP-based detector that uses OpenCL GPU to increase recognition speed. Requires OpenCV 3.0
|
||||
; morphcpu - Experimental detector that detects white rectangles in an image. Does not require training.
|
||||
detector = lbpgpu
|
||||
|
||||
; If set to true, all results must match a postprocess text pattern if a pattern is available.
|
||||
; If not, the result is disqualified.
|
||||
must_match_pattern = 0
|
||||
|
||||
; Bypasses plate detection. If this is set to 1, the library assumes that each region provided is a likely plate area.
|
||||
skip_detection = 0
|
||||
|
||||
; Specifies the full path to an image file that constrains the detection area. Only the plate regions allowed through the mask
|
||||
; will be analyzed. The mask image must match the resolution of your image to be analyzed. The mask is black and white.
|
||||
; Black areas will be ignored, white areas will be searched. An empty value means no mask (scan the entire image)
|
||||
detection_mask_image =
|
||||
|
||||
; OpenALPR can scan the same image multiple times with different randomization. Setting this to a value larger than
|
||||
; 1 may increase accuracy, but will increase processing time linearly (e.g., analysis_count = 3 is 3x slower)
|
||||
analysis_count = 1
|
||||
|
||||
; OpenALPR detects high-contrast plate crops and uses an alternative edge detection technique. Setting this to 0.0
|
||||
; would classify ALL images as high-contrast, setting it to 1.0 would classify no images as high-contrast.
|
||||
contrast_detection_threshold = 0.3
|
||||
|
||||
max_plate_angle_degrees = 15
|
||||
|
||||
ocr_min_font_point = 6
|
||||
|
||||
; Minimum OCR confidence percent to consider.
|
||||
postprocess_min_confidence = 65
|
||||
|
||||
; Any OCR character lower than this will also add an equally likely
|
||||
; chance that the character is incorrect and will be skipped. Value is a confidence percent
|
||||
postprocess_confidence_skip_level = 80
|
||||
|
||||
|
||||
debug_general = 0
|
||||
debug_timing = 0
|
||||
debug_detector = 0
|
||||
debug_prewarp = 0
|
||||
debug_state_id = 0
|
||||
debug_plate_lines = 0
|
||||
debug_plate_corners = 0
|
||||
debug_char_segment = 0
|
||||
debug_char_analysis = 0
|
||||
debug_color_filter = 0
|
||||
debug_ocr = 0
|
||||
debug_postprocess = 0
|
||||
debug_show_images = 0
|
||||
debug_pause_on_frame = 0
|
||||
|
||||
|
||||
|
||||
|
465
plugins/opencv/shinobi-opencv.js
Normal file
465
plugins/opencv/shinobi-opencv.js
Normal file
|
@ -0,0 +1,465 @@
|
|||
//
|
||||
// Shinobi - OpenCV Plugin
|
||||
// Copyright (C) 2016-2025 Moe Alam, moeiscool
|
||||
//
|
||||
// # Donate
|
||||
//
|
||||
// If you like what I am doing here and want me to continue please consider donating :)
|
||||
// PayPal : paypal@m03.ca
|
||||
//
|
||||
// Last-resort error handler: log unexpected errors instead of letting the
// process exit. NOTE(review): this swallows every uncaught exception, so a
// broken internal state can persist after an error — consider restarting.
process.on('uncaughtException', function (err) {
    console.error('uncaughtException',err);
});
|
||||
var fs=require('fs');
|
||||
var cv=require('opencv4nodejs');
|
||||
var exec = require('child_process').exec;
|
||||
var moment = require('moment');
|
||||
var Canvas = require('canvas');
|
||||
var express = require('express');
|
||||
var http = require('http'),
|
||||
app = express(),
|
||||
server = http.createServer(app);
|
||||
var config=require('./conf.json');
|
||||
// Fill in defaults for any settings missing from conf.json.
if(!config.port){config.port=8080}
if(!config.hostPort){config.hostPort=8082}
if(config.systemLog===undefined){config.systemLog=true}
if(config.cascadesDir===undefined){config.cascadesDir=__dirname+'/cascades/'}
if(config.alprConfig===undefined){config.alprConfig=__dirname+'/openalpr.conf'}
// Global plugin state container (intentionally a global, shared file-wide).
s={
    group:{},
    dir:{
        cascades : config.cascadesDir
    },
    isWin:(process.platform==='win32'),
    // cache of cascadePath -> fs.existsSync result, filled lazily
    foundCascades : {

    }
}
//default stream folder check
if(!config.streamDir){
    if(s.isWin===false){
        // prefer the shared-memory tmpfs on Linux for speed
        config.streamDir='/dev/shm'
    }else{
        config.streamDir=config.windowsTempDir
    }
    if(!fs.existsSync(config.streamDir)){
        // fall back to a local folder when the tmp location is unavailable
        config.streamDir=__dirname+'/streams/'
    }else{
        config.streamDir+='/streams/'
    }
}
s.dir.streams=config.streamDir;
//create streams dir if missing
if(!fs.existsSync(s.dir.streams)){
    fs.mkdirSync(s.dir.streams);
}
//create cascades dir if missing
if(!fs.existsSync(s.dir.cascades)){
    fs.mkdirSync(s.dir.cascades);
}
|
||||
// Generate a random alphanumeric identifier.
// x - desired length; defaults to 10 when falsy.
s.gid=function(x){
    var length = x || 10;
    var alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
    var out = [];
    for(var i = 0; i < length; i++){
        out.push(alphabet[Math.floor(Math.random() * alphabet.length)]);
    }
    return out.join('');
};
|
||||
// Scan the cascades directory for available OpenCV cascade files (*.xml).
// Stores the names (without extension) on s.cascadesInDir and passes the
// same list to callback.
// Fix: an fs.readdir error previously left `files` undefined and crashed
// the forEach; the error is now logged and an empty list is reported.
s.findCascades=function(callback){
    fs.readdir(s.dir.cascades,function(err,files){
        var foundCascades=[];
        if(err){
            s.systemLog(err);
        }else{
            files.forEach(function(cascade){
                // only *.xml files are cascades; strip the extension for display
                if(cascade.indexOf('.xml')>-1){
                    foundCascades.push(cascade.replace('.xml',''))
                }
            })
        }
        s.cascadesInDir=foundCascades;
        callback(foundCascades)
    })
}
|
||||
// Prime the cascade list at startup so the 'readPlugins' request can be
// answered from s.cascadesInDir without re-reading the directory.
s.findCascades(function(){
    //get cascades
})
|
||||
// Run license-plate recognition on a JPEG frame by shelling out to the
// external `alpr` binary (OpenALPR), then emit a 'trigger' event with the
// plates found.
// buffer - Buffer containing a complete JPEG image
// d      - event data; d.mon carries the monitor's detector settings
// tx     - emitter used to send results back to Shinobi
// NOTE(review): the command line is built by string concatenation from
// monitor settings — presumably trusted admin input, but verify before
// exposing these fields to untrusted users.
s.detectLicensePlate=function(buffer,d,tx){
    // default country model when the monitor does not specify one
    if(!d.mon.detector_lisence_plate_country||d.mon.detector_lisence_plate_country===''){
        d.mon.detector_lisence_plate_country='us'
    }
    // alpr reads from disk, so write the frame to a short-lived temp file
    d.tmpFile=s.gid(5)+'.jpg'
    if(!fs.existsSync(s.dir.streams)){
        fs.mkdirSync(s.dir.streams);
    }
    // ensure <streams>/<group key>/<monitor id>/ exists
    d.dir=s.dir.streams+d.ke+'/'
    if(!fs.existsSync(d.dir)){
        fs.mkdirSync(d.dir);
    }
    d.dir=s.dir.streams+d.ke+'/'+d.id+'/'
    if(!fs.existsSync(d.dir)){
        fs.mkdirSync(d.dir);
    }
    fs.writeFile(d.dir+d.tmpFile,buffer,function(err){
        if(err) return s.systemLog(err);
        exec('alpr -j --config '+config.alprConfig+' -c '+d.mon.detector_lisence_plate_country+' '+d.dir+d.tmpFile,{encoding:'utf8'},(err, scan, stderr) => {
            if(err){
                s.systemLog(err);
            }else{
                try{
                    // alpr may prefix its JSON with a CUDA banner; strip it first
                    scan=JSON.parse(scan.replace('--(!)Loaded CUDA classifier','').trim())
                }catch(err){
                    if(!scan||!scan.results){
                        return s.systemLog(scan,err);
                    }
                }
                if(scan.results.length>0){
                    scan.plates=[]
                    scan.mats=[]
                    scan.results.forEach(function(v){
                        // matches_template is internal noise; drop it from each candidate
                        v.candidates.forEach(function(g,n){
                            if(v.candidates[n].matches_template)
                            delete(v.candidates[n].matches_template)
                        })
                        scan.plates.push({coordinates:v.coordinates,candidates:v.candidates,confidence:v.confidence,plate:v.plate})
                        // derive a bounding box from the plate's corner coordinates
                        var width = Math.sqrt( Math.pow(v.coordinates[1].x - v.coordinates[0].x, 2) + Math.pow(v.coordinates[1].y - v.coordinates[0].y, 2));
                        var height = Math.sqrt( Math.pow(v.coordinates[2].x - v.coordinates[1].x, 2) + Math.pow(v.coordinates[2].y - v.coordinates[1].y, 2))
                        scan.mats.push({
                            x:v.coordinates[0].x,
                            y:v.coordinates[0].y,
                            width:width,
                            height:height,
                            tag:v.plate
                        })
                    })
                    tx({f:'trigger',id:d.id,ke:d.ke,details:{split:true,plug:config.plug,name:'licensePlate',reason:'object',matrices:scan.mats,imgHeight:d.mon.detector_scale_y,imgWidth:d.mon.detector_scale_x,frame:d.base64}})
                }
            }
            // always clean up the temp frame, success or failure
            exec('rm -rf '+d.dir+d.tmpFile,{encoding:'utf8'})
        })
    })
}
|
||||
// Run object detection (OpenCV cascades, optionally license plates) on a
// complete JPEG frame.
// buffer - Buffer with the full JPEG image
// d      - event data; d.mon holds the monitor's detector settings
// tx     - emitter used to send 'trigger' events back to Shinobi
s.detectObject=function(buffer,d,tx){
    //detect license plate?
    if(d.mon.detector_lisence_plate==="1"){
        s.detectLicensePlate(buffer,d,tx)
    }
    //check selected opencv cascades
    if(!d.mon.detector_cascades || d.mon.detector_cascades === '')return;
    var selectedCascades = Object.keys(d.mon.detector_cascades);
    if(selectedCascades.length > 0){
        cv.imdecodeAsync(buffer,(err,im) => {
            if(err){
                console.log(err)
                return
            }
            selectedCascades.forEach(function(cascade){
                var cascadePath = s.dir.cascades+cascade+'.xml'
                // cache fs.existsSync results so each cascade is stat'd once
                if(s.foundCascades[cascadePath] === undefined){
                    s.foundCascades[cascadePath] = fs.existsSync(cascadePath)
                }
                // fix: previously this was an `else if`, so a missing cascade
                // was only skipped from the second call onward — the first
                // call fell through and crashed in the CascadeClassifier
                // constructor.
                if(s.foundCascades[cascadePath] === false){
                    return s.systemLog('Attempted to use non existant cascade. : '+cascadePath)
                }
                var classifier = new cv.CascadeClassifier(cascadePath)
                var matrices = classifier.detectMultiScaleGpu(im).objects
                if(matrices.length > 0){
                    // annotate each detection with its center point
                    matrices.forEach(function(v,n){
                        v.centerX=v.width/2
                        v.centerY=v.height/2
                        v.centerXnoParent=v.x+(v.width/2)
                        v.centerYnoParent=v.y+(v.height/2)
                    })
                    // fix: emit through the tx callback supplied by the caller.
                    // The previous code called s.cx, which only exists in
                    // client mode — in host mode it is undefined and crashed
                    // the plugin. In client mode tx === s.cx, so behavior
                    // there is unchanged.
                    tx({
                        f:'trigger',
                        id:d.id,
                        ke:d.ke,
                        name:cascade,
                        details:{
                            plug:'built-in-opencv',
                            name:cascade,
                            reason:'object',
                            matrices : matrices,
                            confidence:d.average
                        },
                        imgHeight:d.mon.detector_scale_y,
                        imgWidth:d.mon.detector_scale_x
                    })
                }
            })
        });
    }
}
|
||||
// Timestamped logger for the plugin. Silent unless config.systemLog is
// exactly true. Up to three values may be passed; missing ones print as ''.
s.systemLog=function(q,w,e){
    var second = w || '';
    var third = e || '';
    if(config.systemLog!==true){
        return
    }
    return console.log(moment().format(), q, second, third)
}
|
||||
|
||||
// Motion detection for one region of one monitor's frame.
// Blends the current frame against the previous frame for the region named
// cord.name, computes an average difference score, and fires a 'trigger'
// (or hands off to object detection) when the score exceeds the region's
// sensitivity. Per-region canvases live on s.group[ke][id] and are created
// lazily on first use.
// d    - event data; d.image must be a loaded Canvas.Image
// cord - region definition: {name, points, sensitivity, ...}
// tx   - emitter used to send 'trigger' events back to Shinobi
s.blenderRegion=function(d,cord,tx){
    d.width = d.image.width;
    d.height = d.image.height;
    if(!s.group[d.ke][d.id].canvas[cord.name]){
        // fall back to the monitor-wide sensitivity when the region has none
        if(!cord.sensitivity||isNaN(cord.sensitivity)){
            cord.sensitivity=d.mon.detector_sensitivity;
        }
        s.group[d.ke][d.id].canvas[cord.name] = new Canvas(d.width,d.height);
        s.group[d.ke][d.id].canvasContext[cord.name] = s.group[d.ke][d.id].canvas[cord.name].getContext('2d');
        s.group[d.ke][d.id].canvasContext[cord.name].fillStyle = '#000';
        s.group[d.ke][d.id].canvasContext[cord.name].fillRect( 0, 0,d.width,d.height);
        // clip drawing to the region polygon so only this area is compared
        if(cord.points&&cord.points.length>0){
            s.group[d.ke][d.id].canvasContext[cord.name].beginPath();
            for (var b = 0; b < cord.points.length; b++){
                cord.points[b][0]=parseFloat(cord.points[b][0]);
                cord.points[b][1]=parseFloat(cord.points[b][1]);
                if(b===0){
                    s.group[d.ke][d.id].canvasContext[cord.name].moveTo(cord.points[b][0],cord.points[b][1]);
                }else{
                    s.group[d.ke][d.id].canvasContext[cord.name].lineTo(cord.points[b][0],cord.points[b][1]);
                }
            }
            s.group[d.ke][d.id].canvasContext[cord.name].clip();
        }
    }
    if(!s.group[d.ke][d.id].canvasContext[cord.name]){
        return
    }
    s.group[d.ke][d.id].canvasContext[cord.name].drawImage(d.image, 0, 0, d.width, d.height);
    if(!s.group[d.ke][d.id].blendRegion[cord.name]){
        s.group[d.ke][d.id].blendRegion[cord.name] = new Canvas(d.width, d.height);
        s.group[d.ke][d.id].blendRegionContext[cord.name] = s.group[d.ke][d.id].blendRegion[cord.name].getContext('2d');
    }
    var sourceData = s.group[d.ke][d.id].canvasContext[cord.name].getImageData(0, 0, d.width, d.height);
    // create an image if the previous image doesn't exist (first frame)
    if (!s.group[d.ke][d.id].lastRegionImageData[cord.name]) s.group[d.ke][d.id].lastRegionImageData[cord.name] = s.group[d.ke][d.id].canvasContext[cord.name].getImageData(0, 0, d.width, d.height);
    // create a ImageData instance to receive the blended result
    var blendedData = s.group[d.ke][d.id].canvasContext[cord.name].createImageData(d.width, d.height);
    // blend the 2 images (black/white per-pixel difference)
    s.differenceAccuracy(blendedData.data,sourceData.data,s.group[d.ke][d.id].lastRegionImageData[cord.name].data);
    // draw the result in a canvas
    s.group[d.ke][d.id].blendRegionContext[cord.name].putImageData(blendedData, 0, 0);
    // store the current webcam image for the next comparison
    s.group[d.ke][d.id].lastRegionImageData[cord.name] = sourceData;
    blendedData = s.group[d.ke][d.id].blendRegionContext[cord.name].getImageData(0, 0, d.width, d.height);
    // average the RGB channels of every pixel into a single motion score
    var i = 0;
    d.average = 0;
    while (i < (blendedData.data.length * 0.25)) {
        d.average += (blendedData.data[i * 4] + blendedData.data[i * 4 + 1] + blendedData.data[i * 4 + 2]);
        ++i;
    }
    d.average = (d.average / (blendedData.data.length * 0.25))*10;
    if (d.average > parseFloat(cord.sensitivity)){
        if(d.mon.detector_use_detect_object==="1"&&d.mon.detector_second!=='1'){
            // motion found: confirm it with object detection before triggering
            var buffer=s.group[d.ke][d.id].canvas[cord.name].toBuffer();
            s.detectObject(buffer,d,tx)
        }else{
            tx({f:'trigger',id:d.id,ke:d.ke,details:{split:true,plug:config.plug,name:cord.name,reason:'motion',confidence:d.average,frame:d.base64}})
        }
    }
    // reset both canvases for the next frame
    s.group[d.ke][d.id].canvasContext[cord.name].clearRect(0, 0, d.width, d.height);
    s.group[d.ke][d.id].blendRegionContext[cord.name].clearRect(0, 0, d.width, d.height);
}
|
||||
// Convert a Blob into a Node Buffer via FileReader (browser-style API).
// Calls cb(err) on read failure, cb(null, buffer) on success.
// NOTE(review): FileReader is a browser API; presumably this helper is
// unused under plain Node — confirm before relying on it.
function blobToBuffer (blob, cb) {
  var blobIsUsable = typeof Blob !== 'undefined' && (blob instanceof Blob)
  if (!blobIsUsable) {
    throw new Error('first argument must be a Blob')
  }
  if (typeof cb !== 'function') {
    throw new Error('second argument must be a function')
  }

  var reader = new FileReader()

  var onLoadEnd = function (e) {
    reader.removeEventListener('loadend', onLoadEnd, false)
    if (e.error) {
      cb(e.error)
    } else {
      cb(null, Buffer.from(reader.result))
    }
  }

  reader.addEventListener('loadend', onLoadEnd, false)
  reader.readAsArrayBuffer(blob)
}
|
||||
// Branch-free absolute value using the 32-bit sign mask.
// Note: the shift coerces the input to a 32-bit integer, so fractional
// inputs are truncated (e.g. fastAbs(-3.7) === 3); callers rely on that.
function fastAbs(value) {
  const mask = value >> 31;
  return (value ^ mask) - mask;
}
|
||||
|
||||
// Binarize a difference value: anything above 0x15 (21) becomes full
// white (0xFF), everything else black (0).
function threshold(value) {
  if (value > 21) {
    return 255;
  }
  return 0;
}
|
||||
// Write a thresholded per-pixel difference of two RGBA pixel buffers into
// target. Each output pixel ends up pure black or pure white, with alpha
// forced fully opaque. Returns null when the inputs differ in length;
// otherwise returns undefined and fills target in place.
s.differenceAccuracy=function(target, data1, data2) {
    if (data1.length !== data2.length) return null;
    var pixelCount = data1.length * 0.25;
    for (var p = 0; p < pixelCount; p++) {
        var base = 4 * p;
        var brightnessA = (data1[base] + data1[base + 1] + data1[base + 2]) / 3;
        var brightnessB = (data2[base] + data2[base + 1] + data2[base + 2]) / 3;
        var diff = threshold(fastAbs(brightnessA - brightnessB));
        target[base] = diff;
        target[base + 1] = diff;
        target[base + 2] = diff;
        target[base + 3] = 0xFF;
    }
}
|
||||
// Run motion detection over every configured region of a monitor's frame.
// d.image must already be a loaded Canvas.Image; it is deleted when done.
// tx is the emitter passed through to s.blenderRegion.
s.checkAreas=function(d,tx){
    // rebuild the cached region list if init_monitor cleared it
    if(!s.group[d.ke][d.id].cords){
        if(!d.mon.cords){d.mon.cords={}}
        s.group[d.ke][d.id].cords=Object.values(d.mon.cords);
    }
    // when full-frame detection is on, append a synthetic region that
    // covers the entire image
    if(d.mon.detector_frame==='1'){
        d.mon.cords.frame={name:'FULL_FRAME',s:d.mon.detector_sensitivity,points:[[0,0],[0,d.image.height],[d.image.width,d.image.height],[d.image.width,0]]};
        s.group[d.ke][d.id].cords.push(d.mon.cords.frame);
    }
    for (var b = 0; b < s.group[d.ke][d.id].cords.length; b++){
        // NOTE(review): a falsy entry aborts the whole function, skipping
        // all remaining regions — presumably intentional, but confirm.
        if(!s.group[d.ke][d.id].cords[b]){return}
        s.blenderRegion(d,s.group[d.ke][d.id].cords[b],tx)
    }
    // free the decoded frame
    delete(d.image)
}
|
||||
|
||||
// Central dispatcher for every message received from Shinobi.
// d  - the message; d.f selects the action
// cn - the socket connection (host mode only; null in client mode)
// tx - emitter used to reply (per-client tx in host mode, s.cx in client mode)
s.MainEventController=function(d,cn,tx){
    switch(d.f){
        // re-scan the cascades directory and push the list to the group
        case'refreshPlugins':
            s.findCascades(function(cascades){
                s.cx({f:'s.tx',data:{f:'detector_cascade_list',cascades:cascades},to:'GRP_'+d.ke})
            })
            break;
        // answer from the cached list built at startup
        case'readPlugins':
            s.cx({f:'s.tx',data:{f:'detector_cascade_list',cascades:s.cascadesInDir},to:'GRP_'+d.ke})
            break;
        // host mode handshake: verify the shared plugin key before accepting
        case'init_plugin_as_host':
            if(!cn){
                console.log('No CN',d)
                return
            }
            if(d.key!==config.key){
                console.log(new Date(),'Plugin Key Mismatch',cn.request.connection.remoteAddress,d)
                cn.emit('init',{ok:false})
                cn.disconnect()
            }else{
                console.log(new Date(),'Plugin Connected to Client',cn.request.connection.remoteAddress)
                cn.emit('init',{ok:true,plug:config.plug,notice:config.notice,type:config.type})
            }
            break;
        // reset all per-monitor detection state (canvases, buffers, regions)
        case'init_monitor':
            if(s.group[d.ke]&&s.group[d.ke][d.id]){
                s.group[d.ke][d.id].canvas={}
                s.group[d.ke][d.id].canvasContext={}
                s.group[d.ke][d.id].blendRegion={}
                s.group[d.ke][d.id].blendRegionContext={}
                s.group[d.ke][d.id].lastRegionImageData={}
                s.group[d.ke][d.id].numberOfTriggers=0
                delete(s.group[d.ke][d.id].cords)
                delete(s.group[d.ke][d.id].buffer)
            }
            break;
        // prepare an AWS upload accumulator for this monitor
        case'init_aws_push':
            // console.log('init_aws')
            s.group[d.ke][d.id].aws={links:[],complete:0,total:d.total,videos:[],tx:tx}
            break;
        // a chunk of JPEG data; accumulate until the frame is complete,
        // then run motion and/or object detection on it
        case'frame':
            try{
                if(!s.group[d.ke]){
                    s.group[d.ke]={}
                }
                if(!s.group[d.ke][d.id]){
                    s.group[d.ke][d.id]={
                        canvas:{},
                        canvasContext:{},
                        lastRegionImageData:{},
                        blendRegion:{},
                        blendRegionContext:{},
                    }
                }
                if(!s.group[d.ke][d.id].buffer){
                    s.group[d.ke][d.id].buffer=[d.frame];
                }else{
                    s.group[d.ke][d.id].buffer.push(d.frame)
                }
                // 0xFFD9 is the JPEG end-of-image marker: the frame is complete
                if(d.frame[d.frame.length-2] === 0xFF && d.frame[d.frame.length-1] === 0xD9){
                    s.group[d.ke][d.id].buffer=Buffer.concat(s.group[d.ke][d.id].buffer);
                    // detector_cascades arrives as a JSON string; ignore parse failure
                    try{
                        d.mon.detector_cascades=JSON.parse(d.mon.detector_cascades)
                    }catch(err){

                    }
                    if(d.mon.detector_frame_save==="1"){
                        d.base64=s.group[d.ke][d.id].buffer.toString('base64')
                    }
                    // "second pass" mode: object detection only on this frame
                    if(d.mon.detector_second==='1'&&d.objectOnly===true){
                        s.detectObject(s.group[d.ke][d.id].buffer,d,tx)
                    }else{
                        // motion path unless PAM is active or object-only is configured
                        if((d.mon.detector_pam !== '1' && d.mon.detector_use_motion === "1") || d.mon.detector_use_detect_object !== "1"){
                            if((typeof d.mon.cords ==='string')&&d.mon.cords.trim()===''){
                                d.mon.cords=[]
                            }else{
                                try{
                                    d.mon.cords=JSON.parse(d.mon.cords)
                                }catch(err){
                                    // console.log('d.mon.cords',err,d)
                                }
                            }
                            s.group[d.ke][d.id].cords=Object.values(d.mon.cords);
                            d.mon.cords=d.mon.cords;
                            // decode the JPEG through node-canvas; detection runs in onload
                            d.image = new Canvas.Image;
                            if(d.mon.detector_scale_x===''||d.mon.detector_scale_y===''){
                                s.systemLog('Must set detector image size')
                                return
                            }else{
                                d.image.width=d.mon.detector_scale_x;
                                d.image.height=d.mon.detector_scale_y;
                            }
                            d.width=d.image.width;
                            d.height=d.image.height;
                            d.image.onload = function() {
                                s.checkAreas(d,tx);
                            }
                            d.image.src = s.group[d.ke][d.id].buffer;
                        }else{
                            s.detectObject(s.group[d.ke][d.id].buffer,d,tx)
                        }
                    }
                    // frame consumed; start a fresh accumulator
                    s.group[d.ke][d.id].buffer=null;
                }
            }catch(err){
                if(err){
                    // drop the partial buffer so a bad frame cannot wedge the monitor
                    s.systemLog(err)
                    delete(s.group[d.ke][d.id].buffer)
                }
            }
            break;
    }
}
|
||||
// Start the plugin's own HTTP server (status page below).
server.listen(config.hostPort);
//web pages and plugin api
// Simple health-check page so an admin can confirm the plugin is up.
app.get('/', function (req, res) {
    res.end('<b>'+config.plug+'</b> for Shinobi is running')
});
//Connector to Shinobi
if(config.mode==='host'){
    //start plugin as host : Shinobi connects to this plugin over socket.io
    var io = require('socket.io')(server);
    io.attach(server);
    // per-connection registry; each entry carries its own tx emitter
    s.connectedClients={};
    io.on('connection', function (cn) {
        s.connectedClients[cn.id]={id:cn.id}
        // tx stamps every outbound message with this plugin's key and name
        s.connectedClients[cn.id].tx = function(data){
            data.pluginKey=config.key;data.plug=config.plug;
            return io.to(cn.id).emit('ocv',data);
        }
        cn.on('f',function(d){
            s.MainEventController(d,cn,s.connectedClients[cn.id].tx)
        });
        cn.on('disconnect',function(d){
            delete(s.connectedClients[cn.id])
        })
    });
}else{
    //start plugin as client : this plugin dials out to the Shinobi master
    if(!config.host){config.host='localhost'}
    var io = require('socket.io-client')('ws://'+config.host+':'+config.port);//connect to master
    // s.cx only exists in client mode; it stamps and emits over the single socket
    s.cx=function(x){x.pluginKey=config.key;x.plug=config.plug;return io.emit('ocv',x)}
    io.on('connect',function(d){
        // announce this detector plugin to the master on (re)connect
        s.cx({f:'init',plug:config.plug,notice:config.notice,type:config.type});
    })
    io.on('disconnect',function(d){
        // immediately retry the connection when dropped
        io.connect();
    })
    io.on('f',function(d){
        // no cn (socket handle) in client mode; s.cx is the reply channel
        s.MainEventController(d,null,s.cx)
    })
}
|
Loading…
Add table
Add a link
Reference in a new issue