1
0
Fork 0
mirror of https://gitlab.com/Shinobi-Systems/ShinobiCE.git synced 2025-03-09 15:40:15 +00:00

Shinobi CE officially lands on Gitlab

This commit is contained in:
Moe 2018-06-07 23:21:38 -07:00
commit f1406d4eec
431 changed files with 118157 additions and 0 deletions

View file

@ -0,0 +1,102 @@
// Shinobi (http://shinobi.video) - FFMPEG FLV over HTTP Test
// How to Use
// 1. Navigate to directory where this file is.
// 2. Run `npm install express moment`
// 3. Start with `node ffmpegToWeb.js`
// 4. Get the IP address of the computer where you did step 1. Example : 127.0.0.1
// 5. Open `http://127.0.0.1:8001/` in your browser.
var child = require('child_process');
var events = require('events');
var moment = require('moment');
var express = require('express')
var app = express();
var server = require('http').Server(app);
var io = require('socket.io')(server);
var spawn = child.spawn;
var exec = child.exec;
var Emitters = {}
var firstChunks = {}
var config = {
port:8001,
//ffmpegDir:'ffmpeg',//mac, linux
// ffmpegInput:'-rtsp_transport tcp -i rtsp://112.162.205.151:554/axis-media/media.3gp',
ffmpegInput:'-hwaccel cuvid -f dshow -i video=screen-capture-recorder',//windows screen
ffmpegDir:'D:/Program Files/ffmpeg/ffmpeg.exe',//windows (shortcutted ffmpeg to the same directory as this file)
}
// Look up (or lazily create) the shared EventEmitter for a feed.
// setMaxListeners(0) disables the listener-count warning so any number
// of connected clients can subscribe to the same feed.
var initEmitter = function(feed){
    var emitter = Emitters[feed];
    if(!emitter){
        emitter = new events.EventEmitter();
        emitter.setMaxListeners(0);
        Emitters[feed] = emitter;
    }
    return emitter;
}
//hold first chunk of FLV video
//hold first chunk of FLV video
// Store the first buffer seen for a feed (the FLV header) and return the
// stored value; subsequent calls return the original buffer unchanged.
var initFirstChunk = function(feed,firstBuffer){
    var stored = firstChunks[feed];
    if(stored){
        return stored;
    }
    firstChunks[feed] = firstBuffer;
    return firstBuffer;
}
console.log('Starting Express Web Server on Port '+config.port)
//start webserver
server.listen(config.port);
//make libraries static
app.use('/libs',express.static(__dirname + '/../../web/libs'));
app.use('/',express.static(__dirname + '/'));
//homepage with video element.
//static file send of index.html
app.get('/', function (req, res) {
res.sendFile(__dirname + '/index.html');
})
//// FLV over HTTP, this URL goes in the flv.js javascript player
// see ./index.html
io.on('connection', function (socket) {
    // all clients share feed "1" (object keys coerce to strings, so 1 === '1')
    var emitter = initEmitter(1)
    var contentWriter;
    // Send the stored FLV header chunk first so the player can initialize.
    // Bug fixed: this was emitted under the event name 'data', but the
    // player subscribes to 'flvData' (handlerKey in index.html), so the
    // header never reached it. Also skip the emit entirely when no chunk
    // has been captured yet — the old code called initFirstChunk('1') with
    // no buffer and emitted undefined.
    var firstChunk = firstChunks['1']
    if(firstChunk){
        socket.emit('flvData',firstChunk)
    }
    // forward every subsequent chunk to this client
    emitter.on('data',contentWriter=function(buffer){
        socket.emit('flvData',buffer)
    })
    // detach this client's writer so the emitter does not accumulate listeners
    // (the old disconnect callback also shadowed `socket` with an unused param)
    socket.on('disconnect', function () {
        emitter.removeListener('data',contentWriter)
    })
});
//ffmpeg
console.log('Starting FFMPEG')
//var ffmpegString = config.ffmpegInput+' -r 15 -tune zerolatency -c:v libx264 -b:v 200k -crf 1 -an -f mpegts pipe:1'
// Bug fixed: the HLS playlist path was built as __dirname+'s.m3u8', which
// is missing the path separator and writes e.g. "/path/to/dirs.m3u8" in
// the parent directory instead of "s.m3u8" inside this directory.
var ffmpegString = config.ffmpegInput+' -an -c:v h264_nvenc -r 1 -f hls -tune zerolatency -g 1 -hls_time 0.1 -hls_list_size 2 -start_number 0 -live_start_index -3 -hls_allow_cache 0 -hls_flags +delete_segments+omit_endlist '+__dirname+'/s.m3u8'
//var ffmpegString = config.ffmpegInput+' -r 15 -tune zerolatency -c:v h264_nvenc -crf 1 -vprofile baseline -preset ultrafast -pix_fmt yuv420p -b:v 400k -r 30 -threads 4 -fflags nobuffer -an -f mpegts pipe:1'
//+'-c:v h264_nvenc -an '+moment(new Date()).format('YYYY-MM-DDTHH-mm-ss')+'.mp4'
//var ffmpegString = '-i '+config.url+' -c:v libx264 -preset superfast -tune zerolatency -c:a aac -ar 44100 -f flv pipe:4'
//ffmpegString += ' -f mpegts -c:v mpeg1video -an http://localhost:'+config.port+'/streamIn/2'
// RTSP sources are forced onto TCP transport to avoid UDP packet loss
if(ffmpegString.indexOf('rtsp://')>-1){
    ffmpegString='-rtsp_transport tcp '+ffmpegString
}
console.log('Executing : '+config.ffmpegDir+' '+ffmpegString)
// five pipes: stdin, stdout (pipe:1, consumed below), stderr, plus two spares
var ffmpeg = spawn(config.ffmpegDir,ffmpegString.split(' '),{stdio:['pipe','pipe','pipe','pipe','pipe']});
ffmpeg.on('close', function (buffer) {
    console.log('ffmpeg died')
})
//// FFMPEG Error Logs
ffmpeg.stderr.on('data', function (buffer) {
    console.log(buffer.toString())
});
//data from pipe:1 output of ffmpeg
// The first invocation stores the chunk as the feed's header chunk, then
// swaps the outer variable for a leaner emit-only handler.
// NOTE(review): `ffmpeg.stdio[1].on(...)` below captured the ORIGINAL
// function reference, so the reassignment never replaces the registered
// listener — every chunk still enters this outer function first. Behavior
// is unaffected only because initFirstChunk() is a no-op once set.
var onFFmpegData = function (buffer) {
initFirstChunk('1',buffer)
onFFmpegData = function (buffer) {
initEmitter('1').emit('data',buffer)
}
onFFmpegData(buffer)
}
// wire the handler to ffmpeg's stdout (pipe:1)
ffmpeg.stdio[1].on('data', onFFmpegData);

7
tools/FLV Stream/flv.min.js vendored Normal file

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,36 @@
<!doctype html>
<!--www.shinobi.video-->
<!-- Demo page: plays the FLV stream pushed over socket.io by ffmpegToWeb.js -->
<html lang="en-US">
<head>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta charset="UTF-8">
<title>FLV to Web Test by Shinobi Systems</title>
<!--FLV Live Player-->
<script src="http://cdn.shinobi.video/js/socket.io.js"></script>
<script src="http://cdn.shinobi.video/js/flv.socket.io.js"></script>
</head>
<body>
<script>
// leftover experiment: raw socket.io listener, superseded by the
// flv.socket.io.js player below
// var socket = io.connect('/');
// socket.on('data', function (data) {
// });
</script>
<video controls id="videoElement"></video>
<script>
// flv.js (socket.io build) attaches to the <video> element; chunks arrive
// on the socket event named by handlerKey ('flvData')
if (flvjs.isSupported()) {
var videoElement = document.getElementById('videoElement');
var flvPlayer = flvjs.createPlayer({
type: 'flv',
"isLive": true,
"handlerKey": 'flvData',
url: 'ws://'+location.host
});
flvPlayer.attachMediaElement(videoElement);
flvPlayer.on('error',function(err){
console.log(err)
});
flvPlayer.load();
flvPlayer.play();
}
</script>
<!-- NOTE(review): document ends without a closing </html> tag; browsers
     tolerate this but consider closing it -->
</body>

View file

@ -0,0 +1,95 @@
// Shinobi (http://shinobi.video) - FFMPEG MP4 over HTTP Test
// How to Use
// 1. Navigate to directory where this file is.
// 2. Run `npm install express`
// 3. Start with `node ffmpegToWeb.js`
// 4. Get the IP address of the computer where you did step 1. Example : 127.0.0.1
// 5. Open `http://127.0.0.1:8001/` in your browser.
var child = require('child_process');
var events = require('events');
var express = require('express')
var app = express();
var server = require('http').Server(app);
var spawn = child.spawn;
var exec = child.exec;
var Emitters = {}
var firstChunks = {}
var config = {
port:8001,
url:'rtsp://131.95.3.162/axis-media/media.3gp'
}
// Look up (or lazily create) the shared EventEmitter for a feed.
// setMaxListeners(0) disables the listener-count warning so any number
// of connected clients can subscribe to the same feed.
var initEmitter = function(feed){
    var emitter = Emitters[feed];
    if(!emitter){
        emitter = new events.EventEmitter();
        emitter.setMaxListeners(0);
        Emitters[feed] = emitter;
    }
    return emitter;
}
//hold first chunk of FLV video
//hold first chunk of FLV video
// Store the first buffer seen for a feed (the fragmented-MP4 init segment)
// and return the stored value; later calls return the original unchanged.
var initFirstChunk = function(feed,firstBuffer){
    var stored = firstChunks[feed];
    if(stored){
        return stored;
    }
    firstChunks[feed] = firstBuffer;
    return firstBuffer;
}
console.log('Starting Express Web Server on Port '+config.port)
//start webserver
server.listen(config.port);
//make libraries static
app.use('/libs',express.static(__dirname + '/../../web/libs'));
app.use('/',express.static(__dirname + '/'));
//homepage with video element.
//static file send of index.html
app.get('/', function (req, res) {
res.sendFile(__dirname + '/index.html');
})
//// MP4 Stream over HTTP, this URL goes in the flv.js javascript player
// see ./index.html
app.get('/s.mp4', function (req, res) {
    //default to first feed
    if(!req.params.feed){req.params.feed='1'}
    //get emitter
    req.Emitter = initEmitter(req.params.feed)
    //variable name of contentWriter
    var contentWriter
    //set headers
    res.setHeader('Content-Type', 'video/mp4');
    res.setHeader('Access-Control-Allow-Origin','*');
    //write first frame on stream
    // Bug fixed: when a client connected before ffmpeg produced output,
    // the old code did res.write(initFirstChunk(1)), writing undefined and
    // throwing a TypeError. Only write the init segment when one exists.
    var firstChunk = firstChunks[req.params.feed]
    if(firstChunk){
        res.write(firstChunk)
    }
    //write new frames as they happen
    // (removed a per-chunk console.log(buffer) that spammed the console)
    req.Emitter.on('data',contentWriter=function(buffer){
        res.write(buffer)
    })
    //remove contentWriter when client leaves
    res.on('close', function () {
        req.Emitter.removeListener('data',contentWriter)
    })
});
//ffmpeg
console.log('Starting FFMPEG')
// copy the source video stream, drop audio, and emit fragmented MP4 on
// stdout; the fragment flags let the stream start without a seekable moov
var ffmpegString = '-reorder_queue_size 5 -i '+config.url+' -c:v copy -an -movflags +frag_keyframe+empty_moov+default_base_moof -f mp4 pipe:1'
//var ffmpegString = '-i '+config.url+' -c:v libx264 -preset superfast -tune zerolatency -c:a aac -ar 44100 -f flv pipe:4'
//ffmpegString += ' -f mpegts -c:v mpeg1video -an http://localhost:'+config.port+'/streamIn/2'
// RTSP sources are forced onto TCP transport to avoid UDP packet loss
if(ffmpegString.indexOf('rtsp://')>-1){
ffmpegString='-rtsp_transport tcp '+ffmpegString
}
console.log('Executing : ffmpeg '+ffmpegString)
// assumes `ffmpeg` is resolvable on PATH
var ffmpeg = spawn('ffmpeg',ffmpegString.split(' '),{stdio:['pipe','pipe','pipe','pipe','pipe']});
ffmpeg.on('close', function (buffer) {
console.log('ffmpeg died')
})
//// FFMPEG Error Logs
//ffmpeg.stderr.on('data', function (buffer) {
// console.log(buffer.toString())
//});
//data from pipe:1 output of ffmpeg
// first chunk is kept as the init segment; everything is fanned out to
// connected HTTP clients through the feed's emitter
ffmpeg.stdio[1].on('data', function (buffer) {
initFirstChunk(1,buffer)
initEmitter(1).emit('data',buffer)
});

7
tools/MP4 Stream/flv.min.js vendored Normal file

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,29 @@
<!doctype html>
<!--www.shinobi.video-->
<!-- Demo page: plays the fragmented-MP4 stream served at /s.mp4 by ffmpegToWeb.js -->
<html lang="en-US">
<head>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta charset="UTF-8">
<title>FLV to Web Test by Shinobi Systems</title>
<!--FLV Live Player-->
<script src="http://cdn.shinobi.video/js/flv.min.js"></script>
</head>
<body>
<video controls id="videoElement"></video>
<script>
// flv.js also handles fMP4 when type is 'mp4'; /s.mp4 is a never-ending
// chunked HTTP response produced by the server
if (flvjs.isSupported()) {
var videoElement = document.getElementById('videoElement');
var flvPlayer = flvjs.createPlayer({
type: 'mp4',
isLive: true,
url: '/s.mp4'
});
flvPlayer.attachMediaElement(videoElement);
flvPlayer.on('error',function(err){
console.log(err)
});
flvPlayer.load();
flvPlayer.play();
}
</script>
<!-- NOTE(review): document ends without a closing </html> tag -->
</body>

View file

@ -0,0 +1,48 @@
//add videos to SQL tables from filesystem
var fs = require('fs');
var moment = require('moment');
var mysql = require('mysql');
var config = require('../conf.json');
// `s` and `sql` are deliberate script-level globals shared by the
// functions below (no `var` — this file runs non-strict)
s={}
// (Re)connect to MySQL; retries every 2s on connect failure and
// reconnects whenever the connection errors out.
s.disc=function(){
sql = mysql.createConnection(config.db);
sql.connect(function(err){if(err){console.log('Error Connecting : DB',err);setTimeout(s.disc, 2000);}});
sql.on('error',function(err) {console.log('DB Lost.. Retrying..');console.log(err);s.disc();return;});
}
s.disc();
// default the video storage root to ../videos relative to this script
if(!config.videosDir){
    config.videosDir = __dirname + '/../videos/';
}
s.dir = { videos: config.videosDir };
// Convert a video filename like '2018-06-07T23-21-38.mp4' back into the
// SQL datetime string '2018-06-07 23:21:38'.
s.nameToTime = function(filename){
    var parts = filename.split('.')[0].split('T');
    parts[1] = parts[1].replace(/-/g, ':');
    return parts.join(' ');
};
// Format a date (default: now) with moment; the default pattern matches
// the on-disk video filename format.
s.moment = function(date, pattern){
    if(!date){ date = new Date; }
    if(!pattern){ pattern = 'YYYY-MM-DDTHH-mm-ss'; }
    return moment(date).format(pattern);
};
// Walk videos/<group>/<camera>/<file> and insert a Videos row for every
// file that has no matching row (matched on start time). Directory layout
// is assumed: group = account key (ke), camera = monitor id (mid).
// NOTE(review): readdir/stat errors are not checked — a missing or empty
// directory will throw on the forEach; confirm dirs exist before running.
fs.readdir(s.dir.videos,function(err,groups){
groups.forEach(function(group){
fs.readdir(s.dir.videos+group,function(err,cameras){
cameras.forEach(function(camera){
fs.readdir(s.dir.videos+group+'/'+camera,function(err,videos){
// fetch all existing rows for this camera once, then diff in memory
sql.query('SELECT * FROM Videos WHERE ke=? AND mid=?',[group,camera],function(err,r){
videos.forEach(function(filename){
fs.stat(s.dir.videos+group+'/'+camera+'/'+filename,function(err,file){
// start time comes from the filename, end time from mtime
file.startTime=s.nameToTime(filename)
file.endTime=s.moment(file.mtime,'YYYY-MM-DD HH:mm:ss')
// row: mid, ke, ext, size, time, end, status(1 = complete)
var save=[camera,group,filename.split('.')[1],file.size,file.startTime,file.endTime,1]
var found=null
r.forEach(function(v){
if(s.moment(v.time,'YYYY-MM-DD HH:mm:ss')===file.startTime){
found=v
}
})
if(!found){
console.log('!found',save)
sql.query('INSERT INTO Videos (mid,ke,ext,size,time,end,status) VALUES (?,?,?,?,?,?,?)',save)
}
})
})
})
})
})
})
})
})

10
tools/checkNPM.js Normal file
View file

@ -0,0 +1,10 @@
var fs=require('fs');
// Capture the local npm dependency tree as pretty-printed JSON.
// cb(err) on failure, cb(null, jsonString) on success.
function npmls(cb) {
    require('child_process').exec('npm ls --json', function(err, stdout, stderr) {
        // `npm ls` exits non-zero for peer/extraneous problems while still
        // printing a usable JSON tree, so only fail when there is no output
        if (err && !stdout) return cb(err);
        try {
            cb(null, JSON.stringify(JSON.parse(stdout), null, 3));
        } catch (parseErr) {
            cb(parseErr);
        }
    });
}
npmls(function(err, formatted){
    // Bug fixed: the error argument was ignored (named `yolo`), so on
    // failure fs.writeFileSync(..., undefined) threw and masked the cause.
    if (err) {
        console.error('npm ls failed:', err.message);
        return;
    }
    fs.writeFileSync(__dirname+'/npmls.json', formatted)
});

103
tools/coco/testCoco.js Normal file
View file

@ -0,0 +1,103 @@
var cv = require('opencv4nodejs')
const fs = require('fs');
const path = require('path');
const classNames = require('./dnnCocoClassNames');
const { extractResults } = require('./dnn/ssdUtils');
if (!cv.xmodules.dnn) {
throw new Error('exiting: opencv4nodejs compiled without dnn module');
}
// replace with path where you unzipped inception model
const ssdcocoModelPath = '../data/dnn/coco-SSD_300x300';
const prototxt = path.resolve(ssdcocoModelPath, 'deploy.prototxt');
const modelFile = path.resolve(ssdcocoModelPath, 'VGG_coco_SSD_300x300_iter_400000.caffemodel');
if (!fs.existsSync(prototxt) || !fs.existsSync(modelFile)) {
console.log('could not find ssdcoco model');
console.log('download the model from: https://drive.google.com/file/d/0BzKzrI_SkD1_dUY1Ml9GRTFpUWc/view');
throw new Error('exiting: could not find ssdcoco model');
}
// initialize ssdcoco model from prototxt and modelFile
const net = cv.readNetFromCaffe(prototxt, modelFile);
// Run the SSD-COCO network on an image and return its detections, each
// augmented with a human-readable className from the COCO label map.
function classifyImg(img) {
// ssdcoco model works with 300 x 300 images
const imgResized = img.resize(300, 300);
// network accepts blobs as input
const inputBlob = cv.blobFromImage(imgResized);
net.setInput(inputBlob);
// forward pass input through entire network, will return
// classification result as 1x1xNxM Mat
let outputBlob = net.forward();
// extract NxM Mat
outputBlob = outputBlob.flattenFloat(outputBlob.sizes[2], outputBlob.sizes[3]);
return extractResults(outputBlob, img)
.map(r => Object.assign({}, r, { className: classNames[r.classLabel] }));
}
// Curried helper: given a prediction list, returns a drawer that paints
// the bounding boxes of one class onto an image with the supplied color.
// FIXME: `drawRect` is not defined anywhere in this file (the upstream
// opencv4nodejs example imports it from its utils module), so invoking
// the returned function throws a ReferenceError as written.
const makeDrawClassDetections = predictions => (drawImg, className, getColor, thickness = 2) => {
predictions
.filter(p => classNames[p.classLabel] === className)
.forEach(p => drawRect(drawImg, p.rect, getColor(), { thickness }));
return drawImg;
};
// Detect tableware classes in ../data/dishes.jpg, draw a color legend and
// per-class boxes, then block in an imshow window until a key is pressed.
const runDetectDishesExample = () => {
const img = cv.imread('../data/dishes.jpg');
// keep only reasonably confident detections
const minConfidence = 0.2;
const predictions = classifyImg(img).filter(res => res.confidence > minConfidence);
const drawClassDetections = makeDrawClassDetections(predictions);
// one fixed BGR color per class of interest
const classColors = {
fork: new cv.Vec(0, 255, 0),
bowl: new cv.Vec(255, 0, 0),
'wine glass': new cv.Vec(0, 0, 255),
cup: new cv.Vec(0, 255, 255)
};
// semi-transparent legend in the top-right area of the image
const legendLeftTop = new cv.Point(580, 20);
const alpha = 0.4;
cv.drawTextBox(
img,
legendLeftTop,
Object.keys(classColors).map(className => ({
text: className,
fontSize: 0.8,
color: classColors[className]
})),
alpha
);
Object.keys(classColors).forEach((className) => {
const color = classColors[className];
// draw detections
drawClassDetections(img, className, () => color);
});
cv.imshowWait('img', img);
};
// NOTE(review): despite the name, this example loads cars.jpeg and draws
// the 'car' class — presumably renamed from the upstream sample; confirm.
const runDetectPeopleExample = () => {
const img = cv.imread('../data/cars.jpeg');
const minConfidence = 0.4;
const predictions = classifyImg(img).filter(res => res.confidence > minConfidence);
const drawClassDetections = makeDrawClassDetections(predictions);
// random bluish color per box
const getRandomColor = () => new cv.Vec(Math.random() * 255, Math.random() * 255, 255);
drawClassDetections(img, 'car', getRandomColor);
cv.imshowWait('img', img);
};
// run both demos sequentially; each blocks on its imshow window
runDetectDishesExample();
runDetectPeopleExample();

405
tools/cron.js Normal file
View file

@ -0,0 +1,405 @@
var fs = require('fs');
var path = require('path');
var mysql = require('mysql');
var moment = require('moment');
var exec = require('child_process').exec;
var spawn = require('child_process').spawn;
var config=require(__dirname+'/../conf.json');
var sql=mysql.createConnection(config.db);
//set option defaults
s={};
if(config.cron===undefined)config.cron={};
if(config.cron.enabled===undefined)config.cron.enabled=true;
if(config.cron.deleteOld===undefined)config.cron.deleteOld=true;
if(config.cron.deleteOrphans===undefined)config.cron.deleteOrphans=false;
if(config.cron.deleteNoVideo===undefined)config.cron.deleteNoVideo=true;
if(config.cron.deleteNoVideoRecursion===undefined)config.cron.deleteNoVideoRecursion=false;
if(config.cron.deleteOverMax===undefined)config.cron.deleteOverMax=true;
if(config.cron.deleteLogs===undefined)config.cron.deleteLogs=true;
if(config.cron.deleteEvents===undefined)config.cron.deleteEvents=true;
if(config.cron.deleteFileBins===undefined)config.cron.deleteFileBins=true;
if(config.cron.interval===undefined)config.cron.interval=1;
if(!config.ip||config.ip===''||config.ip.indexOf('0.0.0.0')>-1)config.ip='localhost';
if(!config.videosDir)config.videosDir=__dirname+'/videos/';
if(!config.binDir){config.binDir=__dirname+'/fileBin/'}
if(!config.addStorage){config.addStorage=[]}
//containers
cronOverlapLock={};
cronAlreadyDeletedRowsWithNoVideosOnStart={};
//functions
// Normalize a configured directory path: guarantee a trailing '/' and
// expand the '__DIR__' placeholder to this script's directory.
module.exports.checkCorrectPathEnding=function(x){
    var normalized = x;
    if(normalized.charAt(normalized.length - 1) !== '/'){
        normalized += '/';
    }
    return normalized.replace('__DIR__', __dirname);
}
module.exports.dir={
videos:module.exports.checkCorrectPathEnding(config.videosDir),
fileBin:module.exports.checkCorrectPathEnding(config.binDir),
addStorage:config.addStorage,
};
// Format a date (default: now) with moment; the default pattern matches
// the on-disk video filename format YYYY-MM-DDTHH-mm-ss.
module.exports.moment=function(e,x){
    var date = e || new Date;
    var pattern = x || 'YYYY-MM-DDTHH-mm-ss';
    return moment(date).format(pattern);
}
// Convert a video filename (e.g. 2018-06-07T23-21-38.mp4) back into the
// SQL datetime string '2018-06-07 23:21:38'.
module.exports.nameToTime=function(x){
    var parts = x.replace('.webm','').replace('.mp4','').split('T');
    parts[1] = parts[1].replace(/-/g, ':');
    return parts.join(' ');
}
// cron runs as a separate process and talks to the main Shinobi process
// over socket.io; all messages go out on the 'cron' channel
io = require('socket.io-client')('ws://'+config.ip+':'+config.port);//connect to master
// send a cron message to master, stamped with the shared cron key
module.exports.cx=function(x){x.cronKey=config.cron.key;return io.emit('cron',x)}
//emulate master socket emitter
module.exports.tx=function(x,y){module.exports.cx({f:'module.exports.tx',data:x,to:y})}
module.exports.video=function(x,y){module.exports.cx({f:'module.exports.video',data:x,file:y})}
//Cron Job
module.exports.cx({f:'init',time:moment()})
// Resolve the storage directory for a monitor's videos: a per-monitor
// override in details.dir wins, otherwise <videosDir>/<ke>/<id>/.
// NOTE(review): assumes e.details is present (possibly as a JSON string);
// a row with no details at all would throw on e.details.dir — confirm
// Monitors.details is never NULL.
module.exports.getVideoDirectory=function(e){
// monitor rows use `mid`; normalize onto `id` (mutates the argument)
if(e.mid&&!e.id){e.id=e.mid};
if(e.details&&(e.details instanceof Object)===false){
try{e.details=JSON.parse(e.details)}catch(err){}
}
if(e.details.dir&&e.details.dir!==''){
return module.exports.checkCorrectPathEnding(e.details.dir)+e.ke+'/'+e.id+'/'
}else{
return module.exports.dir.videos+e.ke+'/'+e.id+'/';
}
}
// Resolve the fileBin (temporary archive) directory for a monitor:
// <binDir>/<ke>/<id>/. Normalizes `mid` onto `id` like getVideoDirectory.
module.exports.getFileBinDirectory=function(e){
    if(e.mid && !e.id){
        e.id = e.mid;
    }
    return [module.exports.dir.fileBin, e.ke, '/', e.id, '/'].join('');
}
//filters set by the user in their dashboard
//deleting old videos is part of the filter - config.cron.deleteOld
// Each filter is compiled into a parameterized SELECT over Videos; matches
// are then archived / deleted / emailed / executed per the filter's flags.
module.exports.cronCheckFilterRules=function(v,callback){
//filters
if(!v.d.filters||v.d.filters==''){
v.d.filters={};
}
//delete old videos with filter
// retention is injected as a synthetic filter so it reuses the same engine
if(config.cron.deleteOld===true){
v.d.filters.deleteOldByCron={
"id":"deleteOldByCron",
"name":"deleteOldByCron",
"sort_by":"time",
"sort_by_direction":"ASC",
"limit":"",
"enabled":"1",
"archive":"0",
"email":"0",
"delete":"1",
"execute":"",
"where":[{
"p1":"end",
"p2":"<",
"p3":"NOW() - INTERVAL "+(v.maxVideoDays[v.mid]*24)+" HOUR",
"p3_type":"function",
}]
};
}
// m = filter key, current = index in the key list
var keys = Object.keys(v.d.filters)
if(keys.length>0){
keys.forEach(function(m,current){
var b=v.d.filters[m];
if(b.enabled==="1"){
b.ar=[v.ke];
b.sql=[];
b.where.forEach(function(j,k){
if(j.p1==='ke'){j.p3=v.ke}
switch(j.p3_type){
// 'function' means p3 is raw SQL (e.g. NOW() - INTERVAL ...) and is
// inlined; anything else is bound as a ? placeholder
case'function':
b.sql.push(j.p1+' '+j.p2+' '+j.p3)
break;
default:
b.sql.push(j.p1+' '+j.p2+' ?')
b.ar.push(j.p3)
break;
}
})
// archived videos and status 0 rows are always excluded
b.sql='WHERE ke=? AND status != 0 AND details NOT LIKE \'%"archived":"1"%\' AND ('+b.sql.join(' AND ')+')';
if(b.sort_by&&b.sort_by!==''){
b.sql+=' ORDER BY `'+b.sort_by+'` '+b.sort_by_direction
}
if(b.limit&&b.limit!==''){
b.sql+=' LIMIT '+b.limit
}
sql.query('SELECT * FROM Videos '+b.sql,b.ar,function(err,r){
if(r&&r[0]){
b.cx={
f:'filters',
name:b.name,
videos:r,
time:moment(),
ke:v.ke,
id:b.id
};
// archive takes precedence over delete
if(b.archive==="1"){
module.exports.cx({f:'filters',ff:'archive',videos:r,time:moment(),ke:v.ke,id:b.id});
}else{
if(b.delete==="1"){
module.exports.cx({f:'filters',ff:'delete',videos:r,time:moment(),ke:v.ke,id:b.id});
}
}
if(b.email==="1"){
b.cx.ff='email';
b.cx.delete=b.delete;
b.cx.mail=v.mail;
b.cx.execute=b.execute;
b.cx.query=b.sql;
module.exports.cx(b.cx);
}
if(b.execute&&b.execute!==""){
module.exports.cx({f:'filters',ff:'execute',execute:b.execute,time:moment()});
}
}
})
}
// NOTE(review): callback fires when the LAST filter has been dispatched,
// not when its sql.query result has returned — the queries above may
// still be in flight when the next cron stage starts
if(current===keys.length-1){
//last filter
callback()
}
})
}else{
//no filters
callback()
}
}
//database rows with no videos in the filesystem
// Remove Videos rows whose file is missing on disk. Unless
// deleteNoVideoRecursion is enabled this runs only once per account per
// process lifetime (tracked in cronAlreadyDeletedRowsWithNoVideosOnStart).
module.exports.cronDeleteRowsWithNoVideo=function(v,callback){
if(
config.cron.deleteNoVideo===true&&(
config.cron.deleteNoVideoRecursion===true||
(config.cron.deleteNoVideoRecursion===false&&!cronAlreadyDeletedRowsWithNoVideosOnStart[v.ke])
)
){
cronAlreadyDeletedRowsWithNoVideosOnStart[v.ke]=true;
// es is a deliberate script-level scratch object (non-strict global)
es={};
// rows younger than 10 minutes are skipped: their file may still be
// in the process of being written
sql.query('SELECT * FROM Videos WHERE ke = ? AND status != 0 AND details NOT LIKE \'%"archived":"1"%\' AND time < (NOW() - INTERVAL 10 MINUTE)',[v.ke],function(err,evs){
if(evs&&evs[0]){
es.del=[];es.ar=[v.ke];
evs.forEach(function(ev){
// reconstruct the expected on-disk path from the row's time/ext
ev.dir=module.exports.getVideoDirectory(ev)+module.exports.moment(ev.time)+'.'+ev.ext;
if(fs.existsSync(ev.dir)!==true){
module.exports.video('delete',ev)
es.del.push('(mid=? AND time=?)');
es.ar.push(ev.mid),es.ar.push(ev.time);
// notify the account's connected clients about the removal
module.exports.tx({f:'video_delete',filename:module.exports.moment(ev.time)+'.'+ev.ext,mid:ev.mid,ke:ev.ke,time:ev.time,end:module.exports.moment(new Date,'YYYY-MM-DD HH:mm:ss')},'GRP_'+ev.ke);
}
});
if(es.del.length>0){
module.exports.cx({f:'deleteNoVideo',msg:es.del.length+' SQL rows with no file deleted',ke:v.ke,time:moment()})
}
}
// fixed 3s grace period before the next cron stage
setTimeout(function(){
callback()
},3000)
})
}else{
callback()
}
}
//info about what the application is doing
// Delete Logs rows older than the account's log_days setting
// (default 10 days; a configured value of 0 disables deletion).
module.exports.cronDeleteOldLogs=function(v,callback){
    if(!v.d.log_days||v.d.log_days==''){v.d.log_days=10}else{v.d.log_days=parseFloat(v.d.log_days)};
    if(config.cron.deleteLogs===true&&v.d.log_days!==0){
        sql.query("DELETE FROM Logs WHERE ke=? AND `time` < DATE_SUB(NOW(), INTERVAL ? DAY)",[v.ke,v.d.log_days],function(err,rrr){
            callback()
            if(err)return console.error(err);
            // Bug fixed: `affectedRows` is a number, so the old check
            // `rrr.affectedRows.length>0` was always false and this
            // notification never fired.
            if(rrr.affectedRows>0){
                module.exports.cx({f:'deleteLogs',msg:rrr.affectedRows+' SQL rows older than '+v.d.log_days+' days deleted',ke:v.ke,time:moment()})
            }
        })
    }else{
        callback()
    }
}
//events - motion, object, etc. detections
// Delete Events rows older than the account's event_days setting
// (default 10 days; a configured value of 0 disables deletion).
module.exports.cronDeleteOldEvents=function(v,callback){
    if(!v.d.event_days||v.d.event_days==''){v.d.event_days=10}else{v.d.event_days=parseFloat(v.d.event_days)};
    if(config.cron.deleteEvents===true&&v.d.event_days!==0){
        sql.query("DELETE FROM Events WHERE ke=? AND `time` < DATE_SUB(NOW(), INTERVAL ? DAY)",[v.ke,v.d.event_days],function(err,rrr){
            callback()
            if(err)return console.error(err);
            // Bug fixed: `affectedRows` is a number, so the old check
            // `rrr.affectedRows.length>0` was always false and this
            // notification never fired.
            if(rrr.affectedRows>0){
                module.exports.cx({f:'deleteEvents',msg:rrr.affectedRows+' SQL rows older than '+v.d.event_days+' days deleted',ke:v.ke,time:moment()})
            }
        })
    }else{
        callback()
    }
}
//check for temporary files (special archive)
// Delete fileBin entries (files + Files rows) older than the account's
// fileBin_days setting (default 10 days; 0 disables cleanup).
cronDeleteOldFileBins=function(v,callback){
    if(!v.d.fileBin_days||v.d.fileBin_days==''){v.d.fileBin_days=10}else{v.d.fileBin_days=parseFloat(v.d.fileBin_days)};
    if(config.cron.deleteFileBins===true&&v.d.fileBin_days!==0){
        // shared tail so SELECT and DELETE target exactly the same rows
        var fileBinQuery = ' FROM Files WHERE ke=? AND `date` < DATE_SUB(NOW(), INTERVAL ? DAY)';
        sql.query("SELECT *"+fileBinQuery,[v.ke,v.d.fileBin_days],function(err,files){
            if(files&&files[0]){
                //delete the files
                files.forEach(function(file){
                    fs.unlink(module.exports.getFileBinDirectory(file)+file.name,function(err){
                        // best-effort: the file may already be gone
                        // if(err)console.error(err)
                    })
                })
                //delete the database rows
                sql.query("DELETE"+fileBinQuery,[v.ke,v.d.fileBin_days],function(err,rrr){
                    callback()
                    if(err)return console.error(err);
                    // Bug fixed: `affectedRows` is a number, so the old check
                    // `rrr.affectedRows.length>0` was always false and this
                    // notification never fired.
                    if(rrr.affectedRows>0){
                        module.exports.cx({f:'deleteFileBins',msg:rrr.affectedRows+' files older than '+v.d.fileBin_days+' days deleted',ke:v.ke,time:moment()})
                    }
                })
            }else{
                callback()
            }
        })
    }else{
        callback()
    }
}
//check for files with no database row
// For every monitor, list its video directory, match filenames back to
// Videos rows by start time, and `rm` any file without a matching row.
// NOTE(review): both sql.query callbacks below name their first parameter
// `arr` — that position is actually the error argument, which is silently
// ignored; confirm intentional.
cronCheckForOrphanedFiles=function(v,callback){
if(config.cron.deleteOrphans===true){
var finish=function(count){
if(count>0){
module.exports.cx({f:'deleteOrphanedFiles',msg:count+' SQL rows with no database row deleted',ke:v.ke,time:moment()})
}
callback()
}
// e is a deliberate script-level scratch object (non-strict global)
e={};
var numberOfItems = 0;
sql.query('SELECT * FROM Monitors WHERE ke=?',[v.ke],function(arr,b) {
if(b&&b[0]){
b.forEach(function(mon,m){
fs.readdir(module.exports.getVideoDirectory(mon), function(err, items) {
e.query=[];
// filesFound starts with [ke, mid]; file times are appended after,
// so a time's position in this array is its items index + 2
e.filesFound=[mon.ke,mon.mid];
numberOfItems+=items.length;
if(items&&items.length>0){
items.forEach(function(v,n){
e.query.push('time=?')
e.filesFound.push(module.exports.nameToTime(v))
})
sql.query('SELECT * FROM Videos WHERE ke=? AND mid=? AND ('+e.query.join(' OR ')+')',e.filesFound,function(arr,r) {
if(!r){r=[]};
e.foundSQLrows=[];
r.forEach(function(v,n){
// map the row's time back to its position in filesFound, then
// remove the matched file from `items` (the -2 undoes the
// [ke, mid] prefix offset)
v.index=e.filesFound.indexOf(module.exports.moment(v.time,'YYYY-MM-DD HH:mm:ss'));
if(v.index>-1){
delete(items[v.index-2]);
}
});
// whatever survived in `items` has no matching row: delete it
items.forEach(function(v,n){
if(v&&v!==null){
exec('rm '+module.exports.getVideoDirectory(mon)+v);
}
if(m===b.length-1&&n===items.length-1){
finish(numberOfItems)
}
})
})
}else{
if(m===b.length-1){
finish(numberOfItems)
}
}
})
});
}else{
finish(numberOfItems)
}
});
}else{
callback()
}
}
//user processing function
// Process one account (rows[number]) through every cleanup stage, then
// recurse onto the next account. cronOverlapLock prevents a new cron pass
// from re-entering an account whose previous pass is still running.
// NOTE(review): if any stage fails to call its callback, the lock for this
// account is never released and later accounts are skipped — confirm each
// stage always completes.
cronProcessUser = function(number,rows){
console.log('processUser')
var v = rows[number];
if(!v){
//no user object given
return
}
if(!cronAlreadyDeletedRowsWithNoVideosOnStart[v.ke]){
cronAlreadyDeletedRowsWithNoVideosOnStart[v.ke]=false;
}
if(!cronOverlapLock[v.ke]){
// set overlap lock
cronOverlapLock[v.ke]=true;
//set permissions
v.d=JSON.parse(v.details);
//size
if(!v.d.size||v.d.size==''){v.d.size=10000}else{v.d.size=parseFloat(v.d.size)};
//days to keep videos
v.maxVideoDays={}
if(!v.d.days||v.d.days==''){v.d.days=5}else{v.d.days=parseFloat(v.d.days)};
// per-monitor max_keep_days overrides the account-level default
sql.query('SELECT * FROM Monitors WHERE ke=?', [v.ke], function(err,rr) {
rr.forEach(function(b,m){
b.details=JSON.parse(b.details);
if(b.details.max_keep_days&&b.details.max_keep_days!==''){
v.maxVideoDays[b.mid]=parseFloat(b.details.max_keep_days)
}else{
v.maxVideoDays[b.mid]=v.d.days
};
})
// run the cleanup stages strictly in sequence via nested callbacks
cronDeleteOldLogs(v,function(){
cronDeleteOldFileBins(v,function(){
cronDeleteOldEvents(v,function(){
cronCheckFilterRules(v,function(){
cronDeleteRowsWithNoVideo(v,function(){
cronCheckForOrphanedFiles(v,function(){
//done user, unlock current, and do next
cronOverlapLock[v.ke]=false;
cronProcessUser(number+1,rows)
})
})
})
})
})
})
})
}
}
//recursive function
// cronStart runs one full pass over all top-level accounts, then
// reschedules itself; cronStop cancels the pending run.
var cronTimeout;
cronStart = function(){
// cancel any pending run so manual (re)starts never double-schedule
clearTimeout(cronTimeout);
x={};
module.exports.cx({f:'start',time:moment()})
// top-level accounts only; sub-accounts carry "sub" in their details JSON
sql.query('SELECT ke,uid,details,mail FROM Users WHERE details NOT LIKE \'%"sub"%\'', function(err,rows) {
if(err){
console.error(err)
}
if(rows&&rows[0]){
cronProcessUser(0,rows)
}
})
// schedule the next pass; config.cron.interval is in hours
cronTimeout=setTimeout(function(){
cronStart();
},parseFloat(config.cron.interval)*60000*60)
}
cronStop = function(){
clearTimeout(cronTimeout);
}
//socket commander
// The master process drives this worker over the 'f' channel.
io.on('f',function(d){
switch(d.f){
case'start':case'restart':
cronStart();
break;
case'stop':
cronStop();
break;
}
})
console.log('Shinobi : cron.js loaded')
// Expose the cron start/stop controls.
// Bug fixed: this previously did `module.exports = {...}`, which REPLACED
// the export object and discarded every helper attached above
// (checkCorrectPathEnding, moment, cx, tx, video, ...). Those helpers are
// invoked internally via `module.exports.cx(...)` at call time, so the
// replacement also made the first cron pass crash on undefined functions.
// Attaching the properties keeps both sets of exports intact.
module.exports.begin = cronStart;
module.exports.start = cronStart;
module.exports.restart = cronStart;
module.exports.stop = cronStop;
module.exports.end = cronStop;
module.exports.kill = cronStop;

BIN
tools/ffmpegToWeb.rar Normal file

Binary file not shown.

9
tools/ffmpegToWeb/.gitignore vendored Normal file
View file

@ -0,0 +1,9 @@
node_modules
videos
events
frames
web.old
.DS_Store
.vagrant
conf.json
ffmpeg

View file

@ -0,0 +1,3 @@
/npm-debug.log
/node_modules
/disc

View file

@ -0,0 +1,4 @@
/npm-debug.log
/node_modules
/dist
/disc

View file

@ -0,0 +1,19 @@
The following authors have all licensed their contributions to the project
under the licensing terms detailed in LICENSE (MIT style)
# h264-live-player
* Francois Leurent @131 <131.js@cloudyks.org>
# Broadway emscripten'd h264 (broadway/Decoder.js)
* Michael Bebenita <mbebenita@gmail.com>
* Alon Zakai <alonzakai@gmail.com>
* Andreas Gal <gal@mozilla.com>
* Mathieu 'p01' Henri <mathieu@p01.org>
* Matthias 'soliton4' Behrens <matthias.behrens@gmail.com>
# WebGL canvas helpers
* Sam Leitch @oneam
# AVC player inspiration
* Benjamin Xiao @urbenlegend

View file

@ -0,0 +1,10 @@
Copyright (c) 2016, Project Authors (see AUTHORS file)
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the names of the Project Authors nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View file

@ -0,0 +1,11 @@
# Motivation
This is a very efficient h264 video player (that can run on a live stream) for your browser.
You might use this with raspicam raw h264 stream.
This is a player around [Broadway](https://github.com/mbebenita/Broadway) Decoder, with very simple API.
NAL units (h264 frames) are split on the server side, so the client side is very simple (and allows frame skipping easily)
See [github sample project's page for more information](https://github.com/131/h264-live-player)

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,71 @@
"use strict";
var Class = require('uclass');
var vertexShaderScript = Script.createFromSource("x-shader/x-vertex", `
attribute vec3 aVertexPosition;
attribute vec2 aTextureCoord;
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;
varying highp vec2 vTextureCoord;
void main(void) {
gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
vTextureCoord = aTextureCoord;
}
`);
var fragmentShaderScript = Script.createFromSource("x-shader/x-fragment", [
precision highp float;
varying highp vec2 vTextureCoord;
uniform sampler2D FTexture;
void main(void) {
gl_FragColor = texture2D(FTexture, vTextureCoord);
}
`);
var FilterWebGLCanvas = new Class({
Extends : WebGLCanvas,
initialize : function(canvas, size, useFrameBuffer) {
FilterWebGLCanvas.parent.initialize.call(this, canvas, size, useFrameBuffer);
},
onInitShaders: function() {
this.program = new Program(this.gl);
this.program.attach(new Shader(this.gl, vertexShaderScript));
this.program.attach(new Shader(this.gl, fragmentShaderScript));
this.program.link();
this.program.use();
this.vertexPositionAttribute = this.program.getAttributeLocation("aVertexPosition");
this.gl.enableVertexAttribArray(this.vertexPositionAttribute);
this.textureCoordAttribute = this.program.getAttributeLocation("aTextureCoord");
this.gl.enableVertexAttribArray(this.textureCoordAttribute);
},
onInitTextures: function () {
console.log("creatingTextures: size: " + this.size);
this.FTexture = new Texture(this.gl, this.size, this.gl.RGBA);
},
onInitSceneTextures: function () {
this.FTexture.bind(0, this.program, "FTexture");
},
process: function(buffer, output) {
this.FTexture.fill(buffer);
this.drawScene();
this.readPixels(output);
},
toString: function() {
return "FilterWebGLCanvas Size: " + this.size;
}
});
module.exports = FilterWebGLCanvas;

View file

@ -0,0 +1,32 @@
"use strict";
var assert = require('../utils/assert');
/**
 * Thin wrapper around a WebGLProgram: attach compiled shaders, link,
 * activate, and look up attribute / uniform locations.
 */
function Program(gl) {
  this.gl = gl;
  this.program = gl.createProgram();
}
Program.prototype = {
  // Attach a compiled Shader wrapper (see Shader.js) to this program.
  attach: function (shaderWrapper) {
    this.gl.attachShader(this.program, shaderWrapper.shader);
  },
  link: function () {
    this.gl.linkProgram(this.program);
    // If creating the shader program failed, alert.
    assert(this.gl.getProgramParameter(this.program, this.gl.LINK_STATUS),
      "Unable to initialize the shader program.");
  },
  use: function () {
    this.gl.useProgram(this.program);
  },
  getAttributeLocation: function (attributeName) {
    return this.gl.getAttribLocation(this.program, attributeName);
  },
  // Upload a 4x4 matrix to the named uniform (no transpose).
  setMatrixUniform: function (uniformName, matrixArray) {
    var location = this.gl.getUniformLocation(this.program, uniformName);
    this.gl.uniformMatrix4fv(location, false, matrixArray);
  }
};
module.exports = Program;

View file

@ -0,0 +1,4 @@
/*
* Those files wraps several WebGL constructs and provides a simple, single texture based WebGLCanvas as well as a
* specialized YUVWebGLCanvas that can handle YUV->RGB conversion.
*/

View file

@ -0,0 +1,41 @@
"use strict";
var assert = require('../utils/assert');
/**
* Represents a WebGL shader script.
*/
/**
 * Represents a WebGL shader script: a plain {type, source} pair, where
 * `type` is the MIME type ("x-shader/x-vertex" or "x-shader/x-fragment").
 */
function Script() {}
// Build a Script from a <script> element in the DOM, concatenating its
// text-node children into the shader source.
Script.createFromElementId = function(id) {
  var script = document.getElementById(id);
  // Didn't find an element with the specified ID, abort.
  assert(script , "Could not find shader with ID: " + id);
  // Walk through the source element's children, building the shader source string.
  var source = "";
  var currentChild = script .firstChild;
  while(currentChild) {
    // nodeType 3 is a text node
    if (currentChild.nodeType == 3) {
      source += currentChild.textContent;
    }
    currentChild = currentChild.nextSibling;
  }
  // Bug fixed: this previously called `new Scriptor()` — an undefined
  // name — so createFromElementId always threw a ReferenceError.
  var res = new Script();
  res.type = script.type;
  res.source = source;
  return res;
};
// Build a Script directly from a MIME type and source text.
Script.createFromSource = function(type, source) {
  var res = new Script();
  res.type = type;
  res.source = source;
  return res;
}
module.exports = Script;

View file

@ -0,0 +1,38 @@
"use strict";
var error = require('../utils/error');
/**
 * Represents a WebGL shader object and provides a mechanism to load shaders
 * from HTML script tags.
 * @param {WebGLRenderingContext} gl
 * @param {{type: string, source: string}} script - MIME type + GLSL source
 */
function Shader(gl, script) {
  // Resolve the shader kind from the script's MIME type before creating it.
  var shaderKind = null;
  if (script.type == "x-shader/x-fragment") {
    shaderKind = gl.FRAGMENT_SHADER;
  } else if (script.type == "x-shader/x-vertex") {
    shaderKind = gl.VERTEX_SHADER;
  }
  if (shaderKind === null) {
    error("Unknown shader type: " + script.type);
    return;
  }
  this.shader = gl.createShader(shaderKind);

  // Send the source to the shader object and compile it.
  gl.shaderSource(this.shader, script.source);
  gl.compileShader(this.shader);

  // Report compile failures through the shared error helper.
  if (!gl.getShaderParameter(this.shader, gl.COMPILE_STATUS)) {
    error("An error occurred compiling the shaders: " + gl.getShaderInfoLog(this.shader));
    return;
  }
}
module.exports = Shader;

View file

@ -0,0 +1,47 @@
"use strict";
var assert = require('../utils/assert');
/**
 * Represents a WebGL texture object.
 * @param {WebGLRenderingContext} gl - owning context
 * @param {{w: number, h: number}} size - dimensions in texels
 * @param {GLenum} [format] - pixel format; defaults to gl.LUMINANCE (one channel)
 */
function Texture(gl, size, format) {
this.gl = gl;
this.size = size;
this.texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, this.texture);
this.format = format ? format : gl.LUMINANCE;
// Allocate uninitialized storage now; fill() uploads actual pixels later.
gl.texImage2D(gl.TEXTURE_2D, 0, this.format, size.w, size.h, 0, this.format, gl.UNSIGNED_BYTE, null);
// Nearest filtering and edge clamping: raw video planes, no mipmaps.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
}
// Lazily-built list of texture unit enums; supports units 0-2 (Y/U/V planes).
var textureIDs = null;
Texture.prototype = {
// Upload pixel data covering at least w*h texels into this texture.
fill: function(textureData, useTexSubImage2D) {
var gl = this.gl;
assert(textureData.length >= this.size.w * this.size.h,
"Texture size mismatch, data:" + textureData.length + ", texture: " + this.size.w * this.size.h);
gl.bindTexture(gl.TEXTURE_2D, this.texture);
if (useTexSubImage2D) {
gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, this.size.w , this.size.h, this.format, gl.UNSIGNED_BYTE, textureData);
} else {
// texImage2D seems to be faster, thus keeping it as the default
gl.texImage2D(gl.TEXTURE_2D, 0, this.format, this.size.w, this.size.h, 0, this.format, gl.UNSIGNED_BYTE, textureData);
}
},
// Bind this texture to unit `n` and point sampler uniform `name` at it.
bind: function(n, program, name) {
var gl = this.gl;
if (!textureIDs) {
textureIDs = [gl.TEXTURE0, gl.TEXTURE1, gl.TEXTURE2];
}
gl.activeTexture(textureIDs[n]);
gl.bindTexture(gl.TEXTURE_2D, this.texture);
gl.uniform1i(gl.getUniformLocation(program.program, name), n);
}
};
module.exports = Texture;

View file

@ -0,0 +1,261 @@
"use strict";
/**
* Generic WebGL backed canvas that sets up: a quad to paint a texture on, appropriate vertex/fragment shaders,
* scene parameters and other things. Specialized versions of this class can be created by overriding several
* initialization methods.
*/
var Script = require('./Script');
var error = require('../utils/error');
var makePerspective = require('../utils/glUtils').makePerspective;
var Matrix = require('sylvester.js').Matrix;
var Class = require('uclass');
var vertexShaderScript = Script.createFromSource("x-shader/x-vertex", `
attribute vec3 aVertexPosition;
attribute vec2 aTextureCoord;
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;
varying highp vec2 vTextureCoord;
void main(void) {
gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
vTextureCoord = aTextureCoord;
}
`);
var fragmentShaderScript = Script.createFromSource("x-shader/x-fragment", `
precision highp float;
varying highp vec2 vTextureCoord;
uniform sampler2D texture;
void main(void) {
gl_FragColor = texture2D(texture, vTextureCoord);
}
`);
var WebGLCanvas = new Class({

  /**
   * Generic WebGL-backed canvas rendering a single textured quad.
   * @param {HTMLCanvasElement} canvas - target canvas element
   * @param {Size} size - output dimensions (w/h)
   * @param {boolean} useFrameBuffer - when true, render off-screen into an
   *   FBO so the pixels can be read back via readPixels()
   */
  initialize : function(canvas, size, useFrameBuffer) {
    this.canvas = canvas;
    this.size = size;
    this.canvas.width = size.w;
    this.canvas.height = size.h;
    this.onInitWebGL();
    this.onInitShaders();
    this.initBuffers();
    if (useFrameBuffer)
      this.initFramebuffer();
    this.onInitTextures();
    this.initScene();
  },

  /**
   * Initialize a frame buffer so that we can render off-screen.
   */
  initFramebuffer : function() {
    var gl = this.gl;
    // Create framebuffer object and texture.
    this.framebuffer = gl.createFramebuffer();
    gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer);
    // NOTE(review): requires Texture in scope (require('./Texture')) — the
    // visible import list omits it; confirm the full file imports it.
    this.framebufferTexture = new Texture(this.gl, this.size, gl.RGBA);
    // Create and allocate renderbuffer for depth data.
    var renderbuffer = gl.createRenderbuffer();
    gl.bindRenderbuffer(gl.RENDERBUFFER, renderbuffer);
    gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, this.size.w, this.size.h);
    // Attach texture and renderbuffer to the framebuffer.
    gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, this.framebufferTexture.texture, 0);
    gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, renderbuffer);
  },

  /**
   * Initialize vertex and texture coordinate buffers for the quad.
   */
  initBuffers : function () {
    var tmp;
    var gl = this.gl;
    // Create vertex position buffer.
    this.quadVPBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, this.quadVPBuffer);
    tmp = [
       1.0,  1.0, 0.0,
      -1.0,  1.0, 0.0,
       1.0, -1.0, 0.0,
      -1.0, -1.0, 0.0];
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(tmp), gl.STATIC_DRAW);
    this.quadVPBuffer.itemSize = 3;
    this.quadVPBuffer.numItems = 4;
    /*
     +--------------------+
     | -1,1 (1)           | 1,1 (0)
     |                    |
     |                    |
     |                    |
     |                    |
     |                    |
     | -1,-1 (3)          | 1,-1 (2)
     +--------------------+
    */
    var scaleX = 1.0;
    var scaleY = 1.0;
    // Create vertex texture coordinate buffer.
    this.quadVTCBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, this.quadVTCBuffer);
    tmp = [
      scaleX, 0.0,
      0.0, 0.0,
      scaleX, scaleY,
      0.0, scaleY,
    ];
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(tmp), gl.STATIC_DRAW);
  },

  // Reset the model-view matrix to identity.
  mvIdentity : function () {
    this.mvMatrix = Matrix.I(4);
  },

  // Right-multiply the model-view matrix by m.
  mvMultiply : function(m) {
    this.mvMatrix = this.mvMatrix.x(m);
  },

  // Apply a translation [x, y, z] to the model-view matrix.
  // NOTE(review): relies on sylvester's $V being in scope — confirm import.
  mvTranslate : function (m) {
    this.mvMultiply(Matrix.Translation($V([m[0], m[1], m[2]])).ensure4x4());
  },

  // Push the projection and model-view matrices to the shader uniforms.
  setMatrixUniforms : function () {
    this.program.setMatrixUniform("uPMatrix", new Float32Array(this.perspectiveMatrix.flatten()));
    this.program.setMatrixUniform("uMVMatrix", new Float32Array(this.mvMatrix.flatten()));
  },

  initScene : function() {
    var gl = this.gl;
    // Establish the perspective with which we want to view the
    // scene. Our field of view is 45 degrees, with a width/height
    // ratio of 640:480, and we only want to see objects between 0.1 units
    // and 100 units away from the camera.
    this.perspectiveMatrix = makePerspective(45, 1, 0.1, 100.0);
    // Set the drawing position to the "identity" point, which is
    // the center of the scene.
    this.mvIdentity();
    // Now move the drawing position a bit to where we want to start
    // drawing the square.
    this.mvTranslate([0.0, 0.0, -2.4]);
    // Draw the cube by binding the array buffer to the cube's vertices
    // array, setting attributes, and pushing it to GL.
    gl.bindBuffer(gl.ARRAY_BUFFER, this.quadVPBuffer);
    gl.vertexAttribPointer(this.vertexPositionAttribute, 3, gl.FLOAT, false, 0, 0);
    // Set the texture coordinates attribute for the vertices.
    gl.bindBuffer(gl.ARRAY_BUFFER, this.quadVTCBuffer);
    gl.vertexAttribPointer(this.textureCoordAttribute, 2, gl.FLOAT, false, 0, 0);
    this.onInitSceneTextures();
    this.setMatrixUniforms();
    if (this.framebuffer) {
      console.log("Bound Frame Buffer");
      gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer);
    }
  },

  toString: function() {
    return "WebGLCanvas Size: " + this.size;
  },

  /**
   * Poll gl.getError() and log a human-readable name for any pending error.
   * @param {string} [operation] - label describing the call being checked
   */
  checkLastError: function (operation) {
    var err = this.gl.getError();
    if (err != this.gl.NO_ERROR) {
      var name = this.glNames[err];
      // Fix: the unknown-enum branch referenced an undefined `value`, which
      // itself raised a ReferenceError instead of logging the error code.
      name = (name !== undefined) ? name + "(" + err + ")":
                                    ("Unknown WebGL ENUM (0x" + err.toString(16) + ")");
      if (operation) {
        console.log("WebGL Error: %s, %s", operation, name);
      } else {
        console.log("WebGL Error: %s", name);
      }
      console.trace();
    }
  },

  // Acquire the WebGL context and build a reverse map from GL enum values to
  // their names (used by checkLastError).
  onInitWebGL: function () {
    try {
      this.gl = this.canvas.getContext("experimental-webgl");
    } catch(e) {}
    if (!this.gl) {
      error("Unable to initialize WebGL. Your browser may not support it.");
    }
    if (this.glNames) {
      return;
    }
    this.glNames = {};
    for (var propertyName in this.gl) {
      if (typeof this.gl[propertyName] == 'number') {
        this.glNames[this.gl[propertyName]] = propertyName;
      }
    }
  },

  // Compile/link the default pass-through shaders and enable the position and
  // texcoord vertex attributes.
  // NOTE(review): relies on Program/Shader being in scope — the visible
  // import list omits them; confirm the full file imports them.
  onInitShaders: function() {
    this.program = new Program(this.gl);
    this.program.attach(new Shader(this.gl, vertexShaderScript));
    this.program.attach(new Shader(this.gl, fragmentShaderScript));
    this.program.link();
    this.program.use();
    this.vertexPositionAttribute = this.program.getAttributeLocation("aVertexPosition");
    this.gl.enableVertexAttribArray(this.vertexPositionAttribute);
    // Fix: removed a stray double semicolon here.
    this.textureCoordAttribute = this.program.getAttributeLocation("aTextureCoord");
    this.gl.enableVertexAttribArray(this.textureCoordAttribute);
  },

  // Default texture setup: one RGBA texture covering the whole canvas.
  onInitTextures: function () {
    var gl = this.gl;
    this.texture = new Texture(gl, this.size, gl.RGBA);
  },

  // Bind the default texture to unit 0 and the "texture" sampler uniform.
  onInitSceneTextures: function () {
    this.texture.bind(0, this.program, "texture");
  },

  // Render the quad (two triangles) with whatever is currently bound.
  drawScene: function() {
    this.gl.drawArrays(this.gl.TRIANGLE_STRIP, 0, 4);
  },

  // Read the rendered RGBA pixels back into `buffer`.
  readPixels: function(buffer) {
    var gl = this.gl;
    gl.readPixels(0, 0, this.size.w, this.size.h, gl.RGBA, gl.UNSIGNED_BYTE, buffer);
  },
});
module.exports = WebGLCanvas;

View file

@ -0,0 +1,51 @@
"use strict";
var Class = require('uclass');
var YUVCanvas = new Class({
  Binds : ['decode'],

  /**
   * Software fallback renderer: converts planar YUV420 frames to RGB on the
   * CPU and blits them onto a 2D canvas context.
   */
  initialize : function(canvas, size) {
    this.canvas = canvas;
    this.canvasCtx = this.canvas.getContext("2d");
    this.canvasBuffer = this.canvasCtx.createImageData(size.w, size.h);
  },

  /**
   * Convert one frame and paint it.
   * @param buffer - packed planes: Y (w*h) followed by U and V (w*h/4 each)
   */
  decode : function (buffer, width, height) {
    if (!buffer)
      return;

    var lumaSize = width * height;
    var chromaSize = lumaSize >> 2;
    var ybuf = buffer.subarray(0, lumaSize);
    var ubuf = buffer.subarray(lumaSize, lumaSize + chromaSize);
    var vbuf = buffer.subarray(lumaSize + chromaSize, lumaSize + 2 * chromaSize);

    var rgba = this.canvasBuffer.data;
    for (var row = 0; row < height; row++) {
      for (var col = 0; col < width; col++) {
        var yIndex = col + row * width;
        // Chroma planes are quarter resolution: one sample per 2x2 block.
        var chromaIndex = ~~(row / 2) * ~~(width / 2) + ~~(col / 2);
        var luma = 1.164 * (ybuf[yIndex] - 16);
        var u = ubuf[chromaIndex] - 128;
        var v = vbuf[chromaIndex] - 128;
        // BT.601-style conversion; Uint8ClampedArray clamps to [0, 255].
        var outIndex = yIndex * 4;
        rgba[outIndex + 0] = luma + 1.596 * v;
        rgba[outIndex + 1] = luma - 0.813 * v - 0.391 * u;
        rgba[outIndex + 2] = luma + 2.018 * u;
        rgba[outIndex + 3] = 0xff;
      }
    }
    this.canvasCtx.putImageData(this.canvasBuffer, 0, 0);
    var date = new Date();
    //console.log("WSAvcPlayer: Decode time: " + (date.getTime() - this.rcvtime) + " ms");
  },
});
module.exports = YUVCanvas;

View file

@ -0,0 +1,108 @@
"use strict";
var Program = require('./Program');
var Shader = require('./Shader');
var Texture = require('./Texture');
var Script = require('./Script');
var WebGLCanvas = require('./WebGLCanvas');
var Class = require('uclass');
var vertexShaderScript = Script.createFromSource("x-shader/x-vertex", `
attribute vec3 aVertexPosition;
attribute vec2 aTextureCoord;
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;
varying highp vec2 vTextureCoord;
void main(void) {
gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
vTextureCoord = aTextureCoord;
}
`);
var fragmentShaderScript = Script.createFromSource("x-shader/x-fragment", `
precision highp float;
varying highp vec2 vTextureCoord;
uniform sampler2D YTexture;
uniform sampler2D UTexture;
uniform sampler2D VTexture;
const mat4 YUV2RGB = mat4
(
1.1643828125, 0, 1.59602734375, -.87078515625,
1.1643828125, -.39176171875, -.81296875, .52959375,
1.1643828125, 2.017234375, 0, -1.081390625,
0, 0, 0, 1
);
void main(void) {
gl_FragColor = vec4( texture2D(YTexture, vTextureCoord).x, texture2D(UTexture, vTextureCoord).x, texture2D(VTexture, vTextureCoord).x, 1) * YUV2RGB;
}
`);
/**
 * WebGL canvas that converts planar YUV420 frames to RGB in the fragment
 * shader: three single-channel textures sampled and multiplied by YUV2RGB.
 */
var YUVWebGLCanvas = new Class({
Extends : WebGLCanvas,
Binds : ['decode'],
initialize : function(canvas, size) {
YUVWebGLCanvas.parent.initialize.call(this, canvas, size);
},
// Compile/link the YUV shaders and enable the two vertex attributes.
onInitShaders: function() {
this.program = new Program(this.gl);
this.program.attach(new Shader(this.gl, vertexShaderScript));
this.program.attach(new Shader(this.gl, fragmentShaderScript));
this.program.link();
this.program.use();
this.vertexPositionAttribute = this.program.getAttributeLocation("aVertexPosition");
this.gl.enableVertexAttribArray(this.vertexPositionAttribute);
this.textureCoordAttribute = this.program.getAttributeLocation("aTextureCoord");;
this.gl.enableVertexAttribArray(this.textureCoordAttribute);
},
// Y plane at full resolution; U and V at half (YUV420 chroma subsampling).
onInitTextures: function () {
console.log("creatingTextures: size: " + this.size);
this.YTexture = new Texture(this.gl, this.size);
this.UTexture = new Texture(this.gl, this.size.getHalfSize());
this.VTexture = new Texture(this.gl, this.size.getHalfSize());
},
// Bind the three planes to texture units 0-2.
onInitSceneTextures: function () {
this.YTexture.bind(0, this.program, "YTexture");
this.UTexture.bind(1, this.program, "UTexture");
this.VTexture.bind(2, this.program, "VTexture");
},
// Upload pre-split Y/U/V planes without drawing.
fillYUVTextures: function(y, u, v) {
this.YTexture.fill(y);
this.UTexture.fill(u);
this.VTexture.fill(v);
},
// Split a packed YUV420 frame into its three planes, upload, and draw.
decode: function(buffer, width, height) {
if (!buffer)
return;
var lumaSize = width * height;
var chromaSize = lumaSize >> 2;
this.YTexture.fill(buffer.subarray(0, lumaSize));
this.UTexture.fill(buffer.subarray(lumaSize, lumaSize + chromaSize));
this.VTexture.fill(buffer.subarray(lumaSize + chromaSize, lumaSize + 2 * chromaSize));
this.drawScene();
},
toString: function() {
return "YUVCanvas Size: " + this.size;
}
});
module.exports = YUVWebGLCanvas;

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,35 @@
{
"name": "h264-live-player",
"version": "1.3.1",
"main": "wsavc/index.js",
"scripts": {
"dist": "browserify --bare --standalone WSAvcPlayer --plugin discify wsavc/ > dist/http-live-player.js",
"test": "echo \"Error: no test specified\" && exit 1"
},
"repository": {
"type": "git",
"url": "git@github.com:131/h264-live-player.git"
},
"keywords": [
"h264",
"nal",
"live",
"broadcast",
"streaming"
],
"author": "Francois Leurent <131.js@cloudyks.org>",
"license": "ISC",
"bugs": {
"url": "https://github.com/131/h264-live-player/issues"
},
"description": "This is a very simple h264 video player (that can run on live stream) for your browser.\r You might use this with raspicam raw h264 stream.\r This is a player around [Broadway](https://github.com/mbebenita/Broadway) Decoder, with very simple API.\r NAL unit (h264 frames) are split on the server side, so the client side is very simple (and allow frame skipping easily)",
"dependencies": {
"debug": "^2.3.2",
"sylvester.js": "^0.1.1",
"uclass": "^2.4.0"
},
"devDependencies": {
"browserify": "^13.0.0",
"discify": "^1.4.2"
}
}

View file

@ -0,0 +1,23 @@
"use strict";
/**
 * Represents a 2-dimensional size value (width/height pair).
 * @param {number} w - width
 * @param {number} h - height
 */
function Size(w, h) {
  this.w = w;
  this.h = h;
}

Size.prototype = {
  /** Textual form "(w, h)". */
  toString: function () {
    return "(" + this.w + ", " + this.h + ")";
  },
  /** New Size with both dimensions halved (floor via unsigned shift). */
  getHalfSize: function () {
    var halfW = this.w >>> 1;
    var halfH = this.h >>> 1;
    return new Size(halfW, halfH);
  },
  /** Total number of elements covered: w * h. */
  length: function () {
    return this.h * this.w;
  }
};
module.exports = Size;

View file

@ -0,0 +1,12 @@
"use strict";
var error = require('./error');
/**
 * Minimal assert helper: when `condition` is falsy, report `message` through
 * the shared error() logger (logs + stack trace; does not throw).
 * @param {*} condition - value to test for truthiness
 * @param {string} message - text reported on failure
 */
function assert(condition, message) {
  if (condition) {
    return;
  }
  error(message);
}
module.exports = assert;

View file

@ -0,0 +1,8 @@
"use strict";
/**
 * Report an error message to the console together with a stack trace.
 * Never throws; callers continue after the report.
 * @param {string} message - text to report
 */
function error(message) {
console.error(message);
console.trace();
}
module.exports = error;

View file

@ -0,0 +1,117 @@
"use strict";
var Matrix = require('sylvester.js').Matrix;
var Vector = require('sylvester.js').Vector;
var $M = Matrix.create;
// augment Sylvester some
// Build a homogeneous translation matrix from a 2- or 3-element vector.
// NOTE(review): the 2D branch writes row 2 ([2][0], [2][1]) while the 3D
// branch writes column 3 ([i][3]) — opposite conventions. Only the 3D branch
// is exercised here; confirm before reusing the 2D case.
Matrix.Translation = function (v)
{
if (v.elements.length == 2) {
var r = Matrix.I(3);
r.elements[2][0] = v.elements[0];
r.elements[2][1] = v.elements[1];
return r;
}
if (v.elements.length == 3) {
var r = Matrix.I(4);
r.elements[0][3] = v.elements[0];
r.elements[1][3] = v.elements[1];
r.elements[2][3] = v.elements[2];
return r;
}
throw "Invalid length for Translation";
}
// Flatten to a plain array in column-major order (the layout WebGL's
// uniformMatrix4fv expects).
Matrix.prototype.flatten = function ()
{
var result = [];
if (this.elements.length == 0)
return [];
for (var j = 0; j < this.elements[0].length; j++)
for (var i = 0; i < this.elements.length; i++)
result.push(this.elements[i][j]);
return result;
}
// Pad a matrix out to 4x4 in place with identity rows/columns; returns null
// when the matrix is already larger than 4x4.
Matrix.prototype.ensure4x4 = function()
{
if (this.elements.length == 4 &&
this.elements[0].length == 4)
return this;
if (this.elements.length > 4 ||
this.elements[0].length > 4)
return null;
for (var i = 0; i < this.elements.length; i++) {
for (var j = this.elements[i].length; j < 4; j++) {
if (i == j)
this.elements[i].push(1);
else
this.elements[i].push(0);
}
}
for (var i = this.elements.length; i < 4; i++) {
if (i == 0)
this.elements.push([1, 0, 0, 0]);
else if (i == 1)
this.elements.push([0, 1, 0, 0]);
else if (i == 2)
this.elements.push([0, 0, 1, 0]);
else if (i == 3)
this.elements.push([0, 0, 0, 1]);
}
return this;
};
// A vector's flat form is simply its element array.
Vector.prototype.flatten = function ()
{
return this.elements;
};
//
// gluPerspective
//
// Build a perspective projection matrix (same contract as gluPerspective).
//   fovy   - vertical field of view in degrees
//   aspect - width/height ratio
//   znear / zfar - near and far clip plane distances
function makePerspective(fovy, aspect, znear, zfar)
{
var ymax = znear * Math.tan(fovy * Math.PI / 360.0);
var ymin = -ymax;
var xmin = ymin * aspect;
var xmax = ymax * aspect;
return makeFrustum(xmin, xmax, ymin, ymax, znear, zfar);
}
//
// glFrustum
//
// Build the frustum projection matrix from explicit clip-plane extents.
function makeFrustum(left, right,
bottom, top,
znear, zfar)
{
var X = 2*znear/(right-left);
var Y = 2*znear/(top-bottom);
var A = (right+left)/(right-left);
var B = (top+bottom)/(top-bottom);
var C = -(zfar+znear)/(zfar-znear);
var D = -2*zfar*znear/(zfar-znear);
return $M([[X, 0, A, 0],
[0, Y, B, 0],
[0, 0, C, D],
[0, 0, -1, 0]]);
}
module.exports.makePerspective = makePerspective;

View file

@ -0,0 +1,17 @@
"use strict";
/**
 * Creates a new prototype object derived from another object's prototype
 * along with a list of additional properties.
 *
 * @param {Function} base - constructor whose prototype becomes the parent of
 *   the created object
 * @param {Object} properties - additional members copied onto the created
 *   prototype object
 * @returns {Object} the derived prototype object
 */
function inherit(base, properties) {
  var derived = Object.create(base.prototype);
  for (var key in properties) {
    derived[key] = properties[key];
  }
  return derived;
}
module.exports = inherit;

View file

@ -0,0 +1,159 @@
"use strict";
var Avc = require('../broadway/Decoder');
var YUVWebGLCanvas = require('../canvas/YUVWebGLCanvas');
var YUVCanvas = require('../canvas/YUVCanvas');
var Size = require('../utils/Size');
var Class = require('uclass');
var Events = require('uclass/events');
var debug = require('debug');
var log = debug("wsavc");
/**
 * Browser-side H.264 player: receives raw NAL units over a WebSocket and
 * feeds them to the Broadway (Avc) decoder, which renders into `canvas`.
 */
var WSAvcPlayer = new Class({
Implements : [Events],
// canvastype "webgl"/"YUVWebGLCanvas" selects GL rendering; anything else
// falls back to the software YUVCanvas (see initCanvas).
initialize : function(canvas, canvastype) {
this.canvas = canvas;
this.canvastype = canvastype;
// AVC codec initialization
this.avc = new Avc();
// NOTE(review): decoder tuning is deliberately disabled with if(false).
if(false) this.avc.configure({
filter: "original",
filterHorLuma: "optimized",
filterVerLumaEdge: "optimized",
getBoundaryStrengthsA: "optimized"
});
//WebSocket variable
this.ws;
this.pktnum = 0;
},
// Classify the NAL unit type (byte 4, just after the 00 00 00 01 start
// code) for logging, then hand the data to the Broadway decoder.
decode : function(data) {
var naltype = "invalid frame";
if (data.length > 4) {
if (data[4] == 0x65) {
naltype = "I frame";
}
else if (data[4] == 0x41) {
naltype = "P frame";
}
else if (data[4] == 0x67) {
naltype = "SPS";
}
else if (data[4] == 0x68) {
naltype = "PPS";
}
}
//log("Passed " + naltype + " to decoder");
this.avc.decode(data);
},
// Open (or re-open) the WebSocket and start the render loop. Binary
// messages are queued and drained one frame per animation frame; string
// messages are JSON control commands handled by cmd().
connect : function(url) {
// Websocket initialization
if (this.ws != undefined) {
this.ws.close();
delete this.ws;
}
this.ws = new WebSocket(url);
this.ws.binaryType = "arraybuffer";
this.ws.onopen = () => {
log("Connected to " + url);
};
var framesList = [];
this.ws.onmessage = (evt) => {
if(typeof evt.data == "string")
return this.cmd(JSON.parse(evt.data));
this.pktnum++;
var frame = new Uint8Array(evt.data);
//log("[Pkt " + this.pktnum + " (" + evt.data.byteLength + " bytes)]");
//this.decode(frame);
framesList.push(frame);
};
var running = true;
// Decode at most one queued frame per animation frame; drop the backlog
// when decoding falls more than 10 frames behind the network.
var shiftFrame = function() {
if(!running)
return;
if(framesList.length > 10) {
log("Dropping frames", framesList.length);
framesList = [];
}
var frame = framesList.shift();
if(frame)
this.decode(frame);
requestAnimationFrame(shiftFrame);
}.bind(this);
shiftFrame();
this.ws.onclose = () => {
// Stops the shiftFrame loop on its next tick.
running = false;
log("WSAvcPlayer: Connection closed")
};
},
// Create the rendering canvas wrapper, wire Broadway's decoded-picture
// callback to it, and emit "canvasReady".
initCanvas : function(width, height) {
var canvasFactory = this.canvastype == "webgl" || this.canvastype == "YUVWebGLCanvas"
? YUVWebGLCanvas
: YUVCanvas;
var canvas = new canvasFactory(this.canvas, new Size(width, height));
this.avc.onPictureDecoded = canvas.decode;
this.emit("canvasReady", width, height);
},
// Handle a JSON control message from the server ("init" sizes the canvas).
cmd : function(cmd){
log("Incoming request", cmd);
if(cmd.action == "init") {
this.initCanvas(cmd.width, cmd.height);
this.canvas.width = cmd.width;
this.canvas.height = cmd.height;
}
},
disconnect : function() {
this.ws.close();
},
// Ask the server to start sending the stream.
playStream : function() {
var message = "REQUESTSTREAM ";
this.ws.send(message);
log("Sent " + message);
},
// Ask the server to stop sending the stream.
stopStream : function() {
this.ws.send("STOPSTREAM");
log("Sent STOPSTREAM");
},
});
module.exports = WSAvcPlayer;
module.exports.debug = debug;

View file

@ -0,0 +1,69 @@
/* Polyfill indexOf. */
var indexOf;
if (typeof Array.prototype.indexOf === 'function') {
  // Native implementation available — delegate to it.
  indexOf = function (haystack, needle) {
    return haystack.indexOf(needle);
  };
} else {
  // Linear-scan fallback for engines without Array.prototype.indexOf.
  indexOf = function (haystack, needle) {
    for (var i = 0; i < haystack.length; i++) {
      if (haystack[i] === needle) {
        return i;
      }
    }
    return -1;
  };
}

/* Polyfill EventEmitter. */
var EventEmitter = function () {
  // event name -> array of listener functions
  this.events = {};
};

/** Register `listener` for `event`. */
EventEmitter.prototype.on = function (event, listener) {
  if (typeof this.events[event] !== 'object') {
    this.events[event] = [];
  }
  this.events[event].push(listener);
};

/** Remove one previously-registered listener for `event`, if present. */
EventEmitter.prototype.removeListener = function (event, listener) {
  var registered = this.events[event];
  if (typeof registered === 'object') {
    var position = indexOf(registered, listener);
    if (position > -1) {
      registered.splice(position, 1);
    }
  }
};

/** Invoke every listener of `event` with the remaining arguments. */
EventEmitter.prototype.emit = function (event) {
  if (typeof this.events[event] !== 'object') {
    return;
  }
  var args = [].slice.call(arguments, 1);
  // Iterate over a snapshot so listeners may add/remove during emit.
  var snapshot = this.events[event].slice();
  for (var i = 0; i < snapshot.length; i++) {
    snapshot[i].apply(this, args);
  }
};

/** Register a listener that unsubscribes itself after its first call. */
EventEmitter.prototype.once = function (event, listener) {
  this.on(event, function g () {
    this.removeListener(event, g);
    listener.apply(this, arguments);
  });
};

View file

@ -0,0 +1,117 @@
// Shinobi (http://shinobi.video) - FFMPEG H.264 over HTTP Test
// How to Use raw H.264 (Simulated RTSP)
// 1. Start with `node ffmpegToWeb.js`
// 2. Get the IP address of the computer where you did step 1. Example : 127.0.0.1
// 3. Open VLC and "Open Network Stream".
// 4. Input the following without quotes : `http://127.0.0.1:8001/h264` and start.
// --- Dependencies & shared state -------------------------------------------
var child = require('child_process');
var Splitter = require('stream-split');
var events = require('events');
var express = require('express');
var app = express();
var server = require('http').Server(app);
// Fix: socket.io was required twice; the first bare require was immediately
// shadowed by this bound instance and never used.
var io = require('socket.io')(server);
var spawn = child.spawn;
var exec = child.exec;

// One broadcast EventEmitter per feed id, created lazily by initEmitter().
var Emitters = {};

var config = {
  port: 8001,
  url: 'rtsp://131.95.3.162/axis-media/media.3gp'
};

/**
 * Get (or lazily create) the broadcast emitter for a feed id. The listener
 * cap is removed because every connected HTTP viewer adds a listener.
 * @param {string} feed - feed identifier, e.g. '1'
 * @returns {events.EventEmitter}
 */
var initEmitter = function(feed){
  if(!Emitters[feed]){
    Emitters[feed] = new events.EventEmitter().setMaxListeners(0);
  }
  return Emitters[feed];
};

// H.264 Annex-B NAL start code used to split the incoming stream.
// Fix: new Buffer(...) is deprecated and unsafe — use Buffer.from.
var NALseparator = Buffer.from([0,0,0,1]);
//web app
console.log('Starting Express Web Server on Port '+config.port)
server.listen(config.port);
// Static assets: shared web libs and the Broadway player files.
app.use('/libs',express.static(__dirname + '/../../web/libs'));
app.use('/Player',express.static(__dirname + '/Player'));
// Demo page containing the canvas players.
app.get('/', function (req, res) {
res.sendFile(__dirname + '/index.html');
})
//ffmpeg pushed stream in here to make a pipe
// FFMPEG POSTs stream data to /streamIn/<feed>: raw chunks go to the feed's
// emitter (for the /h264 HTTP viewers) and NAL-split chunks are broadcast to
// socket.io room STREAM_<feed> with the start code re-prefixed.
app.all('/streamIn/:feed', function (req, res) {
req.Emitter = initEmitter(req.params.feed)
//req.params.feed = Feed Number (Pipe NWebGLumber)
res.connection.setTimeout(0);
var Split = new Splitter(NALseparator)
var cn = io.to('STREAM_'+req.params.feed)
req.on('data', function(buffer){
req.Emitter.emit('data',buffer)
});
req.pipe(Split).on('data',function(buffer){
cn.emit('h264_'+req.params.feed,Buffer.concat([NALseparator,buffer]))
})
req.on('end',function(){
// NOTE(review): `delete` on a local variable is a no-op; if cleanup is
// intended, unpipe/destroy the splitter instead — confirm.
delete(Split)
console.log('close');
});
})
//socket.io client commands
// Clients send {function:'getStream', feed:'N'} to join a feed's room.
io.on('connection', function (cn) {
cn.on('f',function (data) {
switch(data.function){
case'getStream':
console.log(data)
cn.join('STREAM_'+data.feed)
break;
}
})
});
//simulate RTSP over HTTP
// Open-ended HTTP response that relays every chunk from the feed's emitter.
app.get(['/h264','/h264/:feed'], function (req, res) {
if(!req.params.feed){req.params.feed='1'}
req.Emitter = initEmitter(req.params.feed)
var contentWriter
var date = new Date();
res.writeHead(200, {
'Date': date.toUTCString(),
'Connection': 'keep-alive',
'Cache-Control': 'no-cache',
'Pragma': 'no-cache',
'Content-Type': 'video/mp4',
'Server': 'Shinobi H.264 Test Stream',
});
req.Emitter.on('data',contentWriter=function(buffer){
res.write(buffer)
})
// Detach the listener when the viewer disconnects to avoid leaks.
res.on('close', function () {
req.Emitter.removeListener('data',contentWriter)
})
});
//ffmpeg
console.log('Starting FFMPEG')
// Output 1: MPEG-TS/mpeg1video to /streamIn/1 (JSMpeg path).
// Output 2: raw H.264 baseline/zerolatency to /streamIn/2 (Broadway path).
var ffmpegString = '-i '+config.url+''
ffmpegString += ' -f mpegts -c:v mpeg1video -an http://localhost:'+config.port+'/streamIn/1'
ffmpegString += ' -pix_fmt yuv420p -b:v 600k -f rawvideo -c:v libx264 -vprofile baseline -tune zerolatency http://localhost:'+config.port+'/streamIn/2'
//ffmpegString += ' -f mpegts -c:v mpeg1video -an http://localhost:'+config.port+'/streamIn/2'
// RTSP sources are pulled over TCP for reliability.
if(ffmpegString.indexOf('rtsp://')>-1){
ffmpegString='-rtsp_transport tcp '+ffmpegString
}
console.log('Executing : ffmpeg '+ffmpegString)
var ffmpeg = spawn('ffmpeg',ffmpegString.split(' '));
ffmpeg.on('close', function (buffer) {
console.log('ffmpeg died')
})
//ffmpeg.stderr.on('data', function (buffer) {
// console.log(buffer.toString())
//});
//ffmpeg.stdout.on('data', function (buffer) {
// Emitter.emit('data',buffer)
//});

View file

@ -0,0 +1,45 @@
<script src="/libs/js/socket.io.js"></script>
<script src="/libs/js/jquery.min.js"></script>
<!--Socket.IO Connection-->
<script>
var socket = null
socket = io();
socket.on('connect',function(){
console.log('socket connected')
//pretend this is the command you use to initiate getting H.264 (MPEG) data
socket.emit('f',{function:'getStream',feed:'1'})
socket.emit('f',{function:'getStream',feed:'2'})
})
</script>
<!--Special JSMPEG-->
<!--
<script src="/libs/js/jsmpeg.pipe.js"></script>
<canvas id="canvas_jsmpeg" height=500 width=500></canvas>
<script>
// initiate a player that can be piped to.
var player = new JSMpeg.Player('pipe',{
canvas:document.getElementById('canvas_jsmpeg')
});
//on data from "h264" handle
socket.on('h264_1', function (data) {
// `data.buffer` is the raw video data from FFMPEG
// pretend you are getting data as follows
// var data = {buffer:ArrayBuffer}
player.write(data.buffer)
});
</script>
-->
<!--H264 Live Player-->
<script type="text/javascript" src="/Player/broadway/h264liveplayer/http-live-player.js">;</script>
<canvas id="canvas_h264_live_player" height=500 width=500></canvas>
<script>
// initiate a player that can be piped to.
var wsavc = new WSAvcPlayer(document.getElementById('canvas_h264_live_player'), "webgl", 1, 35);
wsavc.initCanvas(500,500)
// NOTE(review): WSAvcPlayer.connect(url) passes its argument straight to
// new WebSocket(url); calling it with no URL will fail — confirm whether a
// ws:// endpoint should be supplied here.
wsavc.connect();
socket.on('h264_2', function (data) {
// pretend you are getting data as follows
// data = {buffer:ArrayBuffer}
// NOTE(review): WSAvcPlayer defines no write() method (its entry points are
// decode()/cmd()); confirm the intended way to push NAL units here.
wsavc.write(data)
});
</script>

View file

@ -0,0 +1,15 @@
{
"name": "ffmpegtoweb",
"version": "1.0.0",
"description": "Shinobi Testing Tool for H.264 over HTTP and Socket.IO",
"main": "ffmpegToWeb.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"dependencies": {
"express": "^4.14.0",
"socket.io": "^1.7.1"
},
"author": "Moe Alam",
"license": "MIT"
}

2
tools/getGitLog.js Normal file
View file

@ -0,0 +1,2 @@
var fs = require('fs');
// NOTE(review): this reads a *directory* path ('.git/logs/') — fs.readFileSync
// throws EISDIR on directories, and the result is discarded. Presumably this
// stub was meant to read '.git/logs/HEAD' as tools/getGitVersion does — confirm.
fs.readFileSync(__dirname+'/.git/logs/')

View file

@ -0,0 +1,23 @@
var fs = require('fs');
var branch = fs.readFileSync(__dirname+'/../.git/HEAD','utf8').replace('ref: refs/heads/','');
var version = fs.readFileSync(__dirname+'/../.git/FETCH_HEAD','utf8').split('\t')[0];
var rawLogRows = fs.readFileSync(__dirname+'/../.git/logs/HEAD','utf8').split('\t');
var prettyLog = [];
rawLogRows.forEach(function(logRow,n){
var log = logRow.split('\n')[1].replace('\n','')
if(log){
var log = log.split(' ')
prettyLog.push({
version:log[1],
lastVersion:log[0],
time:log[4],
timezone:log[5]
})
}
})
module.exports = {
version:version,
branch:branch,
log:prettyLog,
}

23
tools/httpHookTest.js Normal file
View file

@ -0,0 +1,23 @@
var express = require("express");
var app = express();
/* serves main page */
app.get("/", function(req, res) {
console.log('Home Test Success')
res.end("Home Test Success");
});
/* GET endpoint used to verify outbound webhook delivery */
app.get("/test", function(req, res) {
console.log('Hook Test Success')
res.end("Hook Test Success");
});
/* POST endpoint variant of the webhook test */
app.post("/post", function(req, res) {
console.log('Post Test Success')
res.end("Post Test Success");
});
/* Port is configurable via the PORT environment variable; defaults to 5000 */
var port = process.env.PORT || 5000;
app.listen(port, function() {
console.log("Listening on " + port);
});

View file

@ -0,0 +1,109 @@
// NOTE(review): these requires mirror the Python imports of the original
// script; 'argparse', 'datetime', 'imutils', 'math' and 'numpy' are Python
// libraries — confirm which npm equivalents are actually installed.
var argparse = require('argparse');
var datetime = require('datetime');
var imutils = require('imutils');
var math = require('math');
var cv2 = require('opencv4nodejs');
var np = require('numpy');

// Fix: declare with var instead of creating implicit globals, and drop the
// stray leading semicolon.
var width = 800;   // resize target width in pixels
var textIn = 0;    // count of objects crossing the "in" line
var textOut = 0;   // count of objects crossing the "out" line
/**
 * Test whether the point (x, y) lies on the "in" counting line, within a
 * tolerance band of 550 around the line -450*x + 400*y + 157500 = 0.
 * Fixes from the original: unbalanced parentheses (syntax error), Python's
 * str() call, and a `return false` nested unreachably inside the if-block so
 * that out-of-band points returned undefined instead of false.
 * @param {number} x
 * @param {number} y
 * @returns {boolean} true when the point is inside the tolerance band
 */
function testIntersectionIn(x, y) {
  var res = -450 * x + 400 * y + 157500;
  if (res >= -550 && res < 550) {
    console.log(String(res));
    return true;
  }
  return false;
}
/**
 * Test whether the point (x, y) lies on the "out" counting line, within a
 * tolerance band of 550 around the line -450*x + 400*y + 180000 = 0.
 * Fixes from the original: unbalanced parentheses (the condition did not
 * parse) and Python's str() call.
 * @param {number} x
 * @param {number} y
 * @returns {boolean} true when the point is inside the tolerance band
 */
function testIntersectionOut(x, y) {
  var res = -450 * x + 400 * y + 180000;
  if (res >= -550 && res <= 550) {
    console.log(String(res));
    return true;
  }
  return false;
}
// ---------------------------------------------------------------------------
// Main capture loop.
// NOTE(review): the original block was a line-by-line Python translation that
// did not parse (the while-loop closed prematurely, Python '#' comments,
// keyword arguments, tuple unpacking, str()/ord()/format()). The control flow
// below restores the Python original as valid JavaScript; the cv2/imutils
// call shapes are kept as close as possible and assume Python-like bindings —
// TODO confirm against the installed opencv4nodejs API before running.
// ---------------------------------------------------------------------------
var camera = cv2.VideoCapture('test2.mp4');
var firstFrame = null;

// loop over the frames of the video
while (true) {
  // grab the current frame; stop at end of stream
  var readResult = camera.read();
  var grabbed = readResult[0];
  var frame = readResult[1];
  var text = 'Unoccupied';
  if (!grabbed) {
    break;
  }

  // resize the frame, convert it to grayscale, and blur it
  frame = imutils.resize(frame, { width: width });
  var gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY);
  gray = cv2.GaussianBlur(gray, [21, 21], 0);

  // if the first frame is null, initialize it
  if (firstFrame === null) {
    firstFrame = gray;
    continue;
  }

  // compute the absolute difference between the current frame and first frame
  var frameDelta = cv2.absdiff(firstFrame, gray);
  var thresh = cv2.threshold(frameDelta, 25, 255, cv2.THRESH_BINARY)[1];

  // dilate the thresholded image to fill in holes, then find contours
  thresh = cv2.dilate(thresh, null, 2);
  var cnts = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)[1];

  // loop over the contours
  for (var i = 0; i < cnts.length; i++) {
    var c = cnts[i];
    // if the contour is too small, ignore it
    if (cv2.contourArea(c) < 12000) {
      continue;
    }
    // compute the bounding box for the contour and draw it on the frame
    var bounds = cv2.boundingRect(c);
    var x = bounds[0], y = bounds[1], w = bounds[2], h = bounds[3];
    cv2.rectangle(frame, [x, y], [x + w, y + h], [0, 255, 0], 2);

    // centre of the bounding rectangle, used for the line-crossing tests
    var rectagleCenterPont = [(x + x + w) / 2, (y + y + h) / 2];
    cv2.circle(frame, rectagleCenterPont, 1, [0, 0, 255], 5);
    if (testIntersectionIn((x + x + w) / 2, (y + y + h) / 2)) {
      textIn += 1;
    }
    if (testIntersectionOut((x + x + w) / 2, (y + y + h) / 2)) {
      textOut += 1;
    }
  }

  // counting lines: blue line and red line
  cv2.line(frame, [width / 2, 0], [width, 450], [250, 0, 1], 2);
  cv2.line(frame, [width / 2 - 50, 0], [width - 50, 450], [0, 0, 255], 2);

  // draw the counters and timestamp on the frame
  cv2.putText(frame, 'In: ' + textIn, [10, 50], cv2.FONT_HERSHEY_SIMPLEX, 0.5, [0, 0, 255], 2);
  cv2.putText(frame, 'Out: ' + textOut, [10, 70], cv2.FONT_HERSHEY_SIMPLEX, 0.5, [0, 0, 255], 2);
  // NOTE(review): the Python original formatted the time with
  // datetime.strftime('%A %d %B %Y %I:%M:%S%p'); adjust if the exact format matters.
  cv2.putText(frame, new Date().toString(), [10, frame.shape[0] - 10], cv2.FONT_HERSHEY_SIMPLEX, 0.35, [0, 0, 255], 1);

  // show the frame and stop when the user presses 'q'
  cv2.imshow('Security Feed', frame);
  // cv2.imshow("Thresh", thresh)
  // cv2.imshow("Frame Delta", frameDelta)
  if ((cv2.waitKey(1) & 0xFF) === 'q'.charCodeAt(0)) {
    break;
  }
}

// cleanup the camera and close any open windows
camera.release();
cv2.destroyAllWindows();

Binary file not shown.

After

Width:  |  Height:  |  Size: 557 KiB

View file

@ -0,0 +1,98 @@
const cv = require('opencv4nodejs');
width = 800  # width (px) every captured frame is resized to
textIn = 0  # running count of crossings of the "in" line
textOut = 0  # running count of crossings of the "out" line
testIntersectionIn = function(x, y):
res = -450 * x + 400 * y + 157500
if((res >= -550) and (res < 550)):
print (str(res))
return True
return False
testIntersectionOut = function(x, y):
res = -450 * x + 400 * y + 180000
if ((res >= -550) and (res <= 550)):
print (str(res))
return True
return False
camera = cv2.VideoCapture("test2.mp4")
firstFrame = None
# loop over the frames of the video
# NOTE(review): every leading space was lost in this committed file, which is
# a syntax error in Python; the indentation below is reconstructed from the
# statement order -- TODO confirm against the upstream people-counter script.
while True:
    # grab the current frame and initialize the occupied/unoccupied text
    (grabbed, frame) = camera.read()
    text = "Unoccupied"
    # if the frame could not be grabbed, then we have reached the end
    # of the video
    if not grabbed:
        break
    # resize the frame, convert it to grayscale, and blur it
    frame = imutils.resize(frame, width=width)
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    gray = cv2.GaussianBlur(gray, (21, 21), 0)
    # if the first frame is None, initialize it and skip this frame
    if firstFrame is None:
        firstFrame = gray
        continue
    # compute the absolute difference between the current frame and
    # first frame
    frameDelta = cv2.absdiff(firstFrame, gray)
    thresh = cv2.threshold(frameDelta, 25, 255, cv2.THRESH_BINARY)[1]
    # dilate the thresholded image to fill in holes, then find contours
    # on thresholded image
    thresh = cv2.dilate(thresh, None, iterations=2)
    _, cnts, _ = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    # loop over the contours
    for c in cnts:
        # if the contour is too small, ignore it
        if cv2.contourArea(c) < 12000:
            continue
        # compute the bounding box for the contour and draw it on the frame
        (x, y, w, h) = cv2.boundingRect(c)
        cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
        cv2.line(frame, (width / 2, 0), (width, 450), (250, 0, 1), 2)  # blue line
        cv2.line(frame, (width / 2 - 50, 0), (width - 50, 450), (0, 0, 255), 2)  # red line
        # mark the centre of the bounding box and count line crossings
        rectagleCenterPont = ((x + x + w) / 2, (y + y + h) / 2)
        cv2.circle(frame, rectagleCenterPont, 1, (0, 0, 255), 5)
        if testIntersectionIn((x + x + w) / 2, (y + y + h) / 2):
            textIn += 1
        if testIntersectionOut((x + x + w) / 2, (y + y + h) / 2):
            textOut += 1
    # stop when the user presses "q"
    # cv2.imshow("Thresh", thresh)
    # cv2.imshow("Frame Delta", frameDelta)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
    # draw the text and timestamp on the frame, then show it
    cv2.putText(frame, "In: {}".format(str(textIn)), (10, 50),
                cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)
    cv2.putText(frame, "Out: {}".format(str(textOut)), (10, 70),
                cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)
    cv2.putText(frame, datetime.datetime.now().strftime("%A %d %B %Y %I:%M:%S%p"),
                (10, frame.shape[0] - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.35, (0, 0, 255), 1)
    cv2.imshow("Security Feed", frame)

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 KiB

View file

@ -0,0 +1,30 @@
// Apply key=value overrides from the command line to conf.json.
// Usage: node <script> key=value [key2=value2 ...]
// The sentinel value "DELETE" removes the key entirely.
process.on('uncaughtException', function (err) {
    console.error('Uncaught Exception occured!');
    console.error(err.stack);
});
var configLocation = __dirname + '/../conf.json';
var fs = require('fs');
var jsonfile = require('jsonfile');
var config = jsonfile.readFileSync(configLocation);
// everything after "node <script>" is a key=value pair
// (removed: an unused `var arguments = {}` that shadowed the CommonJS
// module wrapper's `arguments` object)
var processArgv = process.argv.splice(2, process.argv.length);
processArgv.forEach(function (val) {
    // split on the FIRST '=' only, so values containing '=' survive intact
    // (the old val.split('=')[1] silently dropped everything after a second '=')
    var eq = val.indexOf('=');
    var index = eq === -1 ? val : val.substring(0, eq);
    var value = eq === -1 ? undefined : val.substring(eq + 1);
    if (value === 'DELETE') {
        delete config[index];
    } else {
        // store JSON-parsable values typed, everything else as a string
        try {
            config[index] = JSON.parse(value);
        } catch (err) {
            config[index] = value;
        }
    }
    console.log(index + ': ' + value);
});
jsonfile.writeFile(configLocation, config, {spaces: 2}, function () {
    console.log('Changes Complete. Here is what it is now.')
    console.log(JSON.stringify(config, null, 2))
})

2342
tools/sql2mongo/mesh.js Normal file

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,352 @@
var _ = require('underscore')
// SQL keywords recognised by the parser, in the order they are split out of
// the query string ("top x" gets extra handling in getLimit)
// (fixed: both arrays were implicit globals, which throws under strict
// mode / ES modules)
var sqlKeywords = ['select', 'top', 'from', 'join', 'where', 'groupby', 'orderby', 'having']; // keep "top x" in mind
// NOTE: longer operators must precede their prefixes ('<=' before '<')
var logicalOperators = ['!=', '<=', '>=', '<', '>', '=', '!in', 'in', 'like'];
// per-query parse state, re-initialised by parseSQL() on every call
var hasTop, hasWhere, hasOrderBy, processed = [], whereValsWithSpaces, hasOr, filterFields, operators, sqlishFilter, filter;
// Split a (lowercased, "not like"-normalised) SQL string into an array of
// alternating text fragments and SQL keywords, e.g.
//   "select * from users" -> ['select', '*', 'from', 'users'].
// Side effect: when a fragment contains "where", buildWhere() is invoked to
// populate the module-level filter state before whitespace is stripped.
// NOTE(review): assigned without var/let -- an implicit global that throws
// under strict mode / ES modules.
parseOperatorsInArray = function(equation){
    var completeArr = [], tmpArr = [];
    sqlKeywords.forEach(function (e, k) { // for each operator
        if (completeArr.length === 0) { // if empty, split equation and do first load to completeArr.
            tmpArr = equation.split(e);
            spliceOperatorIntoTmpArr(tmpArr, e); // adds operator between every item in array
            tmpArr = tmpArr.filter(function (item) { return item.length > 0; });
            tmpArr.forEach(function (e, k) {
                // where-clause values may contain spaces, so parse the clause
                // before the whitespace is stripped off below
                if (e.indexOf('where') > -1){
                    buildWhere(e);
                }
                completeArr.push(e.replace(/\s/g, ''));
            });
        } else {
            // later keywords: split the already-collected fragments in place,
            // replacing each fragment with its split-out pieces
            for (var n = 0; n < completeArr.length; n++) {
                if (completeArr[n].indexOf(e) > -1 && completeArr[n].length > 1) {
                    var idx = n;
                    tmpArr = completeArr[n].split(e);
                    spliceOperatorIntoTmpArr(tmpArr, e);
                    tmpArr = tmpArr.filter(function (item) { return item.length > 0; });
                    completeArr.splice(idx, 1); // remove old text element
                    for (var x = 0; x < tmpArr.length ; x++) {
                        var newIdx = (idx + x);
                        completeArr.splice(newIdx, 0, tmpArr[x]);
                    }
                }
            }
        }
    });
    return completeArr;
};
/**
 * Mutate tmpArr in place, inserting the separator `e` between every pair of
 * existing elements: ['a','b','c'] + 'x' -> ['a','x','b','x','c'].
 * (Fixed: was an implicit global, which throws under strict mode.)
 * @param {Array} tmpArr - array produced by String.split(e); modified in place.
 * @param {string} e - the keyword/operator that was split on.
 */
var spliceOperatorIntoTmpArr = function(tmpArr, e){
    // final length is n + (n - 1): the originals plus one separator per gap
    var tmpLen = tmpArr.length + (tmpArr.length - 1);
    // step by 2: every insertion shifts the remaining items right by one
    for (var i = 1; i < tmpLen; i++) {
        tmpArr.splice(i, 0, e);
        i++;
    }
};
/**
 * Parse the where-clause of a query into the module-level parallel arrays
 * filterFields / operators / whereValsWithSpaces, then zip them into
 * sqlishFilter as { field, operator, value } triples.
 * Runs before parseOperatorsInArray strips whitespace so values keep their
 * embedded spaces.
 * (Fixed: was an implicit global, which throws under strict mode.)
 * @param {string} e - query fragment containing "where".
 */
var buildWhere = function(e){
    // cut the clause off at " order by" when present
    var originalWhere = hasOrderBy ? e.substring(e.indexOf('where'), e.indexOf(' order by')) : e.substring(e.indexOf('where'), e.length);
    var splitWhere = originalWhere.split(' '), splitLength = splitWhere.length;
    var isRightSide = false, isOperator = false, isKeyword = false, filterValsToPush = [];
    splitWhere.forEach(function(el, idx){
        isKeyword = ((sqlKeywords.indexOf(el) > -1) || el === 'and' || el === 'or');
        isOperator = (logicalOperators.indexOf(el) > -1); // true if operator
        if (isKeyword){
            isRightSide = false; // keywords put us back on the field side
        }
        if (isOperator){
            operators.push(el);
            isRightSide = true; // tokens until the next keyword form the value
        }
        if (!isOperator && !isKeyword){
            if (isRightSide){
                filterValsToPush.push(el);
            } else {
                filterFields.push(el);
            }
        }
        // flush accumulated value tokens at and/or boundaries or at the end
        if ((el === 'and' || el === 'or' || idx === (splitLength - 1)) && filterValsToPush.length > 0){
            var preservedVal = filterValsToPush.join(' ');
            whereValsWithSpaces.push(preservedVal);
            filterValsToPush = [];
        }
    });
    // zip the three parallel arrays into filter descriptors
    for (var i = 0; i < filterFields.length; i++){
        sqlishFilter.push(({ field: filterFields[i], operator: operators[i], value: whereValsWithSpaces[i] }));
    }
};
/**
 * Remove (and return) the next `howMany` elements from the front of arr,
 * recording them in the module-level `processed` log.
 * (Fixed: was an implicit global, which throws under strict mode.)
 * @param {Array} arr - token array; consumed in place.
 * @param {number} [howMany=1]
 * @returns {Array|undefined} the removed elements, or undefined when arr is empty.
 */
var getNext = function(arr, howMany){
    howMany = howMany ? howMany : 1;
    if (arr.length > 0){
        var lastIn = arr.splice(0, howMany);
        processed.push(lastIn);
        return lastIn;
    }
};
/**
 * Extract the numeric N of a "top N" clause from the head token and strip the
 * digits out of that token so field parsing can continue.
 * (Fixed: was an implicit global; parseInt now has an explicit radix.)
 * @param {Array<string>} arr - token array whose first element is "N fields...".
 * @returns {number} the parsed limit.
 */
var getLimit = function(arr){
    var topN = arr[0].replace(/[^0-9.]/g, '');
    // remove top n from fields portion of arr
    arr[0] = arr[0].replace(/\d+/g, '');
    return parseInt(topN, 10);
};
/**
 * Build a MongoDB projection object from the select-field list.
 * '*' yields {} (all fields); a leading '!' excludes a field ({field: 0}).
 * (Fixed: was an implicit global; dropped the needless underscore `_.first`
 * call; no longer throws when the token array is empty.)
 * @param {Array<string>} arr - token array; the field list is consumed from the front.
 * @returns {Object} Mongo projection, e.g. { id: 1, secret: 0 }.
 */
var getProjection = function(arr){
    var projection = {};
    var picked = getNext(arr);
    // nothing left to project from (empty token array)
    if (!picked) {
        return projection;
    }
    var selectFields = picked[0];
    if (selectFields !== '*'){
        selectFields.replace(/\s/g, '').split(',').forEach(function(e, k){
            var show = 1;
            if (e.substr(0, 1) === '!'){
                show = 0;
                e = e.substr(1, 1000); // drop the '!' prefix
            }
            projection[e] = show;
        });
    }
    return projection;
};
/**
 * Translate one { field, operator, value } descriptor into its MongoDB
 * equivalent and attach it to `filter`.
 * Numeric-looking values are coerced with parseFloat; 'in'/'!in' values are
 * comma-split into arrays.
 * (Fixed: was an implicit global; the '!like' case built
 * new RegExp('/' + val + '/'), which matched literal slash characters --
 * it now mirrors the 'like' prefix pattern.)
 * @param {Object} filterObj - { field, operator, value } from the where-clause.
 * @param {Object} filter - accumulator object, mutated and returned.
 * @returns {Object} the same `filter` object.
 */
var processFilter = function(filterObj, filter){
    var field = filterObj['field'];
    var operator = filterObj['operator'];
    var val = !isNaN(filterObj['value']) ? parseFloat(filterObj['value']) : filterObj['value'];
    switch (operator){
        case '=':
            filter[field] = val;
            break;
        case '!=':
            filter[field] = { $ne: val };
            break;
        case '>':
            filter[field] = { $gt: val };
            break;
        case '<':
            filter[field] = { $lt: val };
            break;
        case '>=':
            filter[field] = { $gte: val };
            break;
        case '<=':
            filter[field] = { $lte: val };
            break;
        case 'in':
            filter[field] = { $in: val.split(',') };
            break;
        case '!in':
            filter[field] = { $nin: val.split(',') };
            break;
        case 'like':
            filter[field] = { $regex: '^' + val + '.*' };
            break;
        case '!like':
            // negated prefix match, mirroring the 'like' case
            filter[field] = { $not: (new RegExp('^' + val + '.*')) };
            break;
    }
    return filter;
};
/**
 * Build a MongoDB sort object from an "order by" token.
 * A trailing 'desc' sorts descending (-1); 'asc' or nothing sorts ascending (1).
 * Whitespace has already been stripped by parseOperatorsInArray, so the token
 * looks like "namedesc,age".
 * (Fixed: was an implicit global, which throws under strict mode.)
 * @param {Array<string>} arr - token array; the sort token is consumed from the front.
 * @returns {Object} e.g. { name: -1, age: 1 }.
 */
var getSort = function(arr){
    var sort = {}, sortFields = getNext(arr)[0], field, order, val;
    sortFields.split(',').forEach(function(e, k){
        if (e.substring(e.length - 4, e.length) === 'desc'){
            field = e.substring(0, e.length - 4);
            val = -1;
        } else if (e.substring(e.length - 3, e.length) === 'asc'){
            field = e.substring(0, e.length - 3);
            val = 1;
        } else {
            field = e;
            val = 1;
        }
        sort[field] = val;
    });
    return sort;
};
/**
 * Check a collection name against the known `collections` list.
 * (Fixed: was an implicit global; native indexOf replaces the former
 * _.contains dependency with identical semantics for arrays.)
 * @param {string} collection
 * @returns {string} the name when valid, otherwise 'Invalid Collection.'.
 */
var validateCollection = function(collection){
    return collections.indexOf(collection) > -1 ? collection : 'Invalid Collection.';
};
// ######## Start of custom auto-complete code ########
/**
 * Entry point for the custom shell auto-complete: routes the typed prefix to
 * the matching completion step (snippet expansion, collection selection or
 * field selection) and returns the replacement candidates.
 */
function interceptAutoComplete(prefix, global, parts){
    if (prefix.length === 0){ // space only
        return ["')"];
    }
    var first = parts[0].toLowerCase();
    var expandToText = snippetMap[first];
    var lastChar = first.substring(first.length - 1, first.length);
    var lastTwoChars = first.substring(first.length - 2, first.length);
    if (first === 'sel'){
        // shorthand for starting a fresh query
        sqlQuery = "db.sql('select * from ";
        return [sqlQuery];
    }
    if (expandToText){
        return [expandToText];
    }
    var endsWithDigit = !isNaN(lastChar);
    if (!queryHasCollection){
        // still choosing a collection: pick by trailing number, or list candidates
        return endsWithDigit ? selectCollection(lastTwoChars, lastChar) : printCollections(first);
    }
    // collection chosen: same dance for its fields
    return endsWithDigit ? selectField(lastTwoChars, lastChar) : printFields(first);
}
/**
 * Print the current candidate `matches`, numbered and colorized, one per line.
 * With no matches the auto-complete state is reset and [''] is returned so the
 * shell inserts nothing.
 */
function printMatches(isField){
    if (matches.length === 0){
        resetGlobalVars();
        return [''];
    }
    print('\n');
    matches.forEach(function (m, i){
        print(colorize(i + ': ' + m, 'green', true, false));
    });
}
/**
 * No collection selected yet and the user typed text: either accept an exact
 * collection name, or list the collections starting with the typed prefix.
 */
function printCollections(first){
    var exact = collections.indexOf(first) > -1;
    if (exact){
        selectedCollection = first;
        sqlQuery += selectedCollection;
        return [selectedCollection];
    }
    matches = collections.filter(function(name){
        return name.toLowerCase().substring(0, first.length) === first;
    });
    printMatches();
}
/**
 * No collection selected yet and the user typed a number: pick that entry from
 * the previously printed `matches`, print its field table, and splice the name
 * into the query being built.
 */
function selectCollection(lastTwoChars, lastChar){
    // prefer a two-digit index when the last two characters are numeric
    var index = isNaN(lastTwoChars) ? lastChar : lastTwoChars;
    selectedCollection = matches[index];
    queryHasCollection = true;
    print('\n');
    generateFieldTable(selectedCollection);
    if (sqlQuery !== ''){
        sqlQuery += selectedCollection;
        return [selectedCollection];
    }
    sqlQuery = "db.sql('select * from " + selectedCollection;
    return [sqlQuery];
}
/**
 * Collection already selected and the user typed text: list that collection's
 * fields starting with the typed prefix, sorted by field name.
 */
function printFields(first){
    var fields = collectionFields[selectedCollection];
    var prefixed = fields.filter(function(f){
        return f.field.toLowerCase().substring(0, first.length) === first;
    });
    prefixed.sort(function(a, b){
        if (a.field < b.field) { return -1; }
        if (a.field > b.field) { return 1; }
        return 0;
    });
    matches = prefixed.map(function(f){ return f.field; });
    printMatches();
}
/**
 * Collection and field prefix chosen; the user typed a number: return the
 * matching field name from `matches`.
 */
function selectField(lastTwoChars, lastChar){
    // two trailing digits address matches beyond index 9
    var index = isNaN(lastTwoChars) ? lastChar : lastTwoChars;
    var field = matches[index];
    return [field];
}
// Convenience wrapper: list every collection name in the current database
// (relies on the shell-global `db`).
function showCollections(){
    return db.getCollectionNames();
}
/**
 * Return the auto-complete state machine to its initial state: no collection
 * chosen, empty query, no candidate matches.
 */
function resetGlobalVars (){
    matches = null;
    selectedCollection = '';
    sqlQuery = '';
    queryHasCollection = false;
}
// Render an ASCII table of a collection's fields and their observed types
// (# / Field / Types), sorted by field name, and print it in cyan.
// NOTE(review): AsciiTable, collectionFields, print and colorize are not
// defined in this file -- presumably provided by the surrounding mongo-shell
// setup; verify before reuse.
function generateFieldTable(collection){
    var table = new AsciiTable(collection);
    table.setHeading('#', 'Field', 'Types');
    var fields = collectionFields[collection];
    // rows are added in field-name order; the mapped return values are unused
    _.map(_.sortBy(fields, 'field'), function(d, i) {
        return table.addRow(i, d.field, d.types)
    });
    return print(colorize(table, 'cyan', true, false));
}
module.exports = {
    /**
     * Parse a SQL-ish query string into MongoDB find() arguments:
     * { projection, filter, sort, limit } (limit defaults to 20).
     * Relies on the module-level parse state declared above, which is
     * re-initialised on every call, and on buildWhere() being triggered as a
     * side effect of parseOperatorsInArray().
     * NOTE(review): the summary log below references ret.collection, which is
     * never set anywhere -- it always prints "db.undefined.find(...)".
     */
    parseSQL : function(sql){
        // normalise: 'NOT LIKE' -> '!like', then lowercase everything
        sql = sql.replace(/NOT LIKE/g,'!like').toLowerCase()
        // reset per-query module-level state
        whereValsWithSpaces = [], filterFields = [], operators = [], sqlishFilter = [], filter = {};
        hasTop = (sql.indexOf(' top ') > -1), hasWhere = (sql.indexOf('where') > -1), hasOrderBy = (sql.indexOf('order by') > -1), hasOr = (sql.indexOf(' or ') > -1);
        var limit, join, sort;
        // side effect: populates sqlishFilter via buildWhere() when a
        // where-clause is present
        var arr = parseOperatorsInArray(sql);
        // getNext(arr); // remove Select
        if (hasTop){
            getNext(arr); // remove top
            limit = getLimit(arr);
        }
        console.log(sql)
        console.log(arr)
        var projection = getProjection(arr);
        getNext(arr); // remove From
        if (hasWhere){
            // 'or' queries become a $or of individual filters; otherwise the
            // clauses are merged into one filter object
            var orObj = {}, orArr = [];
            sqlishFilter.forEach(function(f, fk){
                if (hasOr){
                    orArr.push(processFilter(f, {}));
                } else {
                    processFilter(f, filter);
                }
            });
            if (hasOr){
                filter = { $or: orArr };
            }
            getNext(arr, 2); // remove where and clause, since its handled earlier
        }
        if (hasOrderBy){
            getNext(arr); // remove order by
            sort = getSort(arr);
        }
        var ret = {
            projection: projection,
            filter: filter,
            sort: sort || {},
            limit: limit || 20
        };
        console.log('Converted Command: ' + 'db.' + ret.collection + '.find(' + JSON.stringify(ret.filter) + ', ' + JSON.stringify(ret.projection) + ').sort(' + JSON.stringify(ret.sort) + ').limit(' + ret.limit + ')');
        return ret;
    }
}

View file

@ -0,0 +1,84 @@
console.log('This translation tool uses Yandex.')
if(!process.argv[2]||!process.argv[3]||!process.argv[4]){
console.log('You must input arguments.')
console.log('# node translateLanguageFile.js <SOURCE> <FROM_LANGUAGE> <TO_LANGUAGE>')
console.log('Example:')
console.log('# node translateLanguageFile.js en_US en ar')
return
}
var defDir='../definitions/'
var fs=require('fs');
var https = require('https');
var jsonfile=require('jsonfile');
var source=require(defDir+process.argv[2]+'.json')
var list = Object.keys(source)
console.log(list.length)
var extra = ''
var current = 1
var currentItem = list[0]
var chosenFile = defDir+process.argv[4]+'.json'
try{
newList=require(chosenFile)
}catch(err){
console.log(chosenFile)
var newList={}
}
var newListAlphabetical={}
// Advance to the next dictionary key; once every key has been visited, copy
// the translations into a fresh object in alphabetical key order and write
// that object back to the target JSON file.
var goNext = function(){
    current += 1
    currentItem = list[current]
    if(current !== list.length){
        // more keys to go
        next(currentItem)
        return
    }
    console.log('complete checking.. please wait')
    var sortedKeys = Object.keys(newList).sort()
    sortedKeys.forEach(function(key){
        newListAlphabetical[key] = newList[key]
    })
    jsonfile.writeFile(chosenFile, newListAlphabetical, {spaces: 2}, function(){
        console.log('complete writing')
    })
}
// Translate a single key's value. Skips keys that already have a (different)
// translation in newList; on success stores the translation and advances with
// goNext().
var next=function(v){
    if(v===undefined){return false}
    // already translated and actually different from the source -- skip
    if(newList[v]&&newList[v]!==source[v]){
        goNext()
        return
    }
    // ask Yandex to preserve markup when the value contains HTML.
    // use a per-call variable: the old `extra+=` kept appending
    // '&format=html' to the shared global on every HTML value.
    var params = extra
    if(/<[a-z][\s\S]*>/i.test(source[v])===true){
        params += '&format=html'
    }
    // NOTE(review): the API key is hard-coded; the text is now URL-encoded so
    // values containing '&', '#', '+' or spaces survive the round trip.
    var url = 'https://translate.yandex.net/api/v1.5/tr.json/translate?key=trnsl.1.1.20160311T042953Z.341f2f63f38bdac6.c7e5c01fff7f57160141021ca61b60e36ff4d379'+params+'&lang='+process.argv[3]+'-'+process.argv[4]+'&text='+encodeURIComponent(source[v])
    https.request(url, function(data) {
        data.setEncoding('utf8');
        var chunks='';
        data.on('data', (chunk) => {
            chunks+=chunk;
        });
        data.on('end', () => {
            var translation
            try{
                chunks=JSON.parse(chunks)
                if(chunks.html){
                    // html responses may or may not be wrapped in an array
                    translation = chunks.html[0] ? chunks.html[0] : chunks.html
                }else{
                    translation = chunks.text[0]
                }
            }catch(err){
                // on a malformed response fall back to the untranslated text
                translation = source[v]
            }
            newList[v]=translation;
            console.log(current+'/'+list.length+','+v+' ---> '+translation)
            goNext()
        });
    }).on('error', function(e) {
        // keep the untranslated value and continue instead of crashing
        // (was: res.sendStatus(500) -- `res` does not exist in this script)
        console.log('ERROR : 500 '+v)
        newList[v]=source[v];
        goNext()
    }).end();
}
next(currentItem)

View file

@ -0,0 +1,84 @@
console.log('This translation tool uses Yandex.')
if(!process.argv[2]||!process.argv[3]||!process.argv[4]){
console.log('You must input arguments.')
console.log('# node translateLanguageFile.js <SOURCE> <FROM_LANGUAGE> <TO_LANGUAGE>')
console.log('Example:')
console.log('# node translateLanguageFile.js en_US en ar')
return
}
var langDir='../languages/'
var fs=require('fs');
var https = require('https');
var jsonfile=require('jsonfile');
var source=require(langDir+process.argv[2]+'.json')
var list = Object.keys(source)
console.log(list.length)
var extra = ''
var current = 1
var currentItem = list[0]
var chosenFile = langDir+process.argv[4]+'.json'
try{
newList=require(chosenFile)
}catch(err){
console.log(chosenFile)
var newList={}
}
var newListAlphabetical={}
// Advance to the next dictionary key; once every key has been visited, copy
// the translations into a fresh object in alphabetical key order and write
// that object back to the target JSON file.
var goNext = function(){
    current += 1
    currentItem = list[current]
    if(current !== list.length){
        // more keys to go
        next(currentItem)
        return
    }
    console.log('complete checking.. please wait')
    var sortedKeys = Object.keys(newList).sort()
    sortedKeys.forEach(function(key){
        newListAlphabetical[key] = newList[key]
    })
    jsonfile.writeFile(chosenFile, newListAlphabetical, {spaces: 2}, function(){
        console.log('complete writing')
    })
}
// Translate a single key's value. Skips keys that already have a (different)
// translation in newList; on success stores the translation and advances with
// goNext().
var next=function(v){
    if(v===undefined){return false}
    // already translated and actually different from the source -- skip
    if(newList[v]&&newList[v]!==source[v]){
        goNext()
        return
    }
    // ask Yandex to preserve markup when the value contains HTML.
    // use a per-call variable: the old `extra+=` kept appending
    // '&format=html' to the shared global on every HTML value.
    var params = extra
    if(/<[a-z][\s\S]*>/i.test(source[v])===true){
        params += '&format=html'
    }
    // NOTE(review): the API key is hard-coded; the text is now URL-encoded so
    // values containing '&', '#', '+' or spaces survive the round trip.
    var url = 'https://translate.yandex.net/api/v1.5/tr.json/translate?key=trnsl.1.1.20160311T042953Z.341f2f63f38bdac6.c7e5c01fff7f57160141021ca61b60e36ff4d379'+params+'&lang='+process.argv[3]+'-'+process.argv[4]+'&text='+encodeURIComponent(source[v])
    https.request(url, function(data) {
        data.setEncoding('utf8');
        var chunks='';
        data.on('data', (chunk) => {
            chunks+=chunk;
        });
        data.on('end', () => {
            var translation
            try{
                chunks=JSON.parse(chunks)
                if(chunks.html){
                    // html responses may or may not be wrapped in an array
                    translation = chunks.html[0] ? chunks.html[0] : chunks.html
                }else{
                    translation = chunks.text[0]
                }
            }catch(err){
                // on a malformed response fall back to the untranslated text
                translation = source[v]
            }
            newList[v]=translation;
            console.log(current+'/'+list.length+','+v+' ---> '+translation)
            goNext()
        });
    }).on('error', function(e) {
        // keep the untranslated value and continue instead of crashing
        // (was: res.sendStatus(500) -- `res` does not exist in this script)
        console.log('ERROR : 500 '+v)
        newList[v]=source[v];
        goNext()
    }).end();
}
next(currentItem)