
Shinobi CE officially lands on Gitlab

Author: Moe
Date: 2018-06-07 23:21:38 -07:00
Commit: f1406d4eec
431 changed files with 118157 additions and 0 deletions

tools/ffmpegToWeb/.gitignore
View file

@@ -0,0 +1,9 @@
node_modules
videos
events
frames
web.old
.DS_Store
.vagrant
conf.json
ffmpeg

View file

@@ -0,0 +1,3 @@
/npm-debug.log
/node_modules
/disc

View file

@@ -0,0 +1,4 @@
/npm-debug.log
/node_modules
/dist
/disc

View file

@@ -0,0 +1,19 @@
The following authors have all licensed their contributions to the project
under the licensing terms detailed in LICENSE (MIT style)
# h264-live-player
* Francois Leurent @131 <131.js@cloudyks.org>
# Broadway emscripten'd h264 (broadway/Decoder.js)
* Michael Bebenita <mbebenita@gmail.com>
* Alon Zakai <alonzakai@gmail.com>
* Andreas Gal <gal@mozilla.com>
* Mathieu 'p01' Henri <mathieu@p01.org>
* Matthias 'soliton4' Behrens <matthias.behrens@gmail.com>
# WebGL canvas helpers
* Sam Leitch @oneam
# AVC player inspiration
* Benjamin Xiao @urbenlegend

View file

@@ -0,0 +1,10 @@
Copyright (c) 2016, Project Authors (see AUTHORS file)
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the names of the Project Authors nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View file

@@ -0,0 +1,11 @@
# Motivation
This is a very efficient h264 video player (that can run on a live stream) for your browser.
You might use this with a raspicam raw h264 stream.
This is a player built around the [Broadway](https://github.com/mbebenita/Broadway) decoder, with a very simple API.
NAL units (h264 frames) are split on the server side, so the client side stays very simple (and allows easy frame skipping).
See [github sample project's page for more information](https://github.com/131/h264-live-player)
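A rough sketch of that server-side split (a hedged example assuming Node.js, the `stream-split` package, and a raw Annex-B h264 stream on stdin; `send()` is a hypothetical transport hook, not part of this project):

var Splitter = require('stream-split');
var NALseparator = new Buffer([0, 0, 0, 1]); // Annex-B start code
process.stdin
  .pipe(new Splitter(NALseparator)) // emits one chunk per NAL unit
  .on('data', function (nal) {
    // re-prefix the start code and hand the complete unit to the transport
    send(Buffer.concat([NALseparator, nal])); // send(): hypothetical, e.g. a websocket
  });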

File diff suppressed because one or more lines are too long

View file

@@ -0,0 +1,71 @@
"use strict";
var Class = require('uclass');
var Program = require('./Program');
var Shader = require('./Shader');
var Texture = require('./Texture');
var Script = require('./Script');
var WebGLCanvas = require('./WebGLCanvas');
var vertexShaderScript = Script.createFromSource("x-shader/x-vertex", `
attribute vec3 aVertexPosition;
attribute vec2 aTextureCoord;
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;
varying highp vec2 vTextureCoord;
void main(void) {
gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
vTextureCoord = aTextureCoord;
}
`);
var fragmentShaderScript = Script.createFromSource("x-shader/x-fragment", [
precision highp float;
varying highp vec2 vTextureCoord;
uniform sampler2D FTexture;
void main(void) {
gl_FragColor = texture2D(FTexture, vTextureCoord);
}
`);
var FilterWebGLCanvas = new Class({
Extends : WebGLCanvas,
initialize : function(canvas, size, useFrameBuffer) {
FilterWebGLCanvas.parent.initialize.call(this, canvas, size, useFrameBuffer);
},
onInitShaders: function() {
this.program = new Program(this.gl);
this.program.attach(new Shader(this.gl, vertexShaderScript));
this.program.attach(new Shader(this.gl, fragmentShaderScript));
this.program.link();
this.program.use();
this.vertexPositionAttribute = this.program.getAttributeLocation("aVertexPosition");
this.gl.enableVertexAttribArray(this.vertexPositionAttribute);
this.textureCoordAttribute = this.program.getAttributeLocation("aTextureCoord");
this.gl.enableVertexAttribArray(this.textureCoordAttribute);
},
onInitTextures: function () {
console.log("creatingTextures: size: " + this.size);
this.FTexture = new Texture(this.gl, this.size, this.gl.RGBA);
},
onInitSceneTextures: function () {
this.FTexture.bind(0, this.program, "FTexture");
},
process: function(buffer, output) {
this.FTexture.fill(buffer);
this.drawScene();
this.readPixels(output);
},
toString: function() {
return "FilterWebGLCanvas Size: " + this.size;
}
});
module.exports = FilterWebGLCanvas;

View file

@@ -0,0 +1,32 @@
"use strict";
var assert = require('../utils/assert');
function Program(gl) {
this.gl = gl;
this.program = this.gl.createProgram();
}
Program.prototype = {
attach: function (shader) {
this.gl.attachShader(this.program, shader.shader);
},
link: function () {
this.gl.linkProgram(this.program);
// If linking the shader program failed, report it.
assert(this.gl.getProgramParameter(this.program, this.gl.LINK_STATUS),
"Unable to initialize the shader program.");
},
use: function () {
this.gl.useProgram(this.program);
},
getAttributeLocation: function(name) {
return this.gl.getAttribLocation(this.program, name);
},
setMatrixUniform: function(name, array) {
var uniform = this.gl.getUniformLocation(this.program, name);
this.gl.uniformMatrix4fv(uniform, false, array);
}
};
module.exports = Program;

View file

@@ -0,0 +1,4 @@
/*
* These files wrap several WebGL constructs and provide a simple, single texture based WebGLCanvas as well as a
* specialized YUVWebGLCanvas that can handle YUV->RGB conversion.
*/

View file

@@ -0,0 +1,41 @@
"use strict";
var assert = require('../utils/assert');
/**
* Represents a WebGL shader script.
*/
function Script() {}
Script.createFromElementId = function(id) {
var script = document.getElementById(id);
// Didn't find an element with the specified ID, abort.
assert(script, "Could not find shader with ID: " + id);
// Walk through the source element's children, building the shader source string.
var source = "";
var currentChild = script.firstChild;
while(currentChild) {
if (currentChild.nodeType == 3) {
source += currentChild.textContent;
}
currentChild = currentChild.nextSibling;
}
var res = new Script();
res.type = script.type;
res.source = source;
return res;
};
Script.createFromSource = function(type, source) {
var res = new Script();
res.type = type;
res.source = source;
return res;
};
module.exports = Script;

View file

@@ -0,0 +1,38 @@
"use strict";
var error = require('../utils/error');
/**
* Represents a WebGL shader object and provides a mechanism to load shaders from HTML
* script tags.
*/
function Shader(gl, script) {
// Now figure out what type of shader script we have, based on its MIME type.
if (script.type == "x-shader/x-fragment") {
this.shader = gl.createShader(gl.FRAGMENT_SHADER);
} else if (script.type == "x-shader/x-vertex") {
this.shader = gl.createShader(gl.VERTEX_SHADER);
} else {
error("Unknown shader type: " + script.type);
return;
}
// Send the source to the shader object.
gl.shaderSource(this.shader, script.source);
// Compile the shader program.
gl.compileShader(this.shader);
// See if it compiled successfully.
if (!gl.getShaderParameter(this.shader, gl.COMPILE_STATUS)) {
error("An error occurred compiling the shaders: " + gl.getShaderInfoLog(this.shader));
return;
}
}
module.exports = Shader;

View file

@@ -0,0 +1,47 @@
"use strict";
var assert = require('../utils/assert');
/**
* Represents a WebGL texture object.
*/
function Texture(gl, size, format) {
this.gl = gl;
this.size = size;
this.texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, this.texture);
this.format = format ? format : gl.LUMINANCE;
gl.texImage2D(gl.TEXTURE_2D, 0, this.format, size.w, size.h, 0, this.format, gl.UNSIGNED_BYTE, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
}
var textureIDs = null;
Texture.prototype = {
fill: function(textureData, useTexSubImage2D) {
var gl = this.gl;
assert(textureData.length >= this.size.w * this.size.h,
"Texture size mismatch, data:" + textureData.length + ", texture: " + this.size.w * this.size.h);
gl.bindTexture(gl.TEXTURE_2D, this.texture);
if (useTexSubImage2D) {
gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, this.size.w , this.size.h, this.format, gl.UNSIGNED_BYTE, textureData);
} else {
// texImage2D seems to be faster, thus keeping it as the default
gl.texImage2D(gl.TEXTURE_2D, 0, this.format, this.size.w, this.size.h, 0, this.format, gl.UNSIGNED_BYTE, textureData);
}
},
bind: function(n, program, name) {
var gl = this.gl;
if (!textureIDs) {
textureIDs = [gl.TEXTURE0, gl.TEXTURE1, gl.TEXTURE2];
}
gl.activeTexture(textureIDs[n]);
gl.bindTexture(gl.TEXTURE_2D, this.texture);
gl.uniform1i(gl.getUniformLocation(program.program, name), n);
}
};
module.exports = Texture;

View file

@@ -0,0 +1,261 @@
"use strict";
/**
* Generic WebGL backed canvas that sets up: a quad to paint a texture on, appropriate vertex/fragment shaders,
* scene parameters and other things. Specialized versions of this class can be created by overriding several
* initialization methods.
*/
var Script = require('./Script');
var error = require('../utils/error');
var makePerspective = require('../utils/glUtils').makePerspective;
var Matrix = require('sylvester.js').Matrix;
var Class = require('uclass');
var Program = require('./Program');
var Shader = require('./Shader');
var Texture = require('./Texture');
var $V = require('sylvester.js').Vector.create;
var vertexShaderScript = Script.createFromSource("x-shader/x-vertex", `
attribute vec3 aVertexPosition;
attribute vec2 aTextureCoord;
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;
varying highp vec2 vTextureCoord;
void main(void) {
gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
vTextureCoord = aTextureCoord;
}
`);
var fragmentShaderScript = Script.createFromSource("x-shader/x-fragment", `
precision highp float;
varying highp vec2 vTextureCoord;
uniform sampler2D texture;
void main(void) {
gl_FragColor = texture2D(texture, vTextureCoord);
}
`);
var WebGLCanvas = new Class({
initialize : function(canvas, size, useFrameBuffer) {
this.canvas = canvas;
this.size = size;
this.canvas.width = size.w;
this.canvas.height = size.h;
this.onInitWebGL();
this.onInitShaders();
this.initBuffers();
if (useFrameBuffer)
this.initFramebuffer();
this.onInitTextures();
this.initScene();
},
/**
* Initialize a frame buffer so that we can render off-screen.
*/
initFramebuffer : function() {
var gl = this.gl;
// Create framebuffer object and texture.
this.framebuffer = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer);
this.framebufferTexture = new Texture(this.gl, this.size, gl.RGBA);
// Create and allocate renderbuffer for depth data.
var renderbuffer = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, renderbuffer);
gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, this.size.w, this.size.h);
// Attach texture and renderbuffer to the framebuffer.
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, this.framebufferTexture.texture, 0);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, renderbuffer);
},
/**
* Initialize vertex and texture coordinate buffers for a plane.
*/
initBuffers : function () {
var tmp;
var gl = this.gl;
// Create vertex position buffer.
this.quadVPBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.quadVPBuffer);
tmp = [
1.0, 1.0, 0.0,
-1.0, 1.0, 0.0,
1.0, -1.0, 0.0,
-1.0, -1.0, 0.0];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(tmp), gl.STATIC_DRAW);
this.quadVPBuffer.itemSize = 3;
this.quadVPBuffer.numItems = 4;
/*
+--------------------+
| -1,1 (1) | 1,1 (0)
| |
| |
| |
| |
| |
| -1,-1 (3) | 1,-1 (2)
+--------------------+
*/
var scaleX = 1.0;
var scaleY = 1.0;
// Create vertex texture coordinate buffer.
this.quadVTCBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.quadVTCBuffer);
tmp = [
scaleX, 0.0,
0.0, 0.0,
scaleX, scaleY,
0.0, scaleY,
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(tmp), gl.STATIC_DRAW);
},
mvIdentity : function () {
this.mvMatrix = Matrix.I(4);
},
mvMultiply : function(m) {
this.mvMatrix = this.mvMatrix.x(m);
},
mvTranslate : function (m) {
this.mvMultiply(Matrix.Translation($V([m[0], m[1], m[2]])).ensure4x4());
},
setMatrixUniforms : function () {
this.program.setMatrixUniform("uPMatrix", new Float32Array(this.perspectiveMatrix.flatten()));
this.program.setMatrixUniform("uMVMatrix", new Float32Array(this.mvMatrix.flatten()));
},
initScene : function() {
var gl = this.gl;
// Establish the perspective with which we want to view the
// scene. Our field of view is 45 degrees, with a width/height
// ratio of 640:480, and we only want to see objects between 0.1 units
// and 100 units away from the camera.
this.perspectiveMatrix = makePerspective(45, 1, 0.1, 100.0);
// Set the drawing position to the "identity" point, which is
// the center of the scene.
this.mvIdentity();
// Now move the drawing position a bit to where we want to start
// drawing the square.
this.mvTranslate([0.0, 0.0, -2.4]);
// Draw the cube by binding the array buffer to the cube's vertices
// array, setting attributes, and pushing it to GL.
gl.bindBuffer(gl.ARRAY_BUFFER, this.quadVPBuffer);
gl.vertexAttribPointer(this.vertexPositionAttribute, 3, gl.FLOAT, false, 0, 0);
// Set the texture coordinates attribute for the vertices.
gl.bindBuffer(gl.ARRAY_BUFFER, this.quadVTCBuffer);
gl.vertexAttribPointer(this.textureCoordAttribute, 2, gl.FLOAT, false, 0, 0);
this.onInitSceneTextures();
this.setMatrixUniforms();
if (this.framebuffer) {
console.log("Bound Frame Buffer");
gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer);
}
},
toString: function() {
return "WebGLCanvas Size: " + this.size;
},
checkLastError: function (operation) {
var err = this.gl.getError();
if (err != this.gl.NO_ERROR) {
var name = this.glNames[err];
name = (name !== undefined) ? name + "(" + err + ")" :
("Unknown WebGL ENUM (0x" + err.toString(16) + ")");
if (operation) {
console.log("WebGL Error: %s, %s", operation, name);
} else {
console.log("WebGL Error: %s", name);
}
console.trace();
}
},
onInitWebGL: function () {
try {
this.gl = this.canvas.getContext("experimental-webgl");
} catch(e) {}
if (!this.gl) {
error("Unable to initialize WebGL. Your browser may not support it.");
}
if (this.glNames) {
return;
}
this.glNames = {};
for (var propertyName in this.gl) {
if (typeof this.gl[propertyName] == 'number') {
this.glNames[this.gl[propertyName]] = propertyName;
}
}
},
onInitShaders: function() {
this.program = new Program(this.gl);
this.program.attach(new Shader(this.gl, vertexShaderScript));
this.program.attach(new Shader(this.gl, fragmentShaderScript));
this.program.link();
this.program.use();
this.vertexPositionAttribute = this.program.getAttributeLocation("aVertexPosition");
this.gl.enableVertexAttribArray(this.vertexPositionAttribute);
this.textureCoordAttribute = this.program.getAttributeLocation("aTextureCoord");
this.gl.enableVertexAttribArray(this.textureCoordAttribute);
},
onInitTextures: function () {
var gl = this.gl;
this.texture = new Texture(gl, this.size, gl.RGBA);
},
onInitSceneTextures: function () {
this.texture.bind(0, this.program, "texture");
},
drawScene: function() {
this.gl.drawArrays(this.gl.TRIANGLE_STRIP, 0, 4);
},
readPixels: function(buffer) {
var gl = this.gl;
gl.readPixels(0, 0, this.size.w, this.size.h, gl.RGBA, gl.UNSIGNED_BYTE, buffer);
},
});
module.exports = WebGLCanvas;
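A minimal usage sketch for this base class (hedged: the element id and the pixel source are assumptions; `Size` is the utility class from utils/):

var Size = require('../utils/Size');
var WebGLCanvas = require('./WebGLCanvas');
var view = new WebGLCanvas(document.getElementById('view'), new Size(640, 480));
var rgba = new Uint8Array(640 * 480 * 4); // one RGBA frame, e.g. from a decoder
view.texture.fill(rgba);   // upload to the single scene texture
view.drawScene();          // paint the textured quad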

View file

@@ -0,0 +1,51 @@
"use strict";
var Class = require('uclass');
var YUVCanvas = new Class({
Binds : ['decode'],
initialize : function(canvas, size) {
this.canvas = canvas;
this.canvasCtx = this.canvas.getContext("2d");
this.canvasBuffer = this.canvasCtx.createImageData(size.w, size.h);
},
decode : function (buffer, width, height) {
if (!buffer)
return;
var lumaSize = width * height;
var chromaSize = lumaSize >> 2;
var ybuf = buffer.subarray(0, lumaSize);
var ubuf = buffer.subarray(lumaSize, lumaSize + chromaSize);
var vbuf = buffer.subarray(lumaSize + chromaSize, lumaSize + 2 * chromaSize);
for (var y = 0; y < height; y++) {
for (var x = 0; x < width; x++) {
var yIndex = x + y * width;
var uIndex = ~~(y / 2) * ~~(width / 2) + ~~(x / 2);
var vIndex = ~~(y / 2) * ~~(width / 2) + ~~(x / 2);
var R = 1.164 * (ybuf[yIndex] - 16) + 1.596 * (vbuf[vIndex] - 128);
var G = 1.164 * (ybuf[yIndex] - 16) - 0.813 * (vbuf[vIndex] - 128) - 0.391 * (ubuf[uIndex] - 128);
var B = 1.164 * (ybuf[yIndex] - 16) + 2.018 * (ubuf[uIndex] - 128);
var rgbIndex = yIndex * 4;
this.canvasBuffer.data[rgbIndex+0] = R;
this.canvasBuffer.data[rgbIndex+1] = G;
this.canvasBuffer.data[rgbIndex+2] = B;
this.canvasBuffer.data[rgbIndex+3] = 0xff;
}
}
this.canvasCtx.putImageData(this.canvasBuffer, 0, 0);
var date = new Date();
//console.log("WSAvcPlayer: Decode time: " + (date.getTime() - this.rcvtime) + " ms");
},
});
module.exports = YUVCanvas;
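This is the software fallback: the same `decode(buffer, width, height)` contract as the WebGL canvases, but with the YUV420 to RGB conversion done per pixel on the CPU. A hedged sketch (element id and frame buffer are assumptions):

var Size = require('../utils/Size');
var YUVCanvas = require('./YUVCanvas');
var view = new YUVCanvas(document.getElementById('view'), new Size(320, 240));
// i420: Uint8Array with the Y plane (320*240 bytes) followed by the
// quarter-size U and V planes (320*240/4 bytes each)
view.decode(i420, 320, 240);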

View file

@@ -0,0 +1,108 @@
"use strict";
var Program = require('./Program');
var Shader = require('./Shader');
var Texture = require('./Texture');
var Script = require('./Script');
var WebGLCanvas = require('./WebGLCanvas');
var Class = require('uclass');
var vertexShaderScript = Script.createFromSource("x-shader/x-vertex", `
attribute vec3 aVertexPosition;
attribute vec2 aTextureCoord;
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;
varying highp vec2 vTextureCoord;
void main(void) {
gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
vTextureCoord = aTextureCoord;
}
`);
var fragmentShaderScript = Script.createFromSource("x-shader/x-fragment", `
precision highp float;
varying highp vec2 vTextureCoord;
uniform sampler2D YTexture;
uniform sampler2D UTexture;
uniform sampler2D VTexture;
const mat4 YUV2RGB = mat4
(
1.1643828125, 0, 1.59602734375, -.87078515625,
1.1643828125, -.39176171875, -.81296875, .52959375,
1.1643828125, 2.017234375, 0, -1.081390625,
0, 0, 0, 1
);
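// BT.601 limited-range coefficients. GLSL mat4 fills column-major, so each
// line above becomes a column; the row-vector multiply in main() (vec4 * mat4)
// then dots (Y, U, V, 1) against each written line to produce R, G and B.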
void main(void) {
gl_FragColor = vec4( texture2D(YTexture, vTextureCoord).x, texture2D(UTexture, vTextureCoord).x, texture2D(VTexture, vTextureCoord).x, 1) * YUV2RGB;
}
`);
var YUVWebGLCanvas = new Class({
Extends : WebGLCanvas,
Binds : ['decode'],
initialize : function(canvas, size) {
YUVWebGLCanvas.parent.initialize.call(this, canvas, size);
},
onInitShaders: function() {
this.program = new Program(this.gl);
this.program.attach(new Shader(this.gl, vertexShaderScript));
this.program.attach(new Shader(this.gl, fragmentShaderScript));
this.program.link();
this.program.use();
this.vertexPositionAttribute = this.program.getAttributeLocation("aVertexPosition");
this.gl.enableVertexAttribArray(this.vertexPositionAttribute);
this.textureCoordAttribute = this.program.getAttributeLocation("aTextureCoord");
this.gl.enableVertexAttribArray(this.textureCoordAttribute);
},
onInitTextures: function () {
console.log("creatingTextures: size: " + this.size);
this.YTexture = new Texture(this.gl, this.size);
this.UTexture = new Texture(this.gl, this.size.getHalfSize());
this.VTexture = new Texture(this.gl, this.size.getHalfSize());
},
onInitSceneTextures: function () {
this.YTexture.bind(0, this.program, "YTexture");
this.UTexture.bind(1, this.program, "UTexture");
this.VTexture.bind(2, this.program, "VTexture");
},
fillYUVTextures: function(y, u, v) {
this.YTexture.fill(y);
this.UTexture.fill(u);
this.VTexture.fill(v);
},
decode: function(buffer, width, height) {
if (!buffer)
return;
var lumaSize = width * height;
var chromaSize = lumaSize >> 2;
this.YTexture.fill(buffer.subarray(0, lumaSize));
this.UTexture.fill(buffer.subarray(lumaSize, lumaSize + chromaSize));
this.VTexture.fill(buffer.subarray(lumaSize + chromaSize, lumaSize + 2 * chromaSize));
this.drawScene();
},
toString: function() {
return "YUVCanvas Size: " + this.size;
}
});
module.exports = YUVWebGLCanvas;

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@@ -0,0 +1,35 @@
{
"name": "h264-live-player",
"version": "1.3.1",
"main": "wsavc/index.js",
"scripts": {
"dist": "browserify --bare --standalone WSAvcPlayer --plugin discify wsavc/ > dist/http-live-player.js",
"test": "echo \"Error: no test specified\" && exit 1"
},
"repository": {
"type": "git",
"url": "git@github.com:131/h264-live-player.git"
},
"keywords": [
"h264",
"nal",
"live",
"broadcast",
"streaming"
],
"author": "Francois Leurent <131.js@cloudyks.org>",
"license": "ISC",
"bugs": {
"url": "https://github.com/131/h264-live-player/issues"
},
"description": "This is a very simple h264 video player (that can run on live stream) for your browser.\r You might use this with raspicam raw h264 stream.\r This is a player around [Broadway](https://github.com/mbebenita/Broadway) Decoder, with very simple API.\r NAL unit (h264 frames) are split on the server side, so the client side is very simple (and allow frame skipping easily)",
"dependencies": {
"debug": "^2.3.2",
"sylvester.js": "^0.1.1",
"uclass": "^2.4.0"
},
"devDependencies": {
"browserify": "^13.0.0",
"discify": "^1.4.2"
}
}

View file

@@ -0,0 +1,23 @@
"use strict";
/**
* Represents a 2-dimensional size value.
*/
function Size(w, h) {
this.w = w;
this.h = h;
}
Size.prototype = {
toString: function () {
return "(" + this.w + ", " + this.h + ")";
},
getHalfSize: function() {
return new Size(this.w >>> 1, this.h >>> 1);
},
length: function() {
return this.w * this.h;
}
};
module.exports = Size;

View file

@@ -0,0 +1,12 @@
"use strict";
var error = require('./error');
function assert(condition, message) {
if (!condition) {
error(message);
}
}
module.exports = assert;

View file

@@ -0,0 +1,8 @@
"use strict";
function error(message) {
console.error(message);
console.trace();
}
module.exports = error;

View file

@@ -0,0 +1,117 @@
"use strict";
var Matrix = require('sylvester.js').Matrix;
var Vector = require('sylvester.js').Vector;
var $M = Matrix.create;
// augment Sylvester some
Matrix.Translation = function (v)
{
if (v.elements.length == 2) {
var r = Matrix.I(3);
r.elements[2][0] = v.elements[0];
r.elements[2][1] = v.elements[1];
return r;
}
if (v.elements.length == 3) {
var r = Matrix.I(4);
r.elements[0][3] = v.elements[0];
r.elements[1][3] = v.elements[1];
r.elements[2][3] = v.elements[2];
return r;
}
throw "Invalid length for Translation";
}
Matrix.prototype.flatten = function ()
{
var result = [];
if (this.elements.length == 0)
return [];
for (var j = 0; j < this.elements[0].length; j++)
for (var i = 0; i < this.elements.length; i++)
result.push(this.elements[i][j]);
return result;
}
Matrix.prototype.ensure4x4 = function()
{
if (this.elements.length == 4 &&
this.elements[0].length == 4)
return this;
if (this.elements.length > 4 ||
this.elements[0].length > 4)
return null;
for (var i = 0; i < this.elements.length; i++) {
for (var j = this.elements[i].length; j < 4; j++) {
if (i == j)
this.elements[i].push(1);
else
this.elements[i].push(0);
}
}
for (var i = this.elements.length; i < 4; i++) {
if (i == 0)
this.elements.push([1, 0, 0, 0]);
else if (i == 1)
this.elements.push([0, 1, 0, 0]);
else if (i == 2)
this.elements.push([0, 0, 1, 0]);
else if (i == 3)
this.elements.push([0, 0, 0, 1]);
}
return this;
};
Vector.prototype.flatten = function ()
{
return this.elements;
};
//
// gluPerspective
//
function makePerspective(fovy, aspect, znear, zfar)
{
var ymax = znear * Math.tan(fovy * Math.PI / 360.0);
var ymin = -ymax;
var xmin = ymin * aspect;
var xmax = ymax * aspect;
return makeFrustum(xmin, xmax, ymin, ymax, znear, zfar);
}
//
// glFrustum
//
function makeFrustum(left, right,
bottom, top,
znear, zfar)
{
var X = 2*znear/(right-left);
var Y = 2*znear/(top-bottom);
var A = (right+left)/(right-left);
var B = (top+bottom)/(top-bottom);
var C = -(zfar+znear)/(zfar-znear);
var D = -2*zfar*znear/(zfar-znear);
return $M([[X, 0, A, 0],
[0, Y, B, 0],
[0, 0, C, D],
[0, 0, -1, 0]]);
}
module.exports.makePerspective = makePerspective;

View file

@@ -0,0 +1,17 @@
"use strict";
/**
* Creates a new prototype object derived from another object's prototype along with a list of additional properties.
*
* @param base object whose prototype to use as the created prototype object's prototype
* @param properties additional properties to add to the created prototype object
*/
function inherit(base, properties) {
var prot = Object.create(base.prototype);
for (var p in properties) {
prot[p] = properties[p];
}
return prot;
}
module.exports = inherit;

View file

@@ -0,0 +1,159 @@
"use strict";
var Avc = require('../broadway/Decoder');
var YUVWebGLCanvas = require('../canvas/YUVWebGLCanvas');
var YUVCanvas = require('../canvas/YUVCanvas');
var Size = require('../utils/Size');
var Class = require('uclass');
var Events = require('uclass/events');
var debug = require('debug');
var log = debug("wsavc");
var WSAvcPlayer = new Class({
Implements : [Events],
initialize : function(canvas, canvastype) {
this.canvas = canvas;
this.canvastype = canvastype;
// AVC codec initialization
this.avc = new Avc();
if(false) this.avc.configure({
filter: "original",
filterHorLuma: "optimized",
filterVerLumaEdge: "optimized",
getBoundaryStrengthsA: "optimized"
});
//WebSocket variable
this.ws;
this.pktnum = 0;
},
decode : function(data) {
var naltype = "invalid frame";
if (data.length > 4) {
if (data[4] == 0x65) {
naltype = "I frame";
}
else if (data[4] == 0x41) {
naltype = "P frame";
}
else if (data[4] == 0x67) {
naltype = "SPS";
}
else if (data[4] == 0x68) {
naltype = "PPS";
}
}
//log("Passed " + naltype + " to decoder");
this.avc.decode(data);
},
connect : function(url) {
// Websocket initialization
if (this.ws != undefined) {
this.ws.close();
delete this.ws;
}
this.ws = new WebSocket(url);
this.ws.binaryType = "arraybuffer";
this.ws.onopen = () => {
log("Connected to " + url);
};
var framesList = [];
this.ws.onmessage = (evt) => {
if(typeof evt.data == "string")
return this.cmd(JSON.parse(evt.data));
this.pktnum++;
var frame = new Uint8Array(evt.data);
//log("[Pkt " + this.pktnum + " (" + evt.data.byteLength + " bytes)]");
//this.decode(frame);
framesList.push(frame);
};
var running = true;
var shiftFrame = function() {
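// Drain the queue once per display refresh; if decoding falls more than
// 10 frames behind the socket, drop the backlog so playback stays live.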
if(!running)
return;
if(framesList.length > 10) {
log("Dropping frames", framesList.length);
framesList = [];
}
var frame = framesList.shift();
if(frame)
this.decode(frame);
requestAnimationFrame(shiftFrame);
}.bind(this);
shiftFrame();
this.ws.onclose = () => {
running = false;
log("WSAvcPlayer: Connection closed")
};
},
initCanvas : function(width, height) {
var canvasFactory = this.canvastype == "webgl" || this.canvastype == "YUVWebGLCanvas"
? YUVWebGLCanvas
: YUVCanvas;
var canvas = new canvasFactory(this.canvas, new Size(width, height));
this.avc.onPictureDecoded = canvas.decode;
this.emit("canvasReady", width, height);
},
cmd : function(cmd){
log("Incoming request", cmd);
if(cmd.action == "init") {
this.initCanvas(cmd.width, cmd.height);
this.canvas.width = cmd.width;
this.canvas.height = cmd.height;
}
},
disconnect : function() {
this.ws.close();
},
playStream : function() {
var message = "REQUESTSTREAM ";
this.ws.send(message);
log("Sent " + message);
},
stopStream : function() {
this.ws.send("STOPSTREAM");
log("Sent STOPSTREAM");
},
});
module.exports = WSAvcPlayer;
module.exports.debug = debug;
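A hedged usage sketch (the websocket URL and canvas element are assumptions; the server is expected to send the "init" command and then raw NAL units):

var WSAvcPlayer = require('./wsavc');
var player = new WSAvcPlayer(document.getElementById('player'), "webgl"); // "webgl" selects YUVWebGLCanvas
player.connect("ws://127.0.0.1:8080/ws"); // hypothetical endpoint
// assuming the uclass Events mixin exposes on():
player.on("canvasReady", function (w, h) { // emitted after the server's "init"
  player.playStream(); // sends "REQUESTSTREAM "
});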

View file

@@ -0,0 +1,69 @@
/* Polyfill indexOf. */
var indexOf;
if (typeof Array.prototype.indexOf === 'function') {
indexOf = function (haystack, needle) {
return haystack.indexOf(needle);
};
} else {
indexOf = function (haystack, needle) {
var i = 0, length = haystack.length, idx = -1, found = false;
while (i < length && !found) {
if (haystack[i] === needle) {
idx = i;
found = true;
}
i++;
}
return idx;
};
}
/* Polyfill EventEmitter. */
var EventEmitter = function () {
this.events = {};
};
EventEmitter.prototype.on = function (event, listener) {
if (typeof this.events[event] !== 'object') {
this.events[event] = [];
}
this.events[event].push(listener);
};
EventEmitter.prototype.removeListener = function (event, listener) {
var idx;
if (typeof this.events[event] === 'object') {
idx = indexOf(this.events[event], listener);
if (idx > -1) {
this.events[event].splice(idx, 1);
}
}
};
EventEmitter.prototype.emit = function (event) {
var i, listeners, length, args = [].slice.call(arguments, 1);
if (typeof this.events[event] === 'object') {
listeners = this.events[event].slice();
length = listeners.length;
for (i = 0; i < length; i++) {
listeners[i].apply(this, args);
}
}
};
EventEmitter.prototype.once = function (event, listener) {
this.on(event, function g () {
this.removeListener(event, g);
listener.apply(this, arguments);
});
};
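A quick sanity check of the polyfill's once/emit behavior:

var bus = new EventEmitter();
bus.once('ready', function (msg) { console.log('got', msg); });
bus.emit('ready', 'first');  // logs: got first
bus.emit('ready', 'second'); // no output; the listener removed itself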

View file

@@ -0,0 +1,117 @@
// Shinobi (http://shinobi.video) - FFMPEG H.264 over HTTP Test
// How to Use raw H.264 (Simulated RTSP)
// 1. Start with `node ffmpegToWeb.js`
// 2. Get the IP address of the computer where you did step 1. Example : 127.0.0.1
// 3. Open VLC and "Open Network Stream".
// 4. Input the following without quotes : `http://127.0.0.1:8001/h264` and start.
var child = require('child_process');
var Splitter = require('stream-split')
var events = require('events');
var express = require('express')
var app = express();
var server = require('http').Server(app);
var io = require('socket.io')(server);
var spawn = child.spawn;
var exec = child.exec;
var Emitters = {}
var config = {
port:8001,
url:'rtsp://131.95.3.162/axis-media/media.3gp'
}
var initEmitter = function(feed){
if(!Emitters[feed]){
Emitters[feed] = new events.EventEmitter().setMaxListeners(0)
}
return Emitters[feed]
}
var NALseparator = new Buffer([0,0,0,1]);
//web app
console.log('Starting Express Web Server on Port '+config.port)
server.listen(config.port);
app.use('/libs',express.static(__dirname + '/../../web/libs'));
app.use('/Player',express.static(__dirname + '/Player'));
app.get('/', function (req, res) {
res.sendFile(__dirname + '/index.html');
})
//ffmpeg pushed stream in here to make a pipe
app.all('/streamIn/:feed', function (req, res) {
req.Emitter = initEmitter(req.params.feed)
//req.params.feed = Feed Number (Pipe Number)
res.connection.setTimeout(0);
var Split = new Splitter(NALseparator)
var cn = io.to('STREAM_'+req.params.feed)
req.on('data', function(buffer){
req.Emitter.emit('data',buffer)
});
req.pipe(Split).on('data',function(buffer){
cn.emit('h264_'+req.params.feed,Buffer.concat([NALseparator,buffer]))
})
req.on('end',function(){
delete(Split)
console.log('close');
});
})
//socket.io client commands
io.on('connection', function (cn) {
cn.on('f',function (data) {
switch(data.function){
case'getStream':
console.log(data)
cn.join('STREAM_'+data.feed)
break;
}
})
});
//simulate RTSP over HTTP
app.get(['/h264','/h264/:feed'], function (req, res) {
if(!req.params.feed){req.params.feed='1'}
req.Emitter = initEmitter(req.params.feed)
var contentWriter
var date = new Date();
res.writeHead(200, {
'Date': date.toUTCString(),
'Connection': 'keep-alive',
'Cache-Control': 'no-cache',
'Pragma': 'no-cache',
'Content-Type': 'video/mp4',
'Server': 'Shinobi H.264 Test Stream',
});
req.Emitter.on('data',contentWriter=function(buffer){
res.write(buffer)
})
res.on('close', function () {
req.Emitter.removeListener('data',contentWriter)
})
});
//ffmpeg
console.log('Starting FFMPEG')
var ffmpegString = '-i '+config.url+''
ffmpegString += ' -f mpegts -c:v mpeg1video -an http://localhost:'+config.port+'/streamIn/1'
ffmpegString += ' -pix_fmt yuv420p -b:v 600k -f rawvideo -c:v libx264 -vprofile baseline -tune zerolatency http://localhost:'+config.port+'/streamIn/2'
//ffmpegString += ' -f mpegts -c:v mpeg1video -an http://localhost:'+config.port+'/streamIn/2'
if(ffmpegString.indexOf('rtsp://')>-1){
ffmpegString='-rtsp_transport tcp '+ffmpegString
}
console.log('Executing : ffmpeg '+ffmpegString)
var ffmpeg = spawn('ffmpeg',ffmpegString.split(' '));
ffmpeg.on('close', function (buffer) {
console.log('ffmpeg died')
})
//ffmpeg.stderr.on('data', function (buffer) {
// console.log(buffer.toString())
//});
//ffmpeg.stdout.on('data', function (buffer) {
// Emitter.emit('data',buffer)
//});

View file

@@ -0,0 +1,45 @@
<script src="/libs/js/socket.io.js"></script>
<script src="/libs/js/jquery.min.js"></script>
<!--Socket.IO Connection-->
<script>
var socket = null
socket = io();
socket.on('connect',function(){
console.log('socket connected')
//pretend this is the command you use to initiate getting H.264 (MPEG) data
socket.emit('f',{function:'getStream',feed:'1'})
socket.emit('f',{function:'getStream',feed:'2'})
})
</script>
<!--Special JSMPEG-->
<!--
<script src="/libs/js/jsmpeg.pipe.js"></script>
<canvas id="canvas_jsmpeg" height=500 width=500></canvas>
<script>
// initiate a player that can be piped to.
var player = new JSMpeg.Player('pipe',{
canvas:document.getElementById('canvas_jsmpeg')
});
//on data from "h264" handle
socket.on('h264_1', function (data) {
// `data.buffer` is the raw video data from FFMPEG
// pretend you are getting data as follows
// var data = {buffer:ArrayBuffer}
player.write(data.buffer)
});
</script>
-->
<!--H264 Live Player-->
<script type="text/javascript" src="/Player/broadway/h264liveplayer/http-live-player.js">;</script>
<canvas id="canvas_h264_live_player" height=500 width=500></canvas>
<script>
// initiate a player that can be piped to.
var wsavc = new WSAvcPlayer(document.getElementById('canvas_h264_live_player'), "webgl", 1, 35);
wsavc.initCanvas(500,500)
wsavc.connect();
socket.on('h264_2', function (data) {
// pretend you are getting data as follows
// data = {buffer:ArrayBuffer}
wsavc.write(data)
});
</script>

View file

@@ -0,0 +1,15 @@
{
"name": "ffmpegtoweb",
"version": "1.0.0",
"description": "Shinobi Testing Tool for H.264 over HTTP and Socket.IO",
"main": "ffmpegToWeb.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"dependencies": {
"express": "^4.14.0",
"socket.io": "^1.7.1"
},
"author": "Moe Alam",
"license": "MIT"
}