Browse Source

修改成jessibuca

worker
李宇翔 6 years ago
parent
commit
1e1990d9b1
  1. 2
      Jessibuca.cpp
  2. 3
      Jessibuca.js
  3. 5
      Jessibuca.vcxproj
  4. 5
      Jessibuca.vcxproj.filters
  5. 2
      base.h
  6. 6
      cocos.js
  7. 18
      public/ff_aac.js
  8. 26
      public/hevc_aac.js
  9. 10
      public/index.html
  10. 64
      public/renderer.js
  11. 12
      slice.h

2
Jessibuca.cpp

@ -297,7 +297,7 @@ struct Jessica
}
clock_t getTimespan(clock_t t)
{
return call<clock_t>("timespan", t) + videoBuffer * 1000;
return call<clock_t>("timespan", t) + videoBuffer;
}
void $close()
{

3
Jessibuca.js

@ -176,6 +176,9 @@ mergeInto(LibraryManager.library, {
decoder.buffers[1].push(msg.buffers[1])
decoder.buffers[2].push(msg.buffers[2])
break
case "setVideoBuffer":
decoder.videoBuffer = msg.cmd*1000
break
case "close":
decoder.close()
break

5
Jessibuca.vcxproj

@ -125,10 +125,13 @@
<ClInclude Include="VideoDecoder.h" />
</ItemGroup>
<ItemGroup>
<None Include="cocos.js" />
<None Include="Jessibuca.js" />
<None Include="js\WebGLCanvas.js" />
<None Include="make.py" />
<None Include="public\ff_aac.html" />
<None Include="public\hevc_aac.html" />
<None Include="public\index.html" />
<None Include="public\renderer.js" />
</ItemGroup>
<ItemGroup>
<ClCompile Include="Jessibuca.cpp" />

5
Jessibuca.vcxproj.filters

@ -47,10 +47,13 @@
</ClInclude>
</ItemGroup>
<ItemGroup>
<None Include="js\WebGLCanvas.js" />
<None Include="make.py" />
<None Include="public\ff_aac.html" />
<None Include="Jessibuca.js" />
<None Include="cocos.js" />
<None Include="public\hevc_aac.html" />
<None Include="public\index.html" />
<None Include="public\renderer.js" />
</ItemGroup>
<ItemGroup>
<ClCompile Include="Jessibuca.cpp">

2
base.h

@ -30,7 +30,7 @@
#include <time.h>
using namespace std;
using namespace emscripten;
#include "MemoryStream.h"
//#include "MemoryStream.h"
#include "slice.h"
//#define USEBUFFERARRAY
#include "AudioDecoder.h"

6
cocos.js

@ -90,7 +90,7 @@ mergeInto(LibraryManager.library, {
})
Module.print = text => cc.log(text);
Module.printErr = text => cc.error(text)
Module.H5LiveClient = Module.H5LCBase.extend("H5LC", {
Module.Jessibuca = Module.Jessica.extend("Jessibuca", {
__construct: function (component) {
cc.dynamicAtlasManager.enabled = false;
let sprite = component.node.getComponent(cc.Sprite);
@ -130,7 +130,7 @@ mergeInto(LibraryManager.library, {
var reconnectCount = 0;
var reconnectTime = 2000;
cc.log('H5LiverClient play', url)
cc.log('Jessibuca play', url)
function setWebsocket() {
this.$play(url);
@ -161,7 +161,7 @@ mergeInto(LibraryManager.library, {
close: function () {
clearTimeout(this.reconnectId)
if (!this.isPlaying) return;
cc.log('close H5LiverClient')
cc.log('close Jessibuca')
this.isPlaying = false;
this.ws.onmessage = null;
this.ws.onclose = null;

18
public/ff_aac.js
File diff suppressed because it is too large
View File

26
public/hevc_aac.js
File diff suppressed because it is too large
View File

10
public/index.html

@ -1,9 +1,9 @@
<!DOCTYPE html>
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>H5LiveClient 1.0</title>
<title>Jessibuca 1.0</title>
<meta charset="utf-8" />
<style>
.btn {
@ -104,7 +104,7 @@
disabledMouseWheel(canvas);
canvas.width = 900;
canvas.height = 500;
var h5lc = new H5LiveClient({ canvas, decoder: "H5LiveClient.js" });
var h5lc = new Jessibuca({ canvas, decoder: "ff_aac.js" });
$play.addEventListener('click', function () {
if (isPlaying) {
return;
@ -131,8 +131,8 @@
function play() {
let stream = queryParam('stream') || 'user1';
// h5lc.play("ws://3.1.39.135:8080/live/user1.flv", canvas)
h5lc.play("ws://localhost:8080/live/" + stream)
h5lc.play("ws://3.1.39.135:8080/live/"+stream)
//h5lc.play("ws://localhost:8080/live/" + stream)
// h5lc.play("ws://119.9.118.39:8080/live/user1", canvas)
// h5lc.play("ws://test.qihaipi.com/gnddragon/test.flv", canvas)
}

64
public/renderer.js

@ -1,9 +1,9 @@
function H5LiveClient(opt) {
function Jessibuca(opt) {
this.canvasElement = opt.canvas;
this.contextOptions = opt.contextOptions;
this.videoBuffer = opt.videoBuffer || 1
if (!opt.forceNoGL) this.initContextGL();
if (this.contextGL) {
@ -18,6 +18,7 @@ function H5LiveClient(opt) {
switch (msg.cmd) {
case "init":
console.log("decoder worker init")
postMessage({cmd:"setVideoBuffer",time:_this.videoBuffer})
break
case "initSize":
_this.width = msg.w
@ -49,21 +50,22 @@ function H5LiveClient(opt) {
}
}
};
H5LiveClient.prototype.initAudioPlay = function (frameCount, samplerate, channels) {
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var context = new window.AudioContext();
function _unlock() {
context.resume();
var source = context.createBufferSource();
source.buffer = context.createBuffer(1, 1, 22050);
source.connect(context.destination);
if (source.noteOn)
source.noteOn(0);
else
source.start(0);
}
document.addEventListener("mousedown", _unlock, true);
document.addEventListener("touchend", _unlock, true);
// Older WebKit browsers only expose the prefixed AudioContext constructor.
window.AudioContext = window.AudioContext || window.webkitAudioContext;
// Browsers keep audio suspended until a user gesture. On the first
// mousedown/touchend we lazily create ONE AudioContext — cached on
// Jessibuca.prototype so every player instance shares it — resume it, and
// play a silent 1-sample buffer, a common trick to unlock audio output on
// mobile browsers.
// NOTE(review): the listeners are never removed, so _unlock re-runs on every
// gesture; harmless (context is reused) but could removeEventListener after
// the first successful resume.
function _unlock() {
var context = Jessibuca.prototype.audioContext = Jessibuca.prototype.audioContext|| new window.AudioContext();
context.resume();
var source = context.createBufferSource();
// 1 frame, 1 channel, 22050 Hz: the smallest buffer the API accepts.
source.buffer = context.createBuffer(1, 1, 22050);
source.connect(context.destination);
// noteOn() is the deprecated pre-spec name for start(); kept for old WebKit.
if (source.noteOn)
source.noteOn(0);
else
source.start(0);
}
document.addEventListener("mousedown", _unlock, true);
document.addEventListener("touchend", _unlock, true);
Jessibuca.prototype.initAudioPlay = function (frameCount, samplerate, channels) {
var context = this.audioContext;
var isPlaying = false;
var audioBuffers = [];
if (!context) return false;
@ -109,14 +111,14 @@ H5LiveClient.prototype.initAudioPlay = function (frameCount, samplerate, channel
/**
* Returns true if the canvas supports WebGL
*/
H5LiveClient.prototype.isWebGL = function () {
Jessibuca.prototype.isWebGL = function () {
// A truthy contextGL means initContextGL managed to create a GL context.
return Boolean(this.contextGL);
};
/**
* Create the GL context from the canvas element
*/
H5LiveClient.prototype.initContextGL = function () {
Jessibuca.prototype.initContextGL = function () {
var canvas = this.canvasElement;
var gl = null;
@ -149,7 +151,7 @@ H5LiveClient.prototype.initContextGL = function () {
/**
* Initialize GL shader program
*/
H5LiveClient.prototype.initProgram = function () {
Jessibuca.prototype.initProgram = function () {
var gl = this.contextGL;
var vertexShaderScript = [
@ -216,7 +218,7 @@ H5LiveClient.prototype.initProgram = function () {
/**
* Initialize vertex buffers and attach to shader program
*/
H5LiveClient.prototype.initBuffers = function () {
Jessibuca.prototype.initBuffers = function () {
var gl = this.contextGL;
var program = this.shaderProgram;
@ -242,7 +244,7 @@ H5LiveClient.prototype.initBuffers = function () {
/**
* Initialize GL textures and attach to shader program
*/
H5LiveClient.prototype.initTextures = function () {
Jessibuca.prototype.initTextures = function () {
var gl = this.contextGL;
var program = this.shaderProgram;
@ -265,7 +267,7 @@ H5LiveClient.prototype.initTextures = function () {
/**
* Create and configure a single texture
*/
H5LiveClient.prototype.initTexture = function () {
Jessibuca.prototype.initTexture = function () {
var gl = this.contextGL;
var textureRef = gl.createTexture();
@ -284,7 +286,7 @@ H5LiveClient.prototype.initTexture = function () {
* If this object is using WebGL, the data must be an I420 formatted ArrayBuffer,
* Otherwise, data must be an RGBA formatted ArrayBuffer.
*/
H5LiveClient.prototype.drawNextOutputPicture = function (width, height, croppingParams, data) {
Jessibuca.prototype.drawNextOutputPicture = function (width, height, croppingParams, data) {
var gl = this.contextGL;
if (gl) {
this.drawNextOuptutPictureGL(width, height, croppingParams, data);
@ -296,7 +298,7 @@ H5LiveClient.prototype.drawNextOutputPicture = function (width, height, cropping
/**
* Draw the next output picture using WebGL
*/
H5LiveClient.prototype.drawNextOuptutPictureGL = function (width, height, croppingParams, data) {
Jessibuca.prototype.drawNextOuptutPictureGL = function (width, height, croppingParams, data) {
var gl = this.contextGL;
var texturePosBuffer = this.texturePosBuffer;
var yTextureRef = this.yTextureRef;
@ -335,7 +337,7 @@ H5LiveClient.prototype.drawNextOuptutPictureGL = function (width, height, croppi
/**
* Draw next output picture using ARGB data on a 2d canvas.
*/
H5LiveClient.prototype.drawNextOuptutPictureRGBA = function (width, height, croppingParams, data) {
Jessibuca.prototype.drawNextOuptutPictureRGBA = function (width, height, croppingParams, data) {
// var canvas = this.canvasElement;
//var argbData = data;
//var ctx = canvas.getContext('2d');
@ -349,9 +351,9 @@ H5LiveClient.prototype.drawNextOuptutPictureRGBA = function (width, height, crop
this.ctx2d.putImageData(this.imageData, -croppingParams.left, -croppingParams.top, 0, 0, croppingParams.width, croppingParams.height);
}
};
H5LiveClient.prototype.ctx2d = null;
H5LiveClient.prototype.imageData = null;
H5LiveClient.prototype.initRGB = function (width, height) {
Jessibuca.prototype.ctx2d = null;
Jessibuca.prototype.imageData = null;
Jessibuca.prototype.initRGB = function (width, height) {
this.ctx2d = this.canvasElement.getContext('2d');
this.imageData = this.ctx2d.getImageData(0, 0, width, height);
this.clear = function () {
@ -359,10 +361,10 @@ H5LiveClient.prototype.initRGB = function (width, height) {
};
//Module.print(this.imageData);
};
H5LiveClient.prototype.close = function () {
/**
 * Stop playback: ask the decoder worker to shut down, then blank the
 * canvas by clearing the WebGL color buffer.
 */
Jessibuca.prototype.close = function () {
var gl = this.contextGL;
this.decoderWorker.postMessage({ cmd: "close" });
gl.clear(gl.COLOR_BUFFER_BIT);
};
H5LiveClient.prototype.play = function (url) {
/**
 * Start playback: hand the stream URL to the decoder worker, telling it
 * whether frames will be rendered through WebGL.
 * @param {string} url address of the live stream (e.g. a ws:// flv URL)
 */
Jessibuca.prototype.play = function (url) {
var request = { cmd: "play", url: url, isWebGL: this.isWebGL() };
this.decoderWorker.postMessage(request);
};

12
slice.h

@ -1,4 +1,14 @@
#pragma once
// Shorthands over the readB<bytes, T> template: consume N bytes from the
// slice and return them as T. The "B" suffix presumably marks big-endian
// (network order) reads — NOTE(review): readB's definition is not visible
// in this hunk; confirm the byte-order assumption against it.
#define readu8 readB<1, u8>
#define readUInt16B readB<2, unsigned short>
#define readUInt24B readB<3, unsigned int>
#define readUInt32B readB<4, unsigned int>
#define readDoubleB readB<8, double>
// Variants that rely on readB's default result type.
#define read1 readB<1>
#define read2B readB<2>
#define read3B readB<3>
#define read4B readB<4>
#define read8B readB<8>
#define OPERATOR_CHARS(x) \
operator const char *()\
{\
@ -106,7 +116,7 @@ struct Slice
}
unsigned char operator[](int i)
{
return ((unsigned char*)(*sharedStr))[p];
return ((unsigned char*)(*sharedStr))[p+i];
}
Slice operator+(const Slice& right) {
return append((void*)right.point(), right.length);

Loading…
Cancel
Save