
Add Cocos support

Branch: worker
李宇翔 committed 6 years ago
commit 69febbd018
12 changed files:

  1. H5LiveClient.cpp (2)
  2. H5LiveClient.js (35)
  3. cocos.js (247)
  4. cocosCom.js (37)
  5. make.py (48)
  6. public/264_mp3.js (14)
  7. public/H5LiveClient.js (18)
  8. public/H5LiveClient.worker.js (163)
  9. public/ff_mp3.js (30)
  10. public/index.html (5)
  11. settings.js (1254)
  12. thirdparty/libhevc (1)

H5LiveClient.cpp (2)

@@ -239,7 +239,7 @@ struct H5LCBase
if (videoBuffer && (bufferIsPlaying || checkTimeout(_timestamp)))
{
videoBuffers.emplace(_timestamp, forward<MemoryStream>(data));
emscripten_log(0, "push timestamp:%d", _timestamp);
//emscripten_log(0, "push timestamp:%d", _timestamp);
// auto &&info = val::object();
// info.set("code", "NetStream.Play.Start");
// call<void>("onNetStatus", info);

H5LiveClient.js (35)

@@ -229,21 +229,21 @@ mergeInto(LibraryManager.library, {
var yTextureRef = this.yTextureRef;
var uTextureRef = this.uTextureRef;
var vTextureRef = this.vTextureRef;
if (!croppingParams) {
gl.viewport(0, 0, width, height);
} else {
gl.viewport(0, 0, croppingParams.width, croppingParams.height);
var tTop = croppingParams.top / height;
var tLeft = croppingParams.left / width;
var tBottom = croppingParams.height / height;
var tRight = croppingParams.width / width;
var texturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]);
gl.bindBuffer(gl.ARRAY_BUFFER, texturePosBuffer);
gl.bufferData(gl.ARRAY_BUFFER, texturePosValues, gl.DYNAMIC_DRAW);
}
this.contextGL.viewport(0, 0, this.canvasElement.width, this.canvasElement.height);
// if (!croppingParams) {
// gl.viewport(0, 0, width, height);
// } else {
// gl.viewport(0, 0, croppingParams.width, croppingParams.height);
// var tTop = croppingParams.top / height;
// var tLeft = croppingParams.left / width;
// var tBottom = croppingParams.height / height;
// var tRight = croppingParams.width / width;
// var texturePosValues = new Float32Array([tRight, tTop, tLeft, tTop, tRight, tBottom, tLeft, tBottom]);
// gl.bindBuffer(gl.ARRAY_BUFFER, texturePosBuffer);
// gl.bufferData(gl.ARRAY_BUFFER, texturePosValues, gl.DYNAMIC_DRAW);
// }
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, yTextureRef);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, width, height, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, data[0]);
@@ -439,8 +439,9 @@ mergeInto(LibraryManager.library, {
this.playAudio = playAudio;
},
setVideoSize: function (w, h, dataPtr) {
// this.webGLCanvas.canvasElement.width = w;
// this.webGLCanvas.canvasElement.height = h;
//this.webGLCanvas.canvasElement.width = w;
//this.webGLCanvas.canvasElement.height = h;
if (this.webGLCanvas.isWebGL()) {
this.draw = function () {
var y = HEAPU32[dataPtr];

cocos.js (247)

@@ -0,0 +1,247 @@
mergeInto(LibraryManager.library, {
init: function () {
var AudioContext = (window.AudioContext || window.webkitAudioContext || window.mozAudioContext)
var context = AudioContext ? new AudioContext() : null
if (!context) {
cc.warn('no audio support')
}
const MVP = `
uniform mat4 viewProj;
attribute vec3 a_position;
attribute vec2 a_uv0;
varying vec2 uv0;
void main () {
vec4 pos = viewProj * vec4(a_position, 1);
gl_Position = pos;
uv0 = a_uv0;
}`;
const fragmentShaderScript = `
precision highp float;
varying highp vec2 uv0;
uniform sampler2D ySampler;
uniform sampler2D uSampler;
uniform sampler2D vSampler;
const mat4 YUV2RGB = mat4
(
1.1643828125, 0, 1.59602734375, -.87078515625,
1.1643828125, -.39176171875, -.81296875, .52959375,
1.1643828125, 2.017234375, 0, -1.081390625,
0, 0, 0, 1
);
void main(void) {
highp float y = texture2D(ySampler, uv0).r;
highp float u = texture2D(uSampler, uv0).r;
highp float v = texture2D(vSampler, uv0).r;
gl_FragColor = vec4(y, u, v, 1) * YUV2RGB;
}`
const renderer = cc.renderer;
const renderEngine = renderer.renderEngine;
const Texture2D = renderEngine.Texture2D;
const gfx = renderEngine.gfx;
const Pass = renderEngine.renderer.Pass;
const Technique = renderEngine.renderer.Technique;
const Effect = renderEngine.renderer.Effect;
const PARAM_TEXTURE_2D = renderEngine.renderer.PARAM_TEXTURE_2D;
const option = { format: gfx.TEXTURE_FMT_L8, minFilter: gfx.FILTER_LINEAR, magFilter: gfx.FILTER_LINEAR, wrapS: gfx.WRAP_CLAMP, wrapT: gfx.WRAP_CLAMP }
const YUV2RGB_Material = cc.Class({
extends: renderEngine.Material,
create(sprite) {
sprite.setState(0)
let name = "YUV2RGB";
let lib = renderer._forward._programLib;
!lib._templates[name] && lib.define(name, MVP, fragmentShaderScript, []);
let pass = new Pass(name);
pass.setDepth(false, false);
pass.setCullMode(gfx.CULL_NONE);
pass.setBlend(
gfx.BLEND_FUNC_ADD,
gfx.BLEND_SRC_ALPHA,
gfx.BLEND_ONE_MINUS_SRC_ALPHA,
gfx.BLEND_FUNC_ADD,
gfx.BLEND_SRC_ALPHA,
gfx.BLEND_ONE_MINUS_SRC_ALPHA
);
let mainTech = new Technique(
['transparent'],
[
{ name: 'ySampler', type: PARAM_TEXTURE_2D, val: this.ySampler = new Texture2D(renderer.device, option) },
{ name: 'uSampler', type: PARAM_TEXTURE_2D, val: this.uSampler = new Texture2D(renderer.device, option) },
{ name: 'vSampler', type: PARAM_TEXTURE_2D, val: this.vSampler = new Texture2D(renderer.device, option) },
],
[pass]
);
this._effect = this.effect = new Effect([mainTech], {}, []);
this._mainTech = mainTech;
this._texture = sprite.spriteFrame.getTexture();
this.updateHash();
sprite._state = 114;
sprite._material = sprite._renderData._material = this;
return this
},
isWebGL() { return true },
drawNextOutputPicture(width, height, croppingParams, data) {
this.updateHash(Math.random().toString(36))
this.ySampler.updateImage({ level: 0, width, height, image: data[0] })
this.uSampler.updateImage({ level: 0, width: width / 2, height: height / 2, image: data[1] })
this.vSampler.updateImage({ level: 0, width: width / 2, height: height / 2, image: data[2] })
}
})
Module.print = text => cc.log(text);
Module.printErr = text => cc.error(text)
Module.H5LiveClient = Module.H5LCBase.extend("H5LC", {
__construct: function (component) {
cc.dynamicAtlasManager.enabled = false;
let sprite = component.node.getComponent(cc.Sprite);
this.webGLCanvas = component._material = new YUV2RGB_Material().create(sprite);
this.__parent.__construct.call(this, this);
},
__destruct: function () {
this.__parent.__destruct.call(this);
},
onError: function (err) {
cc.log(this, err)
},
setStartTime: function (offset) {
cc.log("first timestamp:", offset)
var startTime = Date.now() - offset;
this.timespan = function (t) {
return t - (Date.now() - startTime);
}
function playVideo(_this) {
_this.decodeVideoBuffer();
}
this.playVideoBuffer = function (t) {
// console.log("setTimeout:", t);
return setTimeout(playVideo, t, this)
}
},
timespan: function (t) {
this.setStartTime(t);
return this.timespan(t);
},
resetTimeSpan: function () {
delete this.timespan;
},
play: function (url) {
var _this = this;
var reconnectCount = 0;
var reconnectTime = 2000;
cc.log('H5LiverClient play', url)
function setWebsocket() {
this.$play(url);
this.ws.onopen = function () {
reconnectCount = 0;
reconnectTime = 2000;
cc.log("ws open")
};
this.ws.onclose = function () {
_this.isPlaying = false;
_this.ws = null;
_this.$close();
if (reconnectCount > 3) return;
reconnectCount++;
cc.warn("ws reconnect after " + (reconnectTime / 1000 >> 0) + " second")
_this.reconnectId = setTimeout(function () {
cc.log("ws reconnecting :", reconnectCount);
reconnectTime *= 2;
setWebsocket.call(_this);
}, reconnectTime)
};
this.ws.onerror = function () {
cc.warn("ws error");
};
}
setWebsocket.call(this);
},
close: function () {
clearTimeout(this.reconnectId)
if (!this.isPlaying) return;
cc.log('close H5LiverClient')
this.isPlaying = false;
this.ws.onmessage = null;
this.ws.onclose = null;
this.ws.onerror = null;
this.ws.close();
this.ws = null;
this.$close();
delete this.timespan;
},
initAudio: function (frameCount, samplerate, channels, outputPtr) {
var isPlaying = false;
var audioBuffers = [];
var allFrameCount = frameCount * channels;
if (!context) return false;
var resampled = samplerate < 22050;
var audioBuffer = resampled ? context.createBuffer(channels, frameCount << 1, samplerate << 1) : context.createBuffer(channels, frameCount, samplerate);
var audioOutputArray = HEAP16.subarray(outputPtr, outputPtr + allFrameCount);
var playNextBuffer = function () {
isPlaying = false;
if (audioBuffers.length) {
playAudio(audioBuffers.shift());
}
if (audioBuffers.length > 1) audioBuffers.shift();
//console.log(audioBuffers.length)
};
var copyAudioOutputArray = resampled ? function (target) {
for (var i = 0; i < allFrameCount; i++) {
var j = i << 1;
target[j] = target[j + 1] = audioOutputArray[i] / 32768;
}
} : function (target) {
for (var i = 0; i < allFrameCount; i++) {
target[i] = audioOutputArray[i] / 32768;
}
};
var copyToCtxBuffer = channels > 1 ? function (fromBuffer) {
for (var channel = 0; channel < channels; channel++) {
var nowBuffering = audioBuffer.getChannelData(channel);
if (fromBuffer) {
for (var i = 0; i < frameCount; i++) {
nowBuffering[i] = fromBuffer[i * (channel + 1)];
}
} else {
for (var i = 0; i < frameCount; i++) {
nowBuffering[i] = audioOutputArray[i * (channel + 1)] / 32768;
}
}
}
} : function (fromBuffer) {
var nowBuffering = audioBuffer.getChannelData(0);
if (fromBuffer) nowBuffering.set(fromBuffer);
else copyAudioOutputArray(nowBuffering);
};
var playAudio = function (fromBuffer) {
if (isPlaying) {
var buffer = new Float32Array(resampled ? allFrameCount * 2 : allFrameCount);
copyAudioOutputArray(buffer);
audioBuffers.push(buffer);
return;
}
isPlaying = true;
copyToCtxBuffer(fromBuffer);
var source = context.createBufferSource();
source.buffer = audioBuffer;
source.connect(context.destination);
source.onended = playNextBuffer;
//setTimeout(playNextBuffer, audioBufferTime-audioBuffers.length*200);
source.start();
};
this.playAudio = playAudio;
},
setVideoSize: function (w, h, dataPtr) {
this.draw = function () {
var y = HEAPU32[dataPtr];
var u = HEAPU32[dataPtr + 1];
var v = HEAPU32[dataPtr + 2];
var outputArray = [HEAPU8.subarray(y, y + w * h), HEAPU8.subarray(u, u + (w * h >> 2)), HEAPU8.subarray(v, v + (w * h >> 2))];
this.webGLCanvas.drawNextOutputPicture(w, h, this.croppingParams, outputArray);
};
}
});
}
})
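
For reference, the YUV2RGB matrix in the fragment shader above is the usual BT.601 limited-range YCbCr-to-RGB conversion (column-major, applied as a row vector times matrix). A minimal CPU-side sketch of the same per-pixel mapping, for illustration only and not part of this diff, assuming y, u and v are already normalized to [0, 1] the way the sampled luminance textures are:

// Same coefficients as the YUV2RGB matrix in cocos.js, applied on the CPU.
function yuvToRgb(y, u, v) {
    var r = 1.1643828125 * y + 1.59602734375 * v - 0.87078515625;
    var g = 1.1643828125 * y - 0.39176171875 * u - 0.81296875 * v + 0.52959375;
    var b = 1.1643828125 * y + 2.017234375 * u - 1.081390625;
    // Clamp to [0, 1], as values would be when written to an 8-bit framebuffer.
    var clamp = function (x) { return Math.min(1, Math.max(0, x)); };
    return [clamp(r), clamp(g), clamp(b)];
}
// Example: neutral grey (y = u = v = 0.5) stays grey, roughly [0.509, 0.509, 0.509].

Note also that the chroma planes are quarter-size (w/2 by h/2), which is why setVideoSize slices w * h bytes for Y but only w * h >> 2 bytes for each of U and V.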

cocosCom.js (37)

@@ -0,0 +1,37 @@
cc.Class({
extends: cc.Component,
editor: CC_EDITOR && {
executeInEditMode: true,
requireComponent: cc.Sprite,
},
properties: {
url: "ws://localhost:8080/live/user1",
videoBuffer: 1,
autoPlay: false,
material: {
visible: false,
get() {
return this._material;
}
},
},
// LIFE-CYCLE CALLBACKS:
onLoad() {
},
start() {
this.h5lc = new Module.H5LiveClient(this);
this.h5lc.videoBuffer = this.videoBuffer;
if (this.autoPlay) this.h5lc.play(this.url);
},
play(url) {
this.h5lc.play(this.url = url);
},
close() {
this.h5lc.close();
},
onDestroy() {
this.h5lc.close()
}
});
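
The new cocosCom.js component exposes a small surface: url, videoBuffer and autoPlay properties plus play(url) and close(). A hedged usage sketch (not from this commit) of driving it from another script; it assumes the component keeps the class name "cocosCom" derived from its file name and sits on a node that also carries the required cc.Sprite:

// Hypothetical controller component, wired to two cc.Button click events in the editor.
cc.Class({
    extends: cc.Component,
    properties: {
        playerNode: cc.Node, // the node holding cc.Sprite plus the cocosCom component
    },
    onPlayClicked() {
        // URL reused from the component's default; replace with a real stream address.
        this.playerNode.getComponent('cocosCom').play('ws://localhost:8080/live/user1');
    },
    onStopClicked() {
        this.playerNode.getComponent('cocosCom').close();
    }
});

Alternatively, setting url and autoPlay in the editor lets the component connect on its own start() without any extra script.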

h5lc.py → make.py (48)

@@ -11,42 +11,42 @@ import getopt
from subprocess import Popen, PIPE, STDOUT
exec(open(os.path.expanduser('~/.emscripten'), 'r').read())
# sys.path.append(EMSCRIPTEN_ROOT)
opts, args = getopt.getopt(sys.argv[1:], "v:a:o:", ["wasm"])
args = {}
opts, args = getopt.getopt(sys.argv[1:], "v:a:o:", [
"wasm", "disable-audio", 'cocos'])
args = {'-a': 'mp3', '-o': 'public/H5LiveClient.js'}
for op, value in opts:
args[op] = value
video_codec = '-DUSE_'+(args['-v']).upper() if '-v' in args else ''
audio_codec = '-DUSE_'+(args['-a']).upper() if '-a' in args else '-DUSE_MP3'
audio_codec = '' if '--disable-audio' in args else '-DUSE_' + \
(args['-a']).upper()
sargs = {
'USE_PTHREADS': 0 if '--cocos' in args else 1,
'WASM': 1 if '--wasm' in args else 0,
'TOTAL_MEMORY': 67108864,
'ASSERTIONS': 0,
'NO_EXIT_RUNTIME': 1,
'ERROR_ON_UNDEFINED_SYMBOLS': 0,
'DISABLE_EXCEPTION_CATCHING':1
# 'INVOKE_RUN':1
# 'DEMANGLE_SUPPORT':1
}
emcc_args = [
# '-m32',
'-O3',
'--memory-init-file', '0',
'--llvm-opts', '3',
'-s', 'WASM='+('1' if '--wasm' in args else '0'),
#'-s', 'CORRECT_SIGNS=1',
#'-s', 'CORRECT_OVERFLOWS=1',
'-s', 'TOTAL_MEMORY=67108864',
#'-s', 'FAST_MEMORY=' + str(12*1024*1024),
#'-s', 'INVOKE_RUN=0',
'-s', 'ASSERTIONS=1',
#'-s DEMANGLE_SUPPORT=1',
# '-s', 'RELOOP=1',
#'-s', '''EXPORTED_FUNCTIONS=["_main"]''',
# '--closure', '1',
# '--llvm-lto','1',
'-s', 'NO_EXIT_RUNTIME=1',
'--bind',
'-I.', '-Ithirdparty/Broadway',
'-Ithirdparty',
video_codec, audio_codec,
# '-DUSE_LIBDE265',
# '-DUSE_AAC',
# '-DUSE_FFMPEG',
'--js-library', 'H5LiveClient.js', '-s ERROR_ON_UNDEFINED_SYMBOLS=0'
]
'--js-library', 'cocos.js' if '--cocos' in args else 'H5LiveClient.js',
]+["-s "+k+"="+str(v) for k, v in sargs.items()]
# if '--cocos' in args:
# emcc_args.append('--post-js cocosCom.js')
print 'building...'
output_file = args['-o'] if '-o' in args else 'public/H5LiveClient.js'
object_files = []
if video_codec == '-DUSE_LIBHEVC':
@@ -67,13 +67,15 @@ if audio_codec == '-DUSE_AAC':
elif audio_codec == '-DUSE_SPEEX':
emcc_args.append('-Ithirdparty/speex-1.2rc2/include')
object_files.append('libspeex.bc')
elif '--disable-audio' in args:
print 'disable-audio'
else:
object_files.append('mp3.bc')
print object_files
# emscripten.Building.emcc('H5LiveClient.cpp', [os.path.join(
# 'obj', x) for x in object_files]+emcc_args, output_file)
object_files = [os.path.join('obj', x) for x in object_files]
emcc_args = [os.path.join('obj', x) for x in object_files]+emcc_args
os.system('emcc H5LiveClient.cpp ' +
(' '.join(object_files+emcc_args)) + ' -o '+output_file)
(' '.join(emcc_args)) + ' -o '+args['-o'])
print 'done'
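
With the reworked option handling, make.py now recognizes --cocos (which selects cocos.js as the JS library and sets USE_PTHREADS=0) and --disable-audio alongside the existing -v, -a, -o and --wasm switches, with mp3 audio and public/H5LiveClient.js as defaults. Hypothetical invocations, assuming a working Emscripten setup in ~/.emscripten (the script is Python 2):

# Default browser build: asm.js output, mp3 audio, written to public/H5LiveClient.js.
python make.py

# Cocos build: wasm output, libhevc video decoder, custom (hypothetical) output path.
python make.py --cocos --wasm -v libhevc -o public/cocos/H5LiveClient.js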

public/264_mp3.js (14)

File diff suppressed because it is too large

public/H5LiveClient.js (18)

File diff suppressed because it is too large

public/H5LiveClient.worker.js (163)

@@ -0,0 +1,163 @@
// Copyright 2015 The Emscripten Authors. All rights reserved.
// Emscripten is available under two separate licenses, the MIT license and the
// University of Illinois/NCSA Open Source License. Both these licenses can be
// found in the LICENSE file.
// Pthread Web Worker startup routine:
// This is the entry point file that is loaded first by each Web Worker
// that executes pthreads on the Emscripten application.
// Thread-local:
var threadInfoStruct = 0; // Info area for this thread in Emscripten HEAP (shared). If zero, this worker is not currently hosting an executing pthread.
var selfThreadId = 0; // The ID of this thread. 0 if not hosting a pthread.
var parentThreadId = 0; // The ID of the parent pthread that launched this thread.
var tempDoublePtr = 0; // A temporary memory area for global float and double marshalling operations.
// Thread-local: Each thread has its own allocated stack space.
var STACK_BASE = 0;
var STACKTOP = 0;
var STACK_MAX = 0;
// These are system-wide memory area parameters that are set at main runtime startup in main thread, and stay constant throughout the application.
var buffer; // All pthreads share the same Emscripten HEAP as SharedArrayBuffer with the main execution thread.
var DYNAMICTOP_PTR = 0;
var DYNAMIC_BASE = 0;
var ENVIRONMENT_IS_PTHREAD = true;
var PthreadWorkerInit = {};
// performance.now() is specced to return a wallclock time in msecs since that Web Worker/main thread launched. However for pthreads this can cause
// subtle problems in emscripten_get_now() as this essentially would measure time from pthread_create(), meaning that the clocks between each threads
// would be wildly out of sync. Therefore sync all pthreads to the clock on the main browser thread, so that different threads see a somewhat
// coherent clock across each of them (+/- 0.1msecs in testing)
var __performance_now_clock_drift = 0;
// Cannot use console.log or console.error in a web worker, since that would risk a browser deadlock! https://bugzilla.mozilla.org/show_bug.cgi?id=1049091
// Therefore implement custom logging facility for threads running in a worker, which queue the messages to main thread to print.
var Module = {};
// When error objects propagate from Web Worker to main thread, they lose helpful call stack and thread ID information, so print out errors early here,
// before that happens.
this.addEventListener('error', function(e) {
if (e.message.indexOf('SimulateInfiniteLoop') != -1) return e.preventDefault();
var errorSource = ' in ' + e.filename + ':' + e.lineno + ':' + e.colno;
console.error('Pthread ' + selfThreadId + ' uncaught exception' + (e.filename || e.lineno || e.colno ? errorSource : "") + ': ' + e.message + '. Error object:');
console.error(e.error);
});
function threadPrint() {
var text = Array.prototype.slice.call(arguments).join(' ');
console.log(text);
}
function threadPrintErr() {
var text = Array.prototype.slice.call(arguments).join(' ');
console.error(text);
console.error(new Error().stack);
}
function threadAlert() {
var text = Array.prototype.slice.call(arguments).join(' ');
postMessage({cmd: 'alert', text: text, threadId: selfThreadId});
}
out = threadPrint;
err = threadPrintErr;
this.alert = threadAlert;
var wasmModule;
var wasmMemory;
this.onmessage = function(e) {
try {
if (e.data.cmd === 'load') { // Preload command that is called once per worker to parse and load the Emscripten code.
// Initialize the thread-local field(s):
tempDoublePtr = e.data.tempDoublePtr;
// Initialize the global "process"-wide fields:
DYNAMIC_BASE = e.data.DYNAMIC_BASE;
DYNAMICTOP_PTR = e.data.DYNAMICTOP_PTR;
buffer = e.data.buffer;
PthreadWorkerInit = e.data.PthreadWorkerInit;
if (typeof e.data.urlOrBlob === 'string') {
importScripts(e.data.urlOrBlob);
} else {
var objectUrl = URL.createObjectURL(e.data.urlOrBlob);
importScripts(objectUrl);
URL.revokeObjectURL(objectUrl);
}
if (typeof FS !== 'undefined' && typeof FS.createStandardStreams === 'function') FS.createStandardStreams();
postMessage({ cmd: 'loaded' });
} else if (e.data.cmd === 'objectTransfer') {
PThread.receiveObjectTransfer(e.data);
} else if (e.data.cmd === 'run') { // This worker was idle, and now should start executing its pthread entry point.
__performance_now_clock_drift = performance.now() - e.data.time; // Sync up to the clock of the main thread.
threadInfoStruct = e.data.threadInfoStruct;
__register_pthread_ptr(threadInfoStruct, /*isMainBrowserThread=*/0, /*isMainRuntimeThread=*/0); // Pass the thread address inside the asm.js scope to store it for fast access that avoids the need for a FFI out.
selfThreadId = e.data.selfThreadId;
parentThreadId = e.data.parentThreadId;
// Establish the stack frame for this thread in global scope
STACK_BASE = STACKTOP = e.data.stackBase;
STACK_MAX = STACK_BASE + e.data.stackSize;
// Call inside asm.js/wasm module to set up the stack frame for this pthread in asm.js/wasm module scope
Module['establishStackSpace'](e.data.stackBase, e.data.stackBase + e.data.stackSize);
PThread.receiveObjectTransfer(e.data);
PThread.setThreadStatus(_pthread_self(), 1/*EM_THREAD_STATUS_RUNNING*/);
try {
// pthread entry points are always of signature 'void *ThreadMain(void *arg)'
// Native codebases sometimes spawn threads with other thread entry point signatures,
// such as void ThreadMain(void *arg), void *ThreadMain(), or void ThreadMain().
// That is not acceptable per C/C++ specification, but x86 compiler ABI extensions
// enable that to work. If you find the following line to crash, either change the signature
// to "proper" void *ThreadMain(void *arg) form, or try linking with the Emscripten linker
// flag -s EMULATE_FUNCTION_POINTER_CASTS=1 to add in emulation for this x86 ABI extension.
var result = Module['dynCall_ii'](e.data.start_routine, e.data.arg);
} catch(e) {
if (e === 'Canceled!') {
PThread.threadCancel();
return;
} else if (e === 'SimulateInfiniteLoop' || e === 'pthread_exit') {
return;
} else {
Atomics.store(HEAPU32, (threadInfoStruct + 4 /*C_STRUCTS.pthread.threadExitCode*/ ) >> 2, (e instanceof ExitStatus) ? e.status : -2 /*A custom entry specific to Emscripten denoting that the thread crashed.*/);
Atomics.store(HEAPU32, (threadInfoStruct + 0 /*C_STRUCTS.pthread.threadStatus*/ ) >> 2, 1); // Mark the thread as no longer running.
_emscripten_futex_wake(threadInfoStruct + 0 /*C_STRUCTS.pthread.threadStatus*/, 0x7FFFFFFF/*INT_MAX*/); // Wake all threads waiting on this thread to finish.
if (!(e instanceof ExitStatus)) throw e;
}
}
// The thread might have finished without calling pthread_exit(). If so, then perform the exit operation ourselves.
// (This is a no-op if explicit pthread_exit() had been called prior.)
if (!Module['noExitRuntime']) PThread.threadExit(result);
} else if (e.data.cmd === 'cancel') { // Main thread is asking for a pthread_cancel() on this thread.
if (threadInfoStruct && PThread.thisThreadCancelState == 0/*PTHREAD_CANCEL_ENABLE*/) {
PThread.threadCancel();
}
} else if (e.data.target === 'setimmediate') {
// no-op
} else if (e.data.cmd === 'processThreadQueue') {
if (threadInfoStruct) { // If this thread is actually running?
_emscripten_current_thread_process_queued_calls();
}
} else {
err('worker.js received unknown command ' + e.data.cmd);
console.error(e.data);
}
} catch(e) {
console.error('worker.js onmessage() captured an uncaught exception: ' + e);
console.error(e.stack);
throw e;
}
}

public/ff_mp3.js (30)

File diff suppressed because it is too large

public/index.html (5)

@@ -8,7 +8,7 @@
</head>
<body>
<canvas id="canvas" style="background-color: #0D0E1B"></canvas>
<canvas id="canvas" style="background-color: #0D0E1B" width="640" height="352"></canvas>
<button id="connectbn" style="width: 100px;height: 100px" onclick="test()">播放1</button>
<button onclick="h5lc.close()" style="width: 100px; height: 100px">结束</button>
<div id="logout">
@@ -22,7 +22,8 @@
function test() {
// h5lc.play("ws://10.24.13.58:8088/live/user1.flv", canvas)
h5lc.play("ws://localhost:8080/live/user1", canvas)
h5lc.play("wss://pulls.1234326.cn/live/L01.flv", canvas)
// h5lc.play("ws://localhost:8080/live/user1", canvas)
// h5lc.play("ws://test.qihaipi.com/gnddragon/test.flv", canvas)
}

settings.js (1254)

File diff suppressed because it is too large

thirdparty/libhevc (1)

@@ -0,0 +1 @@
Subproject commit d961f28a3ee95abef7322d7bea0c084b939bff16