
aac rebuild

worker committed 8 years ago
commit 59674321cf
  1. AudioDecoder.h (24)
  2. FlvClient.cpp (149)
  3. MonaClient.cpp (121)
  4. NetStream.h (4)
  5. aacDecoder/make.py (68)
  6. buildFlv.py (12)
  7. buildFlv.pyc (BIN)
  8. js/FlvClient.js (20)
  9. js/FlvMain.js (140)
  10. js/MonaClient.js (17)
  11. js/MonaMain.js (112)
  12. js/index - 副本.html (4)
  13. libfdk-aac (1)
  14. make.py (7)

AudioDecoder.h (24)

@@ -4,6 +4,7 @@
#endif
#ifdef USE_AAC
#include "aacDecoder/include/neaacdec.h"
// #include "libfdk-aac/libAACdec/include/aacdecoder_lib.h"
#endif
#ifdef USE_MP3
#include "libmad/mad.h"
@@ -32,12 +33,14 @@ class AudioDecoder
{
int bufferLength;
int bufferFilled;
public:
u8 *outputBuffer;
#ifdef USE_AAC
faacDecHandle faacHandle;
//faacDecConfigurationPtr faacConfiguration;
faacDecHandle faacHandle;
// HANDLE_AACDECODER aacHandler;
// UCHAR* inBuffer[FILEREAD_MAX_LAYERS];
// UINT inBufferLength[FILEREAD_MAX_LAYERS] = {0};
// UINT bytesValid[FILEREAD_MAX_LAYERS] = {0};
#endif
#ifdef USE_SPEEX
i16 *audioOutput;
@@ -60,7 +63,7 @@ class AudioDecoder
#endif
#ifdef USE_AAC
faacHandle = faacDecOpen();
//faacConfiguration = faacDecGetCurrentConfiguration(faacHandle);
// aacHandler = aacDecoder_Open(TT_MP4_ADIF,1);
emscripten_log(0, "aac init! %d", faacHandle);
#endif
emscripten_log(0, "audio init! %d", this);
@@ -69,6 +72,7 @@ emscripten_log(0, "audio init! %d", this);
{emscripten_log(0, "audio decoder release\n");
#ifdef USE_AAC
faacDecClose(faacHandle);
// aacDecoder_Close(aacHandler);
#endif
#ifdef USE_SPEEX
speex_decoder_destroy(speexState);
@@ -145,6 +149,11 @@ emscripten_log(0, "audio init! %d", this);
frame_info.samplerate);
*/
int samplesBytes = frame_info.samples << 1;
// inBuffer[0] = (unsigned char *)input.point();
// bytesValid[0] = inBufferLength[0] = input.length();
// aacDecoder_Fill(aacHandler,inBuffer, inBufferLength,bytesValid);
// auto decoderErr = aacDecoder_DecodeFrame(aacHandler,,0);
memcpy(output, pcm_data, samplesBytes);
return samplesBytes;
}
@@ -152,7 +161,12 @@ emscripten_log(0, "audio init! %d", this);
else
{
unsigned long samplerate;
unsigned char channels;
unsigned char channels;
// aacDecoder_ConfigRaw(aacHandler,(unsigned char *)input.point(), 4)
// auto info = aacDecoder_GetStreamInfo(aacHandler);
// samplerate = info->sampleRate;
// channels = info->numChannels;
faacDecInit2(faacHandle, (unsigned char *)input.point(), 4, &samplerate, &channels);
emscripten_log(0, "aac samplerate:%d channels:%d", samplerate, channels);
}
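For context (not part of this commit): the commented-out aacDecoder_* lines above sketch a switch from FAAD2 to libfdk-aac, which the new libfdk-aac submodule added below would provide. A minimal sketch of that path, assuming the same raw AAC payload the FAAD2 branch reads via input.point()/input.length(); the helper name and the pcmOut/pcmOutCapacity parameters are illustrative, only the aacDecoder_* calls are real libfdk-aac API:

// Hedged sketch of the fdk-aac decode path hinted at by the commented-out code.
#include "libfdk-aac/libAACdec/include/aacdecoder_lib.h"

// One-time setup, the fdk-aac analogue of faacDecOpen() + faacDecInit2():
//   HANDLE_AACDECODER dec = aacDecoder_Open(TT_MP4_RAW, 1);       // raw AAC payload, one layer
//   UCHAR *conf[1]   = { (UCHAR *)input.point() };                // AudioSpecificConfig bytes
//   UINT  confLen[1] = { (UINT)input.length() };
//   aacDecoder_ConfigRaw(dec, conf, confLen);
//   CStreamInfo *info = aacDecoder_GetStreamInfo(dec);            // info->sampleRate, info->numChannels

// Per-frame decode, the analogue of the faacDecDecode() branch above:
static int decodeAacFrameWithFdk(HANDLE_AACDECODER dec,
                                 unsigned char *frame, unsigned int frameLen,
                                 INT_PCM *pcmOut, int pcmOutCapacity /* in INT_PCM samples */)
{
    UCHAR *inBuffer[1]       = { frame };
    UINT   inBufferLength[1] = { frameLen };
    UINT   bytesValid[1]     = { frameLen };
    aacDecoder_Fill(dec, inBuffer, inBufferLength, bytesValid);    // push compressed bytes
    AAC_DECODER_ERROR err = aacDecoder_DecodeFrame(dec, pcmOut, pcmOutCapacity, 0);
    if (err != AAC_DEC_OK) return 0;
    CStreamInfo *info = aacDecoder_GetStreamInfo(dec);
    // Bytes of 16-bit PCM written, the counterpart of frame_info.samples << 1 above.
    return info->frameSize * info->numChannels * (int)sizeof(INT_PCM);
}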

FlvClient.cpp (149)

@@ -1,154 +1,9 @@
#include "base.h"
#include "FlvDecoder.h"
extern "C" {extern void init(void);}
int main()
{
EM_ASM(
var fc = window["FlvClient"] = Module["FlvClient"];
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var context = new window.AudioContext();
if (/(iPhone|iPad|iPod|iOS|Safari)/i.test(navigator.userAgent)) {
function _unlock(){
context.resume();
var source = context.createBufferSource();
source.buffer = context.createBuffer(1, 1, 22050);
source.connect(context.destination);
source.start(0);
}
document.addEventListener("mousedown", _unlock, true);
document.addEventListener("touchend", _unlock, true);
}
fc.prototype.checkVideoBuffer = function(t) {
return setTimeout(this.decodeVideoBuffer.bind(this), t);
};
fc.prototype.onNetStatus = function(info){
};
fc.prototype.initAudio = function(frameCount, samplerate, channels) {
var isPlaying = false;
var audioBuffers = [];
var allFrameCount = frameCount*channels;
var audioBufferTime = frameCount * 1000 /samplerate;
//var AudioContext = window.AudioContext || window.webkitAudioContext || window.mozAudioContext || window.msAudioContext;
//var audioCtx = this.audioContext;//new AudioContext();
if (!context)return false;
var setCurrentAudioData;
var resampled = samplerate < 22050;
var audioBuffer = resampled? context.createBuffer(channels, frameCount<<1, samplerate<<1) : audioCtx.createBuffer(channels, frameCount, samplerate);
var outputPtr = this._initAudio(frameCount, channels);
var audioOutputArray = HEAP16.subarray(outputPtr, outputPtr + allFrameCount);
var playNextBuffer = function() {
isPlaying = false;
if (audioBuffers.length) {
playAudio(audioBuffers.shift());
}
if(audioBuffers.length>1)audioBuffers.shift();
//console.log(audioBuffers.length)
};
var copyAudioOutputArray = resampled ? function(target)
{
for (var i = 0; i < allFrameCount; i++) {
var j = i << 1;
target[j] = target[j+1] = audioOutputArray[i] / 32768;
}
} : function(target)
{
for (var i = 0; i < allFrameCount; i++) {
target[i] = audioOutputArray[i] / 32768;
}
};
var copyToCtxBuffer =channels>1? function(fromBuffer)
{
for (var channel = 0; channel < channels; channel++)
{
var nowBuffering = audioBuffer.getChannelData(channel);
if (fromBuffer)
{
for (var i = 0; i < frameCount; i++) {
nowBuffering[i] = fromBuffer[i* (channel + 1)];
}
}
else
{
for (var i = 0; i < frameCount; i++) {
nowBuffering[i] = audioOutputArray[i* (channel + 1)] / 32768;
}
}
}
}:function(fromBuffer)
{
var nowBuffering = audioBuffer.getChannelData(0);
if (fromBuffer)nowBuffering.set(fromBuffer);
else copyAudioOutputArray(nowBuffering);
};
var playAudio = function(fromBuffer) {
if (isPlaying) {
var buffer = new Float32Array(resampled ? allFrameCount *2: allFrameCount);
copyAudioOutputArray(buffer);
audioBuffers.push(buffer);
return;
}
isPlaying = true;
copyToCtxBuffer(fromBuffer);
var source = context.createBufferSource();
source.buffer = audioBuffer;
source.connect(context.destination);
source.onended = playNextBuffer;
//setTimeout(playNextBuffer, audioBufferTime-audioBuffers.length*200);
source.start();
};
this.playAudio = playAudio;
};
fc.prototype.play = function(url,webGLCanvas) {
var canvas = webGLCanvas.canvasElement;
this.setVideoSize = function(w, h, dataPtr)
{
canvas.width = w;
canvas.height = h;
if (webGLCanvas.isWebGL())
{
this.draw = function() {
var y = HEAPU32[dataPtr];
var u = HEAPU32[dataPtr + 1];
var v = HEAPU32[dataPtr + 2];
var outputArray = [HEAPU8.subarray(y, y + w*h), HEAPU8.subarray(u, u + (w*h >> 2)), HEAPU8.subarray(v, v + (w*h >> 2))];
webGLCanvas.drawNextOutputPicture(w, h, null, outputArray);
};
}else
{
var outputArray = HEAPU8.subarray(dataPtr, dataPtr + (w*h << 2));
webGLCanvas.initRGB(w, h);
this.draw = function() {
webGLCanvas.drawNextOutputPicture(w, h, null, outputArray);
};
}
}.bind(this);
this.getWebGL = function()
{
return webGLCanvas;
};
var _this = this;
var ws = new WebSocket(url);
ws.onmessage = function(data){
_this.$onWsMessage(data);
};
ws.onopen = function(){
Module.print("websocket open");
};
ws.binaryType = "arraybuffer";
if(this.onWsError)ws.onerror = this.onWsError;
ws.onclose = function(){
_this.$close();
};
this.close = function(){
ws.close();
};
this.$play(this, webGLCanvas.isWebGL());
};
);
init();
return 0;
}
class FlvClient

MonaClient.cpp (121)

@@ -1,126 +1,9 @@
#include "MonaClient.h"
#include "NetStream.h"
extern "C" {extern void init(void);}
int main()
{
EM_ASM(
var mc = window["MonaClient"] = Module["MonaClient"];
var ns = window["NetStream"] = Module["NetStream"];
ns.prototype.attachCanvas = function(canvas) {
var webGLCanvas = new WebGLCanvas(canvas, Module["noWebGL"], {});
this.setVideoSize = function(w, h, dataPtr)
{
canvas.width = w;
canvas.height = h;
if (webGLCanvas.isWebGL())
{
this.draw = function() {
var y = HEAPU32[dataPtr];
var u = HEAPU32[dataPtr + 1];
var v = HEAPU32[dataPtr + 2];
var outputArray = [HEAPU8.subarray(y, y + w*h), HEAPU8.subarray(u, u + (w*h >> 2)), HEAPU8.subarray(v, v + (w*h >> 2))];
webGLCanvas.drawNextOutputPicture(w, h, null, outputArray);
};
}else
{
var outputArray = HEAPU8.subarray(dataPtr, dataPtr + (w*h << 2));
webGLCanvas.initRGB(w, h);
this.draw = function() {
webGLCanvas.drawNextOutputPicture(w, h, null, outputArray);
};
}
}.bind(this);
this.getWebGL = function()
{
return webGLCanvas;
};
this._attachCanvas(this, webGLCanvas.isWebGL()); };
ns.prototype.checkVideoBuffer = function(t) { return setTimeout(this.decodeVideoBuffer.bind(this), t); };
ns.prototype.onNetStatus = function(info){
};
ns.prototype.initAudio = function(frameCount, samplerate, channels) {
var isPlaying = false;
var audioBuffers = [];
var allFrameCount = frameCount*channels;
var audioBufferTime = frameCount * 1000 /samplerate;
//var AudioContext = window.AudioContext || window.webkitAudioContext || window.mozAudioContext || window.msAudioContext;
var audioCtx = this.audioContext;//new AudioContext();
if (!audioCtx)return false;
var setCurrentAudioData;
var resampled = samplerate < 22050;
var audioBuffer = resampled? audioCtx.createBuffer(channels, frameCount<<1, samplerate<<1) : audioCtx.createBuffer(channels, frameCount, samplerate);
var outputPtr = this._initAudio(this, frameCount, channels);
var audioOutputArray = HEAP16.subarray(outputPtr, outputPtr + allFrameCount);
var playNextBuffer = function() {
isPlaying = false;
if (audioBuffers.length) {
playAudio(audioBuffers.shift());
}
if(audioBuffers.length>1)audioBuffers.shift();
//console.log(audioBuffers.length)
};
var copyAudioOutputArray = resampled ? function(target)
{
for (var i = 0; i < allFrameCount; i++) {
var j = i << 1;
target[j] = target[j+1] = audioOutputArray[i] / 32768;
}
} : function(target)
{
for (var i = 0; i < allFrameCount; i++) {
target[i] = audioOutputArray[i] / 32768;
}
};
var copyToCtxBuffer =channels>1? function(fromBuffer)
{
for (var channel = 0; channel < channels; channel++)
{
var nowBuffering = audioBuffer.getChannelData(channel);
if (fromBuffer)
{
for (var i = 0; i < frameCount; i++) {
nowBuffering[i] = fromBuffer[i* (channel + 1)];
}
}
else
{
for (var i = 0; i < frameCount; i++) {
nowBuffering[i] = audioOutputArray[i* (channel + 1)] / 32768;
}
}
}
}:function(fromBuffer)
{
var nowBuffering = audioBuffer.getChannelData(0);
if (fromBuffer)nowBuffering.set(fromBuffer);
else copyAudioOutputArray(nowBuffering);
};
var playAudio = function(fromBuffer) {
if (isPlaying) {
var buffer = new Float32Array(resampled ? allFrameCount *2: allFrameCount);
copyAudioOutputArray(buffer);
audioBuffers.push(buffer);
return;
}
isPlaying = true;
copyToCtxBuffer(fromBuffer);
var source = audioCtx.createBufferSource();
source.buffer = audioBuffer;
source.connect(audioCtx.destination);
source.onended = playNextBuffer;
//setTimeout(playNextBuffer, audioBufferTime-audioBuffers.length*200);
source.start();
};
this.playAudio = playAudio;
};
mc.prototype.connect = function(url, appName, roomName) {
var ws = this.$connect(this, url, appName, roomName || "");
this.send = function(ptr, length, output) {
var outputArray = HEAPU8.subarray(ptr, ptr + length);
ws.send(outputArray);
}; };);
init();
return 0;
}
MonaClient::MonaClient()

NetStream.h (4)

@@ -137,12 +137,12 @@ public:
}
bool decodeAudio(clock_t _timestamp, MemoryStream & data) {
unsigned char flag = 0;
data.readB<1>(flag);
auto audioType = flag >> 4;
if(audioDecoder->decode(audioType, data))
if(audioDecoder->decode(audioType, data)){
jsThis->call<void>("playAudio");
}
return true;
}
int initAudio(val _this,int frameCount, int channels) {
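For reference (not part of this commit): the flag byte read in decodeAudio is the FLV AudioTagHeader, and audioType is its SoundFormat field, which is what selects AAC, MP3 or Speex inside audioDecoder->decode. A minimal decoding of that byte per the FLV spec, using the same flag variable read by data.readB<1>(flag):

// FLV AudioTagHeader: SoundFormat(4 bits) | SoundRate(2) | SoundSize(1) | SoundType(1)
unsigned char soundFormat = flag >> 4;        // 2 = MP3, 10 = AAC, 11 = Speex
unsigned char soundRate   = (flag >> 2) & 3;  // 0 = 5.5 kHz, 1 = 11 kHz, 2 = 22 kHz, 3 = 44 kHz
unsigned char soundSize   = (flag >> 1) & 1;  // 0 = 8-bit, 1 = 16-bit samples
unsigned char soundType   = flag & 1;         // 0 = mono, 1 = stereo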

aacDecoder/make.py (68)

@@ -35,20 +35,55 @@ emcc_args = [
# '--js-transform', 'python appender.py'
]
source_files = []
FileNames=os.listdir(os.getcwd())
if (len(FileNames)>0):
for fn in FileNames:
if(os.path.splitext(fn)[1]=='.c'):
source_files.append(fn)
# source_files = []
# FileNames=os.listdir(os.getcwd())
# if (len(FileNames)>0):
# for fn in FileNames:
# if(os.path.splitext(fn)[1]=='.c'):
# source_files.append(fn)
''''
source_files =[
'bits.c','cfft.c','common.c','decoder.c','drm_dec.c','drc.c'
'ps_dec.c','sbr_dec.c','syntax.c','mp4.c','huffman.c','specrec.c','pulse.c',
'filtbank.c',''
"bits.c",
"cfft.c",
"common.c",
"decoder.c",
"drc.c",
"drm_dec.c",
"error.c",
"filtbank.c",
"hcr.c",
"huffman.c",
"ic_predict.c",
"is.c",
"lt_predict.c",
"mdct.c",
"mp4.c",
"ms.c",
"output.c",
"pns.c",
"ps_dec.c",
"ps_syntax.c",
"pulse.c",
"rvlc.c",
"sbr_dct.c",
"sbr_dec.c",
"sbr_e_nf.c",
"sbr_fbt.c",
"sbr_hfadj.c",
"sbr_hfgen.c",
"sbr_huff.c",
"sbr_qmf.c",
"sbr_syntax.c",
"sbr_tf_grid.c",
"specrec.c",
"ssr.c",
"ssr_fb.c",
"ssr_ipqf.c",
"syntax.c",
"tns.c",
]
'''
#build_files = ['bits.c','syntax.c','decoder.c']
build_files =source_files
for file in build_files:
@@ -58,9 +93,10 @@ for file in build_files:
object_files = [os.path.join('obj', x.replace('.c', '.o')) for x in source_files]
print 'link -> %s' % 'aac.bc'
emscripten.Building.link(object_files, '../obj/aac.bc')
print 'link -> %s' % 'MonaClient.bc'
emscripten.Building.link(['../obj/avc.bc','../obj/aac.bc','../obj/libspeex.bc','../obj/MonaClient.o'], '../obj/MonaClient.bc')
print 'emcc -> %s' % 'MonaClient.js'
emscripten.Building.emcc('../obj/MonaClient.bc', emcc_args, '../js/MonaClient.js')
os.system('emcc '+ (' '.join(object_files)) +' -o ../obj/aac.bc')
# emscripten.Building.link(object_files, '../obj/aac.bc')
# print 'link -> %s' % 'MonaClient.bc'
# emscripten.Building.link(['../obj/avc.bc','../obj/aac.bc','../obj/libspeex.bc','../obj/MonaClient.o'], '../obj/MonaClient.bc')
# print 'emcc -> %s' % 'MonaClient.js'
# emscripten.Building.emcc('../obj/MonaClient.bc', emcc_args, '../js/MonaClient.js')
print 'done'

buildFlv.py (12)

@@ -31,10 +31,10 @@ emcc_args = [
#'-Ispeex-1.2rc2/include',
'-IBroadway','-I.',
#'-I../libid3tag',
'-DUSE_MP3',
#'-DUSE_MP3',
#'-DUSE_H265',
#'-DUSE_AAC',
#'--js-library', 'library.js'
'-DUSE_AAC',
'--js-library', os.path.join('js', 'FlvMain.js')
# '--js-transform', 'python appender.py'
]
@@ -47,8 +47,10 @@ if 'target.bc' in object_files:
object_files.remove('target.bc')
object_files.remove('h265.bc')
object_files = [os.path.join('obj', x) for x in object_files]
print object_files
emscripten.Building.link(object_files, 'obj/target.bc')
print 'emcc '+ (' '.join(object_files)) +' -o obj/target.bc'
# emscripten.Building.link(object_files, 'obj/target.bc')
os.system('emcc '+ (' '.join(object_files)) +' -o obj/target.bc')
print 'emcc %s -> %s' % ('target.bc', 'FlvClient.js')
emscripten.Building.emcc('obj/target.bc', emcc_args, 'js/FlvClient.js')

buildFlv.pyc (BIN)

js/FlvClient.js (20)
File diff suppressed because it is too large

js/FlvMain.js (140)

@@ -0,0 +1,140 @@
mergeInto(LibraryManager.library, {
init: function() {
var fc = window["FlvClient"] = Module["FlvClient"];
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var context = new window.AudioContext();
if (/(iPhone|iPad|iPod|iOS|Safari)/i.test(navigator.userAgent)) {
function _unlock() {
context.resume();
var source = context.createBufferSource();
source.buffer = context.createBuffer(1, 1, 22050);
source.connect(context.destination);
if (source.noteOn)
source.noteOn(0);
else
source.start(0);
}
document.addEventListener("mousedown", _unlock, true);
document.addEventListener("touchend", _unlock, true);
}
fc.prototype.checkVideoBuffer = function(t) {
return setTimeout(this.decodeVideoBuffer.bind(this), t);
};
fc.prototype.onNetStatus = function(info) {
};
fc.prototype.initAudio = function(frameCount, samplerate, channels) {
var isPlaying = false;
var audioBuffers = [];
var allFrameCount = frameCount * channels;
var audioBufferTime = frameCount * 1000 / samplerate;
//var AudioContext = window.AudioContext || window.webkitAudioContext || window.mozAudioContext || window.msAudioContext;
//var audioCtx = this.audioContext;//new AudioContext();
if (!context) return false;
var setCurrentAudioData;
var resampled = samplerate < 22050;
var audioBuffer = resampled ? context.createBuffer(channels, frameCount << 1, samplerate << 1) : context.createBuffer(channels, frameCount, samplerate);
var outputPtr = this._initAudio(frameCount, channels);
var audioOutputArray = HEAP16.subarray(outputPtr, outputPtr + allFrameCount);
var playNextBuffer = function() {
isPlaying = false;
if (audioBuffers.length) {
playAudio(audioBuffers.shift());
}
if (audioBuffers.length > 1) audioBuffers.shift();
//console.log(audioBuffers.length)
};
var copyAudioOutputArray = resampled ? function(target) {
for (var i = 0; i < allFrameCount; i++) {
var j = i << 1;
target[j] = target[j + 1] = audioOutputArray[i] / 32768;
}
} : function(target) {
for (var i = 0; i < allFrameCount; i++) {
target[i] = audioOutputArray[i] / 32768;
}
};
var copyToCtxBuffer = channels > 1 ? function(fromBuffer) {
for (var channel = 0; channel < channels; channel++) {
var nowBuffering = audioBuffer.getChannelData(channel);
if (fromBuffer) {
for (var i = 0; i < frameCount; i++) {
nowBuffering[i] = fromBuffer[i * (channel + 1)];
}
} else {
for (var i = 0; i < frameCount; i++) {
nowBuffering[i] = audioOutputArray[i * (channel + 1)] / 32768;
}
}
}
} : function(fromBuffer) {
var nowBuffering = audioBuffer.getChannelData(0);
if (fromBuffer) nowBuffering.set(fromBuffer);
else copyAudioOutputArray(nowBuffering);
};
var playAudio = function(fromBuffer) {
if (isPlaying) {
var buffer = new Float32Array(resampled ? allFrameCount * 2 : allFrameCount);
copyAudioOutputArray(buffer);
audioBuffers.push(buffer);
return;
}
isPlaying = true;
copyToCtxBuffer(fromBuffer);
var source = context.createBufferSource();
source.buffer = audioBuffer;
source.connect(context.destination);
source.onended = playNextBuffer;
//setTimeout(playNextBuffer, audioBufferTime-audioBuffers.length*200);
source.start();
};
this.playAudio = playAudio;
};
fc.prototype.play = function(url, webGLCanvas) {
var canvas = webGLCanvas.canvasElement;
this.setVideoSize = function(w, h, dataPtr) {
canvas.width = w;
canvas.height = h;
if (webGLCanvas.isWebGL()) {
this.draw = function() {
var y = HEAPU32[dataPtr];
var u = HEAPU32[dataPtr + 1];
var v = HEAPU32[dataPtr + 2];
var outputArray = [HEAPU8.subarray(y, y + w * h), HEAPU8.subarray(u, u + (w * h >> 2)), HEAPU8.subarray(v, v + (w * h >> 2))];
webGLCanvas.drawNextOutputPicture(w, h, null, outputArray);
};
} else {
var outputArray = HEAPU8.subarray(dataPtr, dataPtr + (w * h << 2));
webGLCanvas.initRGB(w, h);
this.draw = function() {
webGLCanvas.drawNextOutputPicture(w, h, null, outputArray);
};
}
}.bind(this);
this.getWebGL = function() {
return webGLCanvas;
};
var _this = this;
var ws = new WebSocket(url);
ws.onmessage = function(data) {
_this.$onWsMessage(data);
};
ws.onopen = function() {
Module.print("websocket open");
};
ws.binaryType = "arraybuffer";
if (this.onWsError) ws.onerror = this.onWsError;
ws.onclose = function() {
_this.$close();
};
this.close = function() {
ws.close();
};
this.$play(this, webGLCanvas.isWebGL());
};
}
})

js/MonaClient.js (17)
File diff suppressed because it is too large

js/MonaMain.js (112)

@@ -0,0 +1,112 @@
mergeInto(LibraryManager.library, {
init: function() {
var mc = window["MonaClient"] = Module["MonaClient"];
var ns = window["NetStream"] = Module["NetStream"];
ns.prototype.attachCanvas = function(canvas) {
var webGLCanvas = new WebGLCanvas(canvas, Module["noWebGL"], {});
this.setVideoSize = function(w, h, dataPtr) {
canvas.width = w;
canvas.height = h;
if (webGLCanvas.isWebGL()) {
this.draw = function() {
var y = HEAPU32[dataPtr];
var u = HEAPU32[dataPtr + 1];
var v = HEAPU32[dataPtr + 2];
var outputArray = [HEAPU8.subarray(y, y + w * h), HEAPU8.subarray(u, u + (w * h >> 2)), HEAPU8.subarray(v, v + (w * h >> 2))];
webGLCanvas.drawNextOutputPicture(w, h, null, outputArray);
};
} else {
var outputArray = HEAPU8.subarray(dataPtr, dataPtr + (w * h << 2));
webGLCanvas.initRGB(w, h);
this.draw = function() {
webGLCanvas.drawNextOutputPicture(w, h, null, outputArray);
};
}
}.bind(this);
this.getWebGL = function() {
return webGLCanvas;
};
this._attachCanvas(this, webGLCanvas.isWebGL());
};
ns.prototype.checkVideoBuffer = function(t) { return setTimeout(this.decodeVideoBuffer.bind(this), t); };
ns.prototype.onNetStatus = function(info) {
};
ns.prototype.initAudio = function(frameCount, samplerate, channels) {
var isPlaying = false;
var audioBuffers = [];
var allFrameCount = frameCount * channels;
var audioBufferTime = frameCount * 1000 / samplerate;
//var AudioContext = window.AudioContext || window.webkitAudioContext || window.mozAudioContext || window.msAudioContext;
var audioCtx = this.audioContext; //new AudioContext();
if (!audioCtx) return false;
var setCurrentAudioData;
var resampled = samplerate < 22050;
var audioBuffer = resampled ? audioCtx.createBuffer(channels, frameCount << 1, samplerate << 1) : audioCtx.createBuffer(channels, frameCount, samplerate);
var outputPtr = this._initAudio(this, frameCount, channels);
var audioOutputArray = HEAP16.subarray(outputPtr, outputPtr + allFrameCount);
var playNextBuffer = function() {
isPlaying = false;
if (audioBuffers.length) {
playAudio(audioBuffers.shift());
}
if (audioBuffers.length > 1) audioBuffers.shift();
console.log(audioBuffers.length)
};
var copyAudioOutputArray = resampled ? function(target) {
for (var i = 0; i < allFrameCount; i++) {
var j = i << 1;
target[j] = target[j + 1] = audioOutputArray[i] / 32768;
}
} : function(target) {
for (var i = 0; i < allFrameCount; i++) {
target[i] = audioOutputArray[i] / 32768;
}
};
var copyToCtxBuffer = channels > 1 ? function(fromBuffer) {
for (var channel = 0; channel < channels; channel++) {
var nowBuffering = audioBuffer.getChannelData(channel);
if (fromBuffer) {
for (var i = 0; i < frameCount; i++) {
nowBuffering[i] = fromBuffer[i * (channel + 1)];
}
} else {
for (var i = 0; i < frameCount; i++) {
nowBuffering[i] = audioOutputArray[i * (channel + 1)] / 32768;
}
}
}
} : function(fromBuffer) {
var nowBuffering = audioBuffer.getChannelData(0);
if (fromBuffer) nowBuffering.set(fromBuffer);
else copyAudioOutputArray(nowBuffering);
};
var playAudio = function(fromBuffer) {
if (isPlaying) {
var buffer = new Float32Array(resampled ? allFrameCount * 2 : allFrameCount);
copyAudioOutputArray(buffer);
audioBuffers.push(buffer);
return;
}
isPlaying = true;
copyToCtxBuffer(fromBuffer);
var source = audioCtx.createBufferSource();
source.buffer = audioBuffer;
source.connect(audioCtx.destination);
source.onended = playNextBuffer;
//setTimeout(playNextBuffer, audioBufferTime-audioBuffers.length*200);
source.start();
};
this.playAudio = playAudio;
};
mc.prototype.connect = function(url, appName, roomName) {
var ws = this.$connect(this, url, appName, roomName || "");
this.send = function(ptr, length, output) {
var outputArray = HEAPU8.subarray(ptr, ptr + length);
ws.send(outputArray);
};
};
}
})

js/index - 副本.html (4)

@@ -56,8 +56,8 @@
//ns.initAudio(50*320, 16000 , 1);
//mp3
//ns.initAudio(12 * 576, 22050, 1);
//aac
//ns.initAudio(12 * 1024, 44100, 2);
// aac
ns.initAudio(12 * 1024, 44100, 2);
ns.play(n)
}

libfdk-aac (1)

@@ -0,0 +1 @@
Subproject commit a3d11689433a046ad57add8ea22dedceb2fe722d

make.py (7)

@@ -35,8 +35,8 @@ emcc_args = [
#'-I../libid3tag',
#'-DUSE_MP3',
#'-DUSE_H265',
#'-DUSE_AAC',
#'--js-library', 'library.js'
'-DUSE_AAC',
'--js-library', os.path.join('js', 'MonaMain.js')
# '--js-transform', 'python appender.py'
]
@@ -49,7 +49,8 @@ if 'target.bc' in object_files:
object_files.remove('target.bc')
object_files.remove('h265.bc')
object_files = [os.path.join('obj', x) for x in object_files]
emscripten.Building.link(object_files, 'obj/target.bc')
os.system('emcc '+ (' '.join(object_files)) +' -o obj/target.bc')
# emscripten.Building.link(object_files, 'obj/target.bc')
print 'emcc %s -> %s' % ('target.bc', 'MonaClient.js')
emscripten.Building.emcc('obj/target.bc', emcc_args, 'js/MonaClient.js')
