Update XAudioServer.js

jsemu2 2019-04-27 02:45:30 -04:00
parent 5e48af37de
commit c7d1b38f0a


@@ -165,28 +165,7 @@ XAudioServer.prototype.initializeMozAudio = function () {
     this.initializeResampler(XAudioJSMozAudioSampleRate);
 }
 XAudioServer.prototype.initializeWebAudio = function () {
-    if (!XAudioJSWebAudioLaunchedContext) {
-        try {
-            XAudioJSWebAudioContextHandle = new AudioContext();    //Create a system audio context.
-        }
-        catch (error) {
-            XAudioJSWebAudioContextHandle = new webkitAudioContext();    //Create a system audio context.
-        }
-        XAudioJSWebAudioLaunchedContext = true;
-    }
-    if (XAudioJSWebAudioAudioNode) {
-        XAudioJSWebAudioAudioNode.disconnect();
-        XAudioJSWebAudioAudioNode.onaudioprocess = null;
-        XAudioJSWebAudioAudioNode = null;
-    }
-    try {
-        XAudioJSWebAudioAudioNode = XAudioJSWebAudioContextHandle.createScriptProcessor(XAudioJSSamplesPerCallback, 0, XAudioJSChannelsAllocated);    //Create the js event node.
-    }
-    catch (error) {
-        XAudioJSWebAudioAudioNode = XAudioJSWebAudioContextHandle.createJavaScriptNode(XAudioJSSamplesPerCallback, 0, XAudioJSChannelsAllocated);    //Create the js event node.
-    }
-    XAudioJSWebAudioAudioNode.onaudioprocess = XAudioJSWebAudioEvent;    //Connect the audio processing event to a handling function so we can manipulate output
-    XAudioJSWebAudioAudioNode.connect(XAudioJSWebAudioContextHandle.destination);    //Send and chain the output of the audio manipulation to the system audio output.
+    this.setupWebAudio();
     this.resetCallbackAPIAudioBuffer(XAudioJSWebAudioContextHandle.sampleRate);
     this.audioType = 1;
     /*
@@ -220,9 +199,10 @@ XAudioServer.prototype.initializeWebAudio = function () {
         }, 500);
     }
     if (this.userEventLatch && typeof XAudioJSWebAudioContextHandle.state != "undefined") {
+        var parentObj = this;
         var lazyEnableWA = function () {
             if(XAudioJSWebAudioContextHandle.state === 'suspended') {
-                XAudioJSWebAudioContextHandle.resume();
+                parentObj.setupWebAudio();
             }
         }
         try {
@@ -233,6 +213,31 @@ XAudioServer.prototype.initializeWebAudio = function () {
         catch (e) {}
     }
 }
+XAudioServer.prototype.setupWebAudio = function () {
+    if (XAudioJSWebAudioLaunchedContext) {
+        XAudioJSWebAudioContextHandle.close();
+    }
+    try {
+        XAudioJSWebAudioContextHandle = new AudioContext();    //Create a system audio context.
+    }
+    catch (error) {
+        XAudioJSWebAudioContextHandle = new webkitAudioContext();    //Create a system audio context.
+    }
+    XAudioJSWebAudioLaunchedContext = true;
+    if (XAudioJSWebAudioAudioNode) {
+        XAudioJSWebAudioAudioNode.disconnect();
+        XAudioJSWebAudioAudioNode.onaudioprocess = null;
+        XAudioJSWebAudioAudioNode = null;
+    }
+    try {
+        XAudioJSWebAudioAudioNode = XAudioJSWebAudioContextHandle.createScriptProcessor(XAudioJSSamplesPerCallback, 0, XAudioJSChannelsAllocated);    //Create the js event node.
+    }
+    catch (error) {
+        XAudioJSWebAudioAudioNode = XAudioJSWebAudioContextHandle.createJavaScriptNode(XAudioJSSamplesPerCallback, 0, XAudioJSChannelsAllocated);    //Create the js event node.
+    }
+    XAudioJSWebAudioAudioNode.onaudioprocess = XAudioJSWebAudioEvent;    //Connect the audio processing event to a handling function so we can manipulate output
+    XAudioJSWebAudioAudioNode.connect(XAudioJSWebAudioContextHandle.destination);    //Send and chain the output of the audio manipulation to the system audio output.
+}
 XAudioServer.prototype.initializeFlashAudio = function () {
     var existingFlashload = document.getElementById("XAudioJS");
     this.flashInitialized = false;
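
In effect, the commit consolidates all Web Audio setup into a new setupWebAudio() helper and has the user-gesture latch rebuild the context (closing any previous one) instead of only calling resume() on it. Below is a minimal, self-contained sketch of that rebuild-on-gesture pattern under the same assumptions as the original code; the identifiers (samplesPerCallback, channels, onAudioProcess) are illustrative stand-ins, not XAudioJS's actual globals, and ScriptProcessorNode is used only because that is what the original code targets.

// Sketch only: rebuild the audio graph on the first user gesture (illustrative names).
var audioContext = null;
var audioNode = null;

function setupWebAudio(samplesPerCallback, channels, onAudioProcess) {
    if (audioContext) {
        audioContext.close();    //Tear down any previously launched context before rebuilding.
    }
    audioContext = new (window.AudioContext || window.webkitAudioContext)();
    if (audioNode) {
        audioNode.disconnect();
        audioNode.onaudioprocess = null;
    }
    audioNode = audioContext.createScriptProcessor(samplesPerCallback, 0, channels);
    audioNode.onaudioprocess = onAudioProcess;    //Route the processing callback to our handler.
    audioNode.connect(audioContext.destination);  //Chain the node to the system audio output.
}

// Rebuild (rather than just resume) the context on the first click, mirroring
// what the lazyEnableWA handler above now does via parentObj.setupWebAudio().
document.addEventListener("click", function unlock() {
    if (!audioContext || audioContext.state === "suspended") {
        setupWebAudio(2048, 2, function (event) {
            //Fill the output buffers with silence in this sketch.
            for (var c = 0; c < event.outputBuffer.numberOfChannels; c++) {
                event.outputBuffer.getChannelData(c).fill(0);
            }
        });
    }
    document.removeEventListener("click", unlock);
}, false);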