Make sure the AudioContext is created only after user interaction
See https://goo.gl/7K7WLu
commit dd0c7f32a7
parent bf731c7812
app/index.js | 10 +++++++---
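For context before the diff: modern browsers (per the Chrome autoplay policy linked above) will not let an AudioContext start if it was created before any user gesture, so the app moves from an eagerly created context object to a factory call. Below is a minimal sketch of the lazy pattern the new call sites rely on; the names and caching logic are illustrative, not the actual internals of the audio-context package.

```js
// Sketch of a lazily created AudioContext, assuming a browser environment.
let cachedContext = null

function audioContext () {
  if (!cachedContext) {
    // Older Safari only exposes the prefixed constructor.
    const Ctor = window.AudioContext || window.webkitAudioContext
    cachedContext = new Ctor()
  }
  return cachedContext
}

// Old call sites: audioContext.sampleRate  (context built at load time)
// New call sites: audioContext().sampleRate (context built on first call,
// which now only happens after the user has interacted with the page)
```

This is why every `audioContext.something` in the diff becomes `audioContext().something`.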
app/index.js

```diff
@@ -262,7 +262,7 @@ class Settings {
 class GlobalBindings {
   constructor () {
     this.settings = new Settings()
-    this.connector = new WorkerBasedMumbleConnector(audioContext.sampleRate)
+    this.connector = new WorkerBasedMumbleConnector()
     this.client = null
     this.userContextMenu = new ContextMenu()
     this.channelContextMenu = new ContextMenu()
@@ -334,6 +334,10 @@ class GlobalBindings {
 
       log('Connecting to server ', host)
 
+      // Note: This call needs to be delayed until the user has interacted with
+      // the page in some way (which at this point they have), see: https://goo.gl/7K7WLu
+      this.connector.setSampleRate(audioContext().sampleRate)
+
       // TODO: token
       this.connector.connect(`wss://${host}:${port}`, {
         username: username,
@@ -555,9 +559,9 @@ class GlobalBindings {
       }).on('voice', stream => {
         console.log(`User ${user.username} started takling`)
         var userNode = new BufferQueueNode({
-          audioContext: audioContext
+          audioContext: audioContext()
         })
-        userNode.connect(audioContext.destination)
+        userNode.connect(audioContext().destination)
 
         stream.on('data', data => {
           if (data.target === 'normal') {
```
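Taken together, the index.js changes keep Web Audio untouched at page-load time: the connector is constructed without a sample rate, and the rate is only read inside the connect flow, which the user has to trigger. A rough sketch of that ordering, assuming a plain button handler; the element id, host, and username are placeholders and the real code wires this through its UI bindings.

```js
// Sketch only: '#connect-button', the host URL and the username are made up.
const connector = new WorkerBasedMumbleConnector() // no AudioContext needed yet

document.querySelector('#connect-button').addEventListener('click', () => {
  // First user gesture: it is now safe to create the context and read its rate.
  connector.setSampleRate(audioContext().sampleRate)
  connector.connect('wss://voice.example.com:443', { username: 'alice' })
})
```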
```diff
@@ -103,7 +103,7 @@ export class VADVoiceHandler extends VoiceHandler {
     super(client, settings)
     let level = settings.vadLevel
     const self = this
-    this._vad = vad(audioContext, theUserMedia, {
+    this._vad = vad(audioContext(), theUserMedia, {
       onVoiceStart () {
         console.log('vad: start')
         self._active = true
```
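The VAD handler is only constructed once the user has opened a microphone stream, so calling audioContext() here already satisfies the interaction requirement. A hedged usage sketch follows, assuming `vad` is the voice-activity-detection package's default export (the import itself is not part of this diff) and that its options include `onVoiceStop` alongside the `onVoiceStart` shown above.

```js
// Assumption: `vad` comes from the voice-activity-detection package.
import vad from 'voice-activity-detection'

navigator.mediaDevices.getUserMedia({ audio: true }).then(theUserMedia => {
  // getUserMedia follows a user decision, so the context created by
  // audioContext() at this point is allowed to start.
  vad(audioContext(), theUserMedia, {
    onVoiceStart () { console.log('vad: start') },
    onVoiceStop () { console.log('vad: stop') }
  })
})
```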
```diff
@@ -12,7 +12,7 @@ import worker from './worker'
  * Only stuff which we need in mumble-web is proxied, i.e. this is not a generic solution.
  */
 class WorkerBasedMumbleConnector {
-  constructor (sampleRate) {
+  constructor () {
     this._worker = webworkify(worker)
     this._worker.addEventListener('message', this._onMessage.bind(this))
     this._reqId = 1
@@ -20,7 +20,9 @@ class WorkerBasedMumbleConnector {
     this._clients = {}
     this._nextVoiceId = 1
     this._voiceStreams = {}
+  }
 
+  setSampleRate (sampleRate) {
     this._postMessage({
       method: '_init',
       sampleRate: sampleRate
```
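On the main-thread side, the connector no longer needs a sample rate at construction; callers must invoke setSampleRate (which forwards an `_init` message) before connect. The worker side of that message is not part of this commit, so the following is a purely hypothetical sketch of how such a dispatch could look.

```js
// Hypothetical worker-side handler, for illustration only; the real
// app/worker dispatch is not shown in this diff.
let sampleRate = null

self.addEventListener('message', ev => {
  const msg = ev.data
  if (msg.method === '_init') {
    // The worker has no AudioContext of its own, so the page reports the
    // rate it will record and play back at.
    sampleRate = msg.sampleRate
  }
})
```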
```diff
@@ -16,7 +16,7 @@
   ],
   "devDependencies": {
     "audio-buffer-utils": "^3.1.2",
-    "audio-context": "^0.1.0",
+    "audio-context": "^1.0.3",
     "babel-core": "^6.18.2",
     "babel-loader": "^6.2.8",
     "babel-plugin-transform-runtime": "^6.15.0",
@@ -52,6 +52,6 @@
     "mumble-client-codecs-browser": "^1.2.0",
     "mumble-client-websocket": "^1.0.0",
     "mumble-client": "^1.3.0",
-    "web-audio-buffer-queue": "^1.0.0"
+    "web-audio-buffer-queue": "^1.1.0"
   }
 }
```
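The dependency bumps match the code changes: the old call sites suggest audio-context 0.x exported an already-constructed shared context, while 1.x exports a factory function (hence the new audioContext() calls), and web-audio-buffer-queue gets a minor bump. A hedged before/after sketch of the import-site difference, consistent with the diff but not a claim about the package's exact implementation:

```js
import audioContext from 'audio-context'

// With audio-context ^0.1.0 the default export behaved like a ready-made
// AudioContext instance:
//   audioContext.sampleRate
//
// With audio-context ^1.0.3 the default export is a function that returns a
// lazily created (and, as the call sites imply, shared) context:
const ctx = audioContext()
console.log(ctx.sampleRate)
```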