Hongchan Choi | 7dd0b3e | 2019-05-13 21:19:03 | [diff] [blame] | 1 | // Copyright 2019 The Chromium Authors. All rights reserved. |
| 2 | // Use of this source code is governed by a BSD-style license that can be |
| 3 | // found in the LICENSE file. |
| 4 | |
| 5 | /** |
| 6 | * @implements {SDK.SDKModelObserver<!WebAudio.WebAudioModel>} |
| 7 | */ |
WebAudio.WebAudioView = class extends UI.ThrottledWidget {
  constructor() {
    super(true, 1000);
    this.element.classList.add('web-audio-drawer');
    this.registerRequiredCSS('web_audio/webAudio.css');

    // Toolbar hosting the garbage-collection action and the context selector.
    const toolbarArea = this.contentElement.createChild(
        'div', 'web-audio-toolbar-container vbox');
    this._contextSelector = new WebAudio.AudioContextSelector(ls`BaseAudioContexts`);
    const mainToolbar = new UI.Toolbar('web-audio-toolbar', toolbarArea);
    mainToolbar.appendToolbarItem(UI.Toolbar.createActionButtonForId('components.collect-garbage'));
    mainToolbar.appendSeparator();
    mainToolbar.appendToolbarItem(this._contextSelector.toolbarItem());

    // Container that shows either the landing page or the context details.
    this._detailViewContainer = this.contentElement.createChild('div', 'vbox flex-auto');

    this._graphManager = new WebAudio.GraphVisualizer.GraphManager();

    // Landing page displayed until a BaseAudioContext is observed.
    this._landingPage = new UI.VBox();
    this._landingPage.contentElement.classList.add('web-audio-landing-page', 'fill');
    this._landingPage.contentElement.appendChild(UI.html`
      <div>
        <p>${ls`Open a page that uses Web Audio API to start monitoring.`}</p>
      </div>
    `);
    this._landingPage.show(this._detailViewContainer);

    // Summary bar showing realtime data of the selected context.
    this._summaryBarContainer = this.contentElement.createChild('div', 'web-audio-summary-container');

    this._contextSelector.addEventListener(WebAudio.AudioContextSelector.Events.ContextSelected, event => {
      const selectedContext =
          /** @type {!Protocol.WebAudio.BaseAudioContext} */ (event.data);
      this._updateDetailView(selectedContext);
      this.doUpdate();
    });

    SDK.targetManager.observeModels(WebAudio.WebAudioModel, this);
  }

  /**
   * @override
   */
  wasShown() {
    super.wasShown();
    for (const model of SDK.targetManager.models(WebAudio.WebAudioModel)) {
      this._addEventListeners(model);
    }
  }

  /**
   * @override
   */
  willHide() {
    for (const model of SDK.targetManager.models(WebAudio.WebAudioModel)) {
      this._removeEventListeners(model);
    }
  }

  /**
   * @override
   * @param {!WebAudio.WebAudioModel} webAudioModel
   */
  modelAdded(webAudioModel) {
    // Only listen while visible; wasShown() attaches to all models otherwise.
    if (!this.isShowing())
      return;
    this._addEventListeners(webAudioModel);
  }

  /**
   * @override
   * @param {!WebAudio.WebAudioModel} webAudioModel
   */
  modelRemoved(webAudioModel) {
    this._removeEventListeners(webAudioModel);
  }

  /**
   * Refreshes the realtime summary, then schedules the next throttled update.
   * @override
   * @return {!Promise<?>}
   */
  async doUpdate() {
    await this._pollRealtimeData();
    this.update();
  }

  /**
   * Ordered model-event/handler pairs, shared by listener registration and
   * removal so the two lists can never drift apart.
   * @return {!Array<!Array<*>>}
   */
  _modelEventHandlers() {
    const events = WebAudio.WebAudioModel.Events;
    return [
      [events.ContextCreated, this._contextCreated],
      [events.ContextDestroyed, this._contextDestroyed],
      [events.ContextChanged, this._contextChanged],
      [events.ModelReset, this._reset],
      [events.ModelSuspend, this._suspendModel],
      [events.AudioListenerCreated, this._audioListenerCreated],
      [events.AudioListenerWillBeDestroyed, this._audioListenerWillBeDestroyed],
      [events.AudioNodeCreated, this._audioNodeCreated],
      [events.AudioNodeWillBeDestroyed, this._audioNodeWillBeDestroyed],
      [events.AudioParamCreated, this._audioParamCreated],
      [events.AudioParamWillBeDestroyed, this._audioParamWillBeDestroyed],
      [events.NodesConnected, this._nodesConnected],
      [events.NodesDisconnected, this._nodesDisconnected],
      [events.NodeParamConnected, this._nodeParamConnected],
      [events.NodeParamDisconnected, this._nodeParamDisconnected],
    ];
  }

  /**
   * @param {!WebAudio.WebAudioModel} webAudioModel
   */
  _addEventListeners(webAudioModel) {
    webAudioModel.ensureEnabled();
    for (const [eventType, handler] of this._modelEventHandlers()) {
      webAudioModel.addEventListener(eventType, handler, this);
    }
  }

  /**
   * @param {!WebAudio.WebAudioModel} webAudioModel
   */
  _removeEventListeners(webAudioModel) {
    for (const [eventType, handler] of this._modelEventHandlers()) {
      webAudioModel.removeEventListener(eventType, handler, this);
    }
  }

  /**
   * @param {!Common.Event} event
   */
  _contextCreated(event) {
    const createdContext = /** @type {!Protocol.WebAudio.BaseAudioContext} */ (event.data);
    this._graphManager.createContext(createdContext.contextId);
    this._contextSelector.contextCreated(event);
  }

  /**
   * @param {!Common.Event} event
   */
  _contextDestroyed(event) {
    const destroyedContextId = /** @type {!Protocol.WebAudio.GraphObjectId} */ (event.data);
    this._graphManager.destroyContext(destroyedContextId);
    this._contextSelector.contextDestroyed(event);
  }

  /**
   * @param {!Common.Event} event
   */
  _contextChanged(event) {
    const changedContext = /** @type {!Protocol.WebAudio.BaseAudioContext} */ (event.data);
    // Ignore changes for contexts the graph manager never saw created.
    if (!this._graphManager.hasContext(changedContext.contextId))
      return;

    this._contextSelector.contextChanged(event);
  }

  // Restores the landing page and drops all per-context state.
  _reset() {
    if (this._landingPage.isShowing()) {
      this._landingPage.detach();
    }
    this._contextSelector.reset();
    this._detailViewContainer.removeChildren();
    this._landingPage.show(this._detailViewContainer);
    this._graphManager.clearGraphs();
  }

  // Drops the graphs when the model is suspended (e.g. target goes away).
  _suspendModel() {
    this._graphManager.clearGraphs();
  }

  /**
   * @param {!Common.Event} event
   */
  _audioListenerCreated(event) {
    const audioListener = /** @type {!Protocol.WebAudio.AudioListener} */ (event.data);
    const contextGraph = this._graphManager.getGraph(audioListener.contextId);
    if (!contextGraph)
      return;
    // A listener has no audio inputs/outputs; model it as a 0-in/0-out node.
    contextGraph.addNode({
      nodeId: audioListener.listenerId,
      nodeType: 'Listener',
      numberOfInputs: 0,
      numberOfOutputs: 0,
    });
  }

  /**
   * @param {!Common.Event} event
   */
  _audioListenerWillBeDestroyed(event) {
    const data = event.data;
    const contextGraph = this._graphManager.getGraph(data.contextId);
    if (!contextGraph)
      return;
    contextGraph.removeNode(data.listenerId);
  }

  /**
   * @param {!Common.Event} event
   */
  _audioNodeCreated(event) {
    const audioNode = /** @type {!Protocol.WebAudio.AudioNode} */ (event.data);
    const contextGraph = this._graphManager.getGraph(audioNode.contextId);
    if (!contextGraph)
      return;
    contextGraph.addNode({
      nodeId: audioNode.nodeId,
      nodeType: audioNode.nodeType,
      numberOfInputs: audioNode.numberOfInputs,
      numberOfOutputs: audioNode.numberOfOutputs,
    });
  }

  /**
   * @param {!Common.Event} event
   */
  _audioNodeWillBeDestroyed(event) {
    const data = event.data;
    const contextGraph = this._graphManager.getGraph(data.contextId);
    if (!contextGraph)
      return;
    contextGraph.removeNode(data.nodeId);
  }

  /**
   * @param {!Common.Event} event
   */
  _audioParamCreated(event) {
    const audioParam = /** @type {!Protocol.WebAudio.AudioParam} */ (event.data);
    const contextGraph = this._graphManager.getGraph(audioParam.contextId);
    if (!contextGraph)
      return;
    contextGraph.addParam({
      paramId: audioParam.paramId,
      paramType: audioParam.paramType,
      nodeId: audioParam.nodeId,
    });
  }

  /**
   * @param {!Common.Event} event
   */
  _audioParamWillBeDestroyed(event) {
    const data = event.data;
    const contextGraph = this._graphManager.getGraph(data.contextId);
    if (!contextGraph)
      return;
    contextGraph.removeParam(data.paramId);
  }

  /**
   * @param {!Common.Event} event
   */
  _nodesConnected(event) {
    const data = event.data;
    const contextGraph = this._graphManager.getGraph(data.contextId);
    if (!contextGraph)
      return;
    contextGraph.addNodeToNodeConnection({
      sourceId: data.sourceId,
      destinationId: data.destinationId,
      sourceOutputIndex: data.sourceOutputIndex,
      destinationInputIndex: data.destinationInputIndex,
    });
  }

  /**
   * @param {!Common.Event} event
   */
  _nodesDisconnected(event) {
    const data = event.data;
    const contextGraph = this._graphManager.getGraph(data.contextId);
    if (!contextGraph)
      return;
    contextGraph.removeNodeToNodeConnection({
      sourceId: data.sourceId,
      destinationId: data.destinationId,
      sourceOutputIndex: data.sourceOutputIndex,
      destinationInputIndex: data.destinationInputIndex,
    });
  }

  /**
   * @param {!Common.Event} event
   */
  _nodeParamConnected(event) {
    const data = event.data;
    const contextGraph = this._graphManager.getGraph(data.contextId);
    if (!contextGraph)
      return;
    // The protocol reports the AudioParam as destination; the graph edge
    // targets the node that owns the param, so resolve it from the param id.
    const destinationNodeId = contextGraph.getNodeIdByParamId(data.destinationId);
    if (!destinationNodeId)
      return;
    contextGraph.addNodeToParamConnection({
      sourceId: data.sourceId,
      destinationId: destinationNodeId,
      sourceOutputIndex: data.sourceOutputIndex,
      destinationParamId: data.destinationId,
    });
  }

  /**
   * @param {!Common.Event} event
   */
  _nodeParamDisconnected(event) {
    const data = event.data;
    const contextGraph = this._graphManager.getGraph(data.contextId);
    if (!contextGraph)
      return;
    // Same param-to-owning-node resolution as in _nodeParamConnected.
    const destinationNodeId = contextGraph.getNodeIdByParamId(data.destinationId);
    if (!destinationNodeId)
      return;
    contextGraph.removeNodeToParamConnection({
      sourceId: data.sourceId,
      destinationId: destinationNodeId,
      sourceOutputIndex: data.sourceOutputIndex,
      destinationParamId: data.destinationId,
    });
  }

  /**
   * Replaces the landing page (if visible) with details of the given context.
   * @param {!Protocol.WebAudio.BaseAudioContext} context
   */
  _updateDetailView(context) {
    if (this._landingPage.isShowing()) {
      this._landingPage.detach();
    }
    const detailBuilder = new WebAudio.ContextDetailBuilder(context);
    this._detailViewContainer.removeChildren();
    this._detailViewContainer.appendChild(detailBuilder.getFragment());
  }

  /**
   * @param {!Protocol.WebAudio.GraphObjectId} contextId
   * @param {!Protocol.WebAudio.ContextRealtimeData} contextRealtimeData
   */
  _updateSummaryBar(contextId, contextRealtimeData) {
    const summaryBuilder =
        new WebAudio.AudioContextSummaryBuilder(contextId, contextRealtimeData);
    this._summaryBarContainer.removeChildren();
    this._summaryBarContainer.appendChild(summaryBuilder.getFragment());
  }

  _clearSummaryBar() {
    this._summaryBarContainer.removeChildren();
  }

  // Fetches realtime data for the selected context and refreshes the summary.
  async _pollRealtimeData() {
    const selectedContext = this._contextSelector.selectedContext();
    if (!selectedContext) {
      this._clearSummaryBar();
      return;
    }

    for (const model of SDK.targetManager.models(WebAudio.WebAudioModel)) {
      // Summary data is only meaningful for realtime contexts.
      if (selectedContext.contextType !== 'realtime') {
        this._clearSummaryBar();
        continue;
      }
      if (!this._graphManager.hasContext(selectedContext.contextId))
        continue;
      const realtimeData = await model.requestRealtimeData(selectedContext.contextId);
      if (realtimeData)
        this._updateSummaryBar(selectedContext.contextId, realtimeData);
    }
  }
};