Fix sound not playing in Dartium checked mode.
Checked mode fails for operations with optional arguments that return a Promise, because the generated Dartium code is missing the convertNativePromiseToDartFuture calls. The easiest solution is to emit the proper code for these operations rather than re-plumb the Dartium emitter for operations with optional arguments that return Promises.

https://github.com/dart-lang/sdk/issues/29810

TBR=alanknight@google.com,kevmoo@google.com
Review-Url: https://codereview.chromium.org/2940543002
commit b3a90540f1
parent 24bb748bc7

5 changed files with 48 additions and 43 deletions
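The shape of the bug, as a standalone sketch: in checked mode Dart verifies a method's declared return type at runtime, so a binding declared to return a Future must not hand back the raw native Promise it receives from Blink. The snippet below is illustrative only: NativePromise, _nativeDecode, and this stub of convertNativePromiseToDartFuture are placeholders rather than SDK code; only the placement of the wrapping call reflects the fix.

import 'dart:async';
import 'dart:typed_data';

// Placeholder for the JS Promise object handed back by a Blink callback.
class NativePromise {}

// Hypothetical stand-in for a generated *_Callback_N_ binding that returns a Promise.
NativePromise _nativeDecode(ByteBuffer data) => new NativePromise();

// Stub playing the role of the SDK helper named in the commit message: adapt a
// native Promise into a Dart Future the caller can listen on.
Future convertNativePromiseToDartFuture(NativePromise promise) {
  var completer = new Completer();
  // ...wire the promise's resolve/reject onto the completer here...
  return completer.future;
}

// Broken shape: returning the raw promise from a method declared to return a
// Future fails the checked-mode return-type check.
//   Future decodeAudioData(ByteBuffer data) => _nativeDecode(data);

// Fixed shape: wrap the promise so the declared Future return type holds.
Future decodeAudioData(ByteBuffer data) =>
    convertNativePromiseToDartFuture(_nativeDecode(data));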
@@ -361,13 +361,6 @@ class AudioContext extends EventTarget {
   @DocsEditable()
   WaveShaperNode createWaveShaper() native;
 
-  @JSName('decodeAudioData')
-  @DomName('AudioContext.decodeAudioData')
-  @DocsEditable()
-  Future _decodeAudioData(ByteBuffer audioData,
-      [AudioBufferCallback successCallback,
-      AudioBufferCallback errorCallback]) native;
-
   @DomName('AudioContext.resume')
   @DocsEditable()
   @Experimental() // untriaged
@@ -409,6 +402,13 @@ class AudioContext extends EventTarget {
     }
   }
 
+  @JSName('decodeAudioData')
+  @DomName('AudioContext.decodeAudioData')
+  @DocsEditable()
+  Future _decodeAudioData(ByteBuffer audioData,
+      [AudioBufferCallback successCallback,
+      AudioBufferCallback errorCallback]) native;
+
   @DomName('AudioContext.decodeAudioData')
   Future<AudioBuffer> decodeAudioData(ByteBuffer audioData) {
     var completer = new Completer<AudioBuffer>();
@@ -546,21 +546,6 @@ class AudioContext extends EventTarget {
   WaveShaperNode createWaveShaper() =>
       _blink.BlinkAudioContext.instance.createWaveShaper_Callback_0_(this);
 
-  Future _decodeAudioData(ByteBuffer audioData,
-      [AudioBufferCallback successCallback,
-      AudioBufferCallback errorCallback]) {
-    if (errorCallback != null) {
-      return _blink.BlinkAudioContext.instance.decodeAudioData_Callback_3_(
-          this, audioData, successCallback, errorCallback);
-    }
-    if (successCallback != null) {
-      return _blink.BlinkAudioContext.instance
-          .decodeAudioData_Callback_2_(this, audioData, successCallback);
-    }
-    return _blink.BlinkAudioContext.instance
-        .decodeAudioData_Callback_1_(this, audioData);
-  }
-
   @DomName('AudioContext.resume')
   @DocsEditable()
   @Experimental() // untriaged
@@ -574,18 +559,20 @@
           _blink.BlinkAudioContext.instance.suspend_Callback_0_(this));
 
   @DomName('AudioContext.decodeAudioData')
-  Future<AudioBuffer> decodeAudioData(ByteBuffer audioData) {
-    var completer = new Completer<AudioBuffer>();
-    _decodeAudioData(audioData, (value) {
-      completer.complete(value);
-    }, (error) {
-      if (error == null) {
-        completer.completeError('');
-      } else {
-        completer.completeError(error);
-      }
-    });
-    return completer.future;
+  Future<AudioBuffer> decodeAudioData(ByteBuffer audioData,
+      [AudioBufferCallback successCallback,
+      AudioBufferCallback errorCallback]) {
+    if (errorCallback != null) {
+      return convertNativePromiseToDartFuture(_blink.BlinkAudioContext.instance
+          .decodeAudioData_Callback_3_(
+              this, audioData, successCallback, errorCallback));
+    }
+    if (successCallback != null) {
+      return convertNativePromiseToDartFuture(_blink.BlinkAudioContext.instance
+          .decodeAudioData_Callback_2_(this, audioData, successCallback));
+    }
+    return convertNativePromiseToDartFuture(_blink.BlinkAudioContext.instance
+        .decodeAudioData_Callback_1_(this, audioData));
   }
 }
 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
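The wrapper removed in the hunk above did this bridging by hand: it passed success and error callbacks into _decodeAudioData and completed a Completer from whichever one fired. Written out generically (decodeWithCallbacks here is a hypothetical callback-style API, not an SDK member), that pattern is:

import 'dart:async';
import 'dart:typed_data';

typedef void DecodeCallback(Object? value);

// Hypothetical callback-style decode, standing in for the old _decodeAudioData.
void decodeWithCallbacks(
    ByteBuffer data, DecodeCallback onSuccess, DecodeCallback onError) {
  // ...exactly one of the two callbacks fires when the native decode finishes...
  onSuccess('decoded');
}

// The bridge the removed Dartium wrapper used: route both callbacks onto a
// single Completer and hand its Future back to the caller.
Future decode(ByteBuffer data) {
  var completer = new Completer();
  decodeWithCallbacks(data, (value) {
    completer.complete(value);
  }, (error) {
    // The old wrapper substituted an empty string when the error was null.
    completer.completeError(error ?? '');
  });
  return completer.future;
}

The replacement drops the manual bridge and instead wraps the Promise-returning Blink call with convertNativePromiseToDartFuture, while keeping the optional callback parameters in the public signature.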
@@ -1,13 +1,5 @@
 // This file introduces / supplements and forces Dart declarations.
 
-[DartSupplemental,
- Constructor]
-interface AudioContext {
-  // TODO(ager): Auto-generate this custom method when the info about retaining
-  // typed arrays is in the IDL.
-  [Custom] void decodeAudioData(ArrayBuffer audioData, AudioBufferCallback successCallback, AudioBufferCallback errorCallback);
-};
-
 [DartSupplemental]
 interface WaveShaperNode {
   // TODO(ager): Auto-generate this custom method when the info about retaining
@@ -222,7 +222,6 @@ custom_html_constructors = monitored.Set(
 # browser. They are exposed simply by placing an underscore in front of the
 # name.
 private_html_members = monitored.Set('htmlrenamer.private_html_members', [
-  'AudioContext.decodeAudioData',
   'AudioNode.connect',
   'Cache.add',
   'Cache.delete',
@@ -534,6 +533,7 @@ for member in convert_to_future_members:
 # TODO(jacobr): cleanup and augment this list.
 removed_html_members = monitored.Set('htmlrenamer.removed_html_members', [
   'Attr.textContent', # Not needed as it is the same as Node.textContent.
+  'AudioContext.decodeAudioData',
   'AudioBufferSourceNode.looping', # TODO(vsm): Use deprecated IDL annotation
   'CSSStyleDeclaration.getPropertyCSSValue',
   'CanvasRenderingContext2D.clearShadow',
@@ -34,6 +34,15 @@ $if DART2JS
     }
   }
 $endif
+
+$if DART2JS
+  @JSName('decodeAudioData')
+  @DomName('AudioContext.decodeAudioData')
+  @DocsEditable()
+  Future _decodeAudioData(ByteBuffer audioData,
+      [AudioBufferCallback successCallback,
+      AudioBufferCallback errorCallback]) native;
+
   @DomName('AudioContext.decodeAudioData')
   Future<AudioBuffer> decodeAudioData(ByteBuffer audioData) {
     var completer = new Completer<AudioBuffer>();
@@ -48,4 +57,21 @@ $endif
     });
     return completer.future;
   }
+$else
+  @DomName('AudioContext.decodeAudioData')
+  Future<AudioBuffer> decodeAudioData(ByteBuffer audioData,
+      [AudioBufferCallback successCallback,
+      AudioBufferCallback errorCallback]) {
+    if (errorCallback != null) {
+      return convertNativePromiseToDartFuture(_blink.BlinkAudioContext.instance.decodeAudioData_Callback_3_(
+          this, audioData, successCallback, errorCallback));
+    }
+    if (successCallback != null) {
+      return convertNativePromiseToDartFuture(_blink.BlinkAudioContext.instance
+          .decodeAudioData_Callback_2_(this, audioData, successCallback));
+    }
+    return convertNativePromiseToDartFuture(_blink.BlinkAudioContext.instance
+        .decodeAudioData_Callback_1_(this, audioData));
+  }
+$endif
 }
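From the caller's side the API is unchanged; the difference is that in Dartium checked mode the returned value is now a genuine Dart Future. A hypothetical caller (playClip and the source of the encoded bytes are made up; the AudioContext members are the public dart:web_audio API):

import 'dart:async';
import 'dart:typed_data';
import 'dart:web_audio';

// Hypothetical helper: decode an encoded clip and play it on the given context.
Future playClip(AudioContext ctx, ByteBuffer encodedAudio) async {
  // With this fix the Dartium binding hands back a real Dart Future, so the
  // await below also satisfies the checked-mode return-type check.
  AudioBuffer buffer = await ctx.decodeAudioData(encodedAudio);
  AudioBufferSourceNode source = ctx.createBufferSource();
  source.buffer = buffer;
  var destination = ctx.destination;
  if (destination != null) {
    source.connectNode(destination);
  }
  source.start(0);
}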