perf(ops): directly respond for eager ops (#18683)

This commit changes "eager ops" to directly return a response value
instead of calling "opresponse" callback in JavaScript. This saves
one boundary crossing and has a fantastic impact on the "async_ops.js"
benchmark:

```
v1.32.4
$ deno run cli/bench/async_ops.js
time 329 ms rate 3039513
time 322 ms rate 3105590
time 307 ms rate 3257328
time 301 ms rate 3322259
time 303 ms rate 3300330
time 306 ms rate 3267973
time 300 ms rate 3333333
time 301 ms rate 3322259
time 301 ms rate 3322259
time 301 ms rate 3322259
time 302 ms rate 3311258
time 301 ms rate 3322259
time 302 ms rate 3311258
time 302 ms rate 3311258
time 303 ms rate 3300330
```

```
this branch
$ ./target/release/deno run -A cli/bench/async_ops.js
time 257 ms rate 3891050
time 248 ms rate 4032258
time 251 ms rate 3984063
time 246 ms rate 4065040
time 238 ms rate 4201680
time 227 ms rate 4405286
time 228 ms rate 4385964
time 229 ms rate 4366812
time 228 ms rate 4385964
time 226 ms rate 4424778
time 226 ms rate 4424778
time 227 ms rate 4405286
time 228 ms rate 4385964
time 227 ms rate 4405286
time 228 ms rate 4385964
time 227 ms rate 4405286
time 229 ms rate 4366812
time 228 ms rate 4385964
```

Prerequisite for https://github.com/denoland/deno/pull/18652
This commit is contained in:
Bartek Iwańczuk 2023-04-13 14:32:47 +02:00 committed by GitHub
parent e39bc959c1
commit 702284dc22
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
11 changed files with 52 additions and 36 deletions

1
Cargo.lock generated
View file

@ -1206,6 +1206,7 @@ dependencies = [
"lazy-regex",
"once_cell",
"pmutil",
"pretty_assertions",
"prettyplease",
"proc-macro-crate",
"proc-macro2 1.0.56",

View file

@ -16,5 +16,5 @@ async function bench(fun) {
if (--total) queueMicrotask(() => bench(fun));
}
const { ops } = Deno[Deno.internal].core;
bench(() => ops.op_void_async());
const core = Deno[Deno.internal].core;
bench(() => core.opAsync("op_void_async"));

View file

@ -194,8 +194,9 @@
function opAsync2(name, arg0, arg1) {
const id = nextPromiseId++;
let promise = PromisePrototypeThen(setPromise(id), unwrapOpResult);
let maybeResult;
try {
ops[name](id, arg0, arg1);
maybeResult = ops[name](id, arg0, arg1);
} catch (err) {
// Cleanup the just-created promise
getPromise(id);
@ -204,14 +205,20 @@
}
promise = handleOpCallTracing(name, id, promise);
promise[promiseIdSymbol] = id;
if (typeof maybeResult !== "undefined") {
const promise = getPromise(id);
promise.resolve(maybeResult);
}
return promise;
}
function opAsync(name, ...args) {
const id = nextPromiseId++;
let promise = PromisePrototypeThen(setPromise(id), unwrapOpResult);
let maybeResult;
try {
ops[name](id, ...new SafeArrayIterator(args));
maybeResult = ops[name](id, ...new SafeArrayIterator(args));
} catch (err) {
// Cleanup the just-created promise
getPromise(id);
@ -220,6 +227,11 @@
}
promise = handleOpCallTracing(name, id, promise);
promise[promiseIdSymbol] = id;
if (typeof maybeResult !== "undefined") {
const promise = getPromise(id);
promise.resolve(maybeResult);
}
return promise;
}

View file

@ -2437,12 +2437,12 @@ pub fn queue_fast_async_op(
}
#[inline]
pub fn queue_async_op(
pub fn queue_async_op<'s>(
ctx: &OpCtx,
scope: &mut v8::HandleScope,
scope: &'s mut v8::HandleScope,
deferred: bool,
op: impl Future<Output = (RealmIdx, PromiseId, OpId, OpResult)> + 'static,
) {
) -> Option<v8::Local<'s, v8::Value>> {
let runtime_state = match ctx.runtime_state.upgrade() {
Some(rc_state) => rc_state,
// at least 1 Rc is held by the JsRuntime.
@ -2459,31 +2459,13 @@ pub fn queue_async_op(
);
match OpCall::eager(op) {
// This calls promise.resolve() before the control goes back to userland JS. It works something
// along the lines of:
//
// function opresolve(promiseId, ...) {
// getPromise(promiseId).resolve(...);
// }
// const p = setPromise();
// op.op_async(promiseId, ...); // Calls `opresolve`
// return p;
EagerPollResult::Ready((_, promise_id, op_id, mut resp)) if !deferred => {
let context_state_rc = JsRealm::state_from_scope(scope);
let context_state = context_state_rc.borrow();
let args = &[
v8::Integer::new(scope, promise_id).into(),
resp.to_v8(scope).unwrap(),
];
// If the result is ready we'll just return it straight to the caller, so
// we don't have to invoke a JS callback to respond. This works under the
// assumption that `()` return value is serialized as `null`.
EagerPollResult::Ready((_, _, op_id, mut resp)) if !deferred => {
let resp = resp.to_v8(scope).unwrap();
ctx.state.borrow_mut().tracker.track_async_completed(op_id);
let tc_scope = &mut v8::TryCatch::new(scope);
let js_recv_cb =
context_state.js_recv_cb.as_ref().unwrap().open(tc_scope);
let this = v8::undefined(tc_scope).into();
js_recv_cb.call(tc_scope, this, args);
return Some(resp);
}
EagerPollResult::Ready(op) => {
let ready = OpCall::ready(op);
@ -2497,6 +2479,8 @@ pub fn queue_async_op(
state.have_unpolled_ops = true;
}
}
None
}
#[cfg(test)]

View file

@ -25,6 +25,7 @@ regex.workspace = true
syn.workspace = true
[dev-dependencies]
pretty_assertions.workspace = true
prettyplease = "0.1.21"
testing_macros = "0.2.7"
trybuild = "1.0.71"

View file

@ -319,11 +319,15 @@ fn codegen_v8_async(
};
#pre_result
#core::_ops::queue_async_op(ctx, scope, #deferred, async move {
let maybe_response = #core::_ops::queue_async_op(ctx, scope, #deferred, async move {
let result = #result_fut
#result_wrapper
(realm_idx, promise_id, op_id, #core::_ops::to_op_result(get_class, result))
});
if let Some(response) = maybe_response {
rv.set(response);
}
}
}
@ -901,6 +905,7 @@ fn exclude_lifetime_params(
mod tests {
use crate::Attributes;
use crate::Op;
use pretty_assertions::assert_eq;
use std::path::PathBuf;
#[testing_macros::fixture("optimizer_tests/**/*.rs")]

View file

@ -938,6 +938,7 @@ mod tests {
use super::*;
use crate::Attributes;
use crate::Op;
use pretty_assertions::assert_eq;
use std::path::PathBuf;
use syn::parse_quote;

View file

@ -79,7 +79,7 @@ impl op_void_async {
state.tracker.track_async(op_id);
state.get_error_class_fn
};
deno_core::_ops::queue_async_op(
let maybe_response = deno_core::_ops::queue_async_op(
ctx,
scope,
false,
@ -94,6 +94,9 @@ impl op_void_async {
)
},
);
if let Some(response) = maybe_response {
rv.set(response);
}
}
}
#[allow(clippy::too_many_arguments)]

View file

@ -90,7 +90,7 @@ impl op_async_result {
state.tracker.track_async(op_id);
state.get_error_class_fn
};
deno_core::_ops::queue_async_op(
let maybe_response = deno_core::_ops::queue_async_op(
ctx,
scope,
false,
@ -104,6 +104,9 @@ impl op_async_result {
)
},
);
if let Some(response) = maybe_response {
rv.set(response);
}
}
}
#[allow(clippy::too_many_arguments)]

View file

@ -84,7 +84,7 @@ impl send_stdin {
state.tracker.track_async(op_id);
state.get_error_class_fn
};
deno_core::_ops::queue_async_op(
let maybe_response = deno_core::_ops::queue_async_op(
ctx,
scope,
false,
@ -102,5 +102,8 @@ impl send_stdin {
)
},
);
if let Some(response) = maybe_response {
rv.set(response);
}
}
}

View file

@ -82,7 +82,7 @@ impl send_stdin {
state.tracker.track_async(op_id);
state.get_error_class_fn
};
deno_core::_ops::queue_async_op(
let maybe_response = deno_core::_ops::queue_async_op(
ctx,
scope,
false,
@ -100,5 +100,8 @@ impl send_stdin {
)
},
);
if let Some(response) = maybe_response {
rv.set(response);
}
}
}