perf(core): generate inlined wrappers for async ops (#16428)

V8's JIT can do a better job when it knows the exact argument count, and this also paves the way for the fast call path (in the future).

This also lets us call async ops without `opAsync`:

```js
const { ops } = Deno.core;
await ops.op_void_async();
```

This patch: 4405286 ops/sec (~25% faster)
main: 3508771 ops/sec
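For reference, the wrapper that `initializeAsyncOps()` generates for each async op is roughly the following (a hand-expanded sketch for a hypothetical one-argument op; the real code is assembled with `new Function` in `01_core.js`, and the helpers it closes over are core-internal):

```js
// Roughly what initializeAsyncOps() installs for a 1-argument async op.
// `op` is the raw binding registered from Rust; the other names are the
// core-internal helpers visible in the 01_core.js diff below.
function op_example(arg0) {
  const id = rollPromiseId();
  let promise = PromisePrototypeThen(setPromise(id), unwrapOpResult);
  try {
    op(id, arg0); // fixed arity: V8 sees the exact argument count
  } catch (err) {
    getPromise(id); // clean up the just-created promise
    throw err;
  }
  handleOpCallTracing("op_example", id, promise);
  promise[promiseIdSymbol] = id;
  return promise;
}
```
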
Divy Srivastava, 2022-10-27 06:40:48 -07:00 (committed by GitHub)
parent bfd9912e1f
commit 02187966c1
10 changed files with 265 additions and 112 deletions

@ -63,7 +63,6 @@ pub fn bench_js_sync_with(
let code = v8::String::new(scope, looped_src.as_ref()).unwrap();
let script = v8::Script::compile(scope, code, None).unwrap();
// Run once if profiling, otherwise regular bench loop
if is_profiling() {
script.run(scope).unwrap();
@ -102,7 +101,9 @@ pub fn bench_js_async_with(
};
let looped = loop_code(inner_iters, src);
let src = looped.as_ref();
runtime
.execute_script("init", "Deno.core.initializeAsyncOps();")
.unwrap();
if is_profiling() {
for _ in 0..opts.profiling_outer {
tokio_runtime.block_on(inner_async(src, &mut runtime));

@ -5,7 +5,7 @@ let [total, count] = typeof Deno !== "undefined"
: [process.argv[2], process.argv[3]];
total = total ? parseInt(total, 0) : 50;
count = count ? parseInt(count, 10) : 100000;
count = count ? parseInt(count, 10) : 1000000;
async function bench(fun) {
const start = Date.now();
@ -16,4 +16,5 @@ async function bench(fun) {
if (--total) queueMicrotask(() => bench(fun));
}
bench(() => Deno.core.opAsync("op_void_async"));
const { ops } = Deno.core;
bench(() => ops.op_void_async());

@ -84,6 +84,6 @@ Deno.test(function opNamesMatch() {
// @ts-ignore: Deno.core allowed
Deno.core.opNames().sort(),
// @ts-ignore: Deno.core allowed
Object.keys(Deno.core.ops).sort(),
Object.keys(Deno.core.ops).sort().filter((name) => name !== "asyncOpsInfo"),
);
});

@ -28,7 +28,7 @@
SymbolFor,
setQueueMicrotask,
} = window.__bootstrap.primordials;
const ops = window.Deno.core.ops;
const { ops } = window.Deno.core;
const errorMap = {};
// Builtin v8 / JS errors
@ -159,21 +159,63 @@
return res;
}
function opAsync(opName, ...args) {
const promiseId = nextPromiseId++;
let p = setPromise(promiseId);
try {
ops[opName](promiseId, ...args);
} catch (err) {
// Cleanup the just-created promise
getPromise(promiseId);
// Rethrow the error
throw err;
function rollPromiseId() {
return nextPromiseId++;
}
// Generate async op wrappers. See core/bindings.rs
function initializeAsyncOps() {
function genAsyncOp(op, name, args) {
return new Function(
"setPromise",
"getPromise",
"promiseIdSymbol",
"rollPromiseId",
"handleOpCallTracing",
"op",
"unwrapOpResult",
"PromisePrototypeThen",
`
return function ${name}(${args}) {
const id = rollPromiseId();
let promise = PromisePrototypeThen(setPromise(id), unwrapOpResult);
try {
op(id, ${args});
} catch (err) {
// Cleanup the just-created promise
getPromise(id);
// Rethrow the error
throw err;
}
handleOpCallTracing("${name}", id, promise);
promise[promiseIdSymbol] = id;
return promise;
}
`,
)(
setPromise,
getPromise,
promiseIdSymbol,
rollPromiseId,
handleOpCallTracing,
op,
unwrapOpResult,
PromisePrototypeThen,
);
}
p = PromisePrototypeThen(p, unwrapOpResult);
// { <name>: <argc>, ... }
for (const ele of Object.entries(ops.asyncOpsInfo())) {
if (!ele) continue;
const [name, argc] = ele;
const op = ops[name];
const args = Array.from({ length: argc }, (_, i) => `arg${i}`).join(", ");
ops[name] = genAsyncOp(op, name, args);
}
}
function handleOpCallTracing(opName, promiseId, p) {
if (opCallTracingEnabled) {
// Capture a stack trace by creating a new `Error` object. We remove the
// first 6 characters (the `Error\n` prefix) to get just the stack trace.
const stack = StringPrototypeSlice(new Error().stack, 6);
MapPrototypeSet(opCallTraces, promiseId, { opName, stack });
p = PromisePrototypeFinally(
@ -181,9 +223,10 @@
() => MapPrototypeDelete(opCallTraces, promiseId),
);
}
// Save the id on the promise so it can later be ref'ed or unref'ed
p[promiseIdSymbol] = promiseId;
return p;
}
function opAsync(opName, ...args) {
return ops[opName](...args);
}
function refOp(promiseId) {
@ -303,6 +346,7 @@
// Extra Deno.core.* exports
const core = ObjectAssign(globalThis.Deno.core, {
opAsync,
initializeAsyncOps,
resources,
metrics,
registerErrorBuilder,
@ -322,11 +366,11 @@
setPromiseHooks,
close: (rid) => ops.op_close(rid),
tryClose: (rid) => ops.op_try_close(rid),
read: opAsync.bind(null, "op_read"),
readAll: opAsync.bind(null, "op_read_all"),
write: opAsync.bind(null, "op_write"),
writeAll: opAsync.bind(null, "op_write_all"),
shutdown: opAsync.bind(null, "op_shutdown"),
read: (rid, buffer) => ops.op_read(rid, buffer),
readAll: (rid) => ops.op_read_all(rid),
write: (rid, buffer) => ops.op_write(rid, buffer),
writeAll: (rid, buffer) => ops.op_write_all(rid, buffer),
shutdown: (rid) => ops.op_shutdown(rid),
print: (msg, isErr) => ops.op_print(msg, isErr),
setMacrotaskCallback: (fn) => ops.op_set_macrotask_callback(fn),
setNextTickCallback: (fn) => ops.op_set_next_tick_callback(fn),
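Taken together, the new call path looks like this from embedder JS (a minimal sketch; `op_void_async` is one of core's built-in test ops used in the benchmarks above):

```js
// Opt in once per runtime: each async op on Deno.core.ops is replaced by
// its generated, fixed-arity wrapper.
Deno.core.initializeAsyncOps();

// Call the op directly; the wrapper returns a promise tagged with its id,
// so it can still be ref'ed/unref'ed as before.
const p = Deno.core.ops.op_void_async();
const id = p[Symbol.for("Deno.core.internalPromiseId")];

// The legacy entry point keeps working and now simply delegates to the wrapper.
Deno.core.opAsync("op_void_async");
```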

@ -97,6 +97,7 @@ pub fn initialize_context<'s>(
scope: &mut v8::HandleScope<'s, ()>,
op_ctxs: &[OpCtx],
snapshot_loaded: bool,
will_snapshot: bool,
) -> v8::Local<'s, v8::Context> {
let scope = &mut v8::EscapableHandleScope::new(scope);
@ -116,7 +117,9 @@ pub fn initialize_context<'s>(
let ops_obj = JsRuntime::grab_global::<v8::Object>(scope, "Deno.core.ops")
.expect("Deno.core.ops to exist");
initialize_ops(scope, ops_obj, op_ctxs, snapshot_loaded);
if !will_snapshot {
initialize_async_ops_info(scope, ops_obj, op_ctxs);
}
return scope.escape(context);
}
@ -128,8 +131,10 @@ pub fn initialize_context<'s>(
// Bind functions to Deno.core.ops.*
let ops_obj = JsRuntime::ensure_objs(scope, global, "Deno.core.ops").unwrap();
initialize_ops(scope, ops_obj, op_ctxs, snapshot_loaded);
if !will_snapshot {
initialize_async_ops_info(scope, ops_obj, op_ctxs);
}
initialize_ops(scope, ops_obj, op_ctxs, !will_snapshot);
scope.escape(context)
}
@ -590,3 +595,84 @@ pub fn throw_type_error(scope: &mut v8::HandleScope, message: impl AsRef<str>) {
let exception = v8::Exception::type_error(scope, message);
scope.throw_exception(exception);
}
struct AsyncOpsInfo {
ptr: *const OpCtx,
len: usize,
}
impl<'s> IntoIterator for &'s AsyncOpsInfo {
type Item = &'s OpCtx;
type IntoIter = AsyncOpsInfoIterator<'s>;
fn into_iter(self) -> Self::IntoIter {
AsyncOpsInfoIterator {
// SAFETY: OpCtx slice is valid for the lifetime of the Isolate
info: unsafe { std::slice::from_raw_parts(self.ptr, self.len) },
index: 0,
}
}
}
struct AsyncOpsInfoIterator<'s> {
info: &'s [OpCtx],
index: usize,
}
impl<'s> Iterator for AsyncOpsInfoIterator<'s> {
type Item = &'s OpCtx;
fn next(&mut self) -> Option<Self::Item> {
loop {
match self.info.get(self.index) {
Some(ctx) if ctx.decl.is_async => {
self.index += 1;
return Some(ctx);
}
Some(_) => {
self.index += 1;
}
None => return None,
}
}
}
}
fn async_ops_info(
scope: &mut v8::HandleScope,
args: v8::FunctionCallbackArguments,
mut rv: v8::ReturnValue,
) {
let async_op_names = v8::Object::new(scope);
let external: v8::Local<v8::External> = args.data().try_into().unwrap();
let info: &AsyncOpsInfo =
// SAFETY: external is guaranteed to be a valid pointer to AsyncOpsInfo
unsafe { &*(external.value() as *const AsyncOpsInfo) };
for ctx in info {
let name = v8::String::new(scope, ctx.decl.name).unwrap();
let argc = v8::Integer::new(scope, ctx.decl.argc as i32);
async_op_names.set(scope, name.into(), argc.into());
}
rv.set(async_op_names.into());
}
fn initialize_async_ops_info(
scope: &mut v8::HandleScope,
ops_obj: v8::Local<v8::Object>,
op_ctxs: &[OpCtx],
) {
let key = v8::String::new(scope, "asyncOpsInfo").unwrap();
let external = v8::External::new(
scope,
Box::into_raw(Box::new(AsyncOpsInfo {
ptr: op_ctxs as *const [OpCtx] as _,
len: op_ctxs.len(),
})) as *mut c_void,
);
let val = v8::Function::builder(async_ops_info)
.data(external.into())
.build(scope)
.unwrap();
val.set_name(key);
ops_obj.set(scope, key.into(), val.into());
}
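On the JS side, `Deno.core.ops.asyncOpsInfo()` therefore returns a plain object mapping each async op name to its V8 argument count, which `initializeAsyncOps()` iterates to build the wrappers. A quick illustration (the exact ops and arities depend on what the embedder registers):

```js
const info = Deno.core.ops.asyncOpsInfo();
// e.g. { op_void_async: 0, op_read: 2, op_write: 2, op_shutdown: 1, ... }
for (const [name, argc] of Object.entries(info)) {
  console.log(`${name} expects ${argc} argument(s)`);
}
```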

@ -2,6 +2,8 @@
// This is not a real HTTP server. We read blindly one time into 'requestBuf',
// then write this fixed 'responseBuf'. The point of this benchmark is to
// exercise the event loop in a simple yet semi-realistic way.
Deno.core.initializeAsyncOps();
const requestBuf = new Uint8Array(64 * 1024);
const responseBuf = new Uint8Array(
"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World\n"
@ -16,7 +18,7 @@ function listen() {
/** Accepts a connection, returns rid. */
function accept(serverRid) {
return Deno.core.opAsync("op_accept", serverRid);
return Deno.core.ops.op_accept(serverRid);
}
async function serve(rid) {

@ -16,6 +16,9 @@ pub struct OpDecl {
pub enabled: bool,
pub is_async: bool,
pub is_unstable: bool,
/// V8 argument count. Used as an optimization
/// hint by `core.initializeAsyncOps`.
pub argc: usize,
pub is_v8: bool,
pub fast_fn: Option<Box<dyn FastFunction>>,
}
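This `argc` is what `asyncOpsInfo()` reports for each op; `initializeAsyncOps()` turns it into the wrapper's fixed parameter list using the expression from `01_core.js`, roughly:

```js
// Deriving the wrapper's parameter list from argc
// (argc = 2 is an example value, matching e.g. op_read(rid, buffer)).
const argc = 2;
const args = Array.from({ length: argc }, (_, i) => `arg${i}`).join(", ");
console.log(args); // "arg0, arg1"
```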

@ -384,7 +384,8 @@ impl JsRuntime {
isolate_ptr.read()
};
let scope = &mut v8::HandleScope::new(&mut isolate);
let context = bindings::initialize_context(scope, &op_ctxs, false);
let context =
bindings::initialize_context(scope, &op_ctxs, false, true);
global_context = v8::Global::new(scope, context);
scope.set_default_context(context);
}
@ -422,7 +423,7 @@ impl JsRuntime {
};
let scope = &mut v8::HandleScope::new(&mut isolate);
let context =
bindings::initialize_context(scope, &op_ctxs, snapshot_loaded);
bindings::initialize_context(scope, &op_ctxs, snapshot_loaded, false);
global_context = v8::Global::new(scope, context);
}
@ -550,6 +551,7 @@ impl JsRuntime {
scope,
&self.state.borrow().op_ctxs,
self.built_from_snapshot,
false,
);
JsRealm::new(v8::Global::new(scope, context))
};
@ -2243,6 +2245,7 @@ pub mod tests {
#[derive(Copy, Clone)]
enum Mode {
Async,
AsyncDeferred,
AsyncZeroCopy(bool),
}
@ -2251,20 +2254,28 @@ pub mod tests {
dispatch_count: Arc<AtomicUsize>,
}
#[op(deferred)]
#[op]
async fn op_test(
rc_op_state: Rc<RefCell<OpState>>,
control: u8,
buf: Option<ZeroCopyBuf>,
) -> Result<u8, AnyError> {
#![allow(clippy::await_holding_refcell_ref)] // False positive.
let op_state_ = rc_op_state.borrow();
let test_state = op_state_.borrow::<TestState>();
test_state.dispatch_count.fetch_add(1, Ordering::Relaxed);
match test_state.mode {
let mode = test_state.mode;
drop(op_state_);
match mode {
Mode::Async => {
assert_eq!(control, 42);
Ok(43)
}
Mode::AsyncDeferred => {
tokio::task::yield_now().await;
assert_eq!(control, 42);
Ok(43)
}
Mode::AsyncZeroCopy(has_buffer) => {
assert_eq!(buf.is_some(), has_buffer);
if let Some(buf) = buf {
@ -2314,14 +2325,15 @@ pub mod tests {
#[test]
fn test_ref_unref_ops() {
let (mut runtime, _dispatch_count) = setup(Mode::Async);
let (mut runtime, _dispatch_count) = setup(Mode::AsyncDeferred);
runtime
.execute_script(
"filename.js",
r#"
Deno.core.initializeAsyncOps();
var promiseIdSymbol = Symbol.for("Deno.core.internalPromiseId");
var p1 = Deno.core.opAsync("op_test", 42);
var p2 = Deno.core.opAsync("op_test", 42);
var p1 = Deno.core.ops.op_test(42);
var p2 = Deno.core.ops.op_test(42);
"#,
)
.unwrap();
@ -2374,6 +2386,7 @@ pub mod tests {
"filename.js",
r#"
let control = 42;
Deno.core.initializeAsyncOps();
Deno.core.opAsync("op_test", control);
async function main() {
Deno.core.opAsync("op_test", control);
@ -2392,6 +2405,7 @@ pub mod tests {
.execute_script(
"filename.js",
r#"
Deno.core.initializeAsyncOps();
const p = Deno.core.opAsync("op_test", 42);
if (p[Symbol.for("Deno.core.internalPromiseId")] == undefined) {
throw new Error("missing id on returned promise");
@ -2408,6 +2422,7 @@ pub mod tests {
.execute_script(
"filename.js",
r#"
Deno.core.initializeAsyncOps();
Deno.core.opAsync("op_test");
"#,
)
@ -2422,6 +2437,7 @@ pub mod tests {
.execute_script(
"filename.js",
r#"
Deno.core.initializeAsyncOps();
let zero_copy_a = new Uint8Array([0]);
Deno.core.opAsync("op_test", null, zero_copy_a);
"#,
@ -3021,7 +3037,6 @@ pub mod tests {
function main() {
console.log("asdf);
}
main();
"#,
);
@ -3041,18 +3056,16 @@ function assert(cond) {
throw Error("assert");
}
}
function main() {
assert(false);
}
main();
"#,
);
let expected_error = r#"Error: assert
at assert (error_stack.js:4:11)
at main (error_stack.js:9:3)
at error_stack.js:12:1"#;
at main (error_stack.js:8:3)
at error_stack.js:10:1"#;
assert_eq!(result.unwrap_err().to_string(), expected_error);
}
@ -3070,7 +3083,6 @@ main();
throw new Error("async");
});
})();
try {
await p;
} catch (error) {
@ -3083,7 +3095,7 @@ main();
let expected_error = r#"Error: async
at error_async_stack.js:5:13
at async error_async_stack.js:4:5
at async error_async_stack.js:10:5"#;
at async error_async_stack.js:9:5"#;
match runtime.poll_event_loop(cx, false) {
Poll::Ready(Err(e)) => {
@ -3176,7 +3188,6 @@ function assertEquals(a, b) {
const sab = new SharedArrayBuffer(16);
const i32a = new Int32Array(sab);
globalThis.resolved = false;
(function() {
const result = Atomics.waitAsync(i32a, 0, 0);
result.value.then(
@ -3184,7 +3195,6 @@ globalThis.resolved = false;
() => { assertUnreachable();
});
})();
const notify_return_value = Atomics.notify(i32a, 0, 1);
assertEquals(1, notify_return_value);
"#,
@ -3294,7 +3304,7 @@ assertEquals(1, notify_return_value);
runtime
.execute_script(
"op_async_borrow.js",
"Deno.core.opAsync('op_async_borrow')",
"Deno.core.initializeAsyncOps(); Deno.core.ops.op_async_borrow()",
)
.unwrap();
runtime.run_event_loop(false).await.unwrap();
@ -3368,7 +3378,8 @@ Deno.core.ops.op_sync_serialize_object_with_numbers_as_keys({
.execute_script(
"op_async_serialize_object_with_numbers_as_keys.js",
r#"
Deno.core.opAsync('op_async_serialize_object_with_numbers_as_keys', {
Deno.core.initializeAsyncOps();
Deno.core.ops.op_async_serialize_object_with_numbers_as_keys({
lines: {
100: {
unit: "m"
@ -3406,6 +3417,7 @@ Deno.core.opAsync('op_async_serialize_object_with_numbers_as_keys', {
.execute_script(
"macrotasks_and_nextticks.js",
r#"
Deno.core.initializeAsyncOps();
(async function () {
const results = [];
Deno.core.ops.op_set_macrotask_callback(() => {
@ -3416,7 +3428,6 @@ Deno.core.opAsync('op_async_serialize_object_with_numbers_as_keys', {
results.push("nextTick");
Deno.core.ops.op_set_has_tick_scheduled(false);
});
Deno.core.ops.op_set_has_tick_scheduled(true);
await Deno.core.opAsync('op_async_sleep');
if (results[0] != "nextTick") {
@ -3627,7 +3638,6 @@ Deno.core.opAsync('op_async_serialize_object_with_numbers_as_keys', {
Deno.core.ops.op_store_pending_promise_exception(promise);
Deno.core.ops.op_promise_reject();
});
new Promise((_, reject) => reject(Error("reject")));
"#,
)
@ -3645,7 +3655,6 @@ Deno.core.opAsync('op_async_serialize_object_with_numbers_as_keys', {
prev(...args);
});
}
new Promise((_, reject) => reject(Error("reject")));
"#,
)
@ -3695,7 +3704,6 @@ Deno.core.opAsync('op_async_serialize_object_with_numbers_as_keys', {
Deno.core.ops.op_set_promise_reject_callback((type, promise, reason) => {
Deno.core.ops.op_promise_reject();
});
throw new Error('top level throw');
"#;
@ -3826,8 +3834,6 @@ Deno.core.opAsync('op_async_serialize_object_with_numbers_as_keys', {
const a1b = a1.subarray(0, 3);
const a2 = new Uint8Array([5,10,15]);
const a2b = a2.subarray(0, 3);
if (!(a1.length > 0 && a1b.length > 0)) {
throw new Error("a1 & a1b should have a length");
}
@ -3838,7 +3844,6 @@ Deno.core.opAsync('op_async_serialize_object_with_numbers_as_keys', {
if (a1.length > 0 || a1b.length > 0) {
throw new Error("expecting a1 & a1b to be detached");
}
const a3 = Deno.core.ops.op_boomerang(a2b);
if (a3.byteLength != 3) {
throw new Error(`Expected a3.byteLength === 3, got ${a3.byteLength}`);
@ -3849,7 +3854,6 @@ Deno.core.opAsync('op_async_serialize_object_with_numbers_as_keys', {
if (a2.byteLength > 0 || a2b.byteLength > 0) {
throw new Error("expecting a2 & a2b to be detached, a3 re-attached");
}
const wmem = new WebAssembly.Memory({ initial: 1, maximum: 2 });
const w32 = new Uint32Array(wmem.buffer);
w32[0] = 1; w32[1] = 2; w32[2] = 3;

@ -118,7 +118,7 @@ pub fn op(attr: TokenStream, item: TokenStream) -> TokenStream {
let (has_fallible_fast_call, fast_impl, fast_field) =
codegen_fast_impl(&core, &func, name, is_async, must_be_fast);
let v8_body = if is_async {
let (v8_body, argc) = if is_async {
codegen_v8_async(&core, &func, margs, asyncness, deferred)
} else {
codegen_v8_sync(&core, &func, margs, has_fallible_fast_call)
@ -154,6 +154,7 @@ pub fn op(attr: TokenStream, item: TokenStream) -> TokenStream {
is_async: #is_async,
is_unstable: #is_unstable,
is_v8: #is_v8,
argc: #argc,
}
}
@ -181,7 +182,7 @@ fn codegen_v8_async(
margs: MacroArgs,
asyncness: bool,
deferred: bool,
) -> TokenStream2 {
) -> (TokenStream2, usize) {
let MacroArgs { is_v8, .. } = margs;
let special_args = f
.sig
@ -194,7 +195,7 @@ fn codegen_v8_async(
let rust_i0 = special_args.len();
let args_head = special_args.into_iter().collect::<TokenStream2>();
let (arg_decls, args_tail) = codegen_args(core, f, rust_i0, 1);
let (arg_decls, args_tail, argc) = codegen_args(core, f, rust_i0, 1);
let type_params = exclude_lifetime_params(&f.sig.generics.params);
let (pre_result, mut result_fut) = match asyncness {
@ -225,44 +226,47 @@ fn codegen_v8_async(
false => quote! { let result = Ok(result); },
};
quote! {
use #core::futures::FutureExt;
// SAFETY: #core guarantees args.data() is a v8 External pointing to an OpCtx for the isolates lifetime
let ctx = unsafe {
&*(#core::v8::Local::<#core::v8::External>::cast(args.data()).value()
as *const #core::_ops::OpCtx)
};
let op_id = ctx.id;
(
quote! {
use #core::futures::FutureExt;
// SAFETY: #core guarantees args.data() is a v8 External pointing to an OpCtx for the isolates lifetime
let ctx = unsafe {
&*(#core::v8::Local::<#core::v8::External>::cast(args.data()).value()
as *const #core::_ops::OpCtx)
};
let op_id = ctx.id;
let promise_id = args.get(0);
let promise_id = #core::v8::Local::<#core::v8::Integer>::try_from(promise_id)
.map(|l| l.value() as #core::PromiseId)
.map_err(#core::anyhow::Error::from);
// Fail if promise id invalid (not an int)
let promise_id: #core::PromiseId = match promise_id {
Ok(promise_id) => promise_id,
Err(err) => {
#core::_ops::throw_type_error(scope, format!("invalid promise id: {}", err));
return;
}
};
let promise_id = args.get(0);
let promise_id = #core::v8::Local::<#core::v8::Integer>::try_from(promise_id)
.map(|l| l.value() as #core::PromiseId)
.map_err(#core::anyhow::Error::from);
// Fail if promise id invalid (not an int)
let promise_id: #core::PromiseId = match promise_id {
Ok(promise_id) => promise_id,
Err(err) => {
#core::_ops::throw_type_error(scope, format!("invalid promise id: {}", err));
return;
}
};
#arg_decls
#arg_decls
// Track async call & get copy of get_error_class_fn
let get_class = {
let state = ::std::cell::RefCell::borrow(&ctx.state);
state.tracker.track_async(op_id);
state.get_error_class_fn
};
// Track async call & get copy of get_error_class_fn
let get_class = {
let state = ::std::cell::RefCell::borrow(&ctx.state);
state.tracker.track_async(op_id);
state.get_error_class_fn
};
#pre_result
#core::_ops::queue_async_op(ctx, scope, #deferred, async move {
let result = #result_fut
#result_wrapper
(promise_id, op_id, #core::_ops::to_op_result(get_class, result))
});
}
#pre_result
#core::_ops::queue_async_op(ctx, scope, #deferred, async move {
let result = #result_fut
#result_wrapper
(promise_id, op_id, #core::_ops::to_op_result(get_class, result))
});
},
argc,
)
}
fn scope_arg(arg: &FnArg) -> Option<TokenStream2> {
@ -516,7 +520,7 @@ fn codegen_v8_sync(
f: &syn::ItemFn,
margs: MacroArgs,
has_fallible_fast_call: bool,
) -> TokenStream2 {
) -> (TokenStream2, usize) {
let MacroArgs { is_v8, .. } = margs;
let special_args = f
.sig
@ -528,7 +532,7 @@ fn codegen_v8_sync(
.collect::<Vec<_>>();
let rust_i0 = special_args.len();
let args_head = special_args.into_iter().collect::<TokenStream2>();
let (arg_decls, args_tail) = codegen_args(core, f, rust_i0, 0);
let (arg_decls, args_tail, argc) = codegen_args(core, f, rust_i0, 0);
let ret = codegen_sync_ret(core, &f.sig.output);
let type_params = exclude_lifetime_params(&f.sig.generics.params);
@ -547,24 +551,27 @@ fn codegen_v8_sync(
quote! {}
};
quote! {
// SAFETY: #core guarantees args.data() is a v8 External pointing to an OpCtx for the isolates lifetime
let ctx = unsafe {
&*(#core::v8::Local::<#core::v8::External>::cast(args.data()).value()
as *const #core::_ops::OpCtx)
};
(
quote! {
// SAFETY: #core guarantees args.data() is a v8 External pointing to an OpCtx for the isolates lifetime
let ctx = unsafe {
&*(#core::v8::Local::<#core::v8::External>::cast(args.data()).value()
as *const #core::_ops::OpCtx)
};
#fast_error_handler
#arg_decls
#fast_error_handler
#arg_decls
let result = Self::call::<#type_params>(#args_head #args_tail);
let result = Self::call::<#type_params>(#args_head #args_tail);
// use RefCell::borrow instead of state.borrow to avoid clash with std::borrow::Borrow
let op_state = ::std::cell::RefCell::borrow(&*ctx.state);
op_state.tracker.track_sync(ctx.id);
// use RefCell::borrow instead of state.borrow to avoid clash with std::borrow::Borrow
let op_state = ::std::cell::RefCell::borrow(&*ctx.state);
op_state.tracker.track_sync(ctx.id);
#ret
}
#ret
},
argc,
)
}
struct FastApiSyn {
@ -803,12 +810,15 @@ fn is_fast_scalar(
}
}
/// (full declarations, idents, v8 argument count)
type ArgumentDecl = (TokenStream2, TokenStream2, usize);
fn codegen_args(
core: &TokenStream2,
f: &syn::ItemFn,
rust_i0: usize, // Index of first generic arg in rust
v8_i0: usize, // Index of first generic arg in v8/js
) -> (TokenStream2, TokenStream2) {
) -> ArgumentDecl {
let inputs = &f.sig.inputs.iter().skip(rust_i0).enumerate();
let ident_seq: TokenStream2 = inputs
.clone()
@ -823,7 +833,7 @@ fn codegen_args(
codegen_arg(core, arg, format!("arg_{i}").as_ref(), v8_i0 + i)
})
.collect();
(decls, ident_seq)
(decls, ident_seq, inputs.len())
}
fn codegen_arg(

@ -692,6 +692,7 @@ delete Intl.v8BreakIterator;
throw new Error("Worker runtime already bootstrapped");
}
core.initializeAsyncOps();
performance.setTimeOrigin(DateNow());
net.setup(runtimeOptions.unstableFlag);
@ -791,6 +792,7 @@ delete Intl.v8BreakIterator;
throw new Error("Worker runtime already bootstrapped");
}
core.initializeAsyncOps();
performance.setTimeOrigin(DateNow());
net.setup(runtimeOptions.unstableFlag);