record io

Dmitry Vasilev
2023-02-06 01:53:34 +08:00
parent 4410d4135a
commit be4d104dc6
16 changed files with 25216 additions and 35 deletions


@@ -130,6 +130,7 @@ there is cached result.
Builtin IO functions are mocked to cache IO. The current list of cached
builtin functions is:
- `Date` constructor
- `Math.random()`
- `fetch`
@@ -148,7 +149,8 @@ Caching algorithm is:
`write`
- Arguments to IO-caching functions are expected to be deep equal to the
non-cached call for the cache to be used. Deep equality is implemented by
comparing JSON-stringified arguments
- If there is a call that is not cached, then the cache is busted and the
entire execution is restarted
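
As a rough illustration of this algorithm (a sketch only, with illustrative names; the actual implementation is in src/record_io.js further down):

// Sketch: record IO calls on the first run, replay them from the cache on
// later runs, and bust the cache when the replay diverges from the recording
const cache = []        // recorded calls, in call order
let recording = true    // false when replaying a previous recording
let index = 0           // position in the cache during replay

const patch_io = (obj, method, name) => {
  const original = obj[method]
  obj[method] = function(...args) {
    if(recording) {
      const value = original.apply(this, args)
      cache.push({name, args, value})
      return value
    }
    const call = cache[index++]
    // Deep equality is approximated by comparing JSON-stringified arguments
    if(
      call == null
      || call.name != name
      || JSON.stringify(call.args) != JSON.stringify(args)
    ) {
      // Cache is busted: the caller restarts the entire execution without it
      throw new Error('io replay aborted')
    }
    return call.value
  }
}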


@@ -3,12 +3,29 @@ import {ethers} from 'https://unpkg.com/ethers/dist/ethers.esm.js'
const URL = 'https://ethereum-goerli-rpc.allthatnode.com'
const p = ethers.getDefaultProvider(URL)
await p._networkPromise
const latest = await p.getBlock()
latest
const txs = await Promise.all(latest.transactions.map(t =>
p.getTransactionReceipt(t)
))
const totalGas = txs.reduce((gas,tx) =>
gas.add(tx.gasUsed), ethers.BigNumber.from(0))
totalGas.add(25)

record_io/ethers.html (new file, 261 lines)

@@ -0,0 +1,261 @@
<script>
let io_cache = null
let io_cache_is_recording = io_cache == null
if(io_cache == null) {
io_cache = {calls: [], resolution_order: []}
}
let io_cache_is_replay_aborted = false
let io_cache_index = 0
const io_patch = (obj, method, name, use_context = false) => {
if(obj == null || obj[method] == null) {
// Method is absent in current env, skip patching
return
}
const original = obj[method]
obj[method] = function(...args) {
console.log('patched method', name, {io_cache_is_replay_aborted, io_cache_is_recording})
// TODO guard that in find_call io methods are not called?
// if(searched_location != null) {
// throw new Error('illegal state')
// }
if(io_cache_is_replay_aborted) {
// Try to finish fast
console.error('ABORT')
throw new Error('io recording aborted')
} else if(io_cache_is_recording) {
let ok, value, error
const has_new_target = new.target != null
try {
// TODO. Do we need it here? Only need for IO calls view. And also
// for expand_call and find_call, to not use cache on expand call
// and find_call
//set_record_call()
value = has_new_target
? new original(...args)
: original.apply(this, args)
console.log('value', value)
//const index = io_cache.calls.length
//if(value instanceof Promise) {
// value.finally(() => {
// console.log('resolved', index)
// io_cache.resolution_order.push(index)
// })
//} else {
// io_cache.resolution_order.push(index)
//}
/* TODO remove
if(value instanceof Promise) {
const original_value = value
value = new Promise((resolve, reject) => {
// TODO fix setTimeout.original
globalThis.setTimeout.original(
() => {
original_value.then(resolve, reject)
},
10
)
})
}
*/
// TODO
//if(value instanceof Promise) {
// const make_cb = ok => value => {
// // TODO resolve promises in the same order they were resolved on
// initial execution
// }
// // TODO should we use promise_then or patched promise.then?
// promise_then.apply(value, make_cb(true), make_cb(false))
//}
ok = true
return value
} catch(e) {
error = e
ok = false
throw e
} finally {
io_cache.calls.push({
ok,
value,
error,
args,
name,
// To discern calls with and without the 'new' keyword, primarily for
// Date, which can be called with and without new
has_new_target,
use_context,
context: use_context ? this : undefined,
})
}
} else {
const call = io_cache.calls[io_cache_index++]
/*
TODO remove
console.log(
call == null
, call.has_new_target != (new.target != null)
, call.use_context && (call.context != this)
, call.name != name
, JSON.stringify(call.args) != JSON.stringify(args)
)
*/
if(
call == null
|| call.has_new_target != (new.target != null)
// TODO test
|| call.use_context && (call.context != this)
|| call.name != name
|| JSON.stringify(call.args) != JSON.stringify(args)
){
console.log('discard cache', call)
io_cache_is_replay_aborted = true
// Try to finish fast
throw new Error('io replay aborted')
} else {
console.log('cached call found', call)
if(call.ok) {
// TODO resolve promises in the same order they were resolved on
// initial execution
if(call.value instanceof Promise) {
const original_setTimeout = globalThis.setTimeout.original
return Promise.all([
call.value,
new Promise(resolve => original_setTimeout(
resolve,
10
))
]).then(([a,_]) => a)
// TODO remove
.then(x => {console.log('resolved',name); return x})
} else {
return call.value
}
} else {
throw call.error
}
}
}
}
Object.defineProperty(obj[method], 'name', {value: original.name})
obj[method].__original = original
}
function io_patch_remove(obj, method) {
if(obj == null || obj[method] == null) {
// Method is absent in current env, skip patching
return
}
obj[method] = obj[method].__original
}
const Response_methods = [
'arrayBuffer',
'blob',
'formData',
'json',
'text',
]
function apply_io_patches() {
io_patch(Math, 'random', 'Math.random')
// TODO test
const Date = globalThis.Date
io_patch(globalThis, 'Date', 'Date')
globalThis.Date.parse = Date.parse
globalThis.Date.now = Date.now
globalThis.Date.UTC = Date.UTC
globalThis.Date.length = Date.length
globalThis.Date.name = Date.name
io_patch(globalThis.Date, 'now', 'Date.now')
io_patch(globalThis, 'fetch', 'fetch')
// Check if Response is defined, for node.js
if(globalThis.Response != null) {
for(let key of Response_methods) {
io_patch(Response.prototype, key, 'Response.prototype.' + key, true)
}
}
//TODO
const setTimeout = globalThis.setTimeout
globalThis.setTimeout = function(cb, timeout) {
const timer_id = setTimeout(function(...args) {
console.log('timeout', timer_id)
cb(...args)
}, timeout)
console.log('setTimeout', timer_id)
return timer_id
}
globalThis.setTimeout.original = setTimeout
// TODO clearTimeout
}
function remove_io_patches() {
// TODO when to apply io_patches and promise_patches? Only once, when we
// create window?
io_patch_remove(Math, 'random')
io_patch_remove(globalThis, 'Date')
io_patch_remove(globalThis, 'fetch')
// Check if Response is defined, for node.js
if(globalThis.Response != null) {
for(let key of Response_methods) {
io_patch_remove(Response.prototype, key)
}
}
globalThis.setTimeout = globalThis.setTimeout.original
}
</script>
<script type='module'>
//import {ethers} from 'https://unpkg.com/ethers/dist/ethers.esm.js'
import {ethers} from './ethers.js'
async function run() {
const URL = 'https://ethereum-goerli-rpc.allthatnode.com'
const p = ethers.getDefaultProvider(URL)
const latest = await p.getBlock()
const txs = await Promise.all(latest.transactions.slice(0,1).map(async (t, i) => {
console.error("GETTING RECEIPT", i)
const result = await p.getTransactionReceipt(t)
console.error("GOT RECEIPT", i)
return result
}))
const totalGas = txs.reduce((gas,tx) =>
gas.add(tx.gasUsed), ethers.BigNumber.from(0))
console.log('GAS', totalGas.add(3))
}
apply_io_patches()
await run()
io_cache_is_recording = false
console.error('REPLAY')
await run()
</script>

record_io/ethers.js (new file, 24280 lines)

File diff suppressed because one or more lines are too long

record_io/fetch.html (new file, 28 lines)

@@ -0,0 +1,28 @@
<script type='module'>
const original = globalThis.fetch
globalThis.fetch = function(...args) {
console.log('fetch called')
return original.apply(null, args)
}
for(let key of [
'arrayBuffer',
'blob',
'formData',
'json',
'text',
]) {
let original = Response.prototype[key]
Response.prototype[key] = function(...args){
console.log('key called', key)
return original.apply(this, args)
}
}
console.log((await (await fetch('/')).text()).length)
</script>


@@ -0,0 +1,26 @@
console.log('start')
let r
const x = new Promise(resolve => r = resolve).then(() => {console.log('resolved')})
console.log('before resolve')
r()
console.log('after resolve')
/*
console.log('start')
Promise.resolve().then(() => {
console.log('1')
Promise.resolve().then(() => {
console.log('2')
})
})
console.log('end')
Promise.resolve().then(() => {
console.log('3')
Promise.resolve().then(() => {
console.log('4')
})
})
*/

record_io/new.js (new file, 16 lines)

@@ -0,0 +1,16 @@
/*
function f() {
console.log('n', new.target)
}
f()
new f()
*/
const f = new Function(`
return arguments.length
`)
console.log(f(1,2,3))
console.log(f(1,2,3,4))

record_io/promise.js (new file, 33 lines)

@@ -0,0 +1,33 @@
//let value = Promise.reject(1)
/*
value.then(
() => console.log('res'),
() => console.log('rej'),
)
*/
/*
const original_value = value
value = new Promise((resolve, reject) => {
globalThis.setTimeout(
() => {
console.log('timeout')
original_value.then(resolve, reject)
},
1000
)
})
try {
console.log(await value)
} catch(e) {
console.log('ERROR', e)
}
*/
const t = globalThis.setTimeout
t(() => console.log('timeout'), 100)

record_io/timelime (new file, 73 lines)

@@ -0,0 +1,73 @@
Timeline Replay
call a() call a()
resolve a()
call b()
resolve b()
call c()
resolve c()
Timeline Replay
resolution_index = 0, io_index = 0
call a() call a: return promise
compare resolutions[resolution_index] with io_index
io_index < resolutions[0]
do not resolve
io_index++
call b() call b: return promise
compare resolutions[0] && io_index
io_index < resolutions[0]
do not resolve
call c() call c: return promise
resolve c()
resolve b()
resolve a()
resolutions: [
3,
2,
1,
]
We do the replay. Events come in as function calls. We intercept each call, return a promise, and resolve whichever promise needs to be resolved at that moment. For example, in the example above we resolve a() after c() is called, and we resolve b() after c() resolves. That is, we can resolve several promises at once.
Record: [
call a
resolve a
call b
resolve b
]
Replay: [
call a
see that a promise is returned, arm the resolver
the resolver fired
resolve a
call b
see that a promise is returned, arm the resolver
the resolver fired
resolve b
]
call
resolve
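
A minimal sketch of the replay scheme described in these notes, assuming a cache of {type: 'call'} and {type: 'resolution', index} events recorded in the order they happened (the helper names here are illustrative; the actual logic lives in src/record_io.js):

// On replay, a cached async call returns a fresh promise; those promises are
// later resolved in the recorded order by draining 'resolution' events
const resolvers = new Map()   // cache index of a call -> its resolve function

const replay_call = (cxt, call) => {
  const my_index = cxt.io_cache_index++
  if(call.value instanceof Promise) {
    // Do not resolve yet, wait until the recorded resolution order says so
    return new Promise(resolve => resolvers.set(my_index, resolve))
  }
  return call.value
}

const drain_resolutions = cxt => {
  // Runs from a real (unpatched) setTimeout after the synchronous part of the
  // replay: resolve every promise whose 'resolution' event comes next
  while(
    cxt.io_cache_index < cxt.io_cache.length
    && cxt.io_cache[cxt.io_cache_index].type == 'resolution'
  ) {
    const resolution = cxt.io_cache[cxt.io_cache_index++]
    const resolve = resolvers.get(resolution.index)
    resolve(cxt.io_cache[resolution.index].value)
  }
}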


@@ -52,6 +52,7 @@ const apply_eval_result = (state, eval_result) => {
log_position: null
},
modules: eval_result.modules,
io_cache: eval_result.io_cache,
}
}
@@ -197,6 +198,7 @@ const external_imports_loaded = (
external_imports,
state.on_deferred_call,
state.calltree_changed_token,
state.io_cache,
)
toplevel = true
} else {
@@ -205,6 +207,7 @@ const external_imports_loaded = (
external_imports,
state.on_deferred_call,
state.calltree_changed_token,
state.io_cache,
{index: node.index, module: state.current_module},
)
toplevel = false


@@ -287,6 +287,7 @@ export const eval_modules = (
external_imports,
on_deferred_call,
calltree_changed_token,
io_cache,
location
) => {
// TODO gensym __cxt, __trace, __trace_call
@@ -295,6 +296,23 @@ export const eval_modules = (
const is_async = has_toplevel_await(parse_result.modules)
const Function = is_async
? globalThis.run_window.eval('(async function(){})').constructor
: globalThis.run_window.Function
const module_fns = parse_result.sorted.map(module => (
{
module,
fn: new Function(
'__cxt',
'__trace',
'__trace_call',
'__do_await',
codegen(parse_result.modules[module], {module})
)
}
))
const cxt = {
modules: external_imports == null
? {}
@@ -325,24 +343,7 @@ export const eval_modules = (
Promise: globalThis.run_window.Promise,
}
const Function = is_async
? globalThis.run_window.eval('(async function(){})').constructor
: globalThis.run_window.Function
const module_fns = parse_result.sorted.map(module => (
{
module,
fn: new Function(
'__cxt',
'__trace',
'__trace_call',
'__do_await',
codegen(parse_result.modules[module], {module})
)
}
))
const result = run(module_fns, cxt)
const result = run(module_fns, cxt, io_cache)
const make_result = result => ({
modules: result.modules,
@@ -350,14 +351,14 @@ export const eval_modules = (
eval_cxt: result.eval_cxt,
calltree: assign_code(parse_result.modules, result.calltree),
call: result.call && assign_code(parse_result.modules, result.call),
io_cache: result.eval_cxt.io_cache,
})
if(result.then != null) {
if(is_async) {
return result.then(make_result)
} else {
return make_result(result)
}
}
export const eval_find_call = (cxt, parse_result, calltree, location) => {


@@ -1 +1,4 @@
export const globals = new Set(Object.getOwnPropertyNames(globalThis))
// Not available in node.js, but added so it can be used in tests
globals.add('fetch')

src/record_io.js (new file, 268 lines)

@@ -0,0 +1,268 @@
import {set_record_call} from './runtime.js'
const io_patch = (cxt, obj, method, name, use_context = false) => {
if(obj == null || obj[method] == null) {
// Method is absent in current env, skip patching
return
}
const original = obj[method]
obj[method] = function(...args) {
// TODO guard calls from prev run
console.error('patched method', name, {
io_cache_is_recording: cxt.io_cache_is_recording,
io_cache_is_replay_aborted: cxt.io_cache_is_replay_aborted,
io_cache_index: cxt.io_cache_is_recording
? cxt.io_cache.length
: cxt.io_cache_index
})
// TODO guard that in find_call io methods are not called?
// if(searched_location != null) {
// throw new Error('illegal state')
// }
if(cxt.io_cache_is_replay_aborted) {
// Try to finish fast
throw new Error('io recording aborted')
} else if(cxt.io_cache_is_recording) {
let ok, value, error
const has_new_target = new.target != null
try {
// TODO. Do we need it here? Only need for IO calls view. And also
// for expand_call and find_call, to not use cache on expand call
// and find_call
set_record_call(cxt)
const index = cxt.io_cache.length
if(name == 'setTimeout') {
args = args.slice()
// Patch callback
const cb = args[0]
args[0] = function() {
// TODO guard calls from prev runs
// TODO guard io_cache_is_replay_aborted
cxt.io_cache.push({type: 'resolution', index})
cb()
}
}
value = has_new_target
? new original(...args)
: original.apply(this, args)
console.log('value', value)
if(value instanceof Promise) {
// TODO use native .finally for promise, not patched then?
value.finally(() => {
// TODO guard calls from prev runs
// TODO guard io_cache_is_replay_aborted
cxt.io_cache.push({type: 'resolution', index})
})
}
ok = true
return value
} catch(e) {
error = e
ok = false
throw e
} finally {
cxt.io_cache.push({
type: 'call',
name,
ok,
value,
error,
args,
// To discern calls with and without the 'new' keyword, primarily for
// Date, which can be called with and without new
has_new_target,
use_context,
context: use_context ? this : undefined,
})
}
} else {
const call = cxt.io_cache[cxt.io_cache_index]
/*
TODO remove
console.log(
call.type != 'call'
, call == null
, call.has_new_target != (new.target != null)
, call.use_context && (call.context != this)
, call.name != name
, JSON.stringify(call.args) != JSON.stringify(args)
)
*/
// TODO if call.type != 'call', and there are no more calls, should
// we abort, or just record one more call?
if(
call == null
|| call.type != 'call'
|| call.has_new_target != (new.target != null)
// TODO test
|| call.use_context && (call.context != this)
|| call.name != name
|| (
// TODO for setTimeout, compare last arg (timeout)
name != 'setTimeout'
&&
JSON.stringify(call.args) != JSON.stringify(args)
)
){
console.log('discard cache', call)
cxt.io_cache_is_replay_aborted = true
// Try to finish fast
throw new Error('io replay aborted')
} else {
console.log('cached call found', call)
const next_resolution = cxt.io_cache.find((e, i) =>
e.type == 'resolution' && i > cxt.io_cache_index
)
if(next_resolution != null && !cxt.io_cache_resolver_is_set) {
console.error('set resolver')
const original_setTimeout = globalThis.setTimeout.__original
cxt.io_cache_resolver_is_set = true
original_setTimeout(() => {
// TODO guard from previous run
console.error('resolver', {
io_cache_is_replay_aborted: cxt.io_cache_is_replay_aborted,
io_cache_index: cxt.io_cache_index,
})
cxt.io_cache_resolver_is_set = false
// TODO check if call from prev run
if(cxt.io_cache_is_replay_aborted) {
return
}
if(cxt.io_cache_index >= cxt.io_cache.length) {
// TODO Do nothing or what?
// Should not happen
throw new Error('illegal state')
} else {
const next_event = cxt.io_cache[cxt.io_cache_index]
if(next_event.type == 'call') {
// TODO Call not happened, replay?
cxt.io_cache_is_replay_aborted = true
} else {
while(
cxt.io_cache_index < cxt.io_cache.length
&&
cxt.io_cache[cxt.io_cache_index].type == 'resolution'
) {
const resolution = cxt.io_cache[cxt.io_cache_index]
const resolver = cxt.io_cache_resolvers.get(resolution.index)
cxt.io_cache_index++
if(cxt.io_cache[resolution.index].name == 'setTimeout') {
resolver()
} else {
resolver(cxt.io_cache[resolution.index].value)
}
console.log('RESOLVE', cxt.io_cache_index, resolution.index)
}
}
}
}, 0)
}
cxt.io_cache_index++
if(call.ok) {
// TODO resolve promises in the same order they were resolved on
// initial execution
if(call.value instanceof Promise) {
return new Promise(resolve => {
cxt.io_cache_resolvers.set(cxt.io_cache_index - 1, resolve)
})
} else if(name == 'setTimeout') {
const timeout_cb = args[0]
cxt.io_cache_resolvers.set(cxt.io_cache_index - 1, timeout_cb)
return call.value
} else {
return call.value
}
} else {
throw call.error
}
}
}
}
Object.defineProperty(obj[method], 'name', {value: original.name})
obj[method].__original = original
}
const io_patch_remove = (obj, method) => {
if(obj == null || obj[method] == null) {
// Method is absent in current env, skip patching
return
}
obj[method] = obj[method].__original
}
const Response_methods = [
'arrayBuffer',
'blob',
'formData',
'json',
'text',
]
export const apply_io_patches = cxt => {
io_patch(cxt, Math, 'random', 'Math.random')
io_patch(cxt, globalThis, 'setTimeout', 'setTimeout')
// TODO test
io_patch(cxt, globalThis, 'clearTimeout', 'clearTimeout')
// TODO test
const Date = globalThis.Date
io_patch(cxt, globalThis, 'Date', 'Date')
globalThis.Date.parse = Date.parse
globalThis.Date.now = Date.now
globalThis.Date.UTC = Date.UTC
io_patch(cxt, globalThis.Date, 'now', 'Date.now')
io_patch(cxt, globalThis, 'fetch', 'fetch')
// Check if Response is defined, for node.js
if(globalThis.Response != null) {
for(let key of Response_methods) {
io_patch(cxt, Response.prototype, key, 'Response.prototype.' + key, true)
}
}
}
export const remove_io_patches = cxt => {
// TODO when to apply io_patches and promise_patches? Only once, when we
// create window?
io_patch_remove(Math, 'random')
io_patch_remove(globalThis, 'setTimeout')
// TODO test
io_patch_remove(globalThis, 'clearTimeout')
io_patch_remove(globalThis, 'Date')
io_patch_remove(globalThis, 'fetch')
// Check if Response is defined, for node.js
if(globalThis.Response != null) {
for(let key of Response_methods) {
io_patch_remove(Response.prototype, key)
}
}
}


@@ -1,3 +1,5 @@
import {apply_io_patches, remove_io_patches} from './record_io.js'
/*
Converts a generator-returning function to a promise-returning function. Allows
the same code to be used for both sync and async. If we have only sync modules (no
@@ -27,10 +29,27 @@ const gen_to_promise = gen_fn => {
}
}
export const run = gen_to_promise(function*(module_fns, cxt){
const do_run = function*(module_fns, cxt, io_cache){
let calltree
cxt = io_cache == null
// TODO move all io_cache properties to the object?
? {...cxt,
io_cache_is_recording: true,
io_cache: [],
}
: {...cxt,
io_cache_is_recording: false,
io_cache,
io_cache_resolver_is_set: false,
// Map of (index in io_cache) -> resolve
io_cache_resolvers: new Map(),
io_cache_is_replay_aborted: false,
io_cache_index: 0,
}
apply_promise_patch(cxt)
apply_io_patches(cxt)
for(let {module, fn} of module_fns) {
cxt.found_call = null
@@ -60,6 +79,7 @@ export const run = gen_to_promise(function*(module_fns, cxt){
cxt.logs = []
cxt.children = null
remove_io_patches(cxt)
remove_promise_patch(cxt)
cxt.searched_location = null
@@ -73,6 +93,20 @@ export const run = gen_to_promise(function*(module_fns, cxt){
logs: _logs,
eval_cxt: cxt,
}
}
export const run = gen_to_promise(function*(module_fns, cxt, io_cache) {
const result = yield* do_run(module_fns, cxt, io_cache)
if(result.eval_cxt.io_cache_is_replay_aborted) {
// TODO test next line
result.eval_cxt.is_recording_deferred_calls = false
// run again without io cache
return yield* do_run(module_fns, cxt, null)
} else {
return result
}
})
const apply_promise_patch = cxt => {
@@ -113,7 +147,7 @@ const remove_promise_patch = cxt => {
cxt.Promise.prototype.then = cxt.promise_then
}
const set_record_call = cxt => {
export const set_record_call = cxt => {
for(let i = 0; i < cxt.stack.length; i++) {
cxt.stack[i] = true
}
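
The gen_to_promise helper referenced in the comment at the top of this file can be pictured roughly as follows (a sketch under assumed semantics, not the exact implementation): it drives the generator and only goes through promises when a yielded value actually is one, so a fully synchronous run returns a plain value instead of a promise.

const gen_to_promise_sketch = gen_fn => (...args) => {
  const gen = gen_fn(...args)
  const step = input => {
    const {value, done} = gen.next(input)
    if(done) {
      return value
    }
    return value instanceof Promise
      // async path: keep driving the generator after the promise resolves
      ? value.then(step)
      // sync path: feed the value back immediately, no promise is created
      : step(value)
  }
  return step(undefined)
}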


@@ -21,6 +21,7 @@ import {
test_initial_state, test_initial_state_async,
test_deferred_calls_state,
print_debug_ct_node,
command_input_async,
} from './utils.js'
export const tests = [
@@ -2957,17 +2958,9 @@ const y = x()`
}
await f()
`
const {state: after_edit} = COMMANDS.input(i, code2, code2.indexOf('1'))
const result = await after_edit.eval_modules_state.promise
const after_edit_finished = COMMANDS.eval_modules_finished(
after_edit,
after_edit,
result,
after_edit.eval_modules_state.node,
after_edit.eval_modules_state.toplevel
)
assert_equal(after_edit_finished.active_calltree_node.fn.name, 'f')
assert_equal(after_edit_finished.value_explorer.result.value, 1)
const next = await command_input_async(i, code2, code2.indexOf('1'))
assert_equal(next.active_calltree_node.fn.name, 'f')
assert_equal(next.value_explorer.result.value, 1)
}),
test('async/await move_cursor', async () => {
@@ -3019,4 +3012,135 @@ const y = x()`
// No assertion, must not throw
}),
test('record io', () => {
const random = globalThis.run_window.Math.random
// Patch Math.random to always return 1
Object.assign(globalThis.run_window.Math, {random: () => 1})
const initial = test_initial_state(`
const x = Math.random()
`)
// Now the call to Math.random is cached; break it to ensure it is not called
// on the next run
Object.assign(globalThis.run_window.Math, {random: () => { throw 'fail' }})
const next = COMMANDS.input(initial, `const x = Math.random()*2`, 0).state
assert_equal(next.value_explorer.result.value, 2)
// Patch Math.random to return 2. Now the first call to Math.random() is
// cached with value 1, and the second should return 2
Object.assign(globalThis.run_window.Math, {random: () => 2})
const replay_failed = COMMANDS.input(
initial,
`const x = Math.random() + Math.random()`,
0
).state
// TODO must reuse first cached call?
assert_equal(replay_failed.value_explorer.result.value, 4)
// Remove patch
Object.assign(globalThis.run_window.Math, {random})
}),
test('record io preserve promise resolution order', async () => {
const original_fetch = globalThis.run_window.fetch
// Generate a fetch function whose calls get resolved in reverse order
const {fetch, resolve} = new Function(`
const calls = []
return {
fetch(...args) {
let resolver
const promise = new Promise(r => resolver = r)
calls.push({resolver, promise, args})
console.log('patched fetch called')
return promise
},
resolve() {
console.log('resolve', calls);
[...calls].reverse().forEach(call => call.resolver(...call.args))
},
}
`)()
// Patch fetch
Object.assign(globalThis.run_window, {fetch})
const initial_promise = test_initial_state_async(`
const result = {}
await Promise.all(
[1, 2, 3].map(async v => Object.assign(result, {value: await fetch(v)}))
)
console.log(result)
`)
resolve()
const initial = await initial_promise
// Calls to fetch are resolved in reverse order, so the first call wins
assert_equal(initial.logs.logs[0].args[0].value, 1)
// Break fetch to ensure it does not get called anymore
Object.assign(globalThis.run_window, {fetch: () => {throw 'broken'}})
const with_cache = await command_input_async(
initial,
`
const result = {}
await Promise.all(
[1, 2, 3].map(async v =>
Object.assign(result, {value: await fetch(v)})
)
)
console.log(result)
`,
0
)
// Cached calls to fetch should be resolved in the same (reverse) order as
// on the first run, so the first call wins
assert_equal(with_cache.logs.logs[0].args[0].value, 1)
// Remove patch
Object.assign(globalThis.run_window, {fetch: original_fetch})
}),
test('record io setTimeout', async () => {
const i = await test_initial_state_async(`
const delay = timeout => new Promise(resolve =>
setTimeout(() => resolve(1), timeout)
)
console.log(await delay(0))
`)
assert_equal(i.io_cache != null, true)
assert_equal(i.logs.logs[0].args[0], 1)
const code2 = `
const delay = timeout => new Promise(resolve =>
setTimeout(() => resolve(10), timeout)
)
console.log(await delay(0))
`
console.log('CODE2', code2.slice(75))
const next = await command_input_async(i, code2, 0)
// Assert cache was used
// TODO check that items were not appended
assert_equal(next.io_cache == i.io_cache, true)
assert_equal(next.logs.logs[0].args[0], 10)
}),
// TODO test resolution order with sync functions (Date, Math.random)
]


@@ -39,7 +39,7 @@ export const assert_code_error = (codestring, error) => {
export const assert_code_evals_to_async = async (codestring, expected) => {
const s = await test_initial_state_async(codestring)
const frame = active_frame(s)
const result = frame.children[frame.children.length - 1].result
const result = frame.children.at(-1).result
assert_equal(result.ok, true)
assert_equal(result.value, expected)
}
@@ -78,6 +78,18 @@ export const test_initial_state_async = async code => {
)
}
export const command_input_async = async (...args) => {
const after_input = COMMANDS.input(...args).state
const result = await after_input.eval_modules_state.promise
return COMMANDS.eval_modules_finished(
after_input,
after_input,
result,
after_input.eval_modules_state.node,
after_input.eval_modules_state.toplevel
)
}
export const test_deferred_calls_state = code => {
const {get_deferred_call, on_deferred_call} = (new Function(`
let args