finishing record io

Dmitry Vasilev
2023-02-14 18:03:10 +08:00
parent 6c82e78a0f
commit e7d4fce372
10 changed files with 130 additions and 96 deletions

View File

@@ -52,7 +52,11 @@ const apply_eval_result = (state, eval_result) => {
log_position: null
},
modules: eval_result.modules,
io_cache: eval_result.io_cache,
io_cache:
(eval_result.io_cache == null || eval_result.io_cache.length == 0)
// If new cache is empty, reuse previous cache
? state.io_cache
: eval_result.io_cache
}
}
@@ -768,7 +772,6 @@ const on_deferred_call = (state, call, calltree_changed_token, logs) => {
}
}
// TODO test
const clear_io_cache = state => {
return run_code({...state, io_cache: null})
}
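A minimal sketch (not part of the diff) of the reuse rule added above, with a hypothetical state and eval_result: an empty or missing io_cache coming back from eval never overwrites a previously recorded cache, so replay stays possible until clear_io_cache re-runs with io_cache: null.
const prev_state = {io_cache: [{type: 'call', name: 'random', args: [], value: 0.42}]}
const next_state = apply_eval_result(prev_state, {modules: {}, io_cache: []})
// next_state.io_cache === prev_state.io_cache -> the old recording is kept
// clear_io_cache(next_state) would run the code again with io_cache: null,
// recording a fresh cache on that run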

View File

@@ -36,12 +36,6 @@ export class CallTree {
this.ui.editor.focus()
}
/* TODO test
if(e.key == 'F3') {
this.ui.set_active_tab('logs')
}
*/
if(e.key == 'a') {
if(FLAGS.embed_value_explorer) {
exec('calltree.select_arguments')

View File

@@ -2,8 +2,6 @@ import {header, stringify_for_header} from './value_explorer.js'
import {el} from './domutils.js'
import {has_error} from '../calltree.js'
// TODO render grey items that were not used in the run
export class IO_Cache {
constructor(ui, el) {
this.el = el
@@ -22,15 +20,39 @@ export class IO_Cache {
})
}
render_io_cache(items) {
clear() {
this.el.innerHTML = ''
for(let item of items) {
this.is_rendered = false
}
render_io_cache(state, force) {
if(force) {
this.is_rendered = false
}
if(this.is_rendered) {
return
}
this.is_rendered = true
this.el.innerHTML = ''
const items = state.io_cache ?? []
// Number of items that were used during execution
const used_count = state.eval_cxt.io_cache_index ?? items.length
for(let i = 0; i < items.length; i++) {
const item = items[i]
if(item.type == 'resolution') {
continue
}
const is_used = i < used_count
this.el.appendChild(
el('div',
'call_header ' + (has_error(item) ? 'error' : ''),
'call_header '
+ (has_error(item) ? 'error ' : '')
+ (is_used ? '' : 'native '),
item.name,
'(' ,
// TODO fn_link, like in ./calltree.js
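A sketch of what render_io_cache above produces for a hypothetical cache (entry shapes are approximate; io_cache_view stands for an IO_Cache instance): entries with index below eval_cxt.io_cache_index were consumed by the current run, later ones only exist from a previous recording and get the extra 'native' class, and 'resolution' entries are skipped.
const state = {
  io_cache: [
    {type: 'call', name: 'random', args: [], value: 0.1},
    {type: 'call', name: 'fetch', args: ['/api'], value: null},
    {type: 'resolution', index: 1},
  ],
  eval_cxt: {io_cache_index: 1},
}
io_cache_view.render_io_cache(state, true)
// -> div.call_header          random(...)   used by this run
//    div.call_header.native   fetch(...)    recorded earlier, not replayed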

View File

@@ -22,12 +22,6 @@ export class Logs {
this.ui.editor.focus_value_explorer(this.el)
}
/* TODO test
if(e.key == 'F2') {
this.ui.set_active_tab('calltree')
}
*/
if(e.key == 'F3') {
this.ui.editor.focus()
}

View File

@@ -217,6 +217,11 @@ export class UI {
this.tabs[tab_id].classList.add('active')
Object.values(this.debugger).forEach(el => el.style.display = 'none')
this.debugger[tab_id].style.display = 'block'
if(tab_id == 'io_cache') {
this.io_cache.render_io_cache(get_state(), false)
}
if(!skip_focus) {
this.debugger[tab_id].focus()
}
@@ -304,12 +309,16 @@ export class UI {
this.calltree.render_calltree(state)
this.logs.render_logs(null, state.logs)
}
// render lazily
// TODO
//if(this.active_tab == 'io_cache') {
this.io_cache.render_io_cache(state.io_cache)
//}
render_io_cache(state) {
// render lazily, only if selected
if(this.active_tab == 'io_cache') {
this.io_cache.render_io_cache(state, true)
} else {
// Do not render until the user switches to the tab
this.io_cache.clear()
}
}
render_problems(problems) {
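A sketch of how the two entry points above are meant to interact (ui, state and next_state are hypothetical):
// state changed while another tab is visible: only clear, render nothing yet
ui.render_io_cache(state)        // active_tab != 'io_cache' -> io_cache.clear()
// user opens the IO tab: render on demand; force = false is a no-op if the
// panel is still marked as rendered
ui.set_active_tab('io_cache')    // -> render_io_cache(get_state(), false)
// state changed while the IO tab is visible: force a full re-render
ui.render_io_cache(next_state)   // active_tab == 'io_cache' -> render(..., true)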

src/effects.js
View File

@@ -197,7 +197,7 @@ export const render_common_side_effects = (prev, next, command, ui) => {
if(prev.parse_result != next.parse_result) {
render_parse_result(ui, next)
}
if(!next.parse_result.ok) {
ui.calltree.clear_calltree()
@@ -225,7 +225,16 @@ export const render_common_side_effects = (prev, next, command, ui) => {
clear_coloring(ui)
render_coloring(ui, next)
ui.logs.rerender_logs(next.logs)
if(
prev.io_cache != next.io_cache
||
prev.eval_cxt?.io_cache_index != next.eval_cxt.io_cache_index
) {
ui.render_io_cache(next)
}
}
} else {
if(get_deferred_calls(prev) == null && get_deferred_calls(next) != null) {
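The io_cache guard above re-renders the panel in exactly two situations; a sketch with hypothetical prev/next states:
// 1. a new cache was recorded during this run
prev = {io_cache: old_cache, eval_cxt: {io_cache_index: 3}}
next = {io_cache: new_cache, eval_cxt: {io_cache_index: 0}}   // -> ui.render_io_cache(next)
// 2. the same cache was replayed to a different depth
prev = {io_cache: cache, eval_cxt: {io_cache_index: 1}}
next = {io_cache: cache, eval_cxt: {io_cache_index: 2}}       // -> ui.render_io_cache(next)
// anything else leaves the panel's DOM untouched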

View File

@@ -519,6 +519,7 @@ const do_eval_frame_expr = (node, scope, callsleft, context) => {
const value = children.reduce(
(arr, el) => {
if(el.type == 'spread') {
// TODO check if iterable and throw error
return [...arr, ...el.children[0].result.value]
} else {
return [...arr, el.result.value]
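A worked example of the reduce above, which flattens spread elements while building the array value (values made up):
// source:           [1, ...xs, 4]   with xs == [2, 3]
// children results: 1, spread of [2, 3], 4
// reduce steps:     [] -> [1] -> [1, 2, 3] -> [1, 2, 3, 4]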

View File

@@ -1,7 +1,5 @@
import {set_record_call} from './runtime.js'
// TODO remove all console.log
const get_object_to_patch = (cxt, path) => {
let obj = cxt.window
for(let i = 0; i < path.length - 1; i++) {
@@ -22,33 +20,28 @@ const io_patch = (cxt, path, use_context = false) => {
const original = obj[method]
obj[method] = function(...args) {
// TODO if called from previous version of code (calltree_changed_token is
// different), then do not call IO function and throw error to finish
// previous run ASAP
// TODO remove
/*
console.error('patched method', name, {
io_cache_is_recording: cxt.io_cache_is_recording,
io_cache_is_replay_aborted: cxt.io_cache_is_replay_aborted,
io_cache_index: cxt.io_cache_is_recording
? cxt.io_cache.length
: cxt.io_cache_index
})
*/
// TODO if called from prev execution, then throw to finish it
// ASAP
if(cxt.io_cache_is_replay_aborted) {
// Try to finish fast
throw new Error('io replay aborted')
}
const has_new_target = new.target != null
if(cxt.is_recording_deferred_calls) {
return has_new_target
? new original(...args)
: original.apply(this, args)
}
if(cxt.io_cache_is_recording) {
let ok, value, error
const has_new_target = new.target != null
try {
// TODO. Do we need it here? Only need for IO calls view. And also
// for expand_call and find_call, to not use cache on expand call
// and find_call
// Save the call so that IO functions are not called again on expand_call
// and find_call.
// TODO: there is a problem when an IO function is called from a third-party
// lib and the async context is lost
set_record_call(cxt)
const index = cxt.io_cache.length
@@ -57,27 +50,32 @@ const io_patch = (cxt, path, use_context = false) => {
args = args.slice()
// Patch callback
const cb = args[0]
args[0] = function() {
// TODO guard calls from prev runs
// TODO guard io_cache_is_replay_aborted
args[0] = Object.defineProperty(function() {
// TODO if called from prev execution, then throw to
// finish it ASAP
if(cxt.io_cache_is_replay_aborted) {
// Not necessary
return
}
cxt.io_cache.push({type: 'resolution', index})
cb()
}
}, 'name', {value: cb.name})
}
value = has_new_target
? new original(...args)
: original.apply(this, args)
// TODO remove
//console.log('value', value)
if(value instanceof cxt.window.Promise) {
// TODO use cxt.promise_then instead of finally, which calls the
// patched 'then'?
value = value.finally(() => {
// TODO guard calls from prev runs
// TODO guard io_cache_is_replay_aborted
// TODO if called from prev execution, then throw to
// finish it ASAP
if(cxt.io_cache_is_replay_aborted) {
// Not necessary
return
}
cxt.io_cache.push({type: 'resolution', index})
})
}
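For reference, a sketch of the cache the recording branch above would produce for code like setTimeout(cb, 100) once the callback has fired (the exact entry shape is an assumption based on the fields read elsewhere in this file):
// cxt.io_cache is roughly:
[
  {type: 'call', name: 'setTimeout', ok: true, value: 1 /* timer id */,
   args: [/* recorded callback */, 100], has_new_target: false},
  {type: 'resolution', index: 0},  // pushed just before the recorded callback runs
]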
@@ -106,65 +104,47 @@ const io_patch = (cxt, path, use_context = false) => {
} else {
const call = cxt.io_cache[cxt.io_cache_index]
/* TODO remove
console.log(
call.type != 'call'
, call == null
, call.has_new_target != (new.target != null)
, call.use_context && (call.context != this)
, call.name != name
, JSON.stringify(call.args) != JSON.stringify(args)
)
*/
// TODO if call.type != 'call', and there are no more calls, should
// we abort, or just record one more call?
// TODO if call == null or call.type == 'resolution', then do not discard
// cache, instead switch to record mode and append new calls to the
// cache?
if(
call == null
|| call.type != 'call'
|| call.has_new_target != (new.target != null)
// TODO test
|| call.has_new_target != has_new_target
|| call.use_context && (call.context != this)
|| call.name != name
|| (
// TODO for setTimeout, compare last arg (timeout)
name != 'setTimeout'
&&
JSON.stringify(call.args) != JSON.stringify(args)
(name == 'setTimeout' && (args[1] != call.args[1])) /* compares the timeout */
||
(
name != 'setTimeout'
&&
JSON.stringify(call.args) != JSON.stringify(args)
)
)
){
//TODO remove console.error('DISCARD cache', call)
cxt.io_cache_is_replay_aborted = true
// Try to finish fast
throw new Error('io replay aborted')
} else {
// TODO remove console.log('cached call found', call)
const next_resolution = cxt.io_cache.find((e, i) =>
e.type == 'resolution' && i > cxt.io_cache_index
)
if(next_resolution != null && !cxt.io_cache_resolver_is_set) {
console.error('set resolver')
const original_setTimeout = cxt.window.setTimeout.__original
cxt.io_cache_resolver_is_set = true
original_setTimeout(() => {
// TODO if called from prev execution, then throw to finish it ASAP
if(cxt.io_cache_is_replay_aborted) {
console.error('RESOLVER ABORTED')
return
}
// TODO guard from previous run
console.error('resolver', {
io_cache_is_replay_aborted: cxt.io_cache_is_replay_aborted,
io_cache_index: cxt.io_cache_index,
})
cxt.io_cache_resolver_is_set = false
// TODO check if call from prev run
// Sanity check
if(cxt.io_cache_index >= cxt.io_cache.length) {
throw new Error('illegal state')
@@ -172,7 +152,6 @@ const io_patch = (cxt, path, use_context = false) => {
const next_event = cxt.io_cache[cxt.io_cache_index]
if(next_event.type == 'call') {
// TODO Call not happened, replay?
cxt.io_cache_is_replay_aborted = true
} else {
while(
@@ -190,7 +169,6 @@ const io_patch = (cxt, path, use_context = false) => {
} else {
resolver(cxt.io_cache[resolution.index].value)
}
// TODO remove console.log('RESOLVE', cxt.io_cache_index, resolution.index)
}
}
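A sketch of the matching rule above: for setTimeout only the delay (args[1]) is compared, because the callback is a new closure on every run and would never stringify equal; any other mismatch sets io_cache_is_replay_aborted and the cache is discarded.
// cached during recording: {type: 'call', name: 'setTimeout', args: [cb, 100], ...}
setTimeout(() => console.log('hi'), 100)   // delay matches -> served from cache
setTimeout(() => console.log('hi'), 200)   // delay differs -> replay aborted,
                                           //    the code runs again in recording mode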
@@ -243,15 +221,20 @@ const Response_methods = [
'text',
]
// TODO bare IO functions should not be exposed at all, so they can only be
// called via the patched versions. Especially setInterval, which can cause leaks
export const apply_io_patches = cxt => {
io_patch(cxt, ['Math', 'random'])
io_patch(cxt, ['setTimeout'])
// TODO test
// TODO if the code calls setTimeout and then clearTimeout, that gets cached; if
// the clearTimeout call is then removed so only setTimeout remains, the
// callback would never be called when replaying from cache
io_patch(cxt, ['clearTimeout'])
// TODO test
// TODO patch setInterval to only clean up all intervals on finish
const Date = cxt.window.Date
io_patch(cxt, ['Date'])
cxt.window.Date.parse = Date.parse
@@ -273,7 +256,6 @@ export const remove_io_patches = cxt => {
io_patch_remove(cxt, ['Math', 'random'])
io_patch_remove(cxt, ['setTimeout'])
// TODO test
io_patch_remove(cxt, ['clearTimeout'])
io_patch_remove(cxt, ['Date'])
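Roughly what apply_io_patches and remove_io_patches above do to the globals of the run window; __original is the unpatched function that the cache resolver uses:
apply_io_patches(cxt)
cxt.window.setTimeout               // now the recording/replaying wrapper from io_patch
cxt.window.setTimeout.__original    // the native setTimeout, used by the resolver
remove_io_patches(cxt)              // puts the original globals back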

View File

@@ -33,8 +33,8 @@ const gen_to_promise = gen_fn => {
const do_run = function*(module_fns, cxt, io_cache){
let calltree
cxt = io_cache == null
// TODO move all io_cache properties to the object?
cxt = (io_cache == null || io_cache.length == 0)
// TODO group all io_cache_ properties into a single object?
? {...cxt,
io_cache_is_recording: true,
io_cache: [],
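The branch above picks between two context shapes; the recording one is visible in the hunk, the replay one is an assumption inferred from the replay code (io_cache_is_recording, io_cache_index):
// recording: no usable cache, every patched IO call appends an entry
cxt = {...cxt, io_cache_is_recording: true, io_cache: []}
// replay (assumed shape): non-empty cache, patched IO calls are served from it
cxt = {...cxt, io_cache_is_recording: false, io_cache, io_cache_index: 0}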

View File

@@ -3050,15 +3050,18 @@ const y = x()`
const next = COMMANDS.input(initial, `const x = Math.random()*2`, 0).state
assert_equal(next.value_explorer.result.value, 2)
assert_equal(next.eval_cxt.io_cache_index, 1)
// Patch Math.random to return 2. Now the first call to Math.random() is
// cached with value 1, and the second should return 2
// Patch Math.random to return 2.
// TODO The first call to Math.random() is cached with value 1, and the
// second should return 2
Object.assign(globalThis.run_window.Math, {random: () => 2})
const replay_failed = COMMANDS.input(
initial,
`const x = Math.random() + Math.random()`,
0
).state
// TODO must reuse first cached call?
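// The edit adds a second Math.random() call, so replaying the single cached
// call aborts, the snippet re-runs in recording mode against the patched
// Math.random (always 2), and the result is 2 + 2 = 4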
assert_equal(replay_failed.value_explorer.result.value, 4)
@@ -3211,4 +3214,21 @@ const y = x()`
false
)
}),
test('record io no io cache on deferred calls', async () => {
const code = `
const x = Math.random
export const fn = () => x()
`
const {state: i, on_deferred_call} = test_deferred_calls_state(code)
// Make deferred call
i.modules[''].fn()
const state = on_deferred_call(i)
// Deferred calls should not be recorded in the cache
assert_equal(state.eval_cxt.io_cache.length, 0)
}),
]