rename io_cache -> io_trace

This commit is contained in:
Dmitry Vasilev
2023-06-27 15:03:03 +03:00
parent cd290ba1dd
commit d1de58fe1c
10 changed files with 110 additions and 110 deletions

View File

@@ -119,11 +119,11 @@ served from service workers).
## IO
To provide interactive experience, Leporello.js caches calls to IO functions
made by your app and can later replay them from cache, allowing to program by
To provide interactive experience, Leporello.js traces calls to IO functions
made by your app and can later replay them from the trace, allowing you to program by
making small iterations on your code and instantly getting feedback.
Current list of builtin functions which calls are cached is:
Current list of builtin functions whose calls are traced is:
- `Date`
- `Math.random()`
- `fetch`
@@ -136,18 +136,18 @@ Current list of builtin functions which calls are cached is:
- `setTimeout`
- `clearTimeout`
Leporello.js caches all IO calls when the code is run for the first time. Then,
Leporello.js traces all IO calls when the code is run for the first time. Then,
every time you edit your code, Leporello.js tries to execute it, taking results
of IO calls from cache (it is called replay). Cached calls are stored in array.
of IO calls from the trace (it is called replay). Traced calls are stored in an array.
While replay, when IO call is made, Leporello.js takes next call from the
array, and checks if function and arguments are the same for current call and
cached call. If they are the same, then Leporello.js returns cached result. To
traced call. If they are the same, then Leporello.js returns the result from the trace. To
compare arguments for equality, Leporello.js uses deep equality comparison with
`JSON.stringify`. Otherwise, the cache gets discarded, and Leporello.js
executes code again, this time without cache, so the new cache array is
`JSON.stringify`. Otherwise, the trace gets discarded, and Leporello.js
executes code again, this time without the trace, so the new trace array is
populated.
If you want to bust cache manually, there is a button and a hotkey for this.
If you want to discard the trace manually, there is a button and a hotkey for this.
## Hotkeys

View File

@@ -145,7 +145,7 @@
overflow: auto;
}
.logs, .io_cache {
.logs, .io_trace {
padding-left: 1em;
}

View File

@@ -51,11 +51,11 @@ const apply_eval_result = (state, eval_result) => {
log_position: null
},
modules: eval_result.modules,
io_cache:
(eval_result.io_cache == null || eval_result.io_cache.length == 0)
// If new cache is empty, reuse previous cache
? state.io_cache
: eval_result.io_cache
io_trace:
(eval_result.io_trace == null || eval_result.io_trace.length == 0)
// If new trace is empty, reuse previous trace
? state.io_trace
: eval_result.io_trace
}
}
@@ -201,7 +201,7 @@ const external_imports_loaded = (
external_imports,
state.on_deferred_call,
state.calltree_changed_token,
state.io_cache,
state.io_trace,
)
toplevel = true
} else {
@@ -210,7 +210,7 @@ const external_imports_loaded = (
external_imports,
state.on_deferred_call,
state.calltree_changed_token,
state.io_cache,
state.io_trace,
{index: node.index, module: state.current_module},
)
toplevel = false
@@ -804,8 +804,8 @@ const on_deferred_call = (state, call, calltree_changed_token, logs) => {
}
}
const clear_io_cache = state => {
return run_code({...state, io_cache: null})
const clear_io_trace = state => {
return run_code({...state, io_trace: null})
}
const do_load_dir = (state, dir) => {
@@ -902,6 +902,6 @@ export const COMMANDS = {
external_imports_loaded,
eval_modules_finished,
on_deferred_call,
clear_io_cache,
clear_io_trace,
calltree: calltree_commands,
}

View File

@@ -2,7 +2,7 @@ import {header, stringify_for_header} from './value_explorer.js'
import {el} from './domutils.js'
import {has_error} from '../calltree.js'
export class IO_Cache {
export class IO_Trace {
constructor(ui, el) {
this.el = el
this.ui = ui
@@ -25,7 +25,7 @@ export class IO_Cache {
this.is_rendered = false
}
render_io_cache(state, force) {
render_io_trace(state, force) {
if(force) {
this.is_rendered = false
}
@@ -38,9 +38,9 @@ export class IO_Cache {
this.el.innerHTML = ''
const items = state.io_cache ?? []
const items = state.io_trace ?? []
// Number of items that were used during execution
const used_count = state.eval_cxt.io_cache_index ?? items.length
const used_count = state.eval_cxt.io_trace_index ?? items.length
for(let i = 0; i < items.length; i++) {
const item = items[i]

View File

@@ -3,7 +3,7 @@ import {Editor} from './editor.js'
import {Files} from './files.js'
import {CallTree} from './calltree.js'
import {Logs} from './logs.js'
import {IO_Cache} from './io_cache.js'
import {IO_Trace} from './io_trace.js'
import {el} from './domutils.js'
export class UI {
@@ -36,11 +36,11 @@ export class UI {
href: 'javascript: void(0)',
}, 'Logs (F3)')
),
this.tabs.io_cache = el('div', 'tab',
this.tabs.io_trace = el('div', 'tab',
el('a', {
click: () => this.set_active_tab('io_cache'),
click: () => this.set_active_tab('io_trace'),
href: 'javascript: void(0)',
}, 'IO cache (F4)')
}, 'IO trace (F4)')
),
this.entrypoint_select = el('div', 'entrypoint_select')
),
@@ -52,8 +52,8 @@ export class UI {
'class': 'tab_content logs',
tabindex: 0,
}),
this.debugger.io_cache = el('div', {
'class': 'tab_content io_cache',
this.debugger.io_trace = el('div', {
'class': 'tab_content io_trace',
tabindex: 0,
}),
),
@@ -87,9 +87,9 @@ export class UI {
el('a', {
'class': 'statusbar_action first',
href: 'javascript: void(0)',
click: () => exec('clear_io_cache')
click: () => exec('clear_io_trace')
},
'Clear IO cache (F6)'
'Clear IO trace (F6)'
),
el('a', {
@@ -157,11 +157,11 @@ export class UI {
}
if(e.key == 'F4'){
this.set_active_tab('io_cache')
this.set_active_tab('io_trace')
}
if(e.key == 'F6'){
exec('clear_io_cache')
exec('clear_io_trace')
}
if(e.key == 'F7'){
@@ -177,7 +177,7 @@ export class UI {
this.calltree = new CallTree(this, this.debugger.calltree)
this.logs = new Logs(this, this.debugger.logs)
this.io_cache = new IO_Cache(this, this.debugger.io_cache)
this.io_trace = new IO_Trace(this, this.debugger.io_trace)
// TODO jump to another module
// TODO use exec
@@ -207,8 +207,8 @@ export class UI {
Object.values(this.debugger).forEach(el => el.style.display = 'none')
this.debugger[tab_id].style.display = 'block'
if(tab_id == 'io_cache') {
this.io_cache.render_io_cache(get_state(), false)
if(tab_id == 'io_trace') {
this.io_trace.render_io_trace(get_state(), false)
}
if(!skip_focus) {
@@ -306,13 +306,13 @@ export class UI {
this.logs.render_logs(null, state.logs)
}
render_io_cache(state) {
render_io_trace(state) {
// render lazily, only if selected
if(this.active_tab == 'io_cache') {
this.io_cache.render_io_cache(state, true)
if(this.active_tab == 'io_trace') {
this.io_trace.render_io_trace(state, true)
} else {
// Do not render until user switch to the tab
this.io_cache.clear()
this.io_trace.clear()
}
}
@@ -367,8 +367,8 @@ export class UI {
['Focus console logs', 'F3'],
['Navigate console logs', '↑ ↓ or jk'],
['Leave console logs', 'F3 or Esc'],
['Focus IO cache', 'F4'],
['Leave IO cache', 'F4 or Esc'],
['Focus IO trace', 'F4'],
['Leave IO trace', 'F4 or Esc'],
['Jump to definition', 'F5', 'gd'],
['Expand selection to eval expression', 'Ctrl-↓ or Ctrl-j'],
['Collapse selection', 'Ctrl-↑ or Ctrl-k'],
@@ -376,7 +376,7 @@ export class UI {
['Step out of call', 'Ctrl-o', '\\o'],
['When in call tree view, jump to return statement', 'Enter'],
['When in call tree view, jump to function arguments', 'a'],
['Clear IO cache', 'F6'],
['Clear IO trace', 'F6'],
['(Re)open run window (F7)', 'F7'],
['Expand/collapse editor to fullscreen', 'F8'],
]

6
src/effects.js vendored
View File

@@ -225,11 +225,11 @@ export const apply_side_effects = (prev, next, command, ui) => {
ui.logs.rerender_logs(next.logs)
if(
prev.io_cache != next.io_cache
prev.io_trace != next.io_trace
||
prev.eval_cxt?.io_cache_index != next.eval_cxt.io_cache_index
prev.eval_cxt?.io_trace_index != next.eval_cxt.io_trace_index
) {
ui.render_io_cache(next)
ui.render_io_trace(next)
}
}

View File

@@ -294,7 +294,7 @@ export const eval_modules = (
external_imports,
on_deferred_call,
calltree_changed_token,
io_cache,
io_trace,
location
) => {
// TODO gensym __cxt, __trace, __trace_call
@@ -350,7 +350,7 @@ export const eval_modules = (
window: globalThis.run_window,
}
const result = run(module_fns, cxt, io_cache)
const result = run(module_fns, cxt, io_trace)
const make_result = result => {
const calltree = assign_code(parse_result.modules, result.calltree)
@@ -363,7 +363,7 @@ export const eval_modules = (
eval_cxt: result.eval_cxt,
calltree,
call,
io_cache: result.eval_cxt.io_cache,
io_trace: result.eval_cxt.io_trace,
}
}

View File

@@ -32,7 +32,7 @@ const io_patch = (path, use_context = false) => {
const make_patched_method = (original, name, use_context) => {
const method = function(...args) {
if(cxt.io_cache_is_replay_aborted) {
if(cxt.io_trace_is_replay_aborted) {
// Try to finish fast
// TODO invoke callback to notify that code must be restarted?
throw new Error('io replay aborted')
@@ -41,7 +41,7 @@ const make_patched_method = (original, name, use_context) => {
const has_new_target = new.target != null
if(cxt.is_recording_deferred_calls) {
// TODO record cache on deferred calls?
// TODO record trace on deferred calls?
return has_new_target
? new original(...args)
: original.apply(this, args)
@@ -49,7 +49,7 @@ const make_patched_method = (original, name, use_context) => {
const cxt_copy = cxt
if(cxt.io_cache_is_recording) {
if(cxt.io_trace_is_recording) {
let ok, value, error
try {
// save call, so on expand_call and find_call IO functions would not be
@@ -58,7 +58,7 @@ const make_patched_method = (original, name, use_context) => {
// lib and async context is lost
set_record_call(cxt)
const index = cxt.io_cache.length
const index = cxt.io_trace.length
if(name == 'setTimeout') {
args = args.slice()
@@ -69,11 +69,11 @@ const make_patched_method = (original, name, use_context) => {
// If code execution was cancelled, then never call callback
return
}
if(cxt.io_cache_is_replay_aborted) {
if(cxt.io_trace_is_replay_aborted) {
// Not necessary
return
}
cxt.io_cache.push({type: 'resolution', index})
cxt.io_trace.push({type: 'resolution', index})
cb()
}, 'name', {value: cb.name})
}
@@ -89,11 +89,11 @@ const make_patched_method = (original, name, use_context) => {
if(cxt_copy != cxt) {
return
}
if(cxt.io_cache_is_replay_aborted) {
if(cxt.io_trace_is_replay_aborted) {
// Not necessary
return
}
cxt.io_cache.push({type: 'resolution', index})
cxt.io_trace.push({type: 'resolution', index})
})
}
@@ -104,7 +104,7 @@ const make_patched_method = (original, name, use_context) => {
ok = false
throw e
} finally {
cxt.io_cache.push({
cxt.io_trace.push({
type: 'call',
name,
ok,
@@ -119,11 +119,11 @@ const make_patched_method = (original, name, use_context) => {
})
}
} else {
const call = cxt.io_cache[cxt.io_cache_index]
const call = cxt.io_trace[cxt.io_trace_index]
// TODO if call == null or call.type == 'resolution', then do not discard
// cache, instead switch to record mode and append new calls to the
// cache?
// trace, instead switch to record mode and append new calls to the
// trace?
if(
call == null
|| call.type != 'call'
@@ -140,53 +140,53 @@ const make_patched_method = (original, name, use_context) => {
)
)
){
cxt.io_cache_is_replay_aborted = true
cxt.io_trace_is_replay_aborted = true
// Try to finish fast
throw new Error('io replay aborted')
} else {
const next_resolution = cxt.io_cache.find((e, i) =>
e.type == 'resolution' && i > cxt.io_cache_index
const next_resolution = cxt.io_trace.find((e, i) =>
e.type == 'resolution' && i > cxt.io_trace_index
)
if(next_resolution != null && !cxt.io_cache_resolver_is_set) {
if(next_resolution != null && !cxt.io_trace_resolver_is_set) {
const original_setTimeout = cxt.window.setTimeout.__original
cxt.io_cache_resolver_is_set = true
cxt.io_trace_resolver_is_set = true
original_setTimeout(() => {
if(cxt_copy != cxt) {
return
}
if(cxt.io_cache_is_replay_aborted) {
if(cxt.io_trace_is_replay_aborted) {
return
}
cxt.io_cache_resolver_is_set = false
cxt.io_trace_resolver_is_set = false
// Sanity check
if(cxt.io_cache_index >= cxt.io_cache.length) {
if(cxt.io_trace_index >= cxt.io_trace.length) {
throw new Error('illegal state')
}
const next_event = cxt.io_cache[cxt.io_cache_index]
const next_event = cxt.io_trace[cxt.io_trace_index]
if(next_event.type == 'call') {
cxt.io_cache_is_replay_aborted = true
cxt.io_trace_is_replay_aborted = true
} else {
while(
cxt.io_cache_index < cxt.io_cache.length
cxt.io_trace_index < cxt.io_trace.length
&&
cxt.io_cache[cxt.io_cache_index].type == 'resolution'
cxt.io_trace[cxt.io_trace_index].type == 'resolution'
) {
const resolution = cxt.io_cache[cxt.io_cache_index]
const resolver = cxt.io_cache_resolvers.get(resolution.index)
const resolution = cxt.io_trace[cxt.io_trace_index]
const resolver = cxt.io_trace_resolvers.get(resolution.index)
cxt.io_cache_index++
cxt.io_trace_index++
if(cxt.io_cache[resolution.index].name == 'setTimeout') {
if(cxt.io_trace[resolution.index].name == 'setTimeout') {
resolver()
} else {
resolver(cxt.io_cache[resolution.index].value)
resolver(cxt.io_trace[resolution.index].value)
}
}
}
@@ -194,17 +194,17 @@ const make_patched_method = (original, name, use_context) => {
}, 0)
}
cxt.io_cache_index++
cxt.io_trace_index++
if(call.ok) {
if(call.value instanceof cxt.window.Promise) {
// Always make promise originate from run_window
return new cxt.window.Promise(resolve => {
cxt.io_cache_resolvers.set(cxt.io_cache_index - 1, resolve)
cxt.io_trace_resolvers.set(cxt.io_trace_index - 1, resolve)
})
} else if(name == 'setTimeout') {
const timeout_cb = args[0]
cxt.io_cache_resolvers.set(cxt.io_cache_index - 1, timeout_cb)
cxt.io_trace_resolvers.set(cxt.io_trace_index - 1, timeout_cb)
return call.value
} else {
return call.value
@@ -253,9 +253,9 @@ export const apply_io_patches = () => {
io_patch(['Math', 'random'])
io_patch(['setTimeout'])
// TODO if call setTimeout and then clearTimeout, cache it and remove call of
// TODO if call setTimeout and then clearTimeout, trace it and remove call of
// clearTimeout, and make only setTimeout, then it would never be called when
// replaying from cache
// replaying from trace
io_patch(['clearTimeout'])
// TODO patch setInterval to only cleanup all intervals on finish

View File

@@ -29,23 +29,23 @@ const gen_to_promise = gen_fn => {
}
}
const do_run = function*(module_fns, cxt, io_cache){
const do_run = function*(module_fns, cxt, io_trace){
let calltree
cxt = (io_cache == null || io_cache.length == 0)
// TODO group all io_cache_ properties to single object?
cxt = (io_trace == null || io_trace.length == 0)
// TODO group all io_trace_ properties to single object?
? {...cxt,
io_cache_is_recording: true,
io_cache: [],
io_trace_is_recording: true,
io_trace: [],
}
: {...cxt,
io_cache_is_recording: false,
io_cache,
io_cache_is_replay_aborted: false,
io_cache_resolver_is_set: false,
// Map of (index in io_cache) -> resolve
io_cache_resolvers: new Map(),
io_cache_index: 0,
io_trace_is_recording: false,
io_trace,
io_trace_is_replay_aborted: false,
io_trace_resolver_is_set: false,
// Map of (index in io_trace) -> resolve
io_trace_resolvers: new Map(),
io_trace_index: 0,
}
apply_promise_patch(cxt)
@@ -94,14 +94,14 @@ const do_run = function*(module_fns, cxt, io_cache){
}
}
export const run = gen_to_promise(function*(module_fns, cxt, io_cache) {
const result = yield* do_run(module_fns, cxt, io_cache)
export const run = gen_to_promise(function*(module_fns, cxt, io_trace) {
const result = yield* do_run(module_fns, cxt, io_trace)
if(result.eval_cxt.io_cache_is_replay_aborted) {
if(result.eval_cxt.io_trace_is_replay_aborted) {
// TODO test next line
result.eval_cxt.is_recording_deferred_calls = false
// run again without io cache
// run again without io trace
return yield* do_run(module_fns, cxt, null)
} else {
return result

View File

@@ -3172,7 +3172,7 @@ const y = x()`
const next = COMMANDS.input(initial, `const x = Math.random()*2`, 0).state
assert_equal(next.value_explorer.result.value, 2)
assert_equal(next.eval_cxt.io_cache_index, 1)
assert_equal(next.eval_cxt.io_trace_index, 1)
// Patch Math.random to return 2.
// TODO The first call to Math.random() is cached with value 1, and the
@@ -3192,7 +3192,7 @@ const y = x()`
}),
test('record io cache discarded if args does not match', async () => {
test('record io trace discarded if args does not match', async () => {
// Patch fetch
patch_builtin('fetch', async () => 'first')
@@ -3318,19 +3318,19 @@ const y = x()`
patch_builtin('fetch', null)
}),
test('record io clear io cache', async () => {
test('record io clear io trace', async () => {
const s1 = test_initial_state(`Math.random()`)
const rnd = s1.value_explorer.result.value
const s2 = COMMANDS.input(s1, `Math.random() + 1`, 0).state
assert_equal(s2.value_explorer.result.value, rnd + 1)
const cleared = COMMANDS.clear_io_cache(s2)
const cleared = COMMANDS.clear_io_trace(s2)
assert_equal(
cleared.value_explorer.result.value == rnd + 1,
false
)
}),
test('record io no io cache on deferred calls', async () => {
test('record io no io trace on deferred calls', async () => {
const code = `
const x = Math.random
export const fn = () => x()
@@ -3344,7 +3344,7 @@ const y = x()`
const state = on_deferred_call(i)
// Deferred calls should not be record in cache
assert_equal(state.eval_cxt.io_cache.length, 0)
assert_equal(state.eval_cxt.io_trace.length, 0)
}),
test('record io discard prev execution', () => {
@@ -3361,19 +3361,19 @@ const y = x()`
test('record io Date', () => {
assert_equal(
test_initial_state('new Date()').io_cache.length,
test_initial_state('new Date()').io_trace.length,
1
)
assert_equal(
test_initial_state('new Date("2020-01-01")').io_cache,
test_initial_state('new Date("2020-01-01")').io_trace,
undefined,
)
assert_equal(
typeof(test_initial_state('Date()').io_cache[0].value),
typeof(test_initial_state('Date()').io_trace[0].value),
'string',
)
assert_equal(
typeof(test_initial_state('new Date()').io_cache[0].value),
typeof(test_initial_state('new Date()').io_trace[0].value),
'object',
)
}),