Mirror of https://github.com/leporello-js/leporello-js
synced 2026-01-13 13:04:30 -08:00
finish record io
README.md | 45
@@ -117,19 +117,13 @@ Currently every external is loaded once and cached until Leporello is restarted
(TODO: change the path to modules every time it is changed on disk, since
modules are served from service workers).

<!---
## IO

To support the livecoding experience, Leporello.js continuously runs code while
you type and navigate it. You don't even notice it while the code is pure, but
what about functions performing IO?
To provide a livecoding experience, Leporello.js caches calls to IO functions
made by your app and can later replay them from the cache, allowing you to
program by making small iterations on your code and instantly getting feedback.

Leporello.js caches all IO calls when the code is run for the first time. The
next time, after you edit your code, functions that perform IO will not be
called if there is a cached result.
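
As a rough sketch of this record-on-first-run idea (simplified from the
`io_patch` helper touched elsewhere in this commit, with illustrative names,
not the actual Leporello.js source), a patched IO function records its name,
arguments and result into a cache array:

```js
// Sketch only: record every call to an IO function so a later run can be
// served from the cache instead of re-executing the IO.
const io_cache = []

const record_io = (name, original) => function(...args) {
  let ok, value, error
  try {
    value = original.apply(this, args)
    ok = true
    return value
  } catch(e) {
    error = e
    ok = false
    throw e
  } finally {
    io_cache.push({name, args, ok, value, error})
  }
}

// e.g. Math.random = record_io('Math.random', Math.random)
```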

Builtin IO functions are mocked to cache IO. Current list of builtin cached
functions is:
The current list of builtin functions whose calls are cached is:

- `Date`
- `Math.random()`
- `fetch`
@@ -142,27 +136,18 @@ functions is:
- `setTimeout`
- `clearTimeout`

If you want to make your own function IO-caching, or to import a third-party
function and make it IO-caching, then you should use the `IO` pragma.
Leporello.js caches all IO calls when the code is run for the first time. Then,
every time you edit your code, Leporello.js tries to execute it, taking the
results of IO calls from the cache (this is called replay). Cached calls are
stored in an array. During replay, when an IO call is made, Leporello.js takes
the next call from the array and checks whether the function and the arguments
are the same for the current call and the cached call. If they are the same,
Leporello.js returns the cached result. To compare arguments for equality,
Leporello.js uses a deep equality comparison based on `JSON.stringify`.
Otherwise, the cache is discarded and Leporello.js executes the code again,
this time without the cache, so that a new cache array is populated.

// TODO document IO pragma
// TODO hotkey to bust cache

The caching algorithm is (see the sketch after this section):

- Cached calls are expected to occur in the same order as in the non-cached
  execution. For example, if you first call `write` and then `read` and the
  results are cached, and you then modify the code to first call `read` and
  then `write`, the cache will be busted and Leporello will call the
  non-cached `read` and `write`.

- Arguments to IO-caching functions are expected to be deep equal to those of
  the non-cached call for the cache to be used. Deep equality is implemented
  by comparing JSON-stringified arguments.

- If there is a call that is not cached, the cache is busted and the entire
  execution is restarted.
-->
If you want to bust the cache manually, there is a button and a hotkey for this.
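
The caching algorithm above can be summarized in code. The following is a
simplified sketch with illustrative names (`io_cache`, `bust_cache_and_rerun`),
not the exact Leporello.js source: during replay the next cached entry must
match the current call by function name and JSON-stringified arguments,
otherwise the cache is discarded and execution restarts without it.

```js
// Sketch only: serve an IO call from the cache during replay, or bust the
// cache when the call does not match the recorded one.
let replay_index = 0

const replay_io = (name, args) => {
  const cached = io_cache[replay_index++]
  const matches =
    cached != null
    && cached.name === name
    && JSON.stringify(cached.args) === JSON.stringify(args)
  if(!matches) {
    // Discard the cache and rerun the whole program without it
    return bust_cache_and_rerun()
  }
  if(cached.ok) {
    return cached.value
  } else {
    throw cached.error
  }
}
```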

## Hotkeys

@@ -1,26 +1,37 @@
import {ethers} from 'https://unpkg.com/ethers/dist/ethers.esm.js'
//import {ethers} from 'https://unpkg.com/ethers/dist/ethers.js'
import {ethers} from 'https://unpkg.com/ethers@5.7.2/dist/ethers.esm.js'

/*
const URL = 'https://ethereum-goerli-rpc.allthatnode.com'

const p = ethers.getDefaultProvider(URL)
await p._networkPromise

const latest = await p.getBlock()
latest

const txs = await Promise.all(latest.transactions.map(t =>
const txs = await Promise.all(latest.transactions.slice(0,2).map(t =>
  p.getTransactionReceipt(t)
))

const totalGas = txs.reduce((gas,tx) =>
  gas.add(tx.gasUsed), ethers.BigNumber.from(0))

totalGas.add(25)

totalGas.add(20)

/*
const totalGas = txs.reduce((gas,tx) =>
  gas + tx.gasUsed, BigInt(0))

totalGas + 1
*/

@@ -28,4 +39,4 @@ const totalGas = txs.reduce((gas,tx) =>

@@ -163,7 +163,7 @@
  overflow: auto;
}

.logs {
.logs, .io_cache {
  padding-left: 1em;
}

@@ -330,7 +330,11 @@
  color: red;
}

.open_run_window {
.statusbar_action {
  margin-right: 2em;
}

.statusbar_action.first {
  margin-left: auto;
}

@@ -1,261 +0,0 @@
<script>
let io_cache = null

let io_cache_is_recording = io_cache == null
if(io_cache == null) {
  io_cache = {calls: [], resolution_order: []}
}
let io_cache_is_replay_aborted = false
let io_cache_index = 0

const io_patch = (obj, method, name, use_context = false) => {
  if(obj == null || obj[method] == null) {
    // Method is absent in current env, skip patching
    return
  }
  const original = obj[method]
  obj[method] = function(...args) {
    console.log('patched method', name, {io_cache_is_replay_aborted, io_cache_is_recording})
    // TODO guard that in find_call io methods are not called?
    // if(searched_location != null) {
    //   throw new Error('illegal state')
    // }
    if(io_cache_is_replay_aborted) {
      // Try to finish fast
      console.error('ABORT')
      throw new Error('io recording aborted')
    } else if(io_cache_is_recording) {
      let ok, value, error
      const has_new_target = new.target != null
      try {
        // TODO. Do we need it here? Only need for IO calls view. And also
        // for expand_call and find_call, to not use cache on expand call
        // and find_call
        //set_record_call()
        value = has_new_target
          ? new original(...args)
          : original.apply(this, args)

        console.log('value', value)

        //const index = io_cache.calls.length
        //if(value instanceof Promise) {
        //  value.finally(() => {
        //    console.log('resolved', index)
        //    io_cache.resolution_order.push(index)
        //  })
        //} else {
        //  io_cache.resolution_order.push(index)
        //}

        /* TODO remove
        if(value instanceof Promise) {
          const original_value = value
          value = new Promise((resolve, reject) => {
            // TODO fix setTimeout.original
            globalThis.setTimeout.original(
              () => {
                original_value.then(resolve, reject)
              },
              10
            )
          })
        }
        */
        // TODO
        //if(value instanceof Promise) {
        //  const make_cb = ok => value => {
        //    // TODO resolve promises in the same order they were resolved on
        //    initial execution

        //  }
        //  // TODO should we use promise_then or patched promise.then?
        //  promise_then.apply(value, make_cb(true), make_cb(false))
        //}
        ok = true
        return value
      } catch(e) {
        error = e
        ok = false
        throw e
      } finally {
        io_cache.calls.push({
          ok,
          value,
          error,
          args,
          name,
          // To discern calls with and without 'new' keyword, primary for
          // Date that can be called with and without new
          has_new_target,
          use_context,
          context: use_context ? this : undefined,
        })
      }
    } else {
      const call = io_cache.calls[io_cache_index++]
      /*
      TODO remove
      console.log(
        call == null
        , call.has_new_target != (new.target != null)
        , call.use_context && (call.context != this)
        , call.name != name
        , JSON.stringify(call.args) != JSON.stringify(args)
      )
      */
      if(
        call == null
        || call.has_new_target != (new.target != null)
        // TODO test
        || call.use_context && (call.context != this)
        || call.name != name
        || JSON.stringify(call.args) != JSON.stringify(args)
      ){
        console.log('discard cache', call)
        io_cache_is_replay_aborted = true
        // Try to finish fast
        throw new Error('io replay aborted')
      } else {
        console.log('cached call found', call)
        if(call.ok) {
          // TODO resolve promises in the same order they were resolved on
          // initial execution

          if(call.value instanceof Promise) {
            const original_setTimeout = globalThis.setTimeout.original
            return Promise.all([
              call.value,
              new Promise(resolve => original_setTimeout(
                resolve,
                10
              ))
            ]).then(([a,_]) => a)
            // TODO remove
            .then(x => {console.log('resolved',name); return x})
          } else {
            return call.value
          }
        } else {
          throw call.error
        }
      }
    }
  }

  Object.defineProperty(obj[method], 'name', {value: original.name})

  obj[method].__original = original
}

function io_patch_remove(obj, method) {
  if(obj == null || obj[method] == null) {
    // Method is absent in current env, skip patching
    return
  }
  obj[method] = obj[method].__original
}

const Response_methods = [
  'arrayBuffer',
  'blob',
  'formData',
  'json',
  'text',
]

function apply_io_patches() {
  io_patch(Math, 'random', 'Math.random')

  // TODO test
  const Date = globalThis.Date
  io_patch(globalThis, 'Date', 'Date')
  globalThis.Date.parse = Date.parse
  globalThis.Date.now = Date.now
  globalThis.Date.UTC = Date.UTC
  globalThis.Date.length = Date.length
  globalThis.Date.name = Date.name
  io_patch(globalThis.Date, 'now', 'Date.now')

  io_patch(globalThis, 'fetch', 'fetch')
  // Check if Response is defined, for node.js
  if(globalThis.Response != null) {
    for(let key of Response_methods) {
      io_patch(Response.prototype, key, 'Response.prototype.' + key, true)
    }
  }

  //TODO
  const setTimeout = globalThis.setTimeout
  globalThis.setTimeout = function(cb, timeout) {
    const timer_id = setTimeout(function(...args) {
      console.log('timeout', timer_id)
      cb(...args)
    }, timeout)
    console.log('setTimeout', timer_id)
    return timer_id
  }
  globalThis.setTimeout.original = setTimeout

  // TODO clearTimeout
}

function remove_io_patches() {
  // TODO when to apply io_patches and promise_patches? Only once, when we
  // create window?

  io_patch_remove(Math, 'random')
  io_patch_remove(globalThis, 'Date')
  io_patch_remove(globalThis, 'fetch')

  // Check if Response is defined, for node.js
  if(globalThis.Response != null) {
    for(let key of Response_methods) {
      io_patch_remove(Response.prototype, key)
    }
  }
  globalThis.setTimeout = globalThis.setTimeout.original
}
</script>

<script type='module'>

//import {ethers} from 'https://unpkg.com/ethers/dist/ethers.esm.js'
import {ethers} from './ethers.js'

async function run() {
  const URL = 'https://ethereum-goerli-rpc.allthatnode.com'

  const p = ethers.getDefaultProvider(URL)

  const latest = await p.getBlock()

  const txs = await Promise.all(latest.transactions.slice(0,1).map(async (t, i) => {
    console.error("GETTING RECEIPT", i)
    const result = await p.getTransactionReceipt(t)
    console.error("GOT RECEIPT", i)
    return result
  }))

  const totalGas = txs.reduce((gas,tx) =>
    gas.add(tx.gasUsed), ethers.BigNumber.from(0))

  console.log('GAS', totalGas.add(3))
}

apply_io_patches()

await run()

io_cache_is_recording = false
console.error('REPLAY')
await run()

</script>
record_io/ethers.js | 24280
File diff suppressed because one or more lines are too long
@@ -1,28 +0,0 @@
<script type='module'>

const original = globalThis.fetch
globalThis.fetch = function(...args) {
  console.log('fetch called')
  return original.apply(null, args)
}

for(let key of [
  'arrayBuffer',
  'blob',
  'formData',
  'json',
  'text',
]) {
  let original = Response.prototype[key]
  Response.prototype[key] = function(...args){
    console.log('key called', key)
    return original.apply(this, args)
  }
}

console.log((await (await fetch('/')).text()).length)

</script>
@@ -1,26 +0,0 @@
console.log('start')

let r
const x = new Promise(resolve => r = resolve).then(() => {console.log('resolved')})

console.log('before resolve')
r()
console.log('after resolve')
/*
console.log('start')

Promise.resolve().then(() => {
  console.log('1')
  Promise.resolve().then(() => {
    console.log('2')
  })
})

console.log('end')
Promise.resolve().then(() => {
  console.log('3')
  Promise.resolve().then(() => {
    console.log('4')
  })
})
*/
@@ -1,16 +0,0 @@
/*
function f() {
  console.log('n', new.target)
}

f()
new f()
*/

const f = new Function(`
  return arguments.length
`)

console.log(f(1,2,3))
console.log(f(1,2,3,4))
@@ -1,33 +0,0 @@
//let value = Promise.reject(1)

/*
value.then(
  () => console.log('res'),
  () => console.log('rej'),
)
*/

/*
const original_value = value

value = new Promise((resolve, reject) => {
  globalThis.setTimeout(
    () => {
      console.log('timeout')
      original_value.then(resolve, reject)
    },
    1000
  )
})

try {
  console.log(await value)
} catch(e) {
  console.log('ERROR', e)
}
*/

const t = globalThis.setTimeout

t(() => console.log('timeout'), 100)
@@ -1,73 +0,0 @@
Timeline                Replay

call a()                call a()
resolve a()

call b()
resolve b()

call c()
resolve c()


Timeline                Replay

resolution_index = 0, io_index = 0

call a()                call a: return promise
                        compare resolutions[resolution_index] with io_index
                        io_index < resolutions[0]
                        do not resolve
                        io_index++

call b()                call b: return promise
                        compare resolutions[0] && io_index
                        io_index < resolutions[0]
                        do not resolve

call c()                call c: return promise

resolve c()
resolve b()
resolve a()


resolutions: [
  3,
  2,
  1,
]

We do the replay. Events arrive as function calls. We intercept a call, return
a promise, and resolve whichever promise needs to be resolved at that moment.
For example, in the example above we resolve a() after the call to c(), and we
resolve b() after c() has resolved. In other words, we may resolve several
promises at once.


Record: [
  call a
  resolve a
  call b
  resolve b
]

Replay: [

  call a
  see that a promise is returned, arm the resolver

  the resolver fired
  resolve a

  call b
  see that a promise is returned, arm the resolver

  the resolver fired
  resolve b
]


call
resolve
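
A possible sketch of this replay scheme in JavaScript (illustrative names only,
not the actual Leporello.js source): while recording, remember the order in
which cached calls resolved; while replaying, hand out fresh promises and
resolve them in that recorded order, so code that races several IO calls sees
the same winner as on the first run.

// Sketch: `calls` holds the recorded results, `resolution_order` holds the
// indices of calls in the order their promises resolved during recording.
// For simplicity this resolves everything only after all calls were replayed.
const pending = new Map() // call index -> resolver of the replayed promise

const replay_call = (index, calls) =>
  new Promise(resolve => pending.set(index, () => resolve(calls[index].value)))

const replay_resolutions = (resolution_order) => {
  for(const index of resolution_order) {
    const resolve = pending.get(index)
    if(resolve != null) {
      resolve()
    }
  }
}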
@@ -41,6 +41,17 @@ const is_stackoverflow = node =>
  // Firefox
  node.error?.message == "too much recursion"

export const has_error = n =>
  !n.ok
  ||
  (
    n.value instanceof globalThis.run_window.Promise
    &&
    n.value.status != null
    &&
    !n.value.status.ok
  )

export const calltree_node_loc = node => node.toplevel
  ? {module: node.module}
  : node.fn.__location

@@ -3,7 +3,8 @@ import {el, stringify, fn_link, scrollIntoViewIfNeeded} from './domutils.js'
import {FLAGS} from '../feature_flags.js'
import {stringify_for_header} from './value_explorer.js'
import {find_node} from '../ast_utils.js'
import {is_expandable, root_calltree_node, get_deferred_calls} from '../calltree.js'
import {is_expandable, root_calltree_node, get_deferred_calls, has_error}
  from '../calltree.js'

// TODO perf - quadratic difficulty
const join = arr => arr.reduce(
@@ -13,17 +14,6 @@ const join = arr => arr.reduce(
  [],
)

const is_error = n =>
  !n.ok
  ||
  (
    n.value instanceof globalThis.run_window.Promise
    &&
    n.value.status != null
    &&
    !n.value.status.ok
  )

export class CallTree {
  constructor(ui, container) {
    this.ui = ui
@@ -46,9 +36,11 @@ export class CallTree {
      this.ui.editor.focus()
    }

    /* TODO test
    if(e.key == 'F3') {
      this.ui.set_active_tab('logs')
    }
    */

    if(e.key == 'a') {
      if(FLAGS.embed_value_explorer) {
@@ -119,7 +111,7 @@ export class CallTree {
        )
      : el('span',
          'call_header '
          + (is_error(n) ? 'error' : '')
          + (has_error(n) ? 'error' : '')
          + (n.fn.__location == null ? ' native' : '')
          ,
          // TODO show `this` argument

@@ -297,7 +297,7 @@ export class Editor {
    this.ace_editor.commands.removeCommand('goToNextError')

    this.ace_editor.commands.bindKey("F4", "goto_definition");
    this.ace_editor.commands.bindKey("F5", "goto_definition");
    VimApi._mapCommand({
      keys: 'gd',
      type: 'action',

src/editor/io_cache.js | 45 (new file)
@@ -0,0 +1,45 @@
import {header, stringify_for_header} from './value_explorer.js'
import {el} from './domutils.js'
import {has_error} from '../calltree.js'

// TODO render grey items there were not used in run

export class IO_Cache {
  constructor(ui, el) {
    this.el = el
    this.ui = ui

    this.el.addEventListener('keydown', (e) => {

      if(e.key == 'Escape') {
        this.ui.editor.focus()
      }

      if(e.key == 'F4') {
        this.ui.editor.focus()
      }

    })
  }

  render_io_cache(items) {
    this.el.innerHTML = ''
    for(let item of items) {
      if(item.type == 'resolution') {
        continue
      }
      this.el.appendChild(
        el('div',
          'call_header ' + (has_error(item) ? 'error' : ''),
          item.name,
          '(' ,
          // TODO fn_link, like in ./calltree.js
          item.args.map(a => header(a)).join(', '),
          '): ' ,
          (item.ok ? stringify_for_header(item.value) : item.error.toString())
        )
      )
    }
  }

}
@@ -22,9 +22,11 @@ export class Logs {
      this.ui.editor.focus_value_explorer(this.el)
    }

    /* TODO test
    if(e.key == 'F2') {
      this.ui.set_active_tab('calltree')
    }
    */

    if(e.key == 'F3') {
      this.ui.editor.focus()
@@ -75,6 +77,7 @@ export class Logs {
          + ':'
        ),
        ' ',
        // TODO fn_link, for function args, like in ./calltree.js
        log.args.map(a => header(a)).join(', ')
      )
    )

@@ -3,6 +3,7 @@ import {Editor} from './editor.js'
import {Files} from './files.js'
import {CallTree} from './calltree.js'
import {Logs} from './logs.js'
import {IO_Cache} from './io_cache.js'
import {Eval} from './eval.js'
import {el} from './domutils.js'
import {FLAGS} from '../feature_flags.js'
@@ -41,6 +42,12 @@ export class UI {
          href: 'javascript: void(0)',
        }, 'Logs (F3)')
      ),
      this.tabs.io_cache = el('div', 'tab',
        el('a', {
          click: () => this.set_active_tab('io_cache'),
          href: 'javascript: void(0)',
        }, 'IO cache (F4)')
      ),
      this.entrypoint_select = el('div', 'entrypoint_select')
    ),
    this.debugger.calltree = el('div', {
@@ -51,6 +58,10 @@ export class UI {
      'class': 'tab_content logs',
      tabindex: 0,
    }),
    this.debugger.io_cache = el('div', {
      'class': 'tab_content io_cache',
      tabindex: 0,
    }),
  ),
  this.debugger_loading = el('div', 'debugger_wrapper')
),
@@ -74,11 +85,19 @@ export class UI {
  */

  el('a', {
    'class': 'open_run_window',
    'class': 'statusbar_action first',
    href: 'javascript: void(0)',
    click: () => exec('clear_io_cache')
  },
    'Clear IO cache (F6)'
  ),

  el('a', {
    'class': 'statusbar_action',
    href: 'javascript: void(0)',
    click: this.open_run_window,
  },
    '(Re)open run window (F6)'
    '(Re)open run window (F7)'
  ),

  this.options = el('div', 'options',
@@ -137,13 +156,21 @@ export class UI {
      this.set_active_tab('logs')
    }

    if(e.key == 'F5'){
      this.fullscreen_editor()
    if(e.key == 'F4'){
      this.set_active_tab('io_cache')
    }

    if(e.key == 'F6'){
      exec('clear_io_cache')
    }

    if(e.key == 'F7'){
      this.open_run_window()
    }

    if(e.key == 'F8'){
      this.fullscreen_editor()
    }
  })

  if(!FLAGS.embed_value_explorer) {
@@ -161,6 +188,7 @@ export class UI {

  this.calltree = new CallTree(this, this.debugger.calltree)
  this.logs = new Logs(this, this.debugger.logs)
  this.io_cache = new IO_Cache(this, this.debugger.io_cache)

  // TODO jump to another module
  // TODO use exec
@@ -184,6 +212,7 @@ export class UI {
  }

  set_active_tab(tab_id, skip_focus = false) {
    this.active_tab = tab_id
    Object.values(this.tabs).forEach(el => el.classList.remove('active'))
    this.tabs[tab_id].classList.add('active')
    Object.values(this.debugger).forEach(el => el.style.display = 'none')
@@ -272,8 +301,15 @@ export class UI {

    this.debugger_loading.style = 'display: none'
    this.debugger_loaded.style = ''

    this.calltree.render_calltree(state)
    this.logs.render_logs(null, state.logs)

    // render lazily
    // TODO
    //if(this.active_tab == 'io_cache') {
      this.io_cache.render_io_cache(state.io_cache)
    //}
  }

  render_problems(problems) {
@@ -327,14 +363,18 @@ export class UI {
    ['Focus console logs', 'F3'],
    ['Navigate console logs', '↑ ↓ or jk'],
    ['Leave console logs', 'F3 or Esc'],
    ['Jump to definition', 'F4', 'gd'],
    ['Focus IO cache', 'F4'],
    ['Leave IO cache', 'F4 or Esc'],
    ['Jump to definition', 'F5', 'gd'],
    ['Expand selection to eval expression', 'Ctrl-↓ or Ctrl-j'],
    ['Collapse selection', 'Ctrl-↑ or Ctrl-k'],
    ['Step into call', 'Ctrl-i', '\\i'],
    ['Step out of call', 'Ctrl-o', '\\o'],
    ['When in call tree view, jump to return statement', 'Enter'],
    ['When in call tree view, jump to function arguments', 'a'],
    ['Expand/collapse editor to fullscreen', 'F5'],
    ['Clear IO cache', 'F6'],
    ['(Re)open run window (F7)', 'F7'],
    ['Expand/collapse editor to fullscreen', 'F8'],
  ]
  return el('dialog', 'help_dialog',
    el('table', 'help',

@@ -3,6 +3,7 @@
// TODO show Errors in red
// TODO fns as clickable links (jump to definition), both for header and for
// content
// TODO show constructor.name in header?

import {el, stringify, scrollIntoViewIfNeeded} from './domutils.js'

@@ -1,5 +1,7 @@
import {set_record_call} from './runtime.js'

// TODO remove all console.log

const get_object_to_patch = (cxt, path) => {
  let obj = cxt.window
  for(let i = 0; i < path.length - 1; i++) {
@@ -25,6 +27,7 @@ const io_patch = (cxt, path, use_context = false) => {
  // previous run ASAP

  // TODO remove
  /*
  console.error('patched method', name, {
    io_cache_is_recording: cxt.io_cache_is_recording,
    io_cache_is_replay_aborted: cxt.io_cache_is_replay_aborted,
@@ -32,11 +35,7 @@ const io_patch = (cxt, path, use_context = false) => {
    ? cxt.io_cache.length
    : cxt.io_cache_index
  })

  // sanity check
  if(cxt.searched_location != null) {
    throw new Error('illegal state')
  }
  */

  if(cxt.io_cache_is_replay_aborted) {
    // Try to finish fast
@@ -70,7 +69,8 @@ const io_patch = (cxt, path, use_context = false) => {
    ? new original(...args)
    : original.apply(this, args)

  console.log('value', value)
  // TODO remove
  //console.log('value', value)

  if(value instanceof cxt.window.Promise) {
    // TODO use cxt.promise_then, not finally which calls
@@ -134,12 +134,12 @@ const io_patch = (cxt, path, use_context = false) => {
    JSON.stringify(call.args) != JSON.stringify(args)
  )
){
  console.error('DISCARD cache', call)
  //TODO remove console.error('DISCARD cache', call)
  cxt.io_cache_is_replay_aborted = true
  // Try to finish fast
  throw new Error('io replay aborted')
} else {
  console.log('cached call found', call)
  // TODO remove console.log('cached call found', call)
  const next_resolution = cxt.io_cache.find((e, i) =>
    e.type == 'resolution' && i > cxt.io_cache_index
  )
@@ -190,7 +190,7 @@ const io_patch = (cxt, path, use_context = false) => {
} else {
  resolver(cxt.io_cache[resolution.index].value)
}
console.log('RESOLVE', cxt.io_cache_index, resolution.index)
// TODO remove console.log('RESOLVE', cxt.io_cache_index, resolution.index)
}
}

test/test.js | 134
@@ -3066,6 +3066,52 @@ const y = x()`
    Object.assign(globalThis.run_window.Math, {random})
  }),

  test('record io cache discarded if args does not match', async () => {
    const original_fetch = globalThis.run_window.fetch

    // Patch fetch
    Object.assign(globalThis.run_window, {fetch: async () => 'first'})

    const initial = await test_initial_state_async(`
      console.log(await fetch('url', {method: 'GET'}))
    `)
    assert_equal(initial.logs.logs[0].args[0], 'first')

    // Patch fetch again
    Object.assign(globalThis.run_window, {fetch: async () => 'second'})

    const cache_discarded = await command_input_async(initial, `
      console.log(await fetch('url', {method: 'POST'}))
    `, 0)
    assert_equal(cache_discarded.logs.logs[0].args[0], 'second')

    // Remove patch
    Object.assign(globalThis.run_window, {fetch: original_fetch})
  }),

  test('record io fetch rejects', async () => {
    const original_fetch = globalThis.run_window.fetch

    // Patch fetch
    Object.assign(globalThis.run_window, {fetch: () => Promise.reject('fail')})

    const initial = await test_initial_state_async(`
      await fetch('url', {method: 'GET'})
    `)
    assert_equal(root_calltree_node(initial).error, 'fail')

    // Patch fetch again
    Object.assign(globalThis.run_window, {fetch: async () => 'result'})

    const with_cache = await command_input_async(initial, `
      await fetch('url', {method: 'GET'})
    `, 0)
    assert_equal(root_calltree_node(initial).error, 'fail')

    // Remove patch
    Object.assign(globalThis.run_window, {fetch: original_fetch})
  }),

  test('record io preserve promise resolution order', async () => {
    const original_fetch = globalThis.run_window.fetch

@@ -3077,12 +3123,10 @@ const y = x()`
      let resolver
      const promise = new Promise(r => resolver = r)
      calls.push({resolver, promise, args})
      console.log('patched fetch called')
      return promise
    },

    resolve() {
      console.log('resolve', calls);
      [...calls].reverse().forEach(call => call.resolver(...call.args))
    },
  }
@@ -3091,76 +3135,80 @@ const y = x()`

    // Patch fetch
    Object.assign(globalThis.run_window, {fetch})

    const initial_promise = test_initial_state_async(`
      const result = {}
    const code = `
      await Promise.all(
        [1, 2, 3].map(async v => Object.assign(result, {value: await fetch(v)}))
        [1, 2, 3].map(async v => {
          const result = await fetch(v)
          console.log(result)
        })
      )
      console.log(result)
    `)
    `

    const initial_promise = test_initial_state_async(code)

    resolve()

    const initial = await initial_promise

    // calls to fetch are resolved in reverse order, so first call wins
    assert_equal(initial.logs.logs[0].args[0].value, 1)
    // calls to fetch are resolved in reverse order
    assert_equal(initial.logs.logs.map(l => l.args[0]), [3,2,1])

    // Break fetch to ensure it does not get called anymore
    // Break fetch to ensure it is not get called anymore
    Object.assign(globalThis.run_window, {fetch: () => {throw 'broken'}})

    const with_cache = await command_input_async(
      initial,
      `
      const result = {}
      await Promise.all(
        [1, 2, 3].map(async v =>
          Object.assign(result, {value: await fetch(v)})
        )
      )
      console.log(result)
      `,
      code,
      0
    )

    // cached calls to fetch shoudl be resolved in the same (reverse) order as
    // cached calls to fetch should be resolved in the same (reverse) order as
    // on the first run, so first call wins
    assert_equal(with_cache.logs.logs[0].args[0].value, 1)
    assert_equal(with_cache.logs.logs.map(l => l.args[0]), [3,2,1])

    // Remove patch
    Object.assign(globalThis.run_window, {fetch: original_fetch})
  }),

  test('record io setTimeout', async () => {
    const i = await test_initial_state_async(`
      const delay = timeout => new Promise(resolve =>
        setTimeout(() => resolve(1), timeout)
      )
      console.log(await delay(0))
    `)
    const original_fetch = globalThis.run_window.fetch
    const setTimeout_original = globalThis.run_window.setTimeout

    assert_equal(i.io_cache != null, true)
    assert_equal(i.logs.logs[0].args[0], 1)
    // Patch fetch to return result in 10ms
    Object.assign(globalThis.run_window, {
      fetch: () => new Promise(resolve => setTimeout_original(resolve, 10))
    })

    const code2 = `
      const delay = timeout => new Promise(resolve =>
        setTimeout(() => resolve(10), timeout)
      )
      console.log(await delay(0))
    const code = `
      setTimeout(() => console.log('timeout'), 0)
      await fetch().then(() => console.log('fetch'))
    `

    const next = await command_input_async(i, code2, 0)
    const i = await test_initial_state_async(code)

    // Assert cache was used
    // TODO check that items were not appended
    assert_equal(next.io_cache == i.io_cache, true)
    // First executed setTimeout, then fetch
    assert_equal(i.logs.logs.map(l => l.args[0]), ['timeout', 'fetch'])

    assert_equal(next.logs.logs[0].args[0], 10)
    // Break fetch to ensure it would not be called
    Object.assign(globalThis.run_window, {fetch: async () => {throw 'break'}})

    const with_cache = await command_input_async(i, code, 0)

    // Cache must preserve resolution order
    assert_equal(with_cache.logs.logs.map(l => l.args[0]), ['timeout', 'fetch'])

    Object.assign(globalThis.run_window, {fetch: original_fetch})
  }),

  // TODO test resolution order with sync functions (Date, Math.random)

  test('record io clear io cache', async () => {
    const s1 = test_initial_state(`Math.random()`)
    const rnd = s1.value_explorer.result.value
    const s2 = COMMANDS.input(s1, `Math.random() + 1`, 0).state
    assert_equal(s2.value_explorer.result.value, rnd + 1)
    const cleared = COMMANDS.clear_io_cache(s2)
    assert_equal(
      cleared.value_explorer.result.value == rnd + 1,
      false
    )
  }),
]