haku: more cleanups: remove old unused VM

also remove some leftover TODOs
リキ萌 2025-06-16 18:52:52 +02:00
parent c80cd1c7fe
commit 8b464d50f4
17 changed files with 114 additions and 1167 deletions

Cargo.lock (generated)

@@ -630,16 +630,7 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
name = "haku"
version = "0.1.0"
dependencies = [
"libm",
"log",
"tiny-skia",
]
[[package]]
name = "haku-cli"
version = "0.1.0"
dependencies = [
"haku",
]
[[package]]
@@ -651,6 +642,7 @@ dependencies = [
"haku",
"log",
"paste",
"tiny-skia",
]
[[package]]
@@ -895,9 +887,9 @@ checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c"
[[package]]
name = "libm"
version = "0.2.8"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058"
checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa"
[[package]]
name = "libsqlite3-sys"


@@ -6,7 +6,7 @@ members = ["crates/*"]
haku.path = "crates/haku"
haku2.path = "crates/haku2"
log = "0.4.22"
rkgk-image-ops.path = "crates/rkgk-image-ops"
tiny-skia = { version = "0.11.4", default-features = false }
[profile.dev.package.rkgk-image-ops]
opt-level = 3


@@ -1,7 +0,0 @@
[package]
name = "haku-cli"
version = "0.1.0"
edition = "2021"

[dependencies]
haku.workspace = true


@@ -1,53 +0,0 @@
// NOTE: This is a very bad CLI. I only use it for debugging haku with LLDB.
// Sorry that it doesn't actually do anything!

use std::{error::Error, fmt::Display, io::BufRead};

use haku::{
    ast::{dump::dump, Ast},
    lexer::{lex, Lexer},
    parser::{expr, Parser, ParserLimits},
    source::SourceCode,
    token::Lexis,
    value::Value,
};

fn eval(code: &str) -> Result<Value, Box<dyn Error>> {
    let code = SourceCode::unlimited_len(code);
    let mut lexer = Lexer::new(Lexis::new(1024), code);
    lex(&mut lexer).expect("too many tokens");

    let mut parser = Parser::new(&lexer.lexis, &ParserLimits { max_events: 1024 });
    expr(&mut parser);
    let mut ast = Ast::new(1024);
    let (root, _) = parser.into_ast(&mut ast).unwrap();

    eprintln!("{}", dump(&ast, root, Some(code)));

    Ok(Value::Nil)
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct DiagnosticsEmitted;

impl Display for DiagnosticsEmitted {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("diagnostics were emitted")
    }
}

impl Error for DiagnosticsEmitted {}

fn main() -> Result<(), Box<dyn Error>> {
    let stdin = std::io::stdin();
    for line in stdin.lock().lines() {
        let line = line?;
        match eval(&line) {
            Ok(value) => println!("{value:?}"),
            Err(error) => eprintln!("error: {error}"),
        }
    }

    Ok(())
}


@@ -11,6 +11,7 @@ arrayvec = { version = "0.7.4", default-features = false }
dlmalloc = { version = "0.2.6", features = ["global"] }
haku.workspace = true
log.workspace = true
tiny-skia = { workspace = true, features = ["no-std-float"] }
paste = "1.0.15"
[features]


@@ -12,15 +12,14 @@ use haku::{
diagnostic::Diagnostic,
lexer::{lex, Lexer},
parser::{self, IntoAstError, Parser},
render::tiny_skia::{
BlendMode, Color, FillRule, LineCap, Paint, PathBuilder, Pixmap, PremultipliedColorU8,
Rect, Shader, Stroke, Transform,
},
source::SourceCode,
system::{System, SystemImage},
token::Lexis,
};
use log::{debug, info};
use tiny_skia::{
BlendMode, Color, FillRule, LineCap, Paint, PathBuilder, Pixmap, PremultipliedColorU8, Rect,
Shader, Stroke, Transform,
};
pub mod logging;
#[cfg(not(feature = "std"))]
@@ -124,8 +123,6 @@ limit_setter!(render_max_depth);
struct Instance {
limits: Limits,
system: System,
system_image: SystemImage,
defs: Defs,
defs_image: DefsImage,
@@ -152,20 +149,14 @@ unsafe extern "C" fn haku_instance_new(limits: *const Limits) -> *mut Instance {
let limits = *limits;
debug!("creating new instance with limits: {limits:?}");
let system = System::new(limits.max_chunks);
let defs = Defs::new(&DefsLimits {
max_defs: limits.max_defs,
max_tags: limits.max_tags,
});
let system_image = system.image();
let defs_image = defs.image();
let instance = Box::new(Instance {
limits,
system,
system_image,
defs,
defs_image,
compile_result2: None,
@@ -187,7 +178,6 @@ unsafe extern "C" fn haku_instance_destroy(instance: *mut Instance) {
unsafe extern "C" fn haku_reset(instance: *mut Instance) {
debug!("resetting instance: {instance:?}");
let instance = &mut *instance;
instance.system.restore_image(&instance.system_image);
instance.defs.restore_image(&instance.defs_image);
}
@@ -430,11 +420,7 @@ unsafe extern "C" fn haku_compile_brush(
ast.len()
);
let src = Source {
code,
ast: &ast,
system: &instance.system,
};
let src = Source { code, ast: &ast };
let mut chunk = Chunk::new(instance.limits.chunk_capacity).unwrap();
let mut compiler = Compiler::new(&mut instance.defs, &mut chunk);
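Note the pattern in the hunks above: the `SystemImage` snapshot is deleted while the `Defs` one stays, so `haku_reset` now only restores definitions, presumably because the system function table no longer lives in per-instance state (see the `system::resolve` change further down). A minimal sketch of the remaining image/restore pattern, assuming Vec-backed storage — the real field layout of `Defs` is not shown in this diff:

pub struct Defs {
    names: Vec<String>,
}

pub struct DefsImage {
    len: usize,
}

impl Defs {
    // Snapshot the current number of definitions.
    pub fn image(&self) -> DefsImage {
        DefsImage { len: self.names.len() }
    }

    // Roll back to the snapshot, dropping any definitions added since.
    pub fn restore_image(&mut self, image: &DefsImage) {
        self.names.truncate(image.len);
    }
}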


@@ -5,8 +5,6 @@ edition = "2021"
[dependencies]
log.workspace = true
tiny-skia = { version = "0.11.4", default-features = false, features = ["no-std-float"] }
libm = "0.2.8"
[features]
default = []


@@ -12,13 +12,12 @@ use crate::{
},
diagnostic::Diagnostic,
source::SourceCode,
system::{System, SystemFnArity},
system::{self, SystemFnArity},
};
pub struct Source<'a> {
pub code: &'a SourceCode,
pub ast: &'a Ast,
pub system: &'a System,
}
#[derive(Debug, Clone, Copy)]
@@ -313,7 +312,7 @@ fn compile_unary<'a>(c: &mut Compiler<'a>, src: &Source<'a>, node_id: NodeId) ->
let name = src.ast.span(op).slice(src.code);
compile_expr(c, src, expr)?;
if let Some(index) = (src.system.resolve_fn)(SystemFnArity::Unary, name) {
if let Some(index) = system::resolve(SystemFnArity::Unary, name) {
let argument_count = 1;
c.chunk.emit_opcode(Opcode::System)?;
c.chunk.emit_u8(index)?;
@@ -353,7 +352,7 @@ fn compile_binary<'a>(c: &mut Compiler<'a>, src: &Source<'a>, node_id: NodeId) -
compile_expr(c, src, left)?;
compile_expr(c, src, right)?;
if let Some(index) = (src.system.resolve_fn)(SystemFnArity::Binary, name) {
if let Some(index) = system::resolve(SystemFnArity::Binary, name) {
let argument_count = 2;
c.chunk.emit_opcode(Opcode::System)?;
c.chunk.emit_u8(index)?;
@@ -391,7 +390,7 @@ fn compile_call<'a>(c: &mut Compiler<'a>, src: &Source<'a>, node_id: NodeId) ->
if let (NodeKind::Ident, Some(index)) = (
src.ast.kind(func),
(src.system.resolve_fn)(SystemFnArity::Nary, name),
system::resolve(SystemFnArity::Nary, name),
) {
c.chunk.emit_opcode(Opcode::System)?;
c.chunk.emit_u8(index)?;
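Across these hunks the compiler stops calling a function pointer stored on `System` (`(src.system.resolve_fn)(arity, name)`) and calls a free function `system::resolve` instead, which is why `Source` no longer needs to carry a `&System`. A hypothetical sketch of such a function, inferred from the call sites only — the actual table of built-ins is not part of this diff, so the match arms below are placeholders:

pub enum SystemFnArity {
    Unary,
    Binary,
    Nary,
}

// Map an operator or function name to the index emitted after Opcode::System.
// The call sites pass the result to emit_u8, hence the u8-sized index.
pub fn resolve(arity: SystemFnArity, name: &str) -> Option<u8> {
    match (arity, name) {
        (SystemFnArity::Unary, "-") => Some(0),
        (SystemFnArity::Binary, "+") => Some(1),
        (SystemFnArity::Nary, "index") => Some(2),
        _ => None, // not a system function; resolution falls through to user defs
    }
}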


@@ -8,10 +8,10 @@ pub mod compiler;
pub mod diagnostic;
pub mod lexer;
pub mod parser;
pub mod render;
// pub mod render;
pub mod source;
pub mod system;
pub mod token;
pub mod trampoline;
pub mod value;
pub mod vm;
// pub mod trampoline;
// pub mod value;
// pub mod vm;

File diff suppressed because it is too large.


@@ -1,3 +1,8 @@
/*
// NOTE: The test suite is currently not in service.
// We apologise for the inconvenience.
use std::error::Error;
use haku::{
@@ -7,10 +12,7 @@ use haku::{
lexer::{lex, Lexer},
parser::{self, Parser, ParserLimits},
source::SourceCode,
system::System,
token::Lexis,
value::{Closure, Ref, RefId, Value},
vm::{Vm, VmLimits},
};
fn eval(code: &str) -> Result<Value, Box<dyn Error>> {
@@ -158,7 +160,7 @@ fn def_fib_recursive() {
n
else
fib (n - 1) + fib (n - 2)
fib 10
"#;
expect_number(code, 55.0, 0.0001);
@@ -309,3 +311,5 @@ fn system_index() {
assert!(eval("index [1] (-1)").is_err());
assert!(eval("index [1] 1").is_err());
}
*/


@@ -364,9 +364,6 @@ impl SessionLoop {
}
Err(err) => error!(?err, "while loading chunks for render command"),
}
// TODO: Auto save. This'll need us to compute which chunks will be affected
// by the interactions.
}
}
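The deleted TODO described future auto-save work that would have to map each interaction to the chunks it affects. For context only, one hypothetical way to enumerate those chunks from an interaction's bounding box — the chunk size and all names here are made up, not taken from the codebase:

const CHUNK_SIZE: i32 = 256; // assumed; the wall's real chunk size is configured elsewhere

// Enumerate the (x, y) coordinates of every chunk overlapped by the
// axis-aligned bounding box of an interaction, in wall space.
fn affected_chunks(x0: f32, y0: f32, x1: f32, y1: f32) -> Vec<(i32, i32)> {
    let cx0 = (x0.floor() as i32).div_euclid(CHUNK_SIZE);
    let cy0 = (y0.floor() as i32).div_euclid(CHUNK_SIZE);
    let cx1 = (x1.ceil() as i32).div_euclid(CHUNK_SIZE);
    let cy1 = (y1.ceil() as i32).div_euclid(CHUNK_SIZE);
    let mut chunks = Vec::new();
    for cy in cy0..=cy1 {
        for cx in cx0..=cx1 {
            chunks.push((cx, cy));
        }
    }
    chunks
}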


@@ -7,11 +7,10 @@ use eyre::{bail, Context, OptionExt};
use haku::{
ast::Ast,
bytecode::{Chunk, Defs, DefsImage, DefsLimits},
compiler::{ClosureSpec, Compiler, Source},
compiler::{Compiler, Source},
lexer::{lex, Lexer},
parser::{self, Parser, ParserLimits},
source::SourceCode,
system::{ChunkId, System, SystemImage},
token::Lexis,
};
use serde::{Deserialize, Serialize};
@@ -41,40 +40,30 @@ pub struct Limits {
pub struct Haku {
limits: Limits,
system: System,
system_image: SystemImage,
defs: Defs,
defs_image: DefsImage,
vm: Option<haku2::Vm>,
brush: Option<(ChunkId, ClosureSpec)>,
}
impl Haku {
pub fn new(limits: Limits) -> Self {
let system = System::new(limits.max_chunks);
let defs = Defs::new(&DefsLimits {
max_defs: limits.max_defs,
max_tags: limits.max_tags,
});
let system_image = system.image();
let defs_image = defs.image();
Self {
limits,
system,
system_image,
defs,
defs_image,
vm: None,
brush: None,
}
}
fn reset(&mut self) {
self.system.restore_image(&self.system_image);
self.defs.restore_image(&self.defs_image);
}
@@ -100,11 +89,7 @@ impl Haku {
let mut ast = Ast::new(self.limits.ast_capacity);
let (root, parser_diagnostics) = parser.into_ast(&mut ast)?;
let src = Source {
code,
ast: &ast,
system: &self.system,
};
let src = Source { code, ast: &ast };
let mut chunk = Chunk::new(self.limits.chunk_capacity)
.expect("chunk capacity must be representable as a 16-bit number");
@@ -121,12 +106,6 @@ impl Haku {
bail!("diagnostics were emitted");
}
let chunk_id = self
.system
.add_chunk(chunk.clone())
.context("too many chunks")?;
self.brush = Some((chunk_id, closure_spec));
let scratch = self
.vm
.take()


@@ -11,9 +11,3 @@ impl Vec2 {
Self { x, y }
}
}
impl From<Vec2> for haku::value::Vec2 {
fn from(value: Vec2) -> Self {
Self::new(value.x, value.y)
}
}


@@ -9,9 +9,9 @@ use std::{
};
use dashmap::DashMap;
use haku::render::tiny_skia::Pixmap;
use rand::RngCore;
use serde::{Deserialize, Serialize};
use tiny_skia::Pixmap;
use tokio::sync::{broadcast, Mutex};
use tracing::info;


@@ -2,8 +2,8 @@ use std::sync::Arc;
use dashmap::DashSet;
use eyre::Context;
use haku::render::tiny_skia::{IntSize, Pixmap};
use rayon::iter::{IntoParallelIterator, IntoParallelRefIterator, ParallelIterator};
use tiny_skia::{IntSize, Pixmap};
use tokio::sync::{mpsc, oneshot};
use tracing::{error, info, instrument};


@@ -156,7 +156,6 @@ class Session extends EventTarget {
let version = await this.#recvJson();
console.info("protocol version", version.version);
// TODO: This should probably verify that the version is compatible.
// We don't have a way of sending Rust stuff to JavaScript just yet, so we don't care about it.
let init = {
brush: userInit.brush,