diff --git a/crates/haku-wasm/src/lib.rs b/crates/haku-wasm/src/lib.rs index 8459dc6..0e21b02 100644 --- a/crates/haku-wasm/src/lib.rs +++ b/crates/haku-wasm/src/lib.rs @@ -6,7 +6,7 @@ use core::{alloc::Layout, mem, ptr, slice}; use alloc::{boxed::Box, string::String, vec::Vec}; use haku::{ - ast::{self, Ast}, + ast::Ast, bytecode::{Chunk, Defs, DefsImage, DefsLimits}, compiler::{compile_expr, ClosureSpec, CompileError, Compiler, Source}, diagnostic::Diagnostic, @@ -48,6 +48,12 @@ struct Limits { max_parser_events: usize, ast_capacity: usize, chunk_capacity: usize, + stack_capacity: usize, + call_stack_capacity: usize, + ref_capacity: usize, + fuel: usize, + memory: usize, + render_max_depth: usize, } impl Default for Limits { @@ -61,6 +67,12 @@ impl Default for Limits { max_parser_events: 1024, ast_capacity: 1024, chunk_capacity: 65536, + stack_capacity: 1024, + call_stack_capacity: 256, + ref_capacity: 2048, + fuel: 65536, + memory: 1024 * 1024, + render_max_depth: 256, } } } @@ -100,6 +112,12 @@ limit_setter!(max_tokens); limit_setter!(max_parser_events); limit_setter!(ast_capacity); limit_setter!(chunk_capacity); +limit_setter!(stack_capacity); +limit_setter!(call_stack_capacity); +limit_setter!(ref_capacity); +limit_setter!(fuel); +limit_setter!(memory); +limit_setter!(render_max_depth); #[derive(Debug, Clone)] struct Instance { @@ -478,24 +496,22 @@ unsafe extern "C" fn haku_num_diagnostics(instance: *const Instance) -> u32 { #[unsafe(no_mangle)] unsafe extern "C" fn haku_diagnostic_start(instance: *const Instance, index: u32) -> u32 { - (&(*instance).diagnostics2)[index as usize].span().start + (*instance).diagnostics2[index as usize].span().start } #[unsafe(no_mangle)] unsafe extern "C" fn haku_diagnostic_end(instance: *const Instance, index: u32) -> u32 { - (&(*instance).diagnostics2)[index as usize].span().end + (*instance).diagnostics2[index as usize].span().end } #[unsafe(no_mangle)] unsafe extern "C" fn haku_diagnostic_message(instance: *const Instance, index: u32) -> *const u8 { - (&(*instance).diagnostics2)[index as usize] - .message() - .as_ptr() + (*instance).diagnostics2[index as usize].message().as_ptr() } #[unsafe(no_mangle)] unsafe extern "C" fn haku_diagnostic_message_len(instance: *const Instance, index: u32) -> u32 { - (&(*instance).diagnostics2)[index as usize].message().len() as u32 + (*instance).diagnostics2[index as usize].message().len() as u32 } #[unsafe(no_mangle)] diff --git a/crates/haku/src/lexer.rs b/crates/haku/src/lexer.rs index f3e4ce2..64683a9 100644 --- a/crates/haku/src/lexer.rs +++ b/crates/haku/src/lexer.rs @@ -3,7 +3,7 @@ use alloc::vec::Vec; use crate::{ diagnostic::Diagnostic, source::{SourceCode, Span}, - token::{Lexis, Spaces, TokenAllocError, TokenKind}, + token::{Lexis, TokenAllocError, TokenKind}, }; pub struct Lexer<'a> { @@ -57,7 +57,7 @@ fn one_or_two(l: &mut Lexer<'_>, kind1: TokenKind, c2: char, kind2: TokenKind) - } fn is_ident_char(c: char) -> bool { - matches!(c, 'a'..='z' | 'A'..='Z' | '0'..='9' | '_' | '\'' | '?') + matches!(c, 'a'..='z' | 'A'..='Z' | '0'..='9' | '_') } fn ident(l: &mut Lexer<'_>) -> TokenKind { @@ -132,8 +132,7 @@ fn color(l: &mut Lexer<'_>) -> TokenKind { TokenKind::Color } -fn whitespace_and_comments(l: &mut Lexer<'_>) -> bool { - let mut matched = false; +fn whitespace_and_comments(l: &mut Lexer<'_>) { loop { match l.current() { '-' => { @@ -143,7 +142,6 @@ fn whitespace_and_comments(l: &mut Lexer<'_>) -> bool { while l.current() != '\n' && l.current() != '\0' { l.advance(); } - matched = true; } else { // An 
unfortunate little bit of backtracking here; // This seems like the simplest possible solution though. @@ -155,18 +153,14 @@ fn whitespace_and_comments(l: &mut Lexer<'_>) -> bool { } } - ' ' | '\r' | '\t' => { - l.advance(); - matched = true - } + ' ' | '\r' | '\t' => l.advance(), _ => break, } } - matched } -fn newline(l: &mut Lexer<'_>, has_left_space: bool) -> (TokenKind, Span, bool) { +fn newline(l: &mut Lexer<'_>) -> (TokenKind, Span) { let start = l.position; l.advance(); // skip the initial newline let end = l.position; @@ -183,11 +177,11 @@ fn newline(l: &mut Lexer<'_>, has_left_space: bool) -> (TokenKind, Span, bool) { } } - (TokenKind::Newline, Span::new(start, end), has_left_space) + (TokenKind::Newline, Span::new(start, end)) } -fn token(l: &mut Lexer<'_>) -> (TokenKind, Span, bool) { - let has_left_space = whitespace_and_comments(l); +fn token(l: &mut Lexer<'_>) -> (TokenKind, Span) { + whitespace_and_comments(l); let start = l.position; let kind = match l.current() { @@ -209,7 +203,7 @@ fn token(l: &mut Lexer<'_>) -> (TokenKind, Span, bool) { '<' => one_or_two(l, TokenKind::Less, '=', TokenKind::LessEqual), '>' => one_or_two(l, TokenKind::Greater, '=', TokenKind::GreaterEqual), - '\n' => return newline(l, has_left_space), + '\n' => return newline(l), '(' => one(l, TokenKind::LParen), ')' => one(l, TokenKind::RParen), '[' => one(l, TokenKind::LBrack), @@ -228,22 +222,13 @@ fn token(l: &mut Lexer<'_>) -> (TokenKind, Span, bool) { } }; let end = l.position; - (kind, Span::new(start, end), has_left_space) + (kind, Span::new(start, end)) } pub fn lex(l: &mut Lexer<'_>) -> Result<(), TokenAllocError> { loop { - let (kind, span, has_left_space) = token(l); - - if !l.lexis.is_empty() { - let prev = l.lexis.len() - 1; - let spaces = l.lexis.spaces(prev); - l.lexis - .set_spaces(prev, Spaces::new(spaces.left(), has_left_space)); - } - let spaces = Spaces::new(has_left_space, false); - l.lexis.push(kind, spaces, span)?; - + let (kind, span) = token(l); + l.lexis.push(kind, span)?; if kind == TokenKind::Eof { break; } diff --git a/crates/haku/src/parser.rs b/crates/haku/src/parser.rs index a8e0cee..a64aad8 100644 --- a/crates/haku/src/parser.rs +++ b/crates/haku/src/parser.rs @@ -7,7 +7,7 @@ use crate::{ ast::{Ast, NodeAllocError, NodeId, NodeKind}, diagnostic::Diagnostic, source::Span, - token::{Lexis, Spaces, TokenKind, TokenKindSet}, + token::{Lexis, TokenKind, TokenKindSet}, }; #[derive(Debug, Clone, Copy)] @@ -132,11 +132,6 @@ impl<'a> Parser<'a> { self.tokens.kind(self.position) } - #[track_caller] - fn peek_with_spaces(&self) -> (TokenKind, Spaces) { - (self.peek(), self.tokens.spaces(self.position)) - } - fn span(&self) -> Span { self.tokens.span(self.position) } @@ -303,61 +298,33 @@ impl fmt::Display for IntoAstError { impl Error for IntoAstError {} -#[derive(Debug, Clone, Copy, PartialEq, Eq)] enum Tighter { Left, Right, } -fn tighter(left: (TokenKind, Spaces), right: (TokenKind, Spaces)) -> Tighter { - #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] - enum Spacing { - Loose, - Call, - Tight, - } - - fn tightness((kind, spaces): (TokenKind, Spaces)) -> Option<(Spacing, usize)> { - let spacing = match kind { - // There are a few types of operators which are independent of tightness. - - // For : and =, it does not matter if they're spelled one way or the other, because - // there is only one way to use them (at the beginning of the expression). 
-            TokenKind::Colon | TokenKind::Equal => Spacing::Loose,
-
-            // For calls, there is a special intermediate level, such that they can sit between
-            // loose operators and tight operators.
-            _ if PREFIX_TOKENS.contains(kind) => Spacing::Call,
-
-            // For everything else, the usual rules apply.
-            _ => match spaces.pair() {
-                (false, false) => Spacing::Tight,
-                (true, true) => Spacing::Loose,
-                _ => return None, // not a valid infix operator
-            },
-        };
-        let index = match kind {
-            TokenKind::Equal | TokenKind::Colon => 0,
-            // 1: reserved for `and` and `or`
+fn tighter(left: TokenKind, right: TokenKind) -> Tighter {
+    fn tightness(kind: TokenKind) -> Option<usize> {
+        match kind {
+            TokenKind::Equal | TokenKind::Colon => Some(0),
             TokenKind::EqualEqual
             | TokenKind::NotEqual
             | TokenKind::Less
             | TokenKind::LessEqual
             | TokenKind::Greater
-            | TokenKind::GreaterEqual => 2,
-            TokenKind::Plus | TokenKind::Minus | TokenKind::Star | TokenKind::Slash => 3,
-            // 4: reserve for `.`
-            _ if PREFIX_TOKENS.contains(kind) => 5,
-            _ => return None, // not an infix operator
-        };
-        Some((spacing, index))
+            | TokenKind::GreaterEqual => Some(1),
+            TokenKind::Plus | TokenKind::Minus => Some(2),
+            TokenKind::Star | TokenKind::Slash => Some(3),
+            _ if PREFIX_TOKENS.contains(kind) => Some(4),
+            _ => None,
+        }
     }
 
     let Some(right_tightness) = tightness(right) else {
         return Tighter::Left;
     };
     let Some(left_tightness) = tightness(left) else {
-        assert!(left.0 == TokenKind::Eof);
+        assert!(left == TokenKind::Eof);
         return Tighter::Right;
     };
 
@@ -368,13 +335,12 @@ fn tighter(left: (TokenKind, Spaces), right: (TokenKind, Spaces)) -> Tighter {
     }
 }
 
-fn precedence_parse(p: &mut Parser, left: (TokenKind, Spaces)) {
+fn precedence_parse(p: &mut Parser, left: TokenKind) {
     let mut lhs = prefix(p);
 
     loop {
-        let right = p.peek_with_spaces();
-        let tighter = tighter(left, right);
-        match tighter {
+        let right = p.peek();
+        match tighter(left, right) {
             Tighter::Left => break,
             Tighter::Right => {
                 let o = p.open_before(lhs);
@@ -570,12 +536,55 @@ fn if_expr(p: &mut Parser) -> Closed {
     p.close(o, NodeKind::If)
 }
 
-// NOTE: This must be synchronised with the match expression in prefix().
+fn let_expr(p: &mut Parser) -> Closed {
+    let o = p.open();
+
+    p.advance(); // let
+
+    if p.peek() == TokenKind::Ident {
+        let ident = p.open();
+        p.advance();
+        p.close(ident, NodeKind::Ident);
+    } else {
+        let span = p.span();
+        p.emit(Diagnostic::error(span, "`let` variable name expected"));
+        p.advance_with_error();
+    }
+
+    if p.peek() == TokenKind::Equal {
+        p.advance();
+    } else {
+        let span = p.span();
+        p.emit(Diagnostic::error(span, "`=` expected after variable name"));
+        p.advance_with_error();
+    }
+
+    expr(p);
+
+    if p.peek() == TokenKind::Newline {
+        p.advance();
+    } else {
+        let span = p.span();
+        p.emit(Diagnostic::error(
+            span,
+            "new line expected after `let` expression",
+        ));
+        p.advance_with_error();
+    }
+
+    expr(p);
+
+    p.close(o, NodeKind::Let)
+}
+
 const PREFIX_TOKENS: TokenKindSet = TokenKindSet::new(&[
     TokenKind::Ident,
     TokenKind::Tag,
     TokenKind::Number,
     TokenKind::Color,
+    // NOTE: This is ambiguous in function calls.
+    // In that case, the infix operator takes precedence (because the `match` arms for the infix op
+    // come first.)
     TokenKind::Minus,
     TokenKind::Not,
     TokenKind::LParen,
@@ -614,8 +623,8 @@ fn prefix(p: &mut Parser) -> Closed {
     }
 }
 
-fn infix(p: &mut Parser, op: (TokenKind, Spaces)) -> NodeKind {
-    match op.0 {
+fn infix(p: &mut Parser, op: TokenKind) -> NodeKind {
+    match op {
         TokenKind::Plus
         | TokenKind::Minus
         | TokenKind::Star
@@ -630,13 +639,13 @@ fn infix(p: &mut Parser, op: (TokenKind, Spaces)) -> NodeKind {
 
         TokenKind::Equal => infix_let(p, op),
 
-        _ if PREFIX_TOKENS.contains(op.0) => infix_call(p, op),
+        _ if PREFIX_TOKENS.contains(op) => infix_call(p),
 
         _ => panic!("unhandled infix operator {op:?}"),
     }
 }
 
-fn infix_binary(p: &mut Parser, op: (TokenKind, Spaces)) -> NodeKind {
+fn infix_binary(p: &mut Parser, op: TokenKind) -> NodeKind {
     let o = p.open();
     p.advance();
     p.close(o, NodeKind::Op);
@@ -649,16 +658,15 @@ fn infix_binary(p: &mut Parser, op: (TokenKind, Spaces)) -> NodeKind {
     NodeKind::Binary
 }
 
-fn infix_call(p: &mut Parser, mut arg: (TokenKind, Spaces)) -> NodeKind {
+fn infix_call(p: &mut Parser) -> NodeKind {
     while PREFIX_TOKENS.contains(p.peek()) {
-        precedence_parse(p, arg);
-        arg = p.peek_with_spaces();
+        prefix(p);
     }
 
     NodeKind::Call
 }
 
-fn infix_let(p: &mut Parser, op: (TokenKind, Spaces)) -> NodeKind {
+fn infix_let(p: &mut Parser, op: TokenKind) -> NodeKind {
     p.advance();
 
     if p.peek() == TokenKind::Newline {
@@ -684,7 +692,7 @@ fn infix_let(p: &mut Parser, op: (TokenKind, Spaces)) -> NodeKind {
 }
 
 pub fn expr(p: &mut Parser) {
-    precedence_parse(p, (TokenKind::Eof, Spaces::new(true, false)))
+    precedence_parse(p, TokenKind::Eof)
 }
 
 pub fn toplevel(p: &mut Parser) {
diff --git a/crates/haku/src/token.rs b/crates/haku/src/token.rs
index c613408..5a0ba1f 100644
--- a/crates/haku/src/token.rs
+++ b/crates/haku/src/token.rs
@@ -1,7 +1,4 @@
-use core::{
-    error::Error,
-    fmt::{self, Display},
-};
+use core::{error::Error, fmt::Display};
 
 use alloc::vec::Vec;
 
@@ -52,16 +49,10 @@ pub enum TokenKind {
     Error,
 }
 
-#[derive(Clone, Copy, PartialEq, Eq)]
-pub struct Spaces {
-    value: u8, // 0b10 = left, 0b01 = right
-}
-
 #[derive(Debug, Clone)]
 pub struct Lexis {
-    kinds: Vec<TokenKind>,
-    spaces: Vec<Spaces>,
-    spans: Vec<Span>,
+    pub kinds: Vec<TokenKind>,
+    pub spans: Vec<Span>,
 }
 
 impl Lexis {
@@ -70,7 +61,6 @@ impl Lexis {
 
         Self {
             kinds: Vec::with_capacity(capacity),
-            spaces: Vec::with_capacity(capacity),
             spans: Vec::with_capacity(capacity),
         }
     }
@@ -83,18 +73,12 @@ impl Lexis {
         self.len() == 0
     }
 
-    pub fn push(
-        &mut self,
-        kind: TokenKind,
-        spaces: Spaces,
-        span: Span,
-    ) -> Result<(), TokenAllocError> {
+    pub fn push(&mut self, kind: TokenKind, span: Span) -> Result<(), TokenAllocError> {
         if self.kinds.len() >= self.kinds.capacity() {
             return Err(TokenAllocError);
         }
 
         self.kinds.push(kind);
-        self.spaces.push(spaces);
         self.spans.push(span);
 
         Ok(())
@@ -104,48 +88,11 @@ impl Lexis {
         self.kinds[position as usize]
     }
 
-    pub fn spaces(&self, position: u32) -> Spaces {
-        self.spaces[position as usize]
-    }
-
-    pub fn set_spaces(&mut self, position: u32, spaces: Spaces) {
-        self.spaces[position as usize] = spaces;
-    }
-
     pub fn span(&self, position: u32) -> Span {
         self.spans[position as usize]
     }
 }
 
-impl Spaces {
-    pub fn new(left: bool, right: bool) -> Self {
-        Self {
-            value: (left as u8) << 1 | right as u8,
-        }
-    }
-
-    pub fn left(self) -> bool {
-        (self.value & 0b10) == 0b10
-    }
-
-    pub fn right(self) -> bool {
-        (self.value & 0b01) == 0b01
-    }
-
-    pub fn pair(self) -> (bool, bool) {
-        (self.left(), self.right())
-    }
-}
-
-impl fmt::Debug for Spaces {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_tuple("Spaces")
-            .field(&self.left())
-            .field(&self.right())
-            .finish()
-    }
-}
-
 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
 pub struct TokenAllocError;
 
diff --git a/crates/rkgk/src/wall/database.rs b/crates/rkgk/src/wall/database.rs
index 8850c9c..3e146c0 100644
--- a/crates/rkgk/src/wall/database.rs
+++ b/crates/rkgk/src/wall/database.rs
@@ -81,6 +81,11 @@ impl Database {
 pub fn start(settings: Settings) -> eyre::Result<Database> {
     let db = Connection::open(settings.path).context("cannot open wall database")?;
 
+    let major: u32 = env!("CARGO_PKG_VERSION_MAJOR").parse().unwrap();
+    let minor: u32 = env!("CARGO_PKG_VERSION_MINOR").parse().unwrap();
+    let patch: u32 = env!("CARGO_PKG_VERSION_PATCH").parse().unwrap();
+    let version = major * 1_000_000 + minor * 1_000 + patch;
+
     info!("initial setup");
 
     let version: u32 = db.pragma_query_value(None, "user_version", |x| x.get(0))?;
@@ -105,7 +110,7 @@ pub fn start(settings: Settings) -> eyre::Result<Database> {
             paint_area INTEGER NOT NULL,
             chunk_size INTEGER NOT NULL
         );
-        
+
         CREATE TABLE IF NOT EXISTS t_wall_info (
             id INTEGER PRIMARY KEY CHECK (id = 1),
diff --git a/static/brush-box.js b/static/brush-box.js
index dfcc850..92d01d7 100644
--- a/static/brush-box.js
+++ b/static/brush-box.js
@@ -45,8 +45,8 @@ or_: \\a, b ->
   else b
 
 withDotter \\d ->
-  visible? = mod (d Num) length < length * duty
-  if (visible?)
+  visible = mod (d Num) length < length * duty
+  if (visible)
     stroke thickness color (line (d From) (d To))
   else ()
@@ -76,9 +76,9 @@ wavelength: 1
 withDotter \\d ->
   pi = 3.14159265
-  a = sin (d Num * wavelength / pi) + 1 / 2
+  a = (sin (d Num * wavelength / pi) + 1) / 2
   range = maxThickness - minThickness
-  thickness = a * range + minThickness
+  thickness = minThickness + a * range
   stroke thickness color (line (d From) (d To))
`.trim(),
    },
diff --git a/static/haku.js b/static/haku.js
index a9532d0..69d1b0f 100644
--- a/static/haku.js
+++ b/static/haku.js
@@ -217,14 +217,9 @@ export class Haku {
         console.groupCollapsed("construct Haku");
         {
             let pLimits = allocCheck(w.haku_limits_new());
-            w.haku_limits_set_max_source_code_len(pLimits, limits.max_source_code_len);
-            w.haku_limits_set_max_chunks(pLimits, limits.max_chunks);
-            w.haku_limits_set_max_defs(pLimits, limits.max_defs);
-            w.haku_limits_set_max_tags(pLimits, limits.max_tags);
-            w.haku_limits_set_max_tokens(pLimits, limits.max_tokens);
-            w.haku_limits_set_max_parser_events(pLimits, limits.max_parser_events);
-            w.haku_limits_set_ast_capacity(pLimits, limits.ast_capacity);
-            w.haku_limits_set_chunk_capacity(pLimits, limits.chunk_capacity);
+            for (let name of Object.keys(limits)) {
+                w[`haku_limits_set_${name}`](pLimits, limits[name]);
+            }
 
             this.#pInstance = allocCheck(w.haku_instance_new(pLimits));
diff --git a/static/painter.js b/static/painter.js
index 4bdfa94..2e9b067 100644
--- a/static/painter.js
+++ b/static/painter.js
@@ -55,6 +55,10 @@ export function selfController(interactionQueue, wall, layer, event) {
     if (renderArea != null) {
         let numChunksToRender = numChunksInRectangle(renderArea, layer.chunkSize);
         let result = renderToChunksInArea(layer, renderArea, renderToPixmap);
+        if (!layer.canFitNewChunks(numChunksToRender)) {
+            console.debug("too many chunks rendered; committing interaction early");
+            event.earlyCommitInteraction();
+        }
        return result;
     } else {
         console.debug("render area is empty, nothing will be rendered");
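
Note on the user_version encoding added in crates/rkgk/src/wall/database.rs: the
three CARGO_PKG_VERSION components are packed into a single u32, presumably so
the crate version can be compared against SQLite's user_version pragma (read a
few lines below in the same hunk). A minimal sketch of the scheme, assuming
minor and patch stay below 1000; the helper names here are illustrative and not
part of the patch:

    fn encode_version(major: u32, minor: u32, patch: u32) -> u32 {
        // e.g. 1.2.3 -> 1_002_003
        major * 1_000_000 + minor * 1_000 + patch
    }

    fn decode_version(v: u32) -> (u32, u32, u32) {
        // inverse of encode_version: 1_002_003 -> (1, 2, 3)
        (v / 1_000_000, v / 1_000 % 1_000, v % 1_000)
    }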