introduce tags, structs, and reticles

This was meant to be split into smaller changes, but I realised too late that I had already edited my existing revision.
liquidex 2024-09-08 13:53:29 +02:00
parent 8356b6c750
commit 5b7d9586ea
26 changed files with 1113 additions and 351 deletions

View file

@ -7,19 +7,20 @@ use core::{alloc::Layout, slice};
use alloc::{boxed::Box, vec::Vec}; use alloc::{boxed::Box, vec::Vec};
use haku::{ use haku::{
ast::Ast, ast::Ast,
bytecode::{Chunk, Defs, DefsImage}, bytecode::{Chunk, Defs, DefsImage, DefsLimits},
compiler::{compile_expr, ClosureSpec, CompileError, Compiler, Source}, compiler::{compile_expr, ClosureSpec, CompileError, Compiler, Source},
diagnostic::Diagnostic, diagnostic::Diagnostic,
lexer::{lex, Lexer}, lexer::{lex, Lexer},
parser::{self, IntoAstError, Parser}, parser::{self, IntoAstError, Parser},
render::{ render::{
tiny_skia::{Pixmap, PremultipliedColorU8}, tiny_skia::{Pixmap, PremultipliedColorU8},
Renderer, RendererLimits, RendererLimits,
}, },
source::SourceCode, source::SourceCode,
system::{ChunkId, System, SystemImage}, system::{ChunkId, System, SystemImage},
token::Lexis, token::Lexis,
value::{Closure, Ref, Value}, trampoline::{Cont, Trampoline},
value::{Closure, Ref, Vec2},
vm::{Exception, Vm, VmImage, VmLimits}, vm::{Exception, Vm, VmImage, VmLimits},
}; };
use log::{debug, info}; use log::{debug, info};
@ -46,6 +47,7 @@ struct Limits {
max_source_code_len: usize, max_source_code_len: usize,
max_chunks: usize, max_chunks: usize,
max_defs: usize, max_defs: usize,
max_tags: usize,
max_tokens: usize, max_tokens: usize,
max_parser_events: usize, max_parser_events: usize,
ast_capacity: usize, ast_capacity: usize,
@ -65,6 +67,7 @@ impl Default for Limits {
max_source_code_len: 65536, max_source_code_len: 65536,
max_chunks: 2, max_chunks: 2,
max_defs: 256, max_defs: 256,
max_tags: 256,
max_tokens: 1024, max_tokens: 1024,
max_parser_events: 1024, max_parser_events: 1024,
ast_capacity: 1024, ast_capacity: 1024,
@ -110,6 +113,7 @@ macro_rules! limit_setter {
limit_setter!(max_source_code_len); limit_setter!(max_source_code_len);
limit_setter!(max_chunks); limit_setter!(max_chunks);
limit_setter!(max_defs); limit_setter!(max_defs);
limit_setter!(max_tags);
limit_setter!(max_tokens); limit_setter!(max_tokens);
limit_setter!(max_parser_events); limit_setter!(max_parser_events);
limit_setter!(ast_capacity); limit_setter!(ast_capacity);
@ -133,10 +137,22 @@ struct Instance {
vm: Vm, vm: Vm,
vm_image: VmImage, vm_image: VmImage,
value: Value, trampoline: Option<Trampoline>,
exception: Option<Exception>, exception: Option<Exception>,
} }
impl Instance {
fn set_exception(&mut self, exn: Exception) {
debug!("setting exception = {exn:?}");
self.exception = Some(exn);
}
fn reset_exception(&mut self) {
debug!("resetting exception");
self.exception = None;
}
}
#[no_mangle] #[no_mangle]
unsafe extern "C" fn haku_instance_new(limits: *const Limits) -> *mut Instance { unsafe extern "C" fn haku_instance_new(limits: *const Limits) -> *mut Instance {
let limits = *limits; let limits = *limits;
@ -144,7 +160,10 @@ unsafe extern "C" fn haku_instance_new(limits: *const Limits) -> *mut Instance {
let system = System::new(limits.max_chunks); let system = System::new(limits.max_chunks);
let defs = Defs::new(limits.max_defs); let defs = Defs::new(&DefsLimits {
max_defs: limits.max_defs,
max_tags: limits.max_tags,
});
let vm = Vm::new( let vm = Vm::new(
&defs, &defs,
&VmLimits { &VmLimits {
@ -168,7 +187,7 @@ unsafe extern "C" fn haku_instance_new(limits: *const Limits) -> *mut Instance {
defs_image, defs_image,
vm, vm,
vm_image, vm_image,
value: Value::Nil, trampoline: None,
exception: None, exception: None,
}); });
@ -191,13 +210,6 @@ unsafe extern "C" fn haku_reset(instance: *mut Instance) {
instance.defs.restore_image(&instance.defs_image); instance.defs.restore_image(&instance.defs_image);
} }
#[no_mangle]
unsafe extern "C" fn haku_reset_vm(instance: *mut Instance) {
debug!("resetting instance VM: {instance:?}");
let instance = &mut *instance;
instance.vm.restore_image(&instance.vm_image);
}
#[no_mangle] #[no_mangle]
unsafe extern "C" fn haku_has_exception(instance: *mut Instance) -> bool { unsafe extern "C" fn haku_has_exception(instance: *mut Instance) -> bool {
(*instance).exception.is_some() (*instance).exception.is_some()
@ -426,39 +438,35 @@ unsafe extern "C" fn haku_compile_brush(
StatusCode::Ok StatusCode::Ok
} }
struct PixmapLock {
pixmap: Pixmap,
}
#[no_mangle] #[no_mangle]
extern "C" fn haku_pixmap_new(width: u32, height: u32) -> *mut PixmapLock { extern "C" fn haku_pixmap_new(width: u32, height: u32) -> *mut Pixmap {
let ptr = Box::leak(Box::new(PixmapLock { let ptr = Box::leak(Box::new(
pixmap: Pixmap::new(width, height).expect("invalid pixmap size"), Pixmap::new(width, height).expect("invalid pixmap size"),
})) as *mut _; )) as *mut _;
debug!("created pixmap with size {width}x{height}: {ptr:?}"); debug!("created pixmap with size {width}x{height}: {ptr:?}");
ptr ptr
} }
#[no_mangle] #[no_mangle]
unsafe extern "C" fn haku_pixmap_destroy(pixmap: *mut PixmapLock) { unsafe extern "C" fn haku_pixmap_destroy(pixmap: *mut Pixmap) {
debug!("destroying pixmap: {pixmap:?}"); debug!("destroying pixmap: {pixmap:?}");
drop(Box::from_raw(pixmap)) drop(Box::from_raw(pixmap))
} }
#[no_mangle] #[no_mangle]
unsafe extern "C" fn haku_pixmap_data(pixmap: *mut PixmapLock) -> *mut u8 { unsafe extern "C" fn haku_pixmap_data(pixmap: *mut Pixmap) -> *mut u8 {
let pixmap = &mut (*pixmap).pixmap; let pixmap = &mut *pixmap;
pixmap.pixels_mut().as_mut_ptr() as *mut u8 pixmap.pixels_mut().as_mut_ptr() as *mut u8
} }
#[no_mangle] #[no_mangle]
unsafe extern "C" fn haku_pixmap_clear(pixmap: *mut PixmapLock) { unsafe extern "C" fn haku_pixmap_clear(pixmap: *mut Pixmap) {
let pixmap = &mut (*pixmap).pixmap; let pixmap = &mut *pixmap;
pixmap.pixels_mut().fill(PremultipliedColorU8::TRANSPARENT); pixmap.pixels_mut().fill(PremultipliedColorU8::TRANSPARENT);
} }
#[no_mangle] #[no_mangle]
unsafe extern "C" fn haku_eval_brush(instance: *mut Instance, brush: *const Brush) -> StatusCode { unsafe extern "C" fn haku_begin_brush(instance: *mut Instance, brush: *const Brush) -> StatusCode {
let instance = &mut *instance; let instance = &mut *instance;
let brush = &*brush; let brush = &*brush;
@ -466,8 +474,10 @@ unsafe extern "C" fn haku_eval_brush(instance: *mut Instance, brush: *const Brus
panic!("brush is not compiled and ready to be used"); panic!("brush is not compiled and ready to be used");
}; };
debug!("applying defs"); instance.vm.restore_image(&instance.vm_image);
instance.vm.apply_defs(&instance.defs); instance.vm.apply_defs(&instance.defs);
instance.reset_exception();
instance.trampoline = None;
let Ok(closure_id) = instance let Ok(closure_id) = instance
.vm .vm
@ -476,51 +486,91 @@ unsafe extern "C" fn haku_eval_brush(instance: *mut Instance, brush: *const Brus
return StatusCode::OutOfRefSlots; return StatusCode::OutOfRefSlots;
}; };
debug!("resetting exception"); instance.reset_exception();
instance.exception = None; let value = match instance.vm.run(&instance.system, closure_id, &[]) {
instance.value = match instance.vm.run(&instance.system, closure_id) {
Ok(value) => value, Ok(value) => value,
Err(exn) => { Err(exn) => {
debug!("setting exception {exn:?}"); instance.set_exception(exn);
instance.exception = Some(exn);
return StatusCode::EvalException; return StatusCode::EvalException;
} }
}; };
instance.trampoline = Some(Trampoline::new(value));
StatusCode::Ok StatusCode::Ok
} }
#[no_mangle] #[no_mangle]
unsafe extern "C" fn haku_render_value( unsafe extern "C" fn haku_cont_kind(instance: *mut Instance) -> Cont {
let instance = &mut *instance;
instance.trampoline.as_ref().unwrap().cont(&instance.vm)
}
fn wrap_exception(
instance: &mut Instance,
error_code: StatusCode,
f: impl FnOnce(&mut Instance) -> Result<(), Exception>,
) -> StatusCode {
match f(instance) {
Ok(_) => StatusCode::Ok,
Err(exn) => {
instance.set_exception(exn);
error_code
}
}
}
#[no_mangle]
unsafe extern "C" fn haku_cont_scribble(
instance: *mut Instance, instance: *mut Instance,
pixmap: *mut PixmapLock, pixmap: *mut Pixmap,
translation_x: f32, translation_x: f32,
translation_y: f32, translation_y: f32,
) -> StatusCode { ) -> StatusCode {
let instance = &mut *instance; let instance = &mut *instance;
debug!("resetting exception"); instance.reset_exception();
instance.exception = None;
debug!("will render value: {:?}", instance.value); debug!("cont_scribble: pixmap={pixmap:?} translation_x={translation_x:?} translation_y={translation_y:?} trampoline={:?}", instance.trampoline);
let pixmap_locked = &mut (*pixmap).pixmap; wrap_exception(instance, StatusCode::RenderException, |instance| {
instance.trampoline.as_mut().unwrap().scribble(
let mut renderer = Renderer::new( &instance.vm,
pixmap_locked, &mut *pixmap,
&RendererLimits { Vec2 {
pixmap_stack_capacity: instance.limits.pixmap_stack_capacity, x: translation_x,
transform_stack_capacity: instance.limits.transform_stack_capacity, y: translation_y,
}, },
); &RendererLimits {
renderer.translate(translation_x, translation_y); pixmap_stack_capacity: instance.limits.pixmap_stack_capacity,
match renderer.render(&instance.vm, instance.value) { transform_stack_capacity: instance.limits.transform_stack_capacity,
Ok(()) => (), },
Err(exn) => { )
instance.exception = Some(exn); })
instance.vm.restore_image(&instance.vm_image); }
return StatusCode::RenderException;
} #[no_mangle]
} unsafe extern "C" fn haku_cont_dotter(
instance: *mut Instance,
StatusCode::Ok from_x: f32,
from_y: f32,
to_x: f32,
to_y: f32,
num: f32,
) -> StatusCode {
let instance = &mut *instance;
instance.reset_exception();
debug!(
"cont_dotter: from_x={from_x} from_y={from_y} to_x={to_x} to_y={to_y} trampoline={:?}",
instance.trampoline
);
wrap_exception(instance, StatusCode::RenderException, |instance| {
instance.trampoline.as_mut().unwrap().dotter(
&mut instance.vm,
&instance.system,
Vec2::new(from_x, from_y),
Vec2::new(to_x, to_y),
num,
)
})
} }
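
Not part of the commit, but for orientation: a minimal sketch of how a host could drive the new begin/cont exports, written against the items in this file. The segment coordinates fed to `haku_cont_dotter` are placeholders, and the "scribble ends the chain" assumption mirrors the comment in the server's render thread.

```rust
// Hypothetical driver for the begin/cont protocol above. The real host (haku.js)
// calls these through WebAssembly; this just shows the intended call order.
unsafe fn drive_brush(instance: *mut Instance, brush: *const Brush, pixmap: *mut Pixmap) {
    if !matches!(haku_begin_brush(instance, brush), StatusCode::Ok) {
        return; // evaluating the brush raised an exception
    }
    loop {
        match haku_cont_kind(instance) {
            // The brush asked for dotter input: feed it one line segment.
            Cont::Dotter => {
                if !matches!(
                    haku_cont_dotter(instance, 0.0, 0.0, 8.0, 8.0, 1.0),
                    StatusCode::Ok
                ) {
                    return;
                }
            }
            // A plain scribble ends the chain: rasterize it and stop.
            Cont::Scribble => {
                let _ = haku_cont_scribble(instance, pixmap, 0.0, 0.0);
                return;
            }
        }
    }
}
```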

View file

@ -12,6 +12,7 @@ pub enum Opcode {
Nil, Nil,
False, False,
True, True,
Tag,
Number, // (float: f32) Number, // (float: f32)
Rgba, // (r: u8, g: u8, b: u8, a: u8) Rgba, // (r: u8, g: u8, b: u8, a: u8)
@ -36,6 +37,7 @@ pub enum Opcode {
// Control flow. // Control flow.
Jump, // (offset: u16) Jump, // (offset: u16)
JumpIfNot, // (offset: u16) JumpIfNot, // (offset: u16)
Field, // (count: u8, tags: [u16; count])
// Function calls. // Function calls.
Call, // (argc: u8) Call, // (argc: u8)
@ -157,6 +159,12 @@ impl Chunk {
} }
} }
impl Offset {
pub fn to_u16(self) -> u16 {
self.0
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ChunkSizeError; pub struct ChunkSizeError;
@ -193,21 +201,48 @@ impl DefId {
} }
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub struct TagId(u16);
impl TagId {
pub(crate) fn from_u16(x: u16) -> Self {
Self(x)
}
pub fn to_u16(self) -> u16 {
self.0
}
}
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct Defs { pub struct Defs {
defs: Vec<String>, defs: Vec<String>,
tags: Vec<String>,
}
#[derive(Debug, Clone, Copy)]
pub struct DefsLimits {
pub max_defs: usize,
pub max_tags: usize,
} }
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
pub struct DefsImage { pub struct DefsImage {
defs: usize, defs: usize,
tags: usize,
} }
impl Defs { impl Defs {
pub fn new(capacity: usize) -> Self { pub fn new(limits: &DefsLimits) -> Self {
assert!(capacity < u16::MAX as usize + 1); assert!(limits.max_defs < u16::MAX as usize + 1);
assert!(limits.max_tags < u16::MAX as usize + 1);
let mut tags = Vec::with_capacity(limits.max_tags);
add_well_known_tags(&mut tags);
Self { Self {
defs: Vec::with_capacity(capacity), defs: Vec::with_capacity(limits.max_defs),
tags,
} }
} }
@ -219,14 +254,14 @@ impl Defs {
self.len() != 0 self.len() != 0
} }
pub fn get(&mut self, name: &str) -> Option<DefId> { pub fn get_def(&mut self, name: &str) -> Option<DefId> {
self.defs self.defs
.iter() .iter()
.position(|n| *n == name) .position(|n| *n == name)
.map(|index| DefId(index as u16)) .map(|index| DefId(index as u16))
} }
pub fn add(&mut self, name: &str) -> Result<DefId, DefError> { pub fn add_def(&mut self, name: &str) -> Result<DefId, DefError> {
if self.defs.iter().any(|n| n == name) { if self.defs.iter().any(|n| n == name) {
Err(DefError::Exists) Err(DefError::Exists)
} else { } else {
@ -239,9 +274,27 @@ impl Defs {
} }
} }
fn add_tag(tags: &mut Vec<String>, name: &str) -> Result<TagId, TagError> {
if tags.len() >= tags.capacity() {
return Err(TagError::OutOfSpace);
}
let id = TagId(tags.len() as u16);
tags.push(name.to_owned());
Ok(id)
}
pub fn get_or_add_tag(&mut self, name: &str) -> Result<TagId, TagError> {
if let Some(index) = self.tags.iter().position(|n| n == name) {
Ok(TagId(index as u16))
} else {
Self::add_tag(&mut self.tags, name)
}
}
pub fn image(&self) -> DefsImage { pub fn image(&self) -> DefsImage {
DefsImage { DefsImage {
defs: self.defs.len(), defs: self.defs.len(),
tags: self.tags.len(),
} }
} }
@ -249,6 +302,9 @@ impl Defs {
self.defs.resize_with(image.defs, || { self.defs.resize_with(image.defs, || {
panic!("image must be a subset of the current defs") panic!("image must be a subset of the current defs")
}); });
self.tags.resize_with(image.tags, || {
panic!("image must be a subset of the current defs")
});
} }
} }
@ -266,3 +322,45 @@ impl Display for DefError {
}) })
} }
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TagError {
OutOfSpace,
}
impl Display for TagError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(match self {
TagError::OutOfSpace => "too many tags",
})
}
}
macro_rules! well_known_tags {
($($ident:tt = $value:tt),* $(,)?) => {
impl TagId {
$(
#[allow(non_upper_case_globals)]
pub const $ident: Self = Self($value);
)*
}
fn add_well_known_tags(tags: &mut Vec<String>) {
$(
let id = Defs::add_tag(tags, stringify!($ident)).unwrap();
assert_eq!(id, TagId::from_u16($value));
)*
}
}
}
well_known_tags! {
// NOTE: The numbers must be sorted from 0 to N, due to limitations of Rust's macro system.
// https://github.com/rust-lang/rust/issues/83527
Nil = 0,
From = 1,
To = 2,
Num = 3,
}
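
As a quick illustration of the new tag API (a sketch, not part of the commit; the limits are arbitrary and `Speed` is a made-up user tag):

```rust
use haku::bytecode::{Defs, DefsLimits, TagId};

#[test]
fn tags_are_interned_by_name() {
    // Well-known tags are registered up front, so interning "To" yields the
    // predefined id; any other tag takes the next free slot, deduplicated by name.
    let mut defs = Defs::new(&DefsLimits { max_defs: 256, max_tags: 256 });
    assert_eq!(defs.get_or_add_tag("To").unwrap(), TagId::To);

    let speed = defs.get_or_add_tag("Speed").unwrap(); // hypothetical user tag
    assert_eq!(defs.get_or_add_tag("Speed").unwrap(), speed);
}
```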

View file

@ -7,7 +7,9 @@ use alloc::vec::Vec;
use crate::{ use crate::{
ast::{Ast, NodeId, NodeKind}, ast::{Ast, NodeId, NodeKind},
bytecode::{Chunk, DefError, Defs, EmitError, Opcode, CAPTURE_CAPTURE, CAPTURE_LOCAL}, bytecode::{
Chunk, DefError, Defs, EmitError, Opcode, TagError, TagId, CAPTURE_CAPTURE, CAPTURE_LOCAL,
},
diagnostic::Diagnostic, diagnostic::Diagnostic,
source::SourceCode, source::SourceCode,
system::{System, SystemFnArity}, system::{System, SystemFnArity},
@ -123,11 +125,6 @@ pub fn compile_expr<'a>(c: &mut Compiler<'a>, src: &Source<'a>, node_id: NodeId)
} }
} }
fn unsupported(c: &mut Compiler, src: &Source, node_id: NodeId, message: &str) -> CompileResult {
c.emit(Diagnostic::error(src.ast.span(node_id), message));
Ok(())
}
fn compile_nil(c: &mut Compiler) -> CompileResult { fn compile_nil(c: &mut Compiler) -> CompileResult {
c.chunk.emit_opcode(Opcode::Nil)?; c.chunk.emit_opcode(Opcode::Nil)?;
@ -183,7 +180,7 @@ fn compile_ident<'a>(c: &mut Compiler<'a>, src: &Source<'a>, node_id: NodeId) ->
c.chunk.emit_u8(index)?; c.chunk.emit_u8(index)?;
} }
Ok(None) => { Ok(None) => {
if let Some(def_id) = c.defs.get(name) { if let Some(def_id) = c.defs.get_def(name) {
c.chunk.emit_opcode(Opcode::Def)?; c.chunk.emit_opcode(Opcode::Def)?;
c.chunk.emit_u16(def_id.to_u16())?; c.chunk.emit_u16(def_id.to_u16())?;
} else { } else {
@ -202,7 +199,8 @@ fn compile_ident<'a>(c: &mut Compiler<'a>, src: &Source<'a>, node_id: NodeId) ->
} }
fn compile_tag(c: &mut Compiler, src: &Source, node_id: NodeId) -> CompileResult { fn compile_tag(c: &mut Compiler, src: &Source, node_id: NodeId) -> CompileResult {
let tag = src.ast.span(node_id).slice(src.code); let span = src.ast.span(node_id);
let tag = span.slice(src.code);
match tag { match tag {
"False" => { "False" => {
@ -212,7 +210,17 @@ fn compile_tag(c: &mut Compiler, src: &Source, node_id: NodeId) -> CompileResult
c.chunk.emit_opcode(Opcode::True)?; c.chunk.emit_opcode(Opcode::True)?;
} }
_ => { _ => {
c.emit(Diagnostic::error(src.ast.span(node_id), "uppercased identifiers are reserved for future use; please start your identifiers with a lowercase letter instead")); let tag_id = c.defs.get_or_add_tag(tag).unwrap_or_else(|error| {
match error {
TagError::OutOfSpace => {
c.emit(Diagnostic::error(span, "too many unique tags used"));
}
}
TagId::Nil
});
c.chunk.emit_opcode(Opcode::Tag)?;
c.chunk.emit_u16(tag_id.to_u16())?;
} }
} }
@ -598,7 +606,7 @@ fn def_prepass<'a>(c: &mut Compiler<'a>, src: &Source<'a>, toplevel: NodeId) ->
if let (Some(ident), Some(op)) = (binary_walk.node(), binary_walk.get(NodeKind::Op)) { if let (Some(ident), Some(op)) = (binary_walk.node(), binary_walk.get(NodeKind::Op)) {
if src.ast.span(op).slice(src.code) == "=" { if src.ast.span(op).slice(src.code) == "=" {
let name = src.ast.span(ident).slice(src.code); let name = src.ast.span(ident).slice(src.code);
match c.defs.add(name) { match c.defs.add_def(name) {
Ok(_) => (), Ok(_) => (),
Err(DefError::Exists) => c.emit(Diagnostic::error( Err(DefError::Exists) => c.emit(Diagnostic::error(
src.ast.span(ident), src.ast.span(ident),
@ -651,7 +659,12 @@ fn compile_def<'a>(c: &mut Compiler<'a>, src: &Source<'a>, node_id: NodeId) -> C
return Ok(()); return Ok(());
}; };
if src.ast.kind(left) != NodeKind::Ident { if src.ast.kind(left) == NodeKind::Tag {
c.emit(Diagnostic::error(
src.ast.span(left),
"defs may not be named with uppercase letters, because uppercase letters are used for tags",
));
} else if src.ast.kind(left) != NodeKind::Ident {
c.emit(Diagnostic::error( c.emit(Diagnostic::error(
src.ast.span(left), src.ast.span(left),
"def name (identifier) expected", "def name (identifier) expected",
@ -662,7 +675,7 @@ fn compile_def<'a>(c: &mut Compiler<'a>, src: &Source<'a>, node_id: NodeId) -> C
// NOTE: def_prepass collects all definitions beforehand. // NOTE: def_prepass collects all definitions beforehand.
// In case a def ends up not existing, that means we ran out of space for defs - so emit a // In case a def ends up not existing, that means we ran out of space for defs - so emit a
// zero def instead. // zero def instead.
let def_id = c.defs.get(name).unwrap_or_default(); let def_id = c.defs.get_def(name).unwrap_or_default();
compile_expr(c, src, right)?; compile_expr(c, src, right)?;
c.chunk.emit_opcode(Opcode::SetDef)?; c.chunk.emit_opcode(Opcode::SetDef)?;

View file

@ -12,5 +12,6 @@ pub mod render;
pub mod source; pub mod source;
pub mod system; pub mod system;
pub mod token; pub mod token;
pub mod trampoline;
pub mod value; pub mod value;
pub mod vm; pub mod vm;

View file

@ -127,7 +127,7 @@ impl<'a> Renderer<'a> {
&paint, &paint,
&SStroke { &SStroke {
width: stroke.thickness, width: stroke.thickness,
line_cap: LineCap::Square, line_cap: LineCap::Round,
..Default::default() ..Default::default()
}, },
transform, transform,

View file

@ -6,8 +6,8 @@ use core::{
use alloc::vec::Vec; use alloc::vec::Vec;
use crate::{ use crate::{
bytecode::Chunk, bytecode::{Chunk, EmitError, Offset, Opcode, TagId},
value::Value, value::{BytecodeLoc, Closure, FunctionName, Value, Vec4},
vm::{Exception, FnArgs, Vm}, vm::{Exception, FnArgs, Vm},
}; };
@ -29,6 +29,7 @@ pub struct System {
pub resolve_fn: fn(SystemFnArity, &str) -> Option<u8>, pub resolve_fn: fn(SystemFnArity, &str) -> Option<u8>,
pub fns: [Option<SystemFn>; 256], pub fns: [Option<SystemFn>; 256],
pub chunks: Vec<Chunk>, pub chunks: Vec<Chunk>,
structs_chunk_offsets: StructsChunkOffsets,
} }
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
@ -36,33 +37,28 @@ pub struct SystemImage {
chunks: usize, chunks: usize,
} }
macro_rules! def_fns { #[derive(Debug, Clone)]
($($index:tt $arity:tt $name:tt => $fnref:expr),* $(,)?) => { struct StructsChunkOffsets {
pub(crate) fn init_fns(system: &mut System) { dotter: BytecodeLoc,
$(
debug_assert!(system.fns[$index].is_none());
system.fns[$index] = Some($fnref);
)*
}
pub(crate) fn resolve(arity: SystemFnArity, name: &str) -> Option<u8> {
match (arity, name){
$((SystemFnArity::$arity, $name) => Some($index),)*
_ => None,
}
}
};
} }
impl System { impl System {
pub fn new(max_chunks: usize) -> Self { pub fn new(max_chunks: usize) -> Self {
assert!(
max_chunks > 1,
"the 0th chunk is allocated for internal purposes; therefore there must be more than one chunk to execute bytecode"
);
assert!(max_chunks < u32::MAX as usize); assert!(max_chunks < u32::MAX as usize);
let (structs_chunk, structs_chunk_offsets) = Self::structs_chunk().unwrap();
let mut system = Self { let mut system = Self {
resolve_fn: Self::resolve, resolve_fn: Self::resolve,
fns: [None; 256], fns: [None; 256],
chunks: Vec::with_capacity(max_chunks), chunks: Vec::with_capacity(max_chunks),
structs_chunk_offsets,
}; };
system.chunks.push(structs_chunk);
Self::init_fns(&mut system); Self::init_fns(&mut system);
system system
} }
@ -92,6 +88,64 @@ impl System {
panic!("image must be a subset of the current system") panic!("image must be a subset of the current system")
}); });
} }
// The structs chunk contains bytecode for _struct functions_.
//
// Struct functions are a way of encoding structures with arbitrary named data fields.
// They are called like regular functions, except they always expect a single tag-type
// argument. They're used where performance is not a primary concern---in structures that appear
// once or a couple times throughout the lifetime of a program, in which convenient,
// backwards-compatible field access is a priority.
//
// Each struct function has only two opcodes: `Field`, followed by a `Return`.
// The `Field` opcode is an efficient way of encoding an if chain made solely out of tag
// comparisons, returning a closure capture (or error if there's no field with the given name.)
//
// if (tag == A) capture_0
// else if (tag == B) capture_1
// else if (tag == C) capture_2
// else error
//
// Closure captures are used here, because they're a convenient way of attaching indexed data
// to any function, even created outside the language itself.
//
// All of this results in a function that can be called like `d From` to obtain a piece of data
// stored inside of the `d` structure.
fn structs_chunk() -> Result<(Chunk, StructsChunkOffsets), EmitError> {
let mut chunk = Chunk::new(128).unwrap();
let dotter = chunk.offset();
chunk.emit_opcode(Opcode::Field)?;
chunk.emit_u8(3)?;
chunk.emit_u16(TagId::From.to_u16())?;
chunk.emit_u16(TagId::To.to_u16())?;
chunk.emit_u16(TagId::Num.to_u16())?;
chunk.emit_opcode(Opcode::Return)?;
fn loc(offset: Offset) -> BytecodeLoc {
BytecodeLoc {
chunk_id: ChunkId(0),
offset: offset.to_u16(),
}
}
Ok((
chunk,
StructsChunkOffsets {
dotter: loc(dotter),
},
))
}
pub fn create_dotter(&self, from: Vec4, to: Vec4, num: f32) -> Closure {
Closure {
start: self.structs_chunk_offsets.dotter,
name: FunctionName::Anonymous,
param_count: 1,
local_count: 0,
captures: Vec::from_iter([Value::Vec4(from), Value::Vec4(to), Value::Number(num)]),
}
}
} }
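
To make the `Field` opcode's behaviour concrete, here is a plain-Rust rendering of what the dotter struct function above encodes (illustration only; the real lookup happens in the VM's `Field` handler, and the capture layout follows `create_dotter`):

```rust
use haku::{bytecode::TagId, value::Value};

// Calling the dotter struct function with a single tag argument boils down to
// an if chain over the well-known tags, indexing into the closure's captures
// (captures[0] = from, captures[1] = to, captures[2] = num).
fn dotter_field(tag: TagId, captures: &[Value]) -> Option<Value> {
    if tag == TagId::From {
        Some(captures[0])
    } else if tag == TagId::To {
        Some(captures[1])
    } else if tag == TagId::Num {
        Some(captures[2])
    } else {
        None // the VM raises "field ... does not exist" here
    }
}
```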
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
@ -105,11 +159,29 @@ impl Display for ChunkError {
impl Error for ChunkError {} impl Error for ChunkError {}
macro_rules! def_fns {
($($index:tt $arity:tt $name:tt => $fnref:expr),* $(,)?) => {
pub(crate) fn init_fns(system: &mut System) {
$(
debug_assert!(system.fns[$index].is_none());
system.fns[$index] = Some($fnref);
)*
}
pub(crate) fn resolve(arity: SystemFnArity, name: &str) -> Option<u8> {
match (arity, name){
$((SystemFnArity::$arity, $name) => Some($index),)*
_ => None,
}
}
};
}
pub mod fns { pub mod fns {
use alloc::{format, vec::Vec}; use alloc::{format, vec::Vec};
use crate::{ use crate::{
value::{Fill, List, Ref, Rgba, Scribble, Shape, Stroke, Value, Vec2, Vec4}, value::{Fill, List, Ref, Reticle, Rgba, Scribble, Shape, Stroke, Value, Vec2, Vec4},
vm::{Exception, FnArgs, Vm}, vm::{Exception, FnArgs, Vm},
}; };
@ -181,8 +253,11 @@ pub mod fns {
0xc1 Nary "line" => line, 0xc1 Nary "line" => line,
0xc2 Nary "rect" => rect, 0xc2 Nary "rect" => rect,
0xc3 Nary "circle" => circle, 0xc3 Nary "circle" => circle,
0xe0 Nary "stroke" => stroke, 0xe0 Nary "stroke" => stroke,
0xe1 Nary "fill" => fill, 0xe1 Nary "fill" => fill,
0xf0 Nary "withDotter" => with_dotter,
} }
} }
@ -469,7 +544,13 @@ pub mod fns {
fn to_shape(value: Value, vm: &Vm) -> Option<Shape> { fn to_shape(value: Value, vm: &Vm) -> Option<Shape> {
match value { match value {
Value::Nil | Value::False | Value::True | Value::Number(_) | Value::Rgba(_) => None, Value::Nil
| Value::False
| Value::True
| Value::Tag(_)
| Value::Number(_)
| Value::Rgba(_) => None,
Value::Ref(id) => { Value::Ref(id) => {
if let Ref::Shape(shape) = vm.get_ref(id) { if let Ref::Shape(shape) = vm.get_ref(id) {
Some(shape.clone()) Some(shape.clone())
@ -477,6 +558,7 @@ pub mod fns {
None None
} }
} }
Value::Vec4(vec) => Some(Shape::Point(vec.into())), Value::Vec4(vec) => Some(Shape::Point(vec.into())),
} }
} }
@ -588,4 +670,22 @@ pub mod fns {
Ok(Value::Nil) Ok(Value::Nil)
} }
} }
pub fn with_dotter(vm: &mut Vm, args: FnArgs) -> Result<Value, Exception> {
if args.num() != 1 {
return Err(vm.create_exception(
"`withDotter` expects a single argument (withDotter \\d -> [])",
));
}
let draw = args.get_closure(vm, 0, "argument to `withDotter` must be a closure")?;
if draw.param_count != 1 {
return Err(vm.create_exception("function passed to `withDotter` must take in a single parameter (withDotter \\d -> [])"));
}
let id = vm.create_ref(Ref::Reticle(Reticle::Dotter {
draw: args.get(vm, 0),
}))?;
Ok(Value::Ref(id))
}
} }

View file

@ -0,0 +1,78 @@
use tiny_skia::Pixmap;
use crate::{
render::{Renderer, RendererLimits},
system::System,
value::{Ref, Reticle, Value, Vec2},
vm::{Exception, Vm},
};
#[derive(Debug, Clone, Copy)]
pub struct Trampoline {
pub value: Value,
}
// NOTE: This must be kept in sync with haku.js.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u8)]
pub enum Cont {
Scribble,
Dotter,
}
impl Trampoline {
pub fn new(init: Value) -> Self {
Self { value: init }
}
pub fn cont(&self, vm: &Vm) -> Cont {
let Some((_, refv)) = vm.get_ref_value(self.value) else {
return Cont::Scribble;
};
match refv {
Ref::Reticle(_) => Cont::Dotter,
_ => Cont::Scribble,
}
}
pub fn scribble(
&mut self,
vm: &Vm,
pixmap: &mut Pixmap,
translation: Vec2,
limits: &RendererLimits,
) -> Result<(), Exception> {
let mut renderer = Renderer::new(&mut *pixmap, limits);
renderer.translate(translation.x, translation.y);
renderer.render(vm, self.value)
}
pub fn dotter(
&mut self,
vm: &mut Vm,
system: &System,
from: Vec2,
to: Vec2,
num: f32,
) -> Result<(), Exception> {
let (_, vref) = vm.get_ref_value(self.value).expect("value must be a ref");
let &Ref::Reticle(Reticle::Dotter {
draw: Value::Ref(draw_id),
}) = vref
else {
panic!("value must be a dotter reticle");
};
let dotter = vm.create_ref(Ref::Closure(system.create_dotter(
from.into(),
to.into(),
num,
)))?;
let value = vm.run(system, draw_id, &[Value::Ref(dotter)])?;
self.value = value;
Ok(())
}
}
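
For orientation, a sketch of how this API is meant to be driven by a consumer of the crate (assumed usage mirroring the server and haku.js; the input segment values are made up):

```rust
use haku::{
    render::{tiny_skia::Pixmap, RendererLimits},
    system::System,
    trampoline::{Cont, Trampoline},
    value::{Value, Vec2},
    vm::{Exception, Vm},
};

// Answer Dotter continuations until the brush settles on a scribble,
// then rasterize that scribble into the pixmap.
fn drive(
    vm: &mut Vm,
    system: &System,
    pixmap: &mut Pixmap,
    limits: &RendererLimits,
    brush_value: Value, // result of evaluating the compiled brush closure
    from: Vec2,
    to: Vec2,
) -> Result<(), Exception> {
    let mut trampoline = Trampoline::new(brush_value);
    while trampoline.cont(vm) == Cont::Dotter {
        trampoline.dotter(vm, system, from, to, 1.0)?;
    }
    trampoline.scribble(vm, pixmap, Vec2::new(0.0, 0.0), limits)
}
```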

View file

@ -1,6 +1,6 @@
use alloc::vec::Vec; use alloc::vec::Vec;
use crate::{compiler::ClosureSpec, system::ChunkId}; use crate::{bytecode::TagId, compiler::ClosureSpec, system::ChunkId};
// TODO: Probably needs some pretty hardcore space optimization. // TODO: Probably needs some pretty hardcore space optimization.
// Maybe when we have static typing. // Maybe when we have static typing.
@ -9,6 +9,7 @@ pub enum Value {
Nil, Nil,
False, False,
True, True,
Tag(TagId),
Number(f32), Number(f32),
Vec4(Vec4), Vec4(Vec4),
Rgba(Rgba), Rgba(Rgba),
@ -73,6 +74,12 @@ pub struct Vec2 {
pub y: f32, pub y: f32,
} }
impl Vec2 {
pub fn new(x: f32, y: f32) -> Self {
Self { x, y }
}
}
impl From<Vec4> for Vec2 { impl From<Vec4> for Vec2 {
fn from(value: Vec4) -> Self { fn from(value: Vec4) -> Self {
Self { Self {
@ -90,6 +97,17 @@ pub struct Vec4 {
pub w: f32, pub w: f32,
} }
impl From<Vec2> for Vec4 {
fn from(value: Vec2) -> Self {
Self {
x: value.x,
y: value.y,
z: 0.0,
w: 0.0,
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Default)] #[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Default)]
#[repr(C)] #[repr(C)]
pub struct Rgba { pub struct Rgba {
@ -123,6 +141,7 @@ pub enum Ref {
List(List), List(List),
Shape(Shape), Shape(Shape),
Scribble(Scribble), Scribble(Scribble),
Reticle(Reticle),
} }
impl Ref { impl Ref {
@ -206,3 +225,8 @@ pub enum Scribble {
Stroke(Stroke), Stroke(Stroke),
Fill(Fill), Fill(Fill),
} }
#[derive(Debug, Clone)]
pub enum Reticle {
Dotter { draw: Value },
}

View file

@ -5,9 +5,10 @@ use core::{
}; };
use alloc::{string::String, vec::Vec}; use alloc::{string::String, vec::Vec};
use log::debug;
use crate::{ use crate::{
bytecode::{self, Defs, Opcode, CAPTURE_CAPTURE, CAPTURE_LOCAL}, bytecode::{self, Defs, Opcode, TagId, CAPTURE_CAPTURE, CAPTURE_LOCAL},
system::{ChunkId, System}, system::{ChunkId, System},
value::{BytecodeLoc, Closure, FunctionName, List, Ref, RefId, Rgba, Value, Vec4}, value::{BytecodeLoc, Closure, FunctionName, List, Ref, RefId, Rgba, Value, Vec4},
}; };
@ -178,7 +179,12 @@ impl Vm {
.ok_or_else(|| self.create_exception("corrupted bytecode (call stack underflow)")) .ok_or_else(|| self.create_exception("corrupted bytecode (call stack underflow)"))
} }
pub fn run(&mut self, system: &System, mut closure_id: RefId) -> Result<Value, Exception> { pub fn run(
&mut self,
system: &System,
mut closure_id: RefId,
params: &[Value],
) -> Result<Value, Exception> {
let closure = self let closure = self
.get_ref(closure_id) .get_ref(closure_id)
.as_closure() .as_closure()
@ -191,7 +197,12 @@ impl Vm {
let mut fuel = self.fuel; let mut fuel = self.fuel;
let init_bottom = bottom; let init_bottom = bottom;
for _ in 0..closure.local_count { let local_count = closure.local_count;
for &param in params {
self.push(param)?;
}
for _ in 0..local_count {
self.push(Value::Nil)?; self.push(Value::Nil)?;
} }
@ -219,6 +230,11 @@ impl Vm {
Opcode::False => self.push(Value::False)?, Opcode::False => self.push(Value::False)?,
Opcode::True => self.push(Value::True)?, Opcode::True => self.push(Value::True)?,
Opcode::Tag => {
let i = chunk.read_u16(&mut pc)?;
self.push(Value::Tag(TagId::from_u16(i)))?;
}
Opcode::Number => { Opcode::Number => {
let x = chunk.read_f32(&mut pc)?; let x = chunk.read_f32(&mut pc)?;
self.push(Value::Number(x))?; self.push(Value::Number(x))?;
@ -344,6 +360,34 @@ impl Vm {
} }
} }
Opcode::Field => {
let count = chunk.read_u8(&mut pc)? as usize;
let field_tag = self.pop()?;
let Value::Tag(field_tag_id) = field_tag else {
return Err(self.create_exception("name of data field to look up must be a tag (starting with an uppercase letter)"));
};
let mut index = None;
for i in 0..count {
let tag_id = TagId::from_u16(chunk.read_u16(&mut pc)?);
if tag_id == field_tag_id {
index = Some(i);
}
}
let Some(index) = index else {
return Err(self.create_exception(
"field with this name does not exist in the given data structure",
));
};
let closure = self.get_ref(closure_id).as_closure().unwrap();
self.push(closure.captures.get(index).copied().ok_or_else(|| {
self.create_exception("corrupted bytecode (field index out of bounds)")
})?)?;
}
Opcode::Call => { Opcode::Call => {
let argument_count = chunk.read_u8(&mut pc)? as usize; let argument_count = chunk.read_u8(&mut pc)? as usize;
@ -554,6 +598,23 @@ impl FnArgs {
.to_rgba() .to_rgba()
.ok_or_else(|| vm.create_exception(message)) .ok_or_else(|| vm.create_exception(message))
} }
#[inline(never)]
pub fn get_closure<'vm>(
&self,
vm: &'vm Vm,
index: usize,
message: &'static str,
) -> Result<&'vm Closure, Exception> {
let value = self.get(vm, index);
let (_, any_ref) = vm
.get_ref_value(value)
.ok_or_else(|| vm.create_exception(message))?;
let Ref::Closure(closure) = any_ref else {
return Err(vm.create_exception(message));
};
Ok(closure)
}
} }
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]

View file

@ -2,7 +2,7 @@ use std::error::Error;
use haku::{ use haku::{
ast::{dump::dump, Ast}, ast::{dump::dump, Ast},
bytecode::{Chunk, Defs}, bytecode::{Chunk, Defs, DefsLimits},
compiler::{compile_expr, Compiler, Source}, compiler::{compile_expr, Compiler, Source},
lexer::{lex, Lexer}, lexer::{lex, Lexer},
parser::{self, Parser, ParserLimits}, parser::{self, Parser, ParserLimits},
@ -14,7 +14,7 @@ use haku::{
}; };
fn eval(code: &str) -> Result<Value, Box<dyn Error>> { fn eval(code: &str) -> Result<Value, Box<dyn Error>> {
let mut system = System::new(1); let mut system = System::new(2);
let code = SourceCode::unlimited_len(code); let code = SourceCode::unlimited_len(code);
@ -32,7 +32,10 @@ fn eval(code: &str) -> Result<Value, Box<dyn Error>> {
system: &system, system: &system,
}; };
let mut defs = Defs::new(256); let mut defs = Defs::new(&DefsLimits {
max_defs: 256,
max_tags: 256,
});
let mut chunk = Chunk::new(65536).unwrap(); let mut chunk = Chunk::new(65536).unwrap();
let mut compiler = Compiler::new(&mut defs, &mut chunk); let mut compiler = Compiler::new(&mut defs, &mut chunk);
compile_expr(&mut compiler, &src, root)?; compile_expr(&mut compiler, &src, root)?;
@ -70,7 +73,7 @@ fn eval(code: &str) -> Result<Value, Box<dyn Error>> {
println!("closure spec: {closure_spec:?}"); println!("closure spec: {closure_spec:?}");
let closure = vm.create_ref(Ref::Closure(Closure::chunk(chunk_id, closure_spec)))?; let closure = vm.create_ref(Ref::Closure(Closure::chunk(chunk_id, closure_spec)))?;
let result = vm.run(&system, closure)?; let result = vm.run(&system, closure, &[])?;
println!("used fuel: {}", limits.fuel - vm.remaining_fuel()); println!("used fuel: {}", limits.fuel - vm.remaining_fuel());
@ -281,3 +284,11 @@ fn issue_78() {
"#; "#;
assert_eq!(eval(code).unwrap(), Value::Ref(RefId::from_u32(2))) assert_eq!(eval(code).unwrap(), Value::Ref(RefId::from_u32(2)))
} }
#[test]
fn with_dotter_identity() {
let code = r#"
withDotter \d -> d
"#;
assert_eq!(eval(code).unwrap(), Value::Ref(RefId::from_u32(0)))
}

View file

@ -12,7 +12,10 @@ use axum::{
}; };
use base64::Engine; use base64::Engine;
use eyre::{bail, Context, OptionExt}; use eyre::{bail, Context, OptionExt};
use haku::value::Value; use haku::{
trampoline::{Cont, Trampoline},
value::Value,
};
use schema::{ use schema::{
ChunkInfo, Error, LoginRequest, LoginResponse, Notify, Online, Request, Version, WallInfo, ChunkInfo, Error, LoginRequest, LoginResponse, Notify, Online, Request, Version, WallInfo,
}; };
@ -29,7 +32,8 @@ use crate::{
schema::Vec2, schema::Vec2,
wall::{ wall::{
self, auto_save::AutoSave, chunk_images::ChunkImages, chunk_iterator::ChunkIterator, self, auto_save::AutoSave, chunk_images::ChunkImages, chunk_iterator::ChunkIterator,
database::ChunkDataPair, ChunkPosition, JoinError, SessionHandle, UserInit, Wall, WallId, database::ChunkDataPair, ChunkPosition, Interaction, JoinError, SessionHandle, UserInit,
Wall, WallId,
}, },
}; };
@ -220,12 +224,8 @@ struct SessionLoop {
} }
enum RenderCommand { enum RenderCommand {
SetBrush { Interact {
brush: String, interactions: Vec<Interaction>,
},
Plot {
points: Vec<Vec2>,
done: oneshot::Sender<()>, done: oneshot::Sender<()>,
}, },
} }
@ -245,10 +245,17 @@ impl SessionLoop {
// If this ends up dropping commands - it's your fault for trying to DoS my server! // If this ends up dropping commands - it's your fault for trying to DoS my server!
let (render_commands_tx, render_commands_rx) = mpsc::channel(1); let (render_commands_tx, render_commands_rx) = mpsc::channel(1);
render_commands_tx let thread_ready = {
.send(RenderCommand::SetBrush { brush }) let (done_tx, done_rx) = oneshot::channel();
.await render_commands_tx
.unwrap(); .send(RenderCommand::Interact {
interactions: vec![Interaction::SetBrush { brush }],
done: done_tx,
})
.await
.unwrap();
done_rx
};
// We spawn our own thread so as not to clog the tokio blocking thread pool with our // We spawn our own thread so as not to clog the tokio blocking thread pool with our
// rendering shenanigans. // rendering shenanigans.
@ -265,6 +272,8 @@ impl SessionLoop {
}) })
.context("could not spawn render thread")?; .context("could not spawn render thread")?;
thread_ready.await?;
Ok(Self { Ok(Self {
wall_id, wall_id,
wall, wall,
@ -312,47 +321,82 @@ impl SessionLoop {
| wall::EventKind::Leave | wall::EventKind::Leave
| wall::EventKind::Cursor { .. } => (), | wall::EventKind::Cursor { .. } => (),
wall::EventKind::SetBrush { brush } => { wall::EventKind::Interact { interactions } => {
// SetBrush is not dropped because it is a very important event. let (done_tx, done_rx) = oneshot::channel();
_ = self
.render_commands_tx
.send(RenderCommand::SetBrush {
brush: brush.clone(),
})
.await;
}
wall::EventKind::Plot { points } => {
let chunks_to_modify: Vec<_> =
chunks_to_modify(&self.wall, points).into_iter().collect();
match self.chunk_images.load(chunks_to_modify.clone()).await {
Ok(_) => {
// We drop commands if we take too long to render instead of lagging
// the WebSocket thread.
// Theoretically this will yield much better responsiveness, but it _will_
// result in some visual glitches if we're getting bottlenecked.
let (done_tx, done_rx) = oneshot::channel();
let send_result =
self.render_commands_tx.try_send(RenderCommand::Plot {
points: points.clone(),
done: done_tx,
});
if send_result.is_err() { if interactions
info!( .iter()
?points, .any(|i| matches!(i, Interaction::SetBrush { .. }))
"render thread is overloaded, dropping request to draw points" {
); // SetBrush is an important event, so we wait for the render thread
} // to unload.
_ = self
let auto_save = Arc::clone(&self.auto_save); .render_commands_tx
tokio::spawn(async move { .send(RenderCommand::Interact {
_ = done_rx.await; interactions: interactions.clone(),
auto_save.request(chunks_to_modify).await; done: done_tx,
})
.await;
} else {
// If there is no SetBrush, there's no need to wait, so we fire events
// blindly. If the thread's not okay with that... well, whatever.
// That's your issue for making a really slow brush.
let send_result =
self.render_commands_tx.try_send(RenderCommand::Interact {
interactions: interactions.clone(),
done: done_tx,
}); });
if send_result.is_err() {
info!(
?interactions,
"render thread is overloaded, dropping interaction request"
);
} }
Err(err) => error!(?err, "while loading chunks for render command"),
} }
}
// TODO: Auto save. This'll need us to compute which chunks will be affected
// by the interactions.
} // wall::EventKind::SetBrush { brush } => {
// // SetBrush is not dropped because it is a very important event.
// _ = self
// .render_commands_tx
// .send(RenderCommand::SetBrush {
// brush: brush.clone(),
// })
// .await;
// }
// wall::EventKind::Plot { points } => {
// let chunks_to_modify: Vec<_> =
// chunks_to_modify(&self.wall, points).into_iter().collect();
// match self.chunk_images.load(chunks_to_modify.clone()).await {
// Ok(_) => {
// // We drop commands if we take too long to render instead of lagging
// // the WebSocket thread.
// // Theoretically this will yield much better responsiveness, but it _will_
// // result in some visual glitches if we're getting bottlenecked.
// let (done_tx, done_rx) = oneshot::channel();
// let send_result =
// self.render_commands_tx.try_send(RenderCommand::Plot {
// points: points.clone(),
// done: done_tx,
// });
// if send_result.is_err() {
// info!(
// ?points,
// "render thread is overloaded, dropping request to draw points"
// );
// }
// let auto_save = Arc::clone(&self.auto_save);
// tokio::spawn(async move {
// _ = done_rx.await;
// auto_save.request(chunks_to_modify).await;
// });
// }
// Err(err) => error!(?err, "while loading chunks for render command"),
// }
// }
} }
self.wall.event(wall::Event { self.wall.event(wall::Event {
@ -437,32 +481,97 @@ impl SessionLoop {
fn render_thread(wall: Arc<Wall>, limits: Limits, mut commands: mpsc::Receiver<RenderCommand>) { fn render_thread(wall: Arc<Wall>, limits: Limits, mut commands: mpsc::Receiver<RenderCommand>) {
let mut haku = Haku::new(limits); let mut haku = Haku::new(limits);
let mut trampoline = None;
let mut brush_ok = false; let mut brush_ok = false;
let mut current_render_area = RenderArea::default();
while let Some(command) = commands.blocking_recv() { while let Some(command) = commands.blocking_recv() {
match command { let RenderCommand::Interact { interactions, done } = command;
RenderCommand::SetBrush { brush } => {
brush_ok = haku.set_brush(&brush).is_ok(); let mut queue = VecDeque::from(interactions);
while let Some(interaction) = queue.pop_front() {
if let Some(render_area) = render_area(wall.settings(), &interaction) {
current_render_area = render_area;
} }
RenderCommand::Plot { points, done } => { match interaction {
if brush_ok { Interaction::SetBrush { brush } => {
if let Ok(value) = haku.eval_brush() { brush_ok = haku.set_brush(&brush).is_ok();
for point in points { }
// Ignore the result. It's better if we render _something_ rather
// than nothing. Interaction::Dotter { from, to, num } => {
_ = draw_to_chunks(&wall, &haku, value, point); if brush_ok {
if let Some(trampoline) =
jumpstart_trampoline(&mut haku, &mut trampoline)
{
let cont = haku.cont(trampoline);
if cont == Cont::Dotter {
_ = haku.dotter(trampoline, from, to, num);
} else {
error!("received Dotter interaction when a {cont:?} continuation was next");
}
} else {
info!("failed to jumpstart trampoline for Dotter interaction");
} }
haku.reset_vm();
} }
} }
_ = done.send(());
Interaction::Scribble => {
// Regarding the take(): this is to self-synchronize in case of error.
// Once a scribble is rendered, we reset back to not having a trampoline,
// and further interactions must be sent to kickstart a new one.
//
// Regarding not jumpstarting a trampoline here: it would be useless,
// because a scribble is always the last thing in the chain, and it never
// modifies the render area. Therefore what would end up happening is we'd
// end up rendering to no chunks, therefore not doing anything.
// The server doesn't need to report anything to the user and so can save
// a bit of work by _not_ evaluating the brush initially here.
if let Some(trampoline) = trampoline.take() {
_ = draw_to_chunks(&wall, &haku, current_render_area, trampoline.value);
} else {
info!("tried to scribble without an active trampoline");
}
current_render_area = RenderArea::default();
}
} }
} }
_ = done.send(());
} }
} }
} }
#[derive(Debug, Clone, Copy, Default)]
struct RenderArea {
top_left: Vec2,
bottom_right: Vec2,
}
fn render_area(wall_settings: &wall::Settings, interaction: &Interaction) -> Option<RenderArea> {
match interaction {
Interaction::Dotter { from, to, .. } => {
let half_paint_area = wall_settings.paint_area as f32 / 2.0;
Some(RenderArea {
top_left: Vec2::new(from.x - half_paint_area, from.y - half_paint_area),
bottom_right: Vec2::new(to.x + half_paint_area, to.y + half_paint_area),
})
}
Interaction::SetBrush { .. } | Interaction::Scribble => None,
}
}
fn jumpstart_trampoline<'a>(
haku: &mut Haku,
trampoline: &'a mut Option<Trampoline>,
) -> Option<&'a mut Trampoline> {
if trampoline.is_none() {
*trampoline = haku.eval_brush().ok().map(Trampoline::new);
}
trampoline.as_mut()
}
fn chunks_to_modify(wall: &Wall, points: &[Vec2]) -> HashSet<ChunkPosition> { fn chunks_to_modify(wall: &Wall, points: &[Vec2]) -> HashSet<ChunkPosition> {
let mut chunks = HashSet::new(); let mut chunks = HashSet::new();
for point in points { for point in points {
@ -483,23 +592,23 @@ fn chunks_to_modify(wall: &Wall, points: &[Vec2]) -> HashSet<ChunkPosition> {
} }
#[instrument(skip(wall, haku, value))] #[instrument(skip(wall, haku, value))]
fn draw_to_chunks(wall: &Wall, haku: &Haku, value: Value, center: Vec2) -> eyre::Result<()> { fn draw_to_chunks(
wall: &Wall,
haku: &Haku,
render_area: RenderArea,
value: Value,
) -> eyre::Result<()> {
let settings = wall.settings(); let settings = wall.settings();
let chunk_size = settings.chunk_size as f32; let chunk_size = settings.chunk_size as f32;
let paint_area = settings.paint_area as f32;
let left = center.x - paint_area / 2.0; let top_left_chunk = settings.chunk_at(render_area.top_left);
let top = center.y - paint_area / 2.0; let bottom_right_chunk = settings.chunk_at_ceil(render_area.bottom_right);
let left_chunk = settings.chunk_at_1d(left); for chunk_y in top_left_chunk.y..bottom_right_chunk.y {
let top_chunk = settings.chunk_at_1d(top); for chunk_x in top_left_chunk.x..bottom_right_chunk.x {
let right_chunk = settings.chunk_at_1d_ceil(left + paint_area); let x = f32::floor(-chunk_x as f32 * chunk_size);
let bottom_chunk = settings.chunk_at_1d_ceil(top + paint_area); let y = f32::floor(-chunk_y as f32 * chunk_size);
for chunk_y in top_chunk..bottom_chunk {
for chunk_x in left_chunk..right_chunk {
let x = f32::floor(-chunk_x as f32 * chunk_size + center.x);
let y = f32::floor(-chunk_y as f32 * chunk_size + center.y);
let chunk_ref = wall.get_or_create_chunk(ChunkPosition::new(chunk_x, chunk_y)); let chunk_ref = wall.get_or_create_chunk(ChunkPosition::new(chunk_x, chunk_y));
let mut chunk = chunk_ref.blocking_lock(); let mut chunk = chunk_ref.blocking_lock();
haku.render_value(&mut chunk.pixmap, value, Vec2 { x, y })?; haku.render_value(&mut chunk.pixmap, value, Vec2 { x, y })?;
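
A worked example of the new render-area arithmetic (not part of the commit; it assumes `chunk_at` floors and `chunk_at_ceil` ceils the coordinate divided by `chunk_size`, which this diff does not show):

```rust
// One axis of the chunk range that draw_to_chunks iterates over.
fn chunk_range_1d(chunk_size: f32, min: f32, max: f32) -> (i32, i32) {
    let first = (min / chunk_size).floor() as i32;
    let last = (max / chunk_size).ceil() as i32;
    (first, last) // the render loop runs first..last
}

fn main() {
    // A Dotter segment (10,10) -> (20,20) with paint_area = 512 expands to a
    // render area of (-246,-246)..(276,276); with chunk_size = 256 that covers
    // chunks -1..2 on each axis (a 3x3 block), each drawn with a translation
    // of (-chunk_x * 256, -chunk_y * 256).
    assert_eq!(chunk_range_1d(256.0, -246.0, 276.0), (-1, 2));
}
```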

View file

@ -6,7 +6,7 @@
use eyre::{bail, Context, OptionExt}; use eyre::{bail, Context, OptionExt};
use haku::{ use haku::{
ast::Ast, ast::Ast,
bytecode::{Chunk, Defs, DefsImage}, bytecode::{Chunk, Defs, DefsImage, DefsLimits},
compiler::{ClosureSpec, Compiler, Source}, compiler::{ClosureSpec, Compiler, Source},
lexer::{lex, Lexer}, lexer::{lex, Lexer},
parser::{self, Parser, ParserLimits}, parser::{self, Parser, ParserLimits},
@ -14,6 +14,7 @@ use haku::{
source::SourceCode, source::SourceCode,
system::{ChunkId, System, SystemImage}, system::{ChunkId, System, SystemImage},
token::Lexis, token::Lexis,
trampoline::{Cont, Trampoline},
value::{Closure, Ref, Value}, value::{Closure, Ref, Value},
vm::{Vm, VmImage, VmLimits}, vm::{Vm, VmImage, VmLimits},
}; };
@ -30,6 +31,7 @@ pub struct Limits {
pub max_source_code_len: u32, pub max_source_code_len: u32,
pub max_chunks: usize, pub max_chunks: usize,
pub max_defs: usize, pub max_defs: usize,
pub max_tags: usize,
pub max_tokens: usize, pub max_tokens: usize,
pub max_parser_events: usize, pub max_parser_events: usize,
pub ast_capacity: usize, pub ast_capacity: usize,
@ -59,7 +61,10 @@ pub struct Haku {
impl Haku { impl Haku {
pub fn new(limits: Limits) -> Self { pub fn new(limits: Limits) -> Self {
let system = System::new(limits.max_chunks); let system = System::new(limits.max_chunks);
let defs = Defs::new(limits.max_defs); let defs = Defs::new(&DefsLimits {
max_defs: limits.max_defs,
max_tags: limits.max_tags,
});
let vm = Vm::new( let vm = Vm::new(
&defs, &defs,
&VmLimits { &VmLimits {
@ -158,7 +163,7 @@ impl Haku {
let scribble = self let scribble = self
.vm .vm
.run(&self.system, closure_id) .run(&self.system, closure_id, &[])
.context("an exception occurred while evaluating the scribble")?; .context("an exception occurred while evaluating the scribble")?;
Ok(scribble) Ok(scribble)
@ -187,4 +192,18 @@ impl Haku {
pub fn reset_vm(&mut self) { pub fn reset_vm(&mut self) {
self.vm.restore_image(&self.vm_image); self.vm.restore_image(&self.vm_image);
} }
pub fn cont(&self, trampoline: &Trampoline) -> Cont {
trampoline.cont(&self.vm)
}
pub fn dotter(
&mut self,
trampoline: &mut Trampoline,
from: Vec2,
to: Vec2,
num: f32,
) -> eyre::Result<()> {
Ok(trampoline.dotter(&mut self.vm, &self.system, from.into(), to.into(), num)?)
}
} }

View file

@ -1,6 +1,6 @@
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)] #[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize, Default)]
pub struct Vec2 { pub struct Vec2 {
pub x: f32, pub x: f32,
pub y: f32, pub y: f32,
@ -11,3 +11,9 @@ impl Vec2 {
Self { x, y } Self { x, y }
} }
} }
impl From<Vec2> for haku::value::Vec2 {
fn from(value: Vec2) -> Self {
Self::new(value.x, value.y)
}
}

View file

@ -192,9 +192,19 @@ pub enum EventKind {
Leave, Leave,
Cursor { position: Vec2 }, Cursor { position: Vec2 },
Interact { interactions: Vec<Interaction> },
}
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(
tag = "kind",
rename_all = "camelCase",
rename_all_fields = "camelCase"
)]
pub enum Interaction {
SetBrush { brush: String }, SetBrush { brush: String },
Plot { points: Vec<Vec2> }, Dotter { from: Vec2, to: Vec2, num: f32 },
Scribble,
} }
#[derive(Debug, Clone, Serialize)] #[derive(Debug, Clone, Serialize)]
@ -276,7 +286,7 @@ impl Wall {
// Drawing events are handled by the owner session's thread to make drawing as // Drawing events are handled by the owner session's thread to make drawing as
// parallel as possible. // parallel as possible.
EventKind::SetBrush { .. } | EventKind::Plot { .. } => (), EventKind::Interact { .. } => {}
} }
} }
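
For reference, a sketch of one drag step as the client would submit it under the new schema (illustrative values; the brush source is just an example, and the sketch is written as if inside the server crate):

```rust
use crate::{schema::Vec2, wall::Interaction};

// A single stroke segment: set the brush once, stream one dotter segment,
// then ask for the resulting scribble to be rendered. With the serde
// attributes above, each object is tagged by `kind` on the wire, e.g.
// {"kind":"dotter","from":{...},"to":{...},"num":1.0}.
fn example_interactions() -> Vec<Interaction> {
    vec![
        Interaction::SetBrush {
            brush: String::from("withDotter \\d -> stroke 8 #000 (d To)"),
        },
        Interaction::Dotter {
            from: Vec2 { x: 0.0, y: 0.0 },
            to: Vec2 { x: 8.0, y: 8.0 },
            num: 1.0,
        },
        Interaction::Scribble,
    ]
}
```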

View file

@ -27,7 +27,8 @@ In case you edited anything in the input box on the right, paste the following t
-- Try playing around with the numbers, -- Try playing around with the numbers,
-- and see what happens! -- and see what happens!
stroke 8 #000 (vec 0 0) withDotter \d ->
stroke 8 #000 (d To)
``` ```
rakugaki is a drawing program for digital scribbles and other pieces of art. rakugaki is a drawing program for digital scribbles and other pieces of art.
@ -83,10 +84,11 @@ If you want to draw multiple scribbles, you can wrap them into a list, which we
```haku ```haku
-- Draw two colorful dots instead of one! -- Draw two colorful dots instead of one!
[ withDotter \d ->
stroke 8 #F00 (vec 4 0) [
stroke 8 #00F (vec (-4) 0)) stroke 8 #F00 (vec ((vecX (d To)) + 4) (vecY (d To)))
] stroke 8 #00F (vec ((vecX (d To)) - 4) (vecY (d To)))
]
``` ```
::: aside ::: aside
@ -103,25 +105,29 @@ And what's even crazier is that you can compose lists _further_---you can make a
It'll draw the first inner list, which contains two scribbles, and then it'll draw the second inner list, which contains two scribbles. It'll draw the first inner list, which contains two scribbles, and then it'll draw the second inner list, which contains two scribbles.
```haku ```haku
[ withDotter \d ->
[ [
stroke 8 #F00 (vec 4 (-4)) [
stroke 8 #00F (vec (-4) (-4)) stroke 8 #F00 (vec ((vecX (d To)) + 4) (vecY (d To)))
stroke 8 #00F (vec ((vecX (d To)) - 4) (vecY (d To)))
]
[
stroke 8 #FF0 (vec (vecX (d To)) ((vecY (d To)) + 4))
stroke 8 #0FF (vec (vecX (d To)) ((vecY (d To)) - 4))
]
] ]
[
stroke 8 #FF0 (vec 4 4)
stroke 8 #0FF (vec (-4) 4)
]
]
``` ```
::: aside ::: aside
Another weird thing: when negating a number, you have to put it in parentheses. I know this example is kind of horrendous to read right now.
This is because haku does not see your spaces---`vec -4`, `vec - 4`, and `vec-4` all mean the same thing! This is because haku currently does not have vector math, which means we have to disassemble and reassemble vectors manually.
In this case, it will always choose the 2nd interpretation---vec minus four.
So to make it interpret our minus four as, well, _minus four_, we need to enclose it in parentheses. {% Another weird thing: when negating a number, you have to put it in parentheses. %}
{% This is because haku does not see your spaces---`vec -4`, `vec - 4`, and `vec-4` all mean the same thing! %}
{% In this case, it will always choose the 2nd interpretation---vec minus four. %}
{% So to make it interpret our minus four as, well, _minus four_, we need to enclose it in parentheses. %}
::: :::
@ -142,15 +148,24 @@ Anyways!
Recall that super simple brush from before... Recall that super simple brush from before...
```haku ```haku
stroke 8 #000 (vec 0 0) withDotter \d ->
stroke 8 #000 (d To)
``` ```
This reads as "a stroke that's 8 pixels wide, has the color `#000`, and is drawn at the point `(0, 0)` relative to the mouse cursor." This reads as "given a dotter, output a stroke that's 8 pixels wide, has the color `#000`, and is drawn at the dotter's `To` coordinates."
All these symbols are very meaningful to haku. All these symbols are very meaningful to haku.
If you reorder or remove any one of them, your brush isn't going to work! If you reorder or remove any one of them, your brush isn't going to work!
- Reading from left to right, we start with `stroke`.\ - Reading from left to right, we start with `withDotter`.
We can't draw without knowing _where_ to draw, and the `withDotter` incantation lets us ask the UI for that.
- Then, `\d ->` lets us name the data we get back from the UI.
`d` ends up containing a few useful properties, but the most useful one for us is `To`, which contains the current mouse position (where *`To`* draw).
- We'll get to what all these sigils mean to haku later!
- On the next line we have a `stroke`.
`stroke` is a _function_---a recipe for producing data!\ `stroke` is a _function_---a recipe for producing data!\
haku has [many such built-in recipes](/docs/system.html). haku has [many such built-in recipes](/docs/system.html).
`stroke` is one of them. `stroke` is one of them.
@ -192,7 +207,8 @@ Note how it's parenthesized though---recall that function arguments are separate
And with all that, we let haku mix all the ingredients together, and get a black dot under the cursor. And with all that, we let haku mix all the ingredients together, and get a black dot under the cursor.
```haku ```haku
stroke 8 #000 (vec 0 0) withDotter \d ->
stroke 8 #000 (d To)
``` ```
Nice! Nice!
@ -206,8 +222,16 @@ So to spice things up, haku has a few shapes you can choose from!
Recall that 3rd argument to `stroke`. Recall that 3rd argument to `stroke`.
We can actually pass any arbitrary shape to it, and haku will outline it for us. We can actually pass any arbitrary shape to it, and haku will outline it for us.
Right now haku supports two additional shapes: rectangles and circles. The experience of drawing with that example brush is pretty crap, because it can draw dots that don't connect with each other at all.
You can try them out by playing with this brush! Let's fix that by drawing a `line` instead!
```haku
withDotter \d ->
stroke 8 #000 (line (d From) (d To))
```
We replace the singular position `d To` with a `line`. `line` expects two arguments, which are vectors defining the line's start and end points.
For the starting position we use a _different_ property of `d`, which is `From`---this is the _previous_ value of `To`, which allows us to draw a continuous line.
```haku ```haku
[ [

View file

@ -69,6 +69,11 @@ max_chunks = 2
# Maximum amount of defs across all source code chunks. # Maximum amount of defs across all source code chunks.
max_defs = 256 max_defs = 256
# Maximum amount of unique tags across all source code chunks.
# Note that tag uniqueness is determined by the tag's source code string, so two instances of `A`
# only occupy one slot.
max_tags = 256
# Maximum amount of tokens a single chunk can have. # Maximum amount of tokens a single chunk can have.
max_tokens = 65536 max_tokens = 65536

View file

@ -1,12 +1,12 @@
import { CodeEditor, getLineStart } from "rkgk/code-editor.js"; import { CodeEditor } from "rkgk/code-editor.js";
import { BrushPreview } from "rkgk/brush-preview.js";
const defaultBrush = ` const defaultBrush = `
-- This is your brush. -- This is your brush.
-- Try playing around with the numbers, -- Try playing around with the numbers,
-- and see what happens! -- and see what happens!
stroke 8 #000 (vec 0 0) withDotter \d ->
stroke 8 #000 (d To)
`.trim(); `.trim();
export class BrushEditor extends HTMLElement { export class BrushEditor extends HTMLElement {

View file

@ -21,32 +21,35 @@ export class BrushPreview extends HTMLElement {
         this.pixmap = new Pixmap(this.canvas.width, this.canvas.height);
     }
 
-    #renderBrushInner(haku) {
-        haku.resetVm();
-
-        let evalResult = haku.evalBrush();
+    async #renderBrushInner(haku) {
+        this.pixmap.clear();
+
+        let evalResult = await haku.evalBrush({
+            runDotter: async () => {
+                return {
+                    fromX: this.canvas.width / 2,
+                    fromY: this.canvas.width / 2,
+                    toX: this.canvas.width / 2,
+                    toY: this.canvas.width / 2,
+                    num: 0,
+                };
+            },
+            runScribble: async (renderToPixmap) => {
+                return renderToPixmap(this.pixmap, 0, 0);
+            },
+        });
         if (evalResult.status != "ok") {
             return { status: "error", phase: "eval", result: evalResult };
         }
 
-        this.pixmap.clear();
-        let renderResult = haku.renderValue(
-            this.pixmap,
-            this.canvas.width / 2,
-            this.canvas.height / 2,
-        );
-        if (renderResult.status != "ok") {
-            return { status: "error", phase: "render", result: renderResult };
-        }
-
         this.ctx.putImageData(this.pixmap.getImageData(), 0, 0);
         return { status: "ok" };
     }
 
-    renderBrush(haku) {
+    async renderBrush(haku) {
         this.unsetErrorFlag();
 
-        let result = this.#renderBrushInner(haku);
+        let result = await this.#renderBrushInner(haku);
         if (result.status == "error") {
             this.setErrorFlag();
         }
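
For context, a minimal sketch of how a caller might drive the now-asynchronous preview. This is not part of the commit; the `<rkgk-brush-preview>` element name and the `refreshPreview` helper are assumptions for illustration, and `haku` is assumed to already hold a successfully compiled brush.

```js
// Hypothetical helper: re-render the preview and log why it failed, if it
// did. renderBrush only resolves after the dotter and scribble
// continuations have all been answered.
async function refreshPreview(haku) {
    let preview = document.querySelector("rkgk-brush-preview");
    let result = await preview.renderBrush(haku);
    if (result.status == "error") {
        console.warn("brush preview failed during", result.phase, result.result);
    }
}
```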

View file

@ -19,7 +19,7 @@ class CanvasRenderer extends HTMLElement {
         this.#cursorReportingBehaviour();
         this.#panningBehaviour();
         this.#zoomingBehaviour();
-        this.#paintingBehaviour();
+        this.#interactionBehaviour();
 
         this.addEventListener("contextmenu", (event) => event.preventDefault());
     }
@ -388,18 +388,6 @@ class CanvasRenderer extends HTMLElement {
     // Behaviours
 
-    async #cursorReportingBehaviour() {
-        while (true) {
-            let event = await listen([this, "mousemove"]);
-            let [x, y] = this.viewport.toViewportSpace(
-                event.clientX - this.clientLeft,
-                event.offsetY - this.clientTop,
-                this.getWindowSize(),
-            );
-            this.dispatchEvent(Object.assign(new Event(".cursor"), { x, y }));
-        }
-    }
-
     sendViewportUpdate() {
         this.dispatchEvent(new Event(".viewportUpdate"));
     }
@ -443,24 +431,28 @@ class CanvasRenderer extends HTMLElement {
         );
     }
 
-    async #paintingBehaviour() {
-        const paint = (x, y) => {
-            let [wallX, wallY] = this.viewport.toViewportSpace(x, y, this.getWindowSize());
-            this.dispatchEvent(Object.assign(new Event(".paint"), { x: wallX, y: wallY }));
-        };
+    async #cursorReportingBehaviour() {
+        while (true) {
+            let event = await listen([this, "mousemove"]);
+            let [x, y] = this.viewport.toViewportSpace(
+                event.clientX - this.clientLeft,
+                event.offsetY - this.clientTop,
+                this.getWindowSize(),
+            );
+            this.dispatchEvent(Object.assign(new Event(".cursor"), { x, y }));
+        }
+    }
 
+    async #interactionBehaviour() {
         while (true) {
             let mouseDown = await listen([this, "mousedown"]);
             if (mouseDown.button == 0) {
-                paint(mouseDown.offsetX, mouseDown.offsetY);
-                while (true) {
-                    let event = await listen([window, "mousemove"], [window, "mouseup"]);
-                    if (event.type == "mousemove") {
-                        paint(event.clientX - this.clientLeft, event.offsetY - this.clientTop);
-                    } else if (event.type == "mouseup") {
-                        break;
-                    }
-                }
+                let [mouseX, mouseY] = this.viewport.toViewportSpace(
+                    mouseDown.clientX - this.clientLeft,
+                    mouseDown.clientY - this.clientTop,
+                    this.getWindowSize(),
+                );
+                notifyInteraction(this, "start", { mouseX, mouseY, num: 0 });
             }
         }
     }
@ -468,6 +460,68 @@ class CanvasRenderer extends HTMLElement {
 customElements.define("rkgk-canvas-renderer", CanvasRenderer);
 
+function notifyInteraction(canvasRenderer, kind, fields) {
+    canvasRenderer.dispatchEvent(
+        Object.assign(new InteractEvent(canvasRenderer), { interactionKind: kind, ...fields }),
+    );
+}
+
+class InteractEvent extends Event {
+    constructor(canvasRenderer) {
+        super(".interact");
+        this.canvasRenderer = canvasRenderer;
+    }
+
+    continueAsDotter() {
+        (async () => {
+            let event = await listen(
+                [this.canvasRenderer, "mousemove"],
+                [this.canvasRenderer, "mouseup"],
+            );
+            if (event.type == "mousemove") {
+                let [mouseX, mouseY] = this.canvasRenderer.viewport.toViewportSpace(
+                    event.clientX - this.canvasRenderer.clientLeft,
+                    event.clientY - this.canvasRenderer.clientTop,
+                    this.canvasRenderer.getWindowSize(),
+                );
+                notifyInteraction(this.canvasRenderer, "dotter", {
+                    previousX: this.mouseX,
+                    previousY: this.mouseY,
+                    mouseX,
+                    mouseY,
+                    num: this.num + 1,
+                });
+            }
+            if (event.type == "mouseup" && event.button == 0) {
+                // Break the loop.
+                return;
+            }
+        })();
+
+        if (this.previousX != null && this.previousY != null) {
+            return {
+                fromX: this.previousX,
+                fromY: this.previousY,
+                toX: this.mouseX,
+                toY: this.mouseY,
+                num: this.num,
+            };
+        } else {
+            return {
+                fromX: this.mouseX,
+                fromY: this.mouseY,
+                toX: this.mouseX,
+                toY: this.mouseY,
+                num: this.num,
+            };
+        }
+    }
+}
+
 class Atlas {
     static getInitBuffer(chunkSize, nChunks) {
         let imageSize = chunkSize * nChunks;
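
A rough usage sketch of the new `.interact` event (not from the commit; the `rkgk-canvas-renderer` selector and the logging are assumptions): the renderer dispatches an `InteractEvent` with `interactionKind` `"start"` when the left button goes down, and each call to `continueAsDotter()` both returns the current dotter segment and re-arms the listener, so the next mouse movement dispatches a `"dotter"` event with `num` incremented.

```js
// Illustrative consumer of the .interact event, for reference only.
let canvasRenderer = document.querySelector("rkgk-canvas-renderer");

canvasRenderer.addEventListener(".interact", (event) => {
    // The returned object has the dotter shape expected by Haku.contDotter:
    // fromX/fromY/toX/toY/num, already converted via toViewportSpace.
    let dotter = event.continueAsDotter();
    console.log(
        `${event.interactionKind} #${dotter.num}:`,
        dotter.fromX, dotter.fromY, "->", dotter.toX, dotter.toY,
    );
});
```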

View file

@ -113,6 +113,12 @@ export class Pixmap {
     }
 }
 
+// NOTE: This must be kept in sync with ContKind on the haku-wasm side.
+export const ContKind = {
+    Scribble: 0,
+    Dotter: 1,
+};
+
 export class Haku {
     #pInstance = 0;
     #pBrush = 0;
@ -206,17 +212,49 @@ export class Haku {
         }
     }
 
-    evalBrush() {
-        return this.#statusCodeToResultObject(w.haku_eval_brush(this.#pInstance, this.#pBrush));
+    beginBrush() {
+        return this.#statusCodeToResultObject(w.haku_begin_brush(this.#pInstance, this.#pBrush));
     }
 
-    renderValue(pixmap, translationX, translationY) {
+    expectedContKind() {
+        return w.haku_cont_kind(this.#pInstance);
+    }
+
+    contScribble(pixmap, translationX, translationY) {
         return this.#statusCodeToResultObject(
-            w.haku_render_value(this.#pInstance, pixmap.ptr, translationX, translationY),
+            w.haku_cont_scribble(this.#pInstance, pixmap.ptr, translationX, translationY),
         );
     }
 
-    resetVm() {
-        w.haku_reset_vm(this.#pInstance);
+    contDotter({ fromX, fromY, toX, toY, num }) {
+        return this.#statusCodeToResultObject(
+            w.haku_cont_dotter(this.#pInstance, fromX, fromY, toX, toY, num),
+        );
+    }
+
+    async evalBrush(options) {
+        let { runDotter, runScribble } = options;
+
+        let result;
+        result = this.beginBrush();
+        if (result.status != "ok") return result;
+
+        while (this.expectedContKind() != ContKind.Invalid) {
+            switch (this.expectedContKind()) {
+                case ContKind.Scribble:
+                    result = await runScribble((pixmap, translationX, translationY) => {
+                        return this.contScribble(pixmap, translationX, translationY);
+                    });
+                    return result;
+
+                case ContKind.Dotter:
+                    let dotter = await runDotter();
+                    result = this.contDotter(dotter);
+                    if (result.status != "ok") return result;
+                    break;
+            }
+        }
+
+        return { status: "ok" };
     }
 }
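
The dotter/scribble continuation protocol above is the heart of the change, so here is an illustrative sketch of driving it by hand instead of through `evalBrush`. This is not part of the commit; the `runBrushOnce` helper is hypothetical, `haku` is assumed to already hold a compiled brush, and `pixmap` is assumed to be a `Pixmap` from this module.

```js
import { ContKind } from "rkgk/haku.js";

// Hypothetical helper: run one brush evaluation, answering every
// continuation the VM asks for with a fixed dotter segment.
function runBrushOnce(haku, pixmap, dotter) {
    let result = haku.beginBrush();
    if (result.status != "ok") return result;

    for (;;) {
        switch (haku.expectedContKind()) {
            case ContKind.Dotter:
                // The VM wants to know where the stroke goes.
                result = haku.contDotter(dotter);
                if (result.status != "ok") return result;
                break;
            case ContKind.Scribble:
                // The VM produced a scribble; render it into the pixmap's
                // top-left corner and stop.
                return haku.contScribble(pixmap, 0, 0);
            default:
                // Nothing left to continue.
                return { status: "ok" };
        }
    }
}
```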

View file

@ -9,6 +9,7 @@ import {
 } from "rkgk/session.js";
 import { debounce } from "rkgk/framework.js";
 import { ReticleCursor } from "rkgk/reticle-renderer.js";
+import { selfController } from "rkgk/painter.js";
 
 const updateInterval = 1000 / 60;
@ -175,14 +176,8 @@ function readUrl(urlString) {
                 user.reticle.setCursor(x, y);
             }
 
-            if (wallEvent.kind.event == "setBrush") {
-                user.setBrush(wallEvent.kind.brush);
-            }
-
-            if (wallEvent.kind.event == "plot") {
-                for (let { x, y } of wallEvent.kind.points) {
-                    user.renderBrushToChunks(wall, x, y);
-                }
+            if (wallEvent.kind.event == "interact") {
+                user.simulate(wall, wallEvent.kind.interactions);
             }
         }
     });
@ -230,30 +225,21 @@ function readUrl(urlString) {
         reportCursor(event.x, event.y);
     });
 
-    let plotQueue = [];
-    async function flushPlotQueue() {
-        let points = plotQueue.splice(0, plotQueue.length);
-        if (points.length != 0) {
-            session.sendPlot(points);
+    let interactionQueue = [];
+    function flushInteractionQueue() {
+        if (interactionQueue.length != 0) {
+            session.sendInteraction(interactionQueue);
+            interactionQueue.splice(0);
         }
     }
-    setInterval(flushPlotQueue, updateInterval);
+    setInterval(flushInteractionQueue, updateInterval);
 
-    canvasRenderer.addEventListener(".paint", async (event) => {
-        plotQueue.push({ x: event.x, y: event.y });
-
-        if (currentUser.isBrushOk) {
-            brushEditor.resetErrors();
-            let result = currentUser.renderBrushToChunks(wall, event.x, event.y);
-            if (result.status == "error") {
-                brushEditor.renderHakuResult(
-                    result.phase == "eval" ? "Evaluation" : "Rendering",
-                    result.result,
-                );
-            }
-        }
+    canvasRenderer.addEventListener(".interact", async (event) => {
+        let result = await currentUser.haku.evalBrush(
+            selfController(interactionQueue, wall, event),
+        );
+        brushEditor.renderHakuResult(result.phase == "eval" ? "Evaluation" : "Rendering", result);
     });
 
     canvasRenderer.addEventListener(".viewportUpdate", () => reticleRenderer.render());
@ -270,20 +256,23 @@ function readUrl(urlString) {
             return;
         }
 
-        let previewResult = brushPreview.renderBrush(currentUser.haku);
-        if (previewResult.status == "error") {
-            brushEditor.renderHakuResult(
-                previewResult.phase == "eval" ? "Evaluation" : "Rendering",
-                previewResult.result,
-            );
-        }
+        brushPreview.renderBrush(currentUser.haku).then((previewResult) => {
+            if (previewResult.status == "error") {
+                brushEditor.renderHakuResult(
+                    previewResult.phase == "eval" ? "Evaluation" : "Rendering",
+                    previewResult.result,
+                );
+            }
+        });
     }
 
     compileBrush();
 
     brushEditor.addEventListener(".codeChanged", async () => {
-        flushPlotQueue();
         compileBrush();
-        session.sendSetBrush(brushEditor.code);
+        interactionQueue.push({
+            kind: "setBrush",
+            brush: brushEditor.code,
+        });
     });
 
     session.eventLoop();

View file

@ -1,5 +1,5 @@
-import { Haku } from "rkgk/haku.js";
-import { Painter } from "rkgk/painter.js";
+import { ContKind, Haku } from "rkgk/haku.js";
+import { renderToChunksInArea, dotterRenderArea } from "rkgk/painter.js";
 
 export class User {
     nickname = "";
@ -7,12 +7,11 @@ export class User {
     reticle = null;
     isBrushOk = false;
+    simulation = null;
 
     constructor(wallInfo, nickname) {
         this.nickname = nickname;
        this.haku = new Haku(wallInfo.hakuLimits);
-        this.painter = new Painter(wallInfo.paintArea);
     }
 
     destroy() {
@ -38,6 +37,58 @@ export class User {
         return result;
     }
 
+    simulate(wall, interactions) {
+        console.group("simulate");
+
+        for (let interaction of interactions) {
+            if (interaction.kind == "setBrush") {
+                this.simulation = null;
+                this.setBrush(interaction.brush);
+            }
+
+            if (this.isBrushOk) {
+                if (this.simulation == null) {
+                    console.log("no simulation -- beginning brush");
+                    this.simulation = { renderArea: { left: 0, top: 0, right: 0, bottom: 0 } };
+                    this.haku.beginBrush();
+                }
+
+                if (interaction.kind == "dotter" && this.#expectContKind(ContKind.Dotter)) {
+                    let dotter = {
+                        fromX: interaction.from.x,
+                        fromY: interaction.from.y,
+                        toX: interaction.to.x,
+                        toY: interaction.to.y,
+                        num: interaction.num,
+                    };
+                    this.haku.contDotter(dotter);
+                    this.simulation.renderArea = dotterRenderArea(wall, dotter);
+                }
+
+                if (interaction.kind == "scribble" && this.#expectContKind(ContKind.Scribble)) {
+                    renderToChunksInArea(
+                        wall,
+                        this.simulation.renderArea,
+                        (pixmap, translationX, translationY) => {
+                            return this.haku.contScribble(pixmap, translationX, translationY);
+                        },
+                    );
+                    console.log("ended simulation");
+                    this.simulation = null;
+                }
+            }
+        }
+
+        console.groupEnd();
+    }
+
+    #expectContKind(kind) {
+        if (this.haku.expectedContKind() == kind) {
+            return true;
+        } else {
+            console.error(`expected cont kind: ${kind}`);
+            return false;
+        }
+    }
 }
 
 export class OnlineUsers extends EventTarget {

View file

@ -1,43 +1,69 @@
-export class Painter {
-    constructor(paintArea) {
-        this.paintArea = paintArea;
-    }
-
-    renderBrushToWall(haku, centerX, centerY, wall) {
-        haku.resetVm();
-
-        let evalResult = haku.evalBrush();
-        if (evalResult.status != "ok")
-            return { status: "error", phase: "eval", result: evalResult };
-
-        let left = centerX - this.paintArea / 2;
-        let top = centerY - this.paintArea / 2;
-        let leftChunk = Math.floor(left / wall.chunkSize);
-        let topChunk = Math.floor(top / wall.chunkSize);
-        let rightChunk = Math.ceil((left + this.paintArea) / wall.chunkSize);
-        let bottomChunk = Math.ceil((top + this.paintArea) / wall.chunkSize);
-        for (let chunkY = topChunk; chunkY < bottomChunk; ++chunkY) {
-            for (let chunkX = leftChunk; chunkX < rightChunk; ++chunkX) {
-                let x = Math.floor(-chunkX * wall.chunkSize + centerX);
-                let y = Math.floor(-chunkY * wall.chunkSize + centerY);
-                let chunk = wall.getOrCreateChunk(chunkX, chunkY);
-                chunk.markModified();
-                let renderResult = haku.renderValue(chunk.pixmap, x, y);
-                if (renderResult.status != "ok") {
-                    return { status: "error", phase: "render", result: renderResult };
-                }
-            }
-        }
-
-        for (let y = topChunk; y < bottomChunk; ++y) {
-            for (let x = leftChunk; x < rightChunk; ++x) {
-                let chunk = wall.getChunk(x, y);
-                chunk.syncFromPixmap();
-            }
-        }
-
-        return { status: "ok" };
-    }
-}
+import { listen } from "rkgk/framework.js";
+
+function* chunksInRectangle(left, top, right, bottom, chunkSize) {
+    let leftChunk = Math.floor(left / chunkSize);
+    let topChunk = Math.floor(top / chunkSize);
+    let rightChunk = Math.ceil(right / chunkSize);
+    let bottomChunk = Math.ceil(bottom / chunkSize);
+    for (let chunkY = topChunk; chunkY < bottomChunk; ++chunkY) {
+        for (let chunkX = leftChunk; chunkX < rightChunk; ++chunkX) {
+            yield [chunkX, chunkY];
+        }
+    }
+}
+
+export function renderToChunksInArea(wall, renderArea, renderToPixmap) {
+    for (let [chunkX, chunkY] of chunksInRectangle(
+        renderArea.left,
+        renderArea.top,
+        renderArea.right,
+        renderArea.bottom,
+        wall.chunkSize,
+    )) {
+        let chunk = wall.getOrCreateChunk(chunkX, chunkY);
+        let translationX = -chunkX * wall.chunkSize;
+        let translationY = -chunkY * wall.chunkSize;
+        let result = renderToPixmap(chunk.pixmap, translationX, translationY);
+        chunk.markModified();
+        if (result.status != "ok") return result;
+    }
+    return { status: "ok" };
+}
+
+export function dotterRenderArea(wall, dotter) {
+    let halfPaintArea = wall.paintArea / 2;
+    return {
+        left: dotter.toX - halfPaintArea,
+        top: dotter.toY - halfPaintArea,
+        right: dotter.toX + halfPaintArea,
+        bottom: dotter.toY + halfPaintArea,
+    };
+}
+
+export function selfController(interactionQueue, wall, event) {
+    let renderArea = null;
+
+    return {
+        async runScribble(renderToPixmap) {
+            interactionQueue.push({ kind: "scribble" });
+            if (renderArea != null) {
+                return renderToChunksInArea(wall, renderArea, renderToPixmap);
+            } else {
+                console.debug("render area is empty, nothing will be rendered");
+            }
+            return { status: "ok" };
+        },
+
+        async runDotter() {
+            let dotter = await event.continueAsDotter();
+            interactionQueue.push({
+                kind: "dotter",
+                from: { x: dotter.fromX, y: dotter.fromY },
+                to: { x: dotter.toX, y: dotter.toY },
+                num: dotter.num,
+            });
+            renderArea = dotterRenderArea(wall, dotter);
+            return dotter;
+        },
    };
}
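
To make the chunk math concrete, here is a small worked example; the numbers and the stub `wall` object are assumptions for illustration, not values from the commit. With `wall.chunkSize = 256` and `wall.paintArea = 512`, a dotter ending at `(300, 300)` produces a render area of `{ left: 44, top: 44, right: 556, bottom: 556 }`, which `chunksInRectangle` maps to chunk coordinates `(0, 0)` through `(2, 2)`.

```js
import { dotterRenderArea, renderToChunksInArea } from "rkgk/painter.js";

// Stub wall, for illustration only -- the real Wall comes from rkgk/wall.js.
let wall = {
    chunkSize: 256,
    paintArea: 512,
    getOrCreateChunk: () => ({ pixmap: null, markModified() {} }),
};
let dotter = { fromX: 300, fromY: 300, toX: 300, toY: 300, num: 0 };

let area = dotterRenderArea(wall, dotter);
// area == { left: 44, top: 44, right: 556, bottom: 556 }

renderToChunksInArea(wall, area, (pixmap, translationX, translationY) => {
    // The translation shifts wall coordinates into this chunk's pixel space;
    // for example chunk (1, 0) gets translationX = -256, translationY = 0.
    return { status: "ok" };
});
```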

View file

@ -267,22 +267,13 @@ class Session extends EventTarget {
         });
     }
 
-    sendPlot(points) {
-        this.#sendJson({
-            request: "wall",
-            wallEvent: {
-                event: "plot",
-                points,
-            },
-        });
-    }
-
-    sendSetBrush(brush) {
+    sendInteraction(interactions) {
+        console.log(interactions);
+
         this.#sendJson({
             request: "wall",
             wallEvent: {
-                event: "setBrush",
-                brush,
+                event: "interact",
+                interactions,
             },
         });
     }
 }
static/signal.js Normal file
View file

View file

@ -28,6 +28,7 @@ export class Wall {
     constructor(wallInfo) {
         this.chunkSize = wallInfo.chunkSize;
+        this.paintArea = wallInfo.paintArea;
         this.onlineUsers = new OnlineUsers(wallInfo);
     }