introduce tags, structs, and reticles
This was meant to be split into smaller changes, but I realised I edited my existing revision too late.
Parent: 8356b6c750
Commit: 5b7d9586ea
26 changed files with 1113 additions and 351 deletions
|
@ -7,19 +7,20 @@ use core::{alloc::Layout, slice};
|
|||
use alloc::{boxed::Box, vec::Vec};
|
||||
use haku::{
|
||||
ast::Ast,
|
||||
bytecode::{Chunk, Defs, DefsImage},
|
||||
bytecode::{Chunk, Defs, DefsImage, DefsLimits},
|
||||
compiler::{compile_expr, ClosureSpec, CompileError, Compiler, Source},
|
||||
diagnostic::Diagnostic,
|
||||
lexer::{lex, Lexer},
|
||||
parser::{self, IntoAstError, Parser},
|
||||
render::{
|
||||
tiny_skia::{Pixmap, PremultipliedColorU8},
|
||||
Renderer, RendererLimits,
|
||||
RendererLimits,
|
||||
},
|
||||
source::SourceCode,
|
||||
system::{ChunkId, System, SystemImage},
|
||||
token::Lexis,
|
||||
value::{Closure, Ref, Value},
|
||||
trampoline::{Cont, Trampoline},
|
||||
value::{Closure, Ref, Vec2},
|
||||
vm::{Exception, Vm, VmImage, VmLimits},
|
||||
};
|
||||
use log::{debug, info};
|
||||
|
@ -46,6 +47,7 @@ struct Limits {
|
|||
max_source_code_len: usize,
|
||||
max_chunks: usize,
|
||||
max_defs: usize,
|
||||
max_tags: usize,
|
||||
max_tokens: usize,
|
||||
max_parser_events: usize,
|
||||
ast_capacity: usize,
|
||||
|
@ -65,6 +67,7 @@ impl Default for Limits {
|
|||
max_source_code_len: 65536,
|
||||
max_chunks: 2,
|
||||
max_defs: 256,
|
||||
max_tags: 256,
|
||||
max_tokens: 1024,
|
||||
max_parser_events: 1024,
|
||||
ast_capacity: 1024,
|
||||
|
@ -110,6 +113,7 @@ macro_rules! limit_setter {
|
|||
limit_setter!(max_source_code_len);
|
||||
limit_setter!(max_chunks);
|
||||
limit_setter!(max_defs);
|
||||
limit_setter!(max_tags);
|
||||
limit_setter!(max_tokens);
|
||||
limit_setter!(max_parser_events);
|
||||
limit_setter!(ast_capacity);
|
||||
|
@ -133,10 +137,22 @@ struct Instance {
|
|||
vm: Vm,
|
||||
vm_image: VmImage,
|
||||
|
||||
value: Value,
|
||||
trampoline: Option<Trampoline>,
|
||||
exception: Option<Exception>,
|
||||
}
|
||||
|
||||
impl Instance {
|
||||
fn set_exception(&mut self, exn: Exception) {
|
||||
debug!("setting exception = {exn:?}");
|
||||
self.exception = Some(exn);
|
||||
}
|
||||
|
||||
fn reset_exception(&mut self) {
|
||||
debug!("resetting exception");
|
||||
self.exception = None;
|
||||
}
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
unsafe extern "C" fn haku_instance_new(limits: *const Limits) -> *mut Instance {
|
||||
let limits = *limits;
|
||||
|
@ -144,7 +160,10 @@ unsafe extern "C" fn haku_instance_new(limits: *const Limits) -> *mut Instance {
|
|||
|
||||
let system = System::new(limits.max_chunks);
|
||||
|
||||
let defs = Defs::new(limits.max_defs);
|
||||
let defs = Defs::new(&DefsLimits {
|
||||
max_defs: limits.max_defs,
|
||||
max_tags: limits.max_tags,
|
||||
});
|
||||
let vm = Vm::new(
|
||||
&defs,
|
||||
&VmLimits {
|
||||
|
@ -168,7 +187,7 @@ unsafe extern "C" fn haku_instance_new(limits: *const Limits) -> *mut Instance {
|
|||
defs_image,
|
||||
vm,
|
||||
vm_image,
|
||||
value: Value::Nil,
|
||||
trampoline: None,
|
||||
exception: None,
|
||||
});
|
||||
|
||||
|
@ -191,13 +210,6 @@ unsafe extern "C" fn haku_reset(instance: *mut Instance) {
|
|||
instance.defs.restore_image(&instance.defs_image);
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
unsafe extern "C" fn haku_reset_vm(instance: *mut Instance) {
|
||||
debug!("resetting instance VM: {instance:?}");
|
||||
let instance = &mut *instance;
|
||||
instance.vm.restore_image(&instance.vm_image);
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
unsafe extern "C" fn haku_has_exception(instance: *mut Instance) -> bool {
|
||||
(*instance).exception.is_some()
|
||||
|
@ -426,39 +438,35 @@ unsafe extern "C" fn haku_compile_brush(
|
|||
StatusCode::Ok
|
||||
}
|
||||
|
||||
struct PixmapLock {
|
||||
pixmap: Pixmap,
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
extern "C" fn haku_pixmap_new(width: u32, height: u32) -> *mut PixmapLock {
|
||||
let ptr = Box::leak(Box::new(PixmapLock {
|
||||
pixmap: Pixmap::new(width, height).expect("invalid pixmap size"),
|
||||
})) as *mut _;
|
||||
extern "C" fn haku_pixmap_new(width: u32, height: u32) -> *mut Pixmap {
|
||||
let ptr = Box::leak(Box::new(
|
||||
Pixmap::new(width, height).expect("invalid pixmap size"),
|
||||
)) as *mut _;
|
||||
debug!("created pixmap with size {width}x{height}: {ptr:?}");
|
||||
ptr
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
unsafe extern "C" fn haku_pixmap_destroy(pixmap: *mut PixmapLock) {
|
||||
unsafe extern "C" fn haku_pixmap_destroy(pixmap: *mut Pixmap) {
|
||||
debug!("destroying pixmap: {pixmap:?}");
|
||||
drop(Box::from_raw(pixmap))
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
unsafe extern "C" fn haku_pixmap_data(pixmap: *mut PixmapLock) -> *mut u8 {
|
||||
let pixmap = &mut (*pixmap).pixmap;
|
||||
unsafe extern "C" fn haku_pixmap_data(pixmap: *mut Pixmap) -> *mut u8 {
|
||||
let pixmap = &mut *pixmap;
|
||||
pixmap.pixels_mut().as_mut_ptr() as *mut u8
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
unsafe extern "C" fn haku_pixmap_clear(pixmap: *mut PixmapLock) {
|
||||
let pixmap = &mut (*pixmap).pixmap;
|
||||
unsafe extern "C" fn haku_pixmap_clear(pixmap: *mut Pixmap) {
|
||||
let pixmap = &mut *pixmap;
|
||||
pixmap.pixels_mut().fill(PremultipliedColorU8::TRANSPARENT);
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
unsafe extern "C" fn haku_eval_brush(instance: *mut Instance, brush: *const Brush) -> StatusCode {
|
||||
unsafe extern "C" fn haku_begin_brush(instance: *mut Instance, brush: *const Brush) -> StatusCode {
|
||||
let instance = &mut *instance;
|
||||
let brush = &*brush;
|
||||
|
||||
|
@ -466,8 +474,10 @@ unsafe extern "C" fn haku_eval_brush(instance: *mut Instance, brush: *const Brus
|
|||
panic!("brush is not compiled and ready to be used");
|
||||
};
|
||||
|
||||
debug!("applying defs");
|
||||
instance.vm.restore_image(&instance.vm_image);
|
||||
instance.vm.apply_defs(&instance.defs);
|
||||
instance.reset_exception();
|
||||
instance.trampoline = None;
|
||||
|
||||
let Ok(closure_id) = instance
|
||||
.vm
|
||||
|
@ -476,51 +486,91 @@ unsafe extern "C" fn haku_eval_brush(instance: *mut Instance, brush: *const Brus
|
|||
return StatusCode::OutOfRefSlots;
|
||||
};
|
||||
|
||||
debug!("resetting exception");
|
||||
instance.exception = None;
|
||||
instance.value = match instance.vm.run(&instance.system, closure_id) {
|
||||
instance.reset_exception();
|
||||
let value = match instance.vm.run(&instance.system, closure_id, &[]) {
|
||||
Ok(value) => value,
|
||||
Err(exn) => {
|
||||
debug!("setting exception {exn:?}");
|
||||
instance.exception = Some(exn);
|
||||
instance.set_exception(exn);
|
||||
return StatusCode::EvalException;
|
||||
}
|
||||
};
|
||||
instance.trampoline = Some(Trampoline::new(value));
|
||||
|
||||
StatusCode::Ok
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
unsafe extern "C" fn haku_render_value(
|
||||
unsafe extern "C" fn haku_cont_kind(instance: *mut Instance) -> Cont {
|
||||
let instance = &mut *instance;
|
||||
instance.trampoline.as_ref().unwrap().cont(&instance.vm)
|
||||
}
|
||||
|
||||
fn wrap_exception(
|
||||
instance: &mut Instance,
|
||||
error_code: StatusCode,
|
||||
f: impl FnOnce(&mut Instance) -> Result<(), Exception>,
|
||||
) -> StatusCode {
|
||||
match f(instance) {
|
||||
Ok(_) => StatusCode::Ok,
|
||||
Err(exn) => {
|
||||
instance.set_exception(exn);
|
||||
error_code
|
||||
}
|
||||
}
|
||||
}
|
||||
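The `wrap_exception` helper above centralizes the Result-to-StatusCode conversion used by the `haku_cont_*` entry points: run the fallible body, and on failure stash the exception on the instance and return the error code. A minimal, standalone sketch of the same pattern (the `Host`/`Status` names are illustrative stand-ins, not part of haku):

    #[derive(Debug)]
    struct Host {
        last_error: Option<String>,
    }

    #[repr(u32)]
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    enum Status {
        Ok = 0,
        RenderError = 1,
    }

    fn wrap_error(
        host: &mut Host,
        error_code: Status,
        f: impl FnOnce(&mut Host) -> Result<(), String>,
    ) -> Status {
        match f(host) {
            Ok(()) => Status::Ok,
            Err(message) => {
                // Stash the error so a later FFI call (an exception getter) can read it.
                host.last_error = Some(message);
                error_code
            }
        }
    }

    fn main() {
        let mut host = Host { last_error: None };
        let status = wrap_error(&mut host, Status::RenderError, |_| Err("out of fuel".into()));
        assert_eq!(status, Status::RenderError);
        assert_eq!(host.last_error.as_deref(), Some("out of fuel"));
    }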
|
||||
#[no_mangle]
|
||||
unsafe extern "C" fn haku_cont_scribble(
|
||||
instance: *mut Instance,
|
||||
pixmap: *mut PixmapLock,
|
||||
pixmap: *mut Pixmap,
|
||||
translation_x: f32,
|
||||
translation_y: f32,
|
||||
) -> StatusCode {
|
||||
let instance = &mut *instance;
|
||||
debug!("resetting exception");
|
||||
instance.exception = None;
|
||||
instance.reset_exception();
|
||||
|
||||
debug!("will render value: {:?}", instance.value);
|
||||
debug!("cont_scribble: pixmap={pixmap:?} translation_x={translation_x:?} translation_y={translation_y:?} trampoline={:?}", instance.trampoline);
|
||||
|
||||
let pixmap_locked = &mut (*pixmap).pixmap;
|
||||
|
||||
let mut renderer = Renderer::new(
|
||||
pixmap_locked,
|
||||
&RendererLimits {
|
||||
pixmap_stack_capacity: instance.limits.pixmap_stack_capacity,
|
||||
transform_stack_capacity: instance.limits.transform_stack_capacity,
|
||||
},
|
||||
);
|
||||
renderer.translate(translation_x, translation_y);
|
||||
match renderer.render(&instance.vm, instance.value) {
|
||||
Ok(()) => (),
|
||||
Err(exn) => {
|
||||
instance.exception = Some(exn);
|
||||
instance.vm.restore_image(&instance.vm_image);
|
||||
return StatusCode::RenderException;
|
||||
}
|
||||
}
|
||||
|
||||
StatusCode::Ok
|
||||
wrap_exception(instance, StatusCode::RenderException, |instance| {
|
||||
instance.trampoline.as_mut().unwrap().scribble(
|
||||
&instance.vm,
|
||||
&mut *pixmap,
|
||||
Vec2 {
|
||||
x: translation_x,
|
||||
y: translation_y,
|
||||
},
|
||||
&RendererLimits {
|
||||
pixmap_stack_capacity: instance.limits.pixmap_stack_capacity,
|
||||
transform_stack_capacity: instance.limits.transform_stack_capacity,
|
||||
},
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
unsafe extern "C" fn haku_cont_dotter(
|
||||
instance: *mut Instance,
|
||||
from_x: f32,
|
||||
from_y: f32,
|
||||
to_x: f32,
|
||||
to_y: f32,
|
||||
num: f32,
|
||||
) -> StatusCode {
|
||||
let instance = &mut *instance;
|
||||
instance.reset_exception();
|
||||
|
||||
debug!(
|
||||
"cont_dotter: from_x={from_x} from_y={from_y} to_x={to_x} to_y={to_y} trampoline={:?}",
|
||||
instance.trampoline
|
||||
);
|
||||
|
||||
wrap_exception(instance, StatusCode::RenderException, |instance| {
|
||||
instance.trampoline.as_mut().unwrap().dotter(
|
||||
&mut instance.vm,
|
||||
&instance.system,
|
||||
Vec2::new(from_x, from_y),
|
||||
Vec2::new(to_x, to_y),
|
||||
num,
|
||||
)
|
||||
})
|
||||
}
|
||||
|
|
|
@ -12,6 +12,7 @@ pub enum Opcode {
|
|||
Nil,
|
||||
False,
|
||||
True,
|
||||
Tag,
|
||||
Number, // (float: f32)
|
||||
Rgba, // (r: u8, g: u8, b: u8, a: u8)
|
||||
|
||||
|
@ -36,6 +37,7 @@ pub enum Opcode {
|
|||
// Control flow.
|
||||
Jump, // (offset: u16)
|
||||
JumpIfNot, // (offset: u16)
|
||||
Field, // (count: u8, tags: [u16; count])
|
||||
|
||||
// Function calls.
|
||||
Call, // (argc: u8)
|
||||
|
@ -157,6 +159,12 @@ impl Chunk {
|
|||
}
|
||||
}
|
||||
|
||||
impl Offset {
|
||||
pub fn to_u16(self) -> u16 {
|
||||
self.0
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub struct ChunkSizeError;
|
||||
|
||||
|
@ -193,21 +201,48 @@ impl DefId {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
|
||||
pub struct TagId(u16);
|
||||
|
||||
impl TagId {
|
||||
pub(crate) fn from_u16(x: u16) -> Self {
|
||||
Self(x)
|
||||
}
|
||||
|
||||
pub fn to_u16(self) -> u16 {
|
||||
self.0
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Defs {
|
||||
defs: Vec<String>,
|
||||
tags: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct DefsLimits {
|
||||
pub max_defs: usize,
|
||||
pub max_tags: usize,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct DefsImage {
|
||||
defs: usize,
|
||||
tags: usize,
|
||||
}
|
||||
|
||||
impl Defs {
|
||||
pub fn new(capacity: usize) -> Self {
|
||||
assert!(capacity < u16::MAX as usize + 1);
|
||||
pub fn new(limits: &DefsLimits) -> Self {
|
||||
assert!(limits.max_defs < u16::MAX as usize + 1);
|
||||
assert!(limits.max_tags < u16::MAX as usize + 1);
|
||||
|
||||
let mut tags = Vec::with_capacity(limits.max_tags);
|
||||
add_well_known_tags(&mut tags);
|
||||
|
||||
Self {
|
||||
defs: Vec::with_capacity(capacity),
|
||||
defs: Vec::with_capacity(limits.max_defs),
|
||||
tags,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -219,14 +254,14 @@ impl Defs {
|
|||
self.len() != 0
|
||||
}
|
||||
|
||||
pub fn get(&mut self, name: &str) -> Option<DefId> {
|
||||
pub fn get_def(&mut self, name: &str) -> Option<DefId> {
|
||||
self.defs
|
||||
.iter()
|
||||
.position(|n| *n == name)
|
||||
.map(|index| DefId(index as u16))
|
||||
}
|
||||
|
||||
pub fn add(&mut self, name: &str) -> Result<DefId, DefError> {
|
||||
pub fn add_def(&mut self, name: &str) -> Result<DefId, DefError> {
|
||||
if self.defs.iter().any(|n| n == name) {
|
||||
Err(DefError::Exists)
|
||||
} else {
|
||||
|
@ -239,9 +274,27 @@ impl Defs {
|
|||
}
|
||||
}
|
||||
|
||||
fn add_tag(tags: &mut Vec<String>, name: &str) -> Result<TagId, TagError> {
|
||||
if tags.len() >= tags.capacity() {
|
||||
return Err(TagError::OutOfSpace);
|
||||
}
|
||||
let id = TagId(tags.len() as u16);
|
||||
tags.push(name.to_owned());
|
||||
Ok(id)
|
||||
}
|
||||
|
||||
pub fn get_or_add_tag(&mut self, name: &str) -> Result<TagId, TagError> {
|
||||
if let Some(index) = self.tags.iter().position(|n| n == name) {
|
||||
Ok(TagId(index as u16))
|
||||
} else {
|
||||
Self::add_tag(&mut self.tags, name)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn image(&self) -> DefsImage {
|
||||
DefsImage {
|
||||
defs: self.defs.len(),
|
||||
tags: self.tags.len(),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -249,6 +302,9 @@ impl Defs {
|
|||
self.defs.resize_with(image.defs, || {
|
||||
panic!("image must be a subset of the current defs")
|
||||
});
|
||||
self.tags.resize_with(image.tags, || {
|
||||
panic!("image must be a subset of the current defs")
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -266,3 +322,45 @@ impl Display for DefError {
|
|||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum TagError {
|
||||
OutOfSpace,
|
||||
}
|
||||
|
||||
impl Display for TagError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(match self {
|
||||
TagError::OutOfSpace => "too many tags",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! well_known_tags {
|
||||
($($ident:tt = $value:tt),* $(,)?) => {
|
||||
impl TagId {
|
||||
$(
|
||||
#[allow(non_upper_case_globals)]
|
||||
pub const $ident: Self = Self($value);
|
||||
)*
|
||||
}
|
||||
|
||||
fn add_well_known_tags(tags: &mut Vec<String>) {
|
||||
$(
|
||||
let id = Defs::add_tag(tags, stringify!($ident)).unwrap();
|
||||
assert_eq!(id, TagId::from_u16($value));
|
||||
)*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
well_known_tags! {
|
||||
// NOTE: The numbers must be sorted from 0 to N, due to limitations of Rust's macro system.
|
||||
// https://github.com/rust-lang/rust/issues/83527
|
||||
|
||||
Nil = 0,
|
||||
|
||||
From = 1,
|
||||
To = 2,
|
||||
Num = 3,
|
||||
}
|
||||
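For quick reference, the interning behaviour of `get_or_add_tag` together with the seeded well-known tags boils down to the following standalone sketch (illustrative only; the real logic lives on `Defs` with `TagId` and `TagError`):

    fn get_or_add_tag(tags: &mut Vec<String>, max_tags: usize, name: &str) -> Result<u16, &'static str> {
        // Reuse the index of an already-interned name.
        if let Some(index) = tags.iter().position(|n| n == name) {
            return Ok(index as u16);
        }
        // Otherwise append, unless the capacity limit has been reached.
        if tags.len() >= max_tags {
            return Err("too many tags");
        }
        let id = tags.len() as u16;
        tags.push(name.to_owned());
        Ok(id)
    }

    fn main() {
        // The well-known tags occupy the first IDs, just like add_well_known_tags seeds them.
        let mut tags = vec!["Nil".to_owned(), "From".to_owned(), "To".to_owned(), "Num".to_owned()];
        assert_eq!(get_or_add_tag(&mut tags, 256, "From"), Ok(1));  // reused
        assert_eq!(get_or_add_tag(&mut tags, 256, "Splat"), Ok(4)); // newly interned
    }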
|
|
|
@ -7,7 +7,9 @@ use alloc::vec::Vec;
|
|||
|
||||
use crate::{
|
||||
ast::{Ast, NodeId, NodeKind},
|
||||
bytecode::{Chunk, DefError, Defs, EmitError, Opcode, CAPTURE_CAPTURE, CAPTURE_LOCAL},
|
||||
bytecode::{
|
||||
Chunk, DefError, Defs, EmitError, Opcode, TagError, TagId, CAPTURE_CAPTURE, CAPTURE_LOCAL,
|
||||
},
|
||||
diagnostic::Diagnostic,
|
||||
source::SourceCode,
|
||||
system::{System, SystemFnArity},
|
||||
|
@ -123,11 +125,6 @@ pub fn compile_expr<'a>(c: &mut Compiler<'a>, src: &Source<'a>, node_id: NodeId)
|
|||
}
|
||||
}
|
||||
|
||||
fn unsupported(c: &mut Compiler, src: &Source, node_id: NodeId, message: &str) -> CompileResult {
|
||||
c.emit(Diagnostic::error(src.ast.span(node_id), message));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn compile_nil(c: &mut Compiler) -> CompileResult {
|
||||
c.chunk.emit_opcode(Opcode::Nil)?;
|
||||
|
||||
|
@ -183,7 +180,7 @@ fn compile_ident<'a>(c: &mut Compiler<'a>, src: &Source<'a>, node_id: NodeId) ->
|
|||
c.chunk.emit_u8(index)?;
|
||||
}
|
||||
Ok(None) => {
|
||||
if let Some(def_id) = c.defs.get(name) {
|
||||
if let Some(def_id) = c.defs.get_def(name) {
|
||||
c.chunk.emit_opcode(Opcode::Def)?;
|
||||
c.chunk.emit_u16(def_id.to_u16())?;
|
||||
} else {
|
||||
|
@ -202,7 +199,8 @@ fn compile_ident<'a>(c: &mut Compiler<'a>, src: &Source<'a>, node_id: NodeId) ->
|
|||
}
|
||||
|
||||
fn compile_tag(c: &mut Compiler, src: &Source, node_id: NodeId) -> CompileResult {
|
||||
let tag = src.ast.span(node_id).slice(src.code);
|
||||
let span = src.ast.span(node_id);
|
||||
let tag = span.slice(src.code);
|
||||
|
||||
match tag {
|
||||
"False" => {
|
||||
|
@ -212,7 +210,17 @@ fn compile_tag(c: &mut Compiler, src: &Source, node_id: NodeId) -> CompileResult
|
|||
c.chunk.emit_opcode(Opcode::True)?;
|
||||
}
|
||||
_ => {
|
||||
c.emit(Diagnostic::error(src.ast.span(node_id), "uppercased identifiers are reserved for future use; please start your identifiers with a lowercase letter instead"));
|
||||
let tag_id = c.defs.get_or_add_tag(tag).unwrap_or_else(|error| {
|
||||
match error {
|
||||
TagError::OutOfSpace => {
|
||||
c.emit(Diagnostic::error(span, "too many unique tags used"));
|
||||
}
|
||||
}
|
||||
TagId::Nil
|
||||
});
|
||||
|
||||
c.chunk.emit_opcode(Opcode::Tag)?;
|
||||
c.chunk.emit_u16(tag_id.to_u16())?;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -598,7 +606,7 @@ fn def_prepass<'a>(c: &mut Compiler<'a>, src: &Source<'a>, toplevel: NodeId) ->
|
|||
if let (Some(ident), Some(op)) = (binary_walk.node(), binary_walk.get(NodeKind::Op)) {
|
||||
if src.ast.span(op).slice(src.code) == "=" {
|
||||
let name = src.ast.span(ident).slice(src.code);
|
||||
match c.defs.add(name) {
|
||||
match c.defs.add_def(name) {
|
||||
Ok(_) => (),
|
||||
Err(DefError::Exists) => c.emit(Diagnostic::error(
|
||||
src.ast.span(ident),
|
||||
|
@ -651,7 +659,12 @@ fn compile_def<'a>(c: &mut Compiler<'a>, src: &Source<'a>, node_id: NodeId) -> C
|
|||
return Ok(());
|
||||
};
|
||||
|
||||
if src.ast.kind(left) != NodeKind::Ident {
|
||||
if src.ast.kind(left) == NodeKind::Tag {
|
||||
c.emit(Diagnostic::error(
|
||||
src.ast.span(left),
|
||||
"defs may not be named with uppercase letters, because uppercase letters are used for tags",
|
||||
));
|
||||
} else if src.ast.kind(left) != NodeKind::Ident {
|
||||
c.emit(Diagnostic::error(
|
||||
src.ast.span(left),
|
||||
"def name (identifier) expected",
|
||||
|
@ -662,7 +675,7 @@ fn compile_def<'a>(c: &mut Compiler<'a>, src: &Source<'a>, node_id: NodeId) -> C
|
|||
// NOTE: def_prepass collects all definitions beforehand.
|
||||
// In case a def ends up not existing, that means we ran out of space for defs - so emit a
|
||||
// zero def instead.
|
||||
let def_id = c.defs.get(name).unwrap_or_default();
|
||||
let def_id = c.defs.get_def(name).unwrap_or_default();
|
||||
|
||||
compile_expr(c, src, right)?;
|
||||
c.chunk.emit_opcode(Opcode::SetDef)?;
|
||||
|
|
|
@ -12,5 +12,6 @@ pub mod render;
|
|||
pub mod source;
|
||||
pub mod system;
|
||||
pub mod token;
|
||||
pub mod trampoline;
|
||||
pub mod value;
|
||||
pub mod vm;
|
||||
|
|
|
@ -127,7 +127,7 @@ impl<'a> Renderer<'a> {
|
|||
&paint,
|
||||
&SStroke {
|
||||
width: stroke.thickness,
|
||||
line_cap: LineCap::Square,
|
||||
line_cap: LineCap::Round,
|
||||
..Default::default()
|
||||
},
|
||||
transform,
|
||||
|
|
|
@ -6,8 +6,8 @@ use core::{
|
|||
use alloc::vec::Vec;
|
||||
|
||||
use crate::{
|
||||
bytecode::Chunk,
|
||||
value::Value,
|
||||
bytecode::{Chunk, EmitError, Offset, Opcode, TagId},
|
||||
value::{BytecodeLoc, Closure, FunctionName, Value, Vec4},
|
||||
vm::{Exception, FnArgs, Vm},
|
||||
};
|
||||
|
||||
|
@ -29,6 +29,7 @@ pub struct System {
|
|||
pub resolve_fn: fn(SystemFnArity, &str) -> Option<u8>,
|
||||
pub fns: [Option<SystemFn>; 256],
|
||||
pub chunks: Vec<Chunk>,
|
||||
structs_chunk_offsets: StructsChunkOffsets,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
|
@ -36,33 +37,28 @@ pub struct SystemImage {
|
|||
chunks: usize,
|
||||
}
|
||||
|
||||
macro_rules! def_fns {
|
||||
($($index:tt $arity:tt $name:tt => $fnref:expr),* $(,)?) => {
|
||||
pub(crate) fn init_fns(system: &mut System) {
|
||||
$(
|
||||
debug_assert!(system.fns[$index].is_none());
|
||||
system.fns[$index] = Some($fnref);
|
||||
)*
|
||||
}
|
||||
|
||||
pub(crate) fn resolve(arity: SystemFnArity, name: &str) -> Option<u8> {
|
||||
match (arity, name){
|
||||
$((SystemFnArity::$arity, $name) => Some($index),)*
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
};
|
||||
#[derive(Debug, Clone)]
|
||||
struct StructsChunkOffsets {
|
||||
dotter: BytecodeLoc,
|
||||
}
|
||||
|
||||
impl System {
|
||||
pub fn new(max_chunks: usize) -> Self {
|
||||
assert!(
|
||||
max_chunks > 1,
|
||||
"the 0th chunk is allocated for internal purposes; therefore there must be more than one chunk to execute bytecode"
|
||||
);
|
||||
assert!(max_chunks < u32::MAX as usize);
|
||||
|
||||
let (structs_chunk, structs_chunk_offsets) = Self::structs_chunk().unwrap();
|
||||
|
||||
let mut system = Self {
|
||||
resolve_fn: Self::resolve,
|
||||
fns: [None; 256],
|
||||
chunks: Vec::with_capacity(max_chunks),
|
||||
structs_chunk_offsets,
|
||||
};
|
||||
system.chunks.push(structs_chunk);
|
||||
Self::init_fns(&mut system);
|
||||
system
|
||||
}
|
||||
|
@ -92,6 +88,64 @@ impl System {
|
|||
panic!("image must be a subset of the current system")
|
||||
});
|
||||
}
|
||||
|
||||
// The structs chunk contains bytecode for _struct functions_.
|
||||
//
|
||||
// Struct functions are a way of encoding structures with arbitrary named data fields.
|
||||
// They are called like regular functions, except they always expect a single tag-type
|
||||
// argument. They're used where performance is not a primary concern---in structures that appear
|
||||
// once or a couple times throughout the lifetime of a program, in which convenient,
|
||||
// backwards-compatible field access is a priority.
|
||||
//
|
||||
// Each struct function has only two opcodes: `Field`, followed by a `Return`.
|
||||
// The `Field` opcode is an efficient way of encoding an if chain made solely out of tag
|
||||
// comparisons, returning a closure capture (or error if there's no field with the given name.)
|
||||
//
|
||||
// if (tag == A) capture_0
|
||||
// else if (tag == B) capture_1
|
||||
// else if (tag == C) capture_2
|
||||
// else error
|
||||
//
|
||||
// Closure captures are used here, because they're a convenient way of attaching indexed data
|
||||
// to any function, even created outside the language itself.
|
||||
//
|
||||
// All of this results in a function that can be called like `d From` to obtain a piece of data
|
||||
// stored inside of the `d` structure.
|
||||
fn structs_chunk() -> Result<(Chunk, StructsChunkOffsets), EmitError> {
|
||||
let mut chunk = Chunk::new(128).unwrap();
|
||||
|
||||
let dotter = chunk.offset();
|
||||
chunk.emit_opcode(Opcode::Field)?;
|
||||
chunk.emit_u8(3)?;
|
||||
chunk.emit_u16(TagId::From.to_u16())?;
|
||||
chunk.emit_u16(TagId::To.to_u16())?;
|
||||
chunk.emit_u16(TagId::Num.to_u16())?;
|
||||
chunk.emit_opcode(Opcode::Return)?;
|
||||
|
||||
fn loc(offset: Offset) -> BytecodeLoc {
|
||||
BytecodeLoc {
|
||||
chunk_id: ChunkId(0),
|
||||
offset: offset.to_u16(),
|
||||
}
|
||||
}
|
||||
|
||||
Ok((
|
||||
chunk,
|
||||
StructsChunkOffsets {
|
||||
dotter: loc(dotter),
|
||||
},
|
||||
))
|
||||
}
|
||||
|
||||
pub fn create_dotter(&self, from: Vec4, to: Vec4, num: f32) -> Closure {
|
||||
Closure {
|
||||
start: self.structs_chunk_offsets.dotter,
|
||||
name: FunctionName::Anonymous,
|
||||
param_count: 1,
|
||||
local_count: 0,
|
||||
captures: Vec::from_iter([Value::Vec4(from), Value::Vec4(to), Value::Number(num)]),
|
||||
}
|
||||
}
|
||||
}
|
||||
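The comment above is the central design note of this commit: a struct is a function whose captures are selected by a tag argument. A standalone model of that dispatch, with illustrative types that are not part of haku (in haku it is two opcodes, `Field` followed by `Return`, over a real `Closure`):

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    enum Tag { From, To, Num }

    #[derive(Debug, Clone, Copy, PartialEq)]
    enum Value { Vec2(f32, f32), Number(f32) }

    struct StructFn {
        tags: Vec<Tag>,       // encoded after the Field opcode in the real bytecode
        captures: Vec<Value>, // attached to the closure, one value per field
    }

    impl StructFn {
        // Equivalent to: if (tag == From) capture_0 else if (tag == To) capture_1 ... else error
        fn call(&self, tag: Tag) -> Result<Value, &'static str> {
            self.tags
                .iter()
                .position(|&t| t == tag)
                .and_then(|i| self.captures.get(i).copied())
                .ok_or("field with this name does not exist in the given data structure")
        }
    }

    fn main() {
        // Mirrors create_dotter: captures are [from, to, num], selected by From / To / Num.
        let dotter = StructFn {
            tags: vec![Tag::From, Tag::To, Tag::Num],
            captures: vec![Value::Vec2(0.0, 0.0), Value::Vec2(8.0, 8.0), Value::Number(3.0)],
        };
        assert_eq!(dotter.call(Tag::Num), Ok(Value::Number(3.0)));
        assert!(dotter.call(Tag::From).is_ok());
    }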
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
|
@ -105,11 +159,29 @@ impl Display for ChunkError {
|
|||
|
||||
impl Error for ChunkError {}
|
||||
|
||||
macro_rules! def_fns {
|
||||
($($index:tt $arity:tt $name:tt => $fnref:expr),* $(,)?) => {
|
||||
pub(crate) fn init_fns(system: &mut System) {
|
||||
$(
|
||||
debug_assert!(system.fns[$index].is_none());
|
||||
system.fns[$index] = Some($fnref);
|
||||
)*
|
||||
}
|
||||
|
||||
pub(crate) fn resolve(arity: SystemFnArity, name: &str) -> Option<u8> {
|
||||
match (arity, name){
|
||||
$((SystemFnArity::$arity, $name) => Some($index),)*
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
pub mod fns {
|
||||
use alloc::{format, vec::Vec};
|
||||
|
||||
use crate::{
|
||||
value::{Fill, List, Ref, Rgba, Scribble, Shape, Stroke, Value, Vec2, Vec4},
|
||||
value::{Fill, List, Ref, Reticle, Rgba, Scribble, Shape, Stroke, Value, Vec2, Vec4},
|
||||
vm::{Exception, FnArgs, Vm},
|
||||
};
|
||||
|
||||
|
@ -181,8 +253,11 @@ pub mod fns {
|
|||
0xc1 Nary "line" => line,
|
||||
0xc2 Nary "rect" => rect,
|
||||
0xc3 Nary "circle" => circle,
|
||||
|
||||
0xe0 Nary "stroke" => stroke,
|
||||
0xe1 Nary "fill" => fill,
|
||||
|
||||
0xf0 Nary "withDotter" => with_dotter,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -469,7 +544,13 @@ pub mod fns {
|
|||
|
||||
fn to_shape(value: Value, vm: &Vm) -> Option<Shape> {
|
||||
match value {
|
||||
Value::Nil | Value::False | Value::True | Value::Number(_) | Value::Rgba(_) => None,
|
||||
Value::Nil
|
||||
| Value::False
|
||||
| Value::True
|
||||
| Value::Tag(_)
|
||||
| Value::Number(_)
|
||||
| Value::Rgba(_) => None,
|
||||
|
||||
Value::Ref(id) => {
|
||||
if let Ref::Shape(shape) = vm.get_ref(id) {
|
||||
Some(shape.clone())
|
||||
|
@ -477,6 +558,7 @@ pub mod fns {
|
|||
None
|
||||
}
|
||||
}
|
||||
|
||||
Value::Vec4(vec) => Some(Shape::Point(vec.into())),
|
||||
}
|
||||
}
|
||||
|
@ -588,4 +670,22 @@ pub mod fns {
|
|||
Ok(Value::Nil)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_dotter(vm: &mut Vm, args: FnArgs) -> Result<Value, Exception> {
|
||||
if args.num() != 1 {
|
||||
return Err(vm.create_exception(
|
||||
"`withDotter` expects a single argument (withDotter \\d -> [])",
|
||||
));
|
||||
}
|
||||
|
||||
let draw = args.get_closure(vm, 0, "argument to `withDotter` must be a closure")?;
|
||||
if draw.param_count != 1 {
|
||||
return Err(vm.create_exception("function passed to `withDotter` must take in a single parameter (withDotter \\d -> [])"));
|
||||
}
|
||||
|
||||
let id = vm.create_ref(Ref::Reticle(Reticle::Dotter {
|
||||
draw: args.get(vm, 0),
|
||||
}))?;
|
||||
Ok(Value::Ref(id))
|
||||
}
|
||||
}
|
||||
|
|
crates/haku/src/trampoline.rs (new file, 78 lines)
|
@ -0,0 +1,78 @@
|
|||
use tiny_skia::Pixmap;
|
||||
|
||||
use crate::{
|
||||
render::{Renderer, RendererLimits},
|
||||
system::System,
|
||||
value::{Ref, Reticle, Value, Vec2},
|
||||
vm::{Exception, Vm},
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct Trampoline {
|
||||
pub value: Value,
|
||||
}
|
||||
|
||||
// NOTE: This must be kept in sync with haku.js.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
#[repr(u8)]
|
||||
pub enum Cont {
|
||||
Scribble,
|
||||
Dotter,
|
||||
}
|
||||
|
||||
impl Trampoline {
|
||||
pub fn new(init: Value) -> Self {
|
||||
Self { value: init }
|
||||
}
|
||||
|
||||
pub fn cont(&self, vm: &Vm) -> Cont {
|
||||
let Some((_, refv)) = vm.get_ref_value(self.value) else {
|
||||
return Cont::Scribble;
|
||||
};
|
||||
|
||||
match refv {
|
||||
Ref::Reticle(_) => Cont::Dotter,
|
||||
_ => Cont::Scribble,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn scribble(
|
||||
&mut self,
|
||||
vm: &Vm,
|
||||
pixmap: &mut Pixmap,
|
||||
translation: Vec2,
|
||||
limits: &RendererLimits,
|
||||
) -> Result<(), Exception> {
|
||||
let mut renderer = Renderer::new(&mut *pixmap, limits);
|
||||
renderer.translate(translation.x, translation.y);
|
||||
renderer.render(vm, self.value)
|
||||
}
|
||||
|
||||
pub fn dotter(
|
||||
&mut self,
|
||||
vm: &mut Vm,
|
||||
system: &System,
|
||||
from: Vec2,
|
||||
to: Vec2,
|
||||
num: f32,
|
||||
) -> Result<(), Exception> {
|
||||
let (_, vref) = vm.get_ref_value(self.value).expect("value must be a ref");
|
||||
let &Ref::Reticle(Reticle::Dotter {
|
||||
draw: Value::Ref(draw_id),
|
||||
}) = vref
|
||||
else {
|
||||
panic!("value must be a dotter reticle");
|
||||
};
|
||||
|
||||
let dotter = vm.create_ref(Ref::Closure(system.create_dotter(
|
||||
from.into(),
|
||||
to.into(),
|
||||
num,
|
||||
)))?;
|
||||
|
||||
let value = vm.run(system, draw_id, &[Value::Ref(dotter)])?;
|
||||
self.value = value;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
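This new module defines the host-side continuation protocol. Below is a sketch of how an embedder might drive it end to end using the API above; the brush evaluation that produces `initial_value`, the pixmap, and the limits are assumed to come from the host (as they do in haku-wasm and rkgk), and the loop deliberately answers every `Dotter` continuation with the same segment to keep the sketch short:

    use haku::{
        render::{tiny_skia::Pixmap, RendererLimits},
        system::System,
        trampoline::{Cont, Trampoline},
        value::{Value, Vec2},
        vm::{Exception, Vm},
    };

    // `initial_value` is what evaluating the brush closure produced (what
    // haku_begin_brush stores into the instance).
    fn drive_brush(
        vm: &mut Vm,
        system: &System,
        initial_value: Value,
        pixmap: &mut Pixmap,
        limits: &RendererLimits,
    ) -> Result<(), Exception> {
        let mut trampoline = Trampoline::new(initial_value);

        // Keep answering continuations until the brush settles on a plain scribble.
        while trampoline.cont(vm) == Cont::Dotter {
            // The host supplies the interaction data; a single short dot here.
            trampoline.dotter(vm, system, Vec2::new(0.0, 0.0), Vec2::new(0.0, 0.0), 1.0)?;
        }

        // No reticle left: render the resulting scribble into the pixmap.
        trampoline.scribble(vm, pixmap, Vec2::new(0.0, 0.0), limits)
    }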
|
@ -1,6 +1,6 @@
|
|||
use alloc::vec::Vec;
|
||||
|
||||
use crate::{compiler::ClosureSpec, system::ChunkId};
|
||||
use crate::{bytecode::TagId, compiler::ClosureSpec, system::ChunkId};
|
||||
|
||||
// TODO: Probably needs some pretty hardcore space optimization.
|
||||
// Maybe when we have static typing.
|
||||
|
@ -9,6 +9,7 @@ pub enum Value {
|
|||
Nil,
|
||||
False,
|
||||
True,
|
||||
Tag(TagId),
|
||||
Number(f32),
|
||||
Vec4(Vec4),
|
||||
Rgba(Rgba),
|
||||
|
@ -73,6 +74,12 @@ pub struct Vec2 {
|
|||
pub y: f32,
|
||||
}
|
||||
|
||||
impl Vec2 {
|
||||
pub fn new(x: f32, y: f32) -> Self {
|
||||
Self { x, y }
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Vec4> for Vec2 {
|
||||
fn from(value: Vec4) -> Self {
|
||||
Self {
|
||||
|
@ -90,6 +97,17 @@ pub struct Vec4 {
|
|||
pub w: f32,
|
||||
}
|
||||
|
||||
impl From<Vec2> for Vec4 {
|
||||
fn from(value: Vec2) -> Self {
|
||||
Self {
|
||||
x: value.x,
|
||||
y: value.y,
|
||||
z: 0.0,
|
||||
w: 0.0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Default)]
|
||||
#[repr(C)]
|
||||
pub struct Rgba {
|
||||
|
@ -123,6 +141,7 @@ pub enum Ref {
|
|||
List(List),
|
||||
Shape(Shape),
|
||||
Scribble(Scribble),
|
||||
Reticle(Reticle),
|
||||
}
|
||||
|
||||
impl Ref {
|
||||
|
@ -206,3 +225,8 @@ pub enum Scribble {
|
|||
Stroke(Stroke),
|
||||
Fill(Fill),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum Reticle {
|
||||
Dotter { draw: Value },
|
||||
}
|
||||
|
|
|
@ -5,9 +5,10 @@ use core::{
|
|||
};
|
||||
|
||||
use alloc::{string::String, vec::Vec};
|
||||
use log::debug;
|
||||
|
||||
use crate::{
|
||||
bytecode::{self, Defs, Opcode, CAPTURE_CAPTURE, CAPTURE_LOCAL},
|
||||
bytecode::{self, Defs, Opcode, TagId, CAPTURE_CAPTURE, CAPTURE_LOCAL},
|
||||
system::{ChunkId, System},
|
||||
value::{BytecodeLoc, Closure, FunctionName, List, Ref, RefId, Rgba, Value, Vec4},
|
||||
};
|
||||
|
@ -178,7 +179,12 @@ impl Vm {
|
|||
.ok_or_else(|| self.create_exception("corrupted bytecode (call stack underflow)"))
|
||||
}
|
||||
|
||||
pub fn run(&mut self, system: &System, mut closure_id: RefId) -> Result<Value, Exception> {
|
||||
pub fn run(
|
||||
&mut self,
|
||||
system: &System,
|
||||
mut closure_id: RefId,
|
||||
params: &[Value],
|
||||
) -> Result<Value, Exception> {
|
||||
let closure = self
|
||||
.get_ref(closure_id)
|
||||
.as_closure()
|
||||
|
@ -191,7 +197,12 @@ impl Vm {
|
|||
let mut fuel = self.fuel;
|
||||
|
||||
let init_bottom = bottom;
|
||||
for _ in 0..closure.local_count {
|
||||
let local_count = closure.local_count;
|
||||
|
||||
for &param in params {
|
||||
self.push(param)?;
|
||||
}
|
||||
for _ in 0..local_count {
|
||||
self.push(Value::Nil)?;
|
||||
}
|
||||
|
||||
|
@ -219,6 +230,11 @@ impl Vm {
|
|||
Opcode::False => self.push(Value::False)?,
|
||||
Opcode::True => self.push(Value::True)?,
|
||||
|
||||
Opcode::Tag => {
|
||||
let i = chunk.read_u16(&mut pc)?;
|
||||
self.push(Value::Tag(TagId::from_u16(i)))?;
|
||||
}
|
||||
|
||||
Opcode::Number => {
|
||||
let x = chunk.read_f32(&mut pc)?;
|
||||
self.push(Value::Number(x))?;
|
||||
|
@ -344,6 +360,34 @@ impl Vm {
|
|||
}
|
||||
}
|
||||
|
||||
Opcode::Field => {
|
||||
let count = chunk.read_u8(&mut pc)? as usize;
|
||||
|
||||
let field_tag = self.pop()?;
|
||||
let Value::Tag(field_tag_id) = field_tag else {
|
||||
return Err(self.create_exception("name of data field to look up must be a tag (starting with an uppercase letter)"));
|
||||
};
|
||||
|
||||
let mut index = None;
|
||||
for i in 0..count {
|
||||
let tag_id = TagId::from_u16(chunk.read_u16(&mut pc)?);
|
||||
if tag_id == field_tag_id {
|
||||
index = Some(i);
|
||||
}
|
||||
}
|
||||
|
||||
let Some(index) = index else {
|
||||
return Err(self.create_exception(
|
||||
"field with this name does not exist in the given data structure",
|
||||
));
|
||||
};
|
||||
|
||||
let closure = self.get_ref(closure_id).as_closure().unwrap();
|
||||
self.push(closure.captures.get(index).copied().ok_or_else(|| {
|
||||
self.create_exception("corrupted bytecode (field index out of bounds)")
|
||||
})?)?;
|
||||
}
|
||||
|
||||
Opcode::Call => {
|
||||
let argument_count = chunk.read_u8(&mut pc)? as usize;
|
||||
|
||||
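The `Field` handler above reads a `count` operand and then scans the operand list linearly for the popped tag. A standalone sketch of that decode; the little-endian layout and the helper names are assumptions for illustration only — the real reads go through `Chunk::read_u8`/`read_u16`:

    fn read_u8(bytecode: &[u8], pc: &mut usize) -> u8 {
        let b = bytecode[*pc];
        *pc += 1;
        b
    }

    fn read_u16(bytecode: &[u8], pc: &mut usize) -> u16 {
        let b = u16::from_le_bytes([bytecode[*pc], bytecode[*pc + 1]]);
        *pc += 2;
        b
    }

    /// Returns the capture index selected by `field_tag`, or None if the struct
    /// has no field with that tag.
    fn field_index(bytecode: &[u8], pc: &mut usize, field_tag: u16) -> Option<usize> {
        let count = read_u8(bytecode, pc) as usize;
        let mut index = None;
        for i in 0..count {
            // Like the VM, keep reading all operands even after a match,
            // so the program counter always ends up past the instruction.
            if read_u16(bytecode, pc) == field_tag {
                index = Some(i);
            }
        }
        index
    }

    fn main() {
        // Field with 3 tags (From=1, To=2, Num=3), encoded little-endian for this sketch.
        let bytecode = [3u8, 1, 0, 2, 0, 3, 0];
        let mut pc = 0;
        assert_eq!(field_index(&bytecode, &mut pc, 2), Some(1));
        assert_eq!(pc, bytecode.len());
    }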
|
@ -554,6 +598,23 @@ impl FnArgs {
|
|||
.to_rgba()
|
||||
.ok_or_else(|| vm.create_exception(message))
|
||||
}
|
||||
|
||||
#[inline(never)]
|
||||
pub fn get_closure<'vm>(
|
||||
&self,
|
||||
vm: &'vm Vm,
|
||||
index: usize,
|
||||
message: &'static str,
|
||||
) -> Result<&'vm Closure, Exception> {
|
||||
let value = self.get(vm, index);
|
||||
let (_, any_ref) = vm
|
||||
.get_ref_value(value)
|
||||
.ok_or_else(|| vm.create_exception(message))?;
|
||||
let Ref::Closure(closure) = any_ref else {
|
||||
return Err(vm.create_exception(message));
|
||||
};
|
||||
Ok(closure)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
|
|
|
@ -2,7 +2,7 @@ use std::error::Error;
|
|||
|
||||
use haku::{
|
||||
ast::{dump::dump, Ast},
|
||||
bytecode::{Chunk, Defs},
|
||||
bytecode::{Chunk, Defs, DefsLimits},
|
||||
compiler::{compile_expr, Compiler, Source},
|
||||
lexer::{lex, Lexer},
|
||||
parser::{self, Parser, ParserLimits},
|
||||
|
@ -14,7 +14,7 @@ use haku::{
|
|||
};
|
||||
|
||||
fn eval(code: &str) -> Result<Value, Box<dyn Error>> {
|
||||
let mut system = System::new(1);
|
||||
let mut system = System::new(2);
|
||||
|
||||
let code = SourceCode::unlimited_len(code);
|
||||
|
||||
|
@ -32,7 +32,10 @@ fn eval(code: &str) -> Result<Value, Box<dyn Error>> {
|
|||
system: &system,
|
||||
};
|
||||
|
||||
let mut defs = Defs::new(256);
|
||||
let mut defs = Defs::new(&DefsLimits {
|
||||
max_defs: 256,
|
||||
max_tags: 256,
|
||||
});
|
||||
let mut chunk = Chunk::new(65536).unwrap();
|
||||
let mut compiler = Compiler::new(&mut defs, &mut chunk);
|
||||
compile_expr(&mut compiler, &src, root)?;
|
||||
|
@ -70,7 +73,7 @@ fn eval(code: &str) -> Result<Value, Box<dyn Error>> {
|
|||
println!("closure spec: {closure_spec:?}");
|
||||
|
||||
let closure = vm.create_ref(Ref::Closure(Closure::chunk(chunk_id, closure_spec)))?;
|
||||
let result = vm.run(&system, closure)?;
|
||||
let result = vm.run(&system, closure, &[])?;
|
||||
|
||||
println!("used fuel: {}", limits.fuel - vm.remaining_fuel());
|
||||
|
||||
|
@ -281,3 +284,11 @@ fn issue_78() {
|
|||
"#;
|
||||
assert_eq!(eval(code).unwrap(), Value::Ref(RefId::from_u32(2)))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn with_dotter_identity() {
|
||||
let code = r#"
|
||||
withDotter \d -> d
|
||||
"#;
|
||||
assert_eq!(eval(code).unwrap(), Value::Ref(RefId::from_u32(0)))
|
||||
}
|
||||
|
|
|
@ -12,7 +12,10 @@ use axum::{
|
|||
};
|
||||
use base64::Engine;
|
||||
use eyre::{bail, Context, OptionExt};
|
||||
use haku::value::Value;
|
||||
use haku::{
|
||||
trampoline::{Cont, Trampoline},
|
||||
value::Value,
|
||||
};
|
||||
use schema::{
|
||||
ChunkInfo, Error, LoginRequest, LoginResponse, Notify, Online, Request, Version, WallInfo,
|
||||
};
|
||||
|
@ -29,7 +32,8 @@ use crate::{
|
|||
schema::Vec2,
|
||||
wall::{
|
||||
self, auto_save::AutoSave, chunk_images::ChunkImages, chunk_iterator::ChunkIterator,
|
||||
database::ChunkDataPair, ChunkPosition, JoinError, SessionHandle, UserInit, Wall, WallId,
|
||||
database::ChunkDataPair, ChunkPosition, Interaction, JoinError, SessionHandle, UserInit,
|
||||
Wall, WallId,
|
||||
},
|
||||
};
|
||||
|
||||
|
@ -220,12 +224,8 @@ struct SessionLoop {
|
|||
}
|
||||
|
||||
enum RenderCommand {
|
||||
SetBrush {
|
||||
brush: String,
|
||||
},
|
||||
|
||||
Plot {
|
||||
points: Vec<Vec2>,
|
||||
Interact {
|
||||
interactions: Vec<Interaction>,
|
||||
done: oneshot::Sender<()>,
|
||||
},
|
||||
}
|
||||
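The render command protocol now carries whole interaction batches plus a completion signal. A standalone sketch of that channel shape (simplified `Interaction`; the real types live in `wall` and the schema, and tokio with the rt/macros features is assumed):

    use tokio::sync::{mpsc, oneshot};

    #[derive(Debug, Clone)]
    enum Interaction {
        SetBrush { brush: String },
        Dotter { from: (f32, f32), to: (f32, f32), num: f32 },
        Scribble,
    }

    enum RenderCommand {
        Interact {
            interactions: Vec<Interaction>,
            done: oneshot::Sender<()>,
        },
    }

    #[tokio::main]
    async fn main() {
        let (tx, mut rx) = mpsc::channel(1);

        // Stand-in for the render thread: drain command batches, then signal completion.
        let worker = tokio::task::spawn_blocking(move || {
            while let Some(RenderCommand::Interact { interactions, done }) = rx.blocking_recv() {
                println!("rendering {} interaction(s)", interactions.len());
                let _ = done.send(());
            }
        });

        let (done_tx, done_rx) = oneshot::channel();
        tx.send(RenderCommand::Interact {
            interactions: vec![
                Interaction::SetBrush { brush: "[]".to_owned() }, // placeholder brush source
                Interaction::Dotter { from: (0.0, 0.0), to: (16.0, 0.0), num: 4.0 },
                Interaction::Scribble,
            ],
            done: done_tx,
        })
        .await
        .unwrap();
        // Like the SetBrush path in the session loop, wait until the batch is processed.
        done_rx.await.unwrap();

        drop(tx);
        worker.await.unwrap();
    }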
|
@ -245,10 +245,17 @@ impl SessionLoop {
|
|||
// If this ends up dropping commands - it's your fault for trying to DoS my server!
|
||||
let (render_commands_tx, render_commands_rx) = mpsc::channel(1);
|
||||
|
||||
render_commands_tx
|
||||
.send(RenderCommand::SetBrush { brush })
|
||||
.await
|
||||
.unwrap();
|
||||
let thread_ready = {
|
||||
let (done_tx, done_rx) = oneshot::channel();
|
||||
render_commands_tx
|
||||
.send(RenderCommand::Interact {
|
||||
interactions: vec![Interaction::SetBrush { brush }],
|
||||
done: done_tx,
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
done_rx
|
||||
};
|
||||
|
||||
// We spawn our own thread so as not to clog the tokio blocking thread pool with our
|
||||
// rendering shenanigans.
|
||||
|
@ -265,6 +272,8 @@ impl SessionLoop {
|
|||
})
|
||||
.context("could not spawn render thread")?;
|
||||
|
||||
thread_ready.await?;
|
||||
|
||||
Ok(Self {
|
||||
wall_id,
|
||||
wall,
|
||||
|
@ -312,47 +321,82 @@ impl SessionLoop {
|
|||
| wall::EventKind::Leave
|
||||
| wall::EventKind::Cursor { .. } => (),
|
||||
|
||||
wall::EventKind::SetBrush { brush } => {
|
||||
// SetBrush is not dropped because it is a very important event.
|
||||
_ = self
|
||||
.render_commands_tx
|
||||
.send(RenderCommand::SetBrush {
|
||||
brush: brush.clone(),
|
||||
})
|
||||
.await;
|
||||
}
|
||||
wall::EventKind::Plot { points } => {
|
||||
let chunks_to_modify: Vec<_> =
|
||||
chunks_to_modify(&self.wall, points).into_iter().collect();
|
||||
match self.chunk_images.load(chunks_to_modify.clone()).await {
|
||||
Ok(_) => {
|
||||
// We drop commands if we take too long to render instead of lagging
|
||||
// the WebSocket thread.
|
||||
// Theoretically this will yield much better responsiveness, but it _will_
|
||||
// result in some visual glitches if we're getting bottlenecked.
|
||||
let (done_tx, done_rx) = oneshot::channel();
|
||||
let send_result =
|
||||
self.render_commands_tx.try_send(RenderCommand::Plot {
|
||||
points: points.clone(),
|
||||
done: done_tx,
|
||||
});
|
||||
wall::EventKind::Interact { interactions } => {
|
||||
let (done_tx, done_rx) = oneshot::channel();
|
||||
|
||||
if send_result.is_err() {
|
||||
info!(
|
||||
?points,
|
||||
"render thread is overloaded, dropping request to draw points"
|
||||
);
|
||||
}
|
||||
|
||||
let auto_save = Arc::clone(&self.auto_save);
|
||||
tokio::spawn(async move {
|
||||
_ = done_rx.await;
|
||||
auto_save.request(chunks_to_modify).await;
|
||||
if interactions
|
||||
.iter()
|
||||
.any(|i| matches!(i, Interaction::SetBrush { .. }))
|
||||
{
|
||||
// SetBrush is an important event, so we wait for the render thread
|
||||
// to unload.
|
||||
_ = self
|
||||
.render_commands_tx
|
||||
.send(RenderCommand::Interact {
|
||||
interactions: interactions.clone(),
|
||||
done: done_tx,
|
||||
})
|
||||
.await;
|
||||
} else {
|
||||
// If there is no SetBrush, there's no need to wait, so we fire events
|
||||
// blindly. If the thread's not okay with that... well, whatever.
|
||||
// That's your issue for making a really slow brush.
|
||||
let send_result =
|
||||
self.render_commands_tx.try_send(RenderCommand::Interact {
|
||||
interactions: interactions.clone(),
|
||||
done: done_tx,
|
||||
});
|
||||
if send_result.is_err() {
|
||||
info!(
|
||||
?interactions,
|
||||
"render thread is overloaded, dropping interaction request"
|
||||
);
|
||||
}
|
||||
Err(err) => error!(?err, "while loading chunks for render command"),
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Auto save. This'll need us to compute which chunks will be affected
|
||||
// by the interactions.
|
||||
} // wall::EventKind::SetBrush { brush } => {
|
||||
// // SetBrush is not dropped because it is a very important event.
|
||||
// _ = self
|
||||
// .render_commands_tx
|
||||
// .send(RenderCommand::SetBrush {
|
||||
// brush: brush.clone(),
|
||||
// })
|
||||
// .await;
|
||||
// }
|
||||
// wall::EventKind::Plot { points } => {
|
||||
// let chunks_to_modify: Vec<_> =
|
||||
// chunks_to_modify(&self.wall, points).into_iter().collect();
|
||||
// match self.chunk_images.load(chunks_to_modify.clone()).await {
|
||||
// Ok(_) => {
|
||||
// // We drop commands if we take too long to render instead of lagging
|
||||
// // the WebSocket thread.
|
||||
// // Theoretically this will yield much better responsiveness, but it _will_
|
||||
// // result in some visual glitches if we're getting bottlenecked.
|
||||
// let (done_tx, done_rx) = oneshot::channel();
|
||||
// let send_result =
|
||||
// self.render_commands_tx.try_send(RenderCommand::Plot {
|
||||
// points: points.clone(),
|
||||
// done: done_tx,
|
||||
// });
|
||||
|
||||
// if send_result.is_err() {
|
||||
// info!(
|
||||
// ?points,
|
||||
// "render thread is overloaded, dropping request to draw points"
|
||||
// );
|
||||
// }
|
||||
|
||||
// let auto_save = Arc::clone(&self.auto_save);
|
||||
// tokio::spawn(async move {
|
||||
// _ = done_rx.await;
|
||||
// auto_save.request(chunks_to_modify).await;
|
||||
// });
|
||||
// }
|
||||
// Err(err) => error!(?err, "while loading chunks for render command"),
|
||||
// }
|
||||
// }
|
||||
}
|
||||
|
||||
self.wall.event(wall::Event {
|
||||
|
@ -437,32 +481,97 @@ impl SessionLoop {
|
|||
|
||||
fn render_thread(wall: Arc<Wall>, limits: Limits, mut commands: mpsc::Receiver<RenderCommand>) {
|
||||
let mut haku = Haku::new(limits);
|
||||
let mut trampoline = None;
|
||||
let mut brush_ok = false;
|
||||
let mut current_render_area = RenderArea::default();
|
||||
|
||||
while let Some(command) = commands.blocking_recv() {
|
||||
match command {
|
||||
RenderCommand::SetBrush { brush } => {
|
||||
brush_ok = haku.set_brush(&brush).is_ok();
|
||||
let RenderCommand::Interact { interactions, done } = command;
|
||||
|
||||
let mut queue = VecDeque::from(interactions);
|
||||
while let Some(interaction) = queue.pop_front() {
|
||||
if let Some(render_area) = render_area(wall.settings(), &interaction) {
|
||||
current_render_area = render_area;
|
||||
}
|
||||
|
||||
RenderCommand::Plot { points, done } => {
|
||||
if brush_ok {
|
||||
if let Ok(value) = haku.eval_brush() {
|
||||
for point in points {
|
||||
// Ignore the result. It's better if we render _something_ rather
|
||||
// than nothing.
|
||||
_ = draw_to_chunks(&wall, &haku, value, point);
|
||||
match interaction {
|
||||
Interaction::SetBrush { brush } => {
|
||||
brush_ok = haku.set_brush(&brush).is_ok();
|
||||
}
|
||||
|
||||
Interaction::Dotter { from, to, num } => {
|
||||
if brush_ok {
|
||||
if let Some(trampoline) =
|
||||
jumpstart_trampoline(&mut haku, &mut trampoline)
|
||||
{
|
||||
let cont = haku.cont(trampoline);
|
||||
if cont == Cont::Dotter {
|
||||
_ = haku.dotter(trampoline, from, to, num);
|
||||
} else {
|
||||
error!("received Dotter interaction when a {cont:?} continuation was next");
|
||||
}
|
||||
} else {
|
||||
info!("failed to jumpstart trampoline for Dotter interaction");
|
||||
}
|
||||
haku.reset_vm();
|
||||
}
|
||||
}
|
||||
_ = done.send(());
|
||||
|
||||
Interaction::Scribble => {
|
||||
// Regarding the take(): this is to self-synchronize in case of error.
|
||||
// Once a scribble is rendered, we reset back to not having a trampoline,
|
||||
// and further interactions must be sent to kickstart a new one.
|
||||
//
|
||||
// Regarding not jumpstarting a trampoline here: it would be useless,
|
||||
// because a scribble is always the last thing in the chain, and it never
|
||||
// modifies the render area. Therefore what would end up happening is we'd
|
||||
// end up rendering to no chunks, therefore not doing anything.
|
||||
// The server doesn't need to report anything to the user and so can save
|
||||
// a bit of work by _not_ evaluating the brush initially here.
|
||||
if let Some(trampoline) = trampoline.take() {
|
||||
_ = draw_to_chunks(&wall, &haku, current_render_area, trampoline.value);
|
||||
} else {
|
||||
info!("tried to scribble without an active trampoline");
|
||||
}
|
||||
|
||||
current_render_area = RenderArea::default();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_ = done.send(());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, Default)]
|
||||
struct RenderArea {
|
||||
top_left: Vec2,
|
||||
bottom_right: Vec2,
|
||||
}
|
||||
|
||||
fn render_area(wall_settings: &wall::Settings, interaction: &Interaction) -> Option<RenderArea> {
|
||||
match interaction {
|
||||
Interaction::Dotter { from, to, .. } => {
|
||||
let half_paint_area = wall_settings.paint_area as f32 / 2.0;
|
||||
Some(RenderArea {
|
||||
top_left: Vec2::new(from.x - half_paint_area, from.y - half_paint_area),
|
||||
bottom_right: Vec2::new(to.x + half_paint_area, to.y + half_paint_area),
|
||||
})
|
||||
}
|
||||
Interaction::SetBrush { .. } | Interaction::Scribble => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn jumpstart_trampoline<'a>(
|
||||
haku: &mut Haku,
|
||||
trampoline: &'a mut Option<Trampoline>,
|
||||
) -> Option<&'a mut Trampoline> {
|
||||
if trampoline.is_none() {
|
||||
*trampoline = haku.eval_brush().ok().map(Trampoline::new);
|
||||
}
|
||||
trampoline.as_mut()
|
||||
}
|
||||
|
||||
fn chunks_to_modify(wall: &Wall, points: &[Vec2]) -> HashSet<ChunkPosition> {
|
||||
let mut chunks = HashSet::new();
|
||||
for point in points {
|
||||
|
@ -483,23 +592,23 @@ fn chunks_to_modify(wall: &Wall, points: &[Vec2]) -> HashSet<ChunkPosition> {
|
|||
}
|
||||
|
||||
#[instrument(skip(wall, haku, value))]
|
||||
fn draw_to_chunks(wall: &Wall, haku: &Haku, value: Value, center: Vec2) -> eyre::Result<()> {
|
||||
fn draw_to_chunks(
|
||||
wall: &Wall,
|
||||
haku: &Haku,
|
||||
render_area: RenderArea,
|
||||
value: Value,
|
||||
) -> eyre::Result<()> {
|
||||
let settings = wall.settings();
|
||||
|
||||
let chunk_size = settings.chunk_size as f32;
|
||||
let paint_area = settings.paint_area as f32;
|
||||
|
||||
let left = center.x - paint_area / 2.0;
|
||||
let top = center.y - paint_area / 2.0;
|
||||
let top_left_chunk = settings.chunk_at(render_area.top_left);
|
||||
let bottom_right_chunk = settings.chunk_at_ceil(render_area.bottom_right);
|
||||
|
||||
let left_chunk = settings.chunk_at_1d(left);
|
||||
let top_chunk = settings.chunk_at_1d(top);
|
||||
let right_chunk = settings.chunk_at_1d_ceil(left + paint_area);
|
||||
let bottom_chunk = settings.chunk_at_1d_ceil(top + paint_area);
|
||||
for chunk_y in top_chunk..bottom_chunk {
|
||||
for chunk_x in left_chunk..right_chunk {
|
||||
let x = f32::floor(-chunk_x as f32 * chunk_size + center.x);
|
||||
let y = f32::floor(-chunk_y as f32 * chunk_size + center.y);
|
||||
for chunk_y in top_left_chunk.y..bottom_right_chunk.y {
|
||||
for chunk_x in top_left_chunk.x..bottom_right_chunk.x {
|
||||
let x = f32::floor(-chunk_x as f32 * chunk_size);
|
||||
let y = f32::floor(-chunk_y as f32 * chunk_size);
|
||||
let chunk_ref = wall.get_or_create_chunk(ChunkPosition::new(chunk_x, chunk_y));
|
||||
let mut chunk = chunk_ref.blocking_lock();
|
||||
haku.render_value(&mut chunk.pixmap, value, Vec2 { x, y })?;
|
||||
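`draw_to_chunks` now derives its chunk range from the accumulated render area rather than from a single plotted point. A standalone sketch of the coverage math; `chunk_at`/`chunk_at_ceil` are assumed to be floor/ceil divisions by the chunk size, as their names suggest:

    fn chunk_at(coord: f32, chunk_size: f32) -> i32 {
        (coord / chunk_size).floor() as i32
    }

    fn chunk_at_ceil(coord: f32, chunk_size: f32) -> i32 {
        (coord / chunk_size).ceil() as i32
    }

    fn main() {
        let chunk_size = 256.0_f32;
        // Render area in wall coordinates (top-left and bottom-right corners).
        let (top_left, bottom_right) = ((300.0_f32, 100.0_f32), (700.0_f32, 200.0_f32));

        let (left, top) = (chunk_at(top_left.0, chunk_size), chunk_at(top_left.1, chunk_size));
        let (right, bottom) = (
            chunk_at_ceil(bottom_right.0, chunk_size),
            chunk_at_ceil(bottom_right.1, chunk_size),
        );

        for chunk_y in top..bottom {
            for chunk_x in left..right {
                // Translation that moves wall-space geometry into this chunk's pixmap.
                let x = f32::floor(-chunk_x as f32 * chunk_size);
                let y = f32::floor(-chunk_y as f32 * chunk_size);
                println!("render into chunk ({chunk_x}, {chunk_y}) with translation ({x}, {y})");
            }
        }
    }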
|
|
|
@ -6,7 +6,7 @@
|
|||
use eyre::{bail, Context, OptionExt};
|
||||
use haku::{
|
||||
ast::Ast,
|
||||
bytecode::{Chunk, Defs, DefsImage},
|
||||
bytecode::{Chunk, Defs, DefsImage, DefsLimits},
|
||||
compiler::{ClosureSpec, Compiler, Source},
|
||||
lexer::{lex, Lexer},
|
||||
parser::{self, Parser, ParserLimits},
|
||||
|
@ -14,6 +14,7 @@ use haku::{
|
|||
source::SourceCode,
|
||||
system::{ChunkId, System, SystemImage},
|
||||
token::Lexis,
|
||||
trampoline::{Cont, Trampoline},
|
||||
value::{Closure, Ref, Value},
|
||||
vm::{Vm, VmImage, VmLimits},
|
||||
};
|
||||
|
@ -30,6 +31,7 @@ pub struct Limits {
|
|||
pub max_source_code_len: u32,
|
||||
pub max_chunks: usize,
|
||||
pub max_defs: usize,
|
||||
pub max_tags: usize,
|
||||
pub max_tokens: usize,
|
||||
pub max_parser_events: usize,
|
||||
pub ast_capacity: usize,
|
||||
|
@ -59,7 +61,10 @@ pub struct Haku {
|
|||
impl Haku {
|
||||
pub fn new(limits: Limits) -> Self {
|
||||
let system = System::new(limits.max_chunks);
|
||||
let defs = Defs::new(limits.max_defs);
|
||||
let defs = Defs::new(&DefsLimits {
|
||||
max_defs: limits.max_defs,
|
||||
max_tags: limits.max_tags,
|
||||
});
|
||||
let vm = Vm::new(
|
||||
&defs,
|
||||
&VmLimits {
|
||||
|
@ -158,7 +163,7 @@ impl Haku {
|
|||
|
||||
let scribble = self
|
||||
.vm
|
||||
.run(&self.system, closure_id)
|
||||
.run(&self.system, closure_id, &[])
|
||||
.context("an exception occurred while evaluating the scribble")?;
|
||||
|
||||
Ok(scribble)
|
||||
|
@ -187,4 +192,18 @@ impl Haku {
|
|||
pub fn reset_vm(&mut self) {
|
||||
self.vm.restore_image(&self.vm_image);
|
||||
}
|
||||
|
||||
pub fn cont(&self, trampoline: &Trampoline) -> Cont {
|
||||
trampoline.cont(&self.vm)
|
||||
}
|
||||
|
||||
pub fn dotter(
|
||||
&mut self,
|
||||
trampoline: &mut Trampoline,
|
||||
from: Vec2,
|
||||
to: Vec2,
|
||||
num: f32,
|
||||
) -> eyre::Result<()> {
|
||||
Ok(trampoline.dotter(&mut self.vm, &self.system, from.into(), to.into(), num)?)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize, Default)]
|
||||
pub struct Vec2 {
|
||||
pub x: f32,
|
||||
pub y: f32,
|
||||
|
@ -11,3 +11,9 @@ impl Vec2 {
|
|||
Self { x, y }
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Vec2> for haku::value::Vec2 {
|
||||
fn from(value: Vec2) -> Self {
|
||||
Self::new(value.x, value.y)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -192,9 +192,19 @@ pub enum EventKind {
|
|||
Leave,
|
||||
|
||||
Cursor { position: Vec2 },
|
||||
Interact { interactions: Vec<Interaction> },
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
#[serde(
|
||||
tag = "kind",
|
||||
rename_all = "camelCase",
|
||||
rename_all_fields = "camelCase"
|
||||
)]
|
||||
pub enum Interaction {
|
||||
SetBrush { brush: String },
|
||||
Plot { points: Vec<Vec2> },
|
||||
Dotter { from: Vec2, to: Vec2, num: f32 },
|
||||
Scribble,
|
||||
}
|
||||
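The serde attributes above pin down the wire format of interactions: internally tagged under `kind`, with camelCase names. A self-contained sketch of what that serializes to (the `Mini*` types mirror the real ones only for illustration, and `rename_all_fields` requires a recent serde):

    use serde::Serialize;

    #[derive(Serialize)]
    struct MiniVec2 { x: f32, y: f32 }

    #[derive(Serialize)]
    #[serde(tag = "kind", rename_all = "camelCase", rename_all_fields = "camelCase")]
    enum MiniInteraction {
        Dotter { from: MiniVec2, to: MiniVec2, num: f32 },
        Scribble,
    }

    fn main() {
        let dotter = MiniInteraction::Dotter {
            from: MiniVec2 { x: 32.0, y: 32.0 },
            to: MiniVec2 { x: 48.0, y: 32.0 },
            num: 2.0,
        };
        // {"kind":"dotter","from":{"x":32.0,"y":32.0},"to":{"x":48.0,"y":32.0},"num":2.0}
        println!("{}", serde_json::to_string(&dotter).unwrap());
        // {"kind":"scribble"}
        println!("{}", serde_json::to_string(&MiniInteraction::Scribble).unwrap());
    }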
|
||||
#[derive(Debug, Clone, Serialize)]
|
||||
|
@ -276,7 +286,7 @@ impl Wall {
|
|||
|
||||
// Drawing events are handled by the owner session's thread to make drawing as
|
||||
// parallel as possible.
|
||||
EventKind::SetBrush { .. } | EventKind::Plot { .. } => (),
|
||||
EventKind::Interact { .. } => {}
|
||||
}
|
||||
}
|
||||
|
||||
|
|