Compare commits
No commits in common. "bff899c9c00f9de6460754296925e1cbb18805dd" and "d5e2fbd0cf2380a006048d29be020a140216a766" have entirely different histories.
bff899c9c0 ... d5e2fbd0cf
25 changed files with 1167 additions and 612 deletions
Cargo.lock (generated), 7 changes
@@ -817,14 +817,13 @@ dependencies = [

[[package]]
name = "image"
version = "0.25.6"
version = "0.25.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db35664ce6b9810857a38a906215e75a9c879f0696556a39f59c62829710251a"
checksum = "99314c8a2152b8ddb211f924cdae532d8c5e4c8bb54728e12fff1b0cd5963a10"
dependencies = [
 "bytemuck",
 "byteorder-lite",
 "num-traits",
 "png",
]

[[package]]
@@ -1345,14 +1344,12 @@ dependencies = [
 "haku",
 "haku2",
 "handlebars",
 "image",
 "indexmap",
 "jotdown",
 "mime_guess",
 "rand",
 "rand_chacha",
 "rayon",
 "rkgk-image-ops",
 "rusqlite",
 "serde",
 "serde_json",
@@ -6,7 +6,6 @@ members = ["crates/*"]
haku.path = "crates/haku"
haku2.path = "crates/haku2"
log = "0.4.22"
rkgk-image-ops.path = "crates/rkgk-image-ops"
tiny-skia = { version = "0.11.4", default-features = false }

[profile.dev.package.rkgk-image-ops]
@@ -6,8 +6,6 @@ use std::{
};

fn main() -> Result<(), Box<dyn Error>> {
    println!("cargo::rerun-if-changed=build.zig");
    println!("cargo::rerun-if-changed=build.zig.zon");
    println!("cargo::rerun-if-changed=src");

    let out_dir = env::var("OUT_DIR").unwrap();
@@ -60,5 +58,8 @@ fn main() -> Result<(), Box<dyn Error>> {
        panic!("zig failed to build");
    }

    println!("cargo::rustc-link-search={out_dir}/zig-out/lib");
    println!("cargo::rustc-link-lib=haku2");

    Ok(())
}
@@ -12,6 +12,13 @@ pub fn build(b: *std.Build) void {
        .optimize = optimize,
        .pic = true,
    });
    const lib = b.addStaticLibrary(.{
        .name = "haku2",
        .root_module = mod,
    });
    lib.pie = true;
    lib.bundle_compiler_rt = true;
    b.installArtifact(lib);

    const mod_wasm = b.createModule(.{
        .root_source_file = b.path("src/haku2.zig"),
@@ -1,3 +1,575 @@
/// WebAssembly code for haku2.
/// haku2 is purely a client-side library, and does not have Rust bindings.
use std::{
    alloc::{self, Layout},
    error::Error,
    fmt::{self, Display},
    marker::{PhantomData, PhantomPinned},
    ptr::{self, NonNull},
    slice,
};

use log::trace;

pub static WASM: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/zig-out/bin/haku2.wasm"));

#[unsafe(no_mangle)]
unsafe extern "C" fn __haku2_alloc(size: usize, align: usize) -> *mut u8 {
    if let Ok(layout) = Layout::from_size_align(size, align) {
        alloc::alloc(layout)
    } else {
        ptr::null_mut()
    }
}

#[unsafe(no_mangle)]
unsafe extern "C" fn __haku2_realloc(
    ptr: *mut u8,
    size: usize,
    align: usize,
    new_size: usize,
) -> *mut u8 {
    if let Ok(layout) = Layout::from_size_align(size, align) {
        alloc::realloc(ptr, layout, new_size)
    } else {
        ptr::null_mut()
    }
}

#[unsafe(no_mangle)]
unsafe extern "C" fn __haku2_dealloc(ptr: *mut u8, size: usize, align: usize) {
    match Layout::from_size_align(size, align) {
        Ok(layout) => alloc::dealloc(ptr, layout),
        Err(_) => {
            log::error!("__haku2_dealloc: invalid layout size={size} align={align} ptr={ptr:?}")
        }
    }
}

#[unsafe(no_mangle)]
unsafe extern "C" fn __haku2_log_err(
    scope: *const u8,
    scope_len: usize,
    msg: *const u8,
    len: usize,
) {
    let scope = String::from_utf8_lossy(slice::from_raw_parts(scope, scope_len));
    let msg = String::from_utf8_lossy(slice::from_raw_parts(msg, len));
    log::error!("{scope}: {msg}");
}

#[unsafe(no_mangle)]
unsafe extern "C" fn __haku2_log_warn(
    scope: *const u8,
    scope_len: usize,
    msg: *const u8,
    len: usize,
) {
    let scope = String::from_utf8_lossy(slice::from_raw_parts(scope, scope_len));
    let msg = String::from_utf8_lossy(slice::from_raw_parts(msg, len));
    log::warn!("{scope}: {msg}");
}

#[unsafe(no_mangle)]
unsafe extern "C" fn __haku2_log_info(
    scope: *const u8,
    scope_len: usize,
    msg: *const u8,
    len: usize,
) {
    let scope = String::from_utf8_lossy(slice::from_raw_parts(scope, scope_len));
    let msg = String::from_utf8_lossy(slice::from_raw_parts(msg, len));
    log::info!("{scope}: {msg}");
}

#[unsafe(no_mangle)]
unsafe extern "C" fn __haku2_log_debug(
    scope: *const u8,
    scope_len: usize,
    msg: *const u8,
    len: usize,
) {
    let scope = String::from_utf8_lossy(slice::from_raw_parts(scope, scope_len));
    let msg = String::from_utf8_lossy(slice::from_raw_parts(msg, len));
    log::debug!("{scope}: {msg}");
}

#[repr(C)]
struct ScratchC {
    _data: (),
    _marker: PhantomData<(*mut u8, PhantomPinned)>,
}

#[repr(C)]
struct LimitsC {
    _data: (),
    _marker: PhantomData<(*mut u8, PhantomPinned)>,
}

#[repr(C)]
struct DefsC {
    _data: (),
    _marker: PhantomData<(*mut u8, PhantomPinned)>,
}

#[repr(C)]
struct VmC {
    _data: (),
    _marker: PhantomData<(*mut u8, PhantomPinned)>,
}

extern "C" {
    fn haku2_scratch_new(max: usize) -> *mut ScratchC;
    fn haku2_scratch_destroy(scratch: *mut ScratchC);
    fn haku2_scratch_reset(scratch: *mut ScratchC);

    fn haku2_limits_new() -> *mut LimitsC;
    fn haku2_limits_destroy(limits: *mut LimitsC);
    fn haku2_limits_set_stack_capacity(limits: *mut LimitsC, new: usize);
    fn haku2_limits_set_call_stack_capacity(limits: *mut LimitsC, new: usize);

    fn haku2_defs_parse(
        defs_string: *const u8,
        defs_len: usize,
        tags_string: *const u8,
        tags_len: usize,
    ) -> *mut DefsC;
    fn haku2_defs_destroy(defs: *mut DefsC);

    fn haku2_vm_new() -> *mut VmC;
    fn haku2_vm_destroy(vm: *mut VmC);
    fn haku2_vm_reset(
        vm: *mut VmC,
        s: *mut ScratchC,
        defs: *const DefsC,
        limits: *const LimitsC,
        fuel: u32,
    );
    fn haku2_vm_run_main(
        vm: *mut VmC,
        scratch: *mut ScratchC,
        code: *const u8,
        code_len: usize,
        local_count: u8,
    ) -> bool;
    fn haku2_vm_has_cont(vm: *const VmC) -> bool;
    fn haku2_vm_is_dotter(vm: *const VmC) -> bool;
    fn haku2_vm_run_dotter(
        vm: *mut VmC,
        scratch: *mut ScratchC,
        from_x: f32,
        from_y: f32,
        to_x: f32,
        to_y: f32,
        num: f32,
    ) -> bool;
    fn haku2_vm_exception_len(vm: *const VmC) -> usize;
    fn haku2_vm_exception_render(vm: *const VmC, buffer: *mut u8);

    // improper_ctypes is emitted for `*mut CanvasC`, which is an opaque {} on the Zig side and
    // therefore FFI-safe.
    #[expect(improper_ctypes)]
    fn haku2_render(vm: *mut VmC, canvas: *mut CanvasC, max_depth: usize) -> bool;
}

#[derive(Debug)]
pub struct Scratch {
    raw: NonNull<ScratchC>,
}

impl Scratch {
    pub fn new(max: usize) -> Scratch {
        // SAFETY: haku2_scratch_new does not have any safety invariants.
        let raw = NonNull::new(unsafe { haku2_scratch_new(max) }).expect("out of memory");
        trace!("Scratch::new -> {raw:?}");
        Scratch { raw }
    }

    pub fn reset(&mut self) {
        trace!("Scratch::reset({:?})", self.raw);
        // SAFETY: The pointer passed is non-null.
        unsafe {
            haku2_scratch_reset(self.raw.as_ptr());
        }
    }
}

impl Drop for Scratch {
    fn drop(&mut self) {
        trace!("Scratch::drop({:?})", self.raw);
        // SAFETY: The pointer passed is non-null.
        unsafe {
            haku2_scratch_destroy(self.raw.as_ptr());
        }
    }
}

#[derive(Debug, Clone, Copy)]
pub struct LimitsSpec {
    pub stack_capacity: usize,
    pub call_stack_capacity: usize,
}

#[derive(Debug)]
pub struct Limits {
    raw: NonNull<LimitsC>,
}

// SAFETY: Limits's backing storage is only modified on creation.
// Changing the limits requires creating a new instance.
unsafe impl Send for Limits {}
unsafe impl Sync for Limits {}

impl Limits {
    pub fn new(spec: LimitsSpec) -> Self {
        // SAFETY: haku2_limits_new has no safety invariants.
        let limits = NonNull::new(unsafe { haku2_limits_new() }).expect("out of memory");

        // SAFETY: The following functions are called on a valid pointer.
        unsafe {
            haku2_limits_set_stack_capacity(limits.as_ptr(), spec.stack_capacity);
            haku2_limits_set_call_stack_capacity(limits.as_ptr(), spec.call_stack_capacity);
        }

        Self { raw: limits }
    }
}

impl Drop for Limits {
    fn drop(&mut self) {
        // SAFETY: The pointer passed is non-null.
        unsafe {
            haku2_limits_destroy(self.raw.as_ptr());
        }
    }
}

#[derive(Debug)]
pub struct Defs {
    raw: NonNull<DefsC>,
}

// SAFETY: Defs' backing storage is not modified after creation.
unsafe impl Send for Defs {}
unsafe impl Sync for Defs {}

impl Defs {
    pub fn parse(defs: &str, tags: &str) -> Self {
        Self {
            raw: NonNull::new(unsafe {
                haku2_defs_parse(defs.as_ptr(), defs.len(), tags.as_ptr(), tags.len())
            })
            .expect("out of memory"),
        }
    }
}

impl Drop for Defs {
    fn drop(&mut self) {
        // SAFETY: The pointer passed is non-null.
        unsafe {
            haku2_defs_destroy(self.raw.as_ptr());
        }
    }
}

#[derive(Debug)]
pub struct Code {
    defs: Defs,
    main_chunk: Vec<u8>,
    main_local_count: u8,
}

impl Code {
    /// Creates a new instance of `Code` from a valid vector of bytes.
    ///
    /// # Safety
    ///
    /// This does not perform any validation, and there is no way to perform such
    /// validation before constructing this. The bytecode must simply be valid, which is the case
    /// for bytecode emitted directly by the compiler.
    ///
    /// Untrusted bytecode should never ever be loaded under any circumstances.
    pub unsafe fn new(defs: Defs, main_chunk: Vec<u8>, main_local_count: u8) -> Self {
        Self {
            defs,
            main_chunk,
            main_local_count,
        }
    }
}

/// A VM that is ready to run and loaded with valid bytecode.
#[derive(Debug)]
pub struct Vm {
    scratch: Scratch,
    code: Code,
    limits: Limits,
    inner: VmInner,
}

#[derive(Debug)]
pub enum Cont<'vm> {
    None,
    Dotter(ContDotter<'vm>),
}

#[derive(Debug)]
pub struct ContDotter<'vm> {
    vm: &'vm mut Vm,
}

#[derive(Debug, Clone, Copy)]
pub struct Dotter {
    pub from: (f32, f32),
    pub to: (f32, f32),
    pub num: f32,
}

impl Vm {
    pub fn new(scratch: Scratch, code: Code, limits: Limits) -> Self {
        // SAFETY: haku2_vm_new cannot fail.
        // Do note that this returns an uninitialized VM, which must be reset before use.
        let raw = NonNull::new(unsafe { haku2_vm_new() }).expect("out of memory");
        trace!("Vm::new({scratch:?}, {code:?}, {limits:?}) -> {raw:?}");
        Self {
            // SAFETY:
            // - Ownership of scratch is passed to the VM, so the VM cannot outlive the scratch space.
            // - The VM never gives you any references back, so this is safe to do.
            // - The other arguments are only borrowed immutably for construction.
            inner: VmInner { raw },
            scratch,
            code,
            limits,
        }
    }

    /// Begin running code. This makes the VM enter a "trampoline" state: after this call, you may
    /// proceed to call `cont` as many times as it returns a value other than [`Cont::None`].
    ///
    /// Calling `begin` again during this process will work correctly, and result in another
    /// continuation being stack on top of the old one---at the expense of a stack slot.
    pub fn begin(&mut self, fuel: u32) -> Result<(), Exception> {
        trace!("Vm::begin({self:?}, {fuel})");
        self.scratch.reset();
        let ok = unsafe {
            haku2_vm_reset(
                self.inner.raw.as_ptr(),
                self.scratch.raw.as_ptr(),
                self.code.defs.raw.as_ptr(),
                self.limits.raw.as_ptr(),
                fuel,
            );
            haku2_vm_run_main(
                self.inner.raw.as_ptr(),
                self.scratch.raw.as_ptr(),
                self.code.main_chunk.as_ptr(),
                self.code.main_chunk.len(),
                self.code.main_local_count,
            )
        };
        if ok {
            Ok(())
        } else {
            Err(self.exception().expect("missing exception after !ok"))
        }
    }

    /// Returns whether `cont()` can be called to run the next continuation.
    pub fn has_cont(&self) -> bool {
        unsafe { haku2_vm_has_cont(self.inner.raw.as_ptr()) }
    }

    fn is_dotter(&self) -> bool {
        // SAFETY: The pointer is valid.
        unsafe { haku2_vm_is_dotter(self.inner.raw.as_ptr()) }
    }

    /// Returns how the VM should continue executing after the previous execution.
    pub fn cont(&mut self) -> Cont<'_> {
        match () {
            _ if self.is_dotter() => Cont::Dotter(ContDotter { vm: self }),
            _ => Cont::None,
        }
    }

    /// Renders the current scribble on top of the stack.
    /// If the value on top is not a scribble, throws an exception.
    ///
    /// The rendering is performed by calling into the [`Canvas`] trait.
    pub fn render(&mut self, canvas: &mut dyn Canvas, max_depth: usize) -> Result<(), Exception> {
        let mut wrapped = CanvasC { inner: canvas };
        let ok = unsafe { haku2_render(self.inner.raw.as_ptr(), &mut wrapped, max_depth) };
        if ok {
            Ok(())
        } else {
            Err(self.exception().expect("missing exception after !ok"))
        }
    }

    /// Render the current exception out to a string.
    /// Returns `None` if there's no exception.
    pub fn exception(&self) -> Option<Exception> {
        // SAFETY: The pointer passed to this function is valid.
        let len = unsafe { haku2_vm_exception_len(self.inner.raw.as_ptr()) };
        if len == 0 {
            return None;
        }

        let mut buffer = vec![0; len];
        // SAFETY: The length of the buffer is as indicated by haku2_vm_exception_len.
        unsafe {
            haku2_vm_exception_render(self.inner.raw.as_ptr(), buffer.as_mut_ptr());
        }
        Some(Exception {
            message: String::from_utf8_lossy(&buffer).into_owned(),
        })
    }

    /// Take the `Scratch` out of the VM for reuse in another one.
    /// The scratch memory will be reset (no bytes will be consumed.)
    pub fn into_scratch(self) -> Scratch {
        trace!("Vm::into_scratch({self:?})");
        let Vm {
            mut scratch,
            code: _,
            inner: _,
            limits: _,
        } = self;
        scratch.reset();
        scratch
    }
}

impl ContDotter<'_> {
    pub fn run(self, dotter: &Dotter) -> Result<(), Exception> {
        trace!("ContDotter::run({self:?}, {dotter:?})");

        let Dotter {
            from: (from_x, from_y),
            to: (to_x, to_y),
            num,
        } = *dotter;

        let ok = unsafe {
            haku2_vm_run_dotter(
                self.vm.inner.raw.as_ptr(),
                self.vm.scratch.raw.as_ptr(),
                from_x,
                from_y,
                to_x,
                to_y,
                num,
            )
        };
        if ok {
            Ok(())
        } else {
            Err(self.vm.exception().expect("missing exception after !ok"))
        }
    }
}

#[derive(Debug)]
struct VmInner {
    raw: NonNull<VmC>,
}

impl Drop for VmInner {
    fn drop(&mut self) {
        trace!("VmInner::drop({:?})", self.raw);
        // SAFETY: The pointer passed is non-null.
        unsafe {
            haku2_vm_destroy(self.raw.as_ptr());
        }
    }
}

#[derive(Debug)]
pub struct Exception {
    message: String,
}

impl Display for Exception {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.message)
    }
}

impl Error for Exception {}

/// Marker for the VM to indicate that the rendering did not go down correctly.
/// If this is encountered, it throws an exception and aborts rendering.
#[derive(Debug)]
pub struct RenderError;

pub trait Canvas {
    fn begin(&mut self) -> Result<(), RenderError>;
    fn line(&mut self, x1: f32, y1: f32, x2: f32, y2: f32) -> Result<(), RenderError>;
    fn rectangle(&mut self, x: f32, y: f32, width: f32, height: f32) -> Result<(), RenderError>;
    fn circle(&mut self, x: f32, y: f32, r: f32) -> Result<(), RenderError>;
    fn fill(&mut self, r: u8, g: u8, b: u8, a: u8) -> Result<(), RenderError>;
    fn stroke(&mut self, r: u8, g: u8, b: u8, a: u8, thickness: f32) -> Result<(), RenderError>;
}

// SAFETY NOTE: I'm not sure the ownership model for this is quite correct.
// Given how the &mut's ownership flows through the Zig side of the code, it _should_ be fine,
// but I'm not an unsafe code expert to say this is the case for sure.

#[repr(C)]
struct CanvasC<'a> {
    inner: &'a mut dyn Canvas,
}

#[unsafe(no_mangle)]
unsafe extern "C" fn __haku2_canvas_begin(c: *mut CanvasC) -> bool {
    let c = &mut *c;
    c.inner.begin().is_ok()
}

#[unsafe(no_mangle)]
unsafe extern "C" fn __haku2_canvas_line(
    c: *mut CanvasC,
    x1: f32,
    y1: f32,
    x2: f32,
    y2: f32,
) -> bool {
    let c = &mut *c;
    c.inner.line(x1, y1, x2, y2).is_ok()
}

#[unsafe(no_mangle)]
unsafe extern "C" fn __haku2_canvas_rectangle(
    c: *mut CanvasC,
    x: f32,
    y: f32,
    width: f32,
    height: f32,
) -> bool {
    let c = &mut *c;
    c.inner.rectangle(x, y, width, height).is_ok()
}

#[unsafe(no_mangle)]
unsafe extern "C" fn __haku2_canvas_circle(c: *mut CanvasC, x: f32, y: f32, r: f32) -> bool {
    let c = &mut *c;
    c.inner.circle(x, y, r).is_ok()
}

#[unsafe(no_mangle)]
unsafe extern "C" fn __haku2_canvas_fill(c: *mut CanvasC, r: u8, g: u8, b: u8, a: u8) -> bool {
    let c = &mut *c;
    c.inner.fill(r, g, b, a).is_ok()
}

#[unsafe(no_mangle)]
unsafe extern "C" fn __haku2_canvas_stroke(
    c: *mut CanvasC,
    r: u8,
    g: u8,
    b: u8,
    a: u8,
    thickness: f32,
) -> bool {
    let c = &mut *c;
    c.inner.stroke(r, g, b, a, thickness).is_ok()
}
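For orientation, this is how the types above (Scratch, Defs, Code, Limits, Vm, Cont) compose into one evaluation pass. The sketch below is not part of the changeset; it assumes it lives next to the definitions above, and the bytecode, limit values, fuel, and the canvas argument are hypothetical placeholders. Real bytecode must come from the haku compiler, as the safety docs on `Code::new` require.

// Sketch only: driving the haku2 VM once, under the assumptions stated above.
fn run_once(
    defs_str: &str,
    tags_str: &str,
    bytecode: Vec<u8>,
    local_count: u8,
    canvas: &mut dyn Canvas,
) -> Result<(), Exception> {
    let scratch = Scratch::new(1024 * 1024); // scratch arena size is an assumption
    let defs = Defs::parse(defs_str, tags_str);
    // SAFETY: only valid, compiler-emitted bytecode may be loaded (see Code::new).
    let code = unsafe { Code::new(defs, bytecode, local_count) };
    let limits = Limits::new(LimitsSpec {
        stack_capacity: 1024,      // assumed limit
        call_stack_capacity: 256,  // assumed limit
    });

    let mut vm = Vm::new(scratch, code, limits);
    vm.begin(1_000_000)?; // fuel value is an assumption

    // Trampoline: keep running continuations until the VM has none left.
    while vm.has_cont() {
        match vm.cont() {
            Cont::Dotter(dotter) => dotter.run(&Dotter {
                from: (0.0, 0.0),
                to: (10.0, 10.0),
                num: 1.0,
            })?,
            Cont::None => break,
        }
    }

    // Render whatever scribble the brush left on top of the stack.
    vm.render(canvas, 256)
}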
@@ -1,36 +1,14 @@
#[derive(Debug, Clone, Copy)]
pub struct Image<'a> {
    pub width: u32,
    pub height: u32,
    pub data: &'a [u8],
pub fn add(left: u64, right: u64) -> u64 {
    left + right
}

#[derive(Debug)]
pub struct ImageMut<'a> {
    pub width: u32,
    pub height: u32,
    pub data: &'a mut [u8],
}
#[cfg(test)]
mod tests {
    use super::*;

impl ImageMut<'_> {
    pub fn composite_alpha(&mut self, src: &Image<'_>) {
        assert_eq!(self.width, src.width);
        assert_eq!(self.height, src.height);

        fn fixp_mul(a: u8, b: u8) -> u8 {
            ((a as u16 * b as u16 + 255) >> 8) as u8
        }

        fn alpha_blend(dst: u8, src: u8, alpha: u8) -> u8 {
            fixp_mul(src, alpha) + fixp_mul(dst, 255 - alpha)
        }

        for (dst_rgba, src_rgba) in self.data.chunks_exact_mut(4).zip(src.data.chunks_exact(4)) {
            let src_alpha = src_rgba[3];
            dst_rgba[0] = alpha_blend(dst_rgba[0], src_rgba[0], src_alpha);
            dst_rgba[1] = alpha_blend(dst_rgba[1], src_rgba[1], src_alpha);
            dst_rgba[2] = alpha_blend(dst_rgba[2], src_rgba[2], src_alpha);
            dst_rgba[3] = alpha_blend(dst_rgba[3], src_rgba[3], src_alpha);
        }
    #[test]
    fn it_works() {
        let result = add(2, 2);
        assert_eq!(result, 4);
    }
}
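As a quick sanity check on the fixed-point math in the `composite_alpha` code above: `fixp_mul` approximates `(a * b) / 255` by adding a bias of 255 before shifting right by 8, so a fully opaque source channel passes through unchanged. A small hedged test sketch, not part of the diff:

// Sketch: verify the fixed-point alpha math shown above (not part of the changeset).
fn fixp_mul(a: u8, b: u8) -> u8 {
    ((a as u16 * b as u16 + 255) >> 8) as u8
}

fn alpha_blend(dst: u8, src: u8, alpha: u8) -> u8 {
    fixp_mul(src, alpha) + fixp_mul(dst, 255 - alpha)
}

#[test]
fn opaque_source_wins() {
    // With alpha = 255 the source channel survives intact:
    // (200 * 255 + 255) >> 8 = 51255 >> 8 = 200, and the dst term is 0.
    assert_eq!(alpha_blend(17, 200, 255), 200);
}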
@@ -18,13 +18,11 @@ haku.workspace = true
haku2.workspace = true
handlebars = "6.0.0"
indexmap = { version = "2.4.0", features = ["serde"] }
image = { version = "0.25.6", default-features = false, features = ["png"] }
jotdown = "0.5.0"
mime_guess = "2.0.5"
rand = "0.8.5"
rand_chacha = "0.3.1"
rayon = "1.10.0"
rkgk-image-ops.workspace = true
rusqlite = { version = "0.32.1", features = ["bundled"] }
serde = { version = "1.0.206", features = ["derive"] }
serde_json = "1.0.124"
@@ -1,6 +1,5 @@
use std::{
    collections::{HashSet, VecDeque},
    io::Cursor,
    sync::Arc,
};

@@ -13,14 +12,10 @@ use axum::{
};
use base64::Engine;
use eyre::{bail, Context, OptionExt};
use image::{DynamicImage, ImageFormat, ImageReader};
use rayon::iter::{IntoParallelRefIterator, ParallelIterator as _};
use rkgk_image_ops::{Image, ImageMut};
use schema::{
    ChunkInfo, Error, LoginRequest, LoginResponse, Notify, Online, Request, Version, WallInfo,
};
use serde::{Deserialize, Serialize};
use tiny_skia::{IntSize, Pixmap};
use tokio::{
    select,
    sync::{mpsc, oneshot},
@@ -28,14 +23,13 @@ use tokio::{
use tracing::{error, info, info_span, instrument};

use crate::{
    haku::{Haku, Limits},
    login::{self, database::LoginStatus},
    schema::Vec2,
    wall::{
        self,
        auto_save::AutoSave,
        chunk_images::{ChunkImages, LoadedChunks},
        chunk_iterator::ChunkIterator,
        database::ChunkDataPair,
        ChunkPosition, JoinError, SessionHandle, UserInit, Wall, WallId,
        self, auto_save::AutoSave, chunk_images::ChunkImages, chunk_iterator::ChunkIterator,
        database::ChunkDataPair, render::ChunkCanvas, ChunkPosition, Interaction, JoinError,
        SessionHandle, UserInit, Wall, WallId,
    },
};

@@ -173,7 +167,6 @@ async fn fallible_websocket(api: Arc<Api>, ws: &mut WebSocket) -> eyre::Result<(
        wall_info: WallInfo {
            chunk_size: open_wall.wall.settings().chunk_size,
            paint_area: open_wall.wall.settings().paint_area,
            max_edit_size: open_wall.wall.settings().max_edit_size,
            online: users_online,
            haku_limits: api.config.haku.clone(),
        },
@@ -202,6 +195,8 @@ async fn fallible_websocket(api: Arc<Api>, ws: &mut WebSocket) -> eyre::Result<(
        open_wall.chunk_images,
        open_wall.auto_save,
        session_handle,
        api.config.haku.clone(),
        login_request.init.brush,
    )
    .await?
    .event_loop(ws)
@@ -224,16 +219,9 @@ struct SessionLoop {
    pending_images: VecDeque<ChunkDataPair>,
}

struct EditWithData {
    chunk: ChunkPosition,
    data_type: String,
    data: Vec<u8>,
}

enum RenderCommand {
    Edit {
        chunks: LoadedChunks,
        edits: Vec<EditWithData>,
    Interact {
        interactions: Vec<Interaction>,
        done: oneshot::Sender<()>,
    },
}

@@ -245,12 +233,26 @@ impl SessionLoop {
        chunk_images: Arc<ChunkImages>,
        auto_save: Arc<AutoSave>,
        handle: SessionHandle,
        limits: Limits,
        brush: String,
    ) -> eyre::Result<Self> {
        // Limit how many commands may come in _pretty darn hard_ because these can be really
        // CPU-intensive.
        // If this ends up dropping commands - it's your fault for trying to DoS my server!
        let (render_commands_tx, render_commands_rx) = mpsc::channel(1);

        let thread_ready = {
            let (done_tx, done_rx) = oneshot::channel();
            render_commands_tx
                .send(RenderCommand::Interact {
                    interactions: vec![Interaction::SetBrush { brush }],
                    done: done_tx,
                })
                .await
                .unwrap();
            done_rx
        };

        // We spawn our own thread so as not to clog the tokio blocking thread pool with our
        // rendering shenanigans.
        std::thread::Builder::new()
@@ -261,11 +263,13 @@ impl SessionLoop {
                let _span =
                    info_span!("render_thread", ?wall_id, session_id = ?handle.session_id)
                        .entered();
                render_thread(wall, render_commands_rx)
                Self::render_thread(wall, limits, render_commands_rx)
            }
        })
        .context("could not spawn render thread")?;

        thread_ready.await?;

        Ok(Self {
            wall_id,
            wall,
@@ -306,49 +310,51 @@ impl SessionLoop {
            }

            Request::Wall { wall_event } => {
                match wall_event {
                match &wall_event {
                    // This match only concerns itself with drawing-related events to offload
                    // all the evaluation and drawing work to this session's drawing thread.
                    wall::EventKind::Join { .. }
                    | wall::EventKind::Leave
                    | wall::EventKind::Cursor { .. }
                    | wall::EventKind::Interact { .. } => {
                        self.wall.event(wall::Event {
                            session_id: self.handle.session_id,
                            kind: wall_event,
                        });
                    }
                    | wall::EventKind::Cursor { .. } => (),

                    wall::EventKind::Edit { edits } => {
                        let chunk_data = recv_expect(ws).await?.into_data();

                        let mut edits_with_data = Vec::with_capacity(edits.len());
                        for edit in edits {
                            if let Some(data) = chunk_data
                                .get(edit.data_offset..edit.data_offset + edit.data_length)
                            {
                                edits_with_data.push(EditWithData {
                                    chunk: edit.chunk,
                                    data_type: edit.data_type,
                                    data: data.to_owned(),
                                });
                            }
                        }
                    wall::EventKind::Interact { interactions } => {
                        let (done_tx, done_rx) = oneshot::channel();

                        let chunks_to_modify: Vec<_> =
                            edits_with_data.iter().map(|edit| edit.chunk).collect();
                            chunks_to_modify(self.wall.settings(), interactions)
                                .into_iter()
                                .collect();
                        match self.chunk_images.load(chunks_to_modify.clone()).await {
                            Ok(chunks) => {
                                let (done_tx, done_rx) = oneshot::channel();
                                // Wait during contention.
                                // We don't want to drop any edits, as that would result in
                                // graphical glitches and desyncs.
                                _ = self
                                    .render_commands_tx
                                    .send(RenderCommand::Edit {
                                        chunks,
                                        edits: edits_with_data,
                                        done: done_tx,
                                    })
                                    .await;
                            Ok(_) => {
                                if interactions
                                    .iter()
                                    .any(|i| matches!(i, Interaction::SetBrush { .. }))
                                {
                                    // SetBrush is an important event, so we wait for the render thread
                                    // to unload.
                                    _ = self
                                        .render_commands_tx
                                        .send(RenderCommand::Interact {
                                            interactions: interactions.clone(),
                                            done: done_tx,
                                        })
                                        .await;
                                } else {
                                    // If there is no SetBrush, there's no need to wait, so we fire events
                                    // blindly. If the thread's not okay with that... well, whatever.
                                    // That's your issue for making a really slow brush.
                                    let send_result =
                                        self.render_commands_tx.try_send(RenderCommand::Interact {
                                            interactions: interactions.clone(),
                                            done: done_tx,
                                        });
                                    if send_result.is_err() {
                                        info!(
                                            ?interactions,
                                            "render thread is overloaded, dropping interaction request"
                                        );
                                    }
                                }

                                let auto_save = Arc::clone(&self.auto_save);
                                tokio::spawn(async move {
@@ -360,6 +366,11 @@ impl SessionLoop {
                            }
                        }
                    }
                }

                self.wall.event(wall::Event {
                    session_id: self.handle.session_id,
                    kind: wall_event,
                });
            }

            Request::Viewport {
@@ -435,82 +446,132 @@ impl SessionLoop {

        Ok(())
    }
}

fn render_thread(wall: Arc<Wall>, mut commands: mpsc::Receiver<RenderCommand>) {
    while let Some(command) = commands.blocking_recv() {
        let RenderCommand::Edit {
            chunks,
            edits,
            done,
        } = command;
    fn render_thread(wall: Arc<Wall>, limits: Limits, mut commands: mpsc::Receiver<RenderCommand>) {
        let mut haku = Haku::new(limits);
        let mut brush_ok = false;
        let mut current_render_area = RenderArea::default();

        let positions: Vec<_> = edits.iter().map(|edit| edit.chunk).collect();
        while let Some(command) = commands.blocking_recv() {
            let RenderCommand::Interact { interactions, done } = command;

        let chunk_size = wall.settings().chunk_size;
        let chunk_int_size = IntSize::from_wh(chunk_size, chunk_size).unwrap();
        info!("decoding edits");
        let decoded = edits
            .par_iter()
            .flat_map(|edit| match &edit.data_type[..] {
                "image/png" => {
                    let mut reader = ImageReader::new(Cursor::new(&edit.data));
                    reader.set_format(ImageFormat::Png);
                    reader.limits({
                        let mut limits = image::Limits::no_limits();
                        limits.max_image_width = Some(chunk_size);
                        limits.max_image_height = Some(chunk_size);
                        limits
                    });
            let mut queue = VecDeque::from(interactions);
            while let Some(interaction) = queue.pop_front() {
                if let Some(render_area) = render_area(wall.settings(), &interaction) {
                    current_render_area = render_area;
                }

                    reader
                        .decode()
                        .context("image decoding failed")
                        .and_then(|image| {
                            if image.width() != chunk_size || image.height() != chunk_size {
                                bail!(
                                    "{:?} image size {}x{} does not match chunk size {}",
                                    edit.chunk,
                                    image.width(),
                                    image.height(),
                                    chunk_size
                                );
                match interaction {
                    Interaction::SetBrush { brush } => {
                        brush_ok = haku.set_brush(&brush).is_ok();
                    }

                    Interaction::Dotter { from, to, num } => {
                        if brush_ok {
                            jumpstart_trampoline(&mut haku);
                            match haku.cont() {
                                haku2::Cont::Dotter(dotter) => match dotter.run(&haku2::Dotter {
                                    from: (from.x, from.y),
                                    to: (to.x, to.y),
                                    num,
                                }) {
                                    Ok(_) => (),
                                    Err(err) => error!("exception while running dotter: {err}"),
                                },
                                other => error!("received Dotter interaction when a {other:?} continuation was next")
                            }
                            Ok(image)
                        })
                        .map(DynamicImage::into_rgba8)
                        .inspect_err(|err| info!(?edit.chunk, ?err, "error while decoding"))
                        .ok()
                        .and_then(|image| Pixmap::from_vec(image.into_raw(), chunk_int_size))
                }
                        }
                    }

                    Interaction::Scribble => {
                        match haku.cont() {
                            haku2::Cont::None => {
                                draw_to_chunks(&wall, current_render_area, &mut haku);
                            }
                            _ => error!("tried to draw a scribble with an active continuation"),
                        }

                        current_render_area = RenderArea::default();
                    }
                }
            }

                _ => {
                    info!(edit.data_type, "unknown data type");
                    None
                }
            })
            .collect::<Vec<Pixmap>>();
        info!("edits decoded");

        let mut chunks_locked = Vec::new();
        for position in &positions {
            chunks_locked.push(chunks.chunks[position].blocking_lock());
            _ = done.send(());
        }
    }
}

#[derive(Debug, Clone, Copy, Default)]
struct RenderArea {
    top_left: Vec2,
    bottom_right: Vec2,
}

fn render_area(wall_settings: &wall::Settings, interaction: &Interaction) -> Option<RenderArea> {
    match interaction {
        Interaction::Dotter { from, to, .. } => {
            let half_paint_area = wall_settings.paint_area as f32 / 2.0;
            Some(RenderArea {
                top_left: Vec2::new(from.x - half_paint_area, from.y - half_paint_area),
                bottom_right: Vec2::new(to.x + half_paint_area, to.y + half_paint_area),
            })
        }
        Interaction::SetBrush { .. } | Interaction::Scribble => None,
    }
}

fn chunks_to_modify(
    wall_settings: &wall::Settings,
    interactions: &[Interaction],
) -> HashSet<ChunkPosition> {
    let mut chunks = HashSet::new();

    for interaction in interactions {
        // NOTE: This is mostly a tentative overestimation, and can result in more chunks being
        // marked as needing autosave than will be touched in reality.
        // It's better to play safe in this case than lose data.
        if let Some(render_area) = render_area(wall_settings, interaction) {
            let top_left_chunk = wall_settings.chunk_at(render_area.top_left);
            let bottom_right_chunk = wall_settings.chunk_at_ceil(render_area.bottom_right);
            for chunk_y in top_left_chunk.y..bottom_right_chunk.y {
                for chunk_x in top_left_chunk.x..bottom_right_chunk.x {
                    chunks.insert(ChunkPosition::new(chunk_x, chunk_y));
                }
            }
        }
    }

    chunks
}

fn jumpstart_trampoline(haku: &mut Haku) {
    if !haku.has_cont() {
        if let Err(e) = haku.eval_brush() {
            error!("eval_brush2 exception: {e:?}");
        }
    }
}

#[instrument(skip(wall, vm))]
fn draw_to_chunks(wall: &Wall, render_area: RenderArea, vm: &mut Haku) {
    let settings = wall.settings();

    let chunk_size = settings.chunk_size as f32;

    let top_left_chunk = settings.chunk_at(render_area.top_left);
    let bottom_right_chunk = settings.chunk_at_ceil(render_area.bottom_right);

    for chunk_y in top_left_chunk.y..bottom_right_chunk.y {
        for chunk_x in top_left_chunk.x..bottom_right_chunk.x {
            let x = f32::floor(-chunk_x as f32 * chunk_size);
            let y = f32::floor(-chunk_y as f32 * chunk_size);
            let chunk_ref = wall.get_or_create_chunk(ChunkPosition::new(chunk_x, chunk_y));
            let mut chunk = chunk_ref.blocking_lock();
            chunk.touch();
            let mut canvas = ChunkCanvas::new(&mut chunk).translated(x, y);
            if let Err(e) = vm.render(&mut canvas, 256) {
                info!(chunk_x, chunk_y, "drawing failed: {e}");
            }
        }

        for (mut dst_chunk, src_pixmap) in chunks_locked.into_iter().zip(decoded) {
            let mut dst = ImageMut {
                width: chunk_size,
                height: chunk_size,
                data: dst_chunk.pixmap.data_mut(),
            };
            let src = Image {
                width: chunk_size,
                height: chunk_size,
                data: src_pixmap.data(),
            };
            dst.composite_alpha(&src);
        }

        _ = done.send(());
    }
}
@@ -42,7 +42,6 @@ pub struct Online {
pub struct WallInfo {
    pub chunk_size: u32,
    pub paint_area: u32,
    pub max_edit_size: usize,
    pub haku_limits: crate::haku::Limits,
    pub online: Vec<Online>,
}
@@ -1,6 +1,20 @@
//! High-level wrapper for Haku.

// TODO: This should be used as the basis for haku-wasm as well as haku tests in the future to
// avoid duplicating code.

use eyre::{bail, Context, OptionExt};
use haku::{
    ast::Ast,
    bytecode::{Chunk, Defs, DefsImage, DefsLimits},
    compiler::{Compiler, Source},
    lexer::{lex, Lexer},
    parser::{self, Parser, ParserLimits},
    source::SourceCode,
    token::Lexis,
};
use serde::{Deserialize, Serialize};
use tracing::{info, instrument, Level};

#[derive(Debug, Clone, Deserialize, Serialize)]
// NOTE: For serialization, this struct does _not_ have serde(rename_all = "camelCase") on it,
@@ -22,3 +36,123 @@ pub struct Limits {
    pub memory: usize,
    pub render_max_depth: usize,
}

pub struct Haku {
    limits: Limits,

    defs: Defs,
    defs_image: DefsImage,

    vm: Option<haku2::Vm>,
}

impl Haku {
    pub fn new(limits: Limits) -> Self {
        let defs = Defs::new(&DefsLimits {
            max_defs: limits.max_defs,
            max_tags: limits.max_tags,
        });

        let defs_image = defs.image();

        Self {
            limits,
            defs,
            defs_image,
            vm: None,
        }
    }

    fn reset(&mut self) {
        self.defs.restore_image(&self.defs_image);
    }

    #[instrument(skip(self, code), err)]
    pub fn set_brush(&mut self, code: &str) -> eyre::Result<()> {
        info!(?code);

        self.reset();

        let code = SourceCode::limited_len(code, self.limits.max_source_code_len)
            .ok_or_eyre("source code is too long")?;

        let mut lexer = Lexer::new(Lexis::new(self.limits.max_tokens), code);
        lex(&mut lexer)?;

        let mut parser = Parser::new(
            &lexer.lexis,
            &ParserLimits {
                max_events: self.limits.max_parser_events,
            },
        );
        parser::toplevel(&mut parser);
        let mut ast = Ast::new(self.limits.ast_capacity);
        let (root, parser_diagnostics) = parser.into_ast(&mut ast)?;

        let src = Source { code, ast: &ast };

        let mut chunk = Chunk::new(self.limits.chunk_capacity)
            .expect("chunk capacity must be representable as a 16-bit number");
        let mut compiler = Compiler::new(&mut self.defs, &mut chunk);
        haku::compiler::compile_expr(&mut compiler, &src, root)
            .context("failed to compile the chunk")?;
        let closure_spec = compiler.closure_spec();

        if !lexer.diagnostics.is_empty()
            || !parser_diagnostics.is_empty()
            || !compiler.diagnostics.is_empty()
        {
            info!(?lexer.diagnostics, ?parser_diagnostics, ?compiler.diagnostics, "diagnostics were emitted");
            bail!("diagnostics were emitted");
        }

        let scratch = self
            .vm
            .take()
            .map(|vm| vm.into_scratch())
            .unwrap_or_else(|| haku2::Scratch::new(self.limits.memory));
        let defs = haku2::Defs::parse(&self.defs.serialize_defs(), &self.defs.serialize_tags());
        // SAFETY: The code is fresh out of the compiler oven, so it is guaranteed to be valid.
        // Well, more or less. There may lurk bugs.
        let code = unsafe { haku2::Code::new(defs, chunk.bytecode, closure_spec.local_count) };
        let limits = haku2::Limits::new(haku2::LimitsSpec {
            stack_capacity: self.limits.stack_capacity,
            call_stack_capacity: self.limits.call_stack_capacity,
        });
        self.vm = Some(haku2::Vm::new(scratch, code, limits));

        info!("brush set successfully");

        Ok(())
    }

    #[instrument(skip(self), err(level = Level::INFO))]
    pub fn eval_brush(&mut self) -> eyre::Result<()> {
        let vm = self
            .vm
            .as_mut()
            .ok_or_eyre("brush is not compiled and ready to be used")?;
        vm.begin(self.limits.fuel as u32)
            .context("an exception occurred during begin()")?;
        Ok(())
    }

    #[instrument(skip(self, canvas, max_depth), err(level = Level::INFO))]
    pub fn render(&mut self, canvas: &mut dyn haku2::Canvas, max_depth: usize) -> eyre::Result<()> {
        let vm = self
            .vm
            .as_mut()
            .ok_or_eyre("VM is not ready for rendering")?;
        vm.render(canvas, max_depth)
            .context("exception while rendering")?;
        Ok(())
    }

    pub fn has_cont(&mut self) -> bool {
        self.vm.as_mut().expect("VM is not started").has_cont()
    }

    pub fn cont(&mut self) -> haku2::Cont<'_> {
        self.vm.as_mut().expect("VM is not started").cont()
    }
}
@@ -23,6 +23,7 @@ pub mod broker;
pub mod chunk_images;
pub mod chunk_iterator;
pub mod database;
pub mod render;

pub use broker::Broker;
pub use database::Database;
@@ -134,7 +135,6 @@ pub struct Settings {
    pub max_sessions: usize,
    pub paint_area: u32,
    pub chunk_size: u32,
    pub max_edit_size: usize,
}

impl Settings {
@@ -208,7 +208,6 @@ pub enum EventKind {

    Cursor { position: Vec2 },
    Interact { interactions: Vec<Interaction> },
    Edit { edits: Vec<Edit> },
}

#[derive(Debug, Clone, Deserialize, Serialize)]
@@ -223,15 +222,6 @@ pub enum Interaction {
    Scribble,
}

#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(tag = "kind", rename_all = "camelCase")]
pub struct Edit {
    pub chunk: ChunkPosition,
    pub data_type: String, // media type of `data`
    pub data_offset: usize,
    pub data_length: usize,
}

#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Online {
@@ -264,16 +254,11 @@ impl Wall {
        self.chunks.get(&at).map(|chunk| Arc::clone(&chunk))
    }

    pub fn get_or_create_chunk(&self, at: ChunkPosition) -> (Arc<Mutex<Chunk>>, bool) {
        let entry = self.chunks.entry(at);
        let created = matches!(&entry, dashmap::Entry::Vacant(_));
        (
            Arc::clone(&entry.or_insert_with(|| {
                info!(?at, "chunk created");
                Arc::new(Mutex::new(Chunk::new(self.settings.chunk_size)))
            })),
            created,
        )
    pub fn get_or_create_chunk(&self, at: ChunkPosition) -> Arc<Mutex<Chunk>> {
        Arc::clone(&self.chunks.entry(at).or_insert_with(|| {
            info!(?at, "chunk created");
            Arc::new(Mutex::new(Chunk::new(self.settings.chunk_size)))
        }))
    }

    pub fn join(self: &Arc<Self>, session: Session) -> Result<SessionHandle, JoinError> {
@@ -306,9 +291,9 @@ impl Wall {
    pub fn event(&self, event: Event) {
        if let Some(mut session) = self.sessions.get_mut(&event.session_id) {
            match &event.kind {
                // Events that get broadcasted through the wall such that all clients get them.
                // No need to react in any way.
                EventKind::Join { .. } | EventKind::Leave | EventKind::Interact { .. } => (),
                // Join and Leave are events that only get broadcasted through the wall such that
                // all users get them. We don't need to react to them in any way.
                EventKind::Join { .. } | EventKind::Leave => (),

                EventKind::Cursor { position } => {
                    session.cursor = Some(*position);
@@ -316,7 +301,7 @@ impl Wall {

                // Drawing events are handled by the owner session's thread to make drawing as
                // parallel as possible.
                EventKind::Edit { .. } => {}
                EventKind::Interact { .. } => {}
            }
        }
@@ -1,4 +1,8 @@
use std::{collections::HashSet, sync::Arc, time::Duration};
use std::{
    collections::{HashMap, HashSet},
    sync::Arc,
    time::{Duration, Instant},
};

use serde::{Deserialize, Serialize};
use tokio::{
@@ -1,13 +1,13 @@
use std::{collections::HashMap, sync::Arc, time::Instant};
use std::{sync::Arc, time::Instant};

use dashmap::DashSet;
use eyre::Context;
use rayon::iter::{IntoParallelIterator, IntoParallelRefIterator, ParallelIterator};
use tiny_skia::{IntSize, Pixmap};
use tokio::sync::{mpsc, oneshot, Mutex};
use tokio::sync::{mpsc, oneshot};
use tracing::{error, info, instrument};

use super::{database::ChunkDataPair, Chunk, ChunkPosition, Database, Wall};
use super::{database::ChunkDataPair, ChunkPosition, Database, Wall};

/// Chunk image encoding, caching, and storage service.
pub struct ChunkImages {
@@ -28,13 +28,6 @@ pub struct NewlyEncoded {
    pub last_mod: Instant,
}

/// Chunks loaded from a load operation.
/// Returned from loads to keep reference counts above 1, such that the chunks are not garbage
/// collected while they're loaded and being operated on.
pub struct LoadedChunks {
    pub chunks: HashMap<ChunkPosition, Arc<Mutex<Chunk>>>,
}

enum Command {
    Encode {
        chunks: Vec<ChunkPosition>,
@@ -43,7 +36,7 @@ enum Command {

    Load {
        chunks: Vec<ChunkPosition>,
        reply: oneshot::Sender<eyre::Result<LoadedChunks>>,
        reply: oneshot::Sender<eyre::Result<()>>,
    },
}

@@ -75,7 +68,7 @@ impl ChunkImages {
        rx.await.ok().unwrap_or_default()
    }

    pub async fn load(&self, chunks: Vec<ChunkPosition>) -> eyre::Result<LoadedChunks> {
    pub async fn load(&self, chunks: Vec<ChunkPosition>) -> eyre::Result<()> {
        let (tx, rx) = oneshot::channel();
        self.commands_tx
            .send(Command::Load { chunks, reply: tx })
@@ -163,29 +156,20 @@ impl ChunkImageLoop {
        _ = reply.send(EncodeResult { data: all, new });
    }

    async fn load_inner(self: Arc<Self>, chunks: Vec<ChunkPosition>) -> eyre::Result<LoadedChunks> {
        // Reference all the chunks that we need, such that they're not garbage collected
        // during loading.
        let mut chunk_refs = HashMap::new();
        let mut positions_to_load = vec![];
        for &position in &chunks {
            let (chunk, created) = self.wall.get_or_create_chunk(position);
            chunk_refs.insert(position, chunk);

            if created {
                positions_to_load.push(position);
            }
    async fn load_inner(self: Arc<Self>, mut chunks: Vec<ChunkPosition>) -> eyre::Result<()> {
        // Skip already loaded chunks.
        chunks.retain(|&position| !self.wall.has_chunk(position));
        if chunks.is_empty() {
            return Ok(());
        }

        if positions_to_load.is_empty() {
            return Ok(LoadedChunks { chunks: chunk_refs });
        }
        info!(?chunks, "to load");

        info!(?positions_to_load, "to load");
        let chunks = self.db.read_chunks(chunks.clone()).await?;

        let to_load = self.db.read_chunks(positions_to_load.clone()).await?;
        let chunks2 = chunks.clone();
        let decoded = tokio::task::spawn_blocking(move || {
            to_load
            chunks2
                .par_iter()
                .flat_map(|ChunkDataPair { position, data }| {
                    webp::Decoder::new(data)
@@ -209,25 +193,30 @@ impl ChunkImageLoop {
        .await
        .context("failed to decode chunks from the database")?;

        let mut chunk_locks = Vec::with_capacity(decoded.len());
        for (position, _) in &decoded {
            let chunk_ref = &chunk_refs[position];
            chunk_locks.push(chunk_ref.lock().await);
        // I don't know yet if locking all the chunks is a good idea at this point.
        // I can imagine contended chunks having some trouble loading.
        let chunk_arcs: Vec<_> = decoded
            .iter()
            .map(|(position, _)| self.wall.get_or_create_chunk(*position))
            .collect();
        let mut chunk_refs = Vec::with_capacity(chunk_arcs.len());
        for arc in &chunk_arcs {
            chunk_refs.push(arc.lock().await);
        }

        info!(num = ?chunk_locks.len(), "replacing chunks' pixmaps");
        for ((_, pixmap), mut chunk) in decoded.into_iter().zip(chunk_locks) {
        info!(num = ?chunk_refs.len(), "replacing chunks' pixmaps");
        for ((_, pixmap), mut chunk) in decoded.into_iter().zip(chunk_refs) {
            chunk.pixmap = pixmap;
        }

        Ok(LoadedChunks { chunks: chunk_refs })
        Ok(())
    }

    #[instrument(skip(self, reply))]
    async fn load(
        self: Arc<Self>,
        chunks: Vec<ChunkPosition>,
        reply: oneshot::Sender<eyre::Result<LoadedChunks>>,
        reply: oneshot::Sender<eyre::Result<()>>,
    ) {
        _ = reply.send(self.load_inner(chunks).await);
    }
@@ -1,4 +1,4 @@
use std::{convert::identity, path::PathBuf, sync::Arc};
use std::{convert::identity, path::PathBuf, sync::Arc, time::Instant};

use eyre::Context;
use rusqlite::Connection;
@@ -88,8 +88,6 @@ pub fn start(settings: Settings) -> eyre::Result<Database> {

    info!("initial setup");

    let version: u32 = db.pragma_query_value(None, "user_version", |x| x.get(0))?;

    db.execute_batch(
        r#"
        PRAGMA application_id = 0x726B6757; -- rkgW
@@ -174,31 +172,10 @@ pub fn start(settings: Settings) -> eyre::Result<Database> {
        ),
    )?;

    // Migrations

    if version < 1 {
        info!("migrate v1: add max_edit_size");
        db.execute_batch(
            r#"
            PRAGMA user_version = 1;
            ALTER TABLE t_wall_settings ADD COLUMN max_edit_size INTEGER;
            "#,
        )?;
        db.execute(
            r#"
            UPDATE OR IGNORE t_wall_settings
            SET max_edit_size = ?;
            "#,
            (settings.default_wall_settings.max_edit_size,),
        )?;
    }

    // Set up access thread

    let wall_settings = db.query_row(
        r#"
        SELECT
            max_chunks, max_sessions, paint_area, chunk_size, max_edit_size
            max_chunks, max_sessions, paint_area, chunk_size
        FROM t_wall_settings;
        "#,
        (),
@@ -208,7 +185,6 @@ pub fn start(settings: Settings) -> eyre::Result<Database> {
            max_sessions: row.get(1)?,
            paint_area: row.get(2)?,
            chunk_size: row.get(3)?,
            max_edit_size: row.get(4)?,
        })
    },
    )?;
crates/rkgk/src/wall/render.rs (new file, 113 lines)
@@ -0,0 +1,113 @@
//! Implementation of a haku2 canvas based on tiny-skia.

use std::mem;

use tiny_skia::{
    BlendMode, Color, FillRule, LineCap, Paint, PathBuilder, Shader, Stroke, Transform,
};

use super::Chunk;

pub struct ChunkCanvas<'c> {
    chunk: &'c mut Chunk,
    transform: Transform,
    pb: PathBuilder,
}

impl<'c> ChunkCanvas<'c> {
    pub fn new(chunk: &'c mut Chunk) -> Self {
        Self {
            chunk,
            transform: Transform::identity(),
            pb: PathBuilder::new(),
        }
    }

    pub fn translated(mut self, x: f32, y: f32) -> Self {
        self.transform = self.transform.post_translate(x, y);
        self
    }
}

impl haku2::Canvas for ChunkCanvas<'_> {
    fn begin(&mut self) -> Result<(), haku2::RenderError> {
        self.pb.clear();
        Ok(())
    }

    fn line(&mut self, x1: f32, y1: f32, x2: f32, y2: f32) -> Result<(), haku2::RenderError> {
        self.pb.move_to(x1, y1);
        self.pb.line_to(x2, y2);
        Ok(())
    }

    fn rectangle(
        &mut self,
        x: f32,
        y: f32,
        width: f32,
        height: f32,
    ) -> Result<(), haku2::RenderError> {
        if let Some(rect) = tiny_skia::Rect::from_xywh(x, y, width, height) {
            self.pb.push_rect(rect);
        }
        Ok(())
    }

    fn circle(&mut self, x: f32, y: f32, r: f32) -> Result<(), haku2::RenderError> {
        self.pb.push_circle(x, y, r);
        Ok(())
    }

    fn fill(&mut self, r: u8, g: u8, b: u8, a: u8) -> Result<(), haku2::RenderError> {
        let pb = mem::take(&mut self.pb);
        if let Some(path) = pb.finish() {
            let paint = Paint {
                shader: Shader::SolidColor(Color::from_rgba8(r, g, b, a)),
                ..default_paint()
            };
            self.chunk
                .pixmap
                .fill_path(&path, &paint, FillRule::EvenOdd, self.transform, None);
        }
        Ok(())
    }

    fn stroke(
        &mut self,
        r: u8,
        g: u8,
        b: u8,
        a: u8,
        thickness: f32,
    ) -> Result<(), haku2::RenderError> {
        let pb = mem::take(&mut self.pb);
        if let Some(path) = pb.finish() {
            let paint = Paint {
                shader: Shader::SolidColor(Color::from_rgba8(r, g, b, a)),
                ..default_paint()
            };
            self.chunk.pixmap.stroke_path(
                &path,
                &paint,
                &Stroke {
                    width: thickness,
                    line_cap: LineCap::Round,
                    ..Default::default()
                },
                self.transform,
                None,
            );
        }
        Ok(())
    }
}

fn default_paint() -> Paint<'static> {
    Paint {
        shader: Shader::SolidColor(Color::BLACK),
        blend_mode: BlendMode::SourceOver,
        anti_alias: false,
        force_hq_pipeline: false,
    }
}
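For context, `ChunkCanvas` is the concrete canvas that `draw_to_chunks` in the session code above hands to the VM. A hedged sketch of that flow follows; it is not part of the changeset, and it assumes the caller already holds a locked `chunk` and a prepared `haku` wrapper, with chunk coordinates assumed to be `i32`.

// Sketch only: drawing one chunk through the haku2 Canvas trait.
fn draw_one(chunk: &mut Chunk, haku: &mut Haku, chunk_x: i32, chunk_y: i32, chunk_size: f32) {
    // Translate so that wall-space coordinates land inside this chunk's pixmap.
    let x = f32::floor(-chunk_x as f32 * chunk_size);
    let y = f32::floor(-chunk_y as f32 * chunk_size);
    let mut canvas = ChunkCanvas::new(chunk).translated(x, y);
    // The render depth limit of 256 mirrors the value used in draw_to_chunks.
    if let Err(e) = haku.render(&mut canvas, 256) {
        tracing::info!(chunk_x, chunk_y, "drawing failed: {e}");
    }
}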
|
@ -48,11 +48,6 @@ chunk_size = 168
|
|||
# can produce.
|
||||
paint_area = 504
|
||||
|
||||
# Maximum size of a single edit, in chunks.
|
||||
# You don't want to make this _too_ large, otherwise the client will try to allocate too many
|
||||
# chunks at once. Way more than WebAssembly's 4GiB address space can handle.
|
||||
max_edit_size = 256
|
||||
|
||||
[wall_broker.auto_save]
|
||||
|
||||
# How often should modified chunks be saved to the database.
|
||||
|
|
|
|||
|
|
@@ -318,7 +318,7 @@ export class BrushBox extends HTMLElement {
        if (id == null) {
            // Save a new preset.
            id = `user/${randomId()}`;
            console.info("saving new brush", id);
            console.log("saving new brush", id);
            this.userPresets.push({
                id,
                name,
@@ -327,7 +327,7 @@ export class BrushBox extends HTMLElement {
        } else {
            // Overwrite an existing one.
            let preset = this.userPresets.find((p) => p.id == id);
            console.info("overwriting existing brush", preset);
            console.log("overwriting existing brush", preset);
            preset.code = code;
        }
        this.saveUserPresets();
@ -1,6 +1,6 @@
|
|||
import { listen, Pool } from "rkgk/framework.js";
|
||||
import { listen } from "rkgk/framework.js";
|
||||
import { Viewport } from "rkgk/viewport.js";
|
||||
import { Wall, chunkKey } from "rkgk/wall.js";
|
||||
import { Wall } from "rkgk/wall.js";
|
||||
|
||||
class CanvasRenderer extends HTMLElement {
|
||||
viewport = new Viewport();
|
||||
|
|
@@ -196,8 +196,7 @@ class CanvasRenderer extends HTMLElement {

console.debug("initialized atlas allocator", this.atlasAllocator);

this.batches = [];
this.batchPool = new Pool();
this.chunksThisFrame = new Map();

console.debug("GL error state", this.gl.getError());

@@ -295,55 +294,31 @@ class CanvasRenderer extends HTMLElement {

this.#collectChunksThisFrame();

for (let batch of this.batches) {
for (let [i, chunks] of batch) {
for (let [i, chunks] of this.chunksThisFrame) {
let atlas = this.atlasAllocator.atlases[i];
this.gl.bindTexture(this.gl.TEXTURE_2D, atlas.id);

this.#resetRectBuffer();
for (let chunk of chunks) {
let { i, allocation } = this.getChunkAllocation(chunk.x, chunk.y);
let atlas = this.atlasAllocator.atlases[i];
this.gl.bindTexture(this.gl.TEXTURE_2D, atlas.id);

this.#resetRectBuffer();
for (let chunk of chunks) {
let { i, allocation } = this.getChunkAllocation(
chunk.layerId,
chunk.x,
chunk.y,
);
let atlas = this.atlasAllocator.atlases[i];
this.#pushRect(
chunk.x * this.wall.chunkSize,
chunk.y * this.wall.chunkSize,
this.wall.chunkSize,
this.wall.chunkSize,
(allocation.x * atlas.chunkSize) / atlas.textureSize,
(allocation.y * atlas.chunkSize) / atlas.textureSize,
atlas.chunkSize / atlas.textureSize,
atlas.chunkSize / atlas.textureSize,
);
}
this.#drawRects();
this.#pushRect(
chunk.x * this.wall.chunkSize,
chunk.y * this.wall.chunkSize,
this.wall.chunkSize,
this.wall.chunkSize,
(allocation.x * atlas.chunkSize) / atlas.textureSize,
(allocation.y * atlas.chunkSize) / atlas.textureSize,
atlas.chunkSize / atlas.textureSize,
atlas.chunkSize / atlas.textureSize,
);
}
this.#drawRects();
}

// TODO: This is a nice debug view.
// There should be a switch to it somewhere in the app.
/*
let x = 0;
let y = 0;
for (let atlas of this.atlasAllocator.atlases) {
this.#resetRectBuffer();
this.gl.bindTexture(this.gl.TEXTURE_2D, atlas.id);
this.#pushRect(x, y, atlas.textureSize, atlas.textureSize, 0, 0, 1, 1);
this.#drawRects();
if (x > atlas.textureSize * 16) {
y += atlas.textureSize;
x = 0;
}
x += atlas.textureSize;
}
*/
}

getChunkAllocation(layerId, chunkX, chunkY) {
let key = `${layerId}/${chunkKey(chunkX, chunkY)}`;
getChunkAllocation(chunkX, chunkY) {
let key = Wall.chunkKey(chunkX, chunkY);
if (this.chunkAllocations.has(key)) {
return this.chunkAllocations.get(key);
} else {
@@ -353,54 +328,36 @@ class CanvasRenderer extends HTMLElement {
}
}

deallocateChunks(layer) {
for (let chunkKey of layer.chunks.keys()) {
let key = `${layer.id}/${chunkKey}`;
if (this.chunkAllocations.has(key)) {
let allocation = this.chunkAllocations.get(key);
this.atlasAllocator.dealloc(allocation);
this.chunkAllocations.delete(key);
}
}
}

#collectChunksThisFrame() {
for (let batch of this.batches) {
batch.clear();
this.batchPool.free(batch);
}
this.batches.splice(0, this.batches.length);
// NOTE: Not optimal that we don't preserve the arrays anyhow; it would be better if we
// preserved the allocations.
this.chunksThisFrame.clear();

let visibleRect = this.viewport.getVisibleRect(this.getWindowSize());
let left = Math.floor(visibleRect.x / this.wall.chunkSize);
let top = Math.floor(visibleRect.y / this.wall.chunkSize);
let right = Math.ceil((visibleRect.x + visibleRect.width) / this.wall.chunkSize);
let bottom = Math.ceil((visibleRect.y + visibleRect.height) / this.wall.chunkSize);

for (let layer of this.wall.layers) {
let batch = this.batchPool.alloc(Map);
for (let chunkY = top; chunkY < bottom; ++chunkY) {
for (let chunkX = left; chunkX < right; ++chunkX) {
let chunk = layer.getChunk(chunkX, chunkY);
if (chunk != null) {
if (chunk.renderDirty) {
this.#updateChunkTexture(layer, chunkX, chunkY);
chunk.renderDirty = false;
}

let allocation = this.getChunkAllocation(layer.id, chunkX, chunkY);

let array = batch.get(allocation.i);
if (array == null) {
array = [];
batch.set(allocation.i, array);
}

array.push({ layerId: layer.id, x: chunkX, y: chunkY });
for (let chunkY = top; chunkY < bottom; ++chunkY) {
for (let chunkX = left; chunkX < right; ++chunkX) {
let chunk = this.wall.getChunk(chunkX, chunkY);
if (chunk != null) {
if (chunk.renderDirty) {
this.#updateChunkTexture(chunkX, chunkY);
chunk.renderDirty = false;
}

let allocation = this.getChunkAllocation(chunkX, chunkY);

let array = this.chunksThisFrame.get(allocation.i);
if (array == null) {
array = [];
this.chunksThisFrame.set(allocation.i, array);
}

array.push({ x: chunkX, y: chunkY });
}
}
this.batches.push(batch);
}
}
@@ -438,9 +395,9 @@ class CanvasRenderer extends HTMLElement {
this.gl.drawArraysInstanced(this.gl.TRIANGLES, 0, 6, this.uboRectsNum);
}

#updateChunkTexture(layer, chunkX, chunkY) {
let allocation = this.getChunkAllocation(layer.id, chunkX, chunkY);
let chunk = layer.getChunk(chunkX, chunkY);
#updateChunkTexture(chunkX, chunkY) {
let allocation = this.getChunkAllocation(chunkX, chunkY);
let chunk = this.wall.getChunk(chunkX, chunkY);
this.atlasAllocator.upload(this.gl, allocation, chunk.pixmap);
}

@@ -517,10 +474,6 @@ class CanvasRenderer extends HTMLElement {
}
}
}

commitInteraction() {
this.dispatchEvent(new Event(".commitInteraction"));
}
}

customElements.define("rkgk-canvas-renderer", CanvasRenderer);

@@ -560,7 +513,6 @@ class InteractEvent extends Event {

if (event.type == "mouseup" && event.button == 0) {
// Break the loop.
this.canvasRenderer.commitInteraction();
return;
}
})();

@@ -624,10 +576,6 @@ class Atlas {
return this.free.pop();
}

dealloc(xy) {
this.free.push(xy);
}

upload(gl, { x, y }, pixmap) {
gl.bindTexture(gl.TEXTURE_2D, this.id);
gl.texSubImage2D(

@@ -673,11 +621,6 @@ class AtlasAllocator {
return { i, allocation };
}

dealloc({ i, allocation }) {
let atlas = this.atlases[i];
atlas.dealloc(allocation);
}

upload(gl, { i, allocation }, pixmap) {
this.atlases[i].upload(gl, allocation, pixmap);
}
@@ -54,24 +54,6 @@ export function debounce(time, fn) {
};
}

export class Pool {
constructor() {
this.pool = [];
}

alloc(ctor) {
if (this.pool.length > 0) {
return this.pool.pop();
} else {
return new ctor();
}
}

free(obj) {
this.pool.push(obj);
}
}

export class SaveData {
constructor(prefix) {
this.prefix = `rkgk.${prefix}`;
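The `Pool` removed above is a minimal object pool: `alloc(ctor)` hands back a recycled instance when one is available and otherwise constructs a new one, while `free(obj)` returns an instance for reuse. A sketch of how the `CanvasRenderer` hunks earlier in this diff appear to use it for per-frame batch `Map`s (illustrative only, not code from either commit):

    import { Pool } from "rkgk/framework.js";

    let batchPool = new Pool();

    // Reuse Map instances across frames instead of allocating a fresh one per frame.
    let batch = batchPool.alloc(Map); // pops a recycled Map, or calls `new Map()`
    batch.set(0, [{ x: 1, y: 2 }]);

    // ...after the frame has been drawn...
    batch.clear(); // the pool does not reset objects, so clear before recycling
    batchPool.free(batch); // the next alloc(Map) may return this same instance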
@@ -778,8 +778,6 @@ rkgk-connection-status {
/* Context menu */

rkgk-context-menu-space {
    z-index: var(--z-modal);

    pointer-events: none;

    & > rkgk-context-menu {

@@ -192,7 +192,7 @@ function readUrl(urlString) {
}
});

let sendViewportUpdate = debounce(updateInterval / 4, () => {
let sendViewportUpdate = debounce(updateInterval, () => {
let visibleRect = canvasRenderer.getVisibleChunkRect();
session.sendViewport(visibleRect);
});

@@ -213,12 +213,7 @@ function readUrl(urlString) {
let blob = chunkData.slice(info.offset, info.offset + info.length, "image/webp");
updatePromises.push(
createImageBitmap(blob).then((bitmap) => {
let chunk = wall.mainLayer.getOrCreateChunk(
info.position.x,
info.position.y,
);
if (chunk == null) return;

let chunk = wall.getOrCreateChunk(info.position.x, info.position.y);
chunk.ctx.globalCompositeOperation = "copy";
chunk.ctx.drawImage(bitmap, 0, 0);
chunk.syncToPixmap();

@@ -235,7 +230,7 @@ function readUrl(urlString) {
}
});

let reportCursor = debounce(updateInterval, (x, y) => session.sendCursor(x, y));
let reportCursor = debounce(updateInterval, (x, y) => session.sendCursor(x, y), console.log);
canvasRenderer.addEventListener(".cursor", async (event) => {
reportCursor(event.x, event.y);
});
@@ -253,38 +248,12 @@ function readUrl(urlString) {
canvasRenderer.addEventListener(".interact", async (event) => {
if (!currentUser.haku.ok) return;

let layer = currentUser.getScratchLayer(wall);
let result = await currentUser.haku.evalBrush(
selfController(interactionQueue, wall, layer, event),
selfController(interactionQueue, wall, event),
);
brushEditor.renderHakuResult(result);
});

canvasRenderer.addEventListener(".commitInteraction", async () => {
let scratchLayer = currentUser.commitScratchLayer(wall);
if (scratchLayer == null) return;

canvasRenderer.deallocateChunks(scratchLayer);
let edits = await scratchLayer.toEdits();
scratchLayer.destroy();

let editRecords = [];
let dataParts = [];
let cursor = 0;
for (let edit of edits) {
editRecords.push({
chunk: edit.chunk,
dataType: edit.data.type,
dataOffset: cursor,
dataLength: edit.data.size,
});
dataParts.push(edit.data);
cursor += edit.data.size;
}

session.sendEdit(editRecords, new Blob(dataParts));
});

canvasRenderer.addEventListener(".viewportUpdate", () => reticleRenderer.render());
canvasRenderer.addEventListener(".viewportUpdateEnd", () =>
updateUrl(session, canvasRenderer.viewport),
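The `.commitInteraction` handler above packs all edited chunks into a single binary payload: each record carries a `dataOffset` and `dataLength` into one concatenated `Blob`, while the JSON metadata travels separately through `sendEdit`. A sketch of how a receiver could slice that payload back apart using the same records (illustrative; the server-side handling is not part of this diff):

    // `editRecords` and `data` mirror the two arguments passed to session.sendEdit() above.
    async function splitEditPayload(editRecords, data) {
        let chunks = [];
        for (let record of editRecords) {
            // Each record indexes a contiguous slice of the concatenated blob.
            let bytes = data.slice(
                record.dataOffset,
                record.dataOffset + record.dataLength,
                record.dataType, // e.g. "image/png" from Layer.toEdits()
            );
            chunks.push({
                position: record.chunk, // { x, y } in chunk coordinates
                bitmap: await createImageBitmap(bytes),
            });
        }
        return chunks;
    }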
@@ -1,6 +1,5 @@
import { ContKind, Haku } from "rkgk/haku.js";
import { renderToChunksInArea, dotterRenderArea } from "rkgk/painter.js";
import { Layer } from "rkgk/wall.js";

export class User {
nickname = "";

@@ -10,22 +9,19 @@ export class User {
isBrushOk = false;
simulation = null;

scratchLayer = null;

constructor(wallInfo, nickname) {
this.nickname = nickname;
this.haku = new Haku(wallInfo.hakuLimits);
}

destroy() {
console.info("destroying user", this.nickname);
this.haku.destroy();
}

setBrush(brush) {
console.groupCollapsed("setBrush", this.nickname);
let compileResult = this.haku.setBrush(brush);
console.info("compiling brush complete", compileResult);
console.log("compiling brush complete", compileResult);
console.groupEnd();

this.isBrushOk = compileResult.status == "ok";

@@ -36,14 +32,14 @@ export class User {
renderBrushToChunks(wall, x, y) {
console.groupCollapsed("renderBrushToChunks", this.nickname);
let result = this.painter.renderBrushToWall(this.haku, x, y, wall);
console.info("rendering brush to chunks complete");
console.log("rendering brush to chunks complete");
console.groupEnd();

return result;
}

simulate(wall, interactions) {
console.group("simulate", this.nickname);
console.group("simulate");
for (let interaction of interactions) {
if (interaction.kind == "setBrush") {
this.simulation = null;

@@ -52,7 +48,7 @@ export class User {

if (this.isBrushOk) {
if (this.simulation == null) {
console.info("no simulation -- beginning brush");
console.log("no simulation -- beginning brush");
this.simulation = { renderArea: { left: 0, top: 0, right: 0, bottom: 0 } };
this.haku.beginBrush();
}

@@ -71,13 +67,13 @@ export class User {

if (interaction.kind == "scribble" && this.#expectContKind(ContKind.Scribble)) {
renderToChunksInArea(
this.getScratchLayer(wall),
wall,
this.simulation.renderArea,
(pixmap, translationX, translationY) => {
return this.haku.contScribble(pixmap, translationX, translationY);
},
);
console.info("ended simulation");
console.log("ended simulation");
this.simulation = null;
}
}
@@ -104,34 +100,6 @@ export class User {
memoryMax: wallInfo.hakuLimits.memory,
};
}

getScratchLayer(wall) {
if (this.scratchLayer == null) {
this.scratchLayer = wall.addLayer(
new Layer({
name: `scratch ${this.nickname}`,
chunkSize: wall.chunkSize,
chunkLimit: wall.maxEditSize,
}),
);
}
return this.scratchLayer;
}

// Returns the scratch layer committed to the wall, so that the caller may do additional
// processing with the completed layer (i.e. send to the server.)
// The layer has to be .destroy()ed once you're done working with it.
commitScratchLayer(wall) {
if (this.scratchLayer != null) {
wall.mainLayer.compositeAlpha(this.scratchLayer);
wall.removeLayer(this.scratchLayer);
let scratchLayer = this.scratchLayer;
this.scratchLayer = null;
return scratchLayer;
} else {
console.error("commitScratchLayer without an active scratch layer", this.nickname);
}
}
}

export class OnlineUsers extends EventTarget {
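The comment on `commitScratchLayer` above spells out a small ownership contract for the scratch layer. Condensed into one place, the flow looks roughly like this (reconstructed from the `.commitInteraction` handler earlier in this diff; the wrapper function is hypothetical):

    async function commitCurrentStroke(currentUser, wall) {
        // While drawing, strokes render into a per-user scratch layer that is
        // lazily created and capped at wall.maxEditSize chunks.
        // On commit, the scratch layer is composited onto the main layer and
        // returned to the caller, who now owns it.
        let scratchLayer = currentUser.commitScratchLayer(wall);
        if (scratchLayer == null) return;

        // The caller may keep using the committed layer, e.g. to encode its
        // chunks before sending them to the server.
        let edits = await scratchLayer.toEdits();
        console.debug("would send", edits.length, "chunk edits");

        // Once done, the layer must be destroyed to free its pixmaps.
        scratchLayer.destroy();
    }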
@@ -1,20 +1,10 @@
import { listen } from "rkgk/framework.js";

function numChunksInRectangle(rect, chunkSize) {
let leftChunk = Math.floor(rect.left / chunkSize);
let topChunk = Math.floor(rect.top / chunkSize);
let rightChunk = Math.ceil(rect.right / chunkSize);
let bottomChunk = Math.ceil(rect.bottom / chunkSize);
let numX = rightChunk - leftChunk;
let numY = bottomChunk - topChunk;
return numX * numY;
}

function* chunksInRectangle(rect, chunkSize) {
let leftChunk = Math.floor(rect.left / chunkSize);
let topChunk = Math.floor(rect.top / chunkSize);
let rightChunk = Math.ceil(rect.right / chunkSize);
let bottomChunk = Math.ceil(rect.bottom / chunkSize);
function* chunksInRectangle(left, top, right, bottom, chunkSize) {
let leftChunk = Math.floor(left / chunkSize);
let topChunk = Math.floor(top / chunkSize);
let rightChunk = Math.ceil(right / chunkSize);
let bottomChunk = Math.ceil(bottom / chunkSize);
for (let chunkY = topChunk; chunkY < bottomChunk; ++chunkY) {
for (let chunkX = leftChunk; chunkX < rightChunk; ++chunkX) {
yield [chunkX, chunkY];

@@ -22,13 +12,17 @@ function* chunksInRectangle(rect, chunkSize) {
}
}

export function renderToChunksInArea(layer, renderArea, renderToPixmap) {
for (let [chunkX, chunkY] of chunksInRectangle(renderArea, layer.chunkSize)) {
let chunk = layer.getOrCreateChunk(chunkX, chunkY);
if (chunk == null) continue;

let translationX = -chunkX * layer.chunkSize;
let translationY = -chunkY * layer.chunkSize;
export function renderToChunksInArea(wall, renderArea, renderToPixmap) {
for (let [chunkX, chunkY] of chunksInRectangle(
renderArea.left,
renderArea.top,
renderArea.right,
renderArea.bottom,
wall.chunkSize,
)) {
let chunk = wall.getOrCreateChunk(chunkX, chunkY);
let translationX = -chunkX * wall.chunkSize;
let translationY = -chunkY * wall.chunkSize;
let result = renderToPixmap(chunk.pixmap, translationX, translationY);
chunk.markModified();
if (result.status != "ok") return result;

@@ -47,19 +41,13 @@ export function dotterRenderArea(wall, dotter) {
};
}

export function selfController(interactionQueue, wall, layer, event) {
export function selfController(interactionQueue, wall, event) {
let renderArea = null;
return {
async runScribble(renderToPixmap) {
interactionQueue.push({ kind: "scribble" });
if (renderArea != null) {
let numChunksToRender = numChunksInRectangle(renderArea, layer.chunkSize);
let result = renderToChunksInArea(layer, renderArea, renderToPixmap);
if (!layer.canFitNewChunks(numChunksToRender)) {
console.debug("too many chunks rendered; committing interaction early");
event.earlyCommitInteraction();
}
return result;
return renderToChunksInArea(wall, renderArea, renderToPixmap);
} else {
console.debug("render area is empty, nothing will be rendered");
}
@@ -266,17 +266,6 @@ class Session extends EventTarget {
});
}

sendEdit(edits, data) {
this.#sendJson({
request: "wall",
wallEvent: {
event: "edit",
edits,
},
});
this.ws.send(data);
}

sendInteraction(interactions) {
this.#sendJson({
request: "wall",

122
static/wall.js
@@ -9,10 +9,6 @@ export class Chunk {
this.renderDirty = false;
}

destroy() {
this.pixmap.destroy();
}

syncFromPixmap() {
this.ctx.putImageData(this.pixmap.getImageData(), 0, 0);
}

@@ -27,117 +23,31 @@ export class Chunk {
}
}

let layerIdCounter = 0;

export class Layer {
chunks = new Map();
id = layerIdCounter++;

constructor({ name, chunkSize, chunkLimit }) {
this.name = name;
this.chunkSize = chunkSize;
this.chunkLimit = chunkLimit;

console.info("created layer", this.id, this.name);
}

destroy() {
for (let { chunk } of this.chunks.values()) {
chunk.destroy();
}
}

getChunk(x, y) {
return this.chunks.get(chunkKey(x, y))?.chunk;
}

getOrCreateChunk(x, y) {
let key = chunkKey(x, y);
if (this.chunks.has(key)) {
return this.chunks.get(key)?.chunk;
} else {
if (this.chunkLimit != null && this.chunks.size >= this.chunkLimit) return null;

let chunk = new Chunk(this.chunkSize);
this.chunks.set(key, { x, y, chunk });
return chunk;
}
}

compositeAlpha(src) {
for (let { x, y, chunk: srcChunk } of src.chunks.values()) {
srcChunk.syncFromPixmap();
let dstChunk = this.getOrCreateChunk(x, y);
if (dstChunk == null) continue;

dstChunk.ctx.globalCompositeOperation = "source-over";
dstChunk.ctx.drawImage(srcChunk.canvas, 0, 0);
dstChunk.syncToPixmap();
dstChunk.markModified();
}
}

async toEdits() {
let edits = [];

let start = performance.now();

for (let { x, y, chunk } of this.chunks.values()) {
edits.push({
chunk: { x, y },
data: chunk.canvas.convertToBlob({ type: "image/png" }),
});
}

for (let edit of edits) {
edit.data = await edit.data;
}

let end = performance.now();
console.debug("toEdits done", end - start);

return edits;
}
}

export function chunkKey(x, y) {
return `${x},${y}`;
}

export class Wall {
layers = []; // do not modify directly; only read
#layersById = new Map();
#chunks = new Map();

constructor(wallInfo) {
this.chunkSize = wallInfo.chunkSize;
this.paintArea = wallInfo.paintArea;
this.maxEditSize = wallInfo.maxEditSize;
this.onlineUsers = new OnlineUsers(wallInfo);

this.mainLayer = new Layer({ name: "main", chunkSize: this.chunkSize });
this.addLayer(this.mainLayer);
}

addLayer(layer) {
if (!this.#layersById.get(layer.id)) {
this.layers.push(layer);
this.#layersById.set(layer.id, layer);
static chunkKey(x, y) {
return `(${x},${y})`;
}

getChunk(x, y) {
return this.#chunks.get(Wall.chunkKey(x, y));
}

getOrCreateChunk(x, y) {
let key = Wall.chunkKey(x, y);
if (this.#chunks.has(key)) {
return this.#chunks.get(key);
} else {
console.warn("attempt to add layer more than once", layer);
let chunk = new Chunk(this.chunkSize);
this.#chunks.set(key, chunk);
return chunk;
}
return layer;
}

removeLayer(layer) {
if (this.#layersById.delete(layer.id)) {
let index = this.layers.findIndex((x) => x == layer);
this.layers.splice(index, 1);
} else {
console.warn("attempt to remove layer more than once", layer);
}
}

getLayerById(id) {
return this.#layersById.get(id);
}
}