add a lot more logging to both client and server to analyze #81

> #81 Compiling a brush doesn't seem to finish correctly sometimes, which causes loss of data
> Sometimes it seems like the client state can get desynced (server render thread dies due to a panic?) and then the server starts dropping all requests for drawing the brush.
> These panics should never happen of course, but we need better logging first to determine the exact cause.
liquidex 2024-09-02 20:14:20 +02:00
parent 6cfbc4f05f
commit 26d3b8ed8a
5 changed files with 86 additions and 11 deletions
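For context on the server-side approach: the diffs below use the tracing crate, entering an info_span! on the render thread and adding #[instrument] attributes so that every log line can be tied back to a wall and session. Here is a minimal sketch of that span pattern, with a hypothetical wall_id value and a plain fmt subscriber assumed (rkgk's actual subscriber setup is not part of this commit):

    use tracing::{debug, info_span, Level};

    // Spans don't follow values across std::thread::spawn on their own,
    // so a dedicated worker thread enters one explicitly. Every log line
    // emitted while the returned guard is alive carries the span's fields.
    fn render_thread(wall_id: u64) {
        let _span = info_span!("render_thread", %wall_id).entered();
        debug!("rendering"); // prints roughly: render_thread{wall_id=42}: rendering
    }

    fn main() {
        // Any subscriber works; a plain fmt subscriber is assumed here.
        tracing_subscriber::fmt()
            .with_max_level(Level::DEBUG)
            .init();

        std::thread::spawn(|| render_thread(42)).join().unwrap();
    }

The .entered() guard matters because the render thread is a plain OS thread, outside any async context where spans would otherwise be attached per task.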


@@ -337,11 +337,20 @@ unsafe extern "C" fn haku_compile_brush(
         return StatusCode::SourceCodeTooLong;
     };
 
+    debug!("compiling: lexing");
+
     let mut lexer = Lexer::new(Lexis::new(instance.limits.max_tokens), code);
     if lex(&mut lexer).is_err() {
+        info!("compiling failed: too many tokens");
         return StatusCode::TooManyTokens;
     };
 
+    debug!(
+        "compiling: lexed successfully to {} tokens",
+        lexer.lexis.len()
+    );
+
+    debug!("compiling: parsing");
+
     let mut ast = Ast::new(instance.limits.ast_capacity);
     let mut parser = Parser::new(
         &lexer.lexis,
@@ -352,10 +361,21 @@ unsafe extern "C" fn haku_compile_brush(
     parser::toplevel(&mut parser);
     let (root, mut parser_diagnostics) = match parser.into_ast(&mut ast) {
         Ok((r, d)) => (r, d),
-        Err(IntoAstError::NodeAlloc(_)) => return StatusCode::TooManyAstNodes,
-        Err(IntoAstError::UnbalancedEvents) => return StatusCode::ParserUnbalancedEvents,
+        Err(IntoAstError::NodeAlloc(_)) => {
+            info!("compiling failed: too many AST nodes");
+            return StatusCode::TooManyAstNodes;
+        }
+        Err(IntoAstError::UnbalancedEvents) => {
+            info!("compiling failed: parser produced unbalanced events");
+            return StatusCode::ParserUnbalancedEvents;
+        }
     };
 
+    debug!(
+        "compiling: parsed successfully into {} AST nodes",
+        ast.len()
+    );
+
     let src = Source {
         code,
         ast: &ast,
@@ -366,7 +386,10 @@ unsafe extern "C" fn haku_compile_brush(
     let mut compiler = Compiler::new(&mut instance.defs, &mut chunk);
     if let Err(error) = compile_expr(&mut compiler, &src, root) {
         match error {
-            CompileError::Emit => return StatusCode::ChunkTooBig,
+            CompileError::Emit => {
+                info!("compiling failed: chunk overflowed while emitting code");
+                return StatusCode::ChunkTooBig;
+            }
         }
     }
     let closure_spec = compiler.closure_spec();
@@ -376,9 +399,16 @@ unsafe extern "C" fn haku_compile_brush(
     diagnostics.append(&mut compiler.diagnostics);
     if !diagnostics.is_empty() {
         brush.diagnostics = diagnostics;
+        debug!("compiling failed: diagnostics were emitted");
         return StatusCode::DiagnosticsEmitted;
     }
 
+    debug!(
+        "compiling: chunk has {} bytes of bytecode",
+        chunk.bytecode.len()
+    );
+    debug!("compiling: {closure_spec:?}");
+
     let chunk_id = match instance.system.add_chunk(chunk) {
         Ok(chunk_id) => chunk_id,
         Err(_) => return StatusCode::TooManyChunks,


@@ -74,6 +74,14 @@ impl Ast {
         ast
     }
 
+    pub fn len(&self) -> usize {
+        self.kinds.len()
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+
     pub fn alloc(&mut self, kind: NodeKind, span: Span) -> Result<NodeId, NodeAllocError> {
         if self.kinds.len() >= self.kinds.capacity() {
             return Err(NodeAllocError);


@@ -21,7 +21,7 @@ use tokio::{
     select,
     sync::{mpsc, oneshot},
 };
-use tracing::{error, instrument};
+use tracing::{error, info, info_span, instrument};
 
 use crate::{
     haku::{Haku, Limits},
@@ -29,7 +29,7 @@ use crate::{
     schema::Vec2,
     wall::{
         self, auto_save::AutoSave, chunk_images::ChunkImages, chunk_iterator::ChunkIterator,
-        database::ChunkDataPair, ChunkPosition, JoinError, SessionHandle, UserInit, Wall,
+        database::ChunkDataPair, ChunkPosition, JoinError, SessionHandle, UserInit, Wall, WallId,
     },
 };
 
@@ -190,6 +190,7 @@ async fn fallible_websocket(api: Arc<Api>, ws: &mut WebSocket) -> eyre::Result<()> {
     // to remind us about unexpected nulls.
 
     SessionLoop::start(
+        wall_id,
         open_wall.wall,
         open_wall.chunk_images,
         open_wall.auto_save,
@@ -205,6 +206,7 @@ async fn fallible_websocket(api: Arc<Api>, ws: &mut WebSocket) -> eyre::Result<()> {
 }
 
 struct SessionLoop {
+    wall_id: WallId,
     wall: Arc<Wall>,
     chunk_images: Arc<ChunkImages>,
     auto_save: Arc<AutoSave>,
@@ -230,6 +232,7 @@ enum RenderCommand {
 
 impl SessionLoop {
     async fn start(
+        wall_id: WallId,
         wall: Arc<Wall>,
         chunk_images: Arc<ChunkImages>,
         auto_save: Arc<AutoSave>,
@@ -253,11 +256,17 @@ impl SessionLoop {
             .name(String::from("haku render thread"))
             .spawn({
                 let wall = Arc::clone(&wall);
-                move || Self::render_thread(wall, limits, render_commands_rx)
+                move || {
+                    let _span =
+                        info_span!("render_thread", %wall_id, session_id = ?handle.session_id)
+                            .entered();
+                    Self::render_thread(wall, limits, render_commands_rx)
+                }
             })
             .context("could not spawn render thread")?;
 
         Ok(Self {
+            wall_id,
             wall,
             chunk_images,
             auto_save,
@@ -269,6 +278,7 @@ impl SessionLoop {
         })
     }
 
+    #[instrument(skip(self, ws), fields(wall_id = %self.wall_id, session_id = ?self.handle.session_id))]
     async fn event_loop(&mut self, ws: &mut WebSocket) -> eyre::Result<()> {
         loop {
             select! {
@@ -317,11 +327,19 @@ impl SessionLoop {
                 // Theoretically this will yield much better responsiveness, but it _will_
                 // result in some visual glitches if we're getting bottlenecked.
                 let (done_tx, done_rx) = oneshot::channel();
-                _ = self.render_commands_tx.try_send(RenderCommand::Plot {
-                    points: points.clone(),
-                    done: done_tx,
-                });
+                let send_result =
+                    self.render_commands_tx.try_send(RenderCommand::Plot {
+                        points: points.clone(),
+                        done: done_tx,
+                    });
+
+                if send_result.is_err() {
+                    info!(
+                        ?points,
+                        "render thread is overloaded, dropping request to draw points"
+                    );
+                }
 
                 let auto_save = Arc::clone(&self.auto_save);
                 tokio::spawn(async move {
                     _ = done_rx.await;
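The comment in the hunk above spells out the backpressure trade-off: plot commands go through a bounded channel, and try_send drops a command instead of stalling the websocket event loop when the render thread falls behind. The new info! makes those drops visible rather than silent. Here is a minimal sketch of that drop-and-log pattern, with a hypothetical RenderCommand shape and a deliberately slow consumer (tokio's bounded mpsc, as in the diff):

    use tokio::sync::{mpsc, oneshot};
    use tracing::info;

    enum RenderCommand {
        Plot {
            points: Vec<(f32, f32)>,
            done: oneshot::Sender<()>,
        },
    }

    #[tokio::main]
    async fn main() {
        tracing_subscriber::fmt().init();

        // Tiny capacity so a slow consumer fills the queue quickly.
        let (tx, mut rx) = mpsc::channel::<RenderCommand>(1);

        // Deliberately slow stand-in for the render thread.
        tokio::spawn(async move {
            while let Some(RenderCommand::Plot { points, done }) = rx.recv().await {
                tokio::time::sleep(std::time::Duration::from_millis(100)).await;
                info!(n = points.len(), "plotted");
                _ = done.send(());
            }
        });

        for i in 0..10 {
            let (done_tx, _done_rx) = oneshot::channel();
            let points = vec![(i as f32, 0.0)];
            // try_send never waits: a full queue returns Err immediately,
            // so the request is dropped, but it now leaves a log line.
            let sent = tx.try_send(RenderCommand::Plot {
                points: points.clone(),
                done: done_tx,
            });
            if sent.is_err() {
                info!(?points, "render thread is overloaded, dropping request to draw points");
            }
        }
    }

Using try_send instead of an awaited send is a deliberate choice: the event loop stays responsive under load, at the cost of dropped strokes, which is exactly the symptom issue #81 describes and which this logging is meant to catch.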


@@ -18,6 +18,7 @@ use haku::{
     vm::{Vm, VmImage, VmLimits},
 };
 use serde::{Deserialize, Serialize};
+use tracing::{info, instrument, Level};
 
 use crate::schema::Vec2;
 
@@ -91,7 +92,10 @@ impl Haku {
         self.defs.restore_image(&self.defs_image);
     }
 
+    #[instrument(skip(self, code), err)]
     pub fn set_brush(&mut self, code: &str) -> eyre::Result<()> {
+        info!(?code);
+
         self.reset();
 
         let code = SourceCode::limited_len(code, self.limits.max_source_code_len)
@@ -127,15 +131,19 @@
             || !parser_diagnostics.is_empty()
             || !compiler.diagnostics.is_empty()
         {
+            info!(?lexer.diagnostics, ?parser_diagnostics, ?compiler.diagnostics, "diagnostics were emitted");
             bail!("diagnostics were emitted");
         }
 
         let chunk_id = self.system.add_chunk(chunk).context("too many chunks")?;
         self.brush = Some((chunk_id, closure_spec));
 
+        info!("brush set successfully");
+
         Ok(())
     }
 
+    #[instrument(skip(self), err(level = Level::INFO))]
     pub fn eval_brush(&mut self) -> eyre::Result<Value> {
         let (chunk_id, closure_spec) = self
             .brush
@@ -156,6 +164,7 @@ impl Haku {
         Ok(scribble)
     }
 
+    #[instrument(skip(self, pixmap, value, translation), err(level = Level::INFO))]
     pub fn render_value(
         &self,
         pixmap: &mut Pixmap,
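One detail worth noting in the hunks above is err(level = Level::INFO): by default, #[instrument(err)] records a function's Err return at ERROR level, but a brush that fails to compile or evaluate is ordinary user input rather than a server fault, so these events are downgraded to INFO. A minimal sketch with a hypothetical function (tracing and eyre, as used by the server):

    use tracing::{instrument, Level};

    // `err` logs the Err value (via its Display impl) whenever the
    // function returns one; `level = Level::INFO` keeps expected
    // failures out of the ERROR logs. Non-skipped arguments such as
    // session_id are recorded as span fields automatically.
    #[instrument(skip(code), err(level = Level::INFO))]
    fn set_brush(session_id: u32, code: &str) -> eyre::Result<()> {
        eyre::bail!("diagnostics were emitted in {} bytes of source", code.len())
    }

    fn main() {
        tracing_subscriber::fmt().init();
        // Logs roughly: INFO set_brush{session_id=7}: error=diagnostics were emitted in 9 bytes of source
        let _ = set_brush(7, "[1, 2, 3]");
    }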


@@ -20,13 +20,23 @@ export class User {
     }
 
     setBrush(brush) {
+        console.group("setBrush", this.nickname);
         let compileResult = this.haku.setBrush(brush);
+        console.log("compiling brush complete", compileResult);
+        console.groupEnd();
+
         this.isBrushOk = compileResult.status == "ok";
         return compileResult;
     }
 
     renderBrushToChunks(wall, x, y) {
-        return this.painter.renderBrushToWall(this.haku, x, y, wall);
+        console.group("renderBrushToChunks", this.nickname);
+        let result = this.painter.renderBrushToWall(this.haku, x, y, wall);
+        console.log("rendering brush to chunks complete");
+        console.groupEnd();
+
+        return result;
     }
 }