add a lot more logging to both client and server to analyze #81

> #81 Compiling brush doesn't seem to finish correctly sometimes, which causes loss of data
> Sometimes it seems like the client state can get desynced (server render thread dies due to a panic?) and then the server starts dropping all requests for drawing the brush.
> These panics should never happen of course, but we need better logging first to determine the exact cause.
This commit is contained in:
liquidex 2024-09-02 20:14:20 +02:00
parent 6cfbc4f05f
commit 26d3b8ed8a
5 changed files with 86 additions and 11 deletions

View file

@ -337,11 +337,20 @@ unsafe extern "C" fn haku_compile_brush(
return StatusCode::SourceCodeTooLong;
};
debug!("compiling: lexing");
let mut lexer = Lexer::new(Lexis::new(instance.limits.max_tokens), code);
if lex(&mut lexer).is_err() {
info!("compiling failed: too many tokens");
return StatusCode::TooManyTokens;
};
debug!(
"compiling: lexed successfully to {} tokens",
lexer.lexis.len()
);
debug!("compiling: parsing");
let mut ast = Ast::new(instance.limits.ast_capacity);
let mut parser = Parser::new(
&lexer.lexis,
@ -352,10 +361,21 @@ unsafe extern "C" fn haku_compile_brush(
parser::toplevel(&mut parser);
let (root, mut parser_diagnostics) = match parser.into_ast(&mut ast) {
Ok((r, d)) => (r, d),
Err(IntoAstError::NodeAlloc(_)) => return StatusCode::TooManyAstNodes,
Err(IntoAstError::UnbalancedEvents) => return StatusCode::ParserUnbalancedEvents,
Err(IntoAstError::NodeAlloc(_)) => {
info!("compiling failed: too many AST nodes");
return StatusCode::TooManyAstNodes;
}
Err(IntoAstError::UnbalancedEvents) => {
info!("compiling failed: parser produced unbalanced events");
return StatusCode::ParserUnbalancedEvents;
}
};
debug!(
"compiling: parsed successfully into {} AST nodes",
ast.len()
);
let src = Source {
code,
ast: &ast,
@ -366,7 +386,10 @@ unsafe extern "C" fn haku_compile_brush(
let mut compiler = Compiler::new(&mut instance.defs, &mut chunk);
if let Err(error) = compile_expr(&mut compiler, &src, root) {
match error {
CompileError::Emit => return StatusCode::ChunkTooBig,
CompileError::Emit => {
info!("compiling failed: chunk overflowed while emitting code");
return StatusCode::ChunkTooBig;
}
}
}
let closure_spec = compiler.closure_spec();
@ -376,9 +399,16 @@ unsafe extern "C" fn haku_compile_brush(
diagnostics.append(&mut compiler.diagnostics);
if !diagnostics.is_empty() {
brush.diagnostics = diagnostics;
debug!("compiling failed: diagnostics were emitted");
return StatusCode::DiagnosticsEmitted;
}
debug!(
"compiling: chunk has {} bytes of bytecode",
chunk.bytecode.len()
);
debug!("compiling: {closure_spec:?}");
let chunk_id = match instance.system.add_chunk(chunk) {
Ok(chunk_id) => chunk_id,
Err(_) => return StatusCode::TooManyChunks,

View file

@ -74,6 +74,14 @@ impl Ast {
ast
}
/// Returns the number of nodes currently allocated in this AST.
///
/// Delegates to the length of the `kinds` array, which holds one entry
/// per allocated node (used by the compiler's debug logging to report
/// how many AST nodes a parse produced).
pub fn len(&self) -> usize {
self.kinds.len()
}
/// Returns `true` if no nodes have been allocated yet.
///
/// Provided alongside `len` per the standard Rust convention (clippy's
/// `len_without_is_empty` lint); defined in terms of `len` so the two
/// can never disagree.
pub fn is_empty(&self) -> bool {
self.len() == 0
}
pub fn alloc(&mut self, kind: NodeKind, span: Span) -> Result<NodeId, NodeAllocError> {
if self.kinds.len() >= self.kinds.capacity() {
return Err(NodeAllocError);

View file

@ -21,7 +21,7 @@ use tokio::{
select,
sync::{mpsc, oneshot},
};
use tracing::{error, instrument};
use tracing::{error, info, info_span, instrument};
use crate::{
haku::{Haku, Limits},
@ -29,7 +29,7 @@ use crate::{
schema::Vec2,
wall::{
self, auto_save::AutoSave, chunk_images::ChunkImages, chunk_iterator::ChunkIterator,
database::ChunkDataPair, ChunkPosition, JoinError, SessionHandle, UserInit, Wall,
database::ChunkDataPair, ChunkPosition, JoinError, SessionHandle, UserInit, Wall, WallId,
},
};
@ -190,6 +190,7 @@ async fn fallible_websocket(api: Arc<Api>, ws: &mut WebSocket) -> eyre::Result<(
// to remind us about unexpected nulls.
SessionLoop::start(
wall_id,
open_wall.wall,
open_wall.chunk_images,
open_wall.auto_save,
@ -205,6 +206,7 @@ async fn fallible_websocket(api: Arc<Api>, ws: &mut WebSocket) -> eyre::Result<(
}
struct SessionLoop {
wall_id: WallId,
wall: Arc<Wall>,
chunk_images: Arc<ChunkImages>,
auto_save: Arc<AutoSave>,
@ -230,6 +232,7 @@ enum RenderCommand {
impl SessionLoop {
async fn start(
wall_id: WallId,
wall: Arc<Wall>,
chunk_images: Arc<ChunkImages>,
auto_save: Arc<AutoSave>,
@ -253,11 +256,17 @@ impl SessionLoop {
.name(String::from("haku render thread"))
.spawn({
let wall = Arc::clone(&wall);
move || Self::render_thread(wall, limits, render_commands_rx)
move || {
let _span =
info_span!("render_thread", %wall_id, session_id = ?handle.session_id)
.entered();
Self::render_thread(wall, limits, render_commands_rx)
}
})
.context("could not spawn render thread")?;
Ok(Self {
wall_id,
wall,
chunk_images,
auto_save,
@ -269,6 +278,7 @@ impl SessionLoop {
})
}
#[instrument(skip(self, ws), fields(wall_id = %self.wall_id, session_id = ?self.handle.session_id))]
async fn event_loop(&mut self, ws: &mut WebSocket) -> eyre::Result<()> {
loop {
select! {
@ -317,10 +327,18 @@ impl SessionLoop {
// Theoretically this will yield much better responsiveness, but it _will_
// result in some visual glitches if we're getting bottlenecked.
let (done_tx, done_rx) = oneshot::channel();
_ = self.render_commands_tx.try_send(RenderCommand::Plot {
points: points.clone(),
done: done_tx,
});
let send_result =
self.render_commands_tx.try_send(RenderCommand::Plot {
points: points.clone(),
done: done_tx,
});
if send_result.is_err() {
info!(
?points,
"render thread is overloaded, dropping request to draw points"
);
}
let auto_save = Arc::clone(&self.auto_save);
tokio::spawn(async move {

View file

@ -18,6 +18,7 @@ use haku::{
vm::{Vm, VmImage, VmLimits},
};
use serde::{Deserialize, Serialize};
use tracing::{info, instrument, Level};
use crate::schema::Vec2;
@ -91,7 +92,10 @@ impl Haku {
self.defs.restore_image(&self.defs_image);
}
#[instrument(skip(self, code), err)]
pub fn set_brush(&mut self, code: &str) -> eyre::Result<()> {
info!(?code);
self.reset();
let code = SourceCode::limited_len(code, self.limits.max_source_code_len)
@ -127,15 +131,19 @@ impl Haku {
|| !parser_diagnostics.is_empty()
|| !compiler.diagnostics.is_empty()
{
info!(?lexer.diagnostics, ?parser_diagnostics, ?compiler.diagnostics, "diagnostics were emitted");
bail!("diagnostics were emitted");
}
let chunk_id = self.system.add_chunk(chunk).context("too many chunks")?;
self.brush = Some((chunk_id, closure_spec));
info!("brush set successfully");
Ok(())
}
#[instrument(skip(self), err(level = Level::INFO))]
pub fn eval_brush(&mut self) -> eyre::Result<Value> {
let (chunk_id, closure_spec) = self
.brush
@ -156,6 +164,7 @@ impl Haku {
Ok(scribble)
}
#[instrument(skip(self, pixmap, value, translation), err(level = Level::INFO))]
pub fn render_value(
&self,
pixmap: &mut Pixmap,

View file

@ -20,13 +20,23 @@ export class User {
}
setBrush(brush) {
console.group("setBrush", this.nickname);
let compileResult = this.haku.setBrush(brush);
console.log("compiling brush complete", compileResult);
console.groupEnd();
this.isBrushOk = compileResult.status == "ok";
return compileResult;
}
renderBrushToChunks(wall, x, y) {
return this.painter.renderBrushToWall(this.haku, x, y, wall);
console.group("renderBrushToChunks", this.nickname);
let result = this.painter.renderBrushToWall(this.haku, x, y, wall);
console.log("rendering brush to chunks complete");
console.groupEnd();
return result;
}
}