remove unused imports

commit 9cb24a0b1e
parent eb25d0b1cf
@@ -1,6 +1,6 @@
 use alloc::vec::Vec;
 use tiny_skia::{
-    BlendMode, Color, FillRule, LineCap, Paint, Path, PathBuilder, Pixmap, Rect, Shader,
+    BlendMode, Color, FillRule, LineCap, Paint, Path, PathBuilder, Pixmap, Shader,
     Stroke as SStroke, Transform,
 };
 
@@ -19,10 +19,9 @@ use schema::{
 use serde::{Deserialize, Serialize};
 use tokio::{
     select,
-    sync::{self, mpsc, oneshot},
-    time::Instant,
+    sync::{mpsc, oneshot},
 };
-use tracing::{error, info, instrument};
+use tracing::{error, instrument};
 
 use crate::{
     haku::{Haku, Limits},
@@ -254,8 +253,7 @@ impl SessionLoop {
             .name(String::from("haku render thread"))
             .spawn({
                 let wall = Arc::clone(&wall);
-                let chunk_images = Arc::clone(&chunk_images);
-                move || Self::render_thread(wall, chunk_images, limits, render_commands_rx)
+                move || Self::render_thread(wall, limits, render_commands_rx)
             })
             .context("could not spawn render thread")?;
 
@@ -415,12 +413,7 @@ impl SessionLoop {
         Ok(())
     }
 
-    fn render_thread(
-        wall: Arc<Wall>,
-        chunk_images: Arc<ChunkImages>,
-        limits: Limits,
-        mut commands: mpsc::Receiver<RenderCommand>,
-    ) {
+    fn render_thread(wall: Arc<Wall>, limits: Limits, mut commands: mpsc::Receiver<RenderCommand>) {
         let mut haku = Haku::new(limits);
         let mut brush_ok = false;
 
@@ -436,7 +429,7 @@ impl SessionLoop {
                     for point in points {
                         // Ignore the result. It's better if we render _something_ rather
                         // than nothing.
-                        _ = draw_to_chunks(&wall, &chunk_images, &haku, value, point);
+                        _ = draw_to_chunks(&wall, &haku, value, point);
                     }
                     haku.reset_vm();
                 }
@@ -467,14 +460,8 @@ fn chunks_to_modify(wall: &Wall, points: &[Vec2]) -> HashSet<ChunkPosition> {
     chunks
 }
 
-#[instrument(skip(wall, chunk_images, haku, value))]
-fn draw_to_chunks(
-    wall: &Wall,
-    chunk_images: &ChunkImages,
-    haku: &Haku,
-    value: Value,
-    center: Vec2,
-) -> eyre::Result<()> {
+#[instrument(skip(wall, haku, value))]
+fn draw_to_chunks(wall: &Wall, haku: &Haku, value: Value, center: Vec2) -> eyre::Result<()> {
     let settings = wall.settings();
 
     let chunk_size = settings.chunk_size as f32;
@@ -497,16 +484,5 @@ fn draw_to_chunks(
         }
     }
 
-    // NOTE: Maybe sending in an iterator would be more efficient?
-    // If there were many chunks modified, (which there probably weren't,) this could allocate
-    // a lot of memory.
-    chunk_images.mark_modified_blocking(
-        (top_chunk..bottom_chunk)
-            .flat_map(|chunk_y| {
-                (left_chunk..right_chunk).map(move |chunk_x| ChunkPosition::new(chunk_x, chunk_y))
-            })
-            .collect(),
-    );
-
     Ok(())
 }
@@ -1,7 +1,7 @@
 use std::path::PathBuf;
 
 use argon2::{
-    password_hash::{PasswordHasher, Salt, SaltString},
+    password_hash::{PasswordHasher, SaltString},
     Argon2, PasswordHash, PasswordVerifier,
 };
 use base64::Engine;
@@ -13,10 +13,8 @@ use eyre::Context;
 use tokio::{fs, net::TcpListener};
 use tower_http::services::{ServeDir, ServeFile};
 use tracing::{info, info_span};
-use tracing_subscriber::fmt::format::FmtSpan;
 
 mod api;
-mod binary;
 mod config;
 mod haku;
 mod id;
@@ -1,14 +1,13 @@
-use std::{backtrace::Backtrace, collections::HashSet, sync::Arc, time::Duration};
+use std::{collections::HashSet, sync::Arc, time::Duration};
 
-use dashmap::DashSet;
 use serde::{Deserialize, Serialize};
 use tokio::{
     sync::mpsc,
     time::{interval, MissedTickBehavior},
 };
-use tracing::{info, instrument};
+use tracing::instrument;
 
-use super::{chunk_images::ChunkImages, ChunkPosition, Database, Wall};
+use super::{chunk_images::ChunkImages, ChunkPosition};
 
 #[derive(Debug, Clone, Deserialize, Serialize)]
 pub struct Settings {
@@ -26,10 +26,6 @@ enum Command {
         chunks: Vec<ChunkPosition>,
         reply: oneshot::Sender<eyre::Result<()>>,
     },
-
-    MarkModified {
-        chunks: Vec<ChunkPosition>,
-    },
 }
 
 impl ChunkImages {
@@ -69,12 +65,6 @@ impl ChunkImages {
         rx.await.context("failed to load chunks")?
     }
 
-    pub fn mark_modified_blocking(&self, chunks: Vec<ChunkPosition>) {
-        _ = self
-            .commands_tx
-            .blocking_send(Command::MarkModified { chunks });
-    }
-
     pub fn chunk_exists(&self, position: ChunkPosition) -> bool {
         self.wall.has_chunk(position) || self.async_loop.chunks_in_db.contains(&position)
     }
@@ -221,17 +211,12 @@ impl ChunkImageLoop {
         while let Some(command) = commands_rx.recv().await {
             match command {
                 Command::Encode { chunks, reply } => {
-                    // TODO: This should have a caching layer.
                     tokio::spawn(Arc::clone(&self).encode(chunks, reply));
                 }
 
                 Command::Load { chunks, reply } => {
                     tokio::spawn(Arc::clone(&self).load(chunks, reply));
                 }
-
-                Command::MarkModified { chunks } => {
-                    // TODO: This should invalidate data from the caching layer.
-                }
             }
         }
     }
@@ -1,5 +1,3 @@
-use std::iter::Take;
-
 use super::ChunkPosition;
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
@@ -19,26 +17,6 @@ impl ChunkIterator {
             bottom_right,
         }
     }
-
-    pub fn take_next(&mut self, n: i32) -> Take<Self> {
-        assert!(n > 0);
-
-        let take = (*self).take(n as usize);
-
-        let x = self.cursor.x - self.left;
-        let width = self.bottom_right.x - self.left;
-        if width != 0 {
-            self.cursor.x = self.left + (x + n) % width;
-            self.cursor.y += n / width;
-        } else {
-            // In a width = 0 configuration, we iterate vertically.
-            // This is probably not the right thing to do, but we're just doing this to guard
-            // against malicious clients.
-            self.cursor.y += n;
-        }
-
-        take
-    }
 }
 
 impl Iterator for ChunkIterator {
@@ -1,17 +1,10 @@
-use std::{
-    convert::identity,
-    path::{Path, PathBuf},
-    sync::Arc,
-};
+use std::{convert::identity, path::PathBuf, sync::Arc};
 
-use chrono::Utc;
 use eyre::Context;
 use rusqlite::Connection;
 use tokio::sync::{mpsc, oneshot};
 use tracing::{error, info, instrument};
 
-use crate::login::UserId;
-
 use super::{ChunkPosition, WallId};
 
 pub struct Settings {
@@ -32,18 +25,12 @@ pub struct ChunkDataPair {
 }
 
 enum Command {
-    SetWallInfo {
-        created_by: UserId,
-        title: String,
-        reply: oneshot::Sender<eyre::Result<()>>,
-    },
-
-    WriteChunks {
+    Write {
         chunks: Vec<ChunkDataPair>,
         reply: oneshot::Sender<eyre::Result<()>>,
     },
 
-    ReadChunks {
+    Read {
         chunks: Vec<ChunkPosition>,
         reply: oneshot::Sender<Vec<ChunkDataPair>>,
     },
|
@ -62,7 +49,7 @@ impl Database {
|
||||||
pub async fn write_chunks(&self, chunks: Vec<ChunkDataPair>) -> eyre::Result<()> {
|
pub async fn write_chunks(&self, chunks: Vec<ChunkDataPair>) -> eyre::Result<()> {
|
||||||
let (tx, rx) = oneshot::channel();
|
let (tx, rx) = oneshot::channel();
|
||||||
self.command_tx
|
self.command_tx
|
||||||
.send(Command::WriteChunks { chunks, reply: tx })
|
.send(Command::Write { chunks, reply: tx })
|
||||||
.await
|
.await
|
||||||
.context("database is offline")?;
|
.context("database is offline")?;
|
||||||
rx.await.context("database returned an error")?
|
rx.await.context("database returned an error")?
|
||||||
|
@@ -74,7 +61,7 @@ impl Database {
     ) -> eyre::Result<Vec<ChunkDataPair>> {
         let (tx, rx) = oneshot::channel();
         self.command_tx
-            .send(Command::ReadChunks { chunks, reply: tx })
+            .send(Command::Read { chunks, reply: tx })
             .await
             .context("database is offline")?;
         rx.await.context("database did not return anything")
@@ -207,17 +194,6 @@ pub fn start(settings: Settings) -> eyre::Result<Database> {
     std::thread::Builder::new()
         .name(format!("database thread {}", settings.wall_id))
         .spawn(move || {
-            let mut s_set_wall_info = db
-                .prepare(
-                    r#"
-                    INSERT OR REPLACE
-                    INTO t_wall_info
-                    (created_by, title)
-                    VALUES (?, ?);
-                    "#,
-                )
-                .unwrap();
-
             let mut s_write_chunk = db
                 .prepare(
                     r#"
@@ -250,20 +226,7 @@ pub fn start(settings: Settings) -> eyre::Result<Database> {
 
             while let Some(command) = command_rx.blocking_recv() {
                 match command {
-                    Command::SetWallInfo {
-                        created_by,
-                        title,
-                        reply,
-                    } => {
-                        _ = reply.send(
-                            s_set_wall_info
-                                .execute((created_by.0, title))
-                                .map(|_| ())
-                                .context("failed to set wall info"),
-                        );
-                    }
-
-                    Command::WriteChunks { chunks, reply } => {
+                    Command::Write { chunks, reply } => {
                         let mut result = Ok(());
                         for ChunkDataPair { position, data } in chunks {
                             if let Err(error) =
@@ -279,7 +242,7 @@ pub fn start(settings: Settings) -> eyre::Result<Database> {
                                 ));
                     }
 
-                    Command::ReadChunks { chunks, reply } => {
+                    Command::Read { chunks, reply } => {
                         let result = chunks
                             .into_iter()
                             .flat_map(|position| {