remove unused imports
parent eb25d0b1cf
commit 9cb24a0b1e
@@ -1,6 +1,6 @@
 use alloc::vec::Vec;
 use tiny_skia::{
-    BlendMode, Color, FillRule, LineCap, Paint, Path, PathBuilder, Pixmap, Rect, Shader,
+    BlendMode, Color, FillRule, LineCap, Paint, Path, PathBuilder, Pixmap, Shader,
     Stroke as SStroke, Transform,
 };
 
@@ -19,10 +19,9 @@ use schema::{
 use serde::{Deserialize, Serialize};
 use tokio::{
     select,
-    sync::{self, mpsc, oneshot},
-    time::Instant,
+    sync::{mpsc, oneshot},
 };
-use tracing::{error, info, instrument};
+use tracing::{error, instrument};
 
 use crate::{
     haku::{Haku, Limits},
@@ -254,8 +253,7 @@ impl SessionLoop {
             .name(String::from("haku render thread"))
             .spawn({
                 let wall = Arc::clone(&wall);
-                let chunk_images = Arc::clone(&chunk_images);
-                move || Self::render_thread(wall, chunk_images, limits, render_commands_rx)
+                move || Self::render_thread(wall, limits, render_commands_rx)
             })
             .context("could not spawn render thread")?;
 
@@ -415,12 +413,7 @@ impl SessionLoop {
         Ok(())
     }
 
-    fn render_thread(
-        wall: Arc<Wall>,
-        chunk_images: Arc<ChunkImages>,
-        limits: Limits,
-        mut commands: mpsc::Receiver<RenderCommand>,
-    ) {
+    fn render_thread(wall: Arc<Wall>, limits: Limits, mut commands: mpsc::Receiver<RenderCommand>) {
         let mut haku = Haku::new(limits);
         let mut brush_ok = false;
 
@@ -436,7 +429,7 @@ impl SessionLoop {
                 for point in points {
                     // Ignore the result. It's better if we render _something_ rather
                     // than nothing.
-                    _ = draw_to_chunks(&wall, &chunk_images, &haku, value, point);
+                    _ = draw_to_chunks(&wall, &haku, value, point);
                 }
                 haku.reset_vm();
             }
@@ -467,14 +460,8 @@ fn chunks_to_modify(wall: &Wall, points: &[Vec2]) -> HashSet<ChunkPosition> {
     chunks
 }
 
-#[instrument(skip(wall, chunk_images, haku, value))]
-fn draw_to_chunks(
-    wall: &Wall,
-    chunk_images: &ChunkImages,
-    haku: &Haku,
-    value: Value,
-    center: Vec2,
-) -> eyre::Result<()> {
+#[instrument(skip(wall, haku, value))]
+fn draw_to_chunks(wall: &Wall, haku: &Haku, value: Value, center: Vec2) -> eyre::Result<()> {
     let settings = wall.settings();
 
     let chunk_size = settings.chunk_size as f32;
@@ -497,16 +484,5 @@ fn draw_to_chunks(
         }
     }
 
-    // NOTE: Maybe sending in an iterator would be more efficient?
-    // If there were many chunks modified, (which there probably weren't,) this could allocate
-    // a lot of memory.
-    chunk_images.mark_modified_blocking(
-        (top_chunk..bottom_chunk)
-            .flat_map(|chunk_y| {
-                (left_chunk..right_chunk).map(move |chunk_x| ChunkPosition::new(chunk_x, chunk_y))
-            })
-            .collect(),
-    );
-
     Ok(())
 }
@@ -1,7 +1,7 @@
 use std::path::PathBuf;
 
 use argon2::{
-    password_hash::{PasswordHasher, Salt, SaltString},
+    password_hash::{PasswordHasher, SaltString},
     Argon2, PasswordHash, PasswordVerifier,
 };
 use base64::Engine;
@@ -13,10 +13,8 @@ use eyre::Context;
 use tokio::{fs, net::TcpListener};
 use tower_http::services::{ServeDir, ServeFile};
 use tracing::{info, info_span};
-use tracing_subscriber::fmt::format::FmtSpan;
 
 mod api;
-mod binary;
 mod config;
 mod haku;
 mod id;
@@ -1,14 +1,13 @@
-use std::{backtrace::Backtrace, collections::HashSet, sync::Arc, time::Duration};
+use std::{collections::HashSet, sync::Arc, time::Duration};
 
-use dashmap::DashSet;
 use serde::{Deserialize, Serialize};
 use tokio::{
     sync::mpsc,
     time::{interval, MissedTickBehavior},
 };
-use tracing::{info, instrument};
+use tracing::instrument;
 
-use super::{chunk_images::ChunkImages, ChunkPosition, Database, Wall};
+use super::{chunk_images::ChunkImages, ChunkPosition};
 
 #[derive(Debug, Clone, Deserialize, Serialize)]
 pub struct Settings {
@@ -26,10 +26,6 @@ enum Command {
         chunks: Vec<ChunkPosition>,
         reply: oneshot::Sender<eyre::Result<()>>,
     },
-
-    MarkModified {
-        chunks: Vec<ChunkPosition>,
-    },
 }
 
 impl ChunkImages {
@@ -69,12 +65,6 @@ impl ChunkImages {
         rx.await.context("failed to load chunks")?
     }
 
-    pub fn mark_modified_blocking(&self, chunks: Vec<ChunkPosition>) {
-        _ = self
-            .commands_tx
-            .blocking_send(Command::MarkModified { chunks });
-    }
-
     pub fn chunk_exists(&self, position: ChunkPosition) -> bool {
         self.wall.has_chunk(position) || self.async_loop.chunks_in_db.contains(&position)
     }
@@ -221,17 +211,12 @@ impl ChunkImageLoop {
         while let Some(command) = commands_rx.recv().await {
             match command {
                 Command::Encode { chunks, reply } => {
-                    // TODO: This should have a caching layer.
                     tokio::spawn(Arc::clone(&self).encode(chunks, reply));
                 }
 
                 Command::Load { chunks, reply } => {
                     tokio::spawn(Arc::clone(&self).load(chunks, reply));
                 }
-
-                Command::MarkModified { chunks } => {
-                    // TODO: This should invalidate data from the caching layer.
-                }
             }
         }
     }
@@ -1,5 +1,3 @@
-use std::iter::Take;
-
 use super::ChunkPosition;
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
@@ -19,26 +17,6 @@ impl ChunkIterator {
             bottom_right,
         }
     }
-
-    pub fn take_next(&mut self, n: i32) -> Take<Self> {
-        assert!(n > 0);
-
-        let take = (*self).take(n as usize);
-
-        let x = self.cursor.x - self.left;
-        let width = self.bottom_right.x - self.left;
-        if width != 0 {
-            self.cursor.x = self.left + (x + n) % width;
-            self.cursor.y += n / width;
-        } else {
-            // In a width = 0 configuration, we iterate vertically.
-            // This is probably not the right thing to do, but we're just doing this to guard
-            // against malicious clients.
-            self.cursor.y += n;
-        }
-
-        take
-    }
 }
 
 impl Iterator for ChunkIterator {
@@ -1,17 +1,10 @@
-use std::{
-    convert::identity,
-    path::{Path, PathBuf},
-    sync::Arc,
-};
+use std::{convert::identity, path::PathBuf, sync::Arc};
 
-use chrono::Utc;
 use eyre::Context;
 use rusqlite::Connection;
 use tokio::sync::{mpsc, oneshot};
 use tracing::{error, info, instrument};
 
-use crate::login::UserId;
-
 use super::{ChunkPosition, WallId};
 
 pub struct Settings {
@@ -32,18 +25,12 @@ pub struct ChunkDataPair {
 }
 
 enum Command {
-    SetWallInfo {
-        created_by: UserId,
-        title: String,
-        reply: oneshot::Sender<eyre::Result<()>>,
-    },
-
-    WriteChunks {
+    Write {
         chunks: Vec<ChunkDataPair>,
         reply: oneshot::Sender<eyre::Result<()>>,
     },
 
-    ReadChunks {
+    Read {
         chunks: Vec<ChunkPosition>,
         reply: oneshot::Sender<Vec<ChunkDataPair>>,
     },
@@ -62,7 +49,7 @@ impl Database {
     pub async fn write_chunks(&self, chunks: Vec<ChunkDataPair>) -> eyre::Result<()> {
         let (tx, rx) = oneshot::channel();
         self.command_tx
-            .send(Command::WriteChunks { chunks, reply: tx })
+            .send(Command::Write { chunks, reply: tx })
            .await
             .context("database is offline")?;
         rx.await.context("database returned an error")?
@@ -74,7 +61,7 @@ impl Database {
     ) -> eyre::Result<Vec<ChunkDataPair>> {
         let (tx, rx) = oneshot::channel();
         self.command_tx
-            .send(Command::ReadChunks { chunks, reply: tx })
+            .send(Command::Read { chunks, reply: tx })
             .await
             .context("database is offline")?;
         rx.await.context("database did not return anything")
@@ -207,17 +194,6 @@ pub fn start(settings: Settings) -> eyre::Result<Database> {
     std::thread::Builder::new()
         .name(format!("database thread {}", settings.wall_id))
         .spawn(move || {
-            let mut s_set_wall_info = db
-                .prepare(
-                    r#"
-                        INSERT OR REPLACE
-                        INTO t_wall_info
-                        (created_by, title)
-                        VALUES (?, ?);
-                    "#,
-                )
-                .unwrap();
-
             let mut s_write_chunk = db
                 .prepare(
                     r#"
@@ -250,20 +226,7 @@ pub fn start(settings: Settings) -> eyre::Result<Database> {
 
             while let Some(command) = command_rx.blocking_recv() {
                 match command {
-                    Command::SetWallInfo {
-                        created_by,
-                        title,
-                        reply,
-                    } => {
-                        _ = reply.send(
-                            s_set_wall_info
-                                .execute((created_by.0, title))
-                                .map(|_| ())
-                                .context("failed to set wall info"),
-                        );
-                    }
-
-                    Command::WriteChunks { chunks, reply } => {
+                    Command::Write { chunks, reply } => {
                         let mut result = Ok(());
                         for ChunkDataPair { position, data } in chunks {
                             if let Err(error) =
@@ -279,7 +242,7 @@ pub fn start(settings: Settings) -> eyre::Result<Database> {
                         ));
                     }
 
-                    Command::ReadChunks { chunks, reply } => {
+                    Command::Read { chunks, reply } => {
                         let result = chunks
                             .into_iter()
                             .flat_map(|position| {