diff --git a/resource_management/src/asset/asset_handler.rs b/resource_management/src/asset/asset_handler.rs index 031efc12..67704a49 100644 --- a/resource_management/src/asset/asset_handler.rs +++ b/resource_management/src/asset/asset_handler.rs @@ -1,4 +1,6 @@ -use super::{AssetResolver, StorageBackend}; +use crate::StorageBackend; + +use super::AssetResolver; /// An asset handler is responsible for loading assets of a certain type from a url. pub trait AssetHandler { diff --git a/resource_management/src/asset/asset_manager.rs b/resource_management/src/asset/asset_manager.rs index 3ca49f45..72ddad72 100644 --- a/resource_management/src/asset/asset_manager.rs +++ b/resource_management/src/asset/asset_manager.rs @@ -1,4 +1,4 @@ -use crate::{asset::{AssetResolver, StorageBackend}, GenericResourceSerialization}; +use crate::{asset::AssetResolver, resource::resource_handler::ResourceReader, DbStorageBackend, GenericResourceResponse, GenericResourceSerialization}; use super::asset_handler::AssetHandler; @@ -24,43 +24,9 @@ impl AssetManager { } } - let mut args = std::env::args(); + // let mut args = std::env::args(); - let mut memory_only = args.find(|arg| arg == "--ResourceManager.memory_only").is_some(); - - if cfg!(test) { // If we are running tests we want to use memory database. This way we can run tests in parallel. - memory_only = true; - } - - let db_res = if !memory_only { - polodb_core::Database::open_file(resolve_internal_path(std::path::Path::new("assets.db"))) - } else { - log::info!("Using memory database instead of file database."); - polodb_core::Database::open_memory() - }; - - match db_res { - Ok(db) => db, - Err(_) => { - // Delete file and try again - std::fs::remove_file(resolve_internal_path(std::path::Path::new("assets.db"))).unwrap(); - - log::warn!("Database file was corrupted, deleting and trying again."); - - let db_res = polodb_core::Database::open_file(resolve_internal_path(std::path::Path::new("assets.db"))); - - match db_res { - Ok(db) => db, - Err(_) => match polodb_core::Database::open_memory() { // If we can't create a file database, create a memory database. This way we can still run the application. 
- Ok(db) => { - log::error!("Could not create database file, using memory database instead."); - db - }, - Err(_) => panic!("Could not create database"), - } - } - } - }; + // let mut memory_only = args.find(|arg| arg == "--ResourceManager.memory_only").is_some(); AssetManager { asset_handlers: Vec::new(), @@ -82,19 +48,7 @@ impl AssetManager { let asset_resolver = MyAssetResolver {}; - struct MyStorageBackend {} - - impl StorageBackend for MyStorageBackend { - fn store(&self, _: GenericResourceSerialization, _: &[u8]) -> Result<(), ()> { - Ok(()) - } - - fn read(&self, _: &str) -> Result<(GenericResourceSerialization, Box<[u8]>), ()> { - todo!() - } - } - - let storage_backend = MyStorageBackend {}; + let storage_backend = DbStorageBackend::new(&resolve_asset_path(&std::path::Path::new("assets.db"))); let asset_handler_loads = self.asset_handlers.iter().map(|asset_handler| asset_handler.load(&asset_resolver, &storage_backend, url, &json)); @@ -130,9 +84,9 @@ fn resolve_asset_path(path: &std::path::Path) -> std::path::PathBuf { mod tests { use smol::future::FutureExt; - use crate::asset::StorageBackend; + use crate::StorageBackend; -use super::*; + use super::*; struct TestAssetHandler { diff --git a/resource_management/src/asset/audio_asset_handler.rs b/resource_management/src/asset/audio_asset_handler.rs index a69b935b..5b5a34b9 100644 --- a/resource_management/src/asset/audio_asset_handler.rs +++ b/resource_management/src/asset/audio_asset_handler.rs @@ -1,8 +1,8 @@ use smol::future::FutureExt; -use crate::{types::{Audio, BitDepths}, GenericResourceSerialization}; +use crate::{types::{Audio, BitDepths}, GenericResourceSerialization, StorageBackend}; -use super::{asset_handler::AssetHandler, AssetResolver, StorageBackend}; +use super::{asset_handler::AssetHandler, AssetResolver,}; pub struct AudioAssetHandler { @@ -131,9 +131,10 @@ mod tests { assert_eq!(resource.url, "gun.wav"); assert_eq!(resource.class, "Audio"); - assert_eq!(resource.resource.get_str("bit_depth").unwrap(), "Sixteen"); - assert_eq!(resource.resource.get_i32("channel_count").unwrap(), 1); - assert_eq!(resource.resource.get_i64("sample_rate").unwrap(), 48000); - assert_eq!(resource.resource.get_i64("sample_count").unwrap(), 152456 / 1 / (16 / 8)); + let resource = resource.resource.as_document().expect("Resource is not a document"); + assert_eq!(resource.get_str("bit_depth").unwrap(), "Sixteen"); + assert_eq!(resource.get_i32("channel_count").unwrap(), 1); + assert_eq!(resource.get_i64("sample_rate").unwrap(), 48000); + assert_eq!(resource.get_i64("sample_count").unwrap(), 152456 / 1 / (16 / 8)); } } \ No newline at end of file diff --git a/resource_management/src/asset/image_asset_handler.rs b/resource_management/src/asset/image_asset_handler.rs index 145b2f02..5eb46e64 100644 --- a/resource_management/src/asset/image_asset_handler.rs +++ b/resource_management/src/asset/image_asset_handler.rs @@ -1,9 +1,9 @@ use smol::future::FutureExt; use utils::Extent; -use crate::{types::{CompressionSchemes, Formats, Image}, GenericResourceSerialization}; +use crate::{types::{CompressionSchemes, Formats, Image}, GenericResourceSerialization, StorageBackend}; -use super::{asset_handler::AssetHandler, AssetResolver, StorageBackend}; +use super::{asset_handler::AssetHandler, AssetResolver,}; pub struct ImageAssetHandler { } diff --git a/resource_management/src/asset/material_asset_handler.rs b/resource_management/src/asset/material_asset_handler.rs index 6dfef217..c85b97e7 100644 --- 
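Editor's note: the audio test above now goes through `as_document()` because, later in this diff, `GenericResourceSerialization::resource` changes from `bson::Document` to `bson::Bson`. A minimal sketch of that access pattern using the `bson` crate directly (assumed equivalent to the `polodb_core::bson` re-export used here); the `AudioMeta` struct is illustrative only, not a type from this crate.

```rust
use serde::Serialize;

#[derive(Serialize)]
struct AudioMeta {
    bit_depth: String,
    channel_count: i32,
    sample_rate: i64,
}

fn main() {
    // Serializing to Bson (not Document) mirrors the GenericResourceSerialization change below.
    let meta = bson::to_bson(&AudioMeta {
        bit_depth: "Sixteen".into(),
        channel_count: 1,
        sample_rate: 48000,
    })
    .unwrap();

    // Readers must first view the Bson value as a document, then use the typed getters,
    // exactly as the updated audio test does.
    let doc = meta.as_document().expect("Resource is not a document");
    assert_eq!(doc.get_str("bit_depth").unwrap(), "Sixteen");
    assert_eq!(doc.get_i32("channel_count").unwrap(), 1);
    assert_eq!(doc.get_i64("sample_rate").unwrap(), 48000);
}
```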
a/resource_management/src/asset/material_asset_handler.rs +++ b/resource_management/src/asset/material_asset_handler.rs @@ -2,9 +2,9 @@ use std::{borrow::Borrow, cell::RefCell, ops::Deref}; use smol::future::FutureExt; -use crate::{resource::material_resource_handler::ProgramGenerator, shader_generation::{ShaderGenerationSettings, ShaderGenerator}, types::{AlphaMode, Material, Model, Property, Shader, ShaderTypes, Value, Variant, VariantVariable}, GenericResourceSerialization, ProcessedResources}; +use crate::{resource::material_resource_handler::ProgramGenerator, shader_generation::{ShaderGenerationSettings, ShaderGenerator}, types::{AlphaMode, Material, Model, Property, Shader, ShaderTypes, Value, Variant, VariantVariable}, GenericResourceSerialization, ProcessedResources, StorageBackend}; -use super::{asset_handler::AssetHandler, AssetResolver, StorageBackend}; +use super::{asset_handler::AssetHandler, AssetResolver,}; struct MaterialAssetHandler { generator: Option>, @@ -179,7 +179,6 @@ mod tests { use crate::{asset::{asset_handler::AssetHandler, tests::{TestAssetResolver, TestStorageBackend}}, resource::material_resource_handler::ProgramGenerator}; #[test] - #[ignore] fn load_material() { let asset_resolver = TestAssetResolver::new(); let storage_backend = TestStorageBackend::new(); diff --git a/resource_management/src/asset/mesh_asset_handler.rs b/resource_management/src/asset/mesh_asset_handler.rs index 283455eb..ed4fbc69 100644 --- a/resource_management/src/asset/mesh_asset_handler.rs +++ b/resource_management/src/asset/mesh_asset_handler.rs @@ -7,10 +7,10 @@ use crate::{ IntegralTypes, Material, Mesh, MeshletStream, Model, Primitive, Property, SubMesh, Value, VertexComponent, VertexSemantics, }, - GenericResourceSerialization, ProcessedResources, + GenericResourceSerialization, ProcessedResources, StorageBackend, }; -use super::{asset_handler::AssetHandler, AssetResolver, StorageBackend}; +use super::{asset_handler::AssetHandler, AssetResolver,}; pub struct MeshAssetHandler {} @@ -533,7 +533,7 @@ mod tests { ], }, ], - } + }.into() ); // TODO: ASSERT BINARY DATA diff --git a/resource_management/src/asset/mod.rs b/resource_management/src/asset/mod.rs index d9e55bcc..be5d33fe 100644 --- a/resource_management/src/asset/mod.rs +++ b/resource_management/src/asset/mod.rs @@ -3,7 +3,7 @@ use smol::{future::FutureExt, io::AsyncReadExt}; -use crate::GenericResourceSerialization; +use crate::{resource::resource_handler::ResourceReader, GenericResourceResponse, GenericResourceSerialization}; pub mod asset_manager; pub mod asset_handler; @@ -69,20 +69,15 @@ pub trait AssetResolver: Sync + Send { } } -pub trait StorageBackend: Sync + Send { - fn store(&self, resource: GenericResourceSerialization, data: &[u8]) -> Result<(), ()>; - fn read(&self, id: &str) -> Result<(GenericResourceSerialization, Box<[u8]>), ()>; -} - #[cfg(test)] pub mod tests { use std::{collections::HashMap, sync::{Arc, Mutex}}; use smol::future::FutureExt; - use crate::GenericResourceSerialization; + use crate::{resource::{resource_handler::ResourceReader, tests::TestResourceReader}, GenericResourceResponse, GenericResourceSerialization, StorageBackend}; - use super::{read_asset_from_source, AssetResolver, StorageBackend}; + use super::{read_asset_from_source, AssetResolver,}; pub struct TestAssetResolver { files: Arc>>>, @@ -138,19 +133,24 @@ pub mod tests { } impl StorageBackend for TestStorageBackend { - fn store(&self, resource: GenericResourceSerialization, data: &[u8]) -> Result<(), ()> { + fn store<'a>(&'a self, 
resource: GenericResourceSerialization, data: &[u8]) -> utils::BoxedFuture<'a, Result<(), ()>> { self.resources.lock().unwrap().push((resource, data.into())); - Ok(()) + + Box::pin(async move { + Ok(()) + }) } - fn read(&self, id: &str) -> Result<(GenericResourceSerialization, Box<[u8]>), ()> { - let resources = self.resources.lock().unwrap(); - for resource in resources.iter() { - if resource.0.url == id { - return Ok(resource.clone()); + fn read<'a>(&'a self, id: &'a str) -> utils::BoxedFuture<'a, Option<(GenericResourceResponse, Box)>> { + Box::pin(async move { + let resources = self.resources.lock().unwrap(); + for (resource, data) in resources.iter() { + if resource.url == id { + return Some((GenericResourceResponse::new(resource.url.clone(), resource.class.clone(), data.len(), resource.resource.clone()), Box::new(TestResourceReader::new(data.clone())) as Box)); + } } - } - Err(()) + None + }) } } } \ No newline at end of file diff --git a/resource_management/src/lib.rs b/resource_management/src/lib.rs index d1401ecf..99866fcd 100644 --- a/resource_management/src/lib.rs +++ b/resource_management/src/lib.rs @@ -5,6 +5,12 @@ #![feature(async_closure)] #![feature(closure_lifetime_binder)] +use std::{borrow::Cow, hash::Hasher}; + +use polodb_core::bson; +use resource::resource_handler::{FileResourceReader, ReadTargets, ResourceReader}; +use smol::io::AsyncWriteExt; + pub mod asset; pub mod resource; @@ -26,7 +32,7 @@ pub struct GenericResourceSerialization { /// List of resources that this resource depends on. required_resources: Vec, /// The resource data. - resource: polodb_core::bson::Document, + resource: bson::Bson, } impl GenericResourceSerialization { @@ -35,7 +41,7 @@ impl GenericResourceSerialization { url, required_resources: Vec::new(), class: resource.get_class().to_string(), - resource: polodb_core::bson::to_document(&resource).unwrap(), + resource: polodb_core::bson::to_bson(&resource).unwrap(), } } @@ -45,6 +51,34 @@ impl GenericResourceSerialization { } } +#[derive()] +pub struct GenericResourceResponse<'a> { + /// The resource id. This is used to identify the resource. Needs to be meaningful and will be a public constant. + url: String, + /// The resource class (EJ: "Texture", "Mesh", "Material", etc.) + class: String, + size: usize, + /// The resource data. 
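Editor's note: `StorageBackend::store`/`read` now return `utils::BoxedFuture` instead of plain values, which is why `TestStorageBackend` wraps its bodies in `Box::pin(async move { .. })`. A self-contained sketch of that pattern, assuming `utils::BoxedFuture<'a, T>` is the usual `Pin<Box<dyn Future<Output = T> + Send + 'a>>` alias; the trait and in-memory backend here are simplified stand-ins, not the crate's real types.

```rust
use std::future::Future;
use std::pin::Pin;
use std::sync::Mutex;

// Assumed shape of utils::BoxedFuture.
type BoxedFuture<'a, T> = Pin<Box<dyn Future<Output = T> + Send + 'a>>;

trait StorageBackend: Sync + Send {
    fn store<'a>(&'a self, url: String, data: &'a [u8]) -> BoxedFuture<'a, Result<(), ()>>;
    fn read<'a>(&'a self, url: &'a str) -> BoxedFuture<'a, Option<Box<[u8]>>>;
}

struct MemoryBackend {
    entries: Mutex<Vec<(String, Box<[u8]>)>>,
}

impl StorageBackend for MemoryBackend {
    fn store<'a>(&'a self, url: String, data: &'a [u8]) -> BoxedFuture<'a, Result<(), ()>> {
        Box::pin(async move {
            // Synchronous work can still live inside the boxed future.
            self.entries.lock().unwrap().push((url, data.into()));
            Ok(())
        })
    }

    fn read<'a>(&'a self, url: &'a str) -> BoxedFuture<'a, Option<Box<[u8]>>> {
        Box::pin(async move {
            self.entries
                .lock()
                .unwrap()
                .iter()
                .find(|(stored_url, _)| stored_url == url)
                .map(|(_, data)| data.clone())
        })
    }
}

fn main() {
    let backend = MemoryBackend { entries: Mutex::new(Vec::new()) };
    smol::block_on(async {
        backend.store("gun.wav".into(), &[1, 2, 3]).await.unwrap();
        assert_eq!(&*backend.read("gun.wav").await.unwrap(), &[1, 2, 3]);
    });
}
```

Boxing the future keeps the trait object-safe, so callers such as `ResourceManager` can hold a `Box<dyn StorageBackend>` while the implementations stay async.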
+ resource: bson::Bson, + read_target: Option>, +} + +impl <'a> GenericResourceResponse<'a> { + pub fn new(url: String, class: String, size: usize, resource: bson::Bson,) -> Self { + GenericResourceResponse { + url, + class, + size, + resource, + read_target: None, + } + } + + pub fn set_read_target(&mut self, buffer: Box<[u8]>) { + self.read_target = Some(ReadTargets::Box(buffer)); + } +} + #[derive(Debug, Clone)] pub enum ProcessedResources { Generated((GenericResourceSerialization, Vec)), @@ -125,7 +159,7 @@ impl <'a> LoadResourceRequest<'a> { } } -pub struct ResourceResponse { +pub struct ResourceResponse<'a> { id: u64, url: String, size: u64, @@ -134,19 +168,21 @@ pub struct ResourceResponse { class: String, resource: Box, required_resources: Vec, + read_target: Option>, } -impl ResourceResponse { - pub fn new(r: &GenericResourceSerialization, resource: T) -> Self { +impl <'a> ResourceResponse<'a> { + pub fn new(r: GenericResourceResponse<'a>, resource: T) -> Self { ResourceResponse { id: 0, - url: r.url.clone(), + url: r.url, size: 0, offset: 0, hash: 0, - class: r.class.clone(), + class: r.class, resource: Box::new(resource), required_resources: Vec::new(), + read_target: r.read_target, } } } @@ -180,8 +216,8 @@ impl <'a> LoadRequest<'a> { } } -pub struct Response { - pub resources: Vec, +pub struct Response<'a> { + pub resources: Vec>, } /// Options for loading a resource. @@ -207,4 +243,163 @@ pub struct CreateInfo<'a> { pub name: &'a str, pub info: Box, pub data: &'a [u8], +} + +pub struct TypedResourceDocument { + url: String, + class: String, + resource: bson::Bson, +} + +impl TypedResourceDocument { + pub fn new(url: String, class: String, resource: bson::Bson) -> Self { + TypedResourceDocument { + url, + class, + resource, + } + } +} + +impl From for TypedResourceDocument { + fn from(value: GenericResourceSerialization) -> Self { + TypedResourceDocument::new(value.url, value.class, value.resource) + } +} + +pub trait StorageBackend: Sync + Send { + fn store<'a>(&'a self, resource: GenericResourceSerialization, data: &'a [u8]) -> utils::BoxedFuture<'a, Result<(), ()>>; + fn read<'a>(&'a self, id: &'a str) -> utils::BoxedFuture<'a, Option<(GenericResourceResponse, Box)>>; +} + +struct DbStorageBackend { + db: polodb_core::Database, +} + +impl DbStorageBackend { + pub fn new(path: &std::path::Path) -> Self { + let mut memory_only = false; + + if cfg!(test) { // If we are running tests we want to use memory database. This way we can run tests in parallel. + memory_only = true; + } + + let db_res = if !memory_only { + polodb_core::Database::open_file(path) + } else { + log::info!("Using memory database instead of file database."); + polodb_core::Database::open_memory() + }; + + let db = match db_res { + Ok(db) => db, + Err(_) => { + // Delete file and try again + std::fs::remove_file(path).unwrap(); + + log::warn!("Database file was corrupted, deleting and trying again."); + + let db_res = polodb_core::Database::open_file(path); + + match db_res { + Ok(db) => db, + Err(_) => match polodb_core::Database::open_memory() { // If we can't create a file database, create a memory database. This way we can still run the application. 
+ Ok(db) => { + log::error!("Could not create database file, using memory database instead."); + db + }, + Err(_) => panic!("Could not create database"), + } + } + } + }; + + DbStorageBackend { + db, + } + } + + fn resolve_resource_path(path: &std::path::Path) -> std::path::PathBuf { + if cfg!(test) { + std::env::temp_dir().join("resources").join(path) + } else { + std::path::PathBuf::from("resources/").join(path) + } + } +} + +impl StorageBackend for DbStorageBackend { + fn read<'a>(&'a self, id: &'a str) -> utils::BoxedFuture<'a, Option<(GenericResourceResponse, Box)>> { + Box::pin(async move { + let resource_document = self.db.collection::("resources").find_one(bson::doc! { "_id": id }).ok()??; + + let resource = { + let id = id.to_string(); + let class = resource_document.get_str("class").ok()?.to_string(); + let size = resource_document.get_i64("size").ok()? as usize; + let resource = resource_document.get("resource")?.clone(); + GenericResourceResponse::new(id, class, size, resource) + }; + + let resource_reader = FileResourceReader::new(smol::fs::File::open(Self::resolve_resource_path(std::path::Path::new(id))).await.ok()?); + + Some((resource, Box::new(resource_reader) as Box)) + }) + } + + fn store<'a>(&'a self, resource: GenericResourceSerialization, data: &'a [u8]) -> utils::BoxedFuture<'a, Result<(), ()>> { + Box::pin(async move { + let mut resource_document = bson::Document::new(); + + let mut hasher = std::collections::hash_map::DefaultHasher::new(); + + let size = 0usize; + let url = ""; + let class = ""; + + resource_document.insert("id", hasher.finish() as i64); + resource_document.insert("size", size as i64); + + resource_document.insert("url", url); + + // resource_package.0.url.hash(&mut hasher); + + resource_document.insert("class", class); + + let mut required_resources_json = bson::Array::new(); + + resource_document.insert("required_resources", required_resources_json); + + let json_resource = resource.resource.clone(); + + if let None = resource_document.get("hash") { + let mut hasher = std::collections::hash_map::DefaultHasher::new(); + + std::hash::Hasher::write(&mut hasher, data); // Hash binary data + + std::hash::Hasher::write(&mut hasher, &bson::to_vec(&json_resource).unwrap()); // Hash resource metadata, since changing the resources description must also change the hash. 
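Editor's note: the new `DbStorageBackend` keeps resource metadata in a polodb collection and the raw bytes in a side file named after the inserted `ObjectId`. A self-contained sketch of the metadata half, mirroring the `polodb_core` calls this diff itself uses (`open_memory`, `collection`, `insert_one`, `find_one`); exact signatures depend on the `polodb_core` version the crate pins, and the document fields here are illustrative.

```rust
use polodb_core::bson::{self, doc};

fn main() {
    // Memory database, as the backend selects under cfg!(test).
    let db = polodb_core::Database::open_memory().unwrap();
    let resources = db.collection::<bson::Document>("resources");

    let inserted = resources
        .insert_one(doc! { "url": "gun.wav", "class": "Audio", "size": 152456_i64 })
        .unwrap();

    // The ObjectId assigned on insert is what names the side file holding the raw bytes.
    let id = inserted.inserted_id.as_object_id().unwrap();
    println!("stored metadata under {}", id);

    // read() performs the same kind of lookup before handing the metadata to a handler.
    let found = resources.find_one(doc! { "url": "gun.wav" }).unwrap().unwrap();
    assert_eq!(found.get_str("class").unwrap(), "Audio");
}
```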
(For caching purposes) + + resource_document.insert("hash", hasher.finish() as i64); + } + + resource_document.insert("resource", json_resource); + + log::debug!("Generated resource: {:#?}", &resource_document); + + let insert_result = self.db.collection::("resources").insert_one(&resource_document).or(Err(()))?; + + let resource_id = insert_result.inserted_id.as_object_id().unwrap(); + + let resource_path = Self::resolve_resource_path(std::path::Path::new(&resource_id.to_string())); + + let mut file = smol::fs::File::create(resource_path).await.or(Err(()))?; + + file.write_all(data).await.or(Err(()))?; + file.flush().await.or(Err(()))?; // Must flush to ensure the file is written to disk, or else reads can cause failures + resource_document.insert("_id", resource_id); + + Ok(()) + }) + + } } \ No newline at end of file diff --git a/resource_management/src/resource/audio_resource_handler.rs b/resource_management/src/resource/audio_resource_handler.rs index 6c7b1b7e..c0246303 100644 --- a/resource_management/src/resource/audio_resource_handler.rs +++ b/resource_management/src/resource/audio_resource_handler.rs @@ -1,7 +1,7 @@ use polodb_core::bson; use serde::Deserialize; -use crate::{types::Audio, GenericResourceSerialization, ResourceResponse}; +use crate::{types::Audio, GenericResourceResponse, GenericResourceSerialization, ResourceResponse, TypedResourceDocument}; use super::resource_handler::{ReadTargets, ResourceHandler, ResourceReader}; @@ -20,17 +20,30 @@ impl ResourceHandler for AudioResourceHandler { &["Audio"] } - fn read<'a>(&'a self, resource: &'a GenericResourceSerialization, file: &'a mut dyn ResourceReader, buffers: &'a mut ReadTargets<'a>) -> utils::BoxedFuture<'a, Option> { + fn read<'a>(&'a self, mut resource: GenericResourceResponse<'a>, mut reader: Box,) -> utils::BoxedFuture<'a, Option> { Box::pin(async move { let audio_resource = Audio::deserialize(bson::Deserializer::new(resource.resource.clone().into())).ok()?; - match buffers { - ReadTargets::Buffer(buffer) => { - file.read_into(0, buffer).await?; - }, - _ => { - return None; + if let Some(read_target) = &mut resource.read_target { + match read_target { + ReadTargets::Buffer(buffer) => { + reader.read_into(0, buffer).await?; + }, + ReadTargets::Box(buffer) => { + reader.read_into(0, buffer).await?; + }, + _ => { + return None; + } + } + } else { + let mut buffer = Vec::with_capacity(resource.size); + unsafe { + buffer.set_len(resource.size); + } + reader.read_into(0, &mut buffer).await?; + resource.set_read_target(buffer.into_boxed_slice()); } Some(ResourceResponse::new(resource, audio_resource)) @@ -40,7 +53,9 @@ impl ResourceHandler for AudioResourceHandler { #[cfg(test)] mod tests { - use crate::{asset::{asset_handler::AssetHandler, audio_asset_handler::AudioAssetHandler, tests::{TestAssetResolver, TestStorageBackend}, StorageBackend}, resource::tests::TestResourceReader, types::{Audio, BitDepths}}; + use std::ops::DerefMut; + + use crate::{asset::{asset_handler::AssetHandler, audio_asset_handler::AudioAssetHandler, tests::{TestAssetResolver, TestStorageBackend},}, types::{Audio, BitDepths}, StorageBackend}; use super::*; @@ -64,17 +79,9 @@ mod tests { let audio_resource_handler = AudioResourceHandler::new(); - let (resource, data) = storage_backend.read(url).expect("Failed to read asset from storage"); + let (resource, mut reader) = smol::block_on(storage_backend.read(url)).expect("Failed to read asset from storage"); - let mut resource_reader = TestResourceReader::new(data); - - let mut buffer = vec![0; 
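Editor's note: `store()` derives the cache hash from both the binary payload and the BSON-serialized metadata, so editing either one invalidates the cached entry. A minimal standalone illustration of that scheme with std's `DefaultHasher`; `resource_hash` is a hypothetical helper for this example, not a function from the crate.

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::Hasher;

/// Combined hash over the raw payload and its serialized metadata, so a change
/// to either invalidates the cached entry — the same idea as store() above.
fn resource_hash(data: &[u8], serialized_metadata: &[u8]) -> u64 {
    let mut hasher = DefaultHasher::new();
    hasher.write(data);
    hasher.write(serialized_metadata);
    hasher.finish()
}

fn main() {
    let a = resource_hash(b"payload", br#"{"class":"Audio"}"#);
    let b = resource_hash(b"payload", br#"{"class":"Image"}"#);
    assert_ne!(a, b); // changing only the metadata changes the hash
}
```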
152456]; - - unsafe { - buffer.set_len(152456); - } - - let resource = smol::block_on(audio_resource_handler.read(&resource, &mut resource_reader, &mut ReadTargets::Buffer(&mut buffer))).unwrap(); + let resource = smol::block_on(audio_resource_handler.read(resource, reader,)).unwrap(); assert_eq!(resource.url, "gun.wav"); assert_eq!(resource.class, "Audio"); @@ -86,6 +93,13 @@ mod tests { assert_eq!(audio.sample_rate, 48000); assert_eq!(audio.sample_count, 152456 / 1 / (16 / 8)); - assert_eq!(buffer.len(), audio.sample_count as usize * audio.channel_count as usize * (Into::::into(audio.bit_depth) / 8) as usize); + match &resource.read_target.expect("Expected read target") { + ReadTargets::Box(buffer) => { + assert_eq!(buffer.len(), audio.sample_count as usize * audio.channel_count as usize * (Into::::into(audio.bit_depth) / 8) as usize); + }, + _ => { + panic!("Expected read target to be a buffer"); + }, + } } } \ No newline at end of file diff --git a/resource_management/src/resource/image_resource_handler.rs b/resource_management/src/resource/image_resource_handler.rs index 08597614..23146210 100644 --- a/resource_management/src/resource/image_resource_handler.rs +++ b/resource_management/src/resource/image_resource_handler.rs @@ -2,7 +2,7 @@ use polodb_core::bson; use serde::Deserialize; use smol::{fs::File, future::FutureExt, io::AsyncReadExt}; -use crate::{types::Image, GenericResourceSerialization, Resource, ResourceResponse, Stream}; +use crate::{types::Image, GenericResourceResponse, GenericResourceSerialization, Resource, ResourceResponse, Stream, TypedResourceDocument}; use super::resource_handler::{ReadTargets, ResourceHandler, ResourceReader}; @@ -21,18 +21,26 @@ impl ResourceHandler for ImageResourceHandler { &["Image"] } - fn read<'a>(&'a self, resource: &'a GenericResourceSerialization, file: &'a mut dyn ResourceReader, read_target: &'a mut ReadTargets<'a>) -> utils::BoxedFuture<'a, Option> { + fn read<'a>(&'a self, mut resource: GenericResourceResponse<'a>, mut reader: Box,) -> utils::BoxedFuture<'a, Option> { Box::pin(async move { let image_resource = Image::deserialize(bson::Deserializer::new(resource.resource.clone().into())).ok()?; - match read_target { - ReadTargets::Buffer(buffer) => { - file.read_into(0, buffer).await?; - }, - _ => { - return None; + if let Some(read_target) = &mut resource.read_target { + match read_target { + ReadTargets::Buffer(buffer) => { + reader.read_into(0, buffer).await?; + }, + _ => { + return None; + } + } - + } else { + let mut buffer = Vec::with_capacity(resource.size); + unsafe { + buffer.set_len(resource.size); + } + reader.read_into(0, &mut buffer).await?; } Some(ResourceResponse::new(resource, image_resource)) @@ -42,7 +50,7 @@ impl ResourceHandler for ImageResourceHandler { #[cfg(test)] mod tests { - use crate::{asset::{asset_handler::AssetHandler, image_asset_handler::{self, ImageAssetHandler}, tests::{TestAssetResolver, TestStorageBackend}, StorageBackend}, resource::{resource_manager::ResourceManager, tests::TestResourceReader}, types::Image}; + use crate::{asset::{asset_handler::AssetHandler, image_asset_handler::ImageAssetHandler, tests::{TestAssetResolver, TestStorageBackend},}, StorageBackend}; use super::*; @@ -66,17 +74,9 @@ mod tests { let image_resource_handler = ImageResourceHandler::new(); - let (resource, data) = storage_backend.read(url).expect("Failed to read asset from storage"); - - let mut resource_reader = TestResourceReader::new(data); - - let mut buffer = vec![0; 2048 * 2048 * 4]; - - unsafe { - 
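Editor's note: the audio and image handlers above no longer require the caller to pass a buffer — when `resource.read_target` is `None` they allocate one themselves and return it through the new `ReadTargets::Box` variant, which is what the updated audio test asserts on. A reduced, synchronous sketch of that fallback; `read_into` and `read_payload` are stand-ins for the crate's `ResourceReader::read_into` and handler logic, and the safe `vec![0u8; size]` replaces the `with_capacity`/`set_len` pair used in the diff.

```rust
enum ReadTargets<'a> {
    Box(Box<[u8]>),
    Buffer(&'a mut [u8]),
}

/// Stand-in for ResourceReader::read_into: copies `source` into `buffer` starting at `offset`.
fn read_into(source: &[u8], offset: usize, buffer: &mut [u8]) -> Option<()> {
    let end = offset.checked_add(buffer.len())?;
    buffer.copy_from_slice(source.get(offset..end)?);
    Some(())
}

fn read_payload<'a>(
    source: &[u8],
    size: usize,
    caller_target: Option<ReadTargets<'a>>,
) -> Option<ReadTargets<'a>> {
    match caller_target {
        // Caller provided storage: fill it in place and hand it back.
        Some(ReadTargets::Buffer(buffer)) => {
            read_into(source, 0, buffer)?;
            Some(ReadTargets::Buffer(buffer))
        }
        Some(ReadTargets::Box(mut buffer)) => {
            read_into(source, 0, &mut buffer)?;
            Some(ReadTargets::Box(buffer))
        }
        // No storage provided: allocate an owned buffer, as the handlers now do.
        None => {
            let mut buffer = vec![0u8; size]; // safe stand-in for with_capacity + set_len
            read_into(source, 0, &mut buffer)?;
            Some(ReadTargets::Box(buffer.into_boxed_slice()))
        }
    }
}

fn main() {
    let data = [7u8; 16];
    match read_payload(&data, data.len(), None).unwrap() {
        ReadTargets::Box(buffer) => assert_eq!(buffer.len(), 16),
        _ => panic!("expected an owned buffer"),
    }
}
```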
buffer.set_len(2048 * 2048 * 4); - } + let (resource, mut reader) = smol::block_on(storage_backend.read(url)).expect("Failed to read asset from storage"); - let resource = smol::block_on(image_resource_handler.read(&resource, &mut resource_reader, &mut ReadTargets::Buffer(&mut buffer))).expect("Failed to read image resource"); + let resource = smol::block_on(image_resource_handler.read(resource, reader,)).expect("Failed to read image resource"); let image = resource.resource.downcast_ref::().unwrap(); diff --git a/resource_management/src/resource/material_resource_handler.rs b/resource_management/src/resource/material_resource_handler.rs index 1e57d488..c61ebd8e 100644 --- a/resource_management/src/resource/material_resource_handler.rs +++ b/resource_management/src/resource/material_resource_handler.rs @@ -1,10 +1,9 @@ use polodb_core::bson; use serde::Deserialize; -use smol::{fs::File, io::AsyncReadExt}; -use crate::{types::{Material, Shader, ShaderTypes, Variant}, GenericResourceSerialization, ProcessedResources, Resource, ResourceResponse, Stream}; +use crate::{types::Material, GenericResourceResponse, GenericResourceSerialization, ResourceResponse, TypedResourceDocument}; -use super::{resource_handler::{ReadTargets, ResourceHandler, ResourceReader}, resource_manager::ResourceManager,}; +use super::resource_handler::{ReadTargets, ResourceHandler, ResourceReader}; pub struct MaterialResourcerHandler {} @@ -25,7 +24,7 @@ impl ResourceHandler for MaterialResourcerHandler { &["Material", "Shader", "Variant"] } - fn read<'a>(&'a self, resource: &'a GenericResourceSerialization, file: &'a mut dyn ResourceReader, _: &'a mut ReadTargets<'a>) -> utils::BoxedFuture<'a, Option> { + fn read<'a>(&'a self, mut resource: GenericResourceResponse<'a>, mut reader: Box,) -> utils::BoxedFuture<'a, Option> { // vec![("Material", // Box::new(|_document| { // Box::new(Material::deserialize(polodb_core::bson::Deserializer::new(_document.into())).unwrap()) @@ -56,20 +55,5 @@ mod tests { #[test] #[ignore] // We need to implement a shader generator to test this fn load_material() { - let mut resource_manager = ResourceManager::new(); - - resource_manager.add_resource_handler(super::MaterialResourcerHandler::new()); - - let (response, _) = smol::block_on(resource_manager.get("solid")).expect("Failed to load material"); - - assert_eq!(response.resources.len(), 2); // 1 material, 1 shader - - let resource_container = &response.resources[0]; - - assert_eq!(resource_container.class, "Shader"); - - let resource_container = &response.resources[1]; - - assert_eq!(resource_container.class, "Material"); } } \ No newline at end of file diff --git a/resource_management/src/resource/mesh_resource_handler.rs b/resource_management/src/resource/mesh_resource_handler.rs index 970b3448..a0ceba90 100644 --- a/resource_management/src/resource/mesh_resource_handler.rs +++ b/resource_management/src/resource/mesh_resource_handler.rs @@ -1,7 +1,7 @@ use polodb_core::bson; use serde::Deserialize; -use crate::{types::{IndexStreamTypes, Mesh, Size, VertexSemantics}, GenericResourceSerialization, ResourceResponse, Stream}; +use crate::{types::{IndexStreamTypes, Mesh, Size, VertexSemantics}, GenericResourceResponse, GenericResourceSerialization, ResourceResponse, Stream, TypedResourceDocument}; use super::resource_handler::{ReadTargets, ResourceHandler, ResourceReader}; @@ -20,19 +20,30 @@ impl ResourceHandler for MeshResourceHandler { &["Mesh"] } - fn read<'a>(&'a self, resource: &'a GenericResourceSerialization, file: &'a mut dyn 
ResourceReader, read_target: &'a mut ReadTargets<'a>) -> utils::BoxedFuture<'a, Option> { + fn read<'a>(&'a self, mut resource: GenericResourceResponse<'a>, mut reader: Box,) -> utils::BoxedFuture<'a, Option> { Box::pin(async move { let mesh_resource = Mesh::deserialize(bson::Deserializer::new(resource.resource.clone().into())).ok()?; - let mut buffers = match read_target { - ReadTargets::Streams(streams) => { - streams.iter_mut().map(|b| { - (b.name, utils::BufferAllocator::new(b.buffer)) - }).collect::>() + let mut buffers = if let Some(read_target) = &mut resource.read_target { + match read_target { + ReadTargets::Streams(streams) => { + streams.iter_mut().map(|b| { + (b.name, utils::BufferAllocator::new(b.buffer)) + }).collect::>() + } + _ => { + return None; + } + } - _ => { - return None; + } else { + let mut buffer = Vec::with_capacity(resource.size); + unsafe { + buffer.set_len(resource.size); } + reader.read_into(0, &mut buffer).await?; + + panic!(); }; for sub_mesh in &mesh_resource.sub_meshes { @@ -40,16 +51,16 @@ impl ResourceHandler for MeshResourceHandler { for (name, buffer) in &mut buffers { match *name { "Vertex" => { - file.read_into(0, buffer.take(primitive.vertex_count as usize * primitive.vertex_components.size())).await?; + reader.read_into(0, buffer.take(primitive.vertex_count as usize * primitive.vertex_components.size())).await?; } "Vertex.Position" => { - file.read_into(0, buffer.take(primitive.vertex_count as usize * 12)).await?; + reader.read_into(0, buffer.take(primitive.vertex_count as usize * 12)).await?; } "Vertex.Normal" => { #[cfg(debug_assertions)] if !primitive.vertex_components.iter().any(|v| v.semantic == VertexSemantics::Normal) { log::error!("Requested Vertex.Normal stream but mesh does not have normals."); continue; } - file.read_into(primitive.vertex_count as usize * 12, buffer.take(primitive.vertex_count as usize * 12)).await?; + reader.read_into(primitive.vertex_count as usize * 12, buffer.take(primitive.vertex_count as usize * 12)).await?; } "TriangleIndices" => { #[cfg(debug_assertions)] @@ -57,7 +68,7 @@ impl ResourceHandler for MeshResourceHandler { let triangle_index_stream = primitive.index_streams.iter().find(|stream| stream.stream_type == IndexStreamTypes::Triangles).unwrap(); - file.read_into(triangle_index_stream.offset as usize, buffer.take(triangle_index_stream.count as usize * triangle_index_stream.data_type.size())).await?; + reader.read_into(triangle_index_stream.offset as usize, buffer.take(triangle_index_stream.count as usize * triangle_index_stream.data_type.size())).await?; } "VertexIndices" => { #[cfg(debug_assertions)] @@ -65,7 +76,7 @@ impl ResourceHandler for MeshResourceHandler { let vertex_index_stream = primitive.index_streams.iter().find(|stream| stream.stream_type == IndexStreamTypes::Vertices).unwrap(); - file.read_into(vertex_index_stream.offset as usize, buffer.take(vertex_index_stream.count as usize * vertex_index_stream.data_type.size())).await?; + reader.read_into(vertex_index_stream.offset as usize, buffer.take(vertex_index_stream.count as usize * vertex_index_stream.data_type.size())).await?; } "MeshletIndices" => { #[cfg(debug_assertions)] @@ -73,7 +84,7 @@ impl ResourceHandler for MeshResourceHandler { let meshlet_indices_stream = primitive.index_streams.iter().find(|stream| stream.stream_type == IndexStreamTypes::Meshlets).unwrap(); - file.read_into(meshlet_indices_stream.offset as usize, buffer.take(meshlet_indices_stream.count as usize * meshlet_indices_stream.data_type.size())).await?; + 
reader.read_into(meshlet_indices_stream.offset as usize, buffer.take(meshlet_indices_stream.count as usize * meshlet_indices_stream.data_type.size())).await?; } "Meshlets" => { #[cfg(debug_assertions)] @@ -81,7 +92,7 @@ impl ResourceHandler for MeshResourceHandler { let meshlet_stream = primitive.meshlet_stream.as_ref().unwrap(); - file.read_into(meshlet_stream.offset as usize, buffer.take(meshlet_stream.count as usize * 2)).await?; + reader.read_into(meshlet_stream.offset as usize, buffer.take(meshlet_stream.count as usize * 2)).await?; } _ => { log::error!("Unknown buffer tag: {}", name); @@ -134,7 +145,9 @@ impl ResourceHandler for MeshResourceHandler { #[cfg(test)] mod tests { - use crate::{asset::{asset_handler::AssetHandler, mesh_asset_handler::MeshAssetHandler, tests::{TestAssetResolver, TestStorageBackend}, StorageBackend}, resource::{image_resource_handler::ImageResourceHandler, resource_manager::ResourceManager, tests::TestResourceReader}, types::{IndexStreamTypes, IntegralTypes, Mesh, VertexSemantics}, LoadRequest, LoadResourceRequest, Stream}; + use std::ops::DerefMut; + +use crate::{asset::{asset_handler::AssetHandler, mesh_asset_handler::MeshAssetHandler, tests::{TestAssetResolver, TestStorageBackend},}, resource::{image_resource_handler::ImageResourceHandler, resource_manager::ResourceManager, tests::TestResourceReader}, types::{IndexStreamTypes, IntegralTypes, Mesh, VertexSemantics}, LoadRequest, LoadResourceRequest, StorageBackend, Stream}; use super::*; @@ -159,9 +172,7 @@ mod tests { let mesh_resource_handler = MeshResourceHandler::new(); - let (resource, data) = storage_backend.read(url).expect("Failed to read asset from storage"); - - let mut resource_reader = TestResourceReader::new(data); + let (resource, mut reader) = smol::block_on(storage_backend.read(url)).expect("Failed to read asset from storage"); let mut vertex_positions_buffer = vec![0; 11808 * 12]; let mut vertex_normals_buffer = vec![0; 11808 * 12]; @@ -177,7 +188,9 @@ mod tests { meshlet_index_buffer.set_len(11808 * 3); } - let resource = smol::block_on(mesh_resource_handler.read(&resource, &mut resource_reader, &mut ReadTargets::Streams(&mut [Stream::new("Vertex.Position", &mut vertex_positions_buffer), Stream::new("Vertex.Normal", &mut vertex_normals_buffer), Stream::new("TriangleIndices", &mut index_buffer), Stream::new("Meshlets", &mut meshlet_buffer)]))).unwrap(); + //&mut ReadTargets::Streams(&mut [Stream::new("Vertex.Position", &mut vertex_positions_buffer), Stream::new("Vertex.Normal", &mut vertex_normals_buffer), Stream::new("TriangleIndices", &mut index_buffer), Stream::new("Meshlets", &mut meshlet_buffer)]) + + let resource = smol::block_on(mesh_resource_handler.read(resource, reader,)).unwrap(); let mesh = resource.resource.downcast_ref::().unwrap(); @@ -265,9 +278,7 @@ mod tests { let mesh_resource_handler = MeshResourceHandler::new(); - let (resource, data) = storage_backend.read(url).expect("Failed to read asset from storage"); - - let mut resource_reader = TestResourceReader::new(data); + let (resource, mut reader) = smol::block_on(storage_backend.read(url)).expect("Failed to read asset from storage"); let mut vertex_positions_buffer = vec![0; 24 * 12]; let mut vertex_normals_buffer = vec![0; 24 * 12]; @@ -283,7 +294,9 @@ mod tests { meshlet_index_buffer.set_len(36 * 3); } - let resource = smol::block_on(mesh_resource_handler.read(&resource, &mut resource_reader, &mut ReadTargets::Streams(&mut [Stream::new("Vertex.Position", &mut vertex_positions_buffer), Stream::new("Vertex.Normal", &mut 
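Editor's note: the mesh handler maps each requested stream name ("Vertex.Position", "TriangleIndices", ...) to a destination buffer and issues offset-based `read_into` calls per primitive. A reduced sketch of that bookkeeping over an in-memory blob; the names, sizes, and layout are illustrative only, and plain byte vectors stand in for `utils::BufferAllocator`.

```rust
use std::collections::HashMap;

/// Stand-in for ResourceReader::read_into with an explicit source offset.
fn read_into(source: &[u8], offset: usize, buffer: &mut [u8]) {
    buffer.copy_from_slice(&source[offset..offset + buffer.len()]);
}

fn main() {
    // Assumed layout of the cached blob: positions, then normals, then triangle indices.
    let vertex_count = 4usize;
    let positions_size = vertex_count * 12; // 3 * f32 per vertex
    let normals_size = vertex_count * 12;
    let indices_size = 6 * 2; // 6 * u16
    let blob = vec![0u8; positions_size + normals_size + indices_size];

    // Caller-provided destination buffers, keyed by stream name as in ReadTargets::Streams.
    let mut streams: HashMap<&str, Vec<u8>> = HashMap::from([
        ("Vertex.Position", vec![0u8; positions_size]),
        ("Vertex.Normal", vec![0u8; normals_size]),
        ("TriangleIndices", vec![0u8; indices_size]),
    ]);

    for (name, buffer) in &mut streams {
        match *name {
            "Vertex.Position" => read_into(&blob, 0, buffer),
            "Vertex.Normal" => read_into(&blob, positions_size, buffer),
            "TriangleIndices" => read_into(&blob, positions_size + normals_size, buffer),
            _ => eprintln!("Unknown buffer tag: {}", name),
        }
    }

    assert_eq!(streams["Vertex.Normal"].len(), normals_size);
}
```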
vertex_normals_buffer), Stream::new("TriangleIndices", &mut index_buffer), Stream::new("Meshlets", &mut meshlet_buffer)]))).unwrap(); + // &mut ReadTargets::Streams(&mut [Stream::new("Vertex.Position", &mut vertex_positions_buffer), Stream::new("Vertex.Normal", &mut vertex_normals_buffer), Stream::new("TriangleIndices", &mut index_buffer), Stream::new("Meshlets", &mut meshlet_buffer)]) + + let resource = smol::block_on(mesh_resource_handler.read(resource, reader,)).unwrap(); let mesh = resource.resource.downcast_ref::().unwrap(); diff --git a/resource_management/src/resource/resource_handler.rs b/resource_management/src/resource/resource_handler.rs index 0ce68eca..1cc5bb57 100644 --- a/resource_management/src/resource/resource_handler.rs +++ b/resource_management/src/resource/resource_handler.rs @@ -1,12 +1,16 @@ +use std::borrow::Cow; + use smol::{fs::File, io::{AsyncReadExt, AsyncSeekExt}}; -use crate::{GenericResourceSerialization, ResourceResponse, Stream}; +use crate::{GenericResourceResponse, ResourceResponse, Stream,}; pub enum ReadTargets<'a> { + Box(Box<[u8]>), Buffer(&'a mut [u8]), Streams(&'a mut [Stream<'a>]), } +/// The resource reader trait provides methods to read a single resource. pub trait ResourceReader { fn read_into<'a>(&'a mut self, offset: usize, buffer: &'a mut [u8]) -> utils::BoxedFuture<'a, Option<()>>; } @@ -33,10 +37,10 @@ impl ResourceReader for FileResourceReader { } } -pub trait ResourceHandler { +pub trait ResourceHandler: Send { fn get_handled_resource_classes<'a>(&self,) -> &'a [&'a str] { &[] } - fn read<'a>(&'a self, resource: &'a GenericResourceSerialization, reader: &'a mut dyn ResourceReader, read_target: &'a mut ReadTargets<'a>) -> utils::BoxedFuture>; + fn read<'a>(&'a self, resource: GenericResourceResponse<'a>, reader: Box,) -> utils::BoxedFuture<'a, Option>>; } \ No newline at end of file diff --git a/resource_management/src/resource/resource_manager.rs b/resource_management/src/resource/resource_manager.rs index c3e2dadc..8db317c6 100644 --- a/resource_management/src/resource/resource_manager.rs +++ b/resource_management/src/resource/resource_manager.rs @@ -1,9 +1,4 @@ -use std::hash::{Hash, Hasher}; -use futures::future::join_all; -use polodb_core::bson::oid::ObjectId; -use smol::{fs::File, io::{AsyncReadExt, AsyncWriteExt}}; -use crate::{GenericResourceSerialization, LoadRequest, LoadResourceRequest, LoadResults, Lox, ProcessedResources, Request, Resource, ResourceRequest, ResourceResponse, Response}; - +use crate::{DbStorageBackend, LoadRequest, LoadResults, Request, ResourceResponse, Response, StorageBackend}; use super::resource_handler::ResourceHandler; /// Resource manager. @@ -15,13 +10,8 @@ use super::resource_handler::ResourceHandler; /// When in a release build it will exclusively load resources from cache. /// /// If accessing the filesystem paths will be relative to the assets directory, and assets should omit the extension. -/// -/// The stored resource document is like the following: -/// ```json -/// { "_id":"OId" , "id": 01234, "path": "../..", "class": "X", "size": 0, "resource": { ... 
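Editor's note: the handler `read()` methods now receive a boxed `ResourceReader` (the `dyn` type parameter is elided by this diff's rendering), and the tests rely on a `TestResourceReader` that the diff does not show. A sketch of what an in-memory implementation of the trait declared in resource_handler.rs can look like, again assuming `utils::BoxedFuture` is a pinned, boxed, `Send` future alias; `InMemoryResourceReader` is a hypothetical name.

```rust
use std::future::Future;
use std::pin::Pin;

type BoxedFuture<'a, T> = Pin<Box<dyn Future<Output = T> + Send + 'a>>; // assumed alias

/// Trait shape as declared in resource_handler.rs.
trait ResourceReader {
    fn read_into<'a>(&'a mut self, offset: usize, buffer: &'a mut [u8]) -> BoxedFuture<'a, Option<()>>;
}

/// In-memory reader over a stored blob, in the spirit of the tests' TestResourceReader.
struct InMemoryResourceReader {
    data: Box<[u8]>,
}

impl ResourceReader for InMemoryResourceReader {
    fn read_into<'a>(&'a mut self, offset: usize, buffer: &'a mut [u8]) -> BoxedFuture<'a, Option<()>> {
        Box::pin(async move {
            let end = offset.checked_add(buffer.len())?;
            buffer.copy_from_slice(self.data.get(offset..end)?);
            Some(())
        })
    }
}

fn main() {
    let mut reader = InMemoryResourceReader { data: vec![1, 2, 3, 4].into_boxed_slice() };
    let mut out = [0u8; 2];
    smol::block_on(async {
        reader.read_into(2, &mut out).await.unwrap();
    });
    assert_eq!(out, [3, 4]);
}
```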
}, "hash": 0 } -/// ``` pub struct ResourceManager { - db: polodb_core::Database, + storage_backend: Box, resource_handlers: Vec>, } @@ -43,46 +33,19 @@ impl ResourceManager { } } - let mut args = std::env::args(); + // let mut args = std::env::args(); - let mut memory_only = args.find(|arg| arg == "--ResourceManager.memory_only").is_some(); + // let mut memory_only = args.find(|arg| arg == "--ResourceManager.memory_only").is_some(); - if cfg!(test) { // If we are running tests we want to use memory database. This way we can run tests in parallel. - memory_only = true; + ResourceManager { + storage_backend: Box::new(DbStorageBackend::new(&Self::resolve_resource_path(std::path::Path::new("resources.db")))), + resource_handlers: Vec::with_capacity(8), } + } - let db_res = if !memory_only { - polodb_core::Database::open_file(Self::resolve_resource_path(std::path::Path::new("resources.db"))) - } else { - log::info!("Using memory database instead of file database."); - polodb_core::Database::open_memory() - }; - - let db = match db_res { - Ok(db) => db, - Err(_) => { - // Delete file and try again - std::fs::remove_file(Self::resolve_resource_path(std::path::Path::new("resources.db"))).unwrap(); - - log::warn!("Database file was corrupted, deleting and trying again."); - - let db_res = polodb_core::Database::open_file(Self::resolve_resource_path(std::path::Path::new("resources.db"))); - - match db_res { - Ok(db) => db, - Err(_) => match polodb_core::Database::open_memory() { // If we can't create a file database, create a memory database. This way we can still run the application. - Ok(db) => { - log::error!("Could not create database file, using memory database instead."); - db - }, - Err(_) => panic!("Could not create database"), - } - } - } - }; - + pub fn new_with_storage_backend(storage_backend: T) -> Self { ResourceManager { - db, + storage_backend: Box::new(storage_backend), resource_handlers: Vec::with_capacity(8), } } @@ -98,30 +61,20 @@ impl ResourceManager { /// If the resource is in cache but it's data cannot be parsed, it will return None. /// Return is a tuple containing the resource description and it's associated binary data.\ /// The requested resource will always the last one in the array. With the previous resources being the ones it depends on. This way when iterating the array forward the dependencies will be loaded first. - pub async fn get(&self, path: &str) -> Option<(Response, Vec)> { - todo!(); - - // let request = self.load_from_cache_or_source(path).await?; - - // let size = request.resources.iter().map(|r| r.size).sum::() as usize; - - // let mut buffer = Vec::with_capacity(size); + pub async fn get<'a>(&'a self, id: &'a str) -> Option> { + let load: ResourceResponse<'a> = { + let (resource, reader) = self.storage_backend.read(id).await?; - // unsafe { buffer.set_len(size); } - - // let mut a = utils::BufferAllocator::new(&mut buffer); - - // let request = request.resources.into_iter().map(|r| { let size = r.size as usize; LoadResourceRequest::new(r).buffer(a.take(size)) }).collect::>(); - - // let response = self.load_data_from_cache(LoadRequest::new(request),).await.ok()?; + self.resource_handlers.iter().find(|rh| rh.get_handled_resource_classes().contains(&resource.class.as_str()))?.read(resource, reader).await? 
+ }; - // Some((response, buffer)) + Some(load) } /// Tries to load the information/metadata for a resource (and it's dependencies).\ /// This is a more advanced version of get() as it allows to use your own buffer and/or apply some transformation to the resources when loading.\ - /// The result of this function can be later fed into `load_resource()` which will load the binary data. - pub async fn request_resource(&self, path: &str) -> Option { + /// The result of this function can be later fed into `load()` which will load the binary data. + pub async fn request(&self, id: &str) -> Option { // let request = self.load_from_cache_or_source(path).await?; // Some(request) todo!() @@ -132,150 +85,83 @@ impl ResourceManager { /// If no buffer range is provided it will return the data in a vector. /// /// If a buffer is not provided for a resurce in the options parameters it will be either be loaded into the provided buffer or returned in a vector. - /// - /// Options: Let's you specify how to load the resources. - /// ```json - /// { "resources": [{ "path": "../..", "buffer":{ "index": 0, "offset": 0 } }]} - /// ``` - pub async fn load_resource<'a>(&self, request: LoadRequest<'a>) -> Result<(Response, Option>), LoadResults> { - let response = self.load_data_from_cache(request).await?; - Ok((response, None)) + pub async fn load<'a>(&self, request: LoadRequest<'a>) -> Result { + Err(LoadResults::LoadFailed) } - /// Stores the asset as a resource. - /// Returns the resource document. - async fn write_resource_to_cache(&self, resource_package: &(GenericResourceSerialization, Vec)) -> Option { - let mut resource_document = polodb_core::bson::Document::new(); - - let mut hasher = std::collections::hash_map::DefaultHasher::new(); + fn resolve_resource_path(path: &std::path::Path) -> std::path::PathBuf { + if cfg!(test) { + std::env::temp_dir().join("resources").join(path) + } else { + std::path::PathBuf::from("resources/").join(path) + } + } +} - resource_document.insert("id", hasher.finish() as i64); - resource_document.insert("size", resource_package.1.len() as i64); +// TODO: test resource caching - resource_document.insert("url", resource_package.0.url.clone()); - resource_package.0.url.hash(&mut hasher); +#[cfg(test)] +mod tests { + // TODO: test resource load order - resource_document.insert("class", resource_package.0.class.clone()); + use crate::{asset::tests::TestStorageBackend, resource::resource_handler::ResourceReader, GenericResourceResponse, GenericResourceSerialization, LoadResourceRequest, Resource}; - let mut required_resources_json = polodb_core::bson::Array::new(); + use super::*; - for required_resources in &resource_package.0.required_resources { // TODO: make new type that gives a guarantee that these resources have been loaded - match required_resources { - ProcessedResources::Generated(g) => { - required_resources_json.push(polodb_core::bson::Bson::String(g.0.url.clone())); - }, - ProcessedResources::Reference(r) => { - required_resources_json.push(polodb_core::bson::Bson::String(r.clone())); - } - } + struct MyResourceHandler {} + impl MyResourceHandler { + pub fn new() -> Self { + MyResourceHandler {} } + } - resource_document.insert("required_resources", required_resources_json); - - let json_resource = resource_package.0.resource.clone(); - - if let None = resource_document.get("hash") { - let mut hasher = std::collections::hash_map::DefaultHasher::new(); - - std::hash::Hasher::write(&mut hasher, resource_package.1.as_slice()); // Hash binary data + impl Resource for () { 
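Editor's note: `ResourceManager::get` now asks the storage backend for the metadata and reader, then picks the handler whose `get_handled_resource_classes()` contains the stored class before delegating the read. A minimal sketch of that selection step; the concrete handler types and the `name()` method exist only for this example.

```rust
trait ResourceHandler {
    fn get_handled_resource_classes<'a>(&self) -> &'a [&'a str] {
        &[]
    }
    fn name(&self) -> &'static str; // illustrative only
}

struct AudioHandler;
impl ResourceHandler for AudioHandler {
    fn get_handled_resource_classes<'a>(&self) -> &'a [&'a str] {
        &["Audio"]
    }
    fn name(&self) -> &'static str { "AudioHandler" }
}

struct MeshHandler;
impl ResourceHandler for MeshHandler {
    fn get_handled_resource_classes<'a>(&self) -> &'a [&'a str] {
        &["Mesh"]
    }
    fn name(&self) -> &'static str { "MeshHandler" }
}

fn main() {
    let handlers: Vec<Box<dyn ResourceHandler>> = vec![Box::new(AudioHandler), Box::new(MeshHandler)];
    let class = "Mesh"; // would come from the stored resource metadata

    // Same selection get() performs before handing resource and reader to the handler.
    let handler = handlers
        .iter()
        .find(|h| h.get_handled_resource_classes().contains(&class))
        .expect("no handler for class");

    assert_eq!(handler.name(), "MeshHandler");
}
```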
+ fn get_class(&self) -> &'static str { + "MyResource" + } + } - std::hash::Hasher::write(&mut hasher, &polodb_core::bson::to_vec(&json_resource).unwrap()); // Hash resource metadata, since changing the resources description must also change the hash. (For caching purposes) + impl ResourceHandler for MyResourceHandler { + fn get_handled_resource_classes<'a>(&self,) -> &'a [&'a str] { + &["MyResource"] + } - resource_document.insert("hash", hasher.finish() as i64); + fn read<'a>(&'a self, r: GenericResourceResponse<'a>, _: Box,) -> utils::BoxedFuture> { + Box::pin(async move { + Some(ResourceResponse::new(r, ())) + }) } + } - resource_document.insert("resource", json_resource); + // #[test] + // fn get() { + // let storage_backend = TestStorageBackend::new(); - log::debug!("Generated resource: {:#?}", &resource_document); + // smol::block_on(storage_backend.store(GenericResourceSerialization::new("test".to_string(), ()), &[])).expect("Failed to store resource"); - let insert_result = self.db.collection::("resources").insert_one(&resource_document).ok()?; + // let mut resource_manager = ResourceManager::new_with_storage_backend(storage_backend); - let resource_id = insert_result.inserted_id.as_object_id()?; + // resource_manager.add_resource_handler(MyResourceHandler::new()); - let resource_path = Self::resolve_resource_path(std::path::Path::new(&resource_id.to_string())); + // smol::block_on(resource_manager.get("test")).unwrap(); + // } - let mut file = smol::fs::File::create(resource_path).await.ok()?; + // #[test] + // fn request() { + // let storage_backend = TestStorageBackend::new(); - file.write_all(resource_package.1.as_slice()).await.ok()?; - file.flush().await.ok()?; // Must flush to ensure the file is written to disk, or else reads can cause failures + // smol::block_on(storage_backend.store(GenericResourceSerialization::new("test".to_string(), ()), &[])).expect("Failed to store resource"); - resource_document.insert("_id", resource_id); + // let mut resource_manager = ResourceManager::new_with_storage_backend(storage_backend); - Some(resource_document) - } + // resource_manager.add_resource_handler(MyResourceHandler::new()); - /// Tries to load a resource from cache.\ - /// If the resource cannot be found/loaded or if it's become stale it will return None. 
- async fn load_data_from_cache<'a>(&self, request: LoadRequest<'a>) -> Result { - todo!(); - // let offset = 0usize; - - // let resources = request.resources.into_iter().map(|resource_container| { // Build responses - // let response = ResourceResponse { - // id: resource_container.resource_request.id, - // url: resource_container.resource_request.url, - // size: resource_container.resource_request.size, - // offset: offset as u64, - // hash: resource_container.resource_request.hash, - // class: resource_container.resource_request.class, - // resource: resource_container.resource_request.resource, - // required_resources: resource_container.resource_request.required_resources, - // }; - - // (resource_container.resource_request._id, resource_container.streams, response) - // }).map(async move |(db_resource_id, slice, response)| { // Load resources - // let native_db_resource_id = db_resource_id.to_string(); - - // let mut file = match File::open(Self::resolve_resource_path(std::path::Path::new(&native_db_resource_id))).await { - // Ok(it) => it, - // Err(reason) => { - // match reason { // TODO: handle specific errors - // _ => return Err(LoadResults::CacheFileNotFound), - // } - // } - // }; - - // match slice { - // Lox::None => {} - // Lox::Buffer(buffer) => { - // match file.read_exact(buffer).await { - // Ok(_) => {}, - // Err(_) => { - // return Err(LoadResults::LoadFailed); - // } - // } - // } - // Lox::Streams(mut streams) => { - // if let Some(resource_handler) = self.resource_handlers.iter().find(|h| h.can_handle_type(response.class.as_str())) { - // resource_handler.read(response.resource.deref(), &mut file, &mut streams).await; - // } else { - // log::warn!("No resource handler could handle resource: {}", response.url); - // } - // } - // } - - // Ok(response) - // }).collect::>(); - - // let resources = join_all(resources).await.into_iter().collect::, _>>()?; - - // return Ok(Response { resources }); - } + // let request = smol::block_on(resource_manager.request("test")).unwrap(); - fn resolve_resource_path(path: &std::path::Path) -> std::path::PathBuf { - if cfg!(test) { - std::env::temp_dir().join("resources").join(path) - } else { - std::path::PathBuf::from("resources/").join(path) - } - } -} + // assert_eq!(request.resources.len(), 1); -// TODO: test resource caching + // let request = LoadRequest::new(request.resources.into_iter().map(|r| LoadResourceRequest::new(r)).collect::>()); -#[cfg(test)] -mod tests { - // TODO: test resource load order - - use super::*; + // smol::block_on(resource_manager.load(request)).expect("Failed to load resource"); + // } } \ No newline at end of file diff --git a/resource_management/src/shader_generation.rs b/resource_management/src/shader_generation.rs index 89c19d1f..a41181c8 100644 --- a/resource_management/src/shader_generation.rs +++ b/resource_management/src/shader_generation.rs @@ -1,4 +1,4 @@ -use std::{cell::RefCell, collections::{HashMap, HashSet}, ops::Deref}; +use std::{cell::RefCell, collections::HashSet}; pub struct ShaderGenerator { minified: bool, @@ -54,8 +54,6 @@ pub struct ShaderCompilation { impl ShaderCompilation { pub fn generate_shader(&mut self, main_function_node: &jspd::NodeReference) -> String { - // let mut string = shader_generator::generate_glsl_header_block(&shader_generator::ShaderGenerationSettings::new("Compute")); - let mut string = String::with_capacity(2048); self.generate_shader_internal(&mut string, main_function_node); @@ -64,7 +62,6 @@ impl ShaderCompilation { } pub fn generate_glsl_shader(&mut 
self, shader_compilation_settings: &ShaderGenerationSettings, main_function_node: &jspd::NodeReference) -> String { - // let mut string = shader_generator::generate_glsl_header_block(&shader_generator::ShaderGenerationSettings::new("Compute")); let mut string = String::with_capacity(2048); self.generate_shader_internal(&mut string, main_function_node); diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 3bb89e2b..f75523bf 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,2 +1,2 @@ [toolchain] -channel = "nightly-2024-02-14" \ No newline at end of file +channel = "nightly-2024-02-24" \ No newline at end of file