Commit
Applied some Clippy suggestions.
facundo-villa committed Nov 3, 2023
1 parent fcd221d commit c0ad7ad
Showing 22 changed files with 183 additions and 226 deletions.
4 changes: 2 additions & 2 deletions src/application.rs
@@ -52,7 +52,7 @@ impl Application for BaseApplication {
info!("Deinitialized base Byte-Engine application.");
}

fn tick(&mut self) { return; }
fn tick(&mut self) {}

fn get_name(&self) -> String { self.name.clone() }
}
@@ -171,7 +171,7 @@ impl Application for GraphicsApplication {

let _: orchestrator::EntityHandle<window_system::Window> = orchestrator.spawn(window_system::Window{ name: "Main Window".to_string(), extent: crate::Extent { width: 1920, height: 1080, depth: 1 }, id_name: "main_window".to_string() });

GraphicsApplication { application, file_tracker_handle: file_tracker_handle, window_system_handle, input_system_handle, mouse_device_handle, visibility_render_domain_handle, tick_count: 0, render_system_handle }
GraphicsApplication { application, file_tracker_handle, window_system_handle, input_system_handle, mouse_device_handle, visibility_render_domain_handle, tick_count: 0, render_system_handle }
}

fn initialize(&mut self, _arguments: std::env::Args) {
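
The two edits above are Clippy's needless_return and redundant_field_names suggestions. A minimal standalone sketch of both patterns, using a hypothetical Widget type rather than the engine's own types:

struct Widget { name: String }

impl Widget {
    // Before: `fn tick(&mut self) { return; }`; the bare `return` adds nothing.
    fn tick(&mut self) {}

    // Before: `Widget { name: name }`; field-init shorthand drops the repetition.
    fn new(name: String) -> Widget {
        Widget { name }
    }
}

fn main() {
    let mut w = Widget::new("main_window".to_string());
    w.tick();
    println!("{}", w.name);
}
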
2 changes: 1 addition & 1 deletion src/executor.rs
@@ -126,7 +126,7 @@ impl Executor {
let mut future_slot = task.future.lock().unwrap();
if let Some(mut future) = future_slot.take() {
let waker = futures::task::waker_ref(&task);
let context = &mut futures::task::Context::from_waker(&*waker);
let context = &mut futures::task::Context::from_waker(&waker);
if let std::task::Poll::Pending = future.as_mut().poll(context) {
*future_slot = Some(future);
}
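
The &*waker to &waker change drops an explicit deref-and-reborrow: waker_ref returns a guard that derefs to Waker, so deref coercion already produces the &Waker that Context::from_waker needs. A generic sketch of the same pattern with a hypothetical Guard type (not the futures API itself):

use std::ops::Deref;

struct Guard(String);

impl Deref for Guard {
    type Target = str;
    fn deref(&self) -> &str { &self.0 }
}

fn takes_str(s: &str) -> usize { s.len() }

fn main() {
    let guard = Guard("waker".to_string());
    // Before: takes_str(&*guard); the explicit deref-then-borrow is what Clippy flags.
    let n = takes_str(&guard); // deref coercion yields the same &str
    assert_eq!(n, 5);
}
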
2 changes: 1 addition & 1 deletion src/file_tracker.rs
@@ -99,7 +99,7 @@ impl FileTracker {

let res = self.db.collection::<polodb_core::bson::Document>("files").find_one(polodb_core::bson::doc! { "path": path.to_str().unwrap(),}).unwrap();

if !res.is_some() {
if res.is_none() {
self.db.collection("files").insert_one(polodb_core::bson::doc! {
"path": path.to_str().unwrap(),
"last_modified": _m as i64,
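
Replacing !res.is_some() with res.is_none() is the usual Option simplification Clippy suggests; a minimal sketch of the same check:

fn main() {
    let cached: Option<i64> = None;
    // Before: if !cached.is_some() { ... }
    if cached.is_none() {
        println!("no cached entry, insert one");
    }
}
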
6 changes: 3 additions & 3 deletions src/input_manager.rs
@@ -324,7 +324,7 @@ impl InputManager {
/// ```
pub fn register_input_source(&mut self, device_handle: &DeviceClassHandle, name: &str, value_type: InputTypes) -> InputSourceHandle {
let input_source = InputSource {
device_class_handle: device_handle.clone(),
device_class_handle: *device_handle,
name: name.to_string(),
type_: value_type,
};
@@ -506,7 +506,7 @@ impl InputManager {
InputTypes::Float(_) => std::mem::discriminant(&value) == std::mem::discriminant(&Value::Float(0f32)),
InputTypes::Int(_) => std::mem::discriminant(&value) == std::mem::discriminant(&Value::Int(0)),
InputTypes::Rgba(_) => std::mem::discriminant(&value) == std::mem::discriminant(&Value::Rgba(RGBA { r: 0f32, g: 0f32, b: 0f32, a: 0f32 })),
InputTypes::Vector2(_) => std::mem::discriminant(&value) == std::mem::discriminant(&&Value::Vector2(Vector2 { x: 0f32, y: 0f32 })),
InputTypes::Vector2(_) => std::mem::discriminant(&value) == std::mem::discriminant(&Value::Vector2(Vector2 { x: 0f32, y: 0f32 })),
InputTypes::Vector3(_) => std::mem::discriminant(&value) == std::mem::discriminant(&Value::Vector3(Vector3 { x: 0f32, y: 0f32, z: 0f32 })),
InputTypes::Quaternion(_) => std::mem::discriminant(&value) == std::mem::discriminant(&Value::Quaternion(Quaternion::identity())),
};
@@ -576,7 +576,7 @@ impl InputManager {
pub fn update(&mut self, orchestrator: orchestrator::OrchestratorReference) {
let records = &self.records;

if records.len() == 0 { return; }
if records.is_empty() { return; }

for (i, action) in self.actions.iter().enumerate() {
let action_records = records.iter().filter(|r| action.input_event_descriptions.iter().any(|ied| ied.input_source_handle == r.input_source_handle));
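
Three lints appear in this file: clone_on_copy (a Copy handle can be dereferenced instead of cloned), a doubled borrow in the discriminant comparison, and len_zero (is_empty over len() == 0). A compact sketch with a hypothetical Handle type:

#[derive(Clone, Copy)]
struct Handle(u32);

fn main() {
    let handle = Handle(7);
    let handle_ref = &handle;
    // Before: handle_ref.clone(); on a Copy type a plain dereference is enough.
    let copied: Handle = *handle_ref;

    let records = vec![copied.0];
    // Before: if records.len() == 0 { return; }
    if records.is_empty() {
        return;
    }

    // std::mem::discriminant only needs a single borrow; `&&value` still compiles
    // through auto-deref, but the extra `&` is noise, which is what was flagged above.
    let value = Some(3);
    assert_eq!(std::mem::discriminant(&value), std::mem::discriminant(&Some(0)));
}
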
52 changes: 26 additions & 26 deletions src/jspd/lexer.rs
@@ -102,7 +102,7 @@ fn execute_lexers<'a>(lexers: &[Lexer<'a>], iterator: std::slice::Iter<'a, Strin
}
}

return Err(LexError::Undefined); // No lexer could handle this syntax.
Err(LexError::Undefined) // No lexer could handle this syntax.
}

/// Tries to execute a list of lexers on a stream of tokens. But it's ok if none of them can handle the syntax.
@@ -113,7 +113,7 @@ fn try_execute_lexers<'a>(lexers: &[Lexer<'a>], iterator: std::slice::Iter<'a, S
}
}

return None;
None
}

fn lex_parsed_node(parser_node: &parser::Node, parser_program: &parser::ProgramState, program: &mut ProgramState) -> Result<Rc<Node>, LexError> {
@@ -125,9 +125,9 @@ fn lex_parsed_node(parser_node: &parser::Node, parser_program: &parser::ProgramS
ch.push(lex_parsed_node(child, parser_program, program)?);
}

return Ok(Rc::new(Node {
Ok(Rc::new(Node {
node: Nodes::Scope{ name: name.clone(), children: ch, }
}));
}))
}
parser::Nodes::Struct { name, fields } => {
if let Some(n) = program.types.get(name) { // If the type already exists, return it.
@@ -154,7 +154,7 @@ fn lex_parsed_node(parser_node: &parser::Node, parser_program: &parser::ProgramS
program.types.insert(name.clone(), node.clone());
program.types.insert(format!("{}*", name.clone()), node.clone());

return Ok(node);
Ok(node)
}
parser::Nodes::Member { name, r#type } => {
let t = if r#type.contains('<') {
@@ -166,19 +166,19 @@ fn lex_parsed_node(parser_node: &parser::Node, parser_program: &parser::ProgramS

let inner_type_name = s.next().ok_or(LexError::Undefined)?;

let inner_type = if inner_type_name.ends_with('*') {
let inner_type = if let Some(stripped) = inner_type_name.strip_suffix('*') {
let x = Rc::new(
Node {
node: Nodes::Struct {
name: format!("{}*", &inner_type_name[..inner_type_name.len() - 1]),
name: format!("{}*", stripped),
template: Some(outer_type.clone()),
fields: Vec::new(),
types: Vec::new(),
},
}
);

program.types.insert(format!("{}*", &inner_type_name[..inner_type_name.len() - 1]), x.clone());
program.types.insert(format!("{}*", stripped), x.clone());

x
} else {
@@ -214,12 +214,12 @@ fn lex_parsed_node(parser_node: &parser::Node, parser_program: &parser::ProgramS
lex_parsed_node(t, parser_program, program)?
};

return Ok(Rc::new(Node {
Ok(Rc::new(Node {
node: Nodes::Member {
name: name.clone(),
r#type: t,
},
}));
}))
}
parser::Nodes::Function { name, params, return_type, statements, raw } => {
let t = parser_program.types.get(return_type.as_str()).ok_or(LexError::NoSuchType{ type_name: return_type.clone() })?;
@@ -238,38 +238,38 @@ fn lex_parsed_node(parser_node: &parser::Node, parser_program: &parser::ProgramS
parser::Nodes::Expression(expression) => {
match expression {
parser::Expressions::Accessor{ left, right } => {
return Ok(Rc::new(Node {
Ok(Rc::new(Node {
node: Nodes::Expression(Expressions::Accessor {
left: lex_parsed_node(&left, parser_program, program)?,
right: lex_parsed_node(&right, parser_program, program)?,
left: lex_parsed_node(left, parser_program, program)?,
right: lex_parsed_node(right, parser_program, program)?,
}),
}));
}))
}
parser::Expressions::Member{ name } => {
return Ok(Rc::new(Node {
Ok(Rc::new(Node {
node: Nodes::Expression(Expressions::Member {
name: name.clone(),

}),
}));
}))
}
parser::Expressions::Literal{ value } => {
return Ok(Rc::new(Node {
Ok(Rc::new(Node {
node: Nodes::Expression(Expressions::Literal {
value: value.clone(),
}),
}));
}))
}
parser::Expressions::FunctionCall{ name, parameters } => {
return Ok(Rc::new(Node {
Ok(Rc::new(Node {
node: Nodes::Expression(Expressions::FunctionCall {
name: name.clone(),
parameters: parameters.iter().map(|e| lex_parsed_node(e, parser_program, program).unwrap()).collect(),
}),
}));
}))
}
parser::Expressions::Operator{ name, left, right } => {
return Ok(Rc::new(Node {
Ok(Rc::new(Node {
node: Nodes::Expression(Expressions::Operator {
operator: match name.as_str() {
"+" => Operators::Plus,
Expand All @@ -281,19 +281,19 @@ fn lex_parsed_node(parser_node: &parser::Node, parser_program: &parser::ProgramS
"==" => Operators::Equality,
_ => { panic!("Invalid operator") }
},
left: lex_parsed_node(&left, parser_program, program)?,
right: lex_parsed_node(&right, parser_program, program)?,
left: lex_parsed_node(left, parser_program, program)?,
right: lex_parsed_node(right, parser_program, program)?,
}),
}));
}))
}
parser::Expressions::VariableDeclaration{ name, r#type } => {
return Ok(Rc::new(Node {
Ok(Rc::new(Node {
node: Nodes::Expression(Expressions::VariableDeclaration {
name: name.clone(),
// r#type: lex_parsed_node(&r#type, parser_program, program)?,
r#type: r#type.clone(),
}),
}));
}))
}
}
}
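
Most of this file is the needless_return rewrite (each match arm now ends in a plain expression) plus needless borrows dropped on the lex_parsed_node arguments. The more interesting change is manual_strip, where slicing off a trailing '*' by index becomes strip_suffix. A small sketch of that pattern, with made-up type names:

fn pointer_base(type_name: &str) -> &str {
    // Before: if type_name.ends_with('*') { &type_name[..type_name.len() - 1] } else { type_name }
    if let Some(stripped) = type_name.strip_suffix('*') {
        stripped
    } else {
        type_name
    }
}

fn main() {
    assert_eq!(pointer_base("vec4f*"), "vec4f");
    assert_eq!(pointer_base("float32_t"), "float32_t");
}
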
4 changes: 2 additions & 2 deletions src/jspd/mod.rs
@@ -13,7 +13,7 @@ pub(crate) fn compile_to_jspd(source: &str) -> Result<lexer::Node, CompilationEr
let (parser_root_node, parser_program) = parser::parse(tokens).map_err(|_e| CompilationError::Undefined)?;
let jspd = lexer::lex(&parser_root_node, &parser_program).map_err(|_e| CompilationError::Undefined)?;

return Ok(jspd);
Ok(jspd)
}

#[derive(Debug)]
@@ -100,7 +100,7 @@ pub(crate) fn json_to_jspd(source: &json::JsonValue) -> Result<lexer::Node, ()>
_ => { panic!("Unsupported node type;") }
};

return Ok(Rc::new(parser_node));
Ok(Rc::new(parser_node))
}

let mut parser_program = parser::ProgramState {
35 changes: 17 additions & 18 deletions src/jspd/parser.rs
@@ -75,10 +75,9 @@ pub(super) fn parse(tokens: Vec<String>) -> Result<(Node, ProgramState), Parsing
}
}

return Ok((make_scope("root", children), program_state));
Ok((make_scope("root", children), program_state))
}

use std::borrow::BorrowMut;
use std::{collections::HashMap, rc::Rc};

#[derive(Clone, Debug)]
@@ -225,7 +224,7 @@ fn execute_parsers<'a>(parsers: &[FeatureParser<'a>], mut iterator: std::slice::
}
}

return Err(ParsingFailReasons::BadSyntax{ message: format!("Tried several parsers none could handle the syntax for statement: {}", iterator.next().unwrap()) }); // No parser could handle this syntax.
Err(ParsingFailReasons::BadSyntax{ message: format!("Tried several parsers none could handle the syntax for statement: {}", iterator.next().unwrap()) }) // No parser could handle this syntax.
}

/// Tries to execute a list of parsers on a stream of tokens. But it's ok if none of them can handle the syntax.
Expand All @@ -236,7 +235,7 @@ fn try_execute_parsers<'a>(parsers: &[FeatureParser<'a>], iterator: std::slice::
}
}

return None;
None
}

/// Execute a list of parsers on a stream of tokens.
Expand All @@ -247,7 +246,7 @@ fn execute_expression_parsers<'a>(parsers: &[ExpressionParser<'a>], mut iterator
}
}

return Err(ParsingFailReasons::BadSyntax{ message: format!("Tried several parsers none could handle the syntax for statement: {}", iterator.next().unwrap()) }); // No parser could handle this syntax.
Err(ParsingFailReasons::BadSyntax{ message: format!("Tried several parsers none could handle the syntax for statement: {}", iterator.next().unwrap()) }) // No parser could handle this syntax.
}

/// Tries to execute a list of parsers on a stream of tokens. But it's ok if none of them can handle the syntax.
Expand All @@ -258,7 +257,7 @@ fn try_execute_expression_parsers<'a>(parsers: &[ExpressionParser<'a>], iterator
}
}

return None;
None
}

fn parse_member<'a>(mut iterator: std::slice::Iter<'a, String>, program: &ProgramState) -> FeatureParserResult<'a> {
Expand All @@ -281,7 +280,7 @@ fn parse_member<'a>(mut iterator: std::slice::Iter<'a, String>, program: &Progra

iterator.next().ok_or(ParsingFailReasons::BadSyntax{ message: format!("Expected semicolon") })?; // Skip semicolon

return Ok(((node, program.clone()), iterator));
Ok(((node, program.clone()), iterator))
}

fn parse_macro<'a>(iterator: std::slice::Iter<'a, String>, program: &ProgramState) -> FeatureParserResult<'a> {
Expand All @@ -292,7 +291,7 @@ fn parse_macro<'a>(iterator: std::slice::Iter<'a, String>, program: &ProgramStat
iter.next().ok_or(ParsingFailReasons::BadSyntax{ message: format!("Expected to find macro name.") })?;
iter.next().and_then(|v| if v == "]" { Some(v) } else { None }).ok_or(ParsingFailReasons::BadSyntax{ message: format!("Expected to find ] after macro.") })?;

return Ok(((Rc::new(make_scope("MACRO", vec![])), program.clone()), iter));
Ok(((Rc::new(make_scope("MACRO", vec![])), program.clone()), iter))
}

fn parse_struct<'a>(mut iterator: std::slice::Iter<'a, String>, program: &ProgramState) -> FeatureParserResult<'a> {
Expand Down Expand Up @@ -327,7 +326,7 @@ fn parse_struct<'a>(mut iterator: std::slice::Iter<'a, String>, program: &Progra

program.types.insert(name.clone(), node.clone());

return Ok(((node, program.clone()), iterator));
Ok(((node, program.clone()), iterator))
}

fn parse_var_decl<'a>(mut iterator: std::slice::Iter<'a, String>, program: &ProgramState, mut expressions: Vec<Atoms>,) -> ExpressionParserResult<'a> {
Expand All @@ -343,7 +342,7 @@ fn parse_var_decl<'a>(mut iterator: std::slice::Iter<'a, String>, program: &Prog

let expressions = execute_expression_parsers(&possible_following_expressions, iterator, program, expressions)?;

return Ok(expressions);
Ok(expressions)
}

fn parse_variable<'a>(mut iterator: std::slice::Iter<'a, String>, program: &ProgramState, mut expressions: Vec<Atoms>,) -> ExpressionParserResult<'a> {
Expand Down Expand Up @@ -376,7 +375,7 @@ fn parse_literal<'a>(mut iterator: std::slice::Iter<'a, String>, program: &Progr

expressions.push(Atoms::Literal{ value: value.clone() });

return Ok((expressions, iterator));
Ok((expressions, iterator))
}

fn parse_rvalue<'a>(iterator: std::slice::Iter<'a, String>, program: &ProgramState, expressions: Vec<Atoms>,) -> ExpressionParserResult<'a> {
Expand Down Expand Up @@ -477,7 +476,7 @@ fn parse_statement<'a>(iterator: std::slice::Iter<'a, String>, program: &Program
}
}

return Ok(((dandc(&expressions), program.clone()), iterator));
Ok(((dandc(&expressions), program.clone()), iterator))
}

fn parse_function<'a>(mut iterator: std::slice::Iter<'a, String>, program: &ProgramState) -> FeatureParserResult<'a> {
Expand Down Expand Up @@ -508,10 +507,10 @@ fn parse_function<'a>(mut iterator: std::slice::Iter<'a, String>, program: &Prog
}
}

return Ok(((Rc::new(make_function(name, vec![], return_type, statements, None)), program.clone()), iterator));
Ok(((Rc::new(make_function(name, vec![], return_type, statements, None)), program.clone()), iterator))
}

use std::ops::{Index, DerefMut};
use std::ops::Index;

impl Index<&str> for Node {
type Output = Node;
@@ -521,10 +520,10 @@ impl Index<&str> for Node {
Nodes::Scope { name, children } => {
for child in children {
match &child.node {
Nodes::Scope { name: child_name, children: _ } => { if child_name == index { return &child; } }
Nodes::Struct { name: child_name, fields: _ } => { if child_name == index { return &child; } }
Nodes::Member { name: child_name, r#type: _ } => { if child_name == index { return &child; } }
Nodes::Function { name: child_name, params: _, return_type: _, statements: _, raw: _ } => { if child_name == index { return &child; } }
Nodes::Scope { name: child_name, children: _ } => { if child_name == index { return child; } }
Nodes::Struct { name: child_name, fields: _ } => { if child_name == index { return child; } }
Nodes::Member { name: child_name, r#type: _ } => { if child_name == index { return child; } }
Nodes::Function { name: child_name, params: _, return_type: _, statements: _, raw: _ } => { if child_name == index { return child; } }
_ => {}
}
}
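
The parser changes repeat the needless_return cleanup, drop the unused BorrowMut and DerefMut imports, and apply needless_borrow in the Index impl, where `child` is already a reference that deref-coerces to the return type. A sketch of that last case with a simplified lookup (hypothetical names, not the engine's Node API):

use std::rc::Rc;

struct Node {
    name: String,
}

fn find<'a>(children: &'a [Rc<Node>], wanted: &str) -> &'a Node {
    for child in children {
        if child.name == wanted {
            // Before: `return &child;`. `child` is an &Rc<Node> here and already
            // deref-coerces to &Node, so the extra borrow is needless.
            return child;
        }
    }
    panic!("no child named {}", wanted)
}

fn main() {
    let children = vec![Rc::new(Node { name: "root".to_string() })];
    assert_eq!(find(&children, "root").name, "root");
}
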