generated from OBJNULL/Dockerized-Rust
Fixed Inference spelling mistake
parent b9b07c4b75
commit ef7046b2c8
4 changed files with 8 additions and 8 deletions

@@ -4,7 +4,7 @@ use std::env;
 // Enums
 pub enum OperationMode {
     Training,
-    Infrence,
+    Inference,
 }

 // Functions

@@ -15,7 +15,7 @@ pub fn get_operation_mode() -> Option<OperationMode> {
     // Getting operation mode
     match args[1].as_str() {
         "training" => Some(OperationMode::Training),
-        "infrence" => Some(OperationMode::Infrence),
+        "inference" => Some(OperationMode::Inference),
         _ => None,
     }
 }

@@ -13,10 +13,10 @@ fn main() {

     // Creating a Neural Network with the Operation Mode
     match operation_mode {
+        None => panic!("Main: `OperationMode` not defined!"),
         Some(mode) => {
             neural = NeuralNetwork::new(mode);
-        },
-        _ => panic!("Main: `OperationMode` not defined!"),
+        }
     }

     // Starting the network

@@ -1,6 +1,6 @@
 // Libraries
 mod data;
-mod infrence;
+mod inference;
 mod model;
 mod training;
 use super::config::OperationMode;

@@ -44,7 +44,7 @@ impl NeuralNetwork {
         );

         // Infer the model
-        infrence::infer::<MyBackend>(
+        inference::infer::<MyBackend>(
             MODEL_DIRECTORY,
             device,
             burn::data::dataset::vision::MnistDataset::test()

@@ -58,7 +58,7 @@ impl NeuralNetwork {
         let device = burn::backend::wgpu::WgpuDevice::default();

         // Infer the model
-        infrence::infer::<MyBackend>(
+        inference::infer::<MyBackend>(
             MODEL_DIRECTORY,
             device,
             burn::data::dataset::vision::MnistDataset::test()

@@ -71,7 +71,7 @@ impl NeuralNetwork {
         // Switching based on mode
         match self.mode {
             OperationMode::Training => self.train(),
-            OperationMode::Infrence => self.infer(),
+            OperationMode::Inference => self.infer(),
         }
     }
 }
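
For context, a minimal, self-contained sketch of how the renamed pieces fit together after this commit. It is illustrative only, not the repository's actual code: the OperationMode enum, the CLI-argument match, and the panic message are taken from the hunks above, while main here just prints instead of constructing the real NeuralNetwork.

use std::env;

// Mirrors the corrected enum from the config hunk
pub enum OperationMode {
    Training,
    Inference,
}

// Mirrors get_operation_mode(): read the first CLI argument and map it to a mode
pub fn get_operation_mode() -> Option<OperationMode> {
    let args: Vec<String> = env::args().collect();
    match args.get(1).map(|s| s.as_str()) {
        Some("training") => Some(OperationMode::Training),
        Some("inference") => Some(OperationMode::Inference),
        _ => None,
    }
}

fn main() {
    // Dispatch on the correctly spelled variant, as main.rs and the
    // NeuralNetwork mode switch now do (printing stands in for the
    // real training/inference calls).
    match get_operation_mode() {
        None => panic!("Main: `OperationMode` not defined!"),
        Some(OperationMode::Training) => println!("would run training"),
        Some(OperationMode::Inference) => println!("would run inference"),
    }
}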