Commit

Removed previous error calculation
BradenEverson committed Feb 1, 2024
1 parent 10fc2e7 · commit b2a7e4e
Showing 2 changed files with 4 additions and 5 deletions.
src/core/layer/methods/errors.rs (3 additions, 2 deletions)
@@ -10,8 +10,9 @@ impl ErrorTypes{
     pub fn get_error(&self, actual: &Box<dyn Input>, expected: &Box<dyn Input>, batch_size: usize) -> Box<dyn Input> {
         return match self {
             ErrorTypes::MeanAbsolute => {
-                let parsed_matrix = Matrix::from(actual.to_param_2d());
-                Box::new((parsed_matrix - &Matrix::from(expected.to_param_2d())).transpose())
+                let actual_matrix = Matrix::from(actual.to_param_2d());
+                let expected_matrix = Matrix::from(expected.to_param_2d());
+                Box::new((actual_matrix - &expected_matrix).transpose())
             },
             ErrorTypes::MeanSquared => {
                 let actual_matrix = Matrix::from(actual.to_param_2d());
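For context, here is a minimal standalone sketch of what the rewritten MeanAbsolute branch computes: the per-element difference actual - expected, before the transpose. Plain slices and Vec<f32> stand in for the crate's Matrix and Box<dyn Input> types, and the helper name is hypothetical, not part of this repository.

    // Hypothetical sketch, not the crate's API: the error seeded by the
    // MeanAbsolute branch is the per-element difference actual - expected.
    fn elementwise_error(actual: &[f32], expected: &[f32]) -> Vec<f32> {
        actual
            .iter()
            .zip(expected.iter())
            .map(|(a, e)| a - e)
            .collect()
    }

    fn main() {
        let actual = vec![0.9, 0.2, 0.4];
        let expected = vec![1.0, 0.0, 0.5];
        // Prints roughly [-0.1, 0.2, -0.1]: the per-output error that
        // back-propagation starts from.
        println!("{:?}", elementwise_error(&actual, &expected));
    }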
src/core/network.rs (1 addition, 3 deletions)
@@ -205,12 +205,10 @@ impl Network{
     ///
     ///When constructing a neural network, be cautious that your layers behave well with each other
     fn back_propegate(&mut self, outputs: &Vec<f32>, target_obj: &Box<dyn Input>, loss: &ErrorTypes) {
-        //let mut parsed = Matrix::from(outputs.to_param_2d());
-
         if let None = self.layers[self.layers.len()-1].get_activation() {
             panic!("Output layer is not a dense layer");
         }
 
         let mut gradients: Box<dyn Input>;
         let actual: Box<dyn Input> = Box::new(outputs.clone());
-        let mut errors: Box<dyn Input> = loss.get_error(&actual, target_obj, 1);//Box::new((parsed - &Matrix::from(target_obj.to_param_2d())).transpose());
+        let mut errors: Box<dyn Input> = loss.get_error(&actual, target_obj, 1);
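The net effect in back_propegate: the commented-out inline error computation is gone, and the initial errors now come only from loss.get_error. Below is a self-contained sketch of that design, where the loss type owns the error calculation and back-propagation just asks it for the seed vector. Everything here is illustrative rather than the crate's code; in particular, the MeanSquared scaling shown is an assumption, since this diff truncates before that branch's math.

    // Illustrative only: the pattern of dispatching the initial error
    // computation on the loss type, as back_propegate now does.
    enum ErrorKind {
        MeanAbsolute,
        MeanSquared,
    }

    impl ErrorKind {
        fn initial_error(&self, actual: &[f32], expected: &[f32]) -> Vec<f32> {
            let diff = actual.iter().zip(expected).map(|(a, e)| a - e);
            match self {
                // Raw difference, matching the branch shown in errors.rs.
                ErrorKind::MeanAbsolute => diff.collect(),
                // Assumed form: d/da (a - e)^2 = 2(a - e); the actual
                // MeanSquared branch is cut off in this diff.
                ErrorKind::MeanSquared => diff.map(|d| 2.0 * d).collect(),
            }
        }
    }

    fn main() {
        let loss = ErrorKind::MeanSquared;
        // Seed the backward pass with the loss-specific error vector.
        let errors = loss.initial_error(&[0.9, 0.2], &[1.0, 0.0]);
        println!("{:?}", errors); // roughly [-0.2, 0.4]
    }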
