full trainer test
SermetPekin committed Dec 5, 2024
1 parent 0b8f043 commit c865e31
Showing 1 changed file with 43 additions and 0 deletions.
43 changes: 43 additions & 0 deletions test/test_trainer.py
@@ -1,5 +1,48 @@
from micrograd import Activation, Value

from micrograd import Value, MLP, Optimizer, Trainer

# Dataset
inputs = [
    [Value(1.0), Value(2.0)],
    [Value(2.0), Value(3.0)],
    [Value(3.0), Value(4.0)],
    [Value(4.0), Value(5.0)]
]
targets = [
    Value(9.0),
    Value(14.0),
    Value(19.0),
    Value(24.0)
]
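# Every target follows y = 5 * x1 + 4 (x2 is always x1 + 1), so a new input of
# [5.0, 6.0] should map to roughly 29 once the model has fit the pattern.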


# Loss function
def mean_squared_error(predicted: Value, target: Value) -> Value:
return (predicted - target) ** 2


def test_complete_train(capsys):
    with capsys.disabled():

        # Model
        model = MLP(input_size=2, layer_sizes=[3, 1])

        # Optimizer
        optimizer = Optimizer()

        # Trainer
        trainer = Trainer(model=model, loss_fn=mean_squared_error, optimizer=optimizer)

        # Train
        trainer.train(inputs, targets, epochs=100, learning_rate=0.01)

        # Test
        test_input = [Value(5.0), Value(6.0)]  # Expected output: ~29 (continuing the linear pattern y = 5 * x1 + 4)
        prediction = model(test_input)
        print(f"Prediction for input {test_input}: {prediction.data:.4f}")



def test_relu():
    assert Activation.relu(Value(3.0)).data == Value(3.0).data
