# Synaptopus/tests/test_backprop.py
from __future__ import annotations
import random
from synaptopus.backprop import BackpropNetwork
def test_backprop_supports_multiple_hidden_layers() -> None:
    """A randomly initialized network exposes its layer topology and produces
    one activation per output unit, plus one state record per weight layer."""
    network = BackpropNetwork.random(
        input_size=3,
        hidden_layers=(4, 3),
        output_size=2,
        learning_rate=0.5,
        momentum=0.1,
        rng=random.Random(7),  # fixed seed keeps the test deterministic
    )
    result = network.predict((0.0, 1.0, 0.5))
    assert network.hidden_layers == (4, 3)
    assert network.output_size == 2
    assert len(result.outputs) == 2
    # Two hidden layers + one output layer = three layer states.
    assert len(result.layer_states) == 3
def test_backprop_can_learn_xor_with_two_hidden_layers() -> None:
    """Training on the XOR truth table converges: after enough epochs the
    network's outputs land on the correct side of the 0.2/0.8 thresholds."""
    network = BackpropNetwork.random(
        input_size=2,
        hidden_layers=(4, 4),
        output_size=1,
        learning_rate=0.8,
        momentum=0.2,
        rng=random.Random(11),  # fixed seed keeps convergence reproducible
    )
    # XOR truth table: (inputs, expected outputs).
    samples = (
        ((0.0, 0.0), (0.0,)),
        ((0.0, 1.0), (1.0,)),
        ((1.0, 0.0), (1.0,)),
        ((1.0, 1.0), (0.0,)),
    )
    # 6000 epochs of per-sample online training is ample for this topology.
    for _ in range(6000):
        for inputs, targets in samples:
            network.train_step(inputs, targets)
    predictions = {
        inputs: network.predict(inputs).outputs[0]
        for inputs, _ in samples
    }
    assert predictions[(0.0, 0.0)] < 0.2
    assert predictions[(0.0, 1.0)] > 0.8
    assert predictions[(1.0, 0.0)] > 0.8
    assert predictions[(1.0, 1.0)] < 0.2
def test_backprop_round_trips_through_dict() -> None:
    """Serializing to a dict and restoring yields an equivalent network:
    topology, weights, and biases all survive the round trip."""
    network = BackpropNetwork.random(
        input_size=2,
        hidden_layers=(3, 2),
        output_size=1,
        rng=random.Random(3),  # fixed seed so both sides compare equal
    )
    restored = BackpropNetwork.from_dict(network.to_dict())
    assert restored.layer_sizes == network.layer_sizes
    assert restored.weights == network.weights
    assert restored.biases == network.biases