From bc66016be3e21a17d42de2921f4765626c5809bf Mon Sep 17 00:00:00 2001
From: Okerew <93822247+Okerew@users.noreply.github.com>
Date: Fri, 30 Aug 2024 21:25:40 +0200
Subject: [PATCH] Update test.py

---
 tests/test.py | 35 ++++++++++++++++++++++++++++++++---
 1 file changed, 32 insertions(+), 3 deletions(-)

diff --git a/tests/test.py b/tests/test.py
index 63afe70..28f725a 100644
--- a/tests/test.py
+++ b/tests/test.py
@@ -1,5 +1,5 @@
 from okrolearn.okrolearn import *
-from okrolearn.optimizers import SGDOptimizer
+from okrolearn.optimizers import SGDOptimizer, RMSPropOptimizer
 
 def print_epoch_start(epoch, total_epochs):
     print(f"Starting epoch {epoch + 1}/{total_epochs}")
@@ -28,10 +28,39 @@ def print_epoch_start(epoch, total_epochs):
 network.add(GELUActivationLayer())
 inputs = Tensor(np.random.rand(100, 3))
 targets = Tensor(np.random.randint(0, 3, size=(100,)))
+hyperparameters = [
+    {
+        'lr_scheduler': LearningRateScheduler(initial_lr=0.01),
+        'optimizer': SGDOptimizer(lr=0.01),
+        'temperature': 1.0
+    },
+    {
+        'lr_scheduler': StepLRScheduler(initial_lr=0.001, step_size=30, gamma=0.1),
+        'optimizer': AdamOptimizer(lr=0.001, beta1=0.9, beta2=0.999, epsilon=1e-8),
+        'temperature': 1.0
+    },
+    {
+        'lr_scheduler': ExponentialDecayScheduler(initial_lr=0.001, decay_rate=0.1),
+        'optimizer': NadamOptimizer(epsilon=1e-8),
+        'temperature': 0.5
+    },
+    {
+        'lr_scheduler': LinearDecayScheduler(initial_lr=0.001, final_lr=0.0001, total_epochs=100),
+        'optimizer': AdadeltaOptimizer(epsilon=1e-8),
+        'temperature': 1.5
+    },
+    {
+        'lr_scheduler': TimeBasedDecayScheduler(initial_lr=0.001, decay_rate=0.01),
+        'optimizer': RMSPropOptimizer(lr=0.001, epsilon=1e-8),
+        'temperature': 1.0
+    }
+]
+
+network.hyperparameter_tuning(inputs, targets, batch_size=10, epochs=30, loss_function=CrossEntropyLoss(), hyperparameters=hyperparameters)
 loss_function = CrossEntropyLoss()
 optimizer = SGDOptimizer(lr=0.01)
-losses = network.train(inputs, targets, epochs=50, lr=0.01, optimizer=optimizer, batch_size=10, loss_function=loss_function, )
+losses = network.train(inputs, targets, epochs=50, lr_scheduler=LearningRateScheduler(0.01), optimizer=optimizer, batch_size=10, loss_function=loss_function, )
 network.plot_parameter_changes()
 network.analyze_gradients()
 network.print_profile_stats()
 
@@ -61,7 +90,7 @@ def print_epoch_start(epoch, total_epochs):
 test_network.add(GELUActivationLayer())
 test_network.load_weights('model.pt')
 
-test_network.train(inputs, targets, epochs=50, lr=0.01, optimizer=optimizer, batch_size=10, loss_function=loss_function)
+test_network.train(inputs, targets, epochs=50, lr_scheduler=LearningRateScheduler(0.01), optimizer=optimizer, batch_size=10, loss_function=loss_function)
 test_network.save("model2.pt")
 
 test_inputs = Tensor(np.random.rand(10, 3))
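
Note: the new hyperparameter_tuning call enumerates five scheduler/optimizer/temperature
combinations by hand. The same kind of grid can also be built programmatically. The
sketch below is a minimal, untested example; it assumes the network, inputs, and
targets already constructed in tests/test.py, and it uses only constructors and the
hyperparameter_tuning signature exercised by this patch:

    import itertools

    # Hypothetical smaller sweep (not part of the patch): cross two learning
    # rates with two temperatures, reusing the SGDOptimizer /
    # LearningRateScheduler pairing from the first entry above.
    lrs = [0.01, 0.001]
    temps = [0.5, 1.0]
    grid = [
        {
            'lr_scheduler': LearningRateScheduler(initial_lr=lr),
            'optimizer': SGDOptimizer(lr=lr),
            'temperature': temp,
        }
        for lr, temp in itertools.product(lrs, temps)
    ]
    network.hyperparameter_tuning(inputs, targets, batch_size=10, epochs=30,
                                  loss_function=CrossEntropyLoss(),
                                  hyperparameters=grid)

Separately, both train() calls now pass lr_scheduler=LearningRateScheduler(0.01)
in place of the bare lr=0.01 keyword, so other callers of train() may need the
same update.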