Add cifar sync model sweep configs
Ubuntu committed Apr 18, 2023
1 parent bb0d7be commit d09859c
Showing 3 changed files with 151 additions and 0 deletions.
53 changes: 53 additions & 0 deletions sweep_configs/cifar_pool_sync_over_model.yaml
@@ -0,0 +1,53 @@
name: cifar_pool_sync_over_model_dropout
description: Test cifar pooled dropout using sync across model
project: k-dropout
program: train_net.py
method: grid
metric:
  name: test_loss
  goal: minimize
# TODO: early_terminate
command:
  - ${env}
  - ${interpreter}
  - ${program}
  - ${args}
  - --sync_over_model
parameters:
  # experiment
  restarts:  # TODO: find a better way to do multiple restarts
    value: 1
  # model
  input_size:
    value: 3072
  hidden_size:
    value: 2000
  output_size:
    value: 10
  n_hidden:
    value: 2
  # dropout
  dropout_layer:
    value: pool
  p:
    value: 0.5
  pool_size:
    values: [1, 100, 1000, 10000, 100000, 1000000, 20000000]
  m:
    values: [1, 128, 512]
  # dataset
  dataset_name:
    value: cifar10
  batch_size:
    value: 512
  test_batch_size:
    value: 512
  num_workers:
    value: 4
  # training
  device:
    value: cuda
  epochs:
    value: 300
  lr:
    value: 0.0005
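For context, a minimal sketch of how a sweep defined by this file could be registered and run with the wandb Python API; only the file path and project name come from this commit, and it assumes wandb and PyYAML are installed and the user is logged in.

# Minimal sketch (not part of this commit): registering and running this sweep
# via the wandb Python API.
import yaml
import wandb

with open("sweep_configs/cifar_pool_sync_over_model.yaml") as f:
    sweep_config = yaml.safe_load(f)

sweep_id = wandb.sweep(sweep=sweep_config, project="k-dropout")

# Each agent pulls one grid point at a time and launches train_net.py using
# the command template above, so every run receives --sync_over_model plus
# the swept parameters as command-line arguments.
wandb.agent(sweep_id)

Equivalently, `wandb sweep sweep_configs/cifar_pool_sync_over_model.yaml` followed by `wandb agent <sweep-id>` does the same from the command line.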
53 changes: 53 additions & 0 deletions sweep_configs/cifar_pool_sync_over_model_fine_grain.yaml
@@ -0,0 +1,53 @@
name: cifar_pool_sync_over_model_dropout_fine_grain
description: Test cifar pooled dropout using sync across model
project: k-dropout
program: train_net.py
method: grid
metric:
  name: test_loss
  goal: minimize
# TODO: early_terminate
command:
  - ${env}
  - ${interpreter}
  - ${program}
  - ${args}
  - --sync_over_model
parameters:
  # experiment
  restarts:  # TODO: find a better way to do multiple restarts
    value: 1
  # model
  input_size:
    value: 3072
  hidden_size:
    value: 2000
  output_size:
    value: 10
  n_hidden:
    value: 2
  # dropout
  dropout_layer:
    value: pool
  p:
    value: 0.5
  pool_size:
    values: [1, 10, 20, 30, 50, 80, 100, 200, 300, 400, 500, 800, 1000]
  m:
    value: 512
  # dataset
  dataset_name:
    value: cifar10
  batch_size:
    value: 512
  test_batch_size:
    value: 512
  num_workers:
    value: 4
  # training
  device:
    value: cuda
  epochs:
    value: 300
  lr:
    value: 0.0005
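The ${args} macro expands every swept parameter into a --key=value argument appended to the train_net.py invocation, alongside the fixed --sync_over_model flag from the command template. A hypothetical argparse sketch of the interface this implies follows; train_net.py itself is not part of this diff, so the exact names and types are assumptions based on the parameter keys above.

# Hypothetical sketch of the argument interface these sweeps assume;
# the real train_net.py is not shown in this diff.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--sync_over_model", action="store_true")  # injected by the command template
parser.add_argument("--dropout_layer", type=str)
parser.add_argument("--p", type=float)
parser.add_argument("--pool_size", type=int)
parser.add_argument("--m", type=int)
parser.add_argument("--hidden_size", type=int)
parser.add_argument("--lr", type=float)
# ...remaining swept keys (sizes, dataset, epochs, etc.) follow the same pattern
args = parser.parse_args()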
45 changes: 45 additions & 0 deletions sweep_configs/cifar_standard_dropout_sweep.yaml
@@ -0,0 +1,45 @@
name: cifar_standard_dropout
description: Sweeping cifar p with standard dropout
project: k-dropout
program: train_net.py
method: grid
metric:
  name: test_loss
  goal: minimize
# TODO: early_terminate
parameters:
  # experiment
  restarts:  # TODO: find a better way to do multiple restarts
    value: 1
  # model
  input_size:
    value: 3072
  hidden_size:
    value: 2000
  output_size:
    value: 10
  n_hidden:
    value: 2
  # dropout
  dropout_layer:
    value: standard
  input_p:
    values: [0, 0.2, 0.5, 0.8]
  p:
    values: [0, 0.2, 0.5, 0.8]
  # dataset
  dataset_name:
    value: cifar10
  batch_size:
    value: 512
  test_batch_size:
    value: 512
  num_workers:
    value: 4
  # training
  device:
    value: cuda
  epochs:
    value: 300
  lr:
    value: 0.0005
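This grid crosses input_p and p (4 × 4 = 16 runs). A rough sketch, assuming a plain PyTorch MLP, of the network shape the model and dropout parameters describe (3072 → 2000 → 2000 → 10 with dropout on the input and after each hidden layer); the actual model definition lives elsewhere in the repository and may differ.

# Rough sketch of the network shape implied by the model/dropout parameters;
# the real implementation in the repository may differ.
import torch.nn as nn

def make_mlp(input_size=3072, hidden_size=2000, output_size=10,
             n_hidden=2, input_p=0.5, p=0.5):
    layers = [nn.Dropout(input_p)]          # dropout on the flattened input
    in_features = input_size
    for _ in range(n_hidden):
        layers += [nn.Linear(in_features, hidden_size), nn.ReLU(), nn.Dropout(p)]
        in_features = hidden_size
    layers.append(nn.Linear(in_features, output_size))
    return nn.Sequential(*layers)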
