From 0e48d347b1052235b1219f8be5198dcc83fa22a0 Mon Sep 17 00:00:00 2001 From: davidfitzek Date: Tue, 22 Aug 2023 17:11:20 +0200 Subject: [PATCH] update configs --- config/config_large.yaml | 9 +++------ config/config_medium.yaml | 9 +++------ config/config_small.yaml | 15 ++++++--------- 3 files changed, 12 insertions(+), 21 deletions(-) diff --git a/config/config_large.yaml b/config/config_large.yaml index 4349f916..4263d69d 100644 --- a/config/config_large.yaml +++ b/config/config_large.yaml @@ -34,18 +34,15 @@ optimizer: eta_min: 0.00001 dataset: - num_atoms: null - num_samples: null - delta: null - num_workers: 4 + num_workers: 8 rydberg: num_states: 2 - num_encoder_embedding_dims: 3 profiler: profiler: "PyTorchProfiler" + advanced_monitoring: False misc: seed: 8596 - prog_bar: True + prog_bar: False diff --git a/config/config_medium.yaml b/config/config_medium.yaml index 902081c2..cdddb902 100644 --- a/config/config_medium.yaml +++ b/config/config_medium.yaml @@ -34,18 +34,15 @@ optimizer: eta_min: 0.00001 dataset: - num_atoms: null - num_samples: null - delta: null - num_workers: 0 + num_workers: 8 rydberg: num_states: 2 - num_encoder_embedding_dims: 3 profiler: profiler: "PyTorchProfiler" + advanced_monitoring: False misc: seed: 104 - prog_bar: True + prog_bar: False diff --git a/config/config_small.yaml b/config/config_small.yaml index ea903f06..01b403e4 100644 --- a/config/config_small.yaml +++ b/config/config_small.yaml @@ -11,16 +11,16 @@ transformer: graph_embedding: graph_num_layers: 2 graph_hidden_dim: 64 - in_node_dim: 4 # omega delta beta, blockade_radius + in_node_dim: 4 # omega delta beta, Rb training: strategy: "auto" precision: 32 # half precision saves lots of memory! 
32 is default max_epochs: 1000 - batch_size: 32 + batch_size: 4096 learning_rate: 0.001 criterion: "NLLLoss" #KLLoss - from_checkpoint: null + from_checkpoint: null accumulate_grad_batches: 1 detect_anomaly: True @@ -34,18 +34,15 @@ optimizer: eta_min: 0.00001 dataset: - num_atoms: null - num_samples: null - delta: null - num_workers: 0 + num_workers: 8 rydberg: num_states: 2 - num_encoder_embedding_dims: 3 profiler: profiler: "PyTorchProfiler" + advanced_monitoring: False # Only for debugging misc: seed: 104 - prog_bar: True + prog_bar: False