
Commit

clean up
epwalsh committed Feb 23, 2025
1 parent 32a5894 commit c858268
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions src/scripts/train/OLMo2-7B-long-context.py
@@ -12,8 +12,7 @@
 from olmo_core.optim import AdamWConfig, CosWithWarmup, OptimGroupOverride
 from olmo_core.train import TrainerConfig
 from olmo_core.train.callbacks import CheckpointerCallback, CometCallback, WandBCallback
-from olmo_core.train.train_module import (
-    TransformerActivationCheckpointingConfig,
+from olmo_core.train.train_module import ( # TransformerActivationCheckpointingConfig,
     TransformerContextParallelConfig,
     TransformerDataParallelConfig,
     TransformerDataParallelWrappingStrategy,
@@ -24,6 +23,7 @@
 
 
 CONTEXT_LENGTH = 4 * 16_384
+# 64K length, 32 GPUs -> 2,750 TPS
 
 
 def build_model_config(common: CommonComponents) -> TransformerConfig:
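As a side note (not part of the commit), the new constant and the added throughput comment work out as follows; reading "2,750 TPS" as a per-GPU figure is an assumption, since the comment does not say whether it is per GPU or aggregate:

    # Quick arithmetic check on the values in the hunk above.
    CONTEXT_LENGTH = 4 * 16_384
    assert CONTEXT_LENGTH == 65_536   # the "64K length" in the comment

    # Assumption: 2,750 TPS is per GPU; the aggregate over 32 GPUs would then be:
    aggregate_tps = 2_750 * 32        # 88,000 tokens/sec under that assumption
    print(CONTEXT_LENGTH, aggregate_tps)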
@@ -55,7 +55,7 @@ def build_train_module_config(common: CommonComponents) -> TransformerTrainModul
             wrapping_strategy=TransformerDataParallelWrappingStrategy.fine_grained,
         ),
         cp_config=TransformerContextParallelConfig(degree=8),
-        ac_config=TransformerActivationCheckpointingConfig(),
+        # ac_config=TransformerActivationCheckpointingConfig(),
         float8_config=Float8Config(enabled=True),
         max_grad_norm=1.0,
         scheduler=CosWithWarmup(warmup_steps=2000),
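For readers skimming the diff, here is a plain-Python restatement of the settings this commit leaves in effect. This is not the project's API, only a summary dict built from the values visible in the hunks above; anything the diff does not show is omitted:

    # Summary of the long-context settings visible in this diff (not olmo_core API).
    settings_after_commit = {
        "context_length": 4 * 16_384,            # 65,536 tokens ("64K")
        "context_parallel_degree": 8,            # TransformerContextParallelConfig(degree=8)
        "activation_checkpointing": None,        # ac_config commented out by this commit
        "float8_enabled": True,                  # Float8Config(enabled=True)
        "max_grad_norm": 1.0,
        "scheduler_warmup_steps": 2000,          # CosWithWarmup(warmup_steps=2000)
        "dp_wrapping_strategy": "fine_grained",  # TransformerDataParallelWrappingStrategy.fine_grained
    }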

