
Commit 33a2e71
remove assert
Your Name committed May 24, 2024
1 parent a85b7b6 commit 33a2e71
Showing 1 changed file with 0 additions and 1 deletion.
composer/trainer/trainer.py
@@ -1222,7 +1222,6 @@ def __init__(
             raise ValueError(
                 'Both deepspeed_config and parallelism_config are specified but incompatible. Please specify only one.',
             )
-        assert parallelism_config is None, parallelism_config
         if deepspeed_config is not None or parallelism_config is not None or dist.get_world_size() > 1:
             # Deepspeed and FSDP both require torch.distributed to be initialized, even if the world size is 1
             # And torch.distributed is always required for multi-rank training
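For context, here is a minimal Python sketch of the validation flow this hunk leaves behind. It is an illustration, not the upstream code: the helper name _needs_distributed, the world_size parameter (standing in for dist.get_world_size()), and the both-specified guard inferred from the error message are all assumptions.

from typing import Any, Optional


def _needs_distributed(
    deepspeed_config: Optional[Any],
    parallelism_config: Optional[Any],
    world_size: int,
) -> bool:
    """Hypothetical sketch: decide whether torch.distributed must be initialized.

    The both-specified guard below is inferred from the error message in the
    diff; the real Trainer.__init__ surrounds these lines with more logic.
    """
    if deepspeed_config is not None and parallelism_config is not None:
        raise ValueError(
            'Both deepspeed_config and parallelism_config are specified but incompatible. Please specify only one.',
        )
    # The deleted `assert parallelism_config is None` sat at this level in the
    # hunk, so it would have fired for any non-None parallelism_config;
    # removing it lets an FSDP-style parallelism_config proceed on its own.
    # DeepSpeed and FSDP both require torch.distributed to be initialized,
    # even if the world size is 1, and torch.distributed is always required
    # for multi-rank training.
    return (
        deepspeed_config is not None
        or parallelism_config is not None
        or world_size > 1
    )


# Example: a parallelism config alone is now accepted rather than asserted away.
assert _needs_distributed(None, {'fsdp': {}}, world_size=1)
assert not _needs_distributed(None, None, world_size=1)

Judging by the hunk's indentation, the assert ran unconditionally at this point and would have failed for every run that passed a parallelism_config at all; the commit message ("remove assert") supports reading it as leftover debugging code.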
