adamw-zero.yaml

# @package train.optimizer
_target_: torch.distributed.optim.ZeroRedundancyOptimizer
_recursive_: True
optimizer_class:
  _target_: torch.optim.__getattribute__
  _args_:
    - "AdamW"
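
For context, the sketch below shows how Hydra could turn this node into a working optimizer. It is only an illustration: the inline dict mirrors the YAML above (bypassing the `# @package train.optimizer` composition), and the single-rank gloo process group, the toy torch.nn.Linear module, and lr=1e-3 are assumptions, not part of the config. Because _recursive_ is True, Hydra first resolves optimizer_class, i.e. torch.optim.__getattribute__("AdamW"), which returns the AdamW class itself; that class, the model parameters, and any extra keyword arguments are then forwarded to ZeroRedundancyOptimizer.

# Minimal sketch of how Hydra would materialize this config (assumes hydra-core,
# omegaconf, and torch are installed; the process group, model, and lr are placeholders).
import os

import torch
import torch.distributed as dist
from hydra.utils import instantiate
from omegaconf import OmegaConf

# ZeroRedundancyOptimizer needs an initialized process group; a one-rank gloo
# group is enough for illustration.
os.environ.setdefault("MASTER_ADDR", "127.0.0.1")
os.environ.setdefault("MASTER_PORT", "29500")
dist.init_process_group("gloo", rank=0, world_size=1)

# Same content as adamw-zero.yaml, built inline so the example is self-contained.
cfg = OmegaConf.create(
    {
        "_target_": "torch.distributed.optim.ZeroRedundancyOptimizer",
        "_recursive_": True,
        "optimizer_class": {
            "_target_": "torch.optim.__getattribute__",
            "_args_": ["AdamW"],
        },
    }
)

model = torch.nn.Linear(8, 8)  # placeholder module

# optimizer_class resolves to torch.optim.AdamW (the class, not an instance);
# the parameters become the positional `params` argument and lr is passed through
# to AdamW as a default.
optimizer = instantiate(cfg, model.parameters(), lr=1e-3)

loss = model(torch.randn(4, 8)).sum()
loss.backward()
optimizer.step()

dist.destroy_process_group()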