Merge pull request #726 from mlcommons/num_workers_fix
Add warning for PyTorch data loader num_workers flag.
priyakasimbeg authored Mar 29, 2024
2 parents 9365996 + d492d69 commit 784b915
Showing 1 changed file with 12 additions and 3 deletions.
submission_runner.py
```diff
@@ -154,9 +154,12 @@
 flags.DEFINE_boolean('set_pytorch_max_split_size',
                      False,
                      'If true, set pytorch max_split_size_mb to 256')
-flags.DEFINE_integer('pytorch_eval_num_workers',
-                     0,
-                     'Number of workers for PyTorch evaluation data loaders.')
+flags.DEFINE_integer(
+    'pytorch_eval_num_workers',
+    0,
+    'Number of workers for ImageNet PyTorch evaluation data loaders.'
+    'WARNING: Setting pytorch_eval_num_workers != 0, will result '
+    'in incorrect evals currently, see issues/732.')
 FLAGS = flags.FLAGS
 USE_PYTORCH_DDP, RANK, DEVICE, N_GPUS = pytorch_setup()
```
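For context on what this flag controls: `torch.utils.data.DataLoader` loads batches in the main process when `num_workers=0` and spawns that many worker subprocesses otherwise. Below is a minimal sketch of how a flag like `pytorch_eval_num_workers` would typically be threaded into an evaluation loader; the dataset and helper names are illustrative, not taken from this commit:

```python
# Hypothetical sketch: threading a num_workers flag value into a PyTorch
# evaluation DataLoader. Only the flag name comes from this commit.
import torch
from torch.utils.data import DataLoader, TensorDataset

def build_eval_loader(dataset, num_workers: int) -> DataLoader:
  # num_workers=0 loads batches in the main process; any other value
  # spawns that many worker subprocesses to load batches in parallel.
  return DataLoader(
      dataset,
      batch_size=256,
      shuffle=False,  # evaluation order stays fixed
      num_workers=num_workers,
      pin_memory=torch.cuda.is_available())

# Example: an in-memory dataset evaluated with the default (safe) setting.
eval_ds = TensorDataset(torch.randn(1024, 3, 32, 32), torch.zeros(1024))
loader = build_eval_loader(eval_ds, num_workers=0)
images, labels = next(iter(loader))
```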

```diff
@@ -634,6 +637,12 @@ def main(_):
   if FLAGS.framework == 'pytorch':
     pytorch_init(USE_PYTORCH_DDP, RANK, profiler)
 
+  # TODO: remove once issue resolved.
+  if FLAGS.pytorch_eval_num_workers != 0:
+    logging.warning(
+        'WARNING: Setting pytorch_eval_num_workers != 0, will result '
+        'in incorrect evals currently, see issues/732.')
+
   workload_metadata = WORKLOADS[FLAGS.workload]
 
   # Prevent OOM on librispeech conformer.
```
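The flag-plus-warning pattern above can also be exercised outside the harness. Here is a self-contained sketch, assuming only that absl-py is installed; it mirrors the committed code (with the help string's grammar tidied) but is not a substitute for running submission_runner.py itself:

```python
# Standalone reproduction of the commit's flag definition and startup
# warning, runnable on its own with absl-py.
from absl import app, flags, logging

flags.DEFINE_integer(
    'pytorch_eval_num_workers',
    0,
    'Number of workers for ImageNet PyTorch evaluation data loaders. '
    'WARNING: Setting pytorch_eval_num_workers != 0 will result '
    'in incorrect evals currently, see issues/732.')
FLAGS = flags.FLAGS

def main(_):
  # Warn at startup if the unsafe setting is in effect.
  if FLAGS.pytorch_eval_num_workers != 0:
    logging.warning(
        'WARNING: Setting pytorch_eval_num_workers != 0 will result '
        'in incorrect evals currently, see issues/732.')

if __name__ == '__main__':
  app.run(main)  # e.g. python demo.py --pytorch_eval_num_workers=4
```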
