Extend test timeout (#727)
Summary:
Pull Request resolved: #727

Some health tests are still failing due to timeouts. Extended the timeout to 60,000 ms, the maximum allowed run-time, for all tests with a deadline.

Reviewed By: HuanyuZhang

Differential Revision: D68980483

fbshipit-source-id: 8743e5630e22bb966885f7acacc9660982cc85c7
iden-kalemaj authored and facebook-github-bot committed Feb 3, 2025
1 parent: c7225a2 · commit: e4eb3fb
Showing 19 changed files with 24 additions and 24 deletions.
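For context, the decorator being changed throughout this commit is Hypothesis's settings(deadline=...), which fails a test whenever a single generated example takes longer than the given number of milliseconds. The sketch below shows the pattern as it appears in these test files; the test name, strategy, and body are illustrative placeholders, not code from this commit.

# Minimal sketch of the Hypothesis deadline pattern (placeholder test, not from this commit).
from hypothesis import HealthCheck, given, settings
from hypothesis import strategies as st


@given(batch_size=st.sampled_from([8, 16, 64]))
@settings(suppress_health_check=list(HealthCheck), deadline=60000)  # deadline is in milliseconds
def test_example_runs_within_deadline(batch_size: int) -> None:
    # A real test would exercise Opacus components here; the assertion is a stand-in.
    assert batch_size > 0

Each file below makes the same one-line change: the existing deadline value (10,000 to 40,000 ms, depending on the test) is replaced with 60,000 ms.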
opacus/tests/accountants_test.py: 1 addition & 1 deletion
@@ -136,7 +136,7 @@ def test_get_noise_multiplier_prv_steps(self) -> None:
),
delta=st.sampled_from([1e-4, 1e-5, 1e-6]),
)
-@settings(deadline=40000)
+@settings(deadline=60000)
def test_get_noise_multiplier_overshoot(self, epsilon, epochs, sample_rate, delta):
noise_multiplier = get_noise_multiplier(
target_epsilon=epsilon,
opacus/tests/batch_memory_manager_test.py: 2 additions & 2 deletions
@@ -59,7 +59,7 @@ def _init_training(self, batch_size=10, **data_loader_kwargs):
batch_size=st.sampled_from([8, 16, 64]),
max_physical_batch_size=st.sampled_from([4, 8]),
)
-@settings(suppress_health_check=list(HealthCheck), deadline=40000)
+@settings(suppress_health_check=list(HealthCheck), deadline=60000)
def test_basic(
self,
num_workers: int,
@@ -119,7 +119,7 @@ def test_basic(
num_workers=st.integers(0, 4),
pin_memory=st.booleans(),
)
-@settings(suppress_health_check=list(HealthCheck), deadline=40000)
+@settings(suppress_health_check=list(HealthCheck), deadline=60000)
def test_empty_batch(
self,
num_workers: int,
opacus/tests/dp_layers/dp_multihead_attention_test.py: 3 additions & 3 deletions
@@ -57,7 +57,7 @@ class DPMultiheadAttention_test(DPModules_test):
kdim=st.integers(2, 8) | st.none(),
vdim=st.integers(2, 8) | st.none(),
)
-@settings(deadline=10000)
+@settings(deadline=60000)
def test_attn(
self,
batch_size: int,
@@ -142,7 +142,7 @@ def test_attn(
kdim=st.integers(2, 8) | st.none(),
vdim=st.integers(2, 8) | st.none(),
)
-@settings(deadline=10000)
+@settings(deadline=60000)
def test_dp_attn(
self,
batch_size: int,
@@ -227,7 +227,7 @@ def test_dp_attn(
kdim=st.integers(2, 8) | st.none(),
vdim=st.integers(2, 8) | st.none(),
)
-@settings(deadline=10000)
+@settings(deadline=60000)
def test_dp_attn_hook(
self,
batch_size: int,
opacus/tests/dp_layers/dp_rnn_test.py: 1 addition & 1 deletion
@@ -57,7 +57,7 @@ class DPLSTM_test(DPModules_test):
zero_init=st.booleans(),
packed_input_flag=st.integers(0, 2),
)
-@settings(deadline=20000)
+@settings(deadline=60000)
def test_rnn(
self,
mode: str,
opacus/tests/grad_samples/conv1d_test.py: 1 addition & 1 deletion
@@ -35,7 +35,7 @@ class Conv1d_test(GradSampleHooks_test):
dilation=st.integers(1, 2),
groups=st.integers(1, 12),
)
-@settings(deadline=10000)
+@settings(deadline=60000)
def test_conv1d(
self,
N: int,
opacus/tests/grad_samples/conv2d_test.py: 2 additions & 2 deletions
@@ -40,7 +40,7 @@ class Conv2d_test(GradSampleHooks_test):
dilation=st.integers(1, 3),
groups=st.integers(1, 16),
)
-@settings(deadline=30000)
+@settings(deadline=60000)
def test_conv2d(
self,
N: int,
@@ -117,7 +117,7 @@ def test_conv2d(
dilation_h=st.integers(1, 3),
dilation_w=st.integers(1, 3),
)
-@settings(deadline=30000)
+@settings(deadline=60000)
def test_unfold2d(
self,
B: int,
opacus/tests/grad_samples/conv3d_test.py: 1 addition & 1 deletion
@@ -37,7 +37,7 @@ class Conv3d_test(GradSampleHooks_test):
dilation=st.sampled_from([1, (1, 2, 2)]),
groups=st.integers(1, 16),
)
-@settings(deadline=30000)
+@settings(deadline=60000)
def test_conv3d(
self,
N: int,
opacus/tests/grad_samples/dp_multihead_attention_test.py: 1 addition & 1 deletion
@@ -55,7 +55,7 @@ class MultiHeadAttention_test(GradSampleHooks_test):
kv_dim=st.booleans(),
test_or_check=st.integers(1, 2),
)
-@settings(deadline=10000)
+@settings(deadline=60000)
def test_multihead_attention(
self,
N: int,
opacus/tests/grad_samples/dp_rnn_test.py: 1 addition & 1 deletion
@@ -60,7 +60,7 @@ class RNN_test(GradSampleHooks_test):
using_packed_sequences=st.booleans(),
packed_sequences_sorted=st.booleans(),
)
-@settings(deadline=30000)
+@settings(deadline=60000)
def test_rnn(
self,
model,
opacus/tests/grad_samples/embedding_bag_test.py: 1 addition & 1 deletion
@@ -29,7 +29,7 @@ class Embedding_bag_test(GradSampleHooks_test):
D=st.integers(10, 17),
mode=st.sampled_from(["sum", "mean"]),
)
-@settings(deadline=10000)
+@settings(deadline=60000)
def test_input_across_dims(
self,
N: int,
opacus/tests/grad_samples/embedding_test.py: 1 addition & 1 deletion
@@ -32,7 +32,7 @@ class Embedding_test(GradSampleHooks_test):
dim=st.integers(2, 4),
batch_first=st.booleans(),
)
-@settings(deadline=10000)
+@settings(deadline=60000)
def test_input_across_dims(
self,
N: int,
opacus/tests/grad_samples/group_norm_test.py: 1 addition & 1 deletion
@@ -36,7 +36,7 @@ class GroupNorm_test(GradSampleHooks_test):
W=st.integers(4, 8),
num_groups=st.sampled_from([1, 4, "C"]),
)
-@settings(deadline=10000)
+@settings(deadline=60000)
def test_3d_input_groups(
self,
N: int,
opacus/tests/grad_samples/instance_norm1d_test.py: 1 addition & 1 deletion
@@ -23,7 +23,7 @@

class InstanceNorm1d_test(GradSampleHooks_test):
@given(N=st.integers(1, 4), C=st.integers(1, 3), W=st.integers(5, 10))
-@settings(deadline=10000)
+@settings(deadline=60000)
def test_3d_input(self, N: int, C: int, W: int):
x = torch.randn([N, C, W])
norm = nn.InstanceNorm1d(num_features=C, affine=True, track_running_stats=False)
opacus/tests/grad_samples/instance_norm2d_test.py: 1 addition & 1 deletion
@@ -28,7 +28,7 @@ class InstanceNorm2d_test(GradSampleHooks_test):
W=st.integers(5, 10),
H=st.integers(4, 8),
)
-@settings(deadline=10000)
+@settings(deadline=60000)
def test_4d_input(self, N: int, C: int, W: int, H: int):
x = torch.randn([N, C, H, W])
norm = nn.InstanceNorm2d(num_features=C, affine=True, track_running_stats=False)
opacus/tests/grad_samples/instance_norm3d_test.py: 1 addition & 1 deletion
@@ -29,7 +29,7 @@ class InstanceNorm3d_test(GradSampleHooks_test):
H=st.integers(4, 8),
Z=st.integers(1, 4),
)
-@settings(deadline=10000)
+@settings(deadline=60000)
def test_5d_input(self, N: int, C: int, W: int, H: int, Z: int):
x = torch.randn([N, C, Z, H, W])
norm = nn.InstanceNorm3d(num_features=C, affine=True, track_running_stats=False)
opacus/tests/grad_samples/layer_norm_test.py: 1 addition & 1 deletion
@@ -30,7 +30,7 @@ class LayerNorm_test(GradSampleHooks_test):
input_dim=st.integers(2, 4),
norm_dim=st.integers(1, 3),
)
-@settings(deadline=10000)
+@settings(deadline=60000)
def test_input_norm(
self, N: int, Z: int, W: int, H: int, input_dim: int, norm_dim: int
):
opacus/tests/grad_samples/linear_test.py: 1 addition & 1 deletion
@@ -31,7 +31,7 @@ class Linear_test(GradSampleHooks_test):
bias=st.booleans(),
batch_first=st.booleans(),
)
-@settings(deadline=10000)
+@settings(deadline=60000)
def test_input_bias(
self,
N: int,
opacus/tests/grad_samples/sequence_bias_test.py: 1 addition & 1 deletion
@@ -28,7 +28,7 @@ class SequenceBias_test(GradSampleHooks_test):
D=st.integers(4, 8),
batch_first=st.booleans(),
)
-@settings(deadline=10000)
+@settings(deadline=60000)
def test_batch_second(self, N: int, T: int, D: int, batch_first: bool):
seqbias = SequenceBias(D, batch_first)
if batch_first:
opacus/tests/per_sample_gradients_utils_test.py: 2 additions & 2 deletions
@@ -71,7 +71,7 @@ def per_sample_grads_utils_test(
groups=st.integers(1, 12),
grad_sample_mode=st.sampled_from(get_grad_sample_modes(use_ew=True)),
)
-@settings(deadline=40000)
+@settings(deadline=60000)
def test_conv1d(
self,
N: int,
@@ -120,7 +120,7 @@ def test_conv1d(
batch_first=st.booleans(),
grad_sample_mode=st.sampled_from(get_grad_sample_modes(use_ew=True)),
)
-@settings(deadline=40000)
+@settings(deadline=60000)
def test_linear(
self,
N: int,