diff --git a/tests/fast_pauli/test_pauli.py b/tests/fast_pauli/test_pauli.py
index 527729d..b5ad601 100644
--- a/tests/fast_pauli/test_pauli.py
+++ b/tests/fast_pauli/test_pauli.py
@@ -74,5 +74,19 @@ def test_exceptions(pauli: type[fp.Pauli]) -> None:
         pauli(5)
 
 
+@pytest.mark.parametrize("pauli,", [(fp.Pauli)], ids=resolve_parameter_repr)
+def test_clone(paulis: dict, pauli: type[fp.Pauli]) -> None:
+    """Test clone method."""
+    for i in paulis:
+        p1 = pauli(i)
+        p2 = p1.clone()
+
+        np.testing.assert_array_equal(
+            p1.to_tensor(),
+            p2.to_tensor(),
+        )
+        assert id(p1) != id(p2)
+
+
 if __name__ == "__main__":
     pytest.main()
diff --git a/tests/fast_pauli/test_pauli_op.py b/tests/fast_pauli/test_pauli_op.py
index b7d2dc2..1c7f7f6 100644
--- a/tests/fast_pauli/test_pauli_op.py
+++ b/tests/fast_pauli/test_pauli_op.py
@@ -945,5 +945,37 @@ def test_exceptions(
         pauli_op([1, 1], ["XYZ", "ZYX"]).expectation_value(np.eye(16))
 
 
+@pytest.mark.parametrize(
+    "pauli_op,",
+    [
+        fp.PauliOp,
+    ],
+    ids=resolve_parameter_repr,
+)
+def test_clone(
+    pauli_strings_with_size: Callable,
+    generate_random_complex: Callable,
+    pauli_op: type[fp.PauliOp],
+) -> None:
+    """Test clone method."""
+    string_sets = [
+        ["I", "X", "Y", "Z"],
+        pauli_strings_with_size(2),
+        pauli_strings_with_size(3),
+        ["XYZXYZXYZ", "ZZZIIIXXX"],
+    ]
+
+    for strings in string_sets:
+        coeffs = generate_random_complex(len(strings))
+        op1 = pauli_op(coeffs, strings)
+        op2 = op1.clone()
+
+        np.testing.assert_array_equal(
+            op1.to_tensor(),
+            op2.to_tensor(),
+        )
+        assert id(op1) != id(op2)
+
+
 if __name__ == "__main__":
     pytest.main()
diff --git a/tests/fast_pauli/test_pauli_string.py b/tests/fast_pauli/test_pauli_string.py
index be210dc..15dedea 100644
--- a/tests/fast_pauli/test_pauli_string.py
+++ b/tests/fast_pauli/test_pauli_string.py
@@ -405,6 +405,26 @@ def test_exceptions(pauli_string: type[fp.PauliString] | type[pp.PauliString]) -
         pauli_string("XYZ").apply(np.eye(4))
 
 
+@pytest.mark.parametrize(
+    "pauli_string,",
+    [
+        (fp.PauliString),
+    ],
+    ids=resolve_parameter_repr,
+)
+def test_clone(sample_pauli_strings: list, pauli_string: type[fp.PauliString]) -> None:
+    """Test clone method."""
+    for ps in sample_pauli_strings:
+        pstr1 = pauli_string(ps)
+        pstr2 = pstr1.clone()
+
+        np.testing.assert_array_equal(
+            pstr1.to_tensor(),
+            pstr2.to_tensor(),
+        )
+        assert id(pstr1) != id(pstr2)
+
+
 @pytest.mark.consistency
 def test_sparse_composers(paulis: dict, pauli_strings_with_size: Callable) -> None:
     """Test consistency for sparse pauli composers."""
diff --git a/tests/fast_pauli/test_summed_pauli_op.py b/tests/fast_pauli/test_summed_pauli_op.py
index be0d631..27f6e16 100644
--- a/tests/fast_pauli/test_summed_pauli_op.py
+++ b/tests/fast_pauli/test_summed_pauli_op.py
@@ -237,3 +237,28 @@ def test_square(
     A_k2 = op2.to_tensor()
 
     np.testing.assert_allclose(A_k2, np.einsum("kab,kbc->kac", A_k, A_k))
+
+
+@pytest.mark.parametrize(
+    "summed_pauli_op", [fp.SummedPauliOp], ids=resolve_parameter_repr
+)
+@pytest.mark.parametrize(
+    "n_operators,n_qubits",
+    [(o, q) for o in [1, 10] for q in [1, 2, 4, 6]],
+)
+def test_clone(
+    summed_pauli_op: type[fp.SummedPauliOp],
+    n_operators: int,
+    n_qubits: int,
+) -> None:
+    """Test clone method."""
+    pauli_strings = fp.helpers.calculate_pauli_strings_max_weight(n_qubits, 2)
+    coeffs_2d = np.random.rand(len(pauli_strings), n_operators).astype(np.complex128)
+    op1 = summed_pauli_op(pauli_strings, coeffs_2d)
+    op2 = op1.clone()
+
+    np.testing.assert_array_equal(
+        op1.to_tensor(),
+        op2.to_tensor(),
+    )
+    assert id(op1) != id(op2)