Skip to content

Commit

Permalink
Merge branch 'dev-tests' into dev-tests-plat-143-modernpy
Browse files Browse the repository at this point in the history
  • Loading branch information
ethho committed Jan 2, 2024
2 parents bfe3608 + 90d1b5d commit 23a7588
Show file tree
Hide file tree
Showing 3 changed files with 23 additions and 16 deletions.
2 changes: 1 addition & 1 deletion LNX-docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ services:
interval: 15s
fakeservices.datajoint.io:
<<: *net
image: datajoint/nginx:v0.2.7
image: datajoint/nginx:v0.2.8
environment:
- ADD_db_TYPE=DATABASE
- ADD_db_ENDPOINT=db:3306
Expand Down
14 changes: 3 additions & 11 deletions docs/src/develop.md
Original file line number Diff line number Diff line change
Expand Up @@ -39,24 +39,16 @@ The following will verify there are no regression errors by running our test sui

- Entire test suite:
```
nosetests -vw tests_old
pytest -sv --cov-report term-missing --cov=datajoint tests
```
> Note: We are in the process of upgrading to `pytest` tests. To run those, use:
> ```
> pytest -sv --cov-report term-missing --cov=datajoint tests
> ```

- A single functional test:
```
nosetests -vs --tests=tests_old.test_external_class:test_insert_and_fetch
pytest -sv tests/test_connection.py::test_dj_conn
```
> Note: We are in the process of upgrading to `pytest` tests. To run those, use:
> ```
> pytest -sv tests/test_connection.py::test_dj_conn
> ```
- A single class test:
```
nosetests -vs --tests=tests_old.test_fetch:TestFetch.test_getattribute_for_fetch1
pytest -sv tests/test_aggr_regressions.py::TestIssue558
```

### Style Tests
Expand Down
23 changes: 19 additions & 4 deletions tests/test_blob.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import pytest
import datajoint as dj
import timeit
import numpy as np
Expand All @@ -10,6 +11,13 @@
from .schema import Longblob


@pytest.fixture
def enable_feature_32bit_dims():
    """Temporarily enable 32-bit dimension decoding in ``dj.blob``.

    Sets the module-level ``dj.blob.use_32bit_dims`` flag for the duration
    of the test and restores it afterwards. The reset is placed in a
    ``finally`` clause so the flag cannot leak into subsequent tests even
    if the generator is closed or an exception is thrown into it at the
    ``yield`` point (the pattern pytest recommends for safe teardown).
    """
    dj.blob.use_32bit_dims = True
    try:
        yield
    finally:
        # Always restore the global flag, regardless of test outcome.
        dj.blob.use_32bit_dims = False


def test_pack():
for x in (
32,
Expand Down Expand Up @@ -180,6 +188,8 @@ def test_insert_longblob(schema_any):
assert (Longblob & "id=1").fetch1()["data"].all() == query_mym_blob["data"].all()
(Longblob & "id=1").delete()


def test_insert_longblob_32bit(schema_any, enable_feature_32bit_dims):
query_32_blob = (
"INSERT INTO djtest_test1.longblob (id, data) VALUES (1, "
"X'6D596D00530200000001000000010000000400000068697473007369646573007461736B73007374"
Expand All @@ -190,7 +200,6 @@ def test_insert_longblob(schema_any):
"00000041020000000100000008000000040000000000000053007400610067006500200031003000')"
)
dj.conn().query(query_32_blob).fetchall()
dj.blob.use_32bit_dims = True
fetched = (Longblob & "id=1").fetch1()
expected = {
"id": 1,
Expand All @@ -211,25 +220,31 @@ def test_insert_longblob(schema_any):
assert fetched["id"] == expected["id"]
assert np.array_equal(fetched["data"], expected["data"])
(Longblob & "id=1").delete()
dj.blob.use_32bit_dims = False


def test_datetime_serialization_speed():
# If this fails that means for some reason deserializing/serializing
# np arrays of np.datetime64 types is now slower than regular arrays of datetime
assert not dj.blob.use_32bit_dims, "32 bit dims should be off for this test"
context = dict(
np=np,
datetime=datetime,
pack=pack,
unpack=unpack,
)

optimized_exe_time = timeit.timeit(
setup="myarr=pack(np.array([np.datetime64('2022-10-13 03:03:13') for _ in range(0, 10000)]))",
stmt="unpack(myarr)",
number=10,
globals=globals(),
globals=context,
)
print(f"np time {optimized_exe_time}")
baseline_exe_time = timeit.timeit(
setup="myarr2=pack(np.array([datetime(2022,10,13,3,3,13) for _ in range (0, 10000)]))",
stmt="unpack(myarr2)",
number=10,
globals=globals(),
globals=context,
)
print(f"python time {baseline_exe_time}")

Expand Down

0 comments on commit 23a7588

Please sign in to comment.