From 9f2297db5f214c8c5c0c5e5091fc5c719eac5928 Mon Sep 17 00:00:00 2001 From: A-Baji Date: Wed, 29 Nov 2023 14:52:34 -0600 Subject: [PATCH 001/212] test: :white_check_mark: convert simpler tests to pytest syntax --- tests/__init__.py | 7 + tests/schema.py | 489 +++++++++++++++++++ tests/schema_advanced.py | 147 ++++++ tests/schema_simple.py | 279 +++++++++++ {tests_old => tests}/test_blob.py | 100 ++-- {tests_old => tests}/test_blob_matlab.py | 58 ++- {tests_old => tests}/test_dependencies.py | 75 ++- tests/test_erd.py | 76 +++ {tests_old => tests}/test_foreign_keys.py | 23 +- {tests_old => tests}/test_groupby.py | 0 tests/test_hash.py | 6 + {tests_old => tests}/test_json.py | 6 +- {tests_old => tests}/test_log.py | 3 +- {tests_old => tests}/test_nan.py | 14 +- {tests_old => tests}/test_plugin.py | 0 {tests_old => tests}/test_relation_u.py | 59 ++- {tests_old => tests}/test_schema_keywords.py | 12 +- {tests_old => tests}/test_settings.py | 30 +- tests/test_utils.py | 33 ++ tests/test_virtual_module.py | 10 + tests_old/test_erd.py | 87 ---- tests_old/test_hash.py | 7 - tests_old/test_utils.py | 33 -- tests_old/test_virtual_module.py | 12 - 24 files changed, 1219 insertions(+), 347 deletions(-) create mode 100644 tests/schema.py create mode 100644 tests/schema_advanced.py create mode 100644 tests/schema_simple.py rename {tests_old => tests}/test_blob.py (73%) rename {tests_old => tests}/test_blob_matlab.py (83%) rename {tests_old => tests}/test_dependencies.py (64%) create mode 100644 tests/test_erd.py rename {tests_old => tests}/test_foreign_keys.py (72%) rename {tests_old => tests}/test_groupby.py (100%) create mode 100644 tests/test_hash.py rename {tests_old => tests}/test_json.py (98%) rename {tests_old => tests}/test_log.py (69%) rename {tests_old => tests}/test_nan.py (73%) rename {tests_old => tests}/test_plugin.py (100%) rename {tests_old => tests}/test_relation_u.py (52%) rename {tests_old => tests}/test_schema_keywords.py (67%) rename {tests_old => tests}/test_settings.py (69%) create mode 100644 tests/test_utils.py create mode 100644 tests/test_virtual_module.py delete mode 100644 tests_old/test_erd.py delete mode 100644 tests_old/test_hash.py delete mode 100644 tests_old/test_utils.py delete mode 100644 tests_old/test_virtual_module.py diff --git a/tests/__init__.py b/tests/__init__.py index 8b825a042..0fd907166 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -5,6 +5,13 @@ PREFIX = "djtest" +# Connection for testing +CONN_INFO = dict( + host=os.getenv("DJ_HOST"), + user=os.getenv("DJ_USER"), + password=os.getenv("DJ_PASS"), +) + CONN_INFO_ROOT = dict( host=os.getenv("DJ_HOST"), user=os.getenv("DJ_USER"), diff --git a/tests/schema.py b/tests/schema.py new file mode 100644 index 000000000..dafd481da --- /dev/null +++ b/tests/schema.py @@ -0,0 +1,489 @@ +""" +Sample schema with realistic tables for testing +""" + +import random +import numpy as np +import datajoint as dj +import inspect +from . 
import PREFIX, CONN_INFO + +schema = dj.Schema(PREFIX + "_test1", connection=dj.conn(**CONN_INFO)) + + +@schema +class TTest(dj.Lookup): + """ + doc string + """ + + definition = """ + key : int # key + --- + value : int # value + """ + contents = [(k, 2 * k) for k in range(10)] + + +@schema +class TTest2(dj.Manual): + definition = """ + key : int # key + --- + value : int # value + """ + + +@schema +class TTest3(dj.Manual): + definition = """ + key : int + --- + value : varchar(300) + """ + + +@schema +class NullableNumbers(dj.Manual): + definition = """ + key : int + --- + fvalue = null : float + dvalue = null : double + ivalue = null : int + """ + + +@schema +class TTestExtra(dj.Manual): + """ + clone of Test but with an extra field + """ + + definition = TTest.definition + "\nextra : int # extra int\n" + + +@schema +class TTestNoExtra(dj.Manual): + """ + clone of Test but with no extra fields + """ + + definition = TTest.definition + + +@schema +class Auto(dj.Lookup): + definition = """ + id :int auto_increment + --- + name :varchar(12) + """ + + def fill(self): + if not self: + self.insert([dict(name="Godel"), dict(name="Escher"), dict(name="Bach")]) + + +@schema +class User(dj.Lookup): + definition = """ # lab members + username: varchar(12) + """ + contents = [ + ["Jake"], + ["Cathryn"], + ["Shan"], + ["Fabian"], + ["Edgar"], + ["George"], + ["Dimitri"], + ] + + +@schema +class Subject(dj.Lookup): + definition = """ # Basic information about animal subjects used in experiments + subject_id :int # unique subject id + --- + real_id :varchar(40) # real-world name. Omit if the same as subject_id + species = "mouse" :enum('mouse', 'monkey', 'human') + date_of_birth :date + subject_notes :varchar(4000) + unique index (real_id, species) + """ + + contents = [ + [1551, "1551", "mouse", "2015-04-01", "genetically engineered super mouse"], + [10, "Curious George", "monkey", "2008-06-30", ""], + [1552, "1552", "mouse", "2015-06-15", ""], + [1553, "1553", "mouse", "2016-07-01", ""], + ] + + +@schema +class Language(dj.Lookup): + definition = """ + # languages spoken by some of the developers + # additional comments are ignored + name : varchar(40) # name of the developer + language : varchar(40) # language + """ + contents = [ + ("Fabian", "English"), + ("Edgar", "English"), + ("Dimitri", "English"), + ("Dimitri", "Ukrainian"), + ("Fabian", "German"), + ("Edgar", "Japanese"), + ] + + +@schema +class Experiment(dj.Imported): + definition = """ # information about experiments + -> Subject + experiment_id :smallint # experiment number for this subject + --- + experiment_date :date # date when experiment was started + -> [nullable] User + data_path="" :varchar(255) # file path to recorded data + notes="" :varchar(2048) # e.g. 
purpose of experiment + entry_time=CURRENT_TIMESTAMP :timestamp # automatic timestamp + """ + + fake_experiments_per_subject = 5 + + def make(self, key): + """ + populate with random data + """ + from datetime import date, timedelta + + users = [None, None] + list(User().fetch()["username"]) + random.seed("Amazing Seed") + self.insert( + dict( + key, + experiment_id=experiment_id, + experiment_date=( + date.today() - timedelta(random.expovariate(1 / 30)) + ).isoformat(), + username=random.choice(users), + ) + for experiment_id in range(self.fake_experiments_per_subject) + ) + + +@schema +class Trial(dj.Imported): + definition = """ # a trial within an experiment + -> Experiment.proj(animal='subject_id') + trial_id :smallint # trial number + --- + start_time :double # (s) + """ + + class Condition(dj.Part): + definition = """ # trial conditions + -> Trial + cond_idx : smallint # condition number + ---- + orientation : float # degrees + """ + + def make(self, key): + """populate with random data (pretend reading from raw files)""" + random.seed("Amazing Seed") + trial = self.Condition() + for trial_id in range(10): + key["trial_id"] = trial_id + self.insert1(dict(key, start_time=random.random() * 1e9)) + trial.insert( + dict(key, cond_idx=cond_idx, orientation=random.random() * 360) + for cond_idx in range(30) + ) + + +@schema +class Ephys(dj.Imported): + definition = """ # some kind of electrophysiological recording + -> Trial + ---- + sampling_frequency :double # (Hz) + duration :decimal(7,3) # (s) + """ + + class Channel(dj.Part): + definition = """ # subtable containing individual channels + -> master + channel :tinyint unsigned # channel number within Ephys + ---- + voltage : longblob + current = null : longblob # optional current to test null handling + """ + + def _make_tuples(self, key): + """ + populate with random data + """ + random.seed(str(key)) + row = dict( + key, sampling_frequency=6000, duration=np.minimum(2, random.expovariate(1)) + ) + self.insert1(row) + number_samples = int(row["duration"] * row["sampling_frequency"] + 0.5) + sub = self.Channel() + sub.insert( + dict( + key, + channel=channel, + voltage=np.float32(np.random.randn(number_samples)), + ) + for channel in range(2) + ) + + +@schema +class Image(dj.Manual): + definition = """ + # table for testing blob inserts + id : int # image identifier + --- + img : longblob # image + """ + + +@schema +class UberTrash(dj.Lookup): + definition = """ + id : int + --- + """ + contents = [(1,)] + + +@schema +class UnterTrash(dj.Lookup): + definition = """ + -> UberTrash + my_id : int + --- + """ + contents = [(1, 1), (1, 2)] + + +@schema +class SimpleSource(dj.Lookup): + definition = """ + id : int # id + """ + contents = ((x,) for x in range(10)) + + +@schema +class SigIntTable(dj.Computed): + definition = """ + -> SimpleSource + """ + + def _make_tuples(self, key): + raise KeyboardInterrupt + + +@schema +class SigTermTable(dj.Computed): + definition = """ + -> SimpleSource + """ + + def make(self, key): + raise SystemExit("SIGTERM received") + + +@schema +class DjExceptionName(dj.Lookup): + definition = """ + dj_exception_name: char(64) + """ + + @property + def contents(self): + return [ + [member_name] + for member_name, member_type in inspect.getmembers(dj.errors) + if inspect.isclass(member_type) and issubclass(member_type, Exception) + ] + + +@schema +class ErrorClass(dj.Computed): + definition = """ + -> DjExceptionName + """ + + def make(self, key): + exception_name = key["dj_exception_name"] + raise 
getattr(dj.errors, exception_name) + + +@schema +class DecimalPrimaryKey(dj.Lookup): + definition = """ + id : decimal(4,3) + """ + contents = zip((0.1, 0.25, 3.99)) + + +@schema +class IndexRich(dj.Manual): + definition = """ + -> Subject + --- + -> [unique, nullable] User.proj(first="username") + first_date : date + value : int + index (first_date, value) + """ + + +# Schema for issue 656 +@schema +class ThingA(dj.Manual): + definition = """ + a: int + """ + + +@schema +class ThingB(dj.Manual): + definition = """ + b1: int + b2: int + --- + b3: int + """ + + +@schema +class ThingC(dj.Manual): + definition = """ + -> ThingA + --- + -> [unique, nullable] ThingB + """ + + +@schema +class Parent(dj.Lookup): + definition = """ + parent_id: int + --- + name: varchar(30) + """ + contents = [(1, "Joe")] + + +@schema +class Child(dj.Lookup): + definition = """ + -> Parent + child_id: int + --- + name: varchar(30) + """ + contents = [(1, 12, "Dan")] + + +# Related to issue #886 (8), #883 (5) +@schema +class ComplexParent(dj.Lookup): + definition = "\n".join(["parent_id_{}: int".format(i + 1) for i in range(8)]) + contents = [tuple(i for i in range(8))] + + +@schema +class ComplexChild(dj.Lookup): + definition = "\n".join( + ["-> ComplexParent"] + ["child_id_{}: int".format(i + 1) for i in range(1)] + ) + contents = [tuple(i for i in range(9))] + + +@schema +class SubjectA(dj.Lookup): + definition = """ + subject_id: varchar(32) + --- + dob : date + sex : enum('M', 'F', 'U') + """ + contents = [ + ("mouse1", "2020-09-01", "M"), + ("mouse2", "2020-03-19", "F"), + ("mouse3", "2020-08-23", "F"), + ] + + +@schema +class SessionA(dj.Lookup): + definition = """ + -> SubjectA + session_start_time: datetime + --- + session_dir='' : varchar(32) + """ + contents = [ + ("mouse1", "2020-12-01 12:32:34", ""), + ("mouse1", "2020-12-02 12:32:34", ""), + ("mouse1", "2020-12-03 12:32:34", ""), + ("mouse1", "2020-12-04 12:32:34", ""), + ] + + +@schema +class SessionStatusA(dj.Lookup): + definition = """ + -> SessionA + --- + status: enum('in_training', 'trained_1a', 'trained_1b', 'ready4ephys') + """ + contents = [ + ("mouse1", "2020-12-01 12:32:34", "in_training"), + ("mouse1", "2020-12-02 12:32:34", "trained_1a"), + ("mouse1", "2020-12-03 12:32:34", "trained_1b"), + ("mouse1", "2020-12-04 12:32:34", "ready4ephys"), + ] + + +@schema +class SessionDateA(dj.Lookup): + definition = """ + -> SubjectA + session_date: date + """ + contents = [ + ("mouse1", "2020-12-01"), + ("mouse1", "2020-12-02"), + ("mouse1", "2020-12-03"), + ("mouse1", "2020-12-04"), + ] + + +@schema +class Stimulus(dj.Lookup): + definition = """ + id: int + --- + contrast: int + brightness: int + """ + + +@schema +class Longblob(dj.Manual): + definition = """ + id: int + --- + data: longblob + """ diff --git a/tests/schema_advanced.py b/tests/schema_advanced.py new file mode 100644 index 000000000..7580611e2 --- /dev/null +++ b/tests/schema_advanced.py @@ -0,0 +1,147 @@ +import datajoint as dj +from . import PREFIX, CONN_INFO + +schema = dj.Schema(PREFIX + "_advanced", locals(), connection=dj.conn(**CONN_INFO)) + + +@schema +class Person(dj.Manual): + definition = """ + person_id : int + ---- + full_name : varchar(60) + sex : enum('M','F') + """ + + def fill(self): + """ + fill fake names from www.fakenamegenerator.com + """ + self.insert( + ( + (0, "May K. Hall", "F"), + (1, "Jeffrey E. Gillen", "M"), + (2, "Hanna R. Walters", "F"), + (3, "Russel S. James", "M"), + (4, "Robbin J. Fletcher", "F"), + (5, "Wade J. Sullivan", "M"), + (6, "Dorothy J. 
Chen", "F"), + (7, "Michael L. Kowalewski", "M"), + (8, "Kimberly J. Stringer", "F"), + (9, "Mark G. Hair", "M"), + (10, "Mary R. Thompson", "F"), + (11, "Graham C. Gilpin", "M"), + (12, "Nelda T. Ruggeri", "F"), + (13, "Bryan M. Cummings", "M"), + (14, "Sara C. Le", "F"), + (15, "Myron S. Jaramillo", "M"), + ) + ) + + +@schema +class Parent(dj.Manual): + definition = """ + -> Person + parent_sex : enum('M','F') + --- + -> Person.proj(parent='person_id') + """ + + def fill(self): + def make_parent(pid, parent): + return dict( + person_id=pid, + parent=parent, + parent_sex=(Person & {"person_id": parent}).fetch1("sex"), + ) + + self.insert( + make_parent(*r) + for r in ( + (0, 2), + (0, 3), + (1, 4), + (1, 5), + (2, 4), + (2, 5), + (3, 4), + (3, 7), + (4, 7), + (4, 8), + (5, 9), + (5, 10), + (6, 9), + (6, 10), + (7, 11), + (7, 12), + (8, 11), + (8, 14), + (9, 11), + (9, 12), + (10, 13), + (10, 14), + (11, 14), + (11, 15), + (12, 14), + (12, 15), + ) + ) + + +@schema +class Subject(dj.Manual): + definition = """ + subject : int + --- + -> [unique, nullable] Person + """ + + +@schema +class Prep(dj.Manual): + definition = """ + prep : int + """ + + +@schema +class Slice(dj.Manual): + definition = """ + -> Prep + slice : int + """ + + +@schema +class Cell(dj.Manual): + definition = """ + -> Slice + cell : int + """ + + +@schema +class InputCell(dj.Manual): + definition = """ # a synapse within the slice + -> Cell + -> Cell.proj(input="cell") + """ + + +@schema +class LocalSynapse(dj.Manual): + definition = """ # a synapse within the slice + -> Cell.proj(presynaptic='cell') + -> Cell.proj(postsynaptic='cell') + """ + + +@schema +class GlobalSynapse(dj.Manual): + # Mix old-style and new-style projected foreign keys + definition = """ + # a synapse within the slice + -> Cell.proj(pre_slice="slice", pre_cell="cell") + -> Cell.proj(post_slice="slice", post_cell="cell") + """ diff --git a/tests/schema_simple.py b/tests/schema_simple.py new file mode 100644 index 000000000..78f64d036 --- /dev/null +++ b/tests/schema_simple.py @@ -0,0 +1,279 @@ +""" +A simple, abstract schema to test relational algebra +""" +import random +import datajoint as dj +import itertools +import hashlib +import uuid +import faker +from . 
import PREFIX, CONN_INFO +import numpy as np +from datetime import date, timedelta + +schema = dj.Schema(PREFIX + "_relational", locals(), connection=dj.conn(**CONN_INFO)) + + +@schema +class IJ(dj.Lookup): + definition = """ # tests restrictions + i : int + j : int + """ + contents = list(dict(i=i, j=j + 2) for i in range(3) for j in range(3)) + + +@schema +class JI(dj.Lookup): + definition = """ # tests restrictions by relations when attributes are reordered + j : int + i : int + """ + contents = list(dict(i=i + 1, j=j) for i in range(3) for j in range(3)) + + +@schema +class A(dj.Lookup): + definition = """ + id_a :int + --- + cond_in_a :tinyint + """ + contents = [(i, i % 4 > i % 3) for i in range(10)] + + +@schema +class B(dj.Computed): + definition = """ + -> A + id_b :int + --- + mu :float # mean value + sigma :float # standard deviation + n :smallint # number samples + """ + + class C(dj.Part): + definition = """ + -> B + id_c :int + --- + value :float # normally distributed variables according to parameters in B + """ + + def make(self, key): + random.seed(str(key)) + sub = B.C() + for i in range(4): + key["id_b"] = i + mu = random.normalvariate(0, 10) + sigma = random.lognormvariate(0, 4) + n = random.randint(0, 10) + self.insert1(dict(key, mu=mu, sigma=sigma, n=n)) + sub.insert( + dict(key, id_c=j, value=random.normalvariate(mu, sigma)) + for j in range(n) + ) + + +@schema +class L(dj.Lookup): + definition = """ + id_l: int + --- + cond_in_l :tinyint + """ + contents = [(i, i % 3 >= i % 5) for i in range(30)] + + +@schema +class D(dj.Computed): + definition = """ + -> A + id_d :int + --- + -> L + """ + + def _make_tuples(self, key): + # make reference to a random tuple from L + random.seed(str(key)) + lookup = list(L().fetch("KEY")) + self.insert(dict(key, id_d=i, **random.choice(lookup)) for i in range(4)) + + +@schema +class E(dj.Computed): + definition = """ + -> B + -> D + --- + -> L + """ + + class F(dj.Part): + definition = """ + -> E + id_f :int + --- + -> B.C + """ + + def make(self, key): + random.seed(str(key)) + self.insert1(dict(key, **random.choice(list(L().fetch("KEY"))))) + sub = E.F() + references = list((B.C() & key).fetch("KEY")) + random.shuffle(references) + sub.insert( + dict(key, id_f=i, **ref) + for i, ref in enumerate(references) + if random.getrandbits(1) + ) + + +@schema +class F(dj.Manual): + definition = """ + id: int + ---- + date=null: date + """ + + +@schema +class DataA(dj.Lookup): + definition = """ + idx : int + --- + a : int + """ + contents = list(zip(range(5), range(5))) + + +@schema +class DataB(dj.Lookup): + definition = """ + idx : int + --- + a : int + """ + contents = list(zip(range(5), range(5, 10))) + + +@schema +class Website(dj.Lookup): + definition = """ + url_hash : uuid + --- + url : varchar(1000) + """ + + def insert1_url(self, url): + hashed = hashlib.sha1() + hashed.update(url.encode()) + url_hash = uuid.UUID(bytes=hashed.digest()[:16]) + self.insert1(dict(url=url, url_hash=url_hash), skip_duplicates=True) + return url_hash + + +@schema +class Profile(dj.Manual): + definition = """ + ssn : char(11) + --- + name : varchar(70) + residence : varchar(255) + blood_group : enum('A+', 'A-', 'AB+', 'AB-', 'B+', 'B-', 'O+', 'O-') + username : varchar(120) + birthdate : date + job : varchar(120) + sex : enum('M', 'F') + """ + + class Website(dj.Part): + definition = """ + -> master + -> Website + """ + + def populate_random(self, n=10): + fake = faker.Faker() + faker.Faker.seed(0) # make test deterministic + for _ in range(n): + profile 
= fake.profile() + with self.connection.transaction: + self.insert1(profile, ignore_extra_fields=True) + for url in profile["website"]: + self.Website().insert1( + dict(ssn=profile["ssn"], url_hash=Website().insert1_url(url)) + ) + + +@schema +class TTestUpdate(dj.Lookup): + definition = """ + primary_key : int + --- + string_attr : varchar(255) + num_attr=null : float + blob_attr : longblob + """ + + contents = [ + (0, "my_string", 0.0, np.random.randn(10, 2)), + (1, "my_other_string", 1.0, np.random.randn(20, 1)), + ] + + +@schema +class ArgmaxTest(dj.Lookup): + definition = """ + primary_key : int + --- + secondary_key : char(2) + val : float + """ + + n = 10 + + @property + def contents(self): + n = self.n + yield from zip( + range(n**2), + itertools.chain(*itertools.repeat(tuple(map(chr, range(100, 100 + n))), n)), + np.random.rand(n**2), + ) + + +@schema +class ReservedWord(dj.Manual): + definition = """ + # Test of SQL reserved words + key : int + --- + in : varchar(25) + from : varchar(25) + int : int + select : varchar(25) + """ + + +@schema +class OutfitLaunch(dj.Lookup): + definition = """ + # Monthly released designer outfits + release_id: int + --- + day: date + """ + contents = [(0, date.today() - timedelta(days=15))] + + class OutfitPiece(dj.Part, dj.Lookup): + definition = """ + # Outfit piece associated with outfit + -> OutfitLaunch + piece: varchar(20) + """ + contents = [(0, "jeans"), (0, "sneakers"), (0, "polo")] diff --git a/tests_old/test_blob.py b/tests/test_blob.py similarity index 73% rename from tests_old/test_blob.py rename to tests/test_blob.py index 3765edc57..562d78f2b 100644 --- a/tests_old/test_blob.py +++ b/tests/test_blob.py @@ -7,15 +7,7 @@ from datetime import datetime from datajoint.blob import pack, unpack from numpy.testing import assert_array_equal -from nose.tools import ( - assert_equal, - assert_true, - assert_false, - assert_list_equal, - assert_set_equal, - assert_tuple_equal, - assert_dict_equal, -) +from pytest import approx def test_pack(): @@ -24,19 +16,19 @@ def test_pack(): -3.7e-2, np.float64(3e31), -np.inf, - np.int8(-3), - np.uint8(-1), + np.array(-3).astype(np.uint8), + np.array(-1).astype(np.uint8), np.int16(-33), - np.uint16(-33), + np.array(-33).astype(np.uint16), np.int32(-3), - np.uint32(-1), + np.array(-1).astype(np.uint32), np.int64(373), - np.uint64(-3), + np.array(-3).astype(np.uint64), ): - assert_equal(x, unpack(pack(x)), "Scalars don't match!") + assert x == approx(unpack(pack(x)), rel=1e-6), "Scalars don't match!" x = np.nan - assert_true(np.isnan(unpack(pack(x))), "nan scalar did not match!") + assert np.isnan(unpack(pack(x))), "nan scalar did not match!" 
x = np.random.randn(8, 10) assert_array_equal(x, unpack(pack(x)), "Arrays do not match!") @@ -45,7 +37,7 @@ def test_pack(): assert_array_equal(x, unpack(pack(x)), "Arrays do not match!") x = 7j - assert_equal(x, unpack(pack(x)), "Complex scalar does not match") + assert x == unpack(pack(x)), "Complex scalar does not match" x = np.float32(np.random.randn(3, 4, 5)) assert_array_equal(x, unpack(pack(x)), "Arrays do not match!") @@ -54,41 +46,37 @@ def test_pack(): assert_array_equal(x, unpack(pack(x)), "Arrays do not match!") x = None - assert_true(unpack(pack(x)) is None, "None did not match") + assert unpack(pack(x)) is None, "None did not match" x = -255 y = unpack(pack(x)) - assert_true( - x == y and isinstance(y, int) and not isinstance(y, np.ndarray), - "Scalar int did not match", - ) + assert ( + x == y and isinstance(y, int) and not isinstance(y, np.ndarray) + ), "Scalar int did not match" x = -25523987234234287910987234987098245697129798713407812347 y = unpack(pack(x)) - assert_true( - x == y and isinstance(y, int) and not isinstance(y, np.ndarray), - "Unbounded int did not match", - ) + assert ( + x == y and isinstance(y, int) and not isinstance(y, np.ndarray) + ), "Unbounded int did not match" x = 7.0 y = unpack(pack(x)) - assert_true( - x == y and isinstance(y, float) and not isinstance(y, np.ndarray), - "Scalar float did not match", - ) + assert ( + x == y and isinstance(y, float) and not isinstance(y, np.ndarray) + ), "Scalar float did not match" x = 7j y = unpack(pack(x)) - assert_true( - x == y and isinstance(y, complex) and not isinstance(y, np.ndarray), - "Complex scalar did not match", - ) + assert ( + x == y and isinstance(y, complex) and not isinstance(y, np.ndarray) + ), "Complex scalar did not match" x = True - assert_true(unpack(pack(x)) is True, "Scalar bool did not match") + assert unpack(pack(x)) is True, "Scalar bool did not match" x = [None] - assert_list_equal(x, unpack(pack(x))) + assert [None] == unpack(pack(x)) x = { "name": "Anonymous", @@ -98,22 +86,22 @@ def test_pack(): (11, 12): None, } y = unpack(pack(x)) - assert_dict_equal(x, y, "Dict do not match!") - assert_false( - isinstance(["range"][0], np.ndarray), "Scalar int was coerced into array." - ) + assert x == y, "Dict do not match!" + assert not isinstance( + ["range"][0], np.ndarray + ), "Scalar int was coerced into array." 
x = uuid.uuid4() - assert_equal(x, unpack(pack(x)), "UUID did not match") + assert x == unpack(pack(x)), "UUID did not match" x = Decimal("-112122121.000003000") - assert_equal(x, unpack(pack(x)), "Decimal did not pack/unpack correctly") + assert x == unpack(pack(x)), "Decimal did not pack/unpack correctly" x = [1, datetime.now(), {1: "one", "two": 2}, (1, 2)] - assert_list_equal(x, unpack(pack(x)), "List did not pack/unpack correctly") + assert x == unpack(pack(x)), "List did not pack/unpack correctly" x = (1, datetime.now(), {1: "one", "two": 2}, (uuid.uuid4(), 2)) - assert_tuple_equal(x, unpack(pack(x)), "Tuple did not pack/unpack correctly") + assert x == unpack(pack(x)), "Tuple did not pack/unpack correctly" x = ( 1, @@ -121,36 +109,34 @@ def test_pack(): {"yes!": [1, 2, np.array((3, 4))]}, ) y = unpack(pack(x)) - assert_dict_equal(x[1], y[1]) + assert x[1] == y[1] assert_array_equal(x[2]["yes!"][2], y[2]["yes!"][2]) x = {"elephant"} - assert_set_equal(x, unpack(pack(x)), "Set did not pack/unpack correctly") + assert x == unpack(pack(x)), "Set did not pack/unpack correctly" x = tuple(range(10)) - assert_tuple_equal( - x, unpack(pack(range(10))), "Iterator did not pack/unpack correctly" - ) + assert x == unpack(pack(range(10))), "Iterator did not pack/unpack correctly" x = Decimal("1.24") - assert_true(x == unpack(pack(x)), "Decimal object did not pack/unpack correctly") + assert x == approx(unpack(pack(x))), "Decimal object did not pack/unpack correctly" x = datetime.now() - assert_true(x == unpack(pack(x)), "Datetime object did not pack/unpack correctly") + assert x == unpack(pack(x)), "Datetime object did not pack/unpack correctly" x = np.bool_(True) - assert_true(x == unpack(pack(x)), "Numpy bool object did not pack/unpack correctly") + assert x == unpack(pack(x)), "Numpy bool object did not pack/unpack correctly" x = "test" - assert_true(x == unpack(pack(x)), "String object did not pack/unpack correctly") + assert x == unpack(pack(x)), "String object did not pack/unpack correctly" x = np.array(["yes"]) - assert_true( - x == unpack(pack(x)), "Numpy string array object did not pack/unpack correctly" - ) + assert x == unpack( + pack(x) + ), "Numpy string array object did not pack/unpack correctly" x = np.datetime64("1998").astype("datetime64[us]") - assert_true(x == unpack(pack(x))) + assert x == unpack(pack(x)) def test_recarrays(): diff --git a/tests_old/test_blob_matlab.py b/tests/test_blob_matlab.py similarity index 83% rename from tests_old/test_blob_matlab.py rename to tests/test_blob_matlab.py index 6104c9291..ecb698fec 100644 --- a/tests_old/test_blob_matlab.py +++ b/tests/test_blob_matlab.py @@ -1,8 +1,6 @@ import numpy as np import datajoint as dj from datajoint.blob import pack, unpack - -from nose.tools import assert_equal, assert_true, assert_tuple_equal, assert_false from numpy.testing import assert_array_equal from . 
import PREFIX, CONN_INFO @@ -58,7 +56,8 @@ def insert_blobs(): class TestFetch: @classmethod def setup_class(cls): - assert_false(dj.config["safemode"], "safemode must be disabled") + dj.config["safemode"] = False # temp + assert not dj.config["safemode"], "safemode must be disabled" Blob().delete() insert_blobs() @@ -70,43 +69,43 @@ def test_complex_matlab_blobs(): blobs = Blob().fetch("blob", order_by="KEY") blob = blobs[0] # 'simple string' 'character string' - assert_equal(blob[0], "character string") + assert blob[0] == "character string" blob = blobs[1] # '1D vector' 1:15:180 assert_array_equal(blob, np.r_[1:180:15][None, :]) assert_array_equal(blob, unpack(pack(blob))) blob = blobs[2] # 'string array' {'string1' 'string2'} - assert_true(isinstance(blob, dj.MatCell)) + assert isinstance(blob, dj.MatCell) assert_array_equal(blob, np.array([["string1", "string2"]])) assert_array_equal(blob, unpack(pack(blob))) blob = blobs[ 3 ] # 'struct array' struct('a', {1,2}, 'b', {struct('c', magic(3)), struct('C', magic(5))}) - assert_true(isinstance(blob, dj.MatStruct)) - assert_tuple_equal(blob.dtype.names, ("a", "b")) + assert isinstance(blob, dj.MatStruct) + assert tuple(blob.dtype.names) == ("a", "b") assert_array_equal(blob.a[0, 0], np.array([[1.0]])) assert_array_equal(blob.a[0, 1], np.array([[2.0]])) - assert_true(isinstance(blob.b[0, 1], dj.MatStruct)) - assert_tuple_equal(blob.b[0, 1].C[0, 0].shape, (5, 5)) + assert isinstance(blob.b[0, 1], dj.MatStruct) + assert tuple(blob.b[0, 1].C[0, 0].shape) == (5, 5) b = unpack(pack(blob)) assert_array_equal(b[0, 0].b[0, 0].c, blob[0, 0].b[0, 0].c) assert_array_equal(b[0, 1].b[0, 0].C, blob[0, 1].b[0, 0].C) blob = blobs[4] # '3D double array' reshape(1:24, [2,3,4]) assert_array_equal(blob, np.r_[1:25].reshape((2, 3, 4), order="F")) - assert_true(blob.dtype == "float64") + assert blob.dtype == "float64" assert_array_equal(blob, unpack(pack(blob))) blob = blobs[5] # reshape(uint8(1:24), [2,3,4]) - assert_true(np.array_equal(blob, np.r_[1:25].reshape((2, 3, 4), order="F"))) - assert_true(blob.dtype == "uint8") + assert np.array_equal(blob, np.r_[1:25].reshape((2, 3, 4), order="F")) + assert blob.dtype == "uint8" assert_array_equal(blob, unpack(pack(blob))) blob = blobs[6] # fftn(reshape(1:24, [2,3,4])) - assert_tuple_equal(blob.shape, (2, 3, 4)) - assert_true(blob.dtype == "complex128") + assert tuple(blob.shape) == (2, 3, 4) + assert blob.dtype == "complex128" assert_array_equal(blob, unpack(pack(blob))) @staticmethod @@ -117,7 +116,7 @@ def test_complex_matlab_squeeze(): blob = (Blob & "id=1").fetch1( "blob", squeeze=True ) # 'simple string' 'character string' - assert_equal(blob, "character string") + assert blob == "character string" blob = (Blob & "id=2").fetch1( "blob", squeeze=True @@ -127,14 +126,14 @@ def test_complex_matlab_squeeze(): blob = (Blob & "id=3").fetch1( "blob", squeeze=True ) # 'string array' {'string1' 'string2'} - assert_true(isinstance(blob, dj.MatCell)) + assert isinstance(blob, dj.MatCell) assert_array_equal(blob, np.array(["string1", "string2"])) blob = (Blob & "id=4").fetch1( "blob", squeeze=True ) # 'struct array' struct('a', {1,2}, 'b', {struct('c', magic(3)), struct('C', magic(5))}) - assert_true(isinstance(blob, dj.MatStruct)) - assert_tuple_equal(blob.dtype.names, ("a", "b")) + assert isinstance(blob, dj.MatStruct) + assert tuple(blob.dtype.names) == ("a", "b") assert_array_equal( blob.a, np.array( @@ -144,32 +143,31 @@ def test_complex_matlab_squeeze(): ] ), ) - assert_true(isinstance(blob[1].b, dj.MatStruct)) - 
assert_tuple_equal(blob[1].b.C.item().shape, (5, 5)) + assert isinstance(blob[1].b, dj.MatStruct) + assert tuple(blob[1].b.C.item().shape) == (5, 5) blob = (Blob & "id=5").fetch1( "blob", squeeze=True ) # '3D double array' reshape(1:24, [2,3,4]) - assert_true(np.array_equal(blob, np.r_[1:25].reshape((2, 3, 4), order="F"))) - assert_true(blob.dtype == "float64") + assert np.array_equal(blob, np.r_[1:25].reshape((2, 3, 4), order="F")) + assert blob.dtype == "float64" blob = (Blob & "id=6").fetch1( "blob", squeeze=True ) # reshape(uint8(1:24), [2,3,4]) - assert_true(np.array_equal(blob, np.r_[1:25].reshape((2, 3, 4), order="F"))) - assert_true(blob.dtype == "uint8") + assert np.array_equal(blob, np.r_[1:25].reshape((2, 3, 4), order="F")) + assert blob.dtype == "uint8" blob = (Blob & "id=7").fetch1( "blob", squeeze=True ) # fftn(reshape(1:24, [2,3,4])) - assert_tuple_equal(blob.shape, (2, 3, 4)) - assert_true(blob.dtype == "complex128") + assert tuple(blob.shape) == (2, 3, 4) + assert blob.dtype == "complex128" - @staticmethod - def test_iter(): + def test_iter(self): """ test iterator over the entity set """ from_iter = {d["id"]: d for d in Blob()} - assert_equal(len(from_iter), len(Blob())) - assert_equal(from_iter[1]["blob"], "character string") + assert len(from_iter) == len(Blob()) + assert from_iter[1]["blob"] == "character string" diff --git a/tests_old/test_dependencies.py b/tests/test_dependencies.py similarity index 64% rename from tests_old/test_dependencies.py rename to tests/test_dependencies.py index c359b602a..1e8b1da41 100644 --- a/tests_old/test_dependencies.py +++ b/tests/test_dependencies.py @@ -1,57 +1,54 @@ -from nose.tools import assert_true, raises, assert_list_equal +import datajoint as dj +from datajoint import errors +from pytest import raises + from .schema import * from datajoint.dependencies import unite_master_parts def test_unite_master_parts(): - assert_list_equal( - unite_master_parts( - [ - "`s`.`a`", - "`s`.`a__q`", - "`s`.`b`", - "`s`.`c`", - "`s`.`c__q`", - "`s`.`b__q`", - "`s`.`d`", - "`s`.`a__r`", - ] - ), + assert unite_master_parts( [ "`s`.`a`", "`s`.`a__q`", - "`s`.`a__r`", "`s`.`b`", - "`s`.`b__q`", "`s`.`c`", "`s`.`c__q`", + "`s`.`b__q`", "`s`.`d`", - ], - ) - assert_list_equal( - unite_master_parts( - [ - "`lab`.`#equipment`", - "`cells`.`cell_analysis_method`", - "`cells`.`cell_analysis_method_task_type`", - "`cells`.`cell_analysis_method_users`", - "`cells`.`favorite_selection`", - "`cells`.`cell_analysis_method__cell_selection_params`", - "`lab`.`#equipment__config`", - "`cells`.`cell_analysis_method__field_detect_params`", - ] - ), + "`s`.`a__r`", + ] + ) == [ + "`s`.`a`", + "`s`.`a__q`", + "`s`.`a__r`", + "`s`.`b`", + "`s`.`b__q`", + "`s`.`c`", + "`s`.`c__q`", + "`s`.`d`", + ] + assert unite_master_parts( [ "`lab`.`#equipment`", - "`lab`.`#equipment__config`", "`cells`.`cell_analysis_method`", - "`cells`.`cell_analysis_method__cell_selection_params`", - "`cells`.`cell_analysis_method__field_detect_params`", "`cells`.`cell_analysis_method_task_type`", "`cells`.`cell_analysis_method_users`", "`cells`.`favorite_selection`", - ], - ) + "`cells`.`cell_analysis_method__cell_selection_params`", + "`lab`.`#equipment__config`", + "`cells`.`cell_analysis_method__field_detect_params`", + ] + ) == [ + "`lab`.`#equipment`", + "`lab`.`#equipment__config`", + "`cells`.`cell_analysis_method`", + "`cells`.`cell_analysis_method__cell_selection_params`", + "`cells`.`cell_analysis_method__field_detect_params`", + "`cells`.`cell_analysis_method_task_type`", + 
"`cells`.`cell_analysis_method_users`", + "`cells`.`favorite_selection`", + ] def test_nullable_dependency(): @@ -80,10 +77,9 @@ def test_nullable_dependency(): c.insert1(dict(a=3, b1=1, b2=1)) c.insert1(dict(a=4, b1=1, b2=2)) - assert_true(len(c) == len(c.fetch()) == 5) + assert len(c) == len(c.fetch()) == 5 -@raises(dj.errors.DuplicateError) def test_unique_dependency(): """test nullable unique foreign key""" @@ -104,4 +100,5 @@ def test_unique_dependency(): c.insert1(dict(a=0, b1=1, b2=1)) # duplicate foreign key attributes = not ok - c.insert1(dict(a=1, b1=1, b2=1)) + with raises(errors.DuplicateError): + c.insert1(dict(a=1, b1=1, b2=1)) diff --git a/tests/test_erd.py b/tests/test_erd.py new file mode 100644 index 000000000..991410995 --- /dev/null +++ b/tests/test_erd.py @@ -0,0 +1,76 @@ +import datajoint as dj +from .schema_simple import A, B, D, E, L, schema, OutfitLaunch +from . import schema_advanced + +namespace = locals() + + +class TestERD: + @staticmethod + def setup_method(): + """ + class-level test setup. Executes before each test method. + """ + + @staticmethod + def test_decorator(): + assert issubclass(A, dj.Lookup) + assert not issubclass(A, dj.Part) + assert B.database == schema.database + assert issubclass(B.C, dj.Part) + assert B.C.database == schema.database + assert B.C.master is B and E.F.master is E + + @staticmethod + def test_dependencies(): + deps = schema.connection.dependencies + deps.load() + assert all(cls.full_table_name in deps for cls in (A, B, B.C, D, E, E.F, L)) + assert set(A().children()) == set([B.full_table_name, D.full_table_name]) + assert set(D().parents(primary=True)) == set([A.full_table_name]) + assert set(D().parents(primary=False)) == set([L.full_table_name]) + assert set(deps.descendants(L.full_table_name)).issubset( + cls.full_table_name for cls in (L, D, E, E.F) + ) + + @staticmethod + def test_erd(): + assert dj.diagram.diagram_active, "Failed to import networkx and pydot" + erd = dj.ERD(schema, context=namespace) + graph = erd._make_graph() + assert set(cls.__name__ for cls in (A, B, D, E, L)).issubset(graph.nodes()) + + @staticmethod + def test_erd_algebra(): + erd0 = dj.ERD(B) + erd1 = erd0 + 3 + erd2 = dj.Di(E) - 3 + erd3 = erd1 * erd2 + erd4 = (erd0 + E).add_parts() - B - E + assert erd0.nodes_to_show == set(cls.full_table_name for cls in [B]) + assert erd1.nodes_to_show == set( + cls.full_table_name for cls in (B, B.C, E, E.F) + ) + assert erd2.nodes_to_show == set(cls.full_table_name for cls in (A, B, D, E, L)) + assert erd3.nodes_to_show == set(cls.full_table_name for cls in (B, E)) + assert erd4.nodes_to_show == set(cls.full_table_name for cls in (B.C, E.F)) + + @staticmethod + def test_repr_svg(): + erd = dj.ERD(schema_advanced, context=namespace) + svg = erd._repr_svg_() + assert svg.startswith("") + + @staticmethod + def test_make_image(): + erd = dj.ERD(schema, context=namespace) + img = erd.make_image() + assert img.ndim == 3 and img.shape[2] in (3, 4) + + @staticmethod + def test_part_table_parsing(): + # https://github.com/datajoint/datajoint-python/issues/882 + erd = dj.Di(schema) + graph = erd._make_graph() + assert "OutfitLaunch" in graph.nodes() + assert "OutfitLaunch.OutfitPiece" in graph.nodes() diff --git a/tests_old/test_foreign_keys.py b/tests/test_foreign_keys.py similarity index 72% rename from tests_old/test_foreign_keys.py rename to tests/test_foreign_keys.py index d082960e4..05d87c041 100644 --- a/tests_old/test_foreign_keys.py +++ b/tests/test_foreign_keys.py @@ -1,4 +1,3 @@ -from nose.tools import 
assert_equal, assert_false, assert_true from datajoint.declare import declare from . import schema_advanced @@ -8,18 +7,16 @@ def test_aliased_fk(): person = schema_advanced.Person() parent = schema_advanced.Parent() person.delete() - assert_false(person) - assert_false(parent) + assert not person + assert not parent person.fill() parent.fill() - assert_true(person) - assert_true(parent) + assert person + assert parent link = person.proj(parent_name="full_name", parent="person_id") parents = person * parent * link parents &= dict(full_name="May K. Hall") - assert_equal( - set(parents.fetch("parent_name")), {"Hanna R. Walters", "Russel S. James"} - ) + assert set(parents.fetch("parent_name")) == {"Hanna R. Walters", "Russel S. James"} delete_count = person.delete() assert delete_count == 16 @@ -33,19 +30,19 @@ def test_describe(): )[0].split("\n") s2 = declare(rel.full_table_name, describe, globals())[0].split("\n") for c1, c2 in zip(s1, s2): - assert_equal(c1, c2) + assert c1 == c2 def test_delete(): person = schema_advanced.Person() parent = schema_advanced.Parent() person.delete() - assert_false(person) - assert_false(parent) + assert not person + assert not parent person.fill() parent.fill() - assert_true(parent) + assert parent original_len = len(parent) to_delete = len(parent & "11 in (person_id, parent)") (person & "person_id=11").delete() - assert_true(to_delete and len(parent) == original_len - to_delete) + assert to_delete and len(parent) == original_len - to_delete diff --git a/tests_old/test_groupby.py b/tests/test_groupby.py similarity index 100% rename from tests_old/test_groupby.py rename to tests/test_groupby.py diff --git a/tests/test_hash.py b/tests/test_hash.py new file mode 100644 index 000000000..a88c45316 --- /dev/null +++ b/tests/test_hash.py @@ -0,0 +1,6 @@ +from datajoint import hash + + +def test_hash(): + assert hash.uuid_from_buffer(b"abc").hex == "900150983cd24fb0d6963f7d28e17f72" + assert hash.uuid_from_buffer(b"").hex == "d41d8cd98f00b204e9800998ecf8427e" diff --git a/tests_old/test_json.py b/tests/test_json.py similarity index 98% rename from tests_old/test_json.py rename to tests/test_json.py index b9b13e4ee..760475a1a 100644 --- a/tests_old/test_json.py +++ b/tests/test_json.py @@ -2,12 +2,10 @@ from datajoint.declare import declare import datajoint as dj import numpy as np -from distutils.version import LooseVersion +from packaging.version import Version from . import PREFIX -if LooseVersion(dj.conn().query("select @@version;").fetchone()[0]) >= LooseVersion( - "8.0.0" -): +if Version(dj.conn().query("select @@version;").fetchone()[0]) >= Version("8.0.0"): schema = dj.Schema(PREFIX + "_json") Team = None diff --git a/tests_old/test_log.py b/tests/test_log.py similarity index 69% rename from tests_old/test_log.py rename to tests/test_log.py index 86a48bc37..a3aafa992 100644 --- a/tests_old/test_log.py +++ b/tests/test_log.py @@ -1,4 +1,3 @@ -from nose.tools import assert_true from . import schema @@ -6,4 +5,4 @@ def test_log(): ts, events = (schema.schema.log & 'event like "Declared%%"').fetch( "timestamp", "event" ) - assert_true(len(ts) >= 2) + assert len(ts) >= 2 diff --git a/tests_old/test_nan.py b/tests/test_nan.py similarity index 73% rename from tests_old/test_nan.py rename to tests/test_nan.py index b06848fdf..ad4e6239e 100644 --- a/tests_old/test_nan.py +++ b/tests/test_nan.py @@ -1,5 +1,4 @@ import numpy as np -from nose.tools import assert_true import datajoint as dj from . 
import PREFIX, CONN_INFO
@@ -28,15 +27,10 @@ def setup_class(cls):
     def test_insert_nan(self):
         """Test fetching of null values"""
        b = self.rel.fetch("value", order_by="id")
-        assert_true(
-            (np.isnan(self.a) == np.isnan(b)).all(), "incorrect handling of Nans"
-        )
-        assert_true(
-            np.allclose(
-                self.a[np.logical_not(np.isnan(self.a))], b[np.logical_not(np.isnan(b))]
-            ),
-            "incorrect storage of floats",
-        )
+        assert (np.isnan(self.a) == np.isnan(b)).all(), "incorrect handling of Nans"
+        assert np.allclose(
+            self.a[np.logical_not(np.isnan(self.a))], b[np.logical_not(np.isnan(b))]
+        ), "incorrect storage of floats"

     def test_nulls_do_not_affect_primary_keys(self):
         """Test against a case that previously caused a bug when skipping existing entries."""
diff --git a/tests_old/test_plugin.py b/tests/test_plugin.py
similarity index 100%
rename from tests_old/test_plugin.py
rename to tests/test_plugin.py
diff --git a/tests_old/test_relation_u.py b/tests/test_relation_u.py
similarity index 52%
rename from tests_old/test_relation_u.py
rename to tests/test_relation_u.py
index ff30711b3..44033708d 100644
--- a/tests_old/test_relation_u.py
+++ b/tests/test_relation_u.py
@@ -1,6 +1,6 @@
-from nose.tools import assert_equal, assert_true, raises, assert_list_equal
-from . import schema, schema_simple
 import datajoint as dj
+from pytest import raises
+from . import schema, schema_simple


 class TestU:
@@ -23,37 +23,35 @@ def setup_class(cls):
     def test_restriction(self):
         language_set = {s[1] for s in self.language.contents}
         rel = dj.U("language") & self.language
-        assert_list_equal(rel.heading.names, ["language"])
-        assert_true(len(rel) == len(language_set))
-        assert_true(set(rel.fetch("language")) == language_set)
+        assert list(rel.heading.names) == ["language"]
+        assert len(rel) == len(language_set)
+        assert set(rel.fetch("language")) == language_set
         # Test for issue #342
         rel = self.trial * dj.U("start_time")
-        assert_list_equal(rel.primary_key, self.trial.primary_key + ["start_time"])
-        assert_list_equal(rel.primary_key, (rel & "trial_id>3").primary_key)
-        assert_list_equal((dj.U("start_time") & self.trial).primary_key, ["start_time"])
+        assert list(rel.primary_key) == self.trial.primary_key + ["start_time"]
+        assert list(rel.primary_key) == list((rel & "trial_id>3").primary_key)
+        assert list((dj.U("start_time") & self.trial).primary_key) == ["start_time"]

-    @staticmethod
-    @raises(dj.DataJointError)
-    def test_invalid_restriction():
-        result = dj.U("color") & dict(color="red")
+    def test_invalid_restriction(self):
+        with raises(dj.DataJointError):
+            result = dj.U("color") & dict(color="red")

     def test_ineffective_restriction(self):
         rel = self.language & dj.U("language")
-        assert_true(rel.make_sql() == self.language.make_sql())
+        assert rel.make_sql() == self.language.make_sql()

     def test_join(self):
         rel = self.experiment * dj.U("experiment_date")
-        assert_equal(self.experiment.primary_key, ["subject_id", "experiment_id"])
-        assert_equal(rel.primary_key, self.experiment.primary_key + ["experiment_date"])
+        assert self.experiment.primary_key == ["subject_id", "experiment_id"]
+        assert rel.primary_key == self.experiment.primary_key + ["experiment_date"]

         rel = dj.U("experiment_date") * self.experiment
-        assert_equal(self.experiment.primary_key, ["subject_id", "experiment_id"])
-        assert_equal(rel.primary_key, self.experiment.primary_key + ["experiment_date"])
+        assert self.experiment.primary_key == ["subject_id", "experiment_id"]
+        assert rel.primary_key == self.experiment.primary_key + ["experiment_date"]

-    @staticmethod
-    
@raises(dj.DataJointError) - def test_invalid_join(): - rel = dj.U("language") * dict(language="English") + def test_invalid_join(self): + with raises(dj.DataJointError): + rel = dj.U("language") * dict(language="English") def test_repr_without_attrs(self): """test dj.U() display""" @@ -64,25 +62,24 @@ def test_aggregations(self): lang = schema.Language() # test total aggregation on expression object n1 = dj.U().aggr(lang, n="count(*)").fetch1("n") - assert_equal(n1, len(lang.fetch())) + assert n1 == len(lang.fetch()) # test total aggregation on expression class n2 = dj.U().aggr(schema.Language, n="count(*)").fetch1("n") - assert_equal(n1, n2) + assert n1 == n2 rel = dj.U("language").aggr(schema.Language, number_of_speakers="count(*)") - assert_equal(len(rel), len(set(l[1] for l in schema.Language.contents))) - assert_equal((rel & 'language="English"').fetch1("number_of_speakers"), 3) + assert len(rel) == len(set(l[1] for l in schema.Language.contents)) + assert (rel & 'language="English"').fetch1("number_of_speakers") == 3 def test_argmax(self): rel = schema.TTest() - # get the tuples corresponding to maximum value + # get the tuples corresponding to the maximum value mx = (rel * dj.U().aggr(rel, mx="max(value)")) & "mx=value" - assert_equal(mx.fetch("value")[0], max(rel.fetch("value"))) + assert mx.fetch("value")[0] == max(rel.fetch("value")) def test_aggr(self): rel = schema_simple.ArgmaxTest() amax1 = (dj.U("val") * rel) & dj.U("secondary_key").aggr(rel, val="min(val)") amax2 = (dj.U("val") * rel) * dj.U("secondary_key").aggr(rel, val="min(val)") - assert_true( - len(amax1) == len(amax2) == rel.n, - "Aggregated argmax with join and restriction does not yield same length.", - ) + assert ( + len(amax1) == len(amax2) == rel.n + ), "Aggregated argmax with join and restriction does not yield the same length." diff --git a/tests_old/test_schema_keywords.py b/tests/test_schema_keywords.py similarity index 67% rename from tests_old/test_schema_keywords.py rename to tests/test_schema_keywords.py index 49f380f57..1853852ed 100644 --- a/tests_old/test_schema_keywords.py +++ b/tests/test_schema_keywords.py @@ -1,7 +1,5 @@ from . 
import PREFIX, CONN_INFO
 import datajoint as dj
-from nose.tools import assert_true
-

 schema = dj.Schema(PREFIX + "_keywords", connection=dj.conn(**CONN_INFO))

@@ -39,8 +37,8 @@ class D(B):


 def test_inherited_part_table():
-    assert_true("a_id" in D().heading.attributes)
-    assert_true("b_id" in D().heading.attributes)
-    assert_true("a_id" in D.C().heading.attributes)
-    assert_true("b_id" in D.C().heading.attributes)
-    assert_true("name" in D.C().heading.attributes)
+    assert "a_id" in D().heading.attributes
+    assert "b_id" in D().heading.attributes
+    assert "a_id" in D.C().heading.attributes
+    assert "b_id" in D.C().heading.attributes
+    assert "name" in D.C().heading.attributes
diff --git a/tests_old/test_settings.py b/tests/test_settings.py
similarity index 69%
rename from tests_old/test_settings.py
rename to tests/test_settings.py
index 63c3dad36..b937d5ad3 100644
--- a/tests_old/test_settings.py
+++ b/tests/test_settings.py
@@ -1,8 +1,8 @@
 import pprint
 import random
 import string
-from datajoint import settings
-from nose.tools import assert_true, assert_equal, raises
+import pytest
+from datajoint import DataJointError, settings
 import datajoint as dj
 import os
@@ -14,7 +14,7 @@ def test_load_save():
     dj.config.save("tmp.json")
     conf = settings.Config()
     conf.load("tmp.json")
-    assert_true(conf == dj.config, "Two config files do not match.")
+    assert conf == dj.config
     os.remove("tmp.json")
@@ -25,7 +25,7 @@ def test_singleton():
     conf.load("tmp.json")

     conf["dummy.val"] = 2

-    assert_true(conf == dj.config, "Config does not behave like a singleton.")
+    assert conf == dj.config
     os.remove("tmp.json")
@@ -34,36 +34,36 @@ def test_singleton2():
     conf = settings.Config()
     conf["dummy.val"] = 2
     _ = settings.Config()  # a new instance should not delete dummy.val
-    assert_true(conf["dummy.val"] == 2, "Config does not behave like a singleton.")
+    assert conf["dummy.val"] == 2


-@raises(dj.DataJointError)
 def test_validator():
     """Testing validator"""
-    dj.config["database.port"] = "harbor"
+    with pytest.raises(DataJointError):
+        dj.config["database.port"] = "harbor"


 def test_del():
     """Testing del"""
     dj.config["peter"] = 2
-    assert_true("peter" in dj.config)
+    assert "peter" in dj.config
     del dj.config["peter"]
-    assert_true("peter" not in dj.config)
+    assert "peter" not in dj.config


 def test_len():
     """Testing len"""
-    assert_equal(len(dj.config), len(dj.config._conf))
+    assert len(dj.config) == len(dj.config._conf)


 def test_str():
     """Testing str"""
-    assert_equal(str(dj.config), pprint.pformat(dj.config._conf, indent=4))
+    assert str(dj.config) == pprint.pformat(dj.config._conf, indent=4)


 def test_repr():
     """Testing repr"""
-    assert_equal(repr(dj.config), pprint.pformat(dj.config._conf, indent=4))
+    assert repr(dj.config) == pprint.pformat(dj.config._conf, indent=4)


 def test_save():
@@ -76,7 +76,7 @@ def test_save():
         os.rename(settings.LOCALCONFIG, tmpfile)
         moved = True
     dj.config.save_local()
-    assert_true(os.path.isfile(settings.LOCALCONFIG))
+    assert os.path.isfile(settings.LOCALCONFIG)
     if moved:
         os.rename(tmpfile, settings.LOCALCONFIG)
@@ -101,5 +101,5 @@ def test_contextmanager():
     """Testing context manager"""
     dj.config["arbitrary.stuff"] = 7
     with dj.config(arbitrary__stuff=10):
-        assert_true(dj.config["arbitrary.stuff"] == 10)
-    assert_true(dj.config["arbitrary.stuff"] == 7)
+        assert dj.config["arbitrary.stuff"] == 10
+    assert dj.config["arbitrary.stuff"] == 7
diff --git a/tests/test_utils.py b/tests/test_utils.py
new file mode 100644
index 000000000..936badb1c
--- /dev/null
+++ b/tests/test_utils.py
@@ -0,0 +1,33 @@
+"""
+Collection of test cases to test core module. +""" +from datajoint import DataJointError +from datajoint.utils import from_camel_case, to_camel_case +import pytest + + +def setup(): + pass + + +def teardown(): + pass + + +def test_from_camel_case(): + assert from_camel_case("AllGroups") == "all_groups" + with pytest.raises(DataJointError): + from_camel_case("repNames") + with pytest.raises(DataJointError): + from_camel_case("10_all") + with pytest.raises(DataJointError): + from_camel_case("hello world") + with pytest.raises(DataJointError): + from_camel_case("#baisc_names") + + +def test_to_camel_case(): + assert to_camel_case("all_groups") == "AllGroups" + assert to_camel_case("hello") == "Hello" + assert to_camel_case("this_is_a_sample_case") == "ThisIsASampleCase" + assert to_camel_case("This_is_Mixed") == "ThisIsMixed" diff --git a/tests/test_virtual_module.py b/tests/test_virtual_module.py new file mode 100644 index 000000000..d3546c488 --- /dev/null +++ b/tests/test_virtual_module.py @@ -0,0 +1,10 @@ +import datajoint as dj +from datajoint.user_tables import UserTable +from . import CONN_INFO + + +def test_virtual_module(schema_obj): + module = dj.VirtualModule( + "module", schema_obj.schema.database, connection=dj.conn(**CONN_INFO) + ) + assert issubclass(module.Experiment, UserTable) diff --git a/tests_old/test_erd.py b/tests_old/test_erd.py deleted file mode 100644 index 1a6293431..000000000 --- a/tests_old/test_erd.py +++ /dev/null @@ -1,87 +0,0 @@ -from nose.tools import assert_false, assert_true -import datajoint as dj -from .schema_simple import A, B, D, E, L, schema, OutfitLaunch -from . import schema_advanced - -namespace = locals() - - -class TestERD: - @staticmethod - def setup(): - """ - class-level test setup. Executes before each test method. 
-        """
-
-    @staticmethod
-    def test_decorator():
-        assert_true(issubclass(A, dj.Lookup))
-        assert_false(issubclass(A, dj.Part))
-        assert_true(B.database == schema.database)
-        assert_true(issubclass(B.C, dj.Part))
-        assert_true(B.C.database == schema.database)
-        assert_true(B.C.master is B and E.F.master is E)
-
-    @staticmethod
-    def test_dependencies():
-        deps = schema.connection.dependencies
-        deps.load()
-        assert_true(
-            all(cls.full_table_name in deps for cls in (A, B, B.C, D, E, E.F, L))
-        )
-        assert_true(set(A().children()) == set([B.full_table_name, D.full_table_name]))
-        assert_true(set(D().parents(primary=True)) == set([A.full_table_name]))
-        assert_true(set(D().parents(primary=False)) == set([L.full_table_name]))
-        assert_true(
-            set(deps.descendants(L.full_table_name)).issubset(
-                cls.full_table_name for cls in (L, D, E, E.F)
-            )
-        )
-
-    @staticmethod
-    def test_erd():
-        assert_true(dj.diagram.diagram_active, "Failed to import networkx and pydot")
-        erd = dj.ERD(schema, context=namespace)
-        graph = erd._make_graph()
-        assert_true(
-            set(cls.__name__ for cls in (A, B, D, E, L)).issubset(graph.nodes())
-        )
-
-    @staticmethod
-    def test_erd_algebra():
-        erd0 = dj.ERD(B)
-        erd1 = erd0 + 3
-        erd2 = dj.Di(E) - 3
-        erd3 = erd1 * erd2
-        erd4 = (erd0 + E).add_parts() - B - E
-        assert_true(erd0.nodes_to_show == set(cls.full_table_name for cls in [B]))
-        assert_true(
-            erd1.nodes_to_show == set(cls.full_table_name for cls in (B, B.C, E, E.F))
-        )
-        assert_true(
-            erd2.nodes_to_show == set(cls.full_table_name for cls in (A, B, D, E, L))
-        )
-        assert_true(erd3.nodes_to_show == set(cls.full_table_name for cls in (B, E)))
-        assert_true(
-            erd4.nodes_to_show == set(cls.full_table_name for cls in (B.C, E.F))
-        )
-
-    @staticmethod
-    def test_repr_svg():
-        erd = dj.ERD(schema_advanced, context=namespace)
-        svg = erd._repr_svg_()
-        assert_true(svg.startswith("<svg") and svg.endswith("svg>"))
-
-    @staticmethod
-    def test_make_image():
-        erd = dj.ERD(schema, context=namespace)
-        img = erd.make_image()
-        assert_true(img.ndim == 3 and img.shape[2] in (3, 4))
-
-    @staticmethod
-    def test_part_table_parsing():
-        # https://github.com/datajoint/datajoint-python/issues/882
-        erd = dj.Di(schema)
-        graph = erd._make_graph()
-        assert "OutfitLaunch" in graph.nodes()
-        assert "OutfitLaunch.OutfitPiece" in graph.nodes()
diff --git a/tests_old/test_hash.py b/tests_old/test_hash.py
deleted file mode 100644
index dc88290eb..000000000
--- a/tests_old/test_hash.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from nose.tools import assert_equal
-from datajoint import hash
-
-
-def test_hash():
-    assert_equal(hash.uuid_from_buffer(b"abc").hex, "900150983cd24fb0d6963f7d28e17f72")
-    assert_equal(hash.uuid_from_buffer(b"").hex, "d41d8cd98f00b204e9800998ecf8427e")
diff --git a/tests_old/test_utils.py b/tests_old/test_utils.py
deleted file mode 100644
index b5ed96af3..000000000
--- a/tests_old/test_utils.py
+++ /dev/null
@@ -1,33 +0,0 @@
-"""
-Collection of test cases to test core module.
-""" -from nose.tools import assert_true, assert_raises, assert_equal -from datajoint import DataJointError -from datajoint.utils import from_camel_case, to_camel_case - - -def setup(): - pass - - -def teardown(): - pass - - -def test_from_camel_case(): - assert_equal(from_camel_case("AllGroups"), "all_groups") - with assert_raises(DataJointError): - from_camel_case("repNames") - with assert_raises(DataJointError): - from_camel_case("10_all") - with assert_raises(DataJointError): - from_camel_case("hello world") - with assert_raises(DataJointError): - from_camel_case("#baisc_names") - - -def test_to_camel_case(): - assert_equal(to_camel_case("all_groups"), "AllGroups") - assert_equal(to_camel_case("hello"), "Hello") - assert_equal(to_camel_case("this_is_a_sample_case"), "ThisIsASampleCase") - assert_equal(to_camel_case("This_is_Mixed"), "ThisIsMixed") diff --git a/tests_old/test_virtual_module.py b/tests_old/test_virtual_module.py deleted file mode 100644 index 58180916f..000000000 --- a/tests_old/test_virtual_module.py +++ /dev/null @@ -1,12 +0,0 @@ -from nose.tools import assert_true -import datajoint as dj -from datajoint.user_tables import UserTable -from . import schema -from . import CONN_INFO - - -def test_virtual_module(): - module = dj.VirtualModule( - "module", schema.schema.database, connection=dj.conn(**CONN_INFO) - ) - assert_true(issubclass(module.Experiment, UserTable)) From da85e97f13659575347523c0819f5275a22f232a Mon Sep 17 00:00:00 2001 From: A-Baji Date: Thu, 30 Nov 2023 13:14:28 -0600 Subject: [PATCH 002/212] feat: :sparkles: implement schema fixtures --- tests/__init__.py | 57 -------------------- tests/conftest.py | 61 ++++++++++++++++++++++ tests/test_blob_matlab.py | 98 +++++++++++++++++++---------------- tests/test_connection.py | 2 +- tests/test_nan.py | 40 +++++++------- tests/test_schema_keywords.py | 18 ++++--- tests/test_virtual_module.py | 5 +- 7 files changed, 152 insertions(+), 129 deletions(-) create mode 100644 tests/conftest.py diff --git a/tests/__init__.py b/tests/__init__.py index 0fd907166..70381c090 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -17,60 +17,3 @@ user=os.getenv("DJ_USER"), password=os.getenv("DJ_PASS"), ) - - -@pytest.fixture -def connection_root(): - """Root user database connection.""" - dj.config["safemode"] = False - connection = dj.Connection( - host=os.getenv("DJ_HOST"), - user=os.getenv("DJ_USER"), - password=os.getenv("DJ_PASS"), - ) - yield connection - dj.config["safemode"] = True - connection.close() - - -@pytest.fixture -def connection_test(connection_root): - """Test user database connection.""" - database = f"{PREFIX}%%" - credentials = dict( - host=os.getenv("DJ_HOST"), user="datajoint", password="datajoint" - ) - permission = "ALL PRIVILEGES" - - # Create MySQL users - if version.parse( - connection_root.query("select @@version;").fetchone()[0] - ) >= version.parse("8.0.0"): - # create user if necessary on mysql8 - connection_root.query( - f""" - CREATE USER IF NOT EXISTS '{credentials["user"]}'@'%%' - IDENTIFIED BY '{credentials["password"]}'; - """ - ) - connection_root.query( - f""" - GRANT {permission} ON `{database}`.* - TO '{credentials["user"]}'@'%%'; - """ - ) - else: - # grant permissions. 
For MySQL 5.7 this also automatically creates user - # if not exists - connection_root.query( - f""" - GRANT {permission} ON `{database}`.* - TO '{credentials["user"]}'@'%%' - IDENTIFIED BY '{credentials["password"]}'; - """ - ) - - connection = dj.Connection(**credentials) - yield connection - connection_root.query(f"""DROP USER `{credentials["user"]}`""") - connection.close() diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 000000000..49c1bb5b4 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,61 @@ +import datajoint as dj +from packaging import version +import os +import pytest +from . import schema, PREFIX + +@pytest.fixture(scope="session") +def connection_root(): + """Root user database connection.""" + dj.config["safemode"] = False + connection = dj.Connection( + host=os.getenv("DJ_HOST"), + user=os.getenv("DJ_USER"), + password=os.getenv("DJ_PASS"), + ) + yield connection + dj.config["safemode"] = True + connection.close() + + +@pytest.fixture(scope="session") +def connection_test(connection_root): + """Test user database connection.""" + database = f"{PREFIX}%%" + credentials = dict( + host=os.getenv("DJ_HOST"), user="datajoint", password="datajoint" + ) + permission = "ALL PRIVILEGES" + + # Create MySQL users + if version.parse( + connection_root.query("select @@version;").fetchone()[0] + ) >= version.parse("8.0.0"): + # create user if necessary on mysql8 + connection_root.query( + f""" + CREATE USER IF NOT EXISTS '{credentials["user"]}'@'%%' + IDENTIFIED BY '{credentials["password"]}'; + """ + ) + connection_root.query( + f""" + GRANT {permission} ON `{database}`.* + TO '{credentials["user"]}'@'%%'; + """ + ) + else: + # grant permissions. For MySQL 5.7 this also automatically creates user + # if not exists + connection_root.query( + f""" + GRANT {permission} ON `{database}`.* + TO '{credentials["user"]}'@'%%' + IDENTIFIED BY '{credentials["password"]}'; + """ + ) + + connection = dj.Connection(**credentials) + yield connection + connection_root.query(f"""DROP USER `{credentials["user"]}`""") + connection.close() diff --git a/tests/test_blob_matlab.py b/tests/test_blob_matlab.py index ecb698fec..504a4c52e 100644 --- a/tests/test_blob_matlab.py +++ b/tests/test_blob_matlab.py @@ -1,14 +1,12 @@ import numpy as np +import pytest import datajoint as dj from datajoint.blob import pack, unpack from numpy.testing import assert_array_equal -from . import PREFIX, CONN_INFO +from . 
import PREFIX -schema = dj.Schema(PREFIX + "_test1", locals(), connection=dj.conn(**CONN_INFO)) - -@schema class Blob(dj.Manual): definition = """ # diverse types of blobs id : int @@ -18,51 +16,63 @@ class Blob(dj.Manual): """ -def insert_blobs(): - """ - This function inserts blobs resulting from the following datajoint-matlab code: - - self.insert({ - 1 'simple string' 'character string' - 2 '1D vector' 1:15:180 - 3 'string array' {'string1' 'string2'} - 4 'struct array' struct('a', {1,2}, 'b', {struct('c', magic(3)), struct('C', magic(5))}) - 5 '3D double array' reshape(1:24, [2,3,4]) - 6 '3D uint8 array' reshape(uint8(1:24), [2,3,4]) - 7 '3D complex array' fftn(reshape(1:24, [2,3,4])) - }) - - and then dumped using the command - mysqldump -u username -p --hex-blob test_schema blob_table > blob.sql - """ +@pytest.fixture(scope="module") +def schema(connection_test): + schema = dj.Schema(PREFIX + "_test1", locals(), connection=dj.conn(connection_test)) + schema(Blob) + yield schema + schema.drop() - schema.connection.query( + +@pytest.fixture(scope="module") +def insert_blobs_func(schema): + def insert_blobs(): + """ + This function inserts blobs resulting from the following datajoint-matlab code: + + self.insert({ + 1 'simple string' 'character string' + 2 '1D vector' 1:15:180 + 3 'string array' {'string1' 'string2'} + 4 'struct array' struct('a', {1,2}, 'b', {struct('c', magic(3)), struct('C', magic(5))}) + 5 '3D double array' reshape(1:24, [2,3,4]) + 6 '3D uint8 array' reshape(uint8(1:24), [2,3,4]) + 7 '3D complex array' fftn(reshape(1:24, [2,3,4])) + }) + + and then dumped using the command + mysqldump -u username -p --hex-blob test_schema blob_table > blob.sql """ - INSERT INTO {table_name} VALUES - (1,'simple string',0x6D596D00410200000000000000010000000000000010000000000000000400000000000000630068006100720061006300740065007200200073007400720069006E006700), - (2,'1D vector',0x6D596D0041020000000000000001000000000000000C000000000000000600000000000000000000000000F03F00000000000030400000000000003F4000000000000047400000000000804E4000000000000053400000000000C056400000000000805A400000000000405E4000000000000061400000000000E062400000000000C06440), - (3,'string array',0x6D596D00430200000000000000010000000000000002000000000000002F0000000000000041020000000000000001000000000000000700000000000000040000000000000073007400720069006E00670031002F0000000000000041020000000000000001000000000000000700000000000000040000000000000073007400720069006E0067003200), - (4,'struct 
array',0x6D596D005302000000000000000100000000000000020000000000000002000000610062002900000000000000410200000000000000010000000000000001000000000000000600000000000000000000000000F03F9000000000000000530200000000000000010000000000000001000000000000000100000063006900000000000000410200000000000000030000000000000003000000000000000600000000000000000000000000204000000000000008400000000000001040000000000000F03F0000000000001440000000000000224000000000000018400000000000001C40000000000000004029000000000000004102000000000000000100000000000000010000000000000006000000000000000000000000000040100100000000000053020000000000000001000000000000000100000000000000010000004300E9000000000000004102000000000000000500000000000000050000000000000006000000000000000000000000003140000000000000374000000000000010400000000000002440000000000000264000000000000038400000000000001440000000000000184000000000000028400000000000003240000000000000F03F0000000000001C400000000000002A400000000000003340000000000000394000000000000020400000000000002C400000000000003440000000000000354000000000000000400000000000002E400000000000003040000000000000364000000000000008400000000000002240), - (5,'3D double array',0x6D596D004103000000000000000200000000000000030000000000000004000000000000000600000000000000000000000000F03F000000000000004000000000000008400000000000001040000000000000144000000000000018400000000000001C40000000000000204000000000000022400000000000002440000000000000264000000000000028400000000000002A400000000000002C400000000000002E40000000000000304000000000000031400000000000003240000000000000334000000000000034400000000000003540000000000000364000000000000037400000000000003840), - (6,'3D uint8 array',0x6D596D0041030000000000000002000000000000000300000000000000040000000000000009000000000000000102030405060708090A0B0C0D0E0F101112131415161718), - (7,'3D complex array',0x6D596D0041030000000000000002000000000000000300000000000000040000000000000006000000010000000000000000C0724000000000000028C000000000000038C0000000000000000000000000000038C0000000000000000000000000000052C00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000052C00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000052C00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000AA4C58E87AB62B400000000000000000AA4C58E87AB62BC0000000000000008000000000000052400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000080000000000000008000000000000052C000000000000000800000000000000080000000000000008000000000000000800000000000000080 - ); - """.format( - table_name=Blob.full_table_name + + schema.connection.query( + """ + INSERT INTO {table_name} VALUES + (1,'simple string',0x6D596D00410200000000000000010000000000000010000000000000000400000000000000630068006100720061006300740065007200200073007400720069006E006700), + (2,'1D vector',0x6D596D0041020000000000000001000000000000000C000000000000000600000000000000000000000000F03F00000000000030400000000000003F4000000000000047400000000000804E4000000000000053400000000000C056400000000000805A400000000000405E4000000000000061400000000000E062400000000000C06440), + (3,'string 
array',0x6D596D00430200000000000000010000000000000002000000000000002F0000000000000041020000000000000001000000000000000700000000000000040000000000000073007400720069006E00670031002F0000000000000041020000000000000001000000000000000700000000000000040000000000000073007400720069006E0067003200), + (4,'struct array',0x6D596D005302000000000000000100000000000000020000000000000002000000610062002900000000000000410200000000000000010000000000000001000000000000000600000000000000000000000000F03F9000000000000000530200000000000000010000000000000001000000000000000100000063006900000000000000410200000000000000030000000000000003000000000000000600000000000000000000000000204000000000000008400000000000001040000000000000F03F0000000000001440000000000000224000000000000018400000000000001C40000000000000004029000000000000004102000000000000000100000000000000010000000000000006000000000000000000000000000040100100000000000053020000000000000001000000000000000100000000000000010000004300E9000000000000004102000000000000000500000000000000050000000000000006000000000000000000000000003140000000000000374000000000000010400000000000002440000000000000264000000000000038400000000000001440000000000000184000000000000028400000000000003240000000000000F03F0000000000001C400000000000002A400000000000003340000000000000394000000000000020400000000000002C400000000000003440000000000000354000000000000000400000000000002E400000000000003040000000000000364000000000000008400000000000002240), + (5,'3D double array',0x6D596D004103000000000000000200000000000000030000000000000004000000000000000600000000000000000000000000F03F000000000000004000000000000008400000000000001040000000000000144000000000000018400000000000001C40000000000000204000000000000022400000000000002440000000000000264000000000000028400000000000002A400000000000002C400000000000002E40000000000000304000000000000031400000000000003240000000000000334000000000000034400000000000003540000000000000364000000000000037400000000000003840), + (6,'3D uint8 array',0x6D596D0041030000000000000002000000000000000300000000000000040000000000000009000000000000000102030405060708090A0B0C0D0E0F101112131415161718), + (7,'3D complex array',0x6D596D0041030000000000000002000000000000000300000000000000040000000000000006000000010000000000000000C0724000000000000028C000000000000038C0000000000000000000000000000038C0000000000000000000000000000052C00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000052C00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000052C00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000AA4C58E87AB62B400000000000000000AA4C58E87AB62BC0000000000000008000000000000052400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000080000000000000008000000000000052C000000000000000800000000000000080000000000000008000000000000000800000000000000080 + ); + """.format( + table_name=Blob.full_table_name + ) ) - ) + yield insert_blobs -class TestFetch: - @classmethod - def setup_class(cls): - dj.config["safemode"] = False # temp - assert not dj.config["safemode"], "safemode must be disabled" - Blob().delete() - insert_blobs() +@pytest.fixture(scope="class") +def setup_class(schema, insert_blobs_func): + assert not dj.config["safemode"], "safemode must be disabled" + Blob().delete() + insert_blobs_func() + + +class TestFetch: @staticmethod - def test_complex_matlab_blobs(): + def 
test_complex_matlab_blobs(setup_class): """ test correct de-serialization of various blob types """ @@ -109,7 +119,7 @@ def test_complex_matlab_blobs(): assert_array_equal(blob, unpack(pack(blob))) @staticmethod - def test_complex_matlab_squeeze(): + def test_complex_matlab_squeeze(setup_class): """ test correct de-serialization of various blob types """ @@ -164,7 +174,7 @@ def test_complex_matlab_squeeze(): assert tuple(blob.shape) == (2, 3, 4) assert blob.dtype == "complex128" - def test_iter(self): + def test_iter(self, setup_class): """ test iterator over the entity set """ diff --git a/tests/test_connection.py b/tests/test_connection.py index 1916da951..76b6d2389 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -5,7 +5,7 @@ import datajoint as dj from datajoint import DataJointError import numpy as np -from . import CONN_INFO_ROOT, connection_root, connection_test +from . import CONN_INFO_ROOT from . import PREFIX import pytest diff --git a/tests/test_nan.py b/tests/test_nan.py index ad4e6239e..1b3fb9f00 100644 --- a/tests/test_nan.py +++ b/tests/test_nan.py @@ -1,11 +1,8 @@ import numpy as np import datajoint as dj -from . import PREFIX, CONN_INFO +from . import PREFIX +import pytest -schema = dj.Schema(PREFIX + "_nantest", locals(), connection=dj.conn(**CONN_INFO)) - - -@schema class NanTest(dj.Manual): definition = """ id :int @@ -13,26 +10,33 @@ class NanTest(dj.Manual): value=null :double """ +@pytest.fixture(scope="module") +def schema(connection_test): + schema = dj.Schema(PREFIX + "_nantest", locals(), connection=dj.conn(connection_test)) + schema(NanTest) + yield schema + schema.drop() -class TestNaNInsert: - @classmethod - def setup_class(cls): - cls.rel = NanTest() - with dj.config(safemode=False): - cls.rel.delete() - a = np.array([0, 1 / 3, np.nan, np.pi, np.nan]) - cls.rel.insert(((i, value) for i, value in enumerate(a))) - cls.a = a +@pytest.fixture(scope="class") +def setup_class(request, schema): + rel = NanTest() + with dj.config(safemode=False): + rel.delete() + a = np.array([0, 1 / 3, np.nan, np.pi, np.nan]) + rel.insert(((i, value) for i, value in enumerate(a))) + request.cls.rel = rel + request.cls.a = a - def test_insert_nan(self): +class TestNaNInsert: + def test_insert_nan(self, setup_class): """Test fetching of null values""" b = self.rel.fetch("value", order_by="id") - (np.isnan(self.a) == np.isnan(b)).all(), "incorrect handling of Nans" - np.allclose( + assert (np.isnan(self.a) == np.isnan(b)).all(), "incorrect handling of Nans" + assert np.allclose( self.a[np.logical_not(np.isnan(self.a))], b[np.logical_not(np.isnan(b))] ), "incorrect storage of floats" - def test_nulls_do_not_affect_primary_keys(self): + def test_nulls_do_not_affect_primary_keys(self, setup_class): """Test against a case that previously caused a bug when skipping existing entries.""" self.rel.insert( ((i, value) for i, value in enumerate(self.a)), skip_duplicates=True diff --git a/tests/test_schema_keywords.py b/tests/test_schema_keywords.py index 1853852ed..e8354ec26 100644 --- a/tests/test_schema_keywords.py +++ b/tests/test_schema_keywords.py @@ -1,10 +1,8 @@ -from . import PREFIX, CONN_INFO +from . 
import PREFIX import datajoint as dj +import pytest -schema = dj.Schema(PREFIX + "_keywords", connection=dj.conn(**CONN_INFO)) - -@schema class A(dj.Manual): definition = """ a_id: int # a id @@ -31,12 +29,20 @@ class C(dj.Part): """ -@schema class D(B): source = A -def test_inherited_part_table(): +@pytest.fixture(scope="module") +def schema(connection_test): + schema = dj.Schema(PREFIX + "_keywords", connection=dj.conn(connection_test)) + schema(A) + schema(D) + yield schema + schema.drop() + + +def test_inherited_part_table(schema): assert "a_id" in D().heading.attributes assert "b_id" in D().heading.attributes assert "a_id" in D.C().heading.attributes diff --git a/tests/test_virtual_module.py b/tests/test_virtual_module.py index d3546c488..fbb05002c 100644 --- a/tests/test_virtual_module.py +++ b/tests/test_virtual_module.py @@ -1,10 +1,9 @@ import datajoint as dj from datajoint.user_tables import UserTable -from . import CONN_INFO -def test_virtual_module(schema_obj): +def test_virtual_module(schema_obj, connection_test): module = dj.VirtualModule( - "module", schema_obj.schema.database, connection=dj.conn(**CONN_INFO) + "module", schema_obj.schema.database, connection=dj.conn(connection_test) ) assert issubclass(module.Experiment, UserTable) From 5b53e156d25741aa78498a18eac7ceb7f2d28cd3 Mon Sep 17 00:00:00 2001 From: A-Baji Date: Thu, 30 Nov 2023 14:13:21 -0600 Subject: [PATCH 003/212] convert schema.py to fixture [WIP] --- tests/conftest.py | 52 ++++++++++++++++++++++++++++++++++- tests/schema.py | 39 -------------------------- tests/test_blob.py | 2 +- tests/test_blob_matlab.py | 2 +- tests/test_nan.py | 2 +- tests/test_schema_keywords.py | 2 +- tests/test_virtual_module.py | 2 +- 7 files changed, 56 insertions(+), 45 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 49c1bb5b4..bea480b85 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,8 +1,15 @@ +import sys import datajoint as dj from packaging import version import os import pytest -from . import schema, PREFIX +import inspect +from . 
import PREFIX +from .schema import * + +# all_classes = [] +# for _, obj in inspect.getmembers(sys.modules[__name__], inspect.isclass): +# all_classes.append(obj) @pytest.fixture(scope="session") def connection_root(): @@ -59,3 +66,46 @@ def connection_test(connection_root): yield connection connection_root.query(f"""DROP USER `{credentials["user"]}`""") connection.close() + +@pytest.fixture +def schema_fixture(connection_test): + schema = dj.Schema(PREFIX + "_test1", connection=connection_test) + schema(TTest) + schema(TTest) + schema(TTest2) + schema(TTest3) + schema(NullableNumbers) + schema(TTestExtra) + schema(TTestNoExtra) + schema(Auto) + schema(User) + schema(Subject) + schema(Language) + schema(Experiment) + schema(Trial) + schema(Ephys) + schema(Image) + schema(UberTrash) + schema(UnterTrash) + schema(SimpleSource) + schema(SigIntTable) + schema(SigTermTable) + schema(DjExceptionName) + schema(ErrorClass) + schema(DecimalPrimaryKey) + schema(IndexRich) + schema(ThingA) + schema(ThingB) + schema(ThingC) + schema(Parent) + schema(Child) + schema(ComplexParent) + schema(ComplexChild) + schema(SubjectA) + schema(SessionA) + schema(SessionStatusA) + schema(SessionDateA) + schema(Stimulus) + schema(Longblob) + yield schema + schema.drop() \ No newline at end of file diff --git a/tests/schema.py b/tests/schema.py index dafd481da..4128ddd30 100644 --- a/tests/schema.py +++ b/tests/schema.py @@ -6,12 +6,8 @@ import numpy as np import datajoint as dj import inspect -from . import PREFIX, CONN_INFO -schema = dj.Schema(PREFIX + "_test1", connection=dj.conn(**CONN_INFO)) - -@schema class TTest(dj.Lookup): """ doc string @@ -25,7 +21,6 @@ class TTest(dj.Lookup): contents = [(k, 2 * k) for k in range(10)] -@schema class TTest2(dj.Manual): definition = """ key : int # key @@ -34,7 +29,6 @@ class TTest2(dj.Manual): """ -@schema class TTest3(dj.Manual): definition = """ key : int @@ -43,7 +37,6 @@ class TTest3(dj.Manual): """ -@schema class NullableNumbers(dj.Manual): definition = """ key : int @@ -54,7 +47,6 @@ class NullableNumbers(dj.Manual): """ -@schema class TTestExtra(dj.Manual): """ clone of Test but with an extra field @@ -63,7 +55,6 @@ class TTestExtra(dj.Manual): definition = TTest.definition + "\nextra : int # extra int\n" -@schema class TTestNoExtra(dj.Manual): """ clone of Test but with no extra fields @@ -72,7 +63,6 @@ class TTestNoExtra(dj.Manual): definition = TTest.definition -@schema class Auto(dj.Lookup): definition = """ id :int auto_increment @@ -85,7 +75,6 @@ def fill(self): self.insert([dict(name="Godel"), dict(name="Escher"), dict(name="Bach")]) -@schema class User(dj.Lookup): definition = """ # lab members username: varchar(12) @@ -101,7 +90,6 @@ class User(dj.Lookup): ] -@schema class Subject(dj.Lookup): definition = """ # Basic information about animal subjects used in experiments subject_id :int # unique subject id @@ -121,7 +109,6 @@ class Subject(dj.Lookup): ] -@schema class Language(dj.Lookup): definition = """ # languages spoken by some of the developers @@ -139,7 +126,6 @@ class Language(dj.Lookup): ] -@schema class Experiment(dj.Imported): definition = """ # information about experiments -> Subject @@ -175,7 +161,6 @@ def make(self, key): ) -@schema class Trial(dj.Imported): definition = """ # a trial within an experiment -> Experiment.proj(animal='subject_id') @@ -205,7 +190,6 @@ def make(self, key): ) -@schema class Ephys(dj.Imported): definition = """ # some kind of electrophysiological recording -> Trial @@ -244,7 +228,6 @@ def _make_tuples(self, key): ) 
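The mechanic behind this long run of removals: a `dj.Schema` instance is itself the decorator, so dropping `@schema` defers table activation from import time to fixture time, when a test connection actually exists. A minimal sketch of the resulting pattern, assuming the `connection_test` fixture and `PREFIX` constant defined earlier in this series; `Widget` and `widget_schema` are illustrative names, not tables from this suite:

import datajoint as dj
import pytest

from . import PREFIX


class Widget(dj.Manual):
    # declared at import time, but not yet bound to any database
    definition = """
    widget_id : int
    """


@pytest.fixture
def widget_schema(connection_test):
    schema = dj.Schema(PREFIX + "_widgets", connection=connection_test)
    schema(Widget)  # same effect as the removed @schema decorator
    yield schema
    schema.drop()  # teardown keeps test runs order-independent

The point of the deferral is that importing a test module no longer opens a database connection; only tests that request the fixture do, which is what makes the [WIP] fixture above workable.
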
-@schema class Image(dj.Manual): definition = """ # table for testing blob inserts @@ -254,7 +237,6 @@ class Image(dj.Manual): """ -@schema class UberTrash(dj.Lookup): definition = """ id : int @@ -263,7 +245,6 @@ class UberTrash(dj.Lookup): contents = [(1,)] -@schema class UnterTrash(dj.Lookup): definition = """ -> UberTrash @@ -273,7 +254,6 @@ class UnterTrash(dj.Lookup): contents = [(1, 1), (1, 2)] -@schema class SimpleSource(dj.Lookup): definition = """ id : int # id @@ -281,7 +261,6 @@ class SimpleSource(dj.Lookup): contents = ((x,) for x in range(10)) -@schema class SigIntTable(dj.Computed): definition = """ -> SimpleSource @@ -291,7 +270,6 @@ def _make_tuples(self, key): raise KeyboardInterrupt -@schema class SigTermTable(dj.Computed): definition = """ -> SimpleSource @@ -301,7 +279,6 @@ def make(self, key): raise SystemExit("SIGTERM received") -@schema class DjExceptionName(dj.Lookup): definition = """ dj_exception_name: char(64) @@ -316,7 +293,6 @@ def contents(self): ] -@schema class ErrorClass(dj.Computed): definition = """ -> DjExceptionName @@ -327,7 +303,6 @@ def make(self, key): raise getattr(dj.errors, exception_name) -@schema class DecimalPrimaryKey(dj.Lookup): definition = """ id : decimal(4,3) @@ -335,7 +310,6 @@ class DecimalPrimaryKey(dj.Lookup): contents = zip((0.1, 0.25, 3.99)) -@schema class IndexRich(dj.Manual): definition = """ -> Subject @@ -348,14 +322,12 @@ class IndexRich(dj.Manual): # Schema for issue 656 -@schema class ThingA(dj.Manual): definition = """ a: int """ -@schema class ThingB(dj.Manual): definition = """ b1: int @@ -365,7 +337,6 @@ class ThingB(dj.Manual): """ -@schema class ThingC(dj.Manual): definition = """ -> ThingA @@ -374,7 +345,6 @@ class ThingC(dj.Manual): """ -@schema class Parent(dj.Lookup): definition = """ parent_id: int @@ -384,7 +354,6 @@ class Parent(dj.Lookup): contents = [(1, "Joe")] -@schema class Child(dj.Lookup): definition = """ -> Parent @@ -396,13 +365,11 @@ class Child(dj.Lookup): # Related to issue #886 (8), #883 (5) -@schema class ComplexParent(dj.Lookup): definition = "\n".join(["parent_id_{}: int".format(i + 1) for i in range(8)]) contents = [tuple(i for i in range(8))] -@schema class ComplexChild(dj.Lookup): definition = "\n".join( ["-> ComplexParent"] + ["child_id_{}: int".format(i + 1) for i in range(1)] @@ -410,7 +377,6 @@ class ComplexChild(dj.Lookup): contents = [tuple(i for i in range(9))] -@schema class SubjectA(dj.Lookup): definition = """ subject_id: varchar(32) @@ -425,7 +391,6 @@ class SubjectA(dj.Lookup): ] -@schema class SessionA(dj.Lookup): definition = """ -> SubjectA @@ -441,7 +406,6 @@ class SessionA(dj.Lookup): ] -@schema class SessionStatusA(dj.Lookup): definition = """ -> SessionA @@ -456,7 +420,6 @@ class SessionStatusA(dj.Lookup): ] -@schema class SessionDateA(dj.Lookup): definition = """ -> SubjectA @@ -470,7 +433,6 @@ class SessionDateA(dj.Lookup): ] -@schema class Stimulus(dj.Lookup): definition = """ id: int @@ -480,7 +442,6 @@ class Stimulus(dj.Lookup): """ -@schema class Longblob(dj.Manual): definition = """ id: int diff --git a/tests/test_blob.py b/tests/test_blob.py index 562d78f2b..761b02cf5 100644 --- a/tests/test_blob.py +++ b/tests/test_blob.py @@ -169,7 +169,7 @@ def test_complex(): assert_array_equal(x, unpack(pack(x)), "Arrays do not match!") -def test_insert_longblob(): +def test_insert_longblob(schema_fixture): insert_dj_blob = {"id": 1, "data": [1, 2, 3]} schema.Longblob.insert1(insert_dj_blob) assert (schema.Longblob & "id=1").fetch1() == insert_dj_blob diff --git 
a/tests/test_blob_matlab.py b/tests/test_blob_matlab.py index 504a4c52e..06154b1fc 100644 --- a/tests/test_blob_matlab.py +++ b/tests/test_blob_matlab.py @@ -18,7 +18,7 @@ class Blob(dj.Manual): @pytest.fixture(scope="module") def schema(connection_test): - schema = dj.Schema(PREFIX + "_test1", locals(), connection=dj.conn(connection_test)) + schema = dj.Schema(PREFIX + "_test1", locals(), connection=connection_test) schema(Blob) yield schema schema.drop() diff --git a/tests/test_nan.py b/tests/test_nan.py index 1b3fb9f00..38dd5036f 100644 --- a/tests/test_nan.py +++ b/tests/test_nan.py @@ -12,7 +12,7 @@ class NanTest(dj.Manual): @pytest.fixture(scope="module") def schema(connection_test): - schema = dj.Schema(PREFIX + "_nantest", locals(), connection=dj.conn(connection_test)) + schema = dj.Schema(PREFIX + "_nantest", locals(), connection=connection_test) schema(NanTest) yield schema schema.drop() diff --git a/tests/test_schema_keywords.py b/tests/test_schema_keywords.py index e8354ec26..c8b7d5a24 100644 --- a/tests/test_schema_keywords.py +++ b/tests/test_schema_keywords.py @@ -35,7 +35,7 @@ class D(B): @pytest.fixture(scope="module") def schema(connection_test): - schema = dj.Schema(PREFIX + "_keywords", connection=dj.conn(connection_test)) + schema = dj.Schema(PREFIX + "_keywords", connection=connection_test) schema(A) schema(D) yield schema diff --git a/tests/test_virtual_module.py b/tests/test_virtual_module.py index fbb05002c..b7c3f23bb 100644 --- a/tests/test_virtual_module.py +++ b/tests/test_virtual_module.py @@ -4,6 +4,6 @@ def test_virtual_module(schema_obj, connection_test): module = dj.VirtualModule( - "module", schema_obj.schema.database, connection=dj.conn(connection_test) + "module", schema_obj.schema.database, connection=connection_test ) assert issubclass(module.Experiment, UserTable) From aaee0a1af8761d01ddb7332d76482971411fc4c3 Mon Sep 17 00:00:00 2001 From: A-Baji Date: Fri, 1 Dec 2023 13:59:38 -0600 Subject: [PATCH 004/212] convert schema files to fixtures --- tests/conftest.py | 137 +++++++++++++++++++++++------------ tests/schema.py | 2 + tests/schema_advanced.py | 12 +-- tests/schema_simple.py | 19 +---- tests/test_blob.py | 23 +++--- tests/test_dependencies.py | 7 +- tests/test_erd.py | 118 ++++++++++++++---------------- tests/test_foreign_keys.py | 25 +++---- tests/test_groupby.py | 2 +- tests/test_log.py | 7 +- tests/test_nan.py | 6 +- tests/test_relation_u.py | 61 ++++++++-------- tests/test_virtual_module.py | 6 +- 13 files changed, 216 insertions(+), 209 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index bea480b85..8335b1c11 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,12 +4,10 @@ import os import pytest import inspect -from . import PREFIX -from .schema import * +from . 
import PREFIX, schema, schema_simple, schema_advanced + +namespace = locals() -# all_classes = [] -# for _, obj in inspect.getmembers(sys.modules[__name__], inspect.isclass): -# all_classes.append(obj) @pytest.fixture(scope="session") def connection_root(): @@ -67,45 +65,92 @@ def connection_test(connection_root): connection_root.query(f"""DROP USER `{credentials["user"]}`""") connection.close() -@pytest.fixture -def schema_fixture(connection_test): - schema = dj.Schema(PREFIX + "_test1", connection=connection_test) - schema(TTest) - schema(TTest) - schema(TTest2) - schema(TTest3) - schema(NullableNumbers) - schema(TTestExtra) - schema(TTestNoExtra) - schema(Auto) - schema(User) - schema(Subject) - schema(Language) - schema(Experiment) - schema(Trial) - schema(Ephys) - schema(Image) - schema(UberTrash) - schema(UnterTrash) - schema(SimpleSource) - schema(SigIntTable) - schema(SigTermTable) - schema(DjExceptionName) - schema(ErrorClass) - schema(DecimalPrimaryKey) - schema(IndexRich) - schema(ThingA) - schema(ThingB) - schema(ThingC) - schema(Parent) - schema(Child) - schema(ComplexParent) - schema(ComplexChild) - schema(SubjectA) - schema(SessionA) - schema(SessionStatusA) - schema(SessionDateA) - schema(Stimulus) - schema(Longblob) + +@pytest.fixture(scope="module") +def schema_any(connection_test): + schema_any = dj.Schema( + PREFIX + "_test1", schema.__dict__, connection=connection_test + ) + schema_any(schema.TTest) + schema_any(schema.TTest2) + schema_any(schema.TTest3) + schema_any(schema.NullableNumbers) + schema_any(schema.TTestExtra) + schema_any(schema.TTestNoExtra) + schema_any(schema.Auto) + schema_any(schema.User) + schema_any(schema.Subject) + schema_any(schema.Language) + schema_any(schema.Experiment) + schema_any(schema.Trial) + schema_any(schema.Ephys) + schema_any(schema.Image) + schema_any(schema.UberTrash) + schema_any(schema.UnterTrash) + schema_any(schema.SimpleSource) + schema_any(schema.SigIntTable) + schema_any(schema.SigTermTable) + schema_any(schema.DjExceptionName) + schema_any(schema.ErrorClass) + schema_any(schema.DecimalPrimaryKey) + schema_any(schema.IndexRich) + schema_any(schema.ThingA) + schema_any(schema.ThingB) + schema_any(schema.ThingC) + schema_any(schema.Parent) + schema_any(schema.Child) + schema_any(schema.ComplexParent) + schema_any(schema.ComplexChild) + schema_any(schema.SubjectA) + schema_any(schema.SessionA) + schema_any(schema.SessionStatusA) + schema_any(schema.SessionDateA) + schema_any(schema.Stimulus) + schema_any(schema.Longblob) + yield schema_any + schema_any.drop() + + +@pytest.fixture(scope="module") +def schema_simp(connection_test): + schema = dj.Schema( + PREFIX + "_relational", schema_simple.__dict__, connection=connection_test + ) + schema(schema_simple.IJ) + schema(schema_simple.JI) + schema(schema_simple.A) + schema(schema_simple.B) + schema(schema_simple.L) + schema(schema_simple.D) + schema(schema_simple.E) + schema(schema_simple.F) + schema(schema_simple.F) + schema(schema_simple.DataA) + schema(schema_simple.DataB) + schema(schema_simple.Website) + schema(schema_simple.Profile) + schema(schema_simple.Website) + schema(schema_simple.TTestUpdate) + schema(schema_simple.ArgmaxTest) + schema(schema_simple.ReservedWord) + schema(schema_simple.OutfitLaunch) + yield schema + schema.drop() + + +@pytest.fixture(scope="module") +def schema_adv(connection_test): + schema = dj.Schema( + PREFIX + "_advanced", schema_advanced.__dict__, connection=connection_test + ) + schema(schema_advanced.Person) + schema(schema_advanced.Parent) + 
schema(schema_advanced.Subject) + schema(schema_advanced.Prep) + schema(schema_advanced.Slice) + schema(schema_advanced.Cell) + schema(schema_advanced.InputCell) + schema(schema_advanced.LocalSynapse) + schema(schema_advanced.GlobalSynapse) yield schema - schema.drop() \ No newline at end of file + schema.drop() diff --git a/tests/schema.py b/tests/schema.py index 4128ddd30..864c5efe4 100644 --- a/tests/schema.py +++ b/tests/schema.py @@ -7,6 +7,8 @@ import datajoint as dj import inspect +LOCALS_ANY = locals() + class TTest(dj.Lookup): """ diff --git a/tests/schema_advanced.py b/tests/schema_advanced.py index 7580611e2..104e4d1e4 100644 --- a/tests/schema_advanced.py +++ b/tests/schema_advanced.py @@ -1,10 +1,8 @@ import datajoint as dj -from . import PREFIX, CONN_INFO -schema = dj.Schema(PREFIX + "_advanced", locals(), connection=dj.conn(**CONN_INFO)) +LOCALS_ADVANCED = locals() -@schema class Person(dj.Manual): definition = """ person_id : int @@ -39,7 +37,6 @@ def fill(self): ) -@schema class Parent(dj.Manual): definition = """ -> Person @@ -89,7 +86,6 @@ def make_parent(pid, parent): ) -@schema class Subject(dj.Manual): definition = """ subject : int @@ -98,14 +94,12 @@ class Subject(dj.Manual): """ -@schema class Prep(dj.Manual): definition = """ prep : int """ -@schema class Slice(dj.Manual): definition = """ -> Prep @@ -113,7 +107,6 @@ class Slice(dj.Manual): """ -@schema class Cell(dj.Manual): definition = """ -> Slice @@ -121,7 +114,6 @@ class Cell(dj.Manual): """ -@schema class InputCell(dj.Manual): definition = """ # a synapse within the slice -> Cell @@ -129,7 +121,6 @@ class InputCell(dj.Manual): """ -@schema class LocalSynapse(dj.Manual): definition = """ # a synapse within the slice -> Cell.proj(presynaptic='cell') @@ -137,7 +128,6 @@ class LocalSynapse(dj.Manual): """ -@schema class GlobalSynapse(dj.Manual): # Mix old-style and new-style projected foreign keys definition = """ diff --git a/tests/schema_simple.py b/tests/schema_simple.py index 78f64d036..bb5c21ff5 100644 --- a/tests/schema_simple.py +++ b/tests/schema_simple.py @@ -7,14 +7,12 @@ import hashlib import uuid import faker -from . 
import PREFIX, CONN_INFO import numpy as np from datetime import date, timedelta -schema = dj.Schema(PREFIX + "_relational", locals(), connection=dj.conn(**CONN_INFO)) +LOCALS_SIMPLE = locals() -@schema class IJ(dj.Lookup): definition = """ # tests restrictions i : int @@ -23,7 +21,6 @@ class IJ(dj.Lookup): contents = list(dict(i=i, j=j + 2) for i in range(3) for j in range(3)) -@schema class JI(dj.Lookup): definition = """ # tests restrictions by relations when attributes are reordered j : int @@ -32,7 +29,6 @@ class JI(dj.Lookup): contents = list(dict(i=i + 1, j=j) for i in range(3) for j in range(3)) -@schema class A(dj.Lookup): definition = """ id_a :int @@ -42,7 +38,6 @@ class A(dj.Lookup): contents = [(i, i % 4 > i % 3) for i in range(10)] -@schema class B(dj.Computed): definition = """ -> A @@ -76,7 +71,6 @@ def make(self, key): ) -@schema class L(dj.Lookup): definition = """ id_l: int @@ -86,7 +80,6 @@ class L(dj.Lookup): contents = [(i, i % 3 >= i % 5) for i in range(30)] -@schema class D(dj.Computed): definition = """ -> A @@ -102,7 +95,6 @@ def _make_tuples(self, key): self.insert(dict(key, id_d=i, **random.choice(lookup)) for i in range(4)) -@schema class E(dj.Computed): definition = """ -> B @@ -132,7 +124,6 @@ def make(self, key): ) -@schema class F(dj.Manual): definition = """ id: int @@ -141,7 +132,6 @@ class F(dj.Manual): """ -@schema class DataA(dj.Lookup): definition = """ idx : int @@ -151,7 +141,6 @@ class DataA(dj.Lookup): contents = list(zip(range(5), range(5))) -@schema class DataB(dj.Lookup): definition = """ idx : int @@ -161,7 +150,6 @@ class DataB(dj.Lookup): contents = list(zip(range(5), range(5, 10))) -@schema class Website(dj.Lookup): definition = """ url_hash : uuid @@ -177,7 +165,6 @@ def insert1_url(self, url): return url_hash -@schema class Profile(dj.Manual): definition = """ ssn : char(11) @@ -210,7 +197,6 @@ def populate_random(self, n=10): ) -@schema class TTestUpdate(dj.Lookup): definition = """ primary_key : int @@ -226,7 +212,6 @@ class TTestUpdate(dj.Lookup): ] -@schema class ArgmaxTest(dj.Lookup): definition = """ primary_key : int @@ -247,7 +232,6 @@ def contents(self): ) -@schema class ReservedWord(dj.Manual): definition = """ # Test of SQL reserved words @@ -260,7 +244,6 @@ class ReservedWord(dj.Manual): """ -@schema class OutfitLaunch(dj.Lookup): definition = """ # Monthly released designer outfits diff --git a/tests/test_blob.py b/tests/test_blob.py index 761b02cf5..a3de2e9a9 100644 --- a/tests/test_blob.py +++ b/tests/test_blob.py @@ -1,13 +1,14 @@ +import pytest import datajoint as dj import timeit import numpy as np import uuid -from . 
import schema from decimal import Decimal from datetime import datetime from datajoint.blob import pack, unpack from numpy.testing import assert_array_equal from pytest import approx +from .schema import * def test_pack(): @@ -169,18 +170,16 @@ def test_complex(): assert_array_equal(x, unpack(pack(x)), "Arrays do not match!") -def test_insert_longblob(schema_fixture): +def test_insert_longblob(schema_any): insert_dj_blob = {"id": 1, "data": [1, 2, 3]} - schema.Longblob.insert1(insert_dj_blob) - assert (schema.Longblob & "id=1").fetch1() == insert_dj_blob - (schema.Longblob & "id=1").delete() + Longblob.insert1(insert_dj_blob) + assert (Longblob & "id=1").fetch1() == insert_dj_blob + (Longblob & "id=1").delete() query_mym_blob = {"id": 1, "data": np.array([1, 2, 3])} - schema.Longblob.insert1(query_mym_blob) - assert (schema.Longblob & "id=1").fetch1()["data"].all() == query_mym_blob[ - "data" - ].all() - (schema.Longblob & "id=1").delete() + Longblob.insert1(query_mym_blob) + assert (Longblob & "id=1").fetch1()["data"].all() == query_mym_blob["data"].all() + (Longblob & "id=1").delete() query_32_blob = ( "INSERT INTO djtest_test1.longblob (id, data) VALUES (1, " @@ -193,7 +192,7 @@ def test_insert_longblob(schema_fixture): ) dj.conn().query(query_32_blob).fetchall() dj.blob.use_32bit_dims = True - assert (schema.Longblob & "id=1").fetch1() == { + assert (Longblob & "id=1").fetch1() == { "id": 1, "data": np.rec.array( [ @@ -209,7 +208,7 @@ def test_insert_longblob(schema_fixture): dtype=[("hits", "O"), ("sides", "O"), ("tasks", "O"), ("stage", "O")], ), } - (schema.Longblob & "id=1").delete() + (Longblob & "id=1").delete() dj.blob.use_32bit_dims = False diff --git a/tests/test_dependencies.py b/tests/test_dependencies.py index 1e8b1da41..312e5f8ad 100644 --- a/tests/test_dependencies.py +++ b/tests/test_dependencies.py @@ -1,9 +1,8 @@ import datajoint as dj from datajoint import errors from pytest import raises - -from .schema import * from datajoint.dependencies import unite_master_parts +from .schema import * def test_unite_master_parts(): @@ -51,7 +50,7 @@ def test_unite_master_parts(): ] -def test_nullable_dependency(): +def test_nullable_dependency(schema_any): """test nullable unique foreign key""" # Thing C has a nullable dependency on B whose primary key is composite a = ThingA() @@ -80,7 +79,7 @@ def test_nullable_dependency(): assert len(c) == len(c.fetch()) == 5 -def test_unique_dependency(): +def test_unique_dependency(schema_any): """test nullable unique foreign key""" # Thing C has a nullable dependency on B whose primary key is composite diff --git a/tests/test_erd.py b/tests/test_erd.py index 991410995..f1274ec1b 100644 --- a/tests/test_erd.py +++ b/tests/test_erd.py @@ -1,76 +1,64 @@ import datajoint as dj -from .schema_simple import A, B, D, E, L, schema, OutfitLaunch -from . import schema_advanced +from .schema_simple import LOCALS_SIMPLE, A, B, D, E, L, OutfitLaunch +from .schema_advanced import * -namespace = locals() +def test_decorator(schema_simp): + assert issubclass(A, dj.Lookup) + assert not issubclass(A, dj.Part) + assert B.database == schema_simp.database + assert issubclass(B.C, dj.Part) + assert B.C.database == schema_simp.database + assert B.C.master is B and E.F.master is E -class TestERD: - @staticmethod - def setup_method(): - """ - class-level test setup. Executes before each test method. 
- """ - @staticmethod - def test_decorator(): - assert issubclass(A, dj.Lookup) - assert not issubclass(A, dj.Part) - assert B.database == schema.database - assert issubclass(B.C, dj.Part) - assert B.C.database == schema.database - assert B.C.master is B and E.F.master is E +def test_dependencies(schema_simp): + deps = schema_simp.connection.dependencies + deps.load() + assert all(cls.full_table_name in deps for cls in (A, B, B.C, D, E, E.F, L)) + assert set(A().children()) == set([B.full_table_name, D.full_table_name]) + assert set(D().parents(primary=True)) == set([A.full_table_name]) + assert set(D().parents(primary=False)) == set([L.full_table_name]) + assert set(deps.descendants(L.full_table_name)).issubset( + cls.full_table_name for cls in (L, D, E, E.F) + ) - @staticmethod - def test_dependencies(): - deps = schema.connection.dependencies - deps.load() - assert all(cls.full_table_name in deps for cls in (A, B, B.C, D, E, E.F, L)) - assert set(A().children()) == set([B.full_table_name, D.full_table_name]) - assert set(D().parents(primary=True)) == set([A.full_table_name]) - assert set(D().parents(primary=False)) == set([L.full_table_name]) - assert set(deps.descendants(L.full_table_name)).issubset( - cls.full_table_name for cls in (L, D, E, E.F) - ) - @staticmethod - def test_erd(): - assert dj.diagram.diagram_active, "Failed to import networkx and pydot" - erd = dj.ERD(schema, context=namespace) - graph = erd._make_graph() - assert set(cls.__name__ for cls in (A, B, D, E, L)).issubset(graph.nodes()) +def test_erd(schema_simp): + assert dj.diagram.diagram_active, "Failed to import networkx and pydot" + erd = dj.ERD(schema_simp, context=LOCALS_SIMPLE) + graph = erd._make_graph() + assert set(cls.__name__ for cls in (A, B, D, E, L)).issubset(graph.nodes()) - @staticmethod - def test_erd_algebra(): - erd0 = dj.ERD(B) - erd1 = erd0 + 3 - erd2 = dj.Di(E) - 3 - erd3 = erd1 * erd2 - erd4 = (erd0 + E).add_parts() - B - E - assert erd0.nodes_to_show == set(cls.full_table_name for cls in [B]) - assert erd1.nodes_to_show == set( - cls.full_table_name for cls in (B, B.C, E, E.F) - ) - assert erd2.nodes_to_show == set(cls.full_table_name for cls in (A, B, D, E, L)) - assert erd3.nodes_to_show == set(cls.full_table_name for cls in (B, E)) - assert erd4.nodes_to_show == set(cls.full_table_name for cls in (B.C, E.F)) - @staticmethod - def test_repr_svg(): - erd = dj.ERD(schema_advanced, context=namespace) - svg = erd._repr_svg_() - assert svg.startswith("") +def test_erd_algebra(schema_simp): + erd0 = dj.ERD(B) + erd1 = erd0 + 3 + erd2 = dj.Di(E) - 3 + erd3 = erd1 * erd2 + erd4 = (erd0 + E).add_parts() - B - E + assert erd0.nodes_to_show == set(cls.full_table_name for cls in [B]) + assert erd1.nodes_to_show == set(cls.full_table_name for cls in (B, B.C, E, E.F)) + assert erd2.nodes_to_show == set(cls.full_table_name for cls in (A, B, D, E, L)) + assert erd3.nodes_to_show == set(cls.full_table_name for cls in (B, E)) + assert erd4.nodes_to_show == set(cls.full_table_name for cls in (B.C, E.F)) - @staticmethod - def test_make_image(): - erd = dj.ERD(schema, context=namespace) - img = erd.make_image() - assert img.ndim == 3 and img.shape[2] in (3, 4) - @staticmethod - def test_part_table_parsing(): - # https://github.com/datajoint/datajoint-python/issues/882 - erd = dj.Di(schema) - graph = erd._make_graph() - assert "OutfitLaunch" in graph.nodes() - assert "OutfitLaunch.OutfitPiece" in graph.nodes() +def test_repr_svg(schema_adv): + erd = dj.ERD(schema_adv, context=locals()) + svg = erd._repr_svg_() + 
assert svg.startswith("") + + +def test_make_image(schema_simp): + erd = dj.ERD(schema_simp, context=locals()) + img = erd.make_image() + assert img.ndim == 3 and img.shape[2] in (3, 4) + + +def test_part_table_parsing(schema_simp): + # https://github.com/datajoint/datajoint-python/issues/882 + erd = dj.Di(schema_simp) + graph = erd._make_graph() + assert "OutfitLaunch" in graph.nodes() + assert "OutfitLaunch.OutfitPiece" in graph.nodes() diff --git a/tests/test_foreign_keys.py b/tests/test_foreign_keys.py index 05d87c041..18daa952a 100644 --- a/tests/test_foreign_keys.py +++ b/tests/test_foreign_keys.py @@ -1,11 +1,10 @@ from datajoint.declare import declare +from .schema_advanced import * -from . import schema_advanced - -def test_aliased_fk(): - person = schema_advanced.Person() - parent = schema_advanced.Parent() +def test_aliased_fk(schema_adv): + person = Person() + parent = Parent() person.delete() assert not person assert not parent @@ -21,21 +20,21 @@ def test_aliased_fk(): assert delete_count == 16 -def test_describe(): +def test_describe(schema_adv): """real_definition should match original definition""" - for rel in (schema_advanced.LocalSynapse, schema_advanced.GlobalSynapse): + for rel in (LocalSynapse, GlobalSynapse): describe = rel.describe() - s1 = declare( - rel.full_table_name, rel.definition, schema_advanced.schema.context - )[0].split("\n") + s1 = declare(rel.full_table_name, rel.definition, schema_adv.context)[0].split( + "\n" + ) s2 = declare(rel.full_table_name, describe, globals())[0].split("\n") for c1, c2 in zip(s1, s2): assert c1 == c2 -def test_delete(): - person = schema_advanced.Person() - parent = schema_advanced.Parent() +def test_delete(schema_adv): + person = Person() + parent = Parent() person.delete() assert not person assert not parent diff --git a/tests/test_groupby.py b/tests/test_groupby.py index 3d3be530e..109972760 100644 --- a/tests/test_groupby.py +++ b/tests/test_groupby.py @@ -1,7 +1,7 @@ from .schema_simple import A, D -def test_aggr_with_proj(): +def test_aggr_with_proj(schema_simp): # issue #944 - only breaks with MariaDB # MariaDB implements the SQL:1992 standard that prohibits fields in the select statement that are # not also in the GROUP BY statement. diff --git a/tests/test_log.py b/tests/test_log.py index a3aafa992..4b6e64613 100644 --- a/tests/test_log.py +++ b/tests/test_log.py @@ -1,8 +1,5 @@ -from . import schema - - -def test_log(): - ts, events = (schema.schema.log & 'event like "Declared%%"').fetch( +def test_log(schema_any): + ts, events = (schema_any.log & 'event like "Declared%%"').fetch( "timestamp", "event" ) assert len(ts) >= 2 diff --git a/tests/test_nan.py b/tests/test_nan.py index 38dd5036f..299c0d9f8 100644 --- a/tests/test_nan.py +++ b/tests/test_nan.py @@ -3,6 +3,7 @@ from . 
import PREFIX import pytest + class NanTest(dj.Manual): definition = """ id :int @@ -10,13 +11,15 @@ class NanTest(dj.Manual): value=null :double """ + @pytest.fixture(scope="module") def schema(connection_test): - schema = dj.Schema(PREFIX + "_nantest", locals(), connection=connection_test) + schema = dj.Schema(PREFIX + "_nantest", connection=connection_test) schema(NanTest) yield schema schema.drop() + @pytest.fixture(scope="class") def setup_class(request, schema): rel = NanTest() @@ -27,6 +30,7 @@ def setup_class(request, schema): request.cls.rel = rel request.cls.a = a + class TestNaNInsert: def test_insert_nan(self, setup_class): """Test fetching of null values""" diff --git a/tests/test_relation_u.py b/tests/test_relation_u.py index 44033708d..d225bccbb 100644 --- a/tests/test_relation_u.py +++ b/tests/test_relation_u.py @@ -1,6 +1,21 @@ +import pytest import datajoint as dj from pytest import raises -from . import schema, schema_simple +from .schema import * +from .schema_simple import * + + +@pytest.fixture(scope="class") +def setup_class(request, schema_any): + request.cls.user = User() + request.cls.language = Language() + request.cls.subject = Subject() + request.cls.experiment = Experiment() + request.cls.trial = Trial() + request.cls.ephys = Ephys() + request.cls.channel = Ephys.Channel() + request.cls.img = Image() + request.cls.trash = UberTrash() class TestU: @@ -8,19 +23,7 @@ class TestU: Test tables: insert, delete """ - @classmethod - def setup_class(cls): - cls.user = schema.User() - cls.language = schema.Language() - cls.subject = schema.Subject() - cls.experiment = schema.Experiment() - cls.trial = schema.Trial() - cls.ephys = schema.Ephys() - cls.channel = schema.Ephys.Channel() - cls.img = schema.Image() - cls.trash = schema.UberTrash() - - def test_restriction(self): + def test_restriction(self, setup_class): language_set = {s[1] for s in self.language.contents} rel = dj.U("language") & self.language assert list(rel.heading.names) == ["language"] @@ -32,15 +35,15 @@ def test_restriction(self): assert list(rel.primary_key) == list((rel & "trial_id>3").primary_key) assert list((dj.U("start_time") & self.trial).primary_key) == ["start_time"] - def test_invalid_restriction(self): + def test_invalid_restriction(self, setup_class): with raises(dj.DataJointError): result = dj.U("color") & dict(color="red") - def test_ineffective_restriction(self): + def test_ineffective_restriction(self, setup_class): rel = self.language & dj.U("language") assert rel.make_sql() == self.language.make_sql() - def test_join(self): + def test_join(self, setup_class): rel = self.experiment * dj.U("experiment_date") assert self.experiment.primary_key == ["subject_id", "experiment_id"] assert rel.primary_key == self.experiment.primary_key + ["experiment_date"] @@ -49,35 +52,35 @@ def test_join(self): assert self.experiment.primary_key == ["subject_id", "experiment_id"] assert rel.primary_key == self.experiment.primary_key + ["experiment_date"] - def test_invalid_join(self): + def test_invalid_join(self, setup_class): with raises(dj.DataJointError): rel = dj.U("language") * dict(language="English") - def test_repr_without_attrs(self): + def test_repr_without_attrs(self, setup_class): """test dj.U() display""" - query = dj.U().aggr(schema.Language, n="count(*)") + query = dj.U().aggr(Language, n="count(*)") repr(query) - def test_aggregations(self): - lang = schema.Language() + def test_aggregations(self, setup_class): + lang = Language() # test total aggregation on expression object n1 = 
dj.U().aggr(lang, n="count(*)").fetch1("n") assert n1 == len(lang.fetch()) # test total aggregation on expression class - n2 = dj.U().aggr(schema.Language, n="count(*)").fetch1("n") + n2 = dj.U().aggr(Language, n="count(*)").fetch1("n") assert n1 == n2 - rel = dj.U("language").aggr(schema.Language, number_of_speakers="count(*)") - assert len(rel) == len(set(l[1] for l in schema.Language.contents)) + rel = dj.U("language").aggr(Language, number_of_speakers="count(*)") + assert len(rel) == len(set(l[1] for l in Language.contents)) assert (rel & 'language="English"').fetch1("number_of_speakers") == 3 - def test_argmax(self): - rel = schema.TTest() + def test_argmax(self, setup_class): + rel = TTest() # get the tuples corresponding to the maximum value mx = (rel * dj.U().aggr(rel, mx="max(value)")) & "mx=value" assert mx.fetch("value")[0] == max(rel.fetch("value")) - def test_aggr(self): - rel = schema_simple.ArgmaxTest() + def test_aggr(self, setup_class, schema_simp): + rel = ArgmaxTest() amax1 = (dj.U("val") * rel) & dj.U("secondary_key").aggr(rel, val="min(val)") amax2 = (dj.U("val") * rel) * dj.U("secondary_key").aggr(rel, val="min(val)") assert ( diff --git a/tests/test_virtual_module.py b/tests/test_virtual_module.py index b7c3f23bb..bd8a0c754 100644 --- a/tests/test_virtual_module.py +++ b/tests/test_virtual_module.py @@ -2,8 +2,6 @@ from datajoint.user_tables import UserTable -def test_virtual_module(schema_obj, connection_test): - module = dj.VirtualModule( - "module", schema_obj.schema.database, connection=connection_test - ) +def test_virtual_module(schema_any, connection_test): + module = dj.VirtualModule("module", schema_any.database, connection=connection_test) assert issubclass(module.Experiment, UserTable) From 8f09fe9c3cf0b018bb0959266550982f89fd61b6 Mon Sep 17 00:00:00 2001 From: A-Baji Date: Fri, 1 Dec 2023 14:05:56 -0600 Subject: [PATCH 005/212] remove temp conn info --- tests/__init__.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/tests/__init__.py b/tests/__init__.py index 70381c090..de57f6eab 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -5,13 +5,6 @@ PREFIX = "djtest" -# Connection for testing -CONN_INFO = dict( - host=os.getenv("DJ_HOST"), - user=os.getenv("DJ_USER"), - password=os.getenv("DJ_PASS"), -) - CONN_INFO_ROOT = dict( host=os.getenv("DJ_HOST"), user=os.getenv("DJ_USER"), From e27147f69f52184b90dd815e9fa3f9b0da938346 Mon Sep 17 00:00:00 2001 From: A-Baji Date: Fri, 1 Dec 2023 14:09:43 -0600 Subject: [PATCH 006/212] import cleanup --- tests/conftest.py | 2 -- tests/test_blob.py | 1 - tests/test_connection.py | 1 - 3 files changed, 4 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 8335b1c11..e13a13632 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,9 +1,7 @@ -import sys import datajoint as dj from packaging import version import os import pytest -import inspect from . import PREFIX, schema, schema_simple, schema_advanced namespace = locals() diff --git a/tests/test_blob.py b/tests/test_blob.py index a3de2e9a9..23de7be76 100644 --- a/tests/test_blob.py +++ b/tests/test_blob.py @@ -1,4 +1,3 @@ -import pytest import datajoint as dj import timeit import numpy as np diff --git a/tests/test_connection.py b/tests/test_connection.py index 76b6d2389..795d3761e 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -6,7 +6,6 @@ from datajoint import DataJointError import numpy as np from . import CONN_INFO_ROOT - from . 
import PREFIX import pytest From afc1c2b3e8369e53f1ed94a09cf3d082704704e5 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Fri, 1 Dec 2023 16:54:03 -0600 Subject: [PATCH 007/212] Use LOCALS dict for context --- tests/conftest.py | 15 +++++++-------- tests/schema.py | 10 ++++++---- tests/schema_advanced.py | 6 +++--- tests/schema_simple.py | 10 ++++++---- tests/test_relation_u.py | 2 +- 5 files changed, 23 insertions(+), 20 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index e13a13632..109bda6c0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,8 +4,6 @@ import pytest from . import PREFIX, schema, schema_simple, schema_advanced -namespace = locals() - @pytest.fixture(scope="session") def connection_root(): @@ -64,11 +62,12 @@ def connection_test(connection_root): connection.close() -@pytest.fixture(scope="module") +@pytest.fixture def schema_any(connection_test): schema_any = dj.Schema( - PREFIX + "_test1", schema.__dict__, connection=connection_test + PREFIX + "_test1", schema.LOCALS_ANY, connection=connection_test ) + assert schema.LOCALS_ANY, "LOCALS_ANY is empty" schema_any(schema.TTest) schema_any(schema.TTest2) schema_any(schema.TTest3) @@ -109,10 +108,10 @@ def schema_any(connection_test): schema_any.drop() -@pytest.fixture(scope="module") +@pytest.fixture def schema_simp(connection_test): schema = dj.Schema( - PREFIX + "_relational", schema_simple.__dict__, connection=connection_test + PREFIX + "_relational", schema_simple.LOCALS_SIMPLE, connection=connection_test ) schema(schema_simple.IJ) schema(schema_simple.JI) @@ -136,10 +135,10 @@ def schema_simp(connection_test): schema.drop() -@pytest.fixture(scope="module") +@pytest.fixture def schema_adv(connection_test): schema = dj.Schema( - PREFIX + "_advanced", schema_advanced.__dict__, connection=connection_test + PREFIX + "_advanced", schema_advanced.LOCALS_ADVANCED, connection=connection_test ) schema(schema_advanced.Person) schema(schema_advanced.Parent) diff --git a/tests/schema.py b/tests/schema.py index 864c5efe4..7bc4dccdb 100644 --- a/tests/schema.py +++ b/tests/schema.py @@ -7,8 +7,6 @@ import datajoint as dj import inspect -LOCALS_ANY = locals() - class TTest(dj.Lookup): """ @@ -33,7 +31,7 @@ class TTest2(dj.Manual): class TTest3(dj.Manual): definition = """ - key : int + key : int --- value : varchar(300) """ @@ -41,7 +39,7 @@ class TTest3(dj.Manual): class NullableNumbers(dj.Manual): definition = """ - key : int + key : int --- fvalue = null : float dvalue = null : double @@ -450,3 +448,7 @@ class Longblob(dj.Manual): --- data: longblob """ + + +LOCALS_ANY = {k: v for k, v in locals().items() if inspect.isclass(v)} + diff --git a/tests/schema_advanced.py b/tests/schema_advanced.py index 104e4d1e4..726fc819a 100644 --- a/tests/schema_advanced.py +++ b/tests/schema_advanced.py @@ -1,7 +1,5 @@ import datajoint as dj - -LOCALS_ADVANCED = locals() - +import inspect class Person(dj.Manual): definition = """ @@ -135,3 +133,5 @@ class GlobalSynapse(dj.Manual): -> Cell.proj(pre_slice="slice", pre_cell="cell") -> Cell.proj(post_slice="slice", post_cell="cell") """ + +LOCALS_ADVANCED = {k: v for k, v in locals().items() if inspect.isclass(v)} diff --git a/tests/schema_simple.py b/tests/schema_simple.py index bb5c21ff5..7742ba1c2 100644 --- a/tests/schema_simple.py +++ b/tests/schema_simple.py @@ -9,8 +9,7 @@ import faker import numpy as np from datetime import date, timedelta - -LOCALS_SIMPLE = locals() +import inspect class IJ(dj.Lookup): @@ -237,8 +236,8 @@ class ReservedWord(dj.Manual): # Test of SQL 
reserved words key : int --- - in : varchar(25) - from : varchar(25) + in : varchar(25) + from : varchar(25) int : int select : varchar(25) """ @@ -260,3 +259,6 @@ class OutfitPiece(dj.Part, dj.Lookup): piece: varchar(20) """ contents = [(0, "jeans"), (0, "sneakers"), (0, "polo")] + + +LOCALS_SIMPLE = {k: v for k, v in locals().items() if inspect.isclass(v)} diff --git a/tests/test_relation_u.py b/tests/test_relation_u.py index d225bccbb..3494f4bff 100644 --- a/tests/test_relation_u.py +++ b/tests/test_relation_u.py @@ -17,7 +17,7 @@ def setup_class(request, schema_any): request.cls.img = Image() request.cls.trash = UberTrash() - +@pytest.mark.skip(reason="temporary") class TestU: """ Test tables: insert, delete From a59466e23328d0906737d3cdb1830662a92aefd5 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Fri, 1 Dec 2023 16:59:03 -0600 Subject: [PATCH 008/212] Clean up imports for test_blob --- tests/schema.py | 2 +- tests/schema_advanced.py | 1 + tests/schema_simple.py | 1 + tests/test_blob.py | 2 +- 4 files changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/schema.py b/tests/schema.py index 7bc4dccdb..13ff945a3 100644 --- a/tests/schema.py +++ b/tests/schema.py @@ -451,4 +451,4 @@ class Longblob(dj.Manual): LOCALS_ANY = {k: v for k, v in locals().items() if inspect.isclass(v)} - +__all__ = list(LOCALS_ANY.keys()) diff --git a/tests/schema_advanced.py b/tests/schema_advanced.py index 726fc819a..f925e4971 100644 --- a/tests/schema_advanced.py +++ b/tests/schema_advanced.py @@ -135,3 +135,4 @@ class GlobalSynapse(dj.Manual): """ LOCALS_ADVANCED = {k: v for k, v in locals().items() if inspect.isclass(v)} +__all__ = list(LOCALS_ADVANCED.keys()) diff --git a/tests/schema_simple.py b/tests/schema_simple.py index 7742ba1c2..addd70c26 100644 --- a/tests/schema_simple.py +++ b/tests/schema_simple.py @@ -262,3 +262,4 @@ class OutfitPiece(dj.Part, dj.Lookup): LOCALS_SIMPLE = {k: v for k, v in locals().items() if inspect.isclass(v)} +__all__ = list(LOCALS_SIMPLE.keys()) diff --git a/tests/test_blob.py b/tests/test_blob.py index 23de7be76..e55488987 100644 --- a/tests/test_blob.py +++ b/tests/test_blob.py @@ -7,7 +7,7 @@ from datajoint.blob import pack, unpack from numpy.testing import assert_array_equal from pytest import approx -from .schema import * +from .schema import Longblob def test_pack(): From 3442047c915b3a50a4024d627787fd8d13cab9ac Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Fri, 1 Dec 2023 21:43:42 -0600 Subject: [PATCH 009/212] Fix dev container --- .devcontainer/Dockerfile | 12 ------- .devcontainer/devcontainer.json | 54 +++++++++++++++---------------- .devcontainer/docker-compose.yaml | 10 ------ LNX-docker-compose.yml | 8 ++--- 4 files changed, 31 insertions(+), 53 deletions(-) delete mode 100644 .devcontainer/Dockerfile delete mode 100644 .devcontainer/docker-compose.yaml diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile deleted file mode 100644 index e008c9287..000000000 --- a/.devcontainer/Dockerfile +++ /dev/null @@ -1,12 +0,0 @@ -# Note: You can use any Debian/Ubuntu based image you want. 
-FROM mcr.microsoft.com/devcontainers/python:3.7-bullseye - -RUN \ - apt update && \ - apt-get install bash-completion graphviz default-mysql-client -y && \ - pip install flake8 black faker ipykernel pytest pytest-cov nose nose-cov datajoint && \ - pip uninstall datajoint -y - -ENV DJ_HOST fakeservices.datajoint.io -ENV DJ_USER root -ENV DJ_PASS simple \ No newline at end of file diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index a5db4d4c5..08a4482cf 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,34 +1,34 @@ // For format details, see https://aka.ms/devcontainer.json. For config options, see the +// README at: https://github.com/devcontainers/templates/tree/main/src/docker-existing-docker-compose { - "name": "Development", - "dockerComposeFile": "docker-compose.yaml", + "name": "Existing Docker Compose (Extend)", + // Update the 'dockerComposeFile' list if you have more compose files or use different names. + // The .devcontainer/docker-compose.yml file contains any overrides you need/want to make. + "dockerComposeFile": [ + "../LNX-docker-compose.yml", + "docker-compose.yml" + ], + // The 'service' property is the name of the service for the container that VS Code should + // use. Update this value and .devcontainer/docker-compose.yml to the real service name. "service": "app", + // The optional 'workspaceFolder' property is the path VS Code should open by default when + // connected. This is typically a file mount in .devcontainer/docker-compose.yml "workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}", - // Use this environment variable if you need to bind mount your local source code into a new container. - "remoteEnv": { - "LOCAL_WORKSPACE_FOLDER": "${localWorkspaceFolder}" - }, - // https://containers.dev/features + // Features to add to the dev container. More info: https://containers.dev/features. + // "features": {}, + // Use 'forwardPorts' to make a list of ports inside the container available locally. + // "forwardPorts": [], + // Uncomment the next line if you want start specific services in your Docker Compose config. + // "runServices": [], + // Uncomment the next line if you want to keep your containers running after VS Code shuts down. + "shutdownAction": "stopCompose", + // Uncomment the next line to run commands after the container is created. + "postCreateCommand": "python3 -m pip install -e .", "features": { - "ghcr.io/devcontainers/features/docker-in-docker:2": {}, - "ghcr.io/devcontainers/features/git:1": {}, - "ghcr.io/eitsupi/devcontainer-features/jq-likes:1": {}, - "ghcr.io/guiyomh/features/vim:0": {} + "ghcr.io/cirolosapio/devcontainers-features/alpine-git:0": {}, }, - "onCreateCommand": "pip install -e .", - "postStartCommand": "MYSQL_VER=8.0 MINIO_VER=RELEASE.2022-08-11T04-37-28Z docker compose -f local-docker-compose.yml down && docker volume prune -f && MYSQL_VER=8.0 MINIO_VER=RELEASE.2022-08-11T04-37-28Z docker compose -f local-docker-compose.yml up --build --wait", - "forwardPorts": [ - 80, - 443, - 3306, - 8080, - 9000 - ], - "customizations": { - "vscode": { - "extensions": [ - "ms-python.python" - ] - } - } + // Configure tool-specific properties. + // "customizations": {}, + // Uncomment to connect as an existing user other than the container default. More info: https://aka.ms/dev-containers-non-root. 
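    // (aside, not part of the diff: "shutdownAction": "stopCompose" above tears
    // the whole compose stack down when the editor window closes, and the
    // "postCreateCommand" pip-installs the bind-mounted source editably, so the
    // container imports the working copy of datajoint rather than a release)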
+ // "remoteUser": "devcontainer" } \ No newline at end of file diff --git a/.devcontainer/docker-compose.yaml b/.devcontainer/docker-compose.yaml deleted file mode 100644 index a456ed151..000000000 --- a/.devcontainer/docker-compose.yaml +++ /dev/null @@ -1,10 +0,0 @@ -version: "3" -services: - app: - build: . - extra_hosts: - - fakeservices.datajoint.io:127.0.0.1 - volumes: - - ../..:/workspaces:cached - entrypoint: /usr/local/share/docker-init.sh - command: tail -f /dev/null diff --git a/LNX-docker-compose.yml b/LNX-docker-compose.yml index 9c0a95b78..248b3611c 100644 --- a/LNX-docker-compose.yml +++ b/LNX-docker-compose.yml @@ -7,7 +7,7 @@ x-net: services: db: <<: *net - image: datajoint/mysql:${MYSQL_VER} + image: datajoint/mysql:${MYSQL_VER:-5.7} environment: - MYSQL_ROOT_PASSWORD=${DJ_PASS} # ports: @@ -21,7 +21,7 @@ services: interval: 15s minio: <<: *net - image: minio/minio:${MINIO_VER} + image: minio/minio:${MINIO_VER:-RELEASE.2022-08-11T04-37-28Z} environment: - MINIO_ACCESS_KEY=datajoint - MINIO_SECRET_KEY=datajoint @@ -58,7 +58,7 @@ services: # - "3306:3306" app: <<: *net - image: datajoint/djtest:py${PY_VER}-${DISTRO} + image: datajoint/djtest:py${PY_VER:-3.8}-${DISTRO:-alpine} depends_on: db: condition: service_healthy @@ -93,7 +93,7 @@ services: nosetests -vsw tests_old --with-coverage --cover-package=datajoint # ports: # - "8888:8888" - user: ${HOST_UID}:anaconda + user: ${HOST_UID:-1000}:anaconda volumes: - .:/src - /tmp/.X11-unix:/tmp/.X11-unix:rw From 980e818c7e373561467fe31f679e10d324536859 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Fri, 1 Dec 2023 21:57:13 -0600 Subject: [PATCH 010/212] Clean up imports for test_blob_matlab --- tests/test_blob_matlab.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/test_blob_matlab.py b/tests/test_blob_matlab.py index 06154b1fc..575e6b0b8 100644 --- a/tests/test_blob_matlab.py +++ b/tests/test_blob_matlab.py @@ -16,15 +16,15 @@ class Blob(dj.Manual): """ -@pytest.fixture(scope="module") +@pytest.fixture def schema(connection_test): - schema = dj.Schema(PREFIX + "_test1", locals(), connection=connection_test) + schema = dj.Schema(PREFIX + "_test1", dict(Blob=Blob), connection=connection_test) schema(Blob) yield schema schema.drop() -@pytest.fixture(scope="module") +@pytest.fixture def insert_blobs_func(schema): def insert_blobs(): """ @@ -63,7 +63,7 @@ def insert_blobs(): yield insert_blobs -@pytest.fixture(scope="class") +@pytest.fixture def setup_class(schema, insert_blobs_func): assert not dj.config["safemode"], "safemode must be disabled" Blob().delete() From 4ffbca2011749dc7007e05ae65977d1be1a62620 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Fri, 1 Dec 2023 22:56:06 -0600 Subject: [PATCH 011/212] Clean up recently migrated pytests --- tests/test_connection.py | 2 +- tests/test_erd.py | 4 +- tests/test_json.py | 415 +++++++++++++++++----------------- tests/test_plugin.py | 8 +- tests/test_relation_u.py | 43 ++-- tests/test_schema_keywords.py | 2 +- tests/test_utils.py | 8 - 7 files changed, 239 insertions(+), 243 deletions(-) diff --git a/tests/test_connection.py b/tests/test_connection.py index 795d3761e..a73677aec 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -12,7 +12,7 @@ @pytest.fixture def schema(connection_test): - schema = dj.Schema(PREFIX + "_transactions", locals(), connection=connection_test) + schema = dj.Schema(PREFIX + "_transactions", context=dict(), connection=connection_test) yield schema schema.drop() diff --git a/tests/test_erd.py 
b/tests/test_erd.py index f1274ec1b..aebf62eaf 100644 --- a/tests/test_erd.py +++ b/tests/test_erd.py @@ -45,13 +45,13 @@ def test_erd_algebra(schema_simp): def test_repr_svg(schema_adv): - erd = dj.ERD(schema_adv, context=locals()) + erd = dj.ERD(schema_adv, context=dict()) svg = erd._repr_svg_() assert svg.startswith("") def test_make_image(schema_simp): - erd = dj.ERD(schema_simp, context=locals()) + erd = dj.ERD(schema_simp, context=dict()) img = erd.make_image() assert img.ndim == 3 and img.shape[2] in (3, 4) diff --git a/tests/test_json.py b/tests/test_json.py index 760475a1a..37a33c825 100644 --- a/tests/test_json.py +++ b/tests/test_json.py @@ -1,3 +1,4 @@ +import pytest import inspect from datajoint.declare import declare import datajoint as dj @@ -5,213 +6,215 @@ from packaging.version import Version from . import PREFIX -if Version(dj.conn().query("select @@version;").fetchone()[0]) >= Version("8.0.0"): - schema = dj.Schema(PREFIX + "_json") - Team = None - - def setup(): - global Team - - @schema - class Team(dj.Lookup): - definition = """ - name: varchar(40) - --- - car=null: json - unique index(car.name:char(20)) - uniQue inDex ( name, car.name:char(20), (json_value(`car`, _utf8mb4'$.length' returning decimal(4, 1))) ) - """ - contents = [ - ( - "engineering", +if Version(dj.conn().query("select @@version;").fetchone()[0]) < Version("8.0.0"): + pytest.skip("skipping windows-only tests", allow_module_level=True) + + +class Team(dj.Lookup): + definition = """ + name: varchar(40) + --- + car=null: json + unique index(car.name:char(20)) + uniQue inDex ( name, car.name:char(20), (json_value(`car`, _utf8mb4'$.length' returning decimal(4, 1))) ) + """ + contents = [ + ( + "engineering", + { + "name": "Rever", + "length": 20.5, + "inspected": True, + "tire_pressure": [32, 31, 33, 34], + "headlights": [ { - "name": "Rever", - "length": 20.5, - "inspected": True, - "tire_pressure": [32, 31, 33, 34], - "headlights": [ - { - "side": "left", - "hyper_white": None, - }, - { - "side": "right", - "hyper_white": None, - }, - ], + "side": "left", + "hyper_white": None, }, - ), - ( - "business", { - "name": "Chaching", - "length": 100, - "safety_inspected": False, - "tire_pressure": [34, 30, 27, 32], - "headlights": [ - { - "side": "left", - "hyper_white": True, - }, - { - "side": "right", - "hyper_white": True, - }, - ], + "side": "right", + "hyper_white": None, }, - ), - ( - "marketing", - None, - ), - ] - - def teardown(): - schema.drop() - - def test_insert_update(): - car = { - "name": "Discovery", - "length": 22.9, - "inspected": None, - "tire_pressure": [35, 36, 34, 37], - "headlights": [ - { - "side": "left", - "hyper_white": True, - }, - { - "side": "right", - "hyper_white": True, - }, - ], - } - - Team.insert1({"name": "research", "car": car}) - q = Team & {"name": "research"} - assert q.fetch1("car") == car - - car.update({"length": 23}) - Team.update1({"name": "research", "car": car}) - assert q.fetch1("car") == car - - try: - Team.insert1({"name": "hr", "car": car}) - raise Exception("Inserted non-unique car name.") - except dj.DataJointError: - pass - - q.delete_quick() - assert not q - - def test_describe(): - rel = Team() - context = inspect.currentframe().f_globals - s1 = declare(rel.full_table_name, rel.definition, context) - s2 = declare(rel.full_table_name, rel.describe(), context) - assert s1 == s2 - - def test_restrict(): - # dict - assert (Team & {"car.name": "Chaching"}).fetch1("name") == "business" - - assert (Team & {"car.length": 20.5}).fetch1("name") == "engineering" 
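    # aside, a hedged note rather than part of this diff: the dict restrictions
    # in this test use DataJoint's JSON-path syntax, where a key such as
    # "car.length" addresses a path inside the json column `car`, and a ":type"
    # suffix casts the extracted value before comparison, e.g.
    #     Team & {"car.name": "Chaching"}                    # path, string compare
    #     Team & {"car.inspected:unsigned": True}            # cast, then compare
    #     Team.proj(car_length="car.length:decimal(4, 1)")   # rename and cast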
- - assert (Team & {"car.inspected": "true"}).fetch1("name") == "engineering" - - assert (Team & {"car.inspected:unsigned": True}).fetch1("name") == "engineering" - - assert (Team & {"car.safety_inspected": "false"}).fetch1("name") == "business" - - assert (Team & {"car.safety_inspected:unsigned": False}).fetch1( - "name" - ) == "business" - - assert (Team & {"car.headlights[0].hyper_white": None}).fetch( - "name", order_by="name", as_dict=True - ) == [ - {"name": "engineering"}, - {"name": "marketing"}, - ] # if entire record missing, JSON key is missing, or value set to JSON null - - assert (Team & {"car": None}).fetch1("name") == "marketing" - - assert (Team & {"car.tire_pressure": [34, 30, 27, 32]}).fetch1( - "name" - ) == "business" - - assert ( - Team & {"car.headlights[1]": {"side": "right", "hyper_white": True}} - ).fetch1("name") == "business" - - # sql operators - assert (Team & "`car`->>'$.name' LIKE '%ching%'").fetch1( - "name" - ) == "business", "Missing substring" - - assert (Team & "`car`->>'$.length' > 30").fetch1("name") == "business", "<= 30" - - assert ( - Team & "JSON_VALUE(`car`, '$.safety_inspected' RETURNING UNSIGNED) = 0" - ).fetch1("name") == "business", "Has `safety_inspected` set to `true`" - - assert (Team & "`car`->>'$.headlights[0].hyper_white' = 'null'").fetch1( - "name" - ) == "engineering", "Has 1st `headlight` with `hyper_white` not set to `null`" - - assert (Team & "`car`->>'$.inspected' IS NOT NULL").fetch1( - "name" - ) == "engineering", "Missing `inspected` key" - - assert (Team & "`car`->>'$.tire_pressure' = '[34, 30, 27, 32]'").fetch1( - "name" - ) == "business", "`tire_pressure` array did not match" - - assert ( - Team - & """`car`->>'$.headlights[1]' = '{"side": "right", "hyper_white": true}'""" - ).fetch1("name") == "business", "2nd `headlight` object did not match" - - def test_proj(): - # proj necessary since we need to rename indexed value into a proper attribute name - assert Team.proj(car_length="car.length").fetch( - as_dict=True, order_by="car_length" - ) == [ - {"name": "marketing", "car_length": None}, - {"name": "business", "car_length": "100"}, - {"name": "engineering", "car_length": "20.5"}, - ] - - assert Team.proj(car_length="car.length:decimal(4, 1)").fetch( - as_dict=True, order_by="car_length" - ) == [ - {"name": "marketing", "car_length": None}, - {"name": "engineering", "car_length": 20.5}, - {"name": "business", "car_length": 100.0}, - ] - - assert Team.proj( - car_width="JSON_VALUE(`car`, '$.length' RETURNING float) - 15" - ).fetch(as_dict=True, order_by="car_width") == [ - {"name": "marketing", "car_width": None}, - {"name": "engineering", "car_width": 5.5}, - {"name": "business", "car_width": 85.0}, - ] - - assert ( - (Team & {"name": "engineering"}).proj(car_tire_pressure="car.tire_pressure") - ).fetch1("car_tire_pressure") == "[32, 31, 33, 34]" - - assert np.array_equal( - Team.proj(car_inspected="car.inspected").fetch( - "car_inspected", order_by="name" - ), - np.array([None, "true", None]), - ) - - assert np.array_equal( - Team.proj(car_inspected="car.inspected:unsigned").fetch( - "car_inspected", order_by="name" - ), - np.array([None, 1, None]), - ) + ], + }, + ), + ( + "business", + { + "name": "Chaching", + "length": 100, + "safety_inspected": False, + "tire_pressure": [34, 30, 27, 32], + "headlights": [ + { + "side": "left", + "hyper_white": True, + }, + { + "side": "right", + "hyper_white": True, + }, + ], + }, + ), + ( + "marketing", + None, + ), + ] + + +@pytest.fixture +def schema(connection_test): + schema = 
dj.Schema(PREFIX + "_json", context=dict(), connection=connection_test) + schema(Team) + yield schema + schema.drop() + + +def test_insert_update(schema): + car = { + "name": "Discovery", + "length": 22.9, + "inspected": None, + "tire_pressure": [35, 36, 34, 37], + "headlights": [ + { + "side": "left", + "hyper_white": True, + }, + { + "side": "right", + "hyper_white": True, + }, + ], + } + + Team.insert1({"name": "research", "car": car}) + q = Team & {"name": "research"} + assert q.fetch1("car") == car + + car.update({"length": 23}) + Team.update1({"name": "research", "car": car}) + assert q.fetch1("car") == car + + try: + Team.insert1({"name": "hr", "car": car}) + raise Exception("Inserted non-unique car name.") + except dj.DataJointError: + pass + + q.delete_quick() + assert not q + +def test_describe(schema): + rel = Team() + context = inspect.currentframe().f_globals + s1 = declare(rel.full_table_name, rel.definition, context) + s2 = declare(rel.full_table_name, rel.describe(), context) + assert s1 == s2 + +def test_restrict(schema): + # dict + assert (Team & {"car.name": "Chaching"}).fetch1("name") == "business" + + assert (Team & {"car.length": 20.5}).fetch1("name") == "engineering" + + assert (Team & {"car.inspected": "true"}).fetch1("name") == "engineering" + + assert (Team & {"car.inspected:unsigned": True}).fetch1("name") == "engineering" + + assert (Team & {"car.safety_inspected": "false"}).fetch1("name") == "business" + + assert (Team & {"car.safety_inspected:unsigned": False}).fetch1( + "name" + ) == "business" + + assert (Team & {"car.headlights[0].hyper_white": None}).fetch( + "name", order_by="name", as_dict=True + ) == [ + {"name": "engineering"}, + {"name": "marketing"}, + ] # if entire record missing, JSON key is missing, or value set to JSON null + + assert (Team & {"car": None}).fetch1("name") == "marketing" + + assert (Team & {"car.tire_pressure": [34, 30, 27, 32]}).fetch1( + "name" + ) == "business" + + assert ( + Team & {"car.headlights[1]": {"side": "right", "hyper_white": True}} + ).fetch1("name") == "business" + + # sql operators + assert (Team & "`car`->>'$.name' LIKE '%ching%'").fetch1( + "name" + ) == "business", "Missing substring" + + assert (Team & "`car`->>'$.length' > 30").fetch1("name") == "business", "<= 30" + + assert ( + Team & "JSON_VALUE(`car`, '$.safety_inspected' RETURNING UNSIGNED) = 0" + ).fetch1("name") == "business", "Has `safety_inspected` set to `true`" + + assert (Team & "`car`->>'$.headlights[0].hyper_white' = 'null'").fetch1( + "name" + ) == "engineering", "Has 1st `headlight` with `hyper_white` not set to `null`" + + assert (Team & "`car`->>'$.inspected' IS NOT NULL").fetch1( + "name" + ) == "engineering", "Missing `inspected` key" + + assert (Team & "`car`->>'$.tire_pressure' = '[34, 30, 27, 32]'").fetch1( + "name" + ) == "business", "`tire_pressure` array did not match" + + assert ( + Team + & """`car`->>'$.headlights[1]' = '{"side": "right", "hyper_white": true}'""" + ).fetch1("name") == "business", "2nd `headlight` object did not match" + +def test_proj(schema): + # proj necessary since we need to rename indexed value into a proper attribute name + assert Team.proj(car_length="car.length").fetch( + as_dict=True, order_by="car_length" + ) == [ + {"name": "marketing", "car_length": None}, + {"name": "business", "car_length": "100"}, + {"name": "engineering", "car_length": "20.5"}, + ] + + assert Team.proj(car_length="car.length:decimal(4, 1)").fetch( + as_dict=True, order_by="car_length" + ) == [ + {"name": "marketing", "car_length": 
None}, + {"name": "engineering", "car_length": 20.5}, + {"name": "business", "car_length": 100.0}, + ] + + assert Team.proj( + car_width="JSON_VALUE(`car`, '$.length' RETURNING float) - 15" + ).fetch(as_dict=True, order_by="car_width") == [ + {"name": "marketing", "car_width": None}, + {"name": "engineering", "car_width": 5.5}, + {"name": "business", "car_width": 85.0}, + ] + + assert ( + (Team & {"name": "engineering"}).proj(car_tire_pressure="car.tire_pressure") + ).fetch1("car_tire_pressure") == "[32, 31, 33, 34]" + + assert np.array_equal( + Team.proj(car_inspected="car.inspected").fetch( + "car_inspected", order_by="name" + ), + np.array([None, "true", None]), + ) + + assert np.array_equal( + Team.proj(car_inspected="car.inspected:unsigned").fetch( + "car_inspected", order_by="name" + ), + np.array([None, 1, None]), + ) diff --git a/tests/test_plugin.py b/tests/test_plugin.py index f70f4c2ef..e41224116 100644 --- a/tests/test_plugin.py +++ b/tests/test_plugin.py @@ -1,3 +1,4 @@ +import pytest import datajoint.errors as djerr import datajoint.plugin as p import pkg_resources @@ -22,7 +23,8 @@ def test_normal_djerror(): assert e.__cause__ is None -def test_verified_djerror(category="connection"): +@pytest.mark.parametrize('category', ('connection', )) +def test_verified_djerror(category): try: curr_plugins = getattr(p, "{}_plugins".format(category)) setattr( @@ -39,8 +41,8 @@ def test_verified_djerror(category="connection"): def test_verified_djerror_type(): test_verified_djerror(category="type") - -def test_unverified_djerror(category="connection"): +@pytest.mark.parametrize('category', ('connection', )) +def test_unverified_djerror(category): try: curr_plugins = getattr(p, "{}_plugins".format(category)) setattr( diff --git a/tests/test_relation_u.py b/tests/test_relation_u.py index 3494f4bff..50997662d 100644 --- a/tests/test_relation_u.py +++ b/tests/test_relation_u.py @@ -5,25 +5,24 @@ from .schema_simple import * -@pytest.fixture(scope="class") -def setup_class(request, schema_any): - request.cls.user = User() - request.cls.language = Language() - request.cls.subject = Subject() - request.cls.experiment = Experiment() - request.cls.trial = Trial() - request.cls.ephys = Ephys() - request.cls.channel = Ephys.Channel() - request.cls.img = Image() - request.cls.trash = UberTrash() - -@pytest.mark.skip(reason="temporary") class TestU: """ Test tables: insert, delete """ - def test_restriction(self, setup_class): + @classmethod + def setup_class(cls): + cls.user = User() + cls.language = Language() + cls.subject = Subject() + cls.experiment = Experiment() + cls.trial = Trial() + cls.ephys = Ephys() + cls.channel = Ephys.Channel() + cls.img = Image() + cls.trash = UberTrash() + + def test_restriction(self, schema_any): language_set = {s[1] for s in self.language.contents} rel = dj.U("language") & self.language assert list(rel.heading.names) == ["language"] @@ -35,15 +34,15 @@ def test_restriction(self, setup_class): assert list(rel.primary_key) == list((rel & "trial_id>3").primary_key) assert list((dj.U("start_time") & self.trial).primary_key) == ["start_time"] - def test_invalid_restriction(self, setup_class): + def test_invalid_restriction(self, schema_any): with raises(dj.DataJointError): result = dj.U("color") & dict(color="red") - def test_ineffective_restriction(self, setup_class): + def test_ineffective_restriction(self, schema_any): rel = self.language & dj.U("language") assert rel.make_sql() == self.language.make_sql() - def test_join(self, setup_class): + def test_join(self, 
schema_any): rel = self.experiment * dj.U("experiment_date") assert self.experiment.primary_key == ["subject_id", "experiment_id"] assert rel.primary_key == self.experiment.primary_key + ["experiment_date"] @@ -52,16 +51,16 @@ def test_join(self, setup_class): assert self.experiment.primary_key == ["subject_id", "experiment_id"] assert rel.primary_key == self.experiment.primary_key + ["experiment_date"] - def test_invalid_join(self, setup_class): + def test_invalid_join(self, schema_any): with raises(dj.DataJointError): rel = dj.U("language") * dict(language="English") - def test_repr_without_attrs(self, setup_class): + def test_repr_without_attrs(self, schema_any): """test dj.U() display""" query = dj.U().aggr(Language, n="count(*)") repr(query) - def test_aggregations(self, setup_class): + def test_aggregations(self, schema_any): lang = Language() # test total aggregation on expression object n1 = dj.U().aggr(lang, n="count(*)").fetch1("n") @@ -73,13 +72,13 @@ def test_aggregations(self, setup_class): assert len(rel) == len(set(l[1] for l in Language.contents)) assert (rel & 'language="English"').fetch1("number_of_speakers") == 3 - def test_argmax(self, setup_class): + def test_argmax(self, schema_any): rel = TTest() # get the tuples corresponding to the maximum value mx = (rel * dj.U().aggr(rel, mx="max(value)")) & "mx=value" assert mx.fetch("value")[0] == max(rel.fetch("value")) - def test_aggr(self, setup_class, schema_simp): + def test_aggr(self, schema_any, schema_simp): rel = ArgmaxTest() amax1 = (dj.U("val") * rel) & dj.U("secondary_key").aggr(rel, val="min(val)") amax2 = (dj.U("val") * rel) * dj.U("secondary_key").aggr(rel, val="min(val)") diff --git a/tests/test_schema_keywords.py b/tests/test_schema_keywords.py index c8b7d5a24..1cad98efd 100644 --- a/tests/test_schema_keywords.py +++ b/tests/test_schema_keywords.py @@ -33,7 +33,7 @@ class D(B): source = A -@pytest.fixture(scope="module") +@pytest.fixture def schema(connection_test): schema = dj.Schema(PREFIX + "_keywords", connection=connection_test) schema(A) diff --git a/tests/test_utils.py b/tests/test_utils.py index 936badb1c..04325db56 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -6,14 +6,6 @@ import pytest -def setup(): - pass - - -def teardown(): - pass - - def test_from_camel_case(): assert from_camel_case("AllGroups") == "all_groups" with pytest.raises(DataJointError): From 33e21cf0ade77a3ee912374fba1d1ea5217b9cba Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 4 Dec 2023 14:21:39 +0000 Subject: [PATCH 012/212] WIP test_adapted_attributes migration --- tests/__init__.py | 22 +++++-- tests/conftest.py | 85 +++++++++++++++++++++++- tests/schema_adapted.py | 61 +++++++++++++++++ tests/test_adapted_attributes.py | 108 +++++++++++++++++++++++++++++++ 4 files changed, 271 insertions(+), 5 deletions(-) create mode 100644 tests/schema_adapted.py create mode 100644 tests/test_adapted_attributes.py diff --git a/tests/__init__.py b/tests/__init__.py index de57f6eab..219f7f5c0 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -3,10 +3,24 @@ import pytest import os -PREFIX = "djtest" +PREFIX = os.environ.get("DJ_TEST_DB_PREFIX", "djtest") + +# Connection for testing +CONN_INFO = dict( + host=os.environ.get("DJ_TEST_HOST", "fakeservices.datajoint.io"), + user=os.environ.get("DJ_TEST_USER", "datajoint"), + password=os.environ.get("DJ_TEST_PASSWORD", "datajoint"), +) CONN_INFO_ROOT = dict( - host=os.getenv("DJ_HOST"), - user=os.getenv("DJ_USER"), - password=os.getenv("DJ_PASS"), + 
host=os.environ.get("DJ_HOST", "fakeservices.datajoint.io"), + user=os.environ.get("DJ_USER", "root"), + password=os.environ.get("DJ_PASS", "simple"), +) + +S3_CONN_INFO = dict( + endpoint=os.environ.get("S3_ENDPOINT", "fakeservices.datajoint.io"), + access_key=os.environ.get("S3_ACCESS_KEY", "datajoint"), + secret_key=os.environ.get("S3_SECRET_KEY", "datajoint"), + bucket=os.environ.get("S3_BUCKET", "datajoint.test"), ) diff --git a/tests/conftest.py b/tests/conftest.py index 109bda6c0..97c71c1e5 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,8 +1,21 @@ import datajoint as dj from packaging import version import os +import minio +import urllib3 +import certifi +import shutil import pytest -from . import PREFIX, schema, schema_simple, schema_advanced +import networkx as nx +import json +from pathlib import Path +import tempfile +from datajoint import errors +from . import ( + PREFIX, CONN_INFO, S3_CONN_INFO, + schema, schema_simple, schema_advanced, schema_adapted +) + @pytest.fixture(scope="session") @@ -151,3 +164,73 @@ def schema_adv(connection_test): schema(schema_advanced.GlobalSynapse) yield schema schema.drop() + + +@pytest.fixture +def adapted_graph_instance(): + yield schema_adapted.GraphAdapter() + +@pytest.fixture +def enable_adapted_types(monkeypatch): + monkeypatch.setenv('ADAPTED_TYPE_SWITCH', 'TRUE') + yield + monkeypatch.delenv('ADAPTED_TYPE_SWITCH', raising=True) + +@pytest.fixture +def enable_filepath_feature(monkeypatch): + monkeypatch.setenv('FILEPATH_FEATURE_SWITCH', 'TRUE') + yield + monkeypatch.delenv('FILEPATH_FEATURE_SWITCH', raising=True) + +@pytest.fixture +def schema_ad(monkeypatch, connection_test, adapted_graph_instance, enable_adapted_types, enable_filepath_feature): + stores_config = { + "repo-s3": dict( + S3_CONN_INFO, protocol="s3", location="adapted/repo", stage=tempfile.mkdtemp() + ) + } + dj.config["stores"] = stores_config + schema_name = PREFIX + "_test_custom_datatype" + layout_to_filepath = schema_adapted.LayoutToFilepath() + context = { + **schema_adapted.LOCALS_ADAPTED, + 'graph': adapted_graph_instance, + 'layout_to_filepath': layout_to_filepath, + } + schema = dj.schema(schema_name, context=context, connection=connection_test) + + + # instantiate for use as a datajoint type + # TODO: remove? + graph = adapted_graph_instance + + schema(schema_adapted.Connectivity) + # errors._switch_filepath_types(True) + schema(schema_adapted.Layout) + yield schema + # errors._switch_filepath_types(False) + +@pytest.fixture +def httpClient(): + # Initialize httpClient with relevant timeout. + httpClient = urllib3.PoolManager( + timeout=30, + cert_reqs="CERT_REQUIRED", + ca_certs=certifi.where(), + retries=urllib3.Retry( + total=3, backoff_factor=0.2, status_forcelist=[500, 502, 503, 504] + ), + ) + yield httpClient + +@pytest.fixture +def minioClient(): + # Initialize minioClient with an endpoint and access/secret keys. 
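    # (note: `httpClient` below appears to name the fixture function defined
    # above, not the pool manager it yields; for the retry/timeout policy to
    # apply, this fixture would need to accept `httpClient` as a parameter, as
    # the later test_s3 version of this fixture does)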
+ minioClient = minio.Minio( + S3_CONN_INFO["endpoint"], + access_key=S3_CONN_INFO["access_key"], + secret_key=S3_CONN_INFO["secret_key"], + secure=True, + http_client=httpClient, + ) + yield minioClient diff --git a/tests/schema_adapted.py b/tests/schema_adapted.py new file mode 100644 index 000000000..559c14234 --- /dev/null +++ b/tests/schema_adapted.py @@ -0,0 +1,61 @@ +import datajoint as dj +import inspect +import networkx as nx +import json +from pathlib import Path +import tempfile + + +class GraphAdapter(dj.AttributeAdapter): + attribute_type = "longblob" # this is how the attribute will be declared + + @staticmethod + def get(obj): + # convert edge list into a graph + return nx.Graph(obj) + + @staticmethod + def put(obj): + # convert graph object into an edge list + assert isinstance(obj, nx.Graph) + return list(obj.edges) + + +class LayoutToFilepath(dj.AttributeAdapter): + """ + An adapted data type that saves a graph layout into fixed filepath + """ + + attribute_type = "filepath@repo-s3" + + @staticmethod + def get(path): + with open(path, "r") as f: + return json.load(f) + + @staticmethod + def put(layout): + path = Path(dj.config["stores"]["repo-s3"]["stage"], "layout.json") + with open(str(path), "w") as f: + json.dump(layout, f) + return path + + +class Connectivity(dj.Manual): + definition = """ + connid : int + --- + conn_graph = null : + """ + +class Layout(dj.Manual): + definition = """ + # stores graph layout + -> Connectivity + --- + layout: + """ + + +LOCALS_ADAPTED = {k: v for k, v in locals().items() if inspect.isclass(v)} +__all__ = list(LOCALS_ADAPTED.keys()) diff --git a/tests/test_adapted_attributes.py b/tests/test_adapted_attributes.py new file mode 100644 index 000000000..0c1d9ea01 --- /dev/null +++ b/tests/test_adapted_attributes.py @@ -0,0 +1,108 @@ +import os +import pytest +import datajoint as dj +import networkx as nx +from itertools import zip_longest +# from . import schema_adapted as adapted +from .schema_adapted import Connectivity, Layout + + +def test_adapted_type(schema_ad): + assert os.environ['ADAPTED_TYPE_SWITCH'] == 'TRUE' + c = Connectivity() + graphs = [ + nx.lollipop_graph(4, 2), + nx.star_graph(5), + nx.barbell_graph(3, 1), + nx.cycle_graph(5), + ] + c.insert((i, g) for i, g in enumerate(graphs)) + returned_graphs = c.fetch("conn_graph", order_by="connid") + for g1, g2 in zip(graphs, returned_graphs): + assert isinstance(g2, nx.Graph) + assert len(g1.edges) == len(g2.edges) + assert 0 == len(nx.symmetric_difference(g1, g2).edges) + c.delete() + + +# adapted_graph_instance? 
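# A hedged, self-contained sketch (not part of the diff) of the adapter
# contract that test_adapted_type exercises; `_adapter_round_trip_sketch` is an
# illustrative name, and schema_adapted.GraphAdapter is the class added above.
def _adapter_round_trip_sketch():
    import networkx as nx  # already imported at module top; repeated for clarity

    adapter = schema_adapted.GraphAdapter()
    edges = adapter.put(nx.lollipop_graph(4, 2))  # nx.Graph -> edge list (stored)
    graph = adapter.get(edges)                    # edge list -> nx.Graph (fetched)
    assert isinstance(graph, nx.Graph)
    return graph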
+def test_adapted_filepath_type(schema_ad): + # https://github.com/datajoint/datajoint-python/issues/684 + + # dj.errors._switch_adapted_types(True) + # dj.errors._switch_filepath_types(True) + + c = Connectivity() + c.delete() + c.insert1((0, nx.lollipop_graph(4, 2))) + + layout = nx.spring_layout(c.fetch1("conn_graph")) + # make json friendly + layout = {str(k): [round(r, ndigits=4) for r in v] for k, v in layout.items()} + t = Layout() + t.insert1((0, layout)) + result = t.fetch1("layout") + # TODO: may fail, used to be assert_dict_equal + assert result == layout + + t.delete() + c.delete() + + # dj.errors._switch_filepath_types(False) + # dj.errors._switch_adapted_types(False) + + +# test spawned classes +# TODO: separate fixture +# local_schema = dj.Schema(adapted.schema_name) +# local_schema.spawn_missing_classes() + +@pytest.mark.skip(reason='temp') +def test_adapted_spawned(): + dj.errors._switch_adapted_types(True) + c = Connectivity() # a spawned class + graphs = [ + nx.lollipop_graph(4, 2), + nx.star_graph(5), + nx.barbell_graph(3, 1), + nx.cycle_graph(5), + ] + c.insert((i, g) for i, g in enumerate(graphs)) + returned_graphs = c.fetch("conn_graph", order_by="connid") + for g1, g2 in zip(graphs, returned_graphs): + assert isinstance(g2, nx.Graph) + assert len(g1.edges) == len(g2.edges) + assert 0 == len(nx.symmetric_difference(g1, g2).edges) + c.delete() + dj.errors._switch_adapted_types(False) + + +# test with virtual module +# TODO: separate fixture +# virtual_module = dj.VirtualModule( +# "virtual_module", adapted.schema_name, add_objects={"graph": graph} +# ) + + +@pytest.mark.skip(reason='temp') +def test_adapted_virtual(): + dj.errors._switch_adapted_types(True) + c = virtual_module.Connectivity() + graphs = [ + nx.lollipop_graph(4, 2), + nx.star_graph(5), + nx.barbell_graph(3, 1), + nx.cycle_graph(5), + ] + c.insert((i, g) for i, g in enumerate(graphs)) + c.insert1({"connid": 100}) # test work with NULLs + returned_graphs = c.fetch("conn_graph", order_by="connid") + for g1, g2 in zip_longest(graphs, returned_graphs): + if g1 is None: + assert g2 is None + else: + assert isinstance(g2, nx.Graph) + assert len(g1.edges) == len(g2.edges) + assert 0 == len(nx.symmetric_difference(g1, g2).edges) + c.delete() + dj.errors._switch_adapted_types(False) From 3177773e848a5e95521c7969c9bd53f410f1744f Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 4 Dec 2023 08:42:26 -0600 Subject: [PATCH 013/212] Use correct env var names for feature switches --- tests/conftest.py | 14 ++++++++++---- tests/test_adapted_attributes.py | 2 +- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 97c71c1e5..86f341144 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -11,6 +11,9 @@ from pathlib import Path import tempfile from datajoint import errors +from datajoint.errors import ( + ADAPTED_TYPE_SWITCH, FILEPATH_FEATURE_SWITCH +) from . 
import ( PREFIX, CONN_INFO, S3_CONN_INFO, schema, schema_simple, schema_advanced, schema_adapted @@ -18,6 +21,7 @@ + @pytest.fixture(scope="session") def connection_root(): """Root user database connection.""" @@ -172,18 +176,19 @@ def adapted_graph_instance(): @pytest.fixture def enable_adapted_types(monkeypatch): - monkeypatch.setenv('ADAPTED_TYPE_SWITCH', 'TRUE') + monkeypatch.setenv(ADAPTED_TYPE_SWITCH, 'TRUE') yield - monkeypatch.delenv('ADAPTED_TYPE_SWITCH', raising=True) + monkeypatch.delenv(ADAPTED_TYPE_SWITCH, raising=True) @pytest.fixture def enable_filepath_feature(monkeypatch): - monkeypatch.setenv('FILEPATH_FEATURE_SWITCH', 'TRUE') + monkeypatch.setenv(FILEPATH_FEATURE_SWITCH, 'TRUE') yield - monkeypatch.delenv('FILEPATH_FEATURE_SWITCH', raising=True) + monkeypatch.delenv(FILEPATH_FEATURE_SWITCH, raising=True) @pytest.fixture def schema_ad(monkeypatch, connection_test, adapted_graph_instance, enable_adapted_types, enable_filepath_feature): + assert os.environ.get(ADAPTED_TYPE_SWITCH) == 'TRUE', 'must have adapted types enabled in environment' stores_config = { "repo-s3": dict( S3_CONN_INFO, protocol="s3", location="adapted/repo", stage=tempfile.mkdtemp() @@ -209,6 +214,7 @@ def schema_ad(monkeypatch, connection_test, adapted_graph_instance, enable_adapt schema(schema_adapted.Layout) yield schema # errors._switch_filepath_types(False) + schema.drop() @pytest.fixture def httpClient(): diff --git a/tests/test_adapted_attributes.py b/tests/test_adapted_attributes.py index 0c1d9ea01..beb694142 100644 --- a/tests/test_adapted_attributes.py +++ b/tests/test_adapted_attributes.py @@ -8,7 +8,7 @@ def test_adapted_type(schema_ad): - assert os.environ['ADAPTED_TYPE_SWITCH'] == 'TRUE' + assert os.environ[dj.errors.ADAPTED_TYPE_SWITCH] == 'TRUE' c = Connectivity() graphs = [ nx.lollipop_graph(4, 2), From 1f1575a74329a111c66b602336760410f51c783a Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 4 Dec 2023 08:56:38 -0600 Subject: [PATCH 014/212] WIP migrating test_adapted_attributes tests/test_adapted_attributes.py::test_adapted_filepath_type throws datajoint/s3.py:54: BucketInaccessible --- tests/conftest.py | 29 ------------------------ tests/test_adapted_attributes.py | 38 ++++++++++++++++++++++++++++++-- 2 files changed, 36 insertions(+), 31 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 86f341144..67b02fbf2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -186,35 +186,6 @@ def enable_filepath_feature(monkeypatch): yield monkeypatch.delenv(FILEPATH_FEATURE_SWITCH, raising=True) -@pytest.fixture -def schema_ad(monkeypatch, connection_test, adapted_graph_instance, enable_adapted_types, enable_filepath_feature): - assert os.environ.get(ADAPTED_TYPE_SWITCH) == 'TRUE', 'must have adapted types enabled in environment' - stores_config = { - "repo-s3": dict( - S3_CONN_INFO, protocol="s3", location="adapted/repo", stage=tempfile.mkdtemp() - ) - } - dj.config["stores"] = stores_config - schema_name = PREFIX + "_test_custom_datatype" - layout_to_filepath = schema_adapted.LayoutToFilepath() - context = { - **schema_adapted.LOCALS_ADAPTED, - 'graph': adapted_graph_instance, - 'layout_to_filepath': layout_to_filepath, - } - schema = dj.schema(schema_name, context=context, connection=connection_test) - - - # instantiate for use as a datajoint type - # TODO: remove? 
- graph = adapted_graph_instance - - schema(schema_adapted.Connectivity) - # errors._switch_filepath_types(True) - schema(schema_adapted.Layout) - yield schema - # errors._switch_filepath_types(False) - schema.drop() @pytest.fixture def httpClient(): diff --git a/tests/test_adapted_attributes.py b/tests/test_adapted_attributes.py index beb694142..8657efee3 100644 --- a/tests/test_adapted_attributes.py +++ b/tests/test_adapted_attributes.py @@ -1,11 +1,44 @@ import os import pytest +import tempfile import datajoint as dj +from datajoint.errors import ADAPTED_TYPE_SWITCH import networkx as nx from itertools import zip_longest -# from . import schema_adapted as adapted +from . import schema_adapted from .schema_adapted import Connectivity, Layout - +from . import PREFIX, S3_CONN_INFO + + +@pytest.fixture +def schema_ad(monkeypatch, connection_test, adapted_graph_instance, enable_adapted_types, enable_filepath_feature): + assert os.environ.get(ADAPTED_TYPE_SWITCH) == 'TRUE', 'must have adapted types enabled in environment' + stores_config = { + "repo-s3": dict( + S3_CONN_INFO, protocol="s3", location="adapted/repo", stage=tempfile.mkdtemp() + ) + } + dj.config["stores"] = stores_config + schema_name = PREFIX + "_test_custom_datatype" + layout_to_filepath = schema_adapted.LayoutToFilepath() + context = { + **schema_adapted.LOCALS_ADAPTED, + 'graph': adapted_graph_instance, + 'layout_to_filepath': layout_to_filepath, + } + schema = dj.schema(schema_name, context=context, connection=connection_test) + + + # instantiate for use as a datajoint type + # TODO: remove? + graph = adapted_graph_instance + + schema(schema_adapted.Connectivity) + # errors._switch_filepath_types(True) + schema(schema_adapted.Layout) + yield schema + # errors._switch_filepath_types(False) + schema.drop() def test_adapted_type(schema_ad): assert os.environ[dj.errors.ADAPTED_TYPE_SWITCH] == 'TRUE' @@ -26,6 +59,7 @@ def test_adapted_type(schema_ad): # adapted_graph_instance? 
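# Aside (hedged): the switch fixtures consumed above rely on pytest's built-in
# `monkeypatch`, which is function-scoped; once these fixtures become module-
# scoped, a wrapper over pytest.MonkeyPatch.context() is required, and the next
# patch in this series adds exactly that:
#
#     @pytest.fixture(scope="module")
#     def monkeymodule():
#         with pytest.MonkeyPatch.context() as mp:
#             yield mp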
+# @pytest.mark.skip(reason='misconfigured s3 fixtures') def test_adapted_filepath_type(schema_ad): # https://github.com/datajoint/datajoint-python/issues/684 From 21854dad18a3db00eb586bb3718d427a26f1d2df Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 4 Dec 2023 09:09:57 -0600 Subject: [PATCH 015/212] Migrate test_adapted_attributes: module scoped fixtures for now --- tests/conftest.py | 25 ++++++++--------------- tests/test_adapted_attributes.py | 34 ++++++++++++++++++++++++++------ 2 files changed, 36 insertions(+), 23 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 67b02fbf2..aed3ca468 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -19,7 +19,15 @@ schema, schema_simple, schema_advanced, schema_adapted ) +@pytest.fixture(scope="session") +def monkeysession(): + with pytest.MonkeyPatch.context() as mp: + yield mp +@pytest.fixture(scope="module") +def monkeymodule(): + with pytest.MonkeyPatch.context() as mp: + yield mp @pytest.fixture(scope="session") @@ -170,23 +178,6 @@ def schema_adv(connection_test): schema.drop() -@pytest.fixture -def adapted_graph_instance(): - yield schema_adapted.GraphAdapter() - -@pytest.fixture -def enable_adapted_types(monkeypatch): - monkeypatch.setenv(ADAPTED_TYPE_SWITCH, 'TRUE') - yield - monkeypatch.delenv(ADAPTED_TYPE_SWITCH, raising=True) - -@pytest.fixture -def enable_filepath_feature(monkeypatch): - monkeypatch.setenv(FILEPATH_FEATURE_SWITCH, 'TRUE') - yield - monkeypatch.delenv(FILEPATH_FEATURE_SWITCH, raising=True) - - @pytest.fixture def httpClient(): # Initialize httpClient with relevant timeout. diff --git a/tests/test_adapted_attributes.py b/tests/test_adapted_attributes.py index 8657efee3..7e275c5aa 100644 --- a/tests/test_adapted_attributes.py +++ b/tests/test_adapted_attributes.py @@ -2,7 +2,7 @@ import pytest import tempfile import datajoint as dj -from datajoint.errors import ADAPTED_TYPE_SWITCH +from datajoint.errors import ADAPTED_TYPE_SWITCH, FILEPATH_FEATURE_SWITCH import networkx as nx from itertools import zip_longest from . import schema_adapted @@ -10,9 +10,28 @@ from . 
import PREFIX, S3_CONN_INFO -@pytest.fixture -def schema_ad(monkeypatch, connection_test, adapted_graph_instance, enable_adapted_types, enable_filepath_feature): - assert os.environ.get(ADAPTED_TYPE_SWITCH) == 'TRUE', 'must have adapted types enabled in environment' +@pytest.fixture(scope='module') +def adapted_graph_instance(): + yield schema_adapted.GraphAdapter() + + +@pytest.fixture(scope='module') +def enable_adapted_types(monkeymodule): + monkeymodule.setenv(ADAPTED_TYPE_SWITCH, 'TRUE') + yield + monkeymodule.delenv(ADAPTED_TYPE_SWITCH, raising=True) + + +@pytest.fixture(scope='module') +def enable_filepath_feature(monkeymodule): + monkeymodule.setenv(FILEPATH_FEATURE_SWITCH, 'TRUE') + yield + monkeymodule.delenv(FILEPATH_FEATURE_SWITCH, raising=True) + + + +@pytest.fixture(scope='module') +def schema_ad(connection_test, adapted_graph_instance, enable_adapted_types, enable_filepath_feature): stores_config = { "repo-s3": dict( S3_CONN_INFO, protocol="s3", location="adapted/repo", stage=tempfile.mkdtemp() @@ -40,9 +59,12 @@ def schema_ad(monkeypatch, connection_test, adapted_graph_instance, enable_adapt # errors._switch_filepath_types(False) schema.drop() -def test_adapted_type(schema_ad): +@pytest.fixture(scope='module') +def c(schema_ad): + yield Connectivity() + +def test_adapted_type(schema_ad, c): assert os.environ[dj.errors.ADAPTED_TYPE_SWITCH] == 'TRUE' - c = Connectivity() graphs = [ nx.lollipop_graph(4, 2), nx.star_graph(5), From cd584bce1f41c666f35eb38b16d4ee97b968e946 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 4 Dec 2023 09:27:09 -0600 Subject: [PATCH 016/212] Add @dimitri-yatsenko suggested changes on #1116 --- tests/schema_adapted.py | 2 +- tests/schema_advanced.py | 2 +- tests/schema_simple.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/schema_adapted.py b/tests/schema_adapted.py index 559c14234..68a7e965a 100644 --- a/tests/schema_adapted.py +++ b/tests/schema_adapted.py @@ -58,4 +58,4 @@ class Layout(dj.Manual): LOCALS_ADAPTED = {k: v for k, v in locals().items() if inspect.isclass(v)} -__all__ = list(LOCALS_ADAPTED.keys()) +__all__ = list(LOCALS_ADAPTED) diff --git a/tests/schema_advanced.py b/tests/schema_advanced.py index f925e4971..649ff186a 100644 --- a/tests/schema_advanced.py +++ b/tests/schema_advanced.py @@ -135,4 +135,4 @@ class GlobalSynapse(dj.Manual): """ LOCALS_ADVANCED = {k: v for k, v in locals().items() if inspect.isclass(v)} -__all__ = list(LOCALS_ADVANCED.keys()) +__all__ = list(LOCALS_ADVANCED) diff --git a/tests/schema_simple.py b/tests/schema_simple.py index addd70c26..e751a9c6e 100644 --- a/tests/schema_simple.py +++ b/tests/schema_simple.py @@ -262,4 +262,4 @@ class OutfitPiece(dj.Part, dj.Lookup): LOCALS_SIMPLE = {k: v for k, v in locals().items() if inspect.isclass(v)} -__all__ = list(LOCALS_SIMPLE.keys()) +__all__ = list(LOCALS_SIMPLE) From 93fa858e567b68fab2446566b1817a3c4f6aa8fe Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 4 Dec 2023 09:35:07 -0600 Subject: [PATCH 017/212] Migrate test_adapted_attributes::test_adapted_spawned --- tests/schema.py | 2 +- tests/test_adapted_attributes.py | 53 ++++++++++++++++++-------------- 2 files changed, 31 insertions(+), 24 deletions(-) diff --git a/tests/schema.py b/tests/schema.py index 13ff945a3..140a34bba 100644 --- a/tests/schema.py +++ b/tests/schema.py @@ -451,4 +451,4 @@ class Longblob(dj.Manual): LOCALS_ANY = {k: v for k, v in locals().items() if inspect.isclass(v)} -__all__ = list(LOCALS_ANY.keys()) +__all__ = list(LOCALS_ANY) diff --git 
a/tests/test_adapted_attributes.py b/tests/test_adapted_attributes.py index 7e275c5aa..e6ce56797 100644 --- a/tests/test_adapted_attributes.py +++ b/tests/test_adapted_attributes.py @@ -10,42 +10,47 @@ from . import PREFIX, S3_CONN_INFO -@pytest.fixture(scope='module') +@pytest.fixture def adapted_graph_instance(): yield schema_adapted.GraphAdapter() -@pytest.fixture(scope='module') -def enable_adapted_types(monkeymodule): - monkeymodule.setenv(ADAPTED_TYPE_SWITCH, 'TRUE') +@pytest.fixture +def enable_adapted_types(monkeypatch): + monkeypatch.setenv(ADAPTED_TYPE_SWITCH, 'TRUE') yield - monkeymodule.delenv(ADAPTED_TYPE_SWITCH, raising=True) + monkeypatch.delenv(ADAPTED_TYPE_SWITCH, raising=True) -@pytest.fixture(scope='module') -def enable_filepath_feature(monkeymodule): - monkeymodule.setenv(FILEPATH_FEATURE_SWITCH, 'TRUE') +@pytest.fixture +def enable_filepath_feature(monkeypatch): + monkeypatch.setenv(FILEPATH_FEATURE_SWITCH, 'TRUE') yield - monkeymodule.delenv(FILEPATH_FEATURE_SWITCH, raising=True) + monkeypatch.delenv(FILEPATH_FEATURE_SWITCH, raising=True) +@pytest.fixture +def schema_name_custom_datatype(): + schema_name = PREFIX + "_test_custom_datatype" + return schema_name -@pytest.fixture(scope='module') -def schema_ad(connection_test, adapted_graph_instance, enable_adapted_types, enable_filepath_feature): +@pytest.fixture +def schema_ad( + schema_name_custom_datatype, connection_test, adapted_graph_instance, enable_adapted_types, enable_filepath_feature +): stores_config = { "repo-s3": dict( S3_CONN_INFO, protocol="s3", location="adapted/repo", stage=tempfile.mkdtemp() ) } dj.config["stores"] = stores_config - schema_name = PREFIX + "_test_custom_datatype" layout_to_filepath = schema_adapted.LayoutToFilepath() context = { **schema_adapted.LOCALS_ADAPTED, 'graph': adapted_graph_instance, 'layout_to_filepath': layout_to_filepath, } - schema = dj.schema(schema_name, context=context, connection=connection_test) + schema = dj.schema(schema_name_custom_datatype, context=context, connection=connection_test) # instantiate for use as a datajoint type @@ -59,7 +64,7 @@ def schema_ad(connection_test, adapted_graph_instance, enable_adapted_types, ena # errors._switch_filepath_types(False) schema.drop() -@pytest.fixture(scope='module') +@pytest.fixture def c(schema_ad): yield Connectivity() @@ -81,7 +86,7 @@ def test_adapted_type(schema_ad, c): # adapted_graph_instance? 
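# Aside (hedged sketch, not part of the diff): the local_schema fixture
# introduced just below depends on spawn_missing_classes(), which reads table
# definitions back from the server and injects matching classes into the
# calling namespace:
#
#     existing = dj.Schema(PREFIX + "_test_custom_datatype")  # must already exist
#     existing.spawn_missing_classes()   # Connectivity, Layout, ... appear here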
-# @pytest.mark.skip(reason='misconfigured s3 fixtures') +@pytest.mark.skip(reason='misconfigured s3 fixtures') def test_adapted_filepath_type(schema_ad): # https://github.com/datajoint/datajoint-python/issues/684 @@ -108,14 +113,17 @@ def test_adapted_filepath_type(schema_ad): # dj.errors._switch_adapted_types(False) -# test spawned classes -# TODO: separate fixture -# local_schema = dj.Schema(adapted.schema_name) -# local_schema.spawn_missing_classes() +@pytest.fixture +def local_schema(schema_ad, schema_name_custom_datatype): + """Fixture for testing spawned classes""" + local_schema = dj.Schema(schema_name_custom_datatype) + local_schema.spawn_missing_classes() + yield local_schema + local_schema.drop() -@pytest.mark.skip(reason='temp') -def test_adapted_spawned(): - dj.errors._switch_adapted_types(True) + +# @pytest.mark.skip(reason='temp') +def test_adapted_spawned(local_schema, enable_adapted_types): c = Connectivity() # a spawned class graphs = [ nx.lollipop_graph(4, 2), @@ -130,7 +138,6 @@ def test_adapted_spawned(): assert len(g1.edges) == len(g2.edges) assert 0 == len(nx.symmetric_difference(g1, g2).edges) c.delete() - dj.errors._switch_adapted_types(False) # test with virtual module From 07631f9a0944c16c25d0aee1c29ac7788298db68 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 4 Dec 2023 09:42:59 -0600 Subject: [PATCH 018/212] All passing in test_adapted_attributes::test_adapted_spawned except s3 --- tests/test_adapted_attributes.py | 31 ++++++++++++++++--------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/tests/test_adapted_attributes.py b/tests/test_adapted_attributes.py index e6ce56797..626cb9694 100644 --- a/tests/test_adapted_attributes.py +++ b/tests/test_adapted_attributes.py @@ -122,9 +122,8 @@ def local_schema(schema_ad, schema_name_custom_datatype): local_schema.drop() -# @pytest.mark.skip(reason='temp') -def test_adapted_spawned(local_schema, enable_adapted_types): - c = Connectivity() # a spawned class +def test_adapted_spawned(local_schema, enable_adapted_types, c): + # c = Connectivity() # a spawned class graphs = [ nx.lollipop_graph(4, 2), nx.star_graph(5), @@ -140,17 +139,20 @@ def test_adapted_spawned(local_schema, enable_adapted_types): c.delete() -# test with virtual module -# TODO: separate fixture -# virtual_module = dj.VirtualModule( -# "virtual_module", adapted.schema_name, add_objects={"graph": graph} -# ) - - -@pytest.mark.skip(reason='temp') -def test_adapted_virtual(): - dj.errors._switch_adapted_types(True) - c = virtual_module.Connectivity() +@pytest.fixture +def schema_virtual_module(schema_ad, schema_name_custom_datatype, adapted_graph_instance): + """Fixture for testing virtual modules""" + # virtual_module = dj.VirtualModule( + # "virtual_module", adapted.schema_name, add_objects={"graph": graph} + # ) + schema_virtual_module = dj.VirtualModule( + "virtual_module", schema_name_custom_datatype, add_objects={"graph": adapted_graph_instance} + ) + return schema_virtual_module + + +def test_adapted_virtual(schema_virtual_module): + c = schema_virtual_module.Connectivity() graphs = [ nx.lollipop_graph(4, 2), nx.star_graph(5), @@ -168,4 +170,3 @@ def test_adapted_virtual(): assert len(g1.edges) == len(g2.edges) assert 0 == len(nx.symmetric_difference(g1, g2).edges) c.delete() - dj.errors._switch_adapted_types(False) From a6ca9339641d40732c06b346d7e4a8a18184f2b1 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 4 Dec 2023 09:47:05 -0600 Subject: [PATCH 019/212] Clean up fixtures --- tests/test_adapted_attributes.py | 69 
++++++++++++-------------------- 1 file changed, 25 insertions(+), 44 deletions(-) diff --git a/tests/test_adapted_attributes.py b/tests/test_adapted_attributes.py index 626cb9694..2ec0c239f 100644 --- a/tests/test_adapted_attributes.py +++ b/tests/test_adapted_attributes.py @@ -34,9 +34,11 @@ def schema_name_custom_datatype(): schema_name = PREFIX + "_test_custom_datatype" return schema_name + @pytest.fixture def schema_ad( - schema_name_custom_datatype, connection_test, adapted_graph_instance, enable_adapted_types, enable_filepath_feature + schema_name_custom_datatype, connection_test, adapted_graph_instance, + enable_adapted_types, enable_filepath_feature ): stores_config = { "repo-s3": dict( @@ -51,25 +53,33 @@ def schema_ad( 'layout_to_filepath': layout_to_filepath, } schema = dj.schema(schema_name_custom_datatype, context=context, connection=connection_test) - - - # instantiate for use as a datajoint type - # TODO: remove? graph = adapted_graph_instance - schema(schema_adapted.Connectivity) - # errors._switch_filepath_types(True) schema(schema_adapted.Layout) yield schema - # errors._switch_filepath_types(False) schema.drop() + +@pytest.fixture +def local_schema(schema_ad, schema_name_custom_datatype): + """Fixture for testing spawned classes""" + local_schema = dj.Schema(schema_name_custom_datatype) + local_schema.spawn_missing_classes() + yield local_schema + local_schema.drop() + + @pytest.fixture -def c(schema_ad): - yield Connectivity() +def schema_virtual_module(schema_ad, schema_name_custom_datatype, adapted_graph_instance): + """Fixture for testing virtual modules""" + schema_virtual_module = dj.VirtualModule( + "virtual_module", schema_name_custom_datatype, add_objects={"graph": adapted_graph_instance} + ) + return schema_virtual_module -def test_adapted_type(schema_ad, c): - assert os.environ[dj.errors.ADAPTED_TYPE_SWITCH] == 'TRUE' + +def test_adapted_type(schema_ad): + c = Connectivity() graphs = [ nx.lollipop_graph(4, 2), nx.star_graph(5), @@ -85,14 +95,9 @@ def test_adapted_type(schema_ad, c): c.delete() -# adapted_graph_instance? 
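# Aside (hedged sketch, not part of the diff): dj.VirtualModule builds a module
# object from the server-side schema; add_objects supplies the adapter instance
# under the name <graph> that the stored table definitions reference:
#
#     vmod = dj.VirtualModule(
#         "vmod",                                  # illustrative module name
#         PREFIX + "_test_custom_datatype",        # assumes the schema exists
#         add_objects={"graph": schema_adapted.GraphAdapter()},
#     )
#     vmod.Connectivity()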
@pytest.mark.skip(reason='misconfigured s3 fixtures') def test_adapted_filepath_type(schema_ad): - # https://github.com/datajoint/datajoint-python/issues/684 - - # dj.errors._switch_adapted_types(True) - # dj.errors._switch_filepath_types(True) - + """https://github.com/datajoint/datajoint-python/issues/684""" c = Connectivity() c.delete() c.insert1((0, nx.lollipop_graph(4, 2))) @@ -105,25 +110,12 @@ def test_adapted_filepath_type(schema_ad): result = t.fetch1("layout") # TODO: may fail, used to be assert_dict_equal assert result == layout - t.delete() c.delete() - # dj.errors._switch_filepath_types(False) - # dj.errors._switch_adapted_types(False) - -@pytest.fixture -def local_schema(schema_ad, schema_name_custom_datatype): - """Fixture for testing spawned classes""" - local_schema = dj.Schema(schema_name_custom_datatype) - local_schema.spawn_missing_classes() - yield local_schema - local_schema.drop() - - -def test_adapted_spawned(local_schema, enable_adapted_types, c): - # c = Connectivity() # a spawned class +def test_adapted_spawned(local_schema, enable_adapted_types): + c = Connectivity() # a spawned class graphs = [ nx.lollipop_graph(4, 2), nx.star_graph(5), @@ -139,17 +131,6 @@ def test_adapted_spawned(local_schema, enable_adapted_types, c): c.delete() -@pytest.fixture -def schema_virtual_module(schema_ad, schema_name_custom_datatype, adapted_graph_instance): - """Fixture for testing virtual modules""" - # virtual_module = dj.VirtualModule( - # "virtual_module", adapted.schema_name, add_objects={"graph": graph} - # ) - schema_virtual_module = dj.VirtualModule( - "virtual_module", schema_name_custom_datatype, add_objects={"graph": adapted_graph_instance} - ) - return schema_virtual_module - def test_adapted_virtual(schema_virtual_module): c = schema_virtual_module.Connectivity() From 5d65db32e92bcfd1d63d3e4f00032ad1c8896719 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 4 Dec 2023 10:04:13 -0600 Subject: [PATCH 020/212] First pass at migrating test_s3 --- tests/schema_external.py | 89 ++++++++++++++++++++++++++++++ tests/test_s3.py | 116 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 205 insertions(+) create mode 100644 tests/schema_external.py create mode 100644 tests/test_s3.py diff --git a/tests/schema_external.py b/tests/schema_external.py new file mode 100644 index 000000000..7702772fa --- /dev/null +++ b/tests/schema_external.py @@ -0,0 +1,89 @@ +""" +A schema for testing external attributes +""" + +import tempfile +import inspect +import datajoint as dj +from . 
import PREFIX, CONN_INFO, S3_CONN_INFO +import numpy as np + + +class Simple(dj.Manual): + definition = """ + simple : int + --- + item : blob@local + """ + + +class SimpleRemote(dj.Manual): + definition = """ + simple : int + --- + item : blob@share + """ + + +class Seed(dj.Lookup): + definition = """ + seed : int + """ + contents = zip(range(4)) + + +class Dimension(dj.Lookup): + definition = """ + dim : int + --- + dimensions : blob + """ + contents = ([0, [100, 50]], [1, [3, 4, 8, 6]]) + + +class Image(dj.Computed): + definition = """ + # table for storing + -> Seed + -> Dimension + ---- + img : blob@share # objects are stored as specified by dj.config['stores']['share'] + neg : blob@local # objects are stored as specified by dj.config['stores']['local'] + """ + + def make(self, key): + np.random.seed(key["seed"]) + img = np.random.rand(*(Dimension() & key).fetch1("dimensions")) + self.insert1(dict(key, img=img, neg=-img.astype(np.float32))) + + +class Attach(dj.Manual): + definition = """ + # table for storing attachments + attach : int + ---- + img : attach@share # attachments are stored as specified by: dj.config['stores']['raw'] + txt : attach # attachments are stored directly in the database + """ + + +class Filepath(dj.Manual): + definition = """ + # table for file management + fnum : int # test comment containing : + --- + img : filepath@repo # managed files + """ + + +class FilepathS3(dj.Manual): + definition = """ + # table for file management + fnum : int + --- + img : filepath@repo-s3 # managed files + """ + + +LOCALS_EXTERNAL= {k: v for k, v in locals().items() if inspect.isclass(v)} +__all__ = list(LOCALS_EXTERNAL) diff --git a/tests/test_s3.py b/tests/test_s3.py new file mode 100644 index 000000000..7173f7650 --- /dev/null +++ b/tests/test_s3.py @@ -0,0 +1,116 @@ +import pytest +import urllib3 +import certifi +from nose.tools import assert_true, raises +from .schema_external import SimpleRemote +from datajoint.errors import DataJointError +from datajoint.hash import uuid_from_buffer +from datajoint.blob import pack +from . import S3_CONN_INFO +from minio import Minio + +@pytest.fixture(scope='module') +def http_client(): + http_client = urllib3.PoolManager( + timeout=30, + cert_reqs="CERT_REQUIRED", + ca_certs=certifi.where(), + retries=urllib3.Retry( + total=3, backoff_factor=0.2, status_forcelist=[500, 502, 503, 504] + ), + ) + return http_client + + +@pytest.fixture(scope='module') +def minio_client(http_client): + # Initialize minioClient with an endpoint and access/secret keys. 
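    # (note: unlike the earlier conftest version, this fixture takes http_client
    # as a parameter, so the pooled retry/timeout policy is actually injected
    # into the Minio client constructed below)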
+ minio_client = Minio( + S3_CONN_INFO["endpoint"], + access_key=S3_CONN_INFO["access_key"], + secret_key=S3_CONN_INFO["secret_key"], + secure=True, + http_client=http_client, + ) + return minio_client + + +@pytest.fixture(scope='session') +def stores_config(): + stores_config = { + "raw": dict(protocol="file", location=tempfile.mkdtemp()), + "repo": dict( + stage=tempfile.mkdtemp(), protocol="file", location=tempfile.mkdtemp() + ), + "repo-s3": dict( + S3_CONN_INFO, protocol="s3", location="dj/repo", stage=tempfile.mkdtemp() + ), + "local": dict(protocol="file", location=tempfile.mkdtemp(), subfolding=(1, 1)), + "share": dict( + S3_CONN_INFO, protocol="s3", location="dj/store/repo", subfolding=(2, 4) + ), + } + return stores_config + + +@pytest.fixture +def schema_ext(connection_test, stores_config, enable_filepath_feature): + schema = dj.Schema(PREFIX + "_extern", context=LOCALS_EXTERNAL, connection=connection_test) + dj.config["stores"] = stores_config + dj.config["cache"] = tempfile.mkdtemp() + + schema(Simple) + schema(SimpleRemote) + schema(Seed) + schema(Dimension) + schema(Image) + schema(Attach) + + # dj.errors._switch_filepath_types(True) + schema(Filepath) + schema(FilepathS3) + # dj.errors._switch_filepath_types(False) + yield schema + schema.drop() + + +class TestS3: + def test_connection(self, http_client, minio_client): + assert minio_client.bucket_exists(S3_CONN_INFO["bucket"]) + + def test_connection_secure(self, minio_client): + assert minio_client.bucket_exists(S3_CONN_INFO["bucket"]) + + def test_remove_object_exception(self): + # TODO: mv to failing block + with pytest.raises(DataJointError): + # https://github.com/datajoint/datajoint-python/issues/952 + + # Insert some test data and remove it so that the external table is populated + test = [1, [1, 2, 3]] + SimpleRemote.insert1(test) + SimpleRemote.delete() + + # Save the old external table minio client + old_client = schema.external["share"].s3.client + + # Apply our new minio client which has a user that does not exist + schema.external["share"].s3.client = Minio( + S3_CONN_INFO["endpoint"], + access_key="jeffjeff", + secret_key="jeffjeff", + secure=False, + ) + + # This method returns a list of errors + error_list = schema.external["share"].delete( + delete_external_files=True, errors_as_string=False + ) + + # Teardown + schema.external["share"].s3.client = old_client + schema.external["share"].delete(delete_external_files=True) + + # Raise the error we want if the error matches the expected uuid + if str(error_list[0][0]) == str(uuid_from_buffer(pack(test[1]))): + raise error_list[0][2] From 6c36bd362368dc9f92a89543b97bbd1aa51afc5f Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 4 Dec 2023 10:08:01 -0600 Subject: [PATCH 021/212] Add .devcontainer/docker-compose.yml --- .devcontainer/docker-compose.yml | 26 ++++++++++++++++++++++++++ .gitignore | 4 +++- 2 files changed, 29 insertions(+), 1 deletion(-) create mode 100644 .devcontainer/docker-compose.yml diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml new file mode 100644 index 000000000..373751880 --- /dev/null +++ b/.devcontainer/docker-compose.yml @@ -0,0 +1,26 @@ +version: '2.4' +services: + # Update this to the name of the service you want to work with in your docker-compose.yml file + app: + # Uncomment if you want to override the service's Dockerfile to one in the .devcontainer + # folder. 
Note that the path of the Dockerfile and context is relative to the *primary* + # docker-compose.yml file (the first in the devcontainer.json "dockerComposeFile" + # array). The sample below assumes your primary file is in the root of your project. + # + # build: + # context: . + # dockerfile: .devcontainer/Dockerfile + + volumes: + # Update this to wherever you want VS Code to mount the folder of your project + - ..:/workspaces:cached + + # Uncomment the next four lines if you will use a ptrace-based debugger like C++, Go, and Rust. + # cap_add: + # - SYS_PTRACE + # security_opt: + # - seccomp:unconfined + + # Overrides default command so things don't shut down after the process ends. + command: /bin/sh -c "while sleep 1000; do :; done" + diff --git a/.gitignore b/.gitignore index 4b7bdb2c4..6e1d664ff 100644 --- a/.gitignore +++ b/.gitignore @@ -30,4 +30,6 @@ docs/site !.vscode/settings.json !.vscode/launch.json -!.devcontainer/devcontainer.json \ No newline at end of file +!.devcontainer/devcontainer.json +!.devcontainer/docker-compose.yml + From c6ccaa3e296d4728f0fbb99dd9f11e8ca209b262 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 4 Dec 2023 11:26:18 -0600 Subject: [PATCH 022/212] Set default MYSQL_PASS env var --- LNX-docker-compose.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/LNX-docker-compose.yml b/LNX-docker-compose.yml index 248b3611c..f90e73a04 100644 --- a/LNX-docker-compose.yml +++ b/LNX-docker-compose.yml @@ -9,7 +9,7 @@ services: <<: *net image: datajoint/mysql:${MYSQL_VER:-5.7} environment: - - MYSQL_ROOT_PASSWORD=${DJ_PASS} + - MYSQL_ROOT_PASSWORD=${DJ_PASS:-simple} # ports: # - "3306:3306" # volumes: @@ -69,7 +69,7 @@ services: environment: - DJ_HOST=fakeservices.datajoint.io - DJ_USER=root - - DJ_PASS + - DJ_PASS=simple - DJ_TEST_HOST=fakeservices.datajoint.io - DJ_TEST_USER=datajoint - DJ_TEST_PASSWORD=datajoint From f72d3d1c8bf0f361673889f15dce1c844fb789ad Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 4 Dec 2023 12:37:19 -0600 Subject: [PATCH 023/212] Remove git feature from dev container --- .devcontainer/devcontainer.json | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 08a4482cf..6e414e8b6 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -24,11 +24,9 @@ "shutdownAction": "stopCompose", // Uncomment the next line to run commands after the container is created. "postCreateCommand": "python3 -m pip install -e .", - "features": { - "ghcr.io/cirolosapio/devcontainers-features/alpine-git:0": {}, - }, + "features": {}, // Configure tool-specific properties. // "customizations": {}, // Uncomment to connect as an existing user other than the container default. More info: https://aka.ms/dev-containers-non-root. 
// "remoteUser": "devcontainer" -} \ No newline at end of file +} From cfae0db75c760e58b5b8e5e8ca967be0c633f84f Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 4 Dec 2023 12:38:00 -0600 Subject: [PATCH 024/212] Update compose stack to use MySQL 8.0 by default --- LNX-docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LNX-docker-compose.yml b/LNX-docker-compose.yml index f90e73a04..f650b81ce 100644 --- a/LNX-docker-compose.yml +++ b/LNX-docker-compose.yml @@ -7,7 +7,7 @@ x-net: services: db: <<: *net - image: datajoint/mysql:${MYSQL_VER:-5.7} + image: datajoint/mysql:${MYSQL_VER:-8.0} environment: - MYSQL_ROOT_PASSWORD=${DJ_PASS:-simple} # ports: From 2d588f27951e48fde141566866ea142a58eabd52 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 4 Dec 2023 13:15:13 -0600 Subject: [PATCH 025/212] Readd git feature to dev container --- .devcontainer/devcontainer.json | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 6e414e8b6..4dde61560 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -24,7 +24,9 @@ "shutdownAction": "stopCompose", // Uncomment the next line to run commands after the container is created. "postCreateCommand": "python3 -m pip install -e .", - "features": {}, + "features": { + "ghcr.io/cirolosapio/devcontainers-features/alpine-git:0": {}, + }, // Configure tool-specific properties. // "customizations": {}, // Uncomment to connect as an existing user other than the container default. More info: https://aka.ms/dev-containers-non-root. From 53c17aee8762289c74aff1f5db718c4eca56955d Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 4 Dec 2023 13:15:30 -0600 Subject: [PATCH 026/212] Update nginx version in compose stack --- LNX-docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LNX-docker-compose.yml b/LNX-docker-compose.yml index f650b81ce..d8bee4456 100644 --- a/LNX-docker-compose.yml +++ b/LNX-docker-compose.yml @@ -44,7 +44,7 @@ services: interval: 15s fakeservices.datajoint.io: <<: *net - image: datajoint/nginx:v0.2.6 + image: datajoint/nginx:v0.2.7 environment: - ADD_db_TYPE=DATABASE - ADD_db_ENDPOINT=db:3306 From 49398539279823f1d060d0597ae2a4435c94a28a Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 4 Dec 2023 13:16:14 -0600 Subject: [PATCH 027/212] Update MySQL default auth plugin https://stackoverflow.com/questions/49019652/not-able-to-connect-to-mysql-docker-from-local --- LNX-docker-compose.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/LNX-docker-compose.yml b/LNX-docker-compose.yml index d8bee4456..c669a9d5f 100644 --- a/LNX-docker-compose.yml +++ b/LNX-docker-compose.yml @@ -10,6 +10,7 @@ services: image: datajoint/mysql:${MYSQL_VER:-8.0} environment: - MYSQL_ROOT_PASSWORD=${DJ_PASS:-simple} + command: mysqld --default-authentication-plugin=mysql_native_password # ports: # - "3306:3306" # volumes: From 22288078b137217b2746d1c8552588a3d385b218 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 4 Dec 2023 14:50:25 -0600 Subject: [PATCH 028/212] Change default MySQL root password MySQL 8.0 in Docker was ignoring the MYSQL_ROOT_PASSWORD environment variable, setting the default user/pass to root and password. Update the env vars in the dev stack to match these values. 
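For a quick sanity check after rebuilding the stack, a connection with the
new defaults should succeed from inside the dev container. A minimal sketch,
assuming the stack is up and datajoint is importable:

    import datajoint as dj

    conn = dj.Connection(
        host="fakeservices.datajoint.io", user="root", password="password"
    )
    assert conn.is_connected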
--- LNX-docker-compose.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/LNX-docker-compose.yml b/LNX-docker-compose.yml index c669a9d5f..eaf3a48cd 100644 --- a/LNX-docker-compose.yml +++ b/LNX-docker-compose.yml @@ -9,7 +9,7 @@ services: <<: *net image: datajoint/mysql:${MYSQL_VER:-8.0} environment: - - MYSQL_ROOT_PASSWORD=${DJ_PASS:-simple} + - MYSQL_ROOT_PASSWORD=${DJ_PASS:-password} command: mysqld --default-authentication-plugin=mysql_native_password # ports: # - "3306:3306" @@ -70,7 +70,7 @@ services: environment: - DJ_HOST=fakeservices.datajoint.io - DJ_USER=root - - DJ_PASS=simple + - DJ_PASS=password - DJ_TEST_HOST=fakeservices.datajoint.io - DJ_TEST_USER=datajoint - DJ_TEST_PASSWORD=datajoint From 92f6d6e7a9f9e8595c1c597e10e113197c223e54 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 4 Dec 2023 16:16:21 -0600 Subject: [PATCH 029/212] Add nosetests and mysql to dev container --- .devcontainer/Dockerfile | 13 +++++++++++++ .devcontainer/devcontainer.json | 26 ++++++++++++++++++++------ .devcontainer/docker-compose.yml | 18 +++++++++++------- 3 files changed, 44 insertions(+), 13 deletions(-) create mode 100644 .devcontainer/Dockerfile diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile new file mode 100644 index 000000000..06958f6cd --- /dev/null +++ b/.devcontainer/Dockerfile @@ -0,0 +1,13 @@ +ARG PY_VER +ARG DISTRO +FROM mcr.microsoft.com/devcontainers/python:${PY_VER}-${DISTRO} +RUN \ + apt update && \ + apt-get install bash-completion graphviz default-mysql-client -y && \ + pip install flake8 black faker ipykernel pytest pytest-cov nose nose-cov datajoint && \ + pip uninstall datajoint -y + +USER root +ENV DJ_HOST fakeservices.datajoint.io +ENV DJ_USER root +ENV DJ_PASS password diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 4dde61560..35b1d0835 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -17,18 +17,32 @@ // Features to add to the dev container. More info: https://containers.dev/features. // "features": {}, // Use 'forwardPorts' to make a list of ports inside the container available locally. - // "forwardPorts": [], + "forwardPorts": [ + 80, + 443, + 3306, + 8080, + 9000 + ], // Uncomment the next line if you want start specific services in your Docker Compose config. // "runServices": [], // Uncomment the next line if you want to keep your containers running after VS Code shuts down. "shutdownAction": "stopCompose", - // Uncomment the next line to run commands after the container is created. - "postCreateCommand": "python3 -m pip install -e .", + "onCreateCommand": "python3 -m pip install -e .", "features": { - "ghcr.io/cirolosapio/devcontainers-features/alpine-git:0": {}, + "ghcr.io/devcontainers/features/git:1": {}, }, // Configure tool-specific properties. - // "customizations": {}, + "customizations": { + "vscode": { + "extensions": [ + "ms-python.python" + ] + } + }, + "remoteEnv": { + "LOCAL_WORKSPACE_FOLDER": "${localWorkspaceFolder}" + } // Uncomment to connect as an existing user other than the container default. More info: https://aka.ms/dev-containers-non-root. 
// "remoteUser": "devcontainer" -} +} \ No newline at end of file diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index 373751880..75c199e7a 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -2,14 +2,16 @@ version: '2.4' services: # Update this to the name of the service you want to work with in your docker-compose.yml file app: - # Uncomment if you want to override the service's Dockerfile to one in the .devcontainer - # folder. Note that the path of the Dockerfile and context is relative to the *primary* + # Uncomment if you want to override the service's Dockerfile to one in the .devcontainer + # folder. Note that the path of the Dockerfile and context is relative to the *primary* # docker-compose.yml file (the first in the devcontainer.json "dockerComposeFile" # array). The sample below assumes your primary file is in the root of your project. - # - # build: - # context: . - # dockerfile: .devcontainer/Dockerfile + build: + context: . + dockerfile: .devcontainer/Dockerfile + args: + - PY_VER=${PY_VER:-3.8} + - DISTRO=${DISTRO:-buster} volumes: # Update this to wherever you want VS Code to mount the folder of your project @@ -21,6 +23,8 @@ services: # security_opt: # - seccomp:unconfined + user: root + # Overrides default command so things don't shut down after the process ends. command: /bin/sh -c "while sleep 1000; do :; done" - + From 39c42d05983e68a2000835d8a35e9430e40f43e2 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 4 Dec 2023 21:58:43 -0600 Subject: [PATCH 030/212] Change default MySQL root password in CI --- .github/workflows/development.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/development.yaml b/.github/workflows/development.yaml index fdcbcd677..5f3b1f075 100644 --- a/.github/workflows/development.yaml +++ b/.github/workflows/development.yaml @@ -77,7 +77,7 @@ jobs: - name: Run primary tests env: PY_VER: ${{matrix.py_ver}} - DJ_PASS: simple + DJ_PASS: password MYSQL_VER: ${{matrix.mysql_ver}} DISTRO: alpine MINIO_VER: RELEASE.2021-09-03T03-56-13Z From 09a5439e074eec56a78aae88e5c186494da2b104 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 4 Dec 2023 22:06:34 -0600 Subject: [PATCH 031/212] Update developer docs with MySQL password --- docs/src/develop.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/develop.md b/docs/src/develop.md index 4acb9ed35..842c04d96 100644 --- a/docs/src/develop.md +++ b/docs/src/develop.md @@ -104,7 +104,7 @@ It is often useful in development to connect to DataJoint's relational database Connect as follows to the database running within your developer environment: ``` -mysql -hfakeservices.datajoint.io -uroot -psimple +mysql -hfakeservices.datajoint.io -uroot -ppassword ``` ### Documentation From 4b7a68c2db79a2666029f88dc22dc7dcc50e974a Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 4 Dec 2023 22:54:28 -0600 Subject: [PATCH 032/212] Unique tag for devcontainer --- .devcontainer/docker-compose.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index 75c199e7a..1ee8ea148 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -6,6 +6,8 @@ services: # folder. Note that the path of the Dockerfile and context is relative to the *primary* # docker-compose.yml file (the first in the devcontainer.json "dockerComposeFile" # array). 
The sample below assumes your primary file is in the root of your project. + container_name: devcontainer + image: devcontainer build: context: . dockerfile: .devcontainer/Dockerfile From 071d9ebd6b93ab4f9d5cef08ed78503ddc4b79e7 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 4 Dec 2023 23:46:23 -0600 Subject: [PATCH 033/212] adapted_attributes tests passing --- tests/conftest.py | 126 ++++++++++++++++++++++++++----- tests/test_adapted_attributes.py | 3 +- 2 files changed, 109 insertions(+), 20 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index aed3ca468..47ea656be 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,9 +1,11 @@ import datajoint as dj from packaging import version import os +from os import environ, remove import minio import urllib3 import certifi +from distutils.version import LooseVersion import shutil import pytest import networkx as nx @@ -19,11 +21,13 @@ schema, schema_simple, schema_advanced, schema_adapted ) + @pytest.fixture(scope="session") def monkeysession(): with pytest.MonkeyPatch.context() as mp: yield mp + @pytest.fixture(scope="module") def monkeymodule(): with pytest.MonkeyPatch.context() as mp: @@ -31,26 +35,90 @@ def monkeymodule(): @pytest.fixture(scope="session") -def connection_root(): - """Root user database connection.""" - dj.config["safemode"] = False +def connection_root_bare(): connection = dj.Connection( host=os.getenv("DJ_HOST"), user=os.getenv("DJ_USER"), password=os.getenv("DJ_PASS"), ) yield connection - dj.config["safemode"] = True - connection.close() + + +@pytest.fixture(scope="session") +def connection_root(connection_root_bare): + """Root user database connection.""" + dj.config["safemode"] = False + conn_root = connection_root_bare + # Create MySQL users + if LooseVersion(conn_root.query("select @@version;").fetchone()[0]) >= LooseVersion( + "8.0.0" + ): + # create user if necessary on mysql8 + conn_root.query( + """ + CREATE USER IF NOT EXISTS 'datajoint'@'%%' + IDENTIFIED BY 'datajoint'; + """ + ) + conn_root.query( + """ + CREATE USER IF NOT EXISTS 'djview'@'%%' + IDENTIFIED BY 'djview'; + """ + ) + conn_root.query( + """ + CREATE USER IF NOT EXISTS 'djssl'@'%%' + IDENTIFIED BY 'djssl' + REQUIRE SSL; + """ + ) + conn_root.query("GRANT ALL PRIVILEGES ON `djtest%%`.* TO 'datajoint'@'%%';") + conn_root.query("GRANT SELECT ON `djtest%%`.* TO 'djview'@'%%';") + conn_root.query("GRANT SELECT ON `djtest%%`.* TO 'djssl'@'%%';") + else: + # grant permissions. 
For MySQL 5.7 this also automatically creates user + # if not exists + conn_root.query( + """ + GRANT ALL PRIVILEGES ON `djtest%%`.* TO 'datajoint'@'%%' + IDENTIFIED BY 'datajoint'; + """ + ) + conn_root.query( + "GRANT SELECT ON `djtest%%`.* TO 'djview'@'%%' IDENTIFIED BY 'djview';" + ) + conn_root.query( + """ + GRANT SELECT ON `djtest%%`.* TO 'djssl'@'%%' + IDENTIFIED BY 'djssl' + REQUIRE SSL; + """ + ) + + yield conn_root + + # Teardown + conn_root.query("SET FOREIGN_KEY_CHECKS=0") + cur = conn_root.query('SHOW DATABASES LIKE "{}\_%%"'.format(PREFIX)) + for db in cur.fetchall(): + conn_root.query("DROP DATABASE `{}`".format(db[0])) + conn_root.query("SET FOREIGN_KEY_CHECKS=1") + if os.path.exists("dj_local_conf.json"): + remove("dj_local_conf.json") + + # Remove created users + conn_root.query("DROP USER IF EXISTS `datajoint`") + conn_root.query("DROP USER IF EXISTS `djview`") + conn_root.query("DROP USER IF EXISTS `djssl`") + conn_root.close() @pytest.fixture(scope="session") def connection_test(connection_root): """Test user database connection.""" database = f"{PREFIX}%%" - credentials = dict( - host=os.getenv("DJ_HOST"), user="datajoint", password="datajoint" - ) + credentials = dict(host=os.getenv("DJ_HOST"), user="datajoint", password="datajoint") permission = "ALL PRIVILEGES" # Create MySQL users @@ -178,10 +246,10 @@ def schema_adv(connection_test): schema.drop() -@pytest.fixture -def httpClient(): +@pytest.fixture(scope='session') +def http_client(): # Initialize httpClient with relevant timeout. - httpClient = urllib3.PoolManager( + client = urllib3.PoolManager( timeout=30, cert_reqs="CERT_REQUIRED", ca_certs=certifi.where(), @@ -189,16 +257,38 @@ def httpClient(): total=3, backoff_factor=0.2, status_forcelist=[500, 502, 503, 504] ), ) - yield httpClient + yield client -@pytest.fixture -def minioClient(): - # Initialize minioClient with an endpoint and access/secret keys. 
- minioClient = minio.Minio( + +@pytest.fixture(scope='session') +def minio_client_bare(http_client): + client = minio.Minio( S3_CONN_INFO["endpoint"], access_key=S3_CONN_INFO["access_key"], secret_key=S3_CONN_INFO["secret_key"], secure=True, - http_client=httpClient, + http_client=http_client, ) - yield minioClient + return client + + +@pytest.fixture(scope='session') +def minio_client(minio_client_bare): + """Initialize MinIO with an endpoint and access/secret keys.""" + # Bootstrap MinIO bucket + aws_region = "us-east-1" + try: + minio_client_bare.make_bucket(S3_CONN_INFO["bucket"], location=aws_region) + except minio.error.S3Error as e: + if e.code != "BucketAlreadyOwnedByYou": + raise e + + yield minio_client_bare + + # Teardown S3 + objs = list(minio_client_bare.list_objects(S3_CONN_INFO["bucket"], recursive=True)) + objs = [ + minio_client_bare.remove_object(S3_CONN_INFO["bucket"], o.object_name.encode("utf-8")) + for o in objs + ] + minio_client_bare.remove_bucket(S3_CONN_INFO["bucket"]) diff --git a/tests/test_adapted_attributes.py b/tests/test_adapted_attributes.py index 2ec0c239f..03e8cfc1c 100644 --- a/tests/test_adapted_attributes.py +++ b/tests/test_adapted_attributes.py @@ -95,8 +95,7 @@ def test_adapted_type(schema_ad): c.delete() -@pytest.mark.skip(reason='misconfigured s3 fixtures') -def test_adapted_filepath_type(schema_ad): +def test_adapted_filepath_type(schema_ad, minio_client): """https://github.com/datajoint/datajoint-python/issues/684""" c = Connectivity() c.delete() From c2605d7562e06e4649d4567127c5c7b06de4ecea Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 09:29:04 -0600 Subject: [PATCH 034/212] Format with black --- tests/conftest.py | 31 +++++++++++++++++++----------- tests/schema_adapted.py | 1 + tests/schema_advanced.py | 2 ++ tests/test_adapted_attributes.py | 33 +++++++++++++++++++++----------- tests/test_connection.py | 4 +++- tests/test_json.py | 7 ++++--- tests/test_plugin.py | 5 +++-- 7 files changed, 55 insertions(+), 28 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 47ea656be..43a336254 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -13,12 +13,15 @@ from pathlib import Path import tempfile from datajoint import errors -from datajoint.errors import ( - ADAPTED_TYPE_SWITCH, FILEPATH_FEATURE_SWITCH -) +from datajoint.errors import ADAPTED_TYPE_SWITCH, FILEPATH_FEATURE_SWITCH from . 
import ( - PREFIX, CONN_INFO, S3_CONN_INFO, - schema, schema_simple, schema_advanced, schema_adapted + PREFIX, + CONN_INFO, + S3_CONN_INFO, + schema, + schema_simple, + schema_advanced, + schema_adapted, ) @@ -118,7 +121,9 @@ def connection_root(connection_root_bare): def connection_test(connection_root): """Test user database connection.""" database = f"{PREFIX}%%" - credentials = dict(host=os.getenv("DJ_HOST"), user="datajoint", password="datajoint") + credentials = dict( + host=os.getenv("DJ_HOST"), user="datajoint", password="datajoint" + ) permission = "ALL PRIVILEGES" # Create MySQL users @@ -231,7 +236,9 @@ def schema_simp(connection_test): @pytest.fixture def schema_adv(connection_test): schema = dj.Schema( - PREFIX + "_advanced", schema_advanced.LOCALS_ADVANCED, connection=connection_test + PREFIX + "_advanced", + schema_advanced.LOCALS_ADVANCED, + connection=connection_test, ) schema(schema_advanced.Person) schema(schema_advanced.Parent) @@ -246,7 +253,7 @@ def schema_adv(connection_test): schema.drop() -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def http_client(): # Initialize httpClient with relevant timeout. client = urllib3.PoolManager( @@ -260,7 +267,7 @@ def http_client(): yield client -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def minio_client_bare(http_client): client = minio.Minio( S3_CONN_INFO["endpoint"], @@ -272,7 +279,7 @@ def minio_client_bare(http_client): return client -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def minio_client(minio_client_bare): """Initialize MinIO with an endpoint and access/secret keys.""" # Bootstrap MinIO bucket @@ -288,7 +295,9 @@ def minio_client(minio_client_bare): # Teardown S3 objs = list(minio_client_bare.list_objects(S3_CONN_INFO["bucket"], recursive=True)) objs = [ - minio_client_bare.remove_object(S3_CONN_INFO["bucket"], o.object_name.encode("utf-8")) + minio_client_bare.remove_object( + S3_CONN_INFO["bucket"], o.object_name.encode("utf-8") + ) for o in objs ] minio_client_bare.remove_bucket(S3_CONN_INFO["bucket"]) diff --git a/tests/schema_adapted.py b/tests/schema_adapted.py index 68a7e965a..ab9a02e76 100644 --- a/tests/schema_adapted.py +++ b/tests/schema_adapted.py @@ -48,6 +48,7 @@ class Connectivity(dj.Manual): conn_graph = null : """ + class Layout(dj.Manual): definition = """ # stores graph layout diff --git a/tests/schema_advanced.py b/tests/schema_advanced.py index 649ff186a..6a35cb34a 100644 --- a/tests/schema_advanced.py +++ b/tests/schema_advanced.py @@ -1,6 +1,7 @@ import datajoint as dj import inspect + class Person(dj.Manual): definition = """ person_id : int @@ -134,5 +135,6 @@ class GlobalSynapse(dj.Manual): -> Cell.proj(post_slice="slice", post_cell="cell") """ + LOCALS_ADVANCED = {k: v for k, v in locals().items() if inspect.isclass(v)} __all__ = list(LOCALS_ADVANCED) diff --git a/tests/test_adapted_attributes.py b/tests/test_adapted_attributes.py index 03e8cfc1c..bd0ce7713 100644 --- a/tests/test_adapted_attributes.py +++ b/tests/test_adapted_attributes.py @@ -17,14 +17,14 @@ def adapted_graph_instance(): @pytest.fixture def enable_adapted_types(monkeypatch): - monkeypatch.setenv(ADAPTED_TYPE_SWITCH, 'TRUE') + monkeypatch.setenv(ADAPTED_TYPE_SWITCH, "TRUE") yield monkeypatch.delenv(ADAPTED_TYPE_SWITCH, raising=True) @pytest.fixture def enable_filepath_feature(monkeypatch): - monkeypatch.setenv(FILEPATH_FEATURE_SWITCH, 'TRUE') + monkeypatch.setenv(FILEPATH_FEATURE_SWITCH, "TRUE") yield monkeypatch.delenv(FILEPATH_FEATURE_SWITCH, 
raising=True) @@ -37,22 +37,30 @@ def schema_name_custom_datatype(): @pytest.fixture def schema_ad( - schema_name_custom_datatype, connection_test, adapted_graph_instance, - enable_adapted_types, enable_filepath_feature + schema_name_custom_datatype, + connection_test, + adapted_graph_instance, + enable_adapted_types, + enable_filepath_feature, ): stores_config = { "repo-s3": dict( - S3_CONN_INFO, protocol="s3", location="adapted/repo", stage=tempfile.mkdtemp() + S3_CONN_INFO, + protocol="s3", + location="adapted/repo", + stage=tempfile.mkdtemp(), ) } dj.config["stores"] = stores_config layout_to_filepath = schema_adapted.LayoutToFilepath() context = { **schema_adapted.LOCALS_ADAPTED, - 'graph': adapted_graph_instance, - 'layout_to_filepath': layout_to_filepath, + "graph": adapted_graph_instance, + "layout_to_filepath": layout_to_filepath, } - schema = dj.schema(schema_name_custom_datatype, context=context, connection=connection_test) + schema = dj.schema( + schema_name_custom_datatype, context=context, connection=connection_test + ) graph = adapted_graph_instance schema(schema_adapted.Connectivity) schema(schema_adapted.Layout) @@ -70,10 +78,14 @@ def local_schema(schema_ad, schema_name_custom_datatype): @pytest.fixture -def schema_virtual_module(schema_ad, schema_name_custom_datatype, adapted_graph_instance): +def schema_virtual_module( + schema_ad, schema_name_custom_datatype, adapted_graph_instance +): """Fixture for testing virtual modules""" schema_virtual_module = dj.VirtualModule( - "virtual_module", schema_name_custom_datatype, add_objects={"graph": adapted_graph_instance} + "virtual_module", + schema_name_custom_datatype, + add_objects={"graph": adapted_graph_instance}, ) return schema_virtual_module @@ -130,7 +142,6 @@ def test_adapted_spawned(local_schema, enable_adapted_types): c.delete() - def test_adapted_virtual(schema_virtual_module): c = schema_virtual_module.Connectivity() graphs = [ diff --git a/tests/test_connection.py b/tests/test_connection.py index a73677aec..8cdbbbff5 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -12,7 +12,9 @@ @pytest.fixture def schema(connection_test): - schema = dj.Schema(PREFIX + "_transactions", context=dict(), connection=connection_test) + schema = dj.Schema( + PREFIX + "_transactions", context=dict(), connection=connection_test + ) yield schema schema.drop() diff --git a/tests/test_json.py b/tests/test_json.py index 37a33c825..c1caaeedd 100644 --- a/tests/test_json.py +++ b/tests/test_json.py @@ -107,6 +107,7 @@ def test_insert_update(schema): q.delete_quick() assert not q + def test_describe(schema): rel = Team() context = inspect.currentframe().f_globals @@ -114,6 +115,7 @@ def test_describe(schema): s2 = declare(rel.full_table_name, rel.describe(), context) assert s1 == s2 + def test_restrict(schema): # dict assert (Team & {"car.name": "Chaching"}).fetch1("name") == "business" @@ -139,9 +141,7 @@ def test_restrict(schema): assert (Team & {"car": None}).fetch1("name") == "marketing" - assert (Team & {"car.tire_pressure": [34, 30, 27, 32]}).fetch1( - "name" - ) == "business" + assert (Team & {"car.tire_pressure": [34, 30, 27, 32]}).fetch1("name") == "business" assert ( Team & {"car.headlights[1]": {"side": "right", "hyper_white": True}} @@ -175,6 +175,7 @@ def test_restrict(schema): & """`car`->>'$.headlights[1]' = '{"side": "right", "hyper_white": true}'""" ).fetch1("name") == "business", "2nd `headlight` object did not match" + def test_proj(schema): # proj necessary since we need to rename indexed value into a 
proper attribute name assert Team.proj(car_length="car.length").fetch( diff --git a/tests/test_plugin.py b/tests/test_plugin.py index e41224116..ddb8b3bfc 100644 --- a/tests/test_plugin.py +++ b/tests/test_plugin.py @@ -23,7 +23,7 @@ def test_normal_djerror(): assert e.__cause__ is None -@pytest.mark.parametrize('category', ('connection', )) +@pytest.mark.parametrize("category", ("connection",)) def test_verified_djerror(category): try: curr_plugins = getattr(p, "{}_plugins".format(category)) @@ -41,7 +41,8 @@ def test_verified_djerror(category): def test_verified_djerror_type(): test_verified_djerror(category="type") -@pytest.mark.parametrize('category', ('connection', )) + +@pytest.mark.parametrize("category", ("connection",)) def test_unverified_djerror(category): try: curr_plugins = getattr(p, "{}_plugins".format(category)) From 874c9cba2323be97df01a8d927bb029422302690 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 10:00:57 -0600 Subject: [PATCH 035/212] test_s3 uses bucket setup fixtures --- tests/conftest.py | 5 +++-- tests/test_s3.py | 25 ------------------------- 2 files changed, 3 insertions(+), 27 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 43a336254..fa51bb8a0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -269,6 +269,7 @@ def http_client(): @pytest.fixture(scope="session") def minio_client_bare(http_client): + """Initialize MinIO with an endpoint and access/secret keys.""" client = minio.Minio( S3_CONN_INFO["endpoint"], access_key=S3_CONN_INFO["access_key"], @@ -281,8 +282,8 @@ def minio_client_bare(http_client): @pytest.fixture(scope="session") def minio_client(minio_client_bare): - """Initialize MinIO with an endpoint and access/secret keys.""" - # Bootstrap MinIO bucket + """Initialize a MinIO client and create buckets for testing session.""" + # Setup MinIO bucket aws_region = "us-east-1" try: minio_client_bare.make_bucket(S3_CONN_INFO["bucket"], location=aws_region) diff --git a/tests/test_s3.py b/tests/test_s3.py index 7173f7650..b8fa0b958 100644 --- a/tests/test_s3.py +++ b/tests/test_s3.py @@ -9,31 +9,6 @@ from . import S3_CONN_INFO from minio import Minio -@pytest.fixture(scope='module') -def http_client(): - http_client = urllib3.PoolManager( - timeout=30, - cert_reqs="CERT_REQUIRED", - ca_certs=certifi.where(), - retries=urllib3.Retry( - total=3, backoff_factor=0.2, status_forcelist=[500, 502, 503, 504] - ), - ) - return http_client - - -@pytest.fixture(scope='module') -def minio_client(http_client): - # Initialize minioClient with an endpoint and access/secret keys. 
- minio_client = Minio( - S3_CONN_INFO["endpoint"], - access_key=S3_CONN_INFO["access_key"], - secret_key=S3_CONN_INFO["secret_key"], - secure=True, - http_client=http_client, - ) - return minio_client - @pytest.fixture(scope='session') def stores_config(): From 5568954548e60064c8e20956615d9f1076f258a0 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 10:06:56 -0600 Subject: [PATCH 036/212] Format with black --- tests/schema_external.py | 2 +- tests/test_s3.py | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/tests/schema_external.py b/tests/schema_external.py index 7702772fa..294ecb070 100644 --- a/tests/schema_external.py +++ b/tests/schema_external.py @@ -85,5 +85,5 @@ class FilepathS3(dj.Manual): """ -LOCALS_EXTERNAL= {k: v for k, v in locals().items() if inspect.isclass(v)} +LOCALS_EXTERNAL = {k: v for k, v in locals().items() if inspect.isclass(v)} __all__ = list(LOCALS_EXTERNAL) diff --git a/tests/test_s3.py b/tests/test_s3.py index b8fa0b958..b2add2695 100644 --- a/tests/test_s3.py +++ b/tests/test_s3.py @@ -10,7 +10,7 @@ from minio import Minio -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def stores_config(): stores_config = { "raw": dict(protocol="file", location=tempfile.mkdtemp()), @@ -30,7 +30,9 @@ def stores_config(): @pytest.fixture def schema_ext(connection_test, stores_config, enable_filepath_feature): - schema = dj.Schema(PREFIX + "_extern", context=LOCALS_EXTERNAL, connection=connection_test) + schema = dj.Schema( + PREFIX + "_extern", context=LOCALS_EXTERNAL, connection=connection_test + ) dj.config["stores"] = stores_config dj.config["cache"] = tempfile.mkdtemp() From 2ba91c47b39311c57cb1def2f13fdcdc49df7487 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 10:08:26 -0600 Subject: [PATCH 037/212] cp to tests --- tests/test_attach.py | 68 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 68 insertions(+) create mode 100644 tests/test_attach.py diff --git a/tests/test_attach.py b/tests/test_attach.py new file mode 100644 index 000000000..32ade5672 --- /dev/null +++ b/tests/test_attach.py @@ -0,0 +1,68 @@ +from nose.tools import assert_true, assert_equal, assert_not_equal +import tempfile +from pathlib import Path +import os + +from .schema_external import Attach + + +def test_attach_attributes(): + """test saving files in attachments""" + # create a mock file + table = Attach() + source_folder = tempfile.mkdtemp() + for i in range(2): + attach1 = Path(source_folder, "attach1.img") + data1 = os.urandom(100) + with attach1.open("wb") as f: + f.write(data1) + attach2 = Path(source_folder, "attach2.txt") + data2 = os.urandom(200) + with attach2.open("wb") as f: + f.write(data2) + table.insert1(dict(attach=i, img=attach1, txt=attach2)) + + download_folder = Path(tempfile.mkdtemp()) + keys, path1, path2 = table.fetch( + "KEY", "img", "txt", download_path=download_folder, order_by="KEY" + ) + + # verify that different attachment are renamed if their filenames collide + assert_not_equal(path1[0], path2[0]) + assert_not_equal(path1[0], path1[1]) + assert_equal(Path(path1[0]).parent, download_folder) + with Path(path1[-1]).open("rb") as f: + check1 = f.read() + with Path(path2[-1]).open("rb") as f: + check2 = f.read() + assert_equal(data1, check1) + assert_equal(data2, check2) + + # verify that existing files are not duplicated if their filename matches issue #592 + p1, p2 = (Attach & keys[0]).fetch1("img", "txt", download_path=download_folder) + assert_equal(p1, path1[0]) + assert_equal(p2, path2[0]) 
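+    # (fetching attach attributes materializes the files under download_path
+    # and returns their local paths, so the equality checks above mean the
+    # copies downloaded earlier were reused rather than written again)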
+ + +def test_return_string(): + """test returning string on fetch""" + # create a mock file + table = Attach() + source_folder = tempfile.mkdtemp() + + attach1 = Path(source_folder, "attach1.img") + data1 = os.urandom(100) + with attach1.open("wb") as f: + f.write(data1) + attach2 = Path(source_folder, "attach2.txt") + data2 = os.urandom(200) + with attach2.open("wb") as f: + f.write(data2) + table.insert1(dict(attach=2, img=attach1, txt=attach2)) + + download_folder = Path(tempfile.mkdtemp()) + keys, path1, path2 = table.fetch( + "KEY", "img", "txt", download_path=download_folder, order_by="KEY" + ) + + assert_true(isinstance(path1[0], str)) From d35aa3b96d1032113f9889d7004ba88ff07872d5 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 11:21:39 -0600 Subject: [PATCH 038/212] Move schema_ext to conftest --- tests/conftest.py | 58 ++++++++++++++++++++++++++++++++ tests/test_adapted_attributes.py | 15 --------- 2 files changed, 58 insertions(+), 15 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index fa51bb8a0..376416eff 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -22,6 +22,7 @@ schema_simple, schema_advanced, schema_adapted, + schema_external, ) @@ -37,6 +38,20 @@ def monkeymodule(): yield mp +@pytest.fixture +def enable_adapted_types(monkeypatch): + monkeypatch.setenv(ADAPTED_TYPE_SWITCH, "TRUE") + yield + monkeypatch.delenv(ADAPTED_TYPE_SWITCH, raising=True) + + +@pytest.fixture +def enable_filepath_feature(monkeypatch): + monkeypatch.setenv(FILEPATH_FEATURE_SWITCH, "TRUE") + yield + monkeypatch.delenv(FILEPATH_FEATURE_SWITCH, raising=True) + + @pytest.fixture(scope="session") def connection_root_bare(): connection = dj.Connection( @@ -160,6 +175,24 @@ def connection_test(connection_root): connection.close() +@pytest.fixture(scope="session") +def stores_config(): + stores_config = { + "raw": dict(protocol="file", location=tempfile.mkdtemp()), + "repo": dict( + stage=tempfile.mkdtemp(), protocol="file", location=tempfile.mkdtemp() + ), + "repo-s3": dict( + S3_CONN_INFO, protocol="s3", location="dj/repo", stage=tempfile.mkdtemp() + ), + "local": dict(protocol="file", location=tempfile.mkdtemp(), subfolding=(1, 1)), + "share": dict( + S3_CONN_INFO, protocol="s3", location="dj/store/repo", subfolding=(2, 4) + ), + } + return stores_config + + @pytest.fixture def schema_any(connection_test): schema_any = dj.Schema( @@ -253,6 +286,31 @@ def schema_adv(connection_test): schema.drop() +@pytest.fixture +def schema_ext(connection_test, stores_config, enable_filepath_feature): + schema = dj.Schema( + PREFIX + "_extern", + context=schema_external.LOCALS_EXTERNAL, + connection=connection_test, + ) + dj.config["stores"] = stores_config + dj.config["cache"] = tempfile.mkdtemp() + + schema(schema_external.Simple) + schema(schema_external.SimpleRemote) + schema(schema_external.Seed) + schema(schema_external.Dimension) + schema(schema_external.Image) + schema(schema_external.Attach) + + # dj.errors._switch_filepath_types(True) + schema(schema_external.Filepath) + schema(schema_external.FilepathS3) + # dj.errors._switch_filepath_types(False) + yield schema + schema.drop() + + @pytest.fixture(scope="session") def http_client(): # Initialize httpClient with relevant timeout. 
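     # (the PoolManager constructed in the fixture body keeps its earlier
     # settings: a 30s timeout, the certifi CA bundle, and three retries
     # with backoff for transient 5xx responses)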
diff --git a/tests/test_adapted_attributes.py b/tests/test_adapted_attributes.py index bd0ce7713..8a806fd25 100644 --- a/tests/test_adapted_attributes.py +++ b/tests/test_adapted_attributes.py @@ -2,7 +2,6 @@ import pytest import tempfile import datajoint as dj -from datajoint.errors import ADAPTED_TYPE_SWITCH, FILEPATH_FEATURE_SWITCH import networkx as nx from itertools import zip_longest from . import schema_adapted @@ -15,20 +14,6 @@ def adapted_graph_instance(): yield schema_adapted.GraphAdapter() -@pytest.fixture -def enable_adapted_types(monkeypatch): - monkeypatch.setenv(ADAPTED_TYPE_SWITCH, "TRUE") - yield - monkeypatch.delenv(ADAPTED_TYPE_SWITCH, raising=True) - - -@pytest.fixture -def enable_filepath_feature(monkeypatch): - monkeypatch.setenv(FILEPATH_FEATURE_SWITCH, "TRUE") - yield - monkeypatch.delenv(FILEPATH_FEATURE_SWITCH, raising=True) - - @pytest.fixture def schema_name_custom_datatype(): schema_name = PREFIX + "_test_custom_datatype" From 18626c88981157638db7dabe4d6fe938bc6d2d80 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 11:21:58 -0600 Subject: [PATCH 039/212] Migrate test_attach --- tests/test_attach.py | 27 +++++++++++++-------------- tests/test_s3.py | 41 ----------------------------------------- 2 files changed, 13 insertions(+), 55 deletions(-) diff --git a/tests/test_attach.py b/tests/test_attach.py index 32ade5672..654feef5b 100644 --- a/tests/test_attach.py +++ b/tests/test_attach.py @@ -1,13 +1,12 @@ -from nose.tools import assert_true, assert_equal, assert_not_equal +import pytest import tempfile from pathlib import Path import os - from .schema_external import Attach -def test_attach_attributes(): - """test saving files in attachments""" +def test_attach_attributes(schema_ext, minio_client): + """Test saving files in attachments""" # create a mock file table = Attach() source_folder = tempfile.mkdtemp() @@ -28,24 +27,24 @@ def test_attach_attributes(): ) # verify that different attachment are renamed if their filenames collide - assert_not_equal(path1[0], path2[0]) - assert_not_equal(path1[0], path1[1]) - assert_equal(Path(path1[0]).parent, download_folder) + assert path1[0] != path2[0] + assert path1[0] != path1[1] + assert Path(path1[0]).parent == download_folder with Path(path1[-1]).open("rb") as f: check1 = f.read() with Path(path2[-1]).open("rb") as f: check2 = f.read() - assert_equal(data1, check1) - assert_equal(data2, check2) + assert data1 == check1 + assert data2 == check2 # verify that existing files are not duplicated if their filename matches issue #592 p1, p2 = (Attach & keys[0]).fetch1("img", "txt", download_path=download_folder) - assert_equal(p1, path1[0]) - assert_equal(p2, path2[0]) + assert p1 == path1[0] + assert p2 == path2[0] -def test_return_string(): - """test returning string on fetch""" +def test_return_string(schema_ext, minio_client): + """Test returning string on fetch""" # create a mock file table = Attach() source_folder = tempfile.mkdtemp() @@ -65,4 +64,4 @@ def test_return_string(): "KEY", "img", "txt", download_path=download_folder, order_by="KEY" ) - assert_true(isinstance(path1[0], str)) + assert isinstance(path1[0], str) diff --git a/tests/test_s3.py b/tests/test_s3.py index b2add2695..43b1c2263 100644 --- a/tests/test_s3.py +++ b/tests/test_s3.py @@ -10,47 +10,6 @@ from minio import Minio -@pytest.fixture(scope="session") -def stores_config(): - stores_config = { - "raw": dict(protocol="file", location=tempfile.mkdtemp()), - "repo": dict( - stage=tempfile.mkdtemp(), protocol="file", 
location=tempfile.mkdtemp() - ), - "repo-s3": dict( - S3_CONN_INFO, protocol="s3", location="dj/repo", stage=tempfile.mkdtemp() - ), - "local": dict(protocol="file", location=tempfile.mkdtemp(), subfolding=(1, 1)), - "share": dict( - S3_CONN_INFO, protocol="s3", location="dj/store/repo", subfolding=(2, 4) - ), - } - return stores_config - - -@pytest.fixture -def schema_ext(connection_test, stores_config, enable_filepath_feature): - schema = dj.Schema( - PREFIX + "_extern", context=LOCALS_EXTERNAL, connection=connection_test - ) - dj.config["stores"] = stores_config - dj.config["cache"] = tempfile.mkdtemp() - - schema(Simple) - schema(SimpleRemote) - schema(Seed) - schema(Dimension) - schema(Image) - schema(Attach) - - # dj.errors._switch_filepath_types(True) - schema(Filepath) - schema(FilepathS3) - # dj.errors._switch_filepath_types(False) - yield schema - schema.drop() - - class TestS3: def test_connection(self, http_client, minio_client): assert minio_client.bucket_exists(S3_CONN_INFO["bucket"]) From 81ef9a9a1e97f7015f478414d3a418572cf7751b Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 17:28:54 +0000 Subject: [PATCH 040/212] Add @A-Baji suggestions for SCHEMA_NAME --- tests/test_adapted_attributes.py | 20 ++++++++------------ 1 file changed, 8 insertions(+), 12 deletions(-) diff --git a/tests/test_adapted_attributes.py b/tests/test_adapted_attributes.py index 2ec0c239f..ffa85f8a5 100644 --- a/tests/test_adapted_attributes.py +++ b/tests/test_adapted_attributes.py @@ -9,6 +9,8 @@ from .schema_adapted import Connectivity, Layout from . import PREFIX, S3_CONN_INFO +SCHEMA_NAME = PREFIX + "_test_custom_datatype" + @pytest.fixture def adapted_graph_instance(): @@ -29,15 +31,9 @@ def enable_filepath_feature(monkeypatch): monkeypatch.delenv(FILEPATH_FEATURE_SWITCH, raising=True) -@pytest.fixture -def schema_name_custom_datatype(): - schema_name = PREFIX + "_test_custom_datatype" - return schema_name - - @pytest.fixture def schema_ad( - schema_name_custom_datatype, connection_test, adapted_graph_instance, + connection_test, adapted_graph_instance, enable_adapted_types, enable_filepath_feature ): stores_config = { @@ -52,7 +48,7 @@ def schema_ad( 'graph': adapted_graph_instance, 'layout_to_filepath': layout_to_filepath, } - schema = dj.schema(schema_name_custom_datatype, context=context, connection=connection_test) + schema = dj.schema(SCHEMA_NAME, context=context, connection=connection_test) graph = adapted_graph_instance schema(schema_adapted.Connectivity) schema(schema_adapted.Layout) @@ -61,19 +57,19 @@ def schema_ad( @pytest.fixture -def local_schema(schema_ad, schema_name_custom_datatype): +def local_schema(schema_ad): """Fixture for testing spawned classes""" - local_schema = dj.Schema(schema_name_custom_datatype) + local_schema = dj.Schema(SCHEMA_NAME) local_schema.spawn_missing_classes() yield local_schema local_schema.drop() @pytest.fixture -def schema_virtual_module(schema_ad, schema_name_custom_datatype, adapted_graph_instance): +def schema_virtual_module(schema_ad, adapted_graph_instance): """Fixture for testing virtual modules""" schema_virtual_module = dj.VirtualModule( - "virtual_module", schema_name_custom_datatype, add_objects={"graph": adapted_graph_instance} + "virtual_module", SCHEMA_NAME, add_objects={"graph": adapted_graph_instance} ) return schema_virtual_module From eff463dd239d911d094ffef35ca553796af0473d Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 17:29:04 +0000 Subject: [PATCH 041/212] Format with black --- tests/conftest.py | 20 
++++++++++++++------ tests/schema_adapted.py | 1 + tests/schema_advanced.py | 2 ++ tests/test_adapted_attributes.py | 22 +++++++++++++--------- tests/test_connection.py | 4 +++- tests/test_json.py | 7 ++++--- tests/test_plugin.py | 5 +++-- 7 files changed, 40 insertions(+), 21 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index aed3ca468..2c4063a1d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -11,19 +11,24 @@ from pathlib import Path import tempfile from datajoint import errors -from datajoint.errors import ( - ADAPTED_TYPE_SWITCH, FILEPATH_FEATURE_SWITCH -) +from datajoint.errors import ADAPTED_TYPE_SWITCH, FILEPATH_FEATURE_SWITCH from . import ( - PREFIX, CONN_INFO, S3_CONN_INFO, - schema, schema_simple, schema_advanced, schema_adapted + PREFIX, + CONN_INFO, + S3_CONN_INFO, + schema, + schema_simple, + schema_advanced, + schema_adapted, ) + @pytest.fixture(scope="session") def monkeysession(): with pytest.MonkeyPatch.context() as mp: yield mp + @pytest.fixture(scope="module") def monkeymodule(): with pytest.MonkeyPatch.context() as mp: @@ -163,7 +168,9 @@ def schema_simp(connection_test): @pytest.fixture def schema_adv(connection_test): schema = dj.Schema( - PREFIX + "_advanced", schema_advanced.LOCALS_ADVANCED, connection=connection_test + PREFIX + "_advanced", + schema_advanced.LOCALS_ADVANCED, + connection=connection_test, ) schema(schema_advanced.Person) schema(schema_advanced.Parent) @@ -191,6 +198,7 @@ def httpClient(): ) yield httpClient + @pytest.fixture def minioClient(): # Initialize minioClient with an endpoint and access/secret keys. diff --git a/tests/schema_adapted.py b/tests/schema_adapted.py index 68a7e965a..ab9a02e76 100644 --- a/tests/schema_adapted.py +++ b/tests/schema_adapted.py @@ -48,6 +48,7 @@ class Connectivity(dj.Manual): conn_graph = null : """ + class Layout(dj.Manual): definition = """ # stores graph layout diff --git a/tests/schema_advanced.py b/tests/schema_advanced.py index 649ff186a..6a35cb34a 100644 --- a/tests/schema_advanced.py +++ b/tests/schema_advanced.py @@ -1,6 +1,7 @@ import datajoint as dj import inspect + class Person(dj.Manual): definition = """ person_id : int @@ -134,5 +135,6 @@ class GlobalSynapse(dj.Manual): -> Cell.proj(post_slice="slice", post_cell="cell") """ + LOCALS_ADVANCED = {k: v for k, v in locals().items() if inspect.isclass(v)} __all__ = list(LOCALS_ADVANCED) diff --git a/tests/test_adapted_attributes.py b/tests/test_adapted_attributes.py index ffa85f8a5..29d773473 100644 --- a/tests/test_adapted_attributes.py +++ b/tests/test_adapted_attributes.py @@ -19,34 +19,39 @@ def adapted_graph_instance(): @pytest.fixture def enable_adapted_types(monkeypatch): - monkeypatch.setenv(ADAPTED_TYPE_SWITCH, 'TRUE') + monkeypatch.setenv(ADAPTED_TYPE_SWITCH, "TRUE") yield monkeypatch.delenv(ADAPTED_TYPE_SWITCH, raising=True) @pytest.fixture def enable_filepath_feature(monkeypatch): - monkeypatch.setenv(FILEPATH_FEATURE_SWITCH, 'TRUE') + monkeypatch.setenv(FILEPATH_FEATURE_SWITCH, "TRUE") yield monkeypatch.delenv(FILEPATH_FEATURE_SWITCH, raising=True) @pytest.fixture def schema_ad( - connection_test, adapted_graph_instance, - enable_adapted_types, enable_filepath_feature + connection_test, + adapted_graph_instance, + enable_adapted_types, + enable_filepath_feature, ): stores_config = { "repo-s3": dict( - S3_CONN_INFO, protocol="s3", location="adapted/repo", stage=tempfile.mkdtemp() + S3_CONN_INFO, + protocol="s3", + location="adapted/repo", + stage=tempfile.mkdtemp(), ) } dj.config["stores"] = stores_config 
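     # (the adapter instances created next are passed into the schema context
     # so that the custom attribute types referenced in the table definitions
     # resolve when the schema is declared)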
layout_to_filepath = schema_adapted.LayoutToFilepath() context = { **schema_adapted.LOCALS_ADAPTED, - 'graph': adapted_graph_instance, - 'layout_to_filepath': layout_to_filepath, + "graph": adapted_graph_instance, + "layout_to_filepath": layout_to_filepath, } schema = dj.schema(SCHEMA_NAME, context=context, connection=connection_test) graph = adapted_graph_instance @@ -91,7 +96,7 @@ def test_adapted_type(schema_ad): c.delete() -@pytest.mark.skip(reason='misconfigured s3 fixtures') +@pytest.mark.skip(reason="misconfigured s3 fixtures") def test_adapted_filepath_type(schema_ad): """https://github.com/datajoint/datajoint-python/issues/684""" c = Connectivity() @@ -127,7 +132,6 @@ def test_adapted_spawned(local_schema, enable_adapted_types): c.delete() - def test_adapted_virtual(schema_virtual_module): c = schema_virtual_module.Connectivity() graphs = [ diff --git a/tests/test_connection.py b/tests/test_connection.py index a73677aec..8cdbbbff5 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -12,7 +12,9 @@ @pytest.fixture def schema(connection_test): - schema = dj.Schema(PREFIX + "_transactions", context=dict(), connection=connection_test) + schema = dj.Schema( + PREFIX + "_transactions", context=dict(), connection=connection_test + ) yield schema schema.drop() diff --git a/tests/test_json.py b/tests/test_json.py index 37a33c825..c1caaeedd 100644 --- a/tests/test_json.py +++ b/tests/test_json.py @@ -107,6 +107,7 @@ def test_insert_update(schema): q.delete_quick() assert not q + def test_describe(schema): rel = Team() context = inspect.currentframe().f_globals @@ -114,6 +115,7 @@ def test_describe(schema): s2 = declare(rel.full_table_name, rel.describe(), context) assert s1 == s2 + def test_restrict(schema): # dict assert (Team & {"car.name": "Chaching"}).fetch1("name") == "business" @@ -139,9 +141,7 @@ def test_restrict(schema): assert (Team & {"car": None}).fetch1("name") == "marketing" - assert (Team & {"car.tire_pressure": [34, 30, 27, 32]}).fetch1( - "name" - ) == "business" + assert (Team & {"car.tire_pressure": [34, 30, 27, 32]}).fetch1("name") == "business" assert ( Team & {"car.headlights[1]": {"side": "right", "hyper_white": True}} @@ -175,6 +175,7 @@ def test_restrict(schema): & """`car`->>'$.headlights[1]' = '{"side": "right", "hyper_white": true}'""" ).fetch1("name") == "business", "2nd `headlight` object did not match" + def test_proj(schema): # proj necessary since we need to rename indexed value into a proper attribute name assert Team.proj(car_length="car.length").fetch( diff --git a/tests/test_plugin.py b/tests/test_plugin.py index e41224116..ddb8b3bfc 100644 --- a/tests/test_plugin.py +++ b/tests/test_plugin.py @@ -23,7 +23,7 @@ def test_normal_djerror(): assert e.__cause__ is None -@pytest.mark.parametrize('category', ('connection', )) +@pytest.mark.parametrize("category", ("connection",)) def test_verified_djerror(category): try: curr_plugins = getattr(p, "{}_plugins".format(category)) @@ -41,7 +41,8 @@ def test_verified_djerror(category): def test_verified_djerror_type(): test_verified_djerror(category="type") -@pytest.mark.parametrize('category', ('connection', )) + +@pytest.mark.parametrize("category", ("connection",)) def test_unverified_djerror(category): try: curr_plugins = getattr(p, "{}_plugins".format(category)) From 186d7cdc6b3b40936542f21517556941b1ad393e Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 11:42:55 -0600 Subject: [PATCH 042/212] Merge #1116 changes --- tests/test_adapted_attributes.py | 25 
+++++++------------------ 1 file changed, 7 insertions(+), 18 deletions(-) diff --git a/tests/test_adapted_attributes.py b/tests/test_adapted_attributes.py index bd0ce7713..82fefe9f1 100644 --- a/tests/test_adapted_attributes.py +++ b/tests/test_adapted_attributes.py @@ -9,6 +9,8 @@ from .schema_adapted import Connectivity, Layout from . import PREFIX, S3_CONN_INFO +SCHEMA_NAME = PREFIX + "_test_custom_datatype" + @pytest.fixture def adapted_graph_instance(): @@ -29,15 +31,8 @@ def enable_filepath_feature(monkeypatch): monkeypatch.delenv(FILEPATH_FEATURE_SWITCH, raising=True) -@pytest.fixture -def schema_name_custom_datatype(): - schema_name = PREFIX + "_test_custom_datatype" - return schema_name - - @pytest.fixture def schema_ad( - schema_name_custom_datatype, connection_test, adapted_graph_instance, enable_adapted_types, @@ -58,9 +53,7 @@ def schema_ad( "graph": adapted_graph_instance, "layout_to_filepath": layout_to_filepath, } - schema = dj.schema( - schema_name_custom_datatype, context=context, connection=connection_test - ) + schema = dj.schema(SCHEMA_NAME, context=context, connection=connection_test) graph = adapted_graph_instance schema(schema_adapted.Connectivity) schema(schema_adapted.Layout) @@ -69,23 +62,19 @@ def schema_ad( @pytest.fixture -def local_schema(schema_ad, schema_name_custom_datatype): +def local_schema(schema_ad): """Fixture for testing spawned classes""" - local_schema = dj.Schema(schema_name_custom_datatype) + local_schema = dj.Schema(SCHEMA_NAME) local_schema.spawn_missing_classes() yield local_schema local_schema.drop() @pytest.fixture -def schema_virtual_module( - schema_ad, schema_name_custom_datatype, adapted_graph_instance -): +def schema_virtual_module(schema_ad, adapted_graph_instance): """Fixture for testing virtual modules""" schema_virtual_module = dj.VirtualModule( - "virtual_module", - schema_name_custom_datatype, - add_objects={"graph": adapted_graph_instance}, + "virtual_module", SCHEMA_NAME, add_objects={"graph": adapted_graph_instance} ) return schema_virtual_module From a76bbd5a9c50009caeedbd9a39cff3d126e3f1af Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 11:54:22 -0600 Subject: [PATCH 043/212] Format with black --- tests/conftest.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 667caec6e..23d42574d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -33,14 +33,12 @@ ) - @pytest.fixture(scope="session") def monkeysession(): with pytest.MonkeyPatch.context() as mp: yield mp - @pytest.fixture(scope="module") def monkeymodule(): with pytest.MonkeyPatch.context() as mp: From 13b128d6cf2d1181de96060aa7f1f8b260cd8685 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 17:57:43 +0000 Subject: [PATCH 044/212] Format with black --- tests/conftest.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 00c94f8aa..fedaa20c0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -27,14 +27,12 @@ ) - @pytest.fixture(scope="session") def monkeysession(): with pytest.MonkeyPatch.context() as mp: yield mp - @pytest.fixture(scope="module") def monkeymodule(): with pytest.MonkeyPatch.context() as mp: From 10e0a90c6b26cbb061c4c38477b59a1142797481 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 12:09:48 -0600 Subject: [PATCH 045/212] Finish migrating test_s3 --- tests/test_s3.py | 48 +++++++++++++++++++++++------------------------- 1 file changed, 23 insertions(+), 25 deletions(-) diff --git a/tests/test_s3.py 
b/tests/test_s3.py index 43b1c2263..090d6acf0 100644 --- a/tests/test_s3.py +++ b/tests/test_s3.py @@ -1,7 +1,6 @@ import pytest import urllib3 import certifi -from nose.tools import assert_true, raises from .schema_external import SimpleRemote from datajoint.errors import DataJointError from datajoint.hash import uuid_from_buffer @@ -17,36 +16,35 @@ def test_connection(self, http_client, minio_client): def test_connection_secure(self, minio_client): assert minio_client.bucket_exists(S3_CONN_INFO["bucket"]) - def test_remove_object_exception(self): - # TODO: mv to failing block - with pytest.raises(DataJointError): - # https://github.com/datajoint/datajoint-python/issues/952 + def test_remove_object_exception(self, schema_ext): + # https://github.com/datajoint/datajoint-python/issues/952 - # Insert some test data and remove it so that the external table is populated - test = [1, [1, 2, 3]] - SimpleRemote.insert1(test) - SimpleRemote.delete() + # Insert some test data and remove it so that the external table is populated + test = [1, [1, 2, 3]] + SimpleRemote.insert1(test) + SimpleRemote.delete() - # Save the old external table minio client - old_client = schema.external["share"].s3.client + # Save the old external table minio client + old_client = schema_ext.external["share"].s3.client - # Apply our new minio client which has a user that does not exist - schema.external["share"].s3.client = Minio( - S3_CONN_INFO["endpoint"], - access_key="jeffjeff", - secret_key="jeffjeff", - secure=False, - ) + # Apply our new minio client which has a user that does not exist + schema_ext.external["share"].s3.client = Minio( + S3_CONN_INFO["endpoint"], + access_key="jeffjeff", + secret_key="jeffjeff", + secure=False, + ) - # This method returns a list of errors - error_list = schema.external["share"].delete( - delete_external_files=True, errors_as_string=False - ) + # This method returns a list of errors + error_list = schema_ext.external["share"].delete( + delete_external_files=True, errors_as_string=False + ) - # Teardown - schema.external["share"].s3.client = old_client - schema.external["share"].delete(delete_external_files=True) + # Teardown + schema_ext.external["share"].s3.client = old_client + schema_ext.external["share"].delete(delete_external_files=True) + with pytest.raises(DataJointError): # Raise the error we want if the error matches the expected uuid if str(error_list[0][0]) == str(uuid_from_buffer(pack(test[1]))): raise error_list[0][2] From f993076e8c9f1a2f7ac8f2b4cedf297cbccf1938 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 12:17:56 -0600 Subject: [PATCH 046/212] Change scope of raises block --- tests/test_s3.py | 57 ++++++++++++++++++++++++------------------------ 1 file changed, 28 insertions(+), 29 deletions(-) diff --git a/tests/test_s3.py b/tests/test_s3.py index b2add2695..829ec104e 100644 --- a/tests/test_s3.py +++ b/tests/test_s3.py @@ -58,36 +58,35 @@ def test_connection(self, http_client, minio_client): def test_connection_secure(self, minio_client): assert minio_client.bucket_exists(S3_CONN_INFO["bucket"]) - def test_remove_object_exception(self): - # TODO: mv to failing block - with pytest.raises(DataJointError): - # https://github.com/datajoint/datajoint-python/issues/952 - - # Insert some test data and remove it so that the external table is populated - test = [1, [1, 2, 3]] - SimpleRemote.insert1(test) - SimpleRemote.delete() - - # Save the old external table minio client - old_client = schema.external["share"].s3.client - - # Apply our new minio client 
which has a user that does not exist - schema.external["share"].s3.client = Minio( - S3_CONN_INFO["endpoint"], - access_key="jeffjeff", - secret_key="jeffjeff", - secure=False, - ) - - # This method returns a list of errors - error_list = schema.external["share"].delete( - delete_external_files=True, errors_as_string=False - ) - - # Teardown - schema.external["share"].s3.client = old_client - schema.external["share"].delete(delete_external_files=True) + def test_remove_object_exception(self, schema_ext): + # https://github.com/datajoint/datajoint-python/issues/952 + + # Insert some test data and remove it so that the external table is populated + test = [1, [1, 2, 3]] + SimpleRemote.insert1(test) + SimpleRemote.delete() + + # Save the old external table minio client + old_client = schema_ext.external["share"].s3.client + + # Apply our new minio client which has a user that does not exist + schema_ext.external["share"].s3.client = Minio( + S3_CONN_INFO["endpoint"], + access_key="jeffjeff", + secret_key="jeffjeff", + secure=False, + ) + + # This method returns a list of errors + error_list = schema_ext.external["share"].delete( + delete_external_files=True, errors_as_string=False + ) + + # Teardown + schema_ext.external["share"].s3.client = old_client + schema_ext.external["share"].delete(delete_external_files=True) + with pytest.raises(DataJointError): # Raise the error we want if the error matches the expected uuid if str(error_list[0][0]) == str(uuid_from_buffer(pack(test[1]))): raise error_list[0][2] From 3cd99f1f01bfc9cf6df3b0a9369d3cc494e1d9aa Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 12:19:36 -0600 Subject: [PATCH 047/212] Move schema_ext to conftest --- tests/conftest.py | 65 ++++++++++++++++++++++++++++---- tests/test_adapted_attributes.py | 15 ++------ tests/test_s3.py | 42 --------------------- 3 files changed, 61 insertions(+), 61 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 4f70c5f3a..fedaa20c0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -23,13 +23,7 @@ schema_simple, schema_advanced, schema_adapted, - PREFIX, - CONN_INFO, - S3_CONN_INFO, - schema, - schema_simple, - schema_advanced, - schema_adapted, + schema_external, ) @@ -45,6 +39,20 @@ def monkeymodule(): yield mp +@pytest.fixture +def enable_adapted_types(monkeypatch): + monkeypatch.setenv(ADAPTED_TYPE_SWITCH, "TRUE") + yield + monkeypatch.delenv(ADAPTED_TYPE_SWITCH, raising=True) + + +@pytest.fixture +def enable_filepath_feature(monkeypatch): + monkeypatch.setenv(FILEPATH_FEATURE_SWITCH, "TRUE") + yield + monkeypatch.delenv(FILEPATH_FEATURE_SWITCH, raising=True) + + @pytest.fixture(scope="session") def connection_root_bare(): connection = dj.Connection( @@ -168,6 +176,24 @@ def connection_test(connection_root): connection.close() +@pytest.fixture(scope="session") +def stores_config(): + stores_config = { + "raw": dict(protocol="file", location=tempfile.mkdtemp()), + "repo": dict( + stage=tempfile.mkdtemp(), protocol="file", location=tempfile.mkdtemp() + ), + "repo-s3": dict( + S3_CONN_INFO, protocol="s3", location="dj/repo", stage=tempfile.mkdtemp() + ), + "local": dict(protocol="file", location=tempfile.mkdtemp(), subfolding=(1, 1)), + "share": dict( + S3_CONN_INFO, protocol="s3", location="dj/store/repo", subfolding=(2, 4) + ), + } + return stores_config + + @pytest.fixture def schema_any(connection_test): schema_any = dj.Schema( @@ -261,6 +287,31 @@ def schema_adv(connection_test): schema.drop() +@pytest.fixture +def schema_ext(connection_test, stores_config, 
enable_filepath_feature): + schema = dj.Schema( + PREFIX + "_extern", + context=schema_external.LOCALS_EXTERNAL, + connection=connection_test, + ) + dj.config["stores"] = stores_config + dj.config["cache"] = tempfile.mkdtemp() + + schema(schema_external.Simple) + schema(schema_external.SimpleRemote) + schema(schema_external.Seed) + schema(schema_external.Dimension) + schema(schema_external.Image) + schema(schema_external.Attach) + + # dj.errors._switch_filepath_types(True) + schema(schema_external.Filepath) + schema(schema_external.FilepathS3) + # dj.errors._switch_filepath_types(False) + yield schema + schema.drop() + + @pytest.fixture(scope="session") def http_client(): # Initialize httpClient with relevant timeout. diff --git a/tests/test_adapted_attributes.py b/tests/test_adapted_attributes.py index 61166f68f..cf06575c6 100644 --- a/tests/test_adapted_attributes.py +++ b/tests/test_adapted_attributes.py @@ -2,7 +2,6 @@ import pytest import tempfile import datajoint as dj -from datajoint.errors import ADAPTED_TYPE_SWITCH, FILEPATH_FEATURE_SWITCH import networkx as nx from itertools import zip_longest from . import schema_adapted @@ -20,17 +19,9 @@ def adapted_graph_instance(): @pytest.fixture -def enable_adapted_types(monkeypatch): - monkeypatch.setenv(ADAPTED_TYPE_SWITCH, "TRUE") - yield - monkeypatch.delenv(ADAPTED_TYPE_SWITCH, raising=True) - - -@pytest.fixture -def enable_filepath_feature(monkeypatch): - monkeypatch.setenv(FILEPATH_FEATURE_SWITCH, "TRUE") - yield - monkeypatch.delenv(FILEPATH_FEATURE_SWITCH, raising=True) +def schema_name_custom_datatype(): + schema_name = PREFIX + "_test_custom_datatype" + return schema_name @pytest.fixture diff --git a/tests/test_s3.py b/tests/test_s3.py index 829ec104e..090d6acf0 100644 --- a/tests/test_s3.py +++ b/tests/test_s3.py @@ -1,7 +1,6 @@ import pytest import urllib3 import certifi -from nose.tools import assert_true, raises from .schema_external import SimpleRemote from datajoint.errors import DataJointError from datajoint.hash import uuid_from_buffer @@ -10,47 +9,6 @@ from minio import Minio -@pytest.fixture(scope="session") -def stores_config(): - stores_config = { - "raw": dict(protocol="file", location=tempfile.mkdtemp()), - "repo": dict( - stage=tempfile.mkdtemp(), protocol="file", location=tempfile.mkdtemp() - ), - "repo-s3": dict( - S3_CONN_INFO, protocol="s3", location="dj/repo", stage=tempfile.mkdtemp() - ), - "local": dict(protocol="file", location=tempfile.mkdtemp(), subfolding=(1, 1)), - "share": dict( - S3_CONN_INFO, protocol="s3", location="dj/store/repo", subfolding=(2, 4) - ), - } - return stores_config - - -@pytest.fixture -def schema_ext(connection_test, stores_config, enable_filepath_feature): - schema = dj.Schema( - PREFIX + "_extern", context=LOCALS_EXTERNAL, connection=connection_test - ) - dj.config["stores"] = stores_config - dj.config["cache"] = tempfile.mkdtemp() - - schema(Simple) - schema(SimpleRemote) - schema(Seed) - schema(Dimension) - schema(Image) - schema(Attach) - - # dj.errors._switch_filepath_types(True) - schema(Filepath) - schema(FilepathS3) - # dj.errors._switch_filepath_types(False) - yield schema - schema.drop() - - class TestS3: def test_connection(self, http_client, minio_client): assert minio_client.bucket_exists(S3_CONN_INFO["bucket"]) From 124b1fb907a5585b0026a7f684fd1f1481d43016 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 12:23:43 -0600 Subject: [PATCH 048/212] Remove duplicates from merge conflict resolution --- tests/conftest.py | 7 ------- 
tests/test_adapted_attributes.py | 2 -- 2 files changed, 9 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 23d42574d..dc984616c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -23,13 +23,6 @@ schema_simple, schema_advanced, schema_adapted, - PREFIX, - CONN_INFO, - S3_CONN_INFO, - schema, - schema_simple, - schema_advanced, - schema_adapted, ) diff --git a/tests/test_adapted_attributes.py b/tests/test_adapted_attributes.py index 61166f68f..82fefe9f1 100644 --- a/tests/test_adapted_attributes.py +++ b/tests/test_adapted_attributes.py @@ -11,8 +11,6 @@ SCHEMA_NAME = PREFIX + "_test_custom_datatype" -SCHEMA_NAME = PREFIX + "_test_custom_datatype" - @pytest.fixture def adapted_graph_instance(): From 7bd840edf87358e3efb9031b0d894a400aa2bc38 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 12:33:27 -0600 Subject: [PATCH 049/212] Remove duplicates from merge conflict resolution --- tests/conftest.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index fedaa20c0..376416eff 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -14,7 +14,6 @@ import tempfile from datajoint import errors from datajoint.errors import ADAPTED_TYPE_SWITCH, FILEPATH_FEATURE_SWITCH -from datajoint.errors import ADAPTED_TYPE_SWITCH, FILEPATH_FEATURE_SWITCH from . import ( PREFIX, CONN_INFO, From 3b5047b346b66cf6a1f4c32ea9a3fa70ce574d42 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 12:47:38 -0600 Subject: [PATCH 050/212] Add @A-Baji suggestions --- tests/conftest.py | 3 --- tests/test_adapted_attributes.py | 6 ------ 2 files changed, 9 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index fedaa20c0..5a4858636 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -303,11 +303,8 @@ def schema_ext(connection_test, stores_config, enable_filepath_feature): schema(schema_external.Dimension) schema(schema_external.Image) schema(schema_external.Attach) - - # dj.errors._switch_filepath_types(True) schema(schema_external.Filepath) schema(schema_external.FilepathS3) - # dj.errors._switch_filepath_types(False) yield schema schema.drop() diff --git a/tests/test_adapted_attributes.py b/tests/test_adapted_attributes.py index a0af540c7..bbe8456f5 100644 --- a/tests/test_adapted_attributes.py +++ b/tests/test_adapted_attributes.py @@ -16,12 +16,6 @@ def adapted_graph_instance(): yield schema_adapted.GraphAdapter() -@pytest.fixture -def schema_name_custom_datatype(): - schema_name = PREFIX + "_test_custom_datatype" - return schema_name - - @pytest.fixture def schema_ad( connection_test, From 77e9cc91856ef9662a8844b26f7397ba0ada05fe Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 14:40:17 -0600 Subject: [PATCH 051/212] cp to tests --- tests/test_autopopulate.py | 158 +++++++++++++++++++++++++++++++++++++ 1 file changed, 158 insertions(+) create mode 100644 tests/test_autopopulate.py diff --git a/tests/test_autopopulate.py b/tests/test_autopopulate.py new file mode 100644 index 000000000..7a0a58e39 --- /dev/null +++ b/tests/test_autopopulate.py @@ -0,0 +1,158 @@ +from nose.tools import assert_equal, assert_false, assert_true, raises +from . 
import schema, PREFIX +from datajoint import DataJointError +import datajoint as dj + + +class TestPopulate: + """ + Test base relations: insert, delete + """ + + def setUp(self): + self.user = schema.User() + self.subject = schema.Subject() + self.experiment = schema.Experiment() + self.trial = schema.Trial() + self.ephys = schema.Ephys() + self.channel = schema.Ephys.Channel() + + def tearDown(self): + # delete automatic tables just in case + self.channel.delete_quick() + self.ephys.delete_quick() + self.trial.Condition.delete_quick() + self.trial.delete_quick() + self.experiment.delete_quick() + + def test_populate(self): + # test simple populate + assert_true(self.subject, "root tables are empty") + assert_false(self.experiment, "table already filled?") + self.experiment.populate() + assert_true( + len(self.experiment) + == len(self.subject) * self.experiment.fake_experiments_per_subject + ) + + # test restricted populate + assert_false(self.trial, "table already filled?") + restriction = self.subject.proj(animal="subject_id").fetch("KEY")[0] + d = self.trial.connection.dependencies + d.load() + self.trial.populate(restriction) + assert_true(self.trial, "table was not populated") + key_source = self.trial.key_source + assert_equal(len(key_source & self.trial), len(key_source & restriction)) + assert_equal(len(key_source - self.trial), len(key_source - restriction)) + + # test subtable populate + assert_false(self.ephys) + assert_false(self.channel) + self.ephys.populate() + assert_true(self.ephys) + assert_true(self.channel) + + def test_populate_with_success_count(self): + # test simple populate + assert_true(self.subject, "root tables are empty") + assert_false(self.experiment, "table already filled?") + ret = self.experiment.populate() + success_count = ret["success_count"] + assert_equal(len(self.experiment.key_source & self.experiment), success_count) + + # test restricted populate + assert_false(self.trial, "table already filled?") + restriction = self.subject.proj(animal="subject_id").fetch("KEY")[0] + d = self.trial.connection.dependencies + d.load() + ret = self.trial.populate(restriction, suppress_errors=True) + success_count = ret["success_count"] + assert_equal(len(self.trial.key_source & self.trial), success_count) + + def test_populate_exclude_error_and_ignore_jobs(self): + # test simple populate + assert_true(self.subject, "root tables are empty") + assert_false(self.experiment, "table already filled?") + + keys = self.experiment.key_source.fetch("KEY", limit=2) + for idx, key in enumerate(keys): + if idx == 0: + schema.schema.jobs.ignore(self.experiment.table_name, key) + else: + schema.schema.jobs.error(self.experiment.table_name, key, "") + + self.experiment.populate(reserve_jobs=True) + assert_equal( + len(self.experiment.key_source & self.experiment), + len(self.experiment.key_source) - 2, + ) + + def test_allow_direct_insert(self): + assert_true(self.subject, "root tables are empty") + key = self.subject.fetch("KEY", limit=1)[0] + key["experiment_id"] = 1000 + key["experiment_date"] = "2018-10-30" + self.experiment.insert1(key, allow_direct_insert=True) + + def test_multi_processing(self): + assert self.subject, "root tables are empty" + assert not self.experiment, "table already filled?" 
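# A minimal, self-contained sketch (not part of the patch above) of the
# exception idiom this migration moves toward: nose's @raises decorator
# becomes a pytest.raises context manager. DummyError and divide are
# invented names used only for illustration.
import pytest

class DummyError(Exception):
    pass

def divide(a, b):
    if b == 0:
        raise DummyError("division by zero")
    return a / b

def test_divide_raises():
    # pytest.raises scopes the expectation to exactly the failing call,
    # whereas @raises accepted the exception from anywhere in the test.
    with pytest.raises(DummyError):
        divide(1, 0)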
+ self.experiment.populate(processes=2) + assert ( + len(self.experiment) + == len(self.subject) * self.experiment.fake_experiments_per_subject + ) + + def test_max_multi_processing(self): + assert self.subject, "root tables are empty" + assert not self.experiment, "table already filled?" + self.experiment.populate(processes=None) + assert ( + len(self.experiment) + == len(self.subject) * self.experiment.fake_experiments_per_subject + ) + + @raises(DataJointError) + def test_allow_insert(self): + assert_true(self.subject, "root tables are empty") + key = self.subject.fetch("KEY")[0] + key["experiment_id"] = 1001 + key["experiment_date"] = "2018-10-30" + self.experiment.insert1(key) + + def test_load_dependencies(self): + schema = dj.Schema(f"{PREFIX}_load_dependencies_populate") + + @schema + class ImageSource(dj.Lookup): + definition = """ + image_source_id: int + """ + contents = [(0,)] + + @schema + class Image(dj.Imported): + definition = """ + -> ImageSource + --- + image_data: longblob + """ + + def make(self, key): + self.insert1(dict(key, image_data=dict())) + + Image.populate() + + @schema + class Crop(dj.Computed): + definition = """ + -> Image + --- + crop_image: longblob + """ + + def make(self, key): + self.insert1(dict(key, crop_image=dict())) + + Crop.populate() From 22cfc2228ec3d20f6e65d62c5b44e92edc1ccbfd Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 14:55:11 -0600 Subject: [PATCH 052/212] First pass at migrating test_autopopulate --- tests/test_autopopulate.py | 87 ++++++++++++++++++++------------------ 1 file changed, 45 insertions(+), 42 deletions(-) diff --git a/tests/test_autopopulate.py b/tests/test_autopopulate.py index 7a0a58e39..cb035fa26 100644 --- a/tests/test_autopopulate.py +++ b/tests/test_autopopulate.py @@ -1,79 +1,77 @@ -from nose.tools import assert_equal, assert_false, assert_true, raises +import pytest from . import schema, PREFIX from datajoint import DataJointError import datajoint as dj +@pytest.fixture +def schema_any_with_teardown(schema_any): + yield schema_any + # delete automatic tables just in case + schema_any.Ephys.Channel().delete_quick() + schema_any.Ephys().delete_quick() + schema_any.Trial().Condition.delete_quick() + schema_any.Trial().delete_quick() + schema_any.Experiment().delete_quick() + + class TestPopulate: """ Test base relations: insert, delete """ - def setUp(self): - self.user = schema.User() - self.subject = schema.Subject() - self.experiment = schema.Experiment() - self.trial = schema.Trial() - self.ephys = schema.Ephys() - self.channel = schema.Ephys.Channel() - - def tearDown(self): - # delete automatic tables just in case - self.channel.delete_quick() - self.ephys.delete_quick() - self.trial.Condition.delete_quick() - self.trial.delete_quick() - self.experiment.delete_quick() - - def test_populate(self): + def test_populate(self, schema_any_with_teardown): + breakpoint() # test simple populate - assert_true(self.subject, "root tables are empty") - assert_false(self.experiment, "table already filled?") + assert self.subject, "root tables are empty" + assert not self.experiment, "table already filled?" self.experiment.populate() - assert_true( + assert ( len(self.experiment) == len(self.subject) * self.experiment.fake_experiments_per_subject ) # test restricted populate - assert_false(self.trial, "table already filled?") + assert not self.trial, "table already filled?" 
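# A short sketch of the assertion rewrite applied throughout this
# commit: nose's assert_true(x, msg) and assert_false(x, msg) become
# bare asserts, which pytest rewrites to show introspected values on
# failure. The subject and experiment lists are invented stand-ins for
# the tables used in the real tests.
def test_assert_style():
    subject = [1551, 1552]  # truthy, like a populated root table
    experiment = []         # falsy, like an empty downstream table
    assert subject, "root tables are empty"
    assert not experiment, "table already filled?"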
restriction = self.subject.proj(animal="subject_id").fetch("KEY")[0] d = self.trial.connection.dependencies d.load() self.trial.populate(restriction) - assert_true(self.trial, "table was not populated") + assert self.trial, "table was not populated" key_source = self.trial.key_source - assert_equal(len(key_source & self.trial), len(key_source & restriction)) - assert_equal(len(key_source - self.trial), len(key_source - restriction)) + assert len(key_source & self.trial) == len(key_source & restriction) + assert len(key_source - self.trial) == len(key_source - restriction) # test subtable populate - assert_false(self.ephys) - assert_false(self.channel) + assert not self.ephys + assert not self.channel self.ephys.populate() - assert_true(self.ephys) - assert_true(self.channel) + assert self.ephys + assert self.channel + @pytest.mark.skip(reason="temp") def test_populate_with_success_count(self): # test simple populate - assert_true(self.subject, "root tables are empty") - assert_false(self.experiment, "table already filled?") + assert self.subject, "root tables are empty" + assert not self.experiment, "table already filled?" ret = self.experiment.populate() success_count = ret["success_count"] - assert_equal(len(self.experiment.key_source & self.experiment), success_count) + assert len(self.experiment.key_source & self.experiment) == success_count # test restricted populate - assert_false(self.trial, "table already filled?") + assert not self.trial, "table already filled?" restriction = self.subject.proj(animal="subject_id").fetch("KEY")[0] d = self.trial.connection.dependencies d.load() ret = self.trial.populate(restriction, suppress_errors=True) success_count = ret["success_count"] - assert_equal(len(self.trial.key_source & self.trial), success_count) + assert len(self.trial.key_source & self.trial) == success_count + @pytest.mark.skip(reason="temp") def test_populate_exclude_error_and_ignore_jobs(self): # test simple populate - assert_true(self.subject, "root tables are empty") - assert_false(self.experiment, "table already filled?") + assert self.subject, "root tables are empty" + assert not self.experiment, "table already filled?" keys = self.experiment.key_source.fetch("KEY", limit=2) for idx, key in enumerate(keys): @@ -83,18 +81,20 @@ def test_populate_exclude_error_and_ignore_jobs(self): schema.schema.jobs.error(self.experiment.table_name, key, "") self.experiment.populate(reserve_jobs=True) - assert_equal( - len(self.experiment.key_source & self.experiment), + assert ( + len(self.experiment.key_source & self.experiment) == len(self.experiment.key_source) - 2, ) + @pytest.mark.skip(reason="temp") def test_allow_direct_insert(self): - assert_true(self.subject, "root tables are empty") + assert self.subject, "root tables are empty" key = self.subject.fetch("KEY", limit=1)[0] key["experiment_id"] = 1000 key["experiment_date"] = "2018-10-30" self.experiment.insert1(key, allow_direct_insert=True) + @pytest.mark.skip(reason="temp") def test_multi_processing(self): assert self.subject, "root tables are empty" assert not self.experiment, "table already filled?" @@ -104,6 +104,7 @@ def test_multi_processing(self): == len(self.subject) * self.experiment.fake_experiments_per_subject ) + @pytest.mark.skip(reason="temp") def test_max_multi_processing(self): assert self.subject, "root tables are empty" assert not self.experiment, "table already filled?" 
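# A hedged sketch of the yield-fixture pattern this series uses in
# place of setUp/tearDown pairs: code before `yield` is setup, code
# after it runs as teardown even when the test fails. The fake_db dict
# is an invented stand-in for a schema object and its drop() call.
import pytest

@pytest.fixture
def fake_db():
    db = {"connected": True, "rows": []}  # setup
    yield db
    db["connected"] = False               # teardown, mirrors schema.drop()

def test_insert_row(fake_db):
    fake_db["rows"].append({"id": 1})
    assert fake_db["rows"], "insert did not happen"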
@@ -113,14 +114,16 @@ def test_max_multi_processing(self): == len(self.subject) * self.experiment.fake_experiments_per_subject ) - @raises(DataJointError) + @pytest.mark.skip(reason="temp") def test_allow_insert(self): - assert_true(self.subject, "root tables are empty") + assert self.subject, "root tables are empty" key = self.subject.fetch("KEY")[0] key["experiment_id"] = 1001 key["experiment_date"] = "2018-10-30" - self.experiment.insert1(key) + with pytest.raises(DataJointError): + self.experiment.insert1(key) + @pytest.mark.skip(reason="temp") def test_load_dependencies(self): schema = dj.Schema(f"{PREFIX}_load_dependencies_populate") From f067017391223bc9786717fff4a53ba43b9fbb49 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 20:55:33 +0000 Subject: [PATCH 053/212] Format with black --- tests/test_autopopulate.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_autopopulate.py b/tests/test_autopopulate.py index cb035fa26..ef47cc1b7 100644 --- a/tests/test_autopopulate.py +++ b/tests/test_autopopulate.py @@ -82,8 +82,8 @@ def test_populate_exclude_error_and_ignore_jobs(self): self.experiment.populate(reserve_jobs=True) assert ( - len(self.experiment.key_source & self.experiment) == - len(self.experiment.key_source) - 2, + len(self.experiment.key_source & self.experiment) + == len(self.experiment.key_source) - 2, ) @pytest.mark.skip(reason="temp") From 78e1ed2818b0ee713bf86fa8412d342edd523028 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 15:11:04 -0600 Subject: [PATCH 054/212] Use setup and teardown class methods instead --- tests/test_autopopulate.py | 32 +++++++++++++++++++------------- 1 file changed, 19 insertions(+), 13 deletions(-) diff --git a/tests/test_autopopulate.py b/tests/test_autopopulate.py index ef47cc1b7..5802e7f9c 100644 --- a/tests/test_autopopulate.py +++ b/tests/test_autopopulate.py @@ -4,24 +4,30 @@ import datajoint as dj -@pytest.fixture -def schema_any_with_teardown(schema_any): - yield schema_any - # delete automatic tables just in case - schema_any.Ephys.Channel().delete_quick() - schema_any.Ephys().delete_quick() - schema_any.Trial().Condition.delete_quick() - schema_any.Trial().delete_quick() - schema_any.Experiment().delete_quick() - - class TestPopulate: """ Test base relations: insert, delete """ - def test_populate(self, schema_any_with_teardown): - breakpoint() + @classmethod + def setup_class(cls): + cls.user = schema.User() + cls.subject = schema.Subject() + cls.experiment = schema.Experiment() + cls.trial = schema.Trial() + cls.ephys = schema.Ephys() + cls.channel = schema.Ephys.Channel() + + @classmethod + def teardown_class(cls): + # Delete automatic tables just in case + cls.channel.delete_quick() + cls.ephys.delete_quick() + cls.trial.Condition.delete_quick() + cls.trial.delete_quick() + cls.experiment.delete_quick() + + def test_populate(self, schema_any): # test simple populate assert self.subject, "root tables are empty" assert not self.experiment, "table already filled?" From 4f2a3ca3b81ea9109dc18af0926e6040d5e4bf37 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 15:11:34 -0600 Subject: [PATCH 055/212] Teardown tolerates nonexistent table --- tests/test_autopopulate.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/tests/test_autopopulate.py b/tests/test_autopopulate.py index 5802e7f9c..282dc1136 100644 --- a/tests/test_autopopulate.py +++ b/tests/test_autopopulate.py @@ -2,6 +2,7 @@ from . 
import schema, PREFIX from datajoint import DataJointError import datajoint as dj +import pymysql class TestPopulate: @@ -20,12 +21,15 @@ def setup_class(cls): @classmethod def teardown_class(cls): - # Delete automatic tables just in case - cls.channel.delete_quick() - cls.ephys.delete_quick() - cls.trial.Condition.delete_quick() - cls.trial.delete_quick() - cls.experiment.delete_quick() + """Delete automatic tables just in case""" + for autopop_table in ( + cls.channel, cls.ephys, cls.trial.Condition, cls.trial, cls.experiment + ): + try: + autopop_table.delete_quick() + except pymysql.err.OperationalError: + # Table doesn't exist + pass def test_populate(self, schema_any): # test simple populate From ebc2144b06049ab6bae1bf330603cda1730d43e9 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 15:18:32 -0600 Subject: [PATCH 056/212] Migrate test_autopopulate.py --- tests/test_autopopulate.py | 28 +++++++++------------------- 1 file changed, 9 insertions(+), 19 deletions(-) diff --git a/tests/test_autopopulate.py b/tests/test_autopopulate.py index 282dc1136..d72cbc2c0 100644 --- a/tests/test_autopopulate.py +++ b/tests/test_autopopulate.py @@ -59,8 +59,7 @@ def test_populate(self, schema_any): assert self.ephys assert self.channel - @pytest.mark.skip(reason="temp") - def test_populate_with_success_count(self): + def test_populate_with_success_count(self, schema_any): # test simple populate assert self.subject, "root tables are empty" assert not self.experiment, "table already filled?" @@ -77,8 +76,7 @@ def test_populate_with_success_count(self): success_count = ret["success_count"] assert len(self.trial.key_source & self.trial) == success_count - @pytest.mark.skip(reason="temp") - def test_populate_exclude_error_and_ignore_jobs(self): + def test_populate_exclude_error_and_ignore_jobs(self, schema_any): # test simple populate assert self.subject, "root tables are empty" assert not self.experiment, "table already filled?" @@ -86,26 +84,21 @@ def test_populate_exclude_error_and_ignore_jobs(self): keys = self.experiment.key_source.fetch("KEY", limit=2) for idx, key in enumerate(keys): if idx == 0: - schema.schema.jobs.ignore(self.experiment.table_name, key) + schema_any.jobs.ignore(self.experiment.table_name, key) else: - schema.schema.jobs.error(self.experiment.table_name, key, "") + schema_any.jobs.error(self.experiment.table_name, key, "") self.experiment.populate(reserve_jobs=True) - assert ( - len(self.experiment.key_source & self.experiment) - == len(self.experiment.key_source) - 2, - ) + assert len(self.experiment.key_source & self.experiment) == len(self.experiment.key_source) - 2 - @pytest.mark.skip(reason="temp") - def test_allow_direct_insert(self): + def test_allow_direct_insert(self, schema_any): assert self.subject, "root tables are empty" key = self.subject.fetch("KEY", limit=1)[0] key["experiment_id"] = 1000 key["experiment_date"] = "2018-10-30" self.experiment.insert1(key, allow_direct_insert=True) - @pytest.mark.skip(reason="temp") - def test_multi_processing(self): + def test_multi_processing(self, schema_any): assert self.subject, "root tables are empty" assert not self.experiment, "table already filled?" 
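# A sketch of the xunit-style class hooks adopted in this part of the
# series: setup_class runs once before the class's tests and
# teardown_class once after, with cleanup tolerating resources that are
# already gone, much like the except (OperationalError,
# MissingTableError) block. The resource names here are invented.
class TestLifecycle:
    @classmethod
    def setup_class(cls):
        cls.resources = ["channel", "ephys", "trial"]

    @classmethod
    def teardown_class(cls):
        for name in ("channel", "ephys", "trial", "never_created"):
            try:
                cls.resources.remove(name)
            except ValueError:
                pass  # already deleted or never created; safe to ignore

    def test_resources_exist(self):
        assert self.resources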
self.experiment.populate(processes=2) @@ -114,8 +107,7 @@ def test_multi_processing(self): == len(self.subject) * self.experiment.fake_experiments_per_subject ) - @pytest.mark.skip(reason="temp") - def test_max_multi_processing(self): + def test_max_multi_processing(self, schema_any): assert self.subject, "root tables are empty" assert not self.experiment, "table already filled?" self.experiment.populate(processes=None) @@ -124,8 +116,7 @@ def test_max_multi_processing(self): == len(self.subject) * self.experiment.fake_experiments_per_subject ) - @pytest.mark.skip(reason="temp") - def test_allow_insert(self): + def test_allow_insert(self, schema_any): assert self.subject, "root tables are empty" key = self.subject.fetch("KEY")[0] key["experiment_id"] = 1001 @@ -133,7 +124,6 @@ def test_allow_insert(self): with pytest.raises(DataJointError): self.experiment.insert1(key) - @pytest.mark.skip(reason="temp") def test_load_dependencies(self): schema = dj.Schema(f"{PREFIX}_load_dependencies_populate") From 57cddd5293e3b786fdf7b2a9445ee6e7aafd7d33 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 15:19:01 -0600 Subject: [PATCH 057/212] Switch from deprecated Version classes --- tests/conftest.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 6861214a3..9b43c2eb3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,7 +5,6 @@ import minio import urllib3 import certifi -from distutils.version import LooseVersion import shutil import pytest import networkx as nx @@ -68,9 +67,9 @@ def connection_root(connection_root_bare): dj.config["safemode"] = False conn_root = connection_root_bare # Create MySQL users - if LooseVersion(conn_root.query("select @@version;").fetchone()[0]) >= LooseVersion( - "8.0.0" - ): + if version.parse( + connection_root.query("select @@version;").fetchone()[0] + ) >= version.parse("8.0.0"): # create user if necessary on mysql8 conn_root.query( """ From 0de5a69fda533f70a702c3bb33933f0c4512f854 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 21:19:13 +0000 Subject: [PATCH 058/212] Format with black --- tests/test_autopopulate.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/tests/test_autopopulate.py b/tests/test_autopopulate.py index d72cbc2c0..c04827f5f 100644 --- a/tests/test_autopopulate.py +++ b/tests/test_autopopulate.py @@ -23,7 +23,11 @@ def setup_class(cls): def teardown_class(cls): """Delete automatic tables just in case""" for autopop_table in ( - cls.channel, cls.ephys, cls.trial.Condition, cls.trial, cls.experiment + cls.channel, + cls.ephys, + cls.trial.Condition, + cls.trial, + cls.experiment, ): try: autopop_table.delete_quick() @@ -89,7 +93,10 @@ def test_populate_exclude_error_and_ignore_jobs(self, schema_any): schema_any.jobs.error(self.experiment.table_name, key, "") self.experiment.populate(reserve_jobs=True) - assert len(self.experiment.key_source & self.experiment) == len(self.experiment.key_source) - 2 + assert ( + len(self.experiment.key_source & self.experiment) + == len(self.experiment.key_source) - 2 + ) def test_allow_direct_insert(self, schema_any): assert self.subject, "root tables are empty" From 209a69b476b7ab74b0618c5d6cb6d14582642ae3 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 15:42:51 -0600 Subject: [PATCH 059/212] Fix syntax error --- tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 9b43c2eb3..0b1465241 100644 --- 
a/tests/conftest.py +++ b/tests/conftest.py @@ -68,7 +68,7 @@ def connection_root(connection_root_bare): conn_root = connection_root_bare # Create MySQL users if version.parse( - connection_root.query("select @@version;").fetchone()[0] + conn_root.query("select @@version;").fetchone()[0] ) >= version.parse("8.0.0"): # create user if necessary on mysql8 conn_root.query( From 61c7e17cf885b74a37e7ec34b0b54b1eb603995f Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 16:01:09 -0600 Subject: [PATCH 060/212] Catch a DJ error on drop_quick --- tests/test_autopopulate.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_autopopulate.py b/tests/test_autopopulate.py index c04827f5f..25f8e16ec 100644 --- a/tests/test_autopopulate.py +++ b/tests/test_autopopulate.py @@ -31,7 +31,7 @@ def teardown_class(cls): ): try: autopop_table.delete_quick() - except pymysql.err.OperationalError: + except (pymysql.err.OperationalError, dj.errors.MissingTableError): # Table doesn't exist pass From e04d15d67614e593a203975572012d9fe4bd5b8d Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 16:08:46 -0600 Subject: [PATCH 061/212] cp to tests --- tests/test_filepath.py | 283 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 283 insertions(+) create mode 100644 tests/test_filepath.py diff --git a/tests/test_filepath.py b/tests/test_filepath.py new file mode 100644 index 000000000..3e94e4885 --- /dev/null +++ b/tests/test_filepath.py @@ -0,0 +1,283 @@ +from nose.tools import assert_true, assert_false, assert_equal, raises +import datajoint as dj +import os +from pathlib import Path +import random +from .schema_external import schema, Filepath, FilepathS3, stores_config +import logging +import io + +logger = logging.getLogger("datajoint") + + +def setUp(self): + dj.config["stores"] = stores_config + + +def test_path_match(store="repo"): + """test file path matches and empty file""" + dj.errors._switch_filepath_types(True) + ext = schema.external[store] + stage_path = dj.config["stores"][store]["stage"] + + # create a mock file + relpath = "path/to/films" + managed_file = Path(stage_path, relpath, "vid.mov") + managed_file.parent.mkdir(parents=True, exist_ok=True) + open(str(managed_file), "a").close() + + # put the file + uuid = ext.upload_filepath(str(managed_file)) + + # remove + managed_file.unlink() + assert_false(managed_file.exists()) + + # check filepath + assert_equal( + (ext & {"hash": uuid}).fetch1("filepath"), + str(managed_file.relative_to(stage_path).as_posix()), + ) + + # # Download the file and check its contents. 
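# A self-contained sketch of the pathlib pattern the filepath tests
# rely on: build a managed file under a stage directory, create parent
# directories, and recover the store-relative POSIX path. The stage
# directory and file names here are invented.
import tempfile
from pathlib import Path

stage_path = Path(tempfile.mkdtemp())
managed_file = stage_path / "path/to/films" / "vid.mov"
managed_file.parent.mkdir(parents=True, exist_ok=True)
managed_file.touch()  # an empty file is a valid test case

assert managed_file.relative_to(stage_path).as_posix() == "path/to/films/vid.mov"
managed_file.unlink()
assert not managed_file.exists()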
+ restored_path, checksum = ext.download_filepath(uuid) + assert_equal(restored_path, str(managed_file)) + assert_equal(checksum, dj.hash.uuid_from_file(str(managed_file))) + + # cleanup + ext.delete(delete_external_files=True) + dj.errors._switch_filepath_types(False) + + +def test_filepath(store="repo"): + """test file management""" + dj.errors._switch_filepath_types(True) + + ext = schema.external[store] + stage_path = dj.config["stores"][store]["stage"] + filename = "picture.dat" + + # create a mock file + relpath = "one/two/three" + managed_file = Path(stage_path, relpath, filename) + managed_file.parent.mkdir(parents=True, exist_ok=True) + data = os.urandom(3000) + with managed_file.open("wb") as f: + f.write(data) + + # put the same file twice to ensure storing once + uuid1 = ext.upload_filepath(str(managed_file)) + # no duplication should arise if file is the same + uuid2 = ext.upload_filepath(str(managed_file)) + assert_equal(uuid1, uuid2) + + # remove to ensure downloading + managed_file.unlink() + assert_false(managed_file.exists()) + + # Download the file and check its contents. Repeat causes no download from remote + for _ in 1, 2: + restored_path, checksum = ext.download_filepath(uuid1) + assert_equal(restored_path, str(managed_file)) + assert_equal(checksum, dj.hash.uuid_from_file(str(managed_file))) + + # verify same data + with managed_file.open("rb") as f: + synced_data = f.read() + assert_equal(data, synced_data) + + # cleanup + ext.delete(delete_external_files=True) + assert_false(ext.exists(ext._make_external_filepath(str(Path(relpath, filename))))) + + dj.errors._switch_filepath_types(False) + + +def test_filepath_s3(): + """test file management with s3""" + test_filepath(store="repo-s3") + + +def test_duplicate_upload(store="repo"): + ext = schema.external[store] + stage_path = dj.config["stores"][store]["stage"] + relpath = "one/two/three" + managed_file = Path(stage_path, relpath, "plot.dat") + managed_file.parent.mkdir(parents=True, exist_ok=True) + with managed_file.open("wb") as f: + f.write(os.urandom(300)) + ext.upload_filepath(str(managed_file)) + ext.upload_filepath(str(managed_file)) # this is fine because the file is the same + + +def test_duplicate_upload_s3(): + test_duplicate_upload(store="repo-s3") + + +@raises(dj.DataJointError) +def test_duplicate_error(store="repo"): + """syncing duplicate non-matching file should fail""" + ext = schema.external[store] + stage_path = dj.config["stores"][store]["stage"] + relpath = "one/two/three" + managed_file = Path(stage_path, relpath, "thesis.dat") + managed_file.parent.mkdir(parents=True, exist_ok=True) + with managed_file.open("wb") as f: + f.write(os.urandom(300)) + ext.upload_filepath(str(managed_file)) + with managed_file.open("wb") as f: + f.write(os.urandom(300)) + # this should raise exception because the file has changed + ext.upload_filepath(str(managed_file)) + + +def test_duplicate_error_s3(): + test_duplicate_error(store="repo-s3") + + +def test_filepath_class(table=Filepath(), store="repo", verify_checksum=True): + if not verify_checksum: + dj.config["filepath_checksum_size_limit"] = 0 + dj.errors._switch_filepath_types(True) + stage_path = dj.config["stores"][store]["stage"] + # create a mock file + relative_path = "one/two/three" + managed_file = Path(stage_path, relative_path, "attachment.dat") + managed_file.parent.mkdir(parents=True, exist_ok=True) + data = os.urandom(3000) + with managed_file.open("wb") as f: + f.write(data) + with managed_file.open("rb") as f: + contents = f.read() + 
assert_equal(data, contents) + + # upload file into shared repo + table.insert1((1, str(managed_file))) + + # remove file locally + managed_file.unlink() + assert_false(managed_file.is_file()) + + # fetch file from remote + filepath = (table & {"fnum": 1}).fetch1("img") + assert_equal(filepath, str(managed_file)) + + # verify original contents + with managed_file.open("rb") as f: + contents = f.read() + assert_equal(data, contents) + + # delete from table + table.delete() + assert_true(table.external[store]) + + # delete from external table + table.external[store].delete(delete_external_files=True) + dj.errors._switch_filepath_types(False) + dj.config["filepath_checksum_size_limit"] = None + + +def test_filepath_class_again(): + """test_filepath_class again to deal with existing remote files""" + test_filepath_class() + + +def test_filepath_class_s3(): + test_filepath_class(FilepathS3(), "repo-s3") + + +def test_filepath_class_s3_again(): + """test_filepath_class_s3 again to deal with existing remote files""" + test_filepath_class(FilepathS3(), "repo-s3") + + +def test_filepath_class_no_checksum(): + log_capture = io.StringIO() + stream_handler = logging.StreamHandler(log_capture) + log_format = logging.Formatter( + "[%(asctime)s][%(funcName)s][%(levelname)s]: %(message)s" + ) + stream_handler.setFormatter(log_format) + stream_handler.set_name("test_limit_warning") + logger.addHandler(stream_handler) + test_filepath_class(verify_checksum=False) + log_contents = log_capture.getvalue() + log_capture.close() + for handler in logger.handlers: # Clean up handler + if handler.name == "test_limit_warning": + logger.removeHandler(handler) + assert "Skipped checksum for file with hash:" in log_contents + + +def test_filepath_cleanup(table=Filepath(), store="repo"): + """test deletion of filepath entries from external table""" + + dj.errors._switch_filepath_types(True) + + stage_path = dj.config["stores"][store]["stage"] + n = 20 + contents = os.urandom(345) + for i in range(n): + relative_path = Path(*random.sample(("one", "two", "three", "four"), k=3)) + managed_file = Path(stage_path, relative_path, "file.dat") + managed_file.parent.mkdir(parents=True, exist_ok=True) + with managed_file.open("wb") as f: + f.write(contents) # same in all files + table.insert1((i, str(managed_file))) + assert_equal(len(table), n) + + ext = schema.external[store] + + assert_equal(len(table), n) + assert_true(0 < len(ext) < n) + + (table & "fnum in (1, 2, 3, 4, 5, 6)").delete() + m = n - len(table) # number deleted + assert_true(m == 6) + + ext.delete(delete_external_files=True) # delete unused entries + assert_true(0 < len(ext) <= n - m) + + dj.errors._switch_filepath_types(False) + + +def test_filepath_cleanup_s3(): + """test deletion of filepath entries from external table""" + store = "repo-s3" + test_filepath_cleanup(FilepathS3(), store) + + +def test_delete_without_files(store="repo"): + """test deletion of filepath entries from external table without removing files""" + dj.errors._switch_filepath_types(True) + # do not delete unused entries + schema.external[store].delete(delete_external_files=False) + dj.errors._switch_filepath_types(False) + + +def test_return_string(table=Filepath(), store="repo"): + """test returning string on fetch""" + dj.errors._switch_filepath_types(True) + stage_path = dj.config["stores"][store]["stage"] + # create a mock file + relative_path = "this/is/a/test" + managed_file = Path(stage_path, relative_path, "string.dat") + managed_file.parent.mkdir(parents=True, exist_ok=True) + 
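# A sketch of the log-capture technique used in
# test_filepath_class_no_checksum: attach a StringIO-backed handler,
# run the code under test, then detach the handler by name. The logger
# name "example" is invented; pytest's built-in caplog fixture is the
# idiomatic alternative.
import io
import logging

logger = logging.getLogger("example")
buffer = io.StringIO()
handler = logging.StreamHandler(buffer)
handler.set_name("capture_for_assert")
logger.addHandler(handler)

logger.warning("Skipped checksum for file with hash: deadbeef")

for h in list(logger.handlers):  # remove only the capture handler
    if h.name == "capture_for_assert":
        logger.removeHandler(h)

assert "Skipped checksum" in buffer.getvalue()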
data = os.urandom(3000) + with managed_file.open("wb") as f: + f.write(data) + with managed_file.open("rb") as f: + contents = f.read() + assert_equal(data, contents) + + # upload file into shared repo + table.insert1((138, str(managed_file))) + + # remove file locally + managed_file.unlink() + assert_false(managed_file.is_file()) + + # fetch file from remote + filepath = (table & {"fnum": 138}).fetch1("img") + assert_true(isinstance(filepath, str)) + dj.errors._switch_filepath_types(False) From 6fe69d183e84d2bc886a49607e430d589fa08199 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 5 Dec 2023 16:41:28 -0600 Subject: [PATCH 062/212] Migrate test_filepath.py --- tests/test_filepath.py | 286 +++++++++++++++++++---------------------- 1 file changed, 134 insertions(+), 152 deletions(-) diff --git a/tests/test_filepath.py b/tests/test_filepath.py index 3e94e4885..54478e476 100644 --- a/tests/test_filepath.py +++ b/tests/test_filepath.py @@ -1,23 +1,16 @@ -from nose.tools import assert_true, assert_false, assert_equal, raises +import pytest import datajoint as dj import os from pathlib import Path import random -from .schema_external import schema, Filepath, FilepathS3, stores_config +from .schema_external import Filepath, FilepathS3 import logging import io -logger = logging.getLogger("datajoint") - -def setUp(self): - dj.config["stores"] = stores_config - - -def test_path_match(store="repo"): +def test_path_match(schema_ext, enable_filepath_feature, minio_client, store="repo"): """test file path matches and empty file""" - dj.errors._switch_filepath_types(True) - ext = schema.external[store] + ext = schema_ext.external[store] stage_path = dj.config["stores"][store]["stage"] # create a mock file @@ -31,29 +24,26 @@ def test_path_match(store="repo"): # remove managed_file.unlink() - assert_false(managed_file.exists()) + assert not managed_file.exists() # check filepath - assert_equal( - (ext & {"hash": uuid}).fetch1("filepath"), - str(managed_file.relative_to(stage_path).as_posix()), + assert (ext & {"hash": uuid}).fetch1("filepath") == str( + managed_file.relative_to(stage_path).as_posix() ) # # Download the file and check its contents. restored_path, checksum = ext.download_filepath(uuid) - assert_equal(restored_path, str(managed_file)) - assert_equal(checksum, dj.hash.uuid_from_file(str(managed_file))) + assert restored_path == str(managed_file) + assert checksum == dj.hash.uuid_from_file(str(managed_file)) # cleanup ext.delete(delete_external_files=True) - dj.errors._switch_filepath_types(False) -def test_filepath(store="repo"): +@pytest.mark.parametrize("store", ("repo", "repo-s3")) +def test_filepath(enable_filepath_feature, schema_ext, store): """test file management""" - dj.errors._switch_filepath_types(True) - - ext = schema.external[store] + ext = schema_ext.external[store] stage_path = dj.config["stores"][store]["stage"] filename = "picture.dat" @@ -69,37 +59,31 @@ def test_filepath(store="repo"): uuid1 = ext.upload_filepath(str(managed_file)) # no duplication should arise if file is the same uuid2 = ext.upload_filepath(str(managed_file)) - assert_equal(uuid1, uuid2) + assert uuid1 == uuid2 # remove to ensure downloading managed_file.unlink() - assert_false(managed_file.exists()) + assert not managed_file.exists() # Download the file and check its contents. 
Repeat causes no download from remote for _ in 1, 2: restored_path, checksum = ext.download_filepath(uuid1) - assert_equal(restored_path, str(managed_file)) - assert_equal(checksum, dj.hash.uuid_from_file(str(managed_file))) + assert restored_path == str(managed_file) + assert checksum == dj.hash.uuid_from_file(str(managed_file)) # verify same data with managed_file.open("rb") as f: synced_data = f.read() - assert_equal(data, synced_data) + assert data == synced_data # cleanup ext.delete(delete_external_files=True) - assert_false(ext.exists(ext._make_external_filepath(str(Path(relpath, filename))))) - - dj.errors._switch_filepath_types(False) - - -def test_filepath_s3(): - """test file management with s3""" - test_filepath(store="repo-s3") + assert not ext.exists(ext._make_external_filepath(str(Path(relpath, filename)))) -def test_duplicate_upload(store="repo"): - ext = schema.external[store] +@pytest.mark.parametrize("store", ("repo", "repo-s3")) +def test_duplicate_upload(schema_ext, store): + ext = schema_ext.external[store] stage_path = dj.config["stores"][store]["stage"] relpath = "one/two/three" managed_file = Path(stage_path, relpath, "plot.dat") @@ -110,14 +94,10 @@ def test_duplicate_upload(store="repo"): ext.upload_filepath(str(managed_file)) # this is fine because the file is the same -def test_duplicate_upload_s3(): - test_duplicate_upload(store="repo-s3") - - -@raises(dj.DataJointError) -def test_duplicate_error(store="repo"): +@pytest.mark.parametrize("store", ("repo", "repo-s3")) +def test_duplicate_error(schema_ext, store): """syncing duplicate non-matching file should fail""" - ext = schema.external[store] + ext = schema_ext.external[store] stage_path = dj.config["stores"][store]["stage"] relpath = "one/two/three" managed_file = Path(stage_path, relpath, "thesis.dat") @@ -128,92 +108,100 @@ def test_duplicate_error(store="repo"): with managed_file.open("wb") as f: f.write(os.urandom(300)) # this should raise exception because the file has changed - ext.upload_filepath(str(managed_file)) - - -def test_duplicate_error_s3(): - test_duplicate_error(store="repo-s3") - - -def test_filepath_class(table=Filepath(), store="repo", verify_checksum=True): - if not verify_checksum: - dj.config["filepath_checksum_size_limit"] = 0 - dj.errors._switch_filepath_types(True) - stage_path = dj.config["stores"][store]["stage"] - # create a mock file - relative_path = "one/two/three" - managed_file = Path(stage_path, relative_path, "attachment.dat") - managed_file.parent.mkdir(parents=True, exist_ok=True) - data = os.urandom(3000) - with managed_file.open("wb") as f: - f.write(data) - with managed_file.open("rb") as f: - contents = f.read() - assert_equal(data, contents) - - # upload file into shared repo - table.insert1((1, str(managed_file))) - - # remove file locally - managed_file.unlink() - assert_false(managed_file.is_file()) - - # fetch file from remote - filepath = (table & {"fnum": 1}).fetch1("img") - assert_equal(filepath, str(managed_file)) - - # verify original contents - with managed_file.open("rb") as f: - contents = f.read() - assert_equal(data, contents) - - # delete from table - table.delete() - assert_true(table.external[store]) - - # delete from external table - table.external[store].delete(delete_external_files=True) - dj.errors._switch_filepath_types(False) - dj.config["filepath_checksum_size_limit"] = None - - -def test_filepath_class_again(): - """test_filepath_class again to deal with existing remote files""" - test_filepath_class() - - -def test_filepath_class_s3(): - 
test_filepath_class(FilepathS3(), "repo-s3") - - -def test_filepath_class_s3_again(): - """test_filepath_class_s3 again to deal with existing remote files""" - test_filepath_class(FilepathS3(), "repo-s3") - - -def test_filepath_class_no_checksum(): - log_capture = io.StringIO() - stream_handler = logging.StreamHandler(log_capture) - log_format = logging.Formatter( - "[%(asctime)s][%(funcName)s][%(levelname)s]: %(message)s" + with pytest.raises(dj.DataJointError): + ext.upload_filepath(str(managed_file)) + + +class TestFilepath: + def _test_filepath_class( + self, table=Filepath(), store="repo", verify_checksum=True + ): + if not verify_checksum: + dj.config["filepath_checksum_size_limit"] = 0 + stage_path = dj.config["stores"][store]["stage"] + # create a mock file + relative_path = "one/two/three" + managed_file = Path(stage_path, relative_path, "attachment.dat") + managed_file.parent.mkdir(parents=True, exist_ok=True) + data = os.urandom(3000) + with managed_file.open("wb") as f: + f.write(data) + with managed_file.open("rb") as f: + contents = f.read() + assert data == contents + + # upload file into shared repo + table.insert1((1, str(managed_file))) + + # remove file locally + managed_file.unlink() + assert not managed_file.is_file() + + # fetch file from remote + filepath = (table & {"fnum": 1}).fetch1("img") + assert filepath == str(managed_file) + + # verify original contents + with managed_file.open("rb") as f: + contents = f.read() + assert data == contents + + # delete from table + table.delete() + assert table.external[store] + + # delete from external table + table.external[store].delete(delete_external_files=True) + dj.config["filepath_checksum_size_limit"] = None + + @pytest.mark.parametrize( + "table, store, n_repeats", + ( + (Filepath(), "repo", 2), + (FilepathS3(), "repo-s3", 2), + ), ) - stream_handler.setFormatter(log_format) - stream_handler.set_name("test_limit_warning") - logger.addHandler(stream_handler) - test_filepath_class(verify_checksum=False) - log_contents = log_capture.getvalue() - log_capture.close() - for handler in logger.handlers: # Clean up handler - if handler.name == "test_limit_warning": - logger.removeHandler(handler) - assert "Skipped checksum for file with hash:" in log_contents - - -def test_filepath_cleanup(table=Filepath(), store="repo"): + def test_filepath_class( + self, + schema_ext, + table, + store, + n_repeats, + minio_client, + enable_filepath_feature, + verify_checksum=True, + ): + for _ in range(n_repeats): + self._test_filepath_class(table, store, verify_checksum) + + def test_filepath_class_no_checksum(self, schema_ext, enable_filepath_feature): + logger = logging.getLogger("datajoint") + log_capture = io.StringIO() + stream_handler = logging.StreamHandler(log_capture) + log_format = logging.Formatter( + "[%(asctime)s][%(funcName)s][%(levelname)s]: %(message)s" + ) + stream_handler.setFormatter(log_format) + stream_handler.set_name("test_limit_warning") + logger.addHandler(stream_handler) + self._test_filepath_class(table=Filepath(), store="repo", verify_checksum=False) + log_contents = log_capture.getvalue() + log_capture.close() + for handler in logger.handlers: # Clean up handler + if handler.name == "test_limit_warning": + logger.removeHandler(handler) + assert "Skipped checksum for file with hash:" in log_contents + + +@pytest.mark.parametrize( + "table, store", + ( + (Filepath(), "repo"), + (FilepathS3(), "repo-s3"), + ), +) +def test_filepath_cleanup(table, store, schema_ext, enable_filepath_feature): """test deletion of 
filepath entries from external table""" - - dj.errors._switch_filepath_types(True) - stage_path = dj.config["stores"][store]["stage"] n = 20 contents = os.urandom(345) @@ -224,40 +212,35 @@ def test_filepath_cleanup(table=Filepath(), store="repo"): with managed_file.open("wb") as f: f.write(contents) # same in all files table.insert1((i, str(managed_file))) - assert_equal(len(table), n) + assert len(table) == n - ext = schema.external[store] + ext = schema_ext.external[store] - assert_equal(len(table), n) - assert_true(0 < len(ext) < n) + assert len(table) == n + assert 0 < len(ext) < n (table & "fnum in (1, 2, 3, 4, 5, 6)").delete() m = n - len(table) # number deleted - assert_true(m == 6) + assert m == 6 ext.delete(delete_external_files=True) # delete unused entries - assert_true(0 < len(ext) <= n - m) - - dj.errors._switch_filepath_types(False) - - -def test_filepath_cleanup_s3(): - """test deletion of filepath entries from external table""" - store = "repo-s3" - test_filepath_cleanup(FilepathS3(), store) + assert 0 < len(ext) <= n - m -def test_delete_without_files(store="repo"): +def test_delete_without_files( + schema_ext, + enable_filepath_feature, + store="repo", +): """test deletion of filepath entries from external table without removing files""" - dj.errors._switch_filepath_types(True) # do not delete unused entries - schema.external[store].delete(delete_external_files=False) - dj.errors._switch_filepath_types(False) + schema_ext.external[store].delete(delete_external_files=False) -def test_return_string(table=Filepath(), store="repo"): +def test_return_string( + schema_ext, enable_filepath_feature, table=Filepath(), store="repo" +): """test returning string on fetch""" - dj.errors._switch_filepath_types(True) stage_path = dj.config["stores"][store]["stage"] # create a mock file relative_path = "this/is/a/test" @@ -268,16 +251,15 @@ def test_return_string(table=Filepath(), store="repo"): f.write(data) with managed_file.open("rb") as f: contents = f.read() - assert_equal(data, contents) + assert data == contents # upload file into shared repo table.insert1((138, str(managed_file))) # remove file locally managed_file.unlink() - assert_false(managed_file.is_file()) + assert not managed_file.is_file() # fetch file from remote filepath = (table & {"fnum": 138}).fetch1("img") - assert_true(isinstance(filepath, str)) - dj.errors._switch_filepath_types(False) + assert isinstance(filepath, str) From 993a659ccadf5606d79da5552483421b16f3af4e Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 7 Dec 2023 15:03:53 -0600 Subject: [PATCH 063/212] cp to tests --- tests/schema_uuid.py | 50 ++++++++++++ tests/test_aggr_regressions.py | 141 +++++++++++++++++++++++++++++++++ 2 files changed, 191 insertions(+) create mode 100644 tests/schema_uuid.py create mode 100644 tests/test_aggr_regressions.py diff --git a/tests/schema_uuid.py b/tests/schema_uuid.py new file mode 100644 index 000000000..8aeff5cb5 --- /dev/null +++ b/tests/schema_uuid.py @@ -0,0 +1,50 @@ +import uuid +import datajoint as dj +from . 
import PREFIX, CONN_INFO
+
+schema = dj.Schema(PREFIX + "_test1", connection=dj.conn(**CONN_INFO))
+
+top_level_namespace_id = uuid.UUID("00000000-0000-0000-0000-000000000000")
+
+
+@schema
+class Basic(dj.Manual):
+    definition = """
+    item : uuid
+    ---
+    number : int
+    """
+
+
+@schema
+class Topic(dj.Manual):
+    definition = """
+    # A topic for items
+    topic_id : uuid  # internal identification of a topic, reflects topic name
+    ---
+    topic : varchar(8000)  # full topic name used to generate the topic id
+    """
+
+    def add(self, topic):
+        """add a new topic with its UUID"""
+        self.insert1(
+            dict(topic_id=uuid.uuid5(top_level_namespace_id, topic), topic=topic)
+        )
+
+
+@schema
+class Item(dj.Computed):
+    definition = """
+    item_id : uuid  # internal identification of 
+    ---
+    -> Topic 
+    word : varchar(8000)
+    """
+
+    key_source = Topic  # test key source that is not instantiated
+
+    def make(self, key):
+        for word in ("Habenula", "Hippocampus", "Hypothalamus", "Hypophysis"):
+            self.insert1(
+                dict(key, word=word, item_id=uuid.uuid5(key["topic_id"], word))
+            )
diff --git a/tests/test_aggr_regressions.py b/tests/test_aggr_regressions.py
new file mode 100644
index 000000000..18ed0ba84
--- /dev/null
+++ b/tests/test_aggr_regressions.py
@@ -0,0 +1,141 @@
+"""
+Regression tests for issues 386, 449, 484, and 558 — all related to processing complex aggregations and projections.
+"""
+
+import itertools
+from nose.tools import assert_equal
+import datajoint as dj
+from . import PREFIX, CONN_INFO
+import uuid
+from .schema_uuid import Topic, Item, top_level_namespace_id
+
+schema = dj.Schema(PREFIX + "_aggr_regress", connection=dj.conn(**CONN_INFO))
+
+# --------------- ISSUE 386 -------------------
+# Issue 386 resulted from the loss of aggregated attributes when the aggregation was used as the restrictor
+# Q & (R.aggr(S, n='count(*)') & 'n=2')
+# Error: Unknown column 'n' in HAVING
+
+
+@schema
+class R(dj.Lookup):
+    definition = """
+    r : char(1)
+    """
+    contents = zip("ABCDFGHIJKLMNOPQRST")
+
+
+@schema
+class Q(dj.Lookup):
+    definition = """
+    -> R
+    """
+    contents = zip("ABCDFGH")
+
+
+@schema
+class S(dj.Lookup):
+    definition = """
+    -> R
+    s : int
+    """
+    contents = itertools.product("ABCDF", range(10))
+
+
+def test_issue386():
+    result = R.aggr(S, n="count(*)") & "n=10"
+    result = Q & result
+    result.fetch()
+
+
+# ---------------- ISSUE 449 ------------------
+# Issue 449 arises from incorrect group by attributes after joining with a dj.U()
+
+
+def test_issue449():
+    result = dj.U("n") * R.aggr(S, n="max(s)")
+    result.fetch()
+
+
+# ---------------- ISSUE 484 -----------------
+# Issue 484
+def test_issue484():
+    q = dj.U().aggr(S, n="max(s)")
+    n = q.fetch("n")
+    n = q.fetch1("n")
+    q = dj.U().aggr(S, n="avg(s)")
+    result = dj.U().aggr(q, m="max(n)")
+    result.fetch()
+
+
+# --------------- ISSUE 558 ------------------
+# Issue 558 resulted from the fact that DataJoint saves subqueries and often combines a restriction followed
+# by a projection into a single SELECT statement, which in several unusual cases produces unexpected results.
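+#
+# A minimal sketch of the failure mode (hypothetical, using the A and B
+# lookup tables declared below): merging the restriction and the projection
+# into one SELECT could make these two lengths disagree:
+#
+#   q = (A - B).proj(id2="3")
+#   assert len(q) == len(A - B)  # see test_issue558_part1 below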
+ + +@schema +class A(dj.Lookup): + definition = """ + id: int + """ + contents = zip(range(10)) + + +@schema +class B(dj.Lookup): + definition = """ + -> A + id2: int + """ + contents = zip(range(5), range(5, 10)) + + +@schema +class X(dj.Lookup): + definition = """ + id: int + """ + contents = zip(range(10)) + + +def test_issue558_part1(): + q = (A - B).proj(id2="3") + assert_equal(len(A - B), len(q)) + + +def test_issue558_part2(): + d = dict(id=3, id2=5) + assert_equal(len(X & d), len((X & d).proj(id2="3"))) + + +def test_left_join_len(): + Topic().add("jeff") + Item.populate() + Topic().add("jeff2") + Topic().add("jeff3") + q = Topic.join( + Item - dict(topic_id=uuid.uuid5(top_level_namespace_id, "jeff")), left=True + ) + qf = q.fetch() + assert len(q) == len(qf) + + +def test_union_join(): + # https://github.com/datajoint/datajoint-python/issues/930 + A.insert(zip([100, 200, 300, 400, 500, 600])) + B.insert([(100, 11), (200, 22), (300, 33), (400, 44)]) + q1 = B & "id < 300" + q2 = B & "id > 300" + + expected_data = [ + {"id": 0, "id2": 5}, + {"id": 1, "id2": 6}, + {"id": 2, "id2": 7}, + {"id": 3, "id2": 8}, + {"id": 4, "id2": 9}, + {"id": 100, "id2": 11}, + {"id": 200, "id2": 22}, + {"id": 400, "id2": 44}, + ] + + assert ((q1 + q2) * A).fetch(as_dict=True) == expected_data From 2f30e401d99a0a32bb95bf09ba1fc9e227c78e4a Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 7 Dec 2023 15:11:57 -0600 Subject: [PATCH 064/212] Migrate schema_uuid --- tests/conftest.py | 15 +++++++++++++++ tests/schema_uuid.py | 13 ++++++------- 2 files changed, 21 insertions(+), 7 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 0b1465241..fc7f7c42c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -22,6 +22,7 @@ schema_advanced, schema_adapted, schema_external, + schema_uuid, ) @@ -307,6 +308,20 @@ def schema_ext(connection_test, stores_config, enable_filepath_feature): schema.drop() +@pytest.fixture +def schema_uuid(connection_test): + schema = dj.Schema( + PREFIX + "_test1", + context=schema_uuid.LOCALS_UUID, + connection=connection_test, + ) + schema(Basic) + schema(Topic) + schema(Item) + yield schema + schema.drop() + + @pytest.fixture(scope="session") def http_client(): # Initialize httpClient with relevant timeout. diff --git a/tests/schema_uuid.py b/tests/schema_uuid.py index 8aeff5cb5..b8e55f26b 100644 --- a/tests/schema_uuid.py +++ b/tests/schema_uuid.py @@ -2,12 +2,9 @@ import datajoint as dj from . 
import PREFIX, CONN_INFO -schema = dj.Schema(PREFIX + "_test1", connection=dj.conn(**CONN_INFO)) - top_level_namespace_id = uuid.UUID("00000000-0000-0000-0000-000000000000") -@schema class Basic(dj.Manual): definition = """ item : uuid @@ -16,7 +13,6 @@ class Basic(dj.Manual): """ -@schema class Topic(dj.Manual): definition = """ # A topic for items @@ -32,12 +28,11 @@ def add(self, topic): ) -@schema class Item(dj.Computed): definition = """ - item_id : uuid # internal identification of + item_id : uuid # internal identification of --- - -> Topic + -> Topic word : varchar(8000) """ @@ -48,3 +43,7 @@ def make(self, key): self.insert1( dict(key, word=word, item_id=uuid.uuid5(key["topic_id"], word)) ) + + +LOCALS_UUID = {k: v for k, v in locals().items() if inspect.isclass(v)} +__all__ = list(LOCALS_UUID) From b1cd039b269d05a514797f0474aad75d168ca5ac Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 7 Dec 2023 15:37:57 -0600 Subject: [PATCH 065/212] Fix test tests/test_aggr_regressions.py::test_left_join_len Needs schema_uuid fixture, not schema_aggr_regress fixture --- tests/conftest.py | 10 +-- tests/schema_aggr_regress.py | 51 ++++++++++++++ tests/schema_uuid.py | 1 + tests/test_aggr_regressions.py | 124 ++++++++++++++------------------- 4 files changed, 108 insertions(+), 78 deletions(-) create mode 100644 tests/schema_aggr_regress.py diff --git a/tests/conftest.py b/tests/conftest.py index fc7f7c42c..f0a7a58b6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -22,7 +22,7 @@ schema_advanced, schema_adapted, schema_external, - schema_uuid, + schema_uuid as schema_uuid_module, ) @@ -312,12 +312,12 @@ def schema_ext(connection_test, stores_config, enable_filepath_feature): def schema_uuid(connection_test): schema = dj.Schema( PREFIX + "_test1", - context=schema_uuid.LOCALS_UUID, + context=schema_uuid_module.LOCALS_UUID, connection=connection_test, ) - schema(Basic) - schema(Topic) - schema(Item) + schema(schema_uuid_module.Basic) + schema(schema_uuid_module.Topic) + schema(schema_uuid_module.Item) yield schema schema.drop() diff --git a/tests/schema_aggr_regress.py b/tests/schema_aggr_regress.py new file mode 100644 index 000000000..9b85bfffb --- /dev/null +++ b/tests/schema_aggr_regress.py @@ -0,0 +1,51 @@ +import datajoint as dj +import itertools +import inspect + + +class R(dj.Lookup): + definition = """ + r : char(1) + """ + contents = zip("ABCDFGHIJKLMNOPQRST") + + +class Q(dj.Lookup): + definition = """ + -> R + """ + contents = zip("ABCDFGH") + + +class S(dj.Lookup): + definition = """ + -> R + s : int + """ + contents = itertools.product("ABCDF", range(10)) + + +class A(dj.Lookup): + definition = """ + id: int + """ + contents = zip(range(10)) + + +class B(dj.Lookup): + definition = """ + -> A + id2: int + """ + contents = zip(range(5), range(5, 10)) + + +class X(dj.Lookup): + definition = """ + id: int + """ + contents = zip(range(10)) + + +LOCALS_AGGR_REGRESS = {k: v for k, v in locals().items() if inspect.isclass(v)} +__all__ = list(LOCALS_AGGR_REGRESS) diff --git a/tests/schema_uuid.py b/tests/schema_uuid.py index b8e55f26b..6bf994b5b 100644 --- a/tests/schema_uuid.py +++ b/tests/schema_uuid.py @@ -1,4 +1,5 @@ import uuid +import inspect import datajoint as dj from . 
import PREFIX, CONN_INFO diff --git a/tests/test_aggr_regressions.py b/tests/test_aggr_regressions.py index 18ed0ba84..fe5e12ab1 100644 --- a/tests/test_aggr_regressions.py +++ b/tests/test_aggr_regressions.py @@ -2,64 +2,62 @@ Regression tests for issues 386, 449, 484, and 558 — all related to processing complex aggregations and projections. """ -import itertools -from nose.tools import assert_equal +import pytest import datajoint as dj -from . import PREFIX, CONN_INFO +from . import PREFIX import uuid from .schema_uuid import Topic, Item, top_level_namespace_id +from .schema_aggr_regress import R, Q, S, A, B, X, LOCALS_AGGR_REGRESS -schema = dj.Schema(PREFIX + "_aggr_regress", connection=dj.conn(**CONN_INFO)) -# --------------- ISSUE 386 ------------------- -# Issue 386 resulted from the loss of aggregated attributes when the aggregation was used as the restrictor -# Q & (R.aggr(S, n='count(*)') & 'n=2') -# Error: Unknown column 'n' in HAVING +@pytest.fixture +def schema_aggr_reg(connection_test): + schema = dj.Schema( + PREFIX + "_aggr_regress", + context=LOCALS_AGGR_REGRESS, + connection=connection_test, + ) + schema(R) + schema(Q) + schema(S) + yield schema + schema.drop() -@schema -class R(dj.Lookup): - definition = """ - r : char(1) - """ - contents = zip("ABCDFGHIJKLMNOPQRST") +@pytest.fixture +def schema_aggr_reg_with_abx(schema_aggr_reg): + schema_aggr_reg(A) + schema_aggr_reg(B) + schema_aggr_reg(X) + yield schema_aggr_reg -@schema -class Q(dj.Lookup): - definition = """ - -> R +def test_issue386(schema_aggr_reg): """ - contents = zip("ABCDFGH") - - -@schema -class S(dj.Lookup): - definition = """ - -> R - s : int + --------------- ISSUE 386 ------------------- + Issue 386 resulted from the loss of aggregated attributes when the aggregation was used as the restrictor + Q & (R.aggr(S, n='count(*)') & 'n=2') + Error: Unknown column 'n' in HAVING """ - contents = itertools.product("ABCDF", range(10)) - - -def test_issue386(): result = R.aggr(S, n="count(*)") & "n=10" result = Q & result result.fetch() -# ---------------- ISSUE 449 ------------------ -# Issue 449 arises from incorrect group by attributes after joining with a dj.U() - - -def test_issue449(): +def test_issue449(schema_aggr_reg): + """ + ---------------- ISSUE 449 ------------------ + Issue 449 arises from incorrect group by attributes after joining with a dj.U() + """ result = dj.U("n") * R.aggr(S, n="max(s)") result.fetch() -# ---------------- ISSUE 484 ----------------- -# Issue 484 -def test_issue484(): +def test_issue484(schema_aggr_reg): + """ + ---------------- ISSUE 484 ----------------- + Issue 484 + """ q = dj.U().aggr(S, n="max(s)") n = q.fetch("n") n = q.fetch1("n") @@ -68,47 +66,25 @@ def test_issue484(): result.fetch() -# --------------- ISSUE 558 ------------------ -# Issue 558 resulted from the fact that DataJoint saves subqueries and often combines a restriction followed -# by a projection into a single SELECT statement, which in several unusual cases produces unexpected results. 
- - -@schema -class A(dj.Lookup): - definition = """ - id: int - """ - contents = zip(range(10)) - -@schema -class B(dj.Lookup): - definition = """ - -> A - id2: int +class TestIssue558: """ - contents = zip(range(5), range(5, 10)) - - -@schema -class X(dj.Lookup): - definition = """ - id: int + --------------- ISSUE 558 ------------------ + Issue 558 resulted from the fact that DataJoint saves subqueries and often combines a restriction followed + by a projection into a single SELECT statement, which in several unusual cases produces unexpected results. """ - contents = zip(range(10)) - -def test_issue558_part1(): - q = (A - B).proj(id2="3") - assert_equal(len(A - B), len(q)) + def test_issue558_part1(self, schema_aggr_reg_with_abx): + q = (A - B).proj(id2="3") + assert len(A - B) == len(q) -def test_issue558_part2(): - d = dict(id=3, id2=5) - assert_equal(len(X & d), len((X & d).proj(id2="3"))) + def test_issue558_part2(self, schema_aggr_reg_with_abx): + d = dict(id=3, id2=5) + assert len(X & d) == len((X & d).proj(id2="3")) -def test_left_join_len(): +def test_left_join_len(schema_uuid): Topic().add("jeff") Item.populate() Topic().add("jeff2") @@ -120,8 +96,10 @@ def test_left_join_len(): assert len(q) == len(qf) -def test_union_join(): - # https://github.com/datajoint/datajoint-python/issues/930 +def test_union_join(schema_aggr_reg_with_abx): + """ + https://github.com/datajoint/datajoint-python/issues/930 + """ A.insert(zip([100, 200, 300, 400, 500, 600])) B.insert([(100, 11), (200, 22), (300, 33), (400, 44)]) q1 = B & "id < 300" From 5b8c4296f0da58287c99f276fc8a7ce9bb1502a5 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 7 Dec 2023 15:41:45 -0600 Subject: [PATCH 066/212] Tests pass with module scoped fixtures --- tests/test_aggr_regressions.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_aggr_regressions.py b/tests/test_aggr_regressions.py index fe5e12ab1..1d208ea56 100644 --- a/tests/test_aggr_regressions.py +++ b/tests/test_aggr_regressions.py @@ -10,7 +10,7 @@ from .schema_aggr_regress import R, Q, S, A, B, X, LOCALS_AGGR_REGRESS -@pytest.fixture +@pytest.fixture(scope="module") def schema_aggr_reg(connection_test): schema = dj.Schema( PREFIX + "_aggr_regress", @@ -24,7 +24,7 @@ def schema_aggr_reg(connection_test): schema.drop() -@pytest.fixture +@pytest.fixture(scope="module") def schema_aggr_reg_with_abx(schema_aggr_reg): schema_aggr_reg(A) schema_aggr_reg(B) From f84c21a02e697a32e7db0a3763402b5623d11d28 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Fri, 8 Dec 2023 15:27:59 -0600 Subject: [PATCH 067/212] Fix fixtures --- tests/test_aggr_regressions.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/test_aggr_regressions.py b/tests/test_aggr_regressions.py index 1d208ea56..e97f91f64 100644 --- a/tests/test_aggr_regressions.py +++ b/tests/test_aggr_regressions.py @@ -10,7 +10,7 @@ from .schema_aggr_regress import R, Q, S, A, B, X, LOCALS_AGGR_REGRESS -@pytest.fixture(scope="module") +@pytest.fixture def schema_aggr_reg(connection_test): schema = dj.Schema( PREFIX + "_aggr_regress", @@ -24,12 +24,13 @@ def schema_aggr_reg(connection_test): schema.drop() -@pytest.fixture(scope="module") +@pytest.fixture def schema_aggr_reg_with_abx(schema_aggr_reg): schema_aggr_reg(A) schema_aggr_reg(B) schema_aggr_reg(X) yield schema_aggr_reg + schema_aggr_reg.drop() def test_issue386(schema_aggr_reg): From af2872920715007937c3c0b6d46595e5731f0da9 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Fri, 8 Dec 2023 15:28:14 -0600 
Subject: [PATCH 068/212] If we skip these, test_union_join passes --- tests/test_aggr_regressions.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/test_aggr_regressions.py b/tests/test_aggr_regressions.py index e97f91f64..3846c0408 100644 --- a/tests/test_aggr_regressions.py +++ b/tests/test_aggr_regressions.py @@ -33,6 +33,7 @@ def schema_aggr_reg_with_abx(schema_aggr_reg): schema_aggr_reg.drop() +@pytest.mark.skip def test_issue386(schema_aggr_reg): """ --------------- ISSUE 386 ------------------- @@ -45,6 +46,7 @@ def test_issue386(schema_aggr_reg): result.fetch() +@pytest.mark.skip def test_issue449(schema_aggr_reg): """ ---------------- ISSUE 449 ------------------ @@ -75,11 +77,12 @@ class TestIssue558: by a projection into a single SELECT statement, which in several unusual cases produces unexpected results. """ + @pytest.mark.skip def test_issue558_part1(self, schema_aggr_reg_with_abx): q = (A - B).proj(id2="3") assert len(A - B) == len(q) - + @pytest.mark.skip def test_issue558_part2(self, schema_aggr_reg_with_abx): d = dict(id=3, id2=5) assert len(X & d) == len((X & d).proj(id2="3")) From c81ee0a61260f73c7953dbf47e206a0239d23036 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Fri, 8 Dec 2023 15:30:40 -0600 Subject: [PATCH 069/212] Skipping TestIssue558 passes all --- tests/test_aggr_regressions.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/tests/test_aggr_regressions.py b/tests/test_aggr_regressions.py index 3846c0408..66d84c2e5 100644 --- a/tests/test_aggr_regressions.py +++ b/tests/test_aggr_regressions.py @@ -33,7 +33,6 @@ def schema_aggr_reg_with_abx(schema_aggr_reg): schema_aggr_reg.drop() -@pytest.mark.skip def test_issue386(schema_aggr_reg): """ --------------- ISSUE 386 ------------------- @@ -46,7 +45,6 @@ def test_issue386(schema_aggr_reg): result.fetch() -@pytest.mark.skip def test_issue449(schema_aggr_reg): """ ---------------- ISSUE 449 ------------------ @@ -70,6 +68,7 @@ def test_issue484(schema_aggr_reg): +@pytest.mark.skip class TestIssue558: """ --------------- ISSUE 558 ------------------ @@ -77,12 +76,10 @@ class TestIssue558: by a projection into a single SELECT statement, which in several unusual cases produces unexpected results. 
""" - @pytest.mark.skip def test_issue558_part1(self, schema_aggr_reg_with_abx): q = (A - B).proj(id2="3") assert len(A - B) == len(q) - @pytest.mark.skip def test_issue558_part2(self, schema_aggr_reg_with_abx): d = dict(id=3, id2=5) assert len(X & d) == len((X & d).proj(id2="3")) From 71720a33b3de72baf896258fb84bd8a17c0d27b3 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Fri, 8 Dec 2023 15:37:01 -0600 Subject: [PATCH 070/212] Tests passing with reordering --- tests/test_aggr_regressions.py | 60 ++++++++++++++++++---------------- 1 file changed, 31 insertions(+), 29 deletions(-) diff --git a/tests/test_aggr_regressions.py b/tests/test_aggr_regressions.py index 66d84c2e5..cc55aa469 100644 --- a/tests/test_aggr_regressions.py +++ b/tests/test_aggr_regressions.py @@ -10,11 +10,12 @@ from .schema_aggr_regress import R, Q, S, A, B, X, LOCALS_AGGR_REGRESS -@pytest.fixture +@pytest.fixture(scope="function") def schema_aggr_reg(connection_test): + context = {k: v for k, v in LOCALS_AGGR_REGRESS.items() if k in ('R', 'Q', 'S')} schema = dj.Schema( PREFIX + "_aggr_regress", - context=LOCALS_AGGR_REGRESS, + context=context, connection=connection_test, ) schema(R) @@ -24,11 +25,12 @@ def schema_aggr_reg(connection_test): schema.drop() -@pytest.fixture +@pytest.fixture(scope="function") def schema_aggr_reg_with_abx(schema_aggr_reg): - schema_aggr_reg(A) - schema_aggr_reg(B) - schema_aggr_reg(X) + context = {k: v for k, v in LOCALS_AGGR_REGRESS.items() if k in ('A', 'B', 'X')} + schema_aggr_reg(A, context=context) + schema_aggr_reg(B, context=context) + schema_aggr_reg(X, context=context) yield schema_aggr_reg schema_aggr_reg.drop() @@ -68,7 +70,29 @@ def test_issue484(schema_aggr_reg): -@pytest.mark.skip +def test_union_join(schema_aggr_reg_with_abx): + """ + https://github.com/datajoint/datajoint-python/issues/930 + """ + A.insert(zip([100, 200, 300, 400, 500, 600])) + B.insert([(100, 11), (200, 22), (300, 33), (400, 44)]) + q1 = B & "id < 300" + q2 = B & "id > 300" + + expected_data = [ + {"id": 0, "id2": 5}, + {"id": 1, "id2": 6}, + {"id": 2, "id2": 7}, + {"id": 3, "id2": 8}, + {"id": 4, "id2": 9}, + {"id": 100, "id2": 11}, + {"id": 200, "id2": 22}, + {"id": 400, "id2": 44}, + ] + + assert ((q1 + q2) * A).fetch(as_dict=True) == expected_data + +# @pytest.mark.skip class TestIssue558: """ --------------- ISSUE 558 ------------------ @@ -96,25 +120,3 @@ def test_left_join_len(schema_uuid): qf = q.fetch() assert len(q) == len(qf) - -def test_union_join(schema_aggr_reg_with_abx): - """ - https://github.com/datajoint/datajoint-python/issues/930 - """ - A.insert(zip([100, 200, 300, 400, 500, 600])) - B.insert([(100, 11), (200, 22), (300, 33), (400, 44)]) - q1 = B & "id < 300" - q2 = B & "id > 300" - - expected_data = [ - {"id": 0, "id2": 5}, - {"id": 1, "id2": 6}, - {"id": 2, "id2": 7}, - {"id": 3, "id2": 8}, - {"id": 4, "id2": 9}, - {"id": 100, "id2": 11}, - {"id": 200, "id2": 22}, - {"id": 400, "id2": 44}, - ] - - assert ((q1 + q2) * A).fetch(as_dict=True) == expected_data From 19a4744d1810af1bc942ce518d65ccac427a317f Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Fri, 8 Dec 2023 15:38:36 -0600 Subject: [PATCH 071/212] test_aggr_regressions passing --- tests/test_aggr_regressions.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/tests/test_aggr_regressions.py b/tests/test_aggr_regressions.py index cc55aa469..3f528ccc0 100644 --- a/tests/test_aggr_regressions.py +++ b/tests/test_aggr_regressions.py @@ -12,10 +12,9 @@ @pytest.fixture(scope="function") def 
schema_aggr_reg(connection_test): - context = {k: v for k, v in LOCALS_AGGR_REGRESS.items() if k in ('R', 'Q', 'S')} schema = dj.Schema( PREFIX + "_aggr_regress", - context=context, + context=LOCALS_AGGR_REGRESS, connection=connection_test, ) schema(R) @@ -27,7 +26,7 @@ def schema_aggr_reg(connection_test): @pytest.fixture(scope="function") def schema_aggr_reg_with_abx(schema_aggr_reg): - context = {k: v for k, v in LOCALS_AGGR_REGRESS.items() if k in ('A', 'B', 'X')} + context = LOCALS_AGGR_REGRESS schema_aggr_reg(A, context=context) schema_aggr_reg(B, context=context) schema_aggr_reg(X, context=context) @@ -69,7 +68,6 @@ def test_issue484(schema_aggr_reg): result.fetch() - def test_union_join(schema_aggr_reg_with_abx): """ https://github.com/datajoint/datajoint-python/issues/930 @@ -92,7 +90,7 @@ def test_union_join(schema_aggr_reg_with_abx): assert ((q1 + q2) * A).fetch(as_dict=True) == expected_data -# @pytest.mark.skip + class TestIssue558: """ --------------- ISSUE 558 ------------------ @@ -119,4 +117,3 @@ def test_left_join_len(schema_uuid): ) qf = q.fetch() assert len(q) == len(qf) - From 894e548e4dc3118c6afcd83798b0d3ff567ad07f Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Fri, 8 Dec 2023 16:31:50 -0600 Subject: [PATCH 072/212] Construct schema_aggr_reg_with_abx separately --- tests/test_aggr_regressions.py | 25 ++++++++++++++++++------- 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/tests/test_aggr_regressions.py b/tests/test_aggr_regressions.py index 3f528ccc0..b4d4e0802 100644 --- a/tests/test_aggr_regressions.py +++ b/tests/test_aggr_regressions.py @@ -12,9 +12,10 @@ @pytest.fixture(scope="function") def schema_aggr_reg(connection_test): + context = LOCALS_AGGR_REGRESS schema = dj.Schema( PREFIX + "_aggr_regress", - context=LOCALS_AGGR_REGRESS, + context=context, connection=connection_test, ) schema(R) @@ -25,13 +26,21 @@ def schema_aggr_reg(connection_test): @pytest.fixture(scope="function") -def schema_aggr_reg_with_abx(schema_aggr_reg): +def schema_aggr_reg_with_abx(connection_test): context = LOCALS_AGGR_REGRESS - schema_aggr_reg(A, context=context) - schema_aggr_reg(B, context=context) - schema_aggr_reg(X, context=context) - yield schema_aggr_reg - schema_aggr_reg.drop() + schema = dj.Schema( + PREFIX + "_aggr_regress_with_abx", + context=context, + connection=connection_test, + ) + schema(R) + schema(Q) + schema(S) + schema(A) + schema(B) + schema(X) + yield schema + schema.drop() def test_issue386(schema_aggr_reg): @@ -70,6 +79,8 @@ def test_issue484(schema_aggr_reg): def test_union_join(schema_aggr_reg_with_abx): """ + This test fails if it runs after TestIssue558. 
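+    (the exact cause is unclear; it appears to be state shared through the
+    A, B, and X tables, which is why this fixture now builds them in a
+    schema of its own)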
+ https://github.com/datajoint/datajoint-python/issues/930 """ A.insert(zip([100, 200, 300, 400, 500, 600])) From 110d642f18e75502be2de9fdeb1aa4733b58bac6 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Fri, 8 Dec 2023 16:32:04 -0600 Subject: [PATCH 073/212] Add context to diagram tests --- tests/test_erd.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_erd.py b/tests/test_erd.py index aebf62eaf..8a2d1d3ac 100644 --- a/tests/test_erd.py +++ b/tests/test_erd.py @@ -58,7 +58,7 @@ def test_make_image(schema_simp): def test_part_table_parsing(schema_simp): # https://github.com/datajoint/datajoint-python/issues/882 - erd = dj.Di(schema_simp) + erd = dj.Di(schema_simp, context=LOCALS_SIMPLE) graph = erd._make_graph() assert "OutfitLaunch" in graph.nodes() assert "OutfitLaunch.OutfitPiece" in graph.nodes() From 7e9ea6a022c20ed04e93f52b7d8773b673f2e9e2 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Sat, 9 Dec 2023 09:29:02 -0600 Subject: [PATCH 074/212] First pass at migrating test_alter --- tests/schema_alter.py | 58 ++++++++++++++++++++++++++ tests/test_alter.py | 96 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 154 insertions(+) create mode 100644 tests/schema_alter.py create mode 100644 tests/test_alter.py diff --git a/tests/schema_alter.py b/tests/schema_alter.py new file mode 100644 index 000000000..a8375b182 --- /dev/null +++ b/tests/schema_alter.py @@ -0,0 +1,58 @@ +import random +import numpy as np +import datajoint as dj +import inspect + + +class Experiment(dj.Imported): + original_definition = """ # information about experiments + -> Subject + experiment_id :smallint # experiment number for this subject + --- + experiment_date :date # date when experiment was started + -> [nullable] User + data_path="" :varchar(255) # file path to recorded data + notes="" :varchar(2048) # e.g. purpose of experiment + entry_time=CURRENT_TIMESTAMP :timestamp # automatic timestamp + """ + + definition1 = """ # Experiment + -> Subject + experiment_id :smallint # experiment number for this subject + --- + data_path : int # some number + extra=null : longblob # just testing + -> [nullable] User + subject_notes=null :varchar(2048) # {notes} e.g. purpose of experiment + entry_time=CURRENT_TIMESTAMP :timestamp # automatic timestamp + """ + + +class Parent(dj.Manual): + definition = """ + parent_id: int + """ + + class Child(dj.Part): + definition = """ + -> Parent + """ + definition_new = """ + -> master + --- + child_id=null: int + """ + + class Grandchild(dj.Part): + definition = """ + -> master.Child + """ + definition_new = """ + -> master.Child + --- + grandchild_id=null: int + """ + + +LOCALS_ALTER = {k: v for k, v in locals().items() if inspect.isclass(v)} +__all__ = list(LOCALS_ALTER) diff --git a/tests/test_alter.py b/tests/test_alter.py new file mode 100644 index 000000000..29144d0cb --- /dev/null +++ b/tests/test_alter.py @@ -0,0 +1,96 @@ +import pytest +import re +import datajoint as dj +from . 
import schema_simple, schema_alter as schema_alter_module, PREFIX + + +@pytest.fixture +def _schema_alter(connection_test): + context = { + # **schema_alter_module.LOCALS_ALTER, + # **schema_simple.LOCALS_SIMPLE, + } + schema = dj.Schema( + PREFIX + "_alter", context=context, connection=connection_test + ) + schema(schema_simple.IJ, context=schema_simple.LOCALS_SIMPLE) + schema(schema_simple.JI, context=schema_simple.LOCALS_SIMPLE) + schema(schema_simple.A, context=schema_simple.LOCALS_SIMPLE) + schema(schema_simple.B, context=schema_simple.LOCALS_SIMPLE) + schema(schema_simple.L, context=schema_simple.LOCALS_SIMPLE) + schema(schema_simple.D, context=schema_simple.LOCALS_SIMPLE) + schema(schema_simple.E, context=schema_simple.LOCALS_SIMPLE) + schema(schema_simple.F, context=schema_simple.LOCALS_SIMPLE) + schema(schema_simple.F, context=schema_simple.LOCALS_SIMPLE) + schema(schema_simple.DataA, context=schema_simple.LOCALS_SIMPLE) + schema(schema_simple.DataB, context=schema_simple.LOCALS_SIMPLE) + schema(schema_simple.Website, context=schema_simple.LOCALS_SIMPLE) + schema(schema_simple.Profile, context=schema_simple.LOCALS_SIMPLE) + schema(schema_simple.Website, context=schema_simple.LOCALS_SIMPLE) + schema(schema_simple.TTestUpdate, context=schema_simple.LOCALS_SIMPLE) + schema(schema_simple.ArgmaxTest, context=schema_simple.LOCALS_SIMPLE) + schema(schema_simple.ReservedWord, context=schema_simple.LOCALS_SIMPLE) + schema(schema_simple.OutfitLaunch, context=schema_simple.LOCALS_SIMPLE) + + schema(schema_alter_module.Experiment, context=schema_alter_module.LOCALS_ALTER) + schema(schema_alter_module.Parent, context=schema_alter_module.LOCALS_ALTER) + + yield schema + schema.drop() + + +@pytest.fixture +def schema_alter(schema_simp): + # context = { + # **schema_simple.LOCALS_SIMPLE, + # **schema_alter_module.LOCALS_ALTER, + # } + schema = schema_simp + schema(schema_alter_module.Experiment) + schema(schema_alter_module.Parent) + yield schema + schema.drop() + + + +def test_alter(schema_alter): + schema = schema_alter + original = schema.connection.query( + "SHOW CREATE TABLE " + Experiment.full_table_name + ).fetchone()[1] + Experiment.definition = Experiment.definition1 + Experiment.alter(prompt=False) + altered = schema.connection.query( + "SHOW CREATE TABLE " + Experiment.full_table_name + ).fetchone()[1] + assert original != altered + Experiment.definition = Experiment.original_definition + Experiment().alter(prompt=False) + restored = schema.connection.query( + "SHOW CREATE TABLE " + Experiment.full_table_name + ).fetchone()[1] + assert altered != restored + assert original == restored + + +def test_alter_part(schema_alter): + # https://github.com/datajoint/datajoint-python/issues/936 + schema = schema_alter + + def verify_alter(table, attribute_sql): + definition_original = schema.connection.query( + f"SHOW CREATE TABLE {table.full_table_name}" + ).fetchone()[1] + table.definition = table.definition_new + table.alter(prompt=False) + definition_new = schema.connection.query( + f"SHOW CREATE TABLE {table.full_table_name}" + ).fetchone()[1] + assert ( + re.sub(f"{attribute_sql},\n ", "", definition_new) == definition_original + ) + + verify_alter(table=Parent.Child, attribute_sql="`child_id` .* DEFAULT NULL") + verify_alter( + table=Parent.Grandchild, attribute_sql="`grandchild_id` .* DEFAULT NULL" + ) From 6bc4a7fceed74f32a52c51a50fce910c6cc535d6 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Sat, 9 Dec 2023 09:45:41 -0600 Subject: [PATCH 075/212] One test passing --- tests/test_alter.py 
| 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/tests/test_alter.py b/tests/test_alter.py index 29144d0cb..97b77e8c3 100644 --- a/tests/test_alter.py +++ b/tests/test_alter.py @@ -40,14 +40,14 @@ def _schema_alter(connection_test): @pytest.fixture -def schema_alter(schema_simp): - # context = { - # **schema_simple.LOCALS_SIMPLE, - # **schema_alter_module.LOCALS_ALTER, - # } - schema = schema_simp - schema(schema_alter_module.Experiment) - schema(schema_alter_module.Parent) +def schema_alter(schema_any): + context = { + # **schema_simple.LOCALS_SIMPLE, + **schema_alter_module.LOCALS_ALTER, + } + schema = schema_any + schema(schema_alter_module.Experiment, context=context) + schema(schema_alter_module.Parent, context=context) yield schema schema.drop() @@ -56,18 +56,18 @@ def schema_alter(schema_simp): def test_alter(schema_alter): schema = schema_alter original = schema.connection.query( - "SHOW CREATE TABLE " + Experiment.full_table_name + "SHOW CREATE TABLE " + schema_alter_module.Experiment.full_table_name ).fetchone()[1] - Experiment.definition = Experiment.definition1 - Experiment.alter(prompt=False) + schema_alter_module.Experiment.definition = schema_alter_module.Experiment.definition1 + schema_alter_module.Experiment.alter(prompt=False) altered = schema.connection.query( - "SHOW CREATE TABLE " + Experiment.full_table_name + "SHOW CREATE TABLE " + schema_alter_module.Experiment.full_table_name ).fetchone()[1] assert original != altered - Experiment.definition = Experiment.original_definition - Experiment().alter(prompt=False) + schema_alter_module.Experiment.definition = schema_alter_module.Experiment.original_definition + schema_alter_module.Experiment().alter(prompt=False) restored = schema.connection.query( - "SHOW CREATE TABLE " + Experiment.full_table_name + "SHOW CREATE TABLE " + schema_alter_module.Experiment.full_table_name ).fetchone()[1] assert altered != restored assert original == restored @@ -90,7 +90,7 @@ def verify_alter(table, attribute_sql): re.sub(f"{attribute_sql},\n ", "", definition_new) == definition_original ) - verify_alter(table=Parent.Child, attribute_sql="`child_id` .* DEFAULT NULL") + verify_alter(table=schema_alter_module.Parent.Child, attribute_sql="`child_id` .* DEFAULT NULL") verify_alter( - table=Parent.Grandchild, attribute_sql="`grandchild_id` .* DEFAULT NULL" + table=schema_alter_module.Parent.Grandchild, attribute_sql="`grandchild_id` .* DEFAULT NULL" ) From 9c0b1b0d624ea81e2db8a03183765201a4c5068f Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Sat, 9 Dec 2023 13:41:21 -0600 Subject: [PATCH 076/212] Start with copy of schema_any fixture --- tests/test_alter.py | 87 +++++++++++++++++++++++++++------------------ 1 file changed, 52 insertions(+), 35 deletions(-) diff --git a/tests/test_alter.py b/tests/test_alter.py index 97b77e8c3..aecf9b11f 100644 --- a/tests/test_alter.py +++ b/tests/test_alter.py @@ -1,55 +1,72 @@ import pytest import re import datajoint as dj -from . import schema_simple, schema_alter as schema_alter_module, PREFIX +from . 
import schema as schema_any_module, schema_alter as schema_alter_module, PREFIX @pytest.fixture -def _schema_alter(connection_test): +def schema_alter(connection_test): context = { + **schema_any_module.LOCALS_ANY, # **schema_alter_module.LOCALS_ALTER, - # **schema_simple.LOCALS_SIMPLE, } - schema = dj.Schema( - PREFIX + "_alter", context=context, connection=connection_test + schema_any = dj.Schema( + PREFIX + "_test1", context=context, connection=connection_test ) - schema(schema_simple.IJ, context=schema_simple.LOCALS_SIMPLE) - schema(schema_simple.JI, context=schema_simple.LOCALS_SIMPLE) - schema(schema_simple.A, context=schema_simple.LOCALS_SIMPLE) - schema(schema_simple.B, context=schema_simple.LOCALS_SIMPLE) - schema(schema_simple.L, context=schema_simple.LOCALS_SIMPLE) - schema(schema_simple.D, context=schema_simple.LOCALS_SIMPLE) - schema(schema_simple.E, context=schema_simple.LOCALS_SIMPLE) - schema(schema_simple.F, context=schema_simple.LOCALS_SIMPLE) - schema(schema_simple.F, context=schema_simple.LOCALS_SIMPLE) - schema(schema_simple.DataA, context=schema_simple.LOCALS_SIMPLE) - schema(schema_simple.DataB, context=schema_simple.LOCALS_SIMPLE) - schema(schema_simple.Website, context=schema_simple.LOCALS_SIMPLE) - schema(schema_simple.Profile, context=schema_simple.LOCALS_SIMPLE) - schema(schema_simple.Website, context=schema_simple.LOCALS_SIMPLE) - schema(schema_simple.TTestUpdate, context=schema_simple.LOCALS_SIMPLE) - schema(schema_simple.ArgmaxTest, context=schema_simple.LOCALS_SIMPLE) - schema(schema_simple.ReservedWord, context=schema_simple.LOCALS_SIMPLE) - schema(schema_simple.OutfitLaunch, context=schema_simple.LOCALS_SIMPLE) + schema_any(schema_any_module.TTest) + schema_any(schema_any_module.TTest2) + schema_any(schema_any_module.TTest3) + schema_any(schema_any_module.NullableNumbers) + schema_any(schema_any_module.TTestExtra) + schema_any(schema_any_module.TTestNoExtra) + schema_any(schema_any_module.Auto) + schema_any(schema_any_module.User) + schema_any(schema_any_module.Subject) + schema_any(schema_any_module.Language) + schema_any(schema_any_module.Experiment) + schema_any(schema_any_module.Trial) + schema_any(schema_any_module.Ephys) + schema_any(schema_any_module.Image) + schema_any(schema_any_module.UberTrash) + schema_any(schema_any_module.UnterTrash) + schema_any(schema_any_module.SimpleSource) + schema_any(schema_any_module.SigIntTable) + schema_any(schema_any_module.SigTermTable) + schema_any(schema_any_module.DjExceptionName) + schema_any(schema_any_module.ErrorClass) + schema_any(schema_any_module.DecimalPrimaryKey) + schema_any(schema_any_module.IndexRich) + schema_any(schema_any_module.ThingA) + schema_any(schema_any_module.ThingB) + schema_any(schema_any_module.ThingC) + schema_any(schema_any_module.Parent) + schema_any(schema_any_module.Child) + schema_any(schema_any_module.ComplexParent) + schema_any(schema_any_module.ComplexChild) + schema_any(schema_any_module.SubjectA) + schema_any(schema_any_module.SessionA) + schema_any(schema_any_module.SessionStatusA) + schema_any(schema_any_module.SessionDateA) + schema_any(schema_any_module.Stimulus) + schema_any(schema_any_module.Longblob) - schema(schema_alter_module.Experiment, context=schema_alter_module.LOCALS_ALTER) - schema(schema_alter_module.Parent, context=schema_alter_module.LOCALS_ALTER) + # schema_any(schema_alter_module.Experiment) + # schema_any(schema_alter_module.Parent) - yield schema - schema.drop() + yield schema_any + schema_any.drop() -@pytest.fixture -def schema_alter(schema_any): +# 
@pytest.fixture +def _schema_alter(schema_any): context = { - # **schema_simple.LOCALS_SIMPLE, + **schema_any_module.LOCALS_ANY, **schema_alter_module.LOCALS_ALTER, } - schema = schema_any - schema(schema_alter_module.Experiment, context=context) - schema(schema_alter_module.Parent, context=context) - yield schema - schema.drop() + schema_any(schema_alter_module.Experiment, context=context) + schema_any(schema_alter_module.Parent, context=context) + yield schema_any + schema_any.drop() From 815a353d5c353557528b42fd42768f0e4d142c2b Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Sat, 9 Dec 2023 15:46:49 -0600 Subject: [PATCH 077/212] One test_alter test passing --- tests/test_alter.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_alter.py b/tests/test_alter.py index aecf9b11f..ed601c045 100644 --- a/tests/test_alter.py +++ b/tests/test_alter.py @@ -50,8 +50,8 @@ def schema_alter(connection_test): schema_any(schema_any_module.Stimulus) schema_any(schema_any_module.Longblob) - # schema_any(schema_alter_module.Experiment) - # schema_any(schema_alter_module.Parent) + schema_any(schema_alter_module.Experiment, context=schema_alter_module.LOCALS_ALTER) + schema_any(schema_alter_module.Parent, context=schema_alter_module.LOCALS_ALTER) yield schema_any schema_any.drop() From 895971c59e05acb49688cd27534f2b1b7573a184 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Sat, 9 Dec 2023 15:50:48 -0600 Subject: [PATCH 078/212] All test_alter passing --- tests/test_alter.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/tests/test_alter.py b/tests/test_alter.py index ed601c045..f3ce8e648 100644 --- a/tests/test_alter.py +++ b/tests/test_alter.py @@ -3,6 +3,11 @@ import datajoint as dj from . import schema as schema_any_module, schema_alter as schema_alter_module, PREFIX +COMBINED_CONTEXT = { + **schema_any_module.LOCALS_ANY, + **schema_alter_module.LOCALS_ALTER, +} + @pytest.fixture def schema_alter(connection_test): @@ -76,13 +81,13 @@ def test_alter(schema_alter): "SHOW CREATE TABLE " + schema_alter_module.Experiment.full_table_name ).fetchone()[1] schema_alter_module.Experiment.definition = schema_alter_module.Experiment.definition1 - schema_alter_module.Experiment.alter(prompt=False) + schema_alter_module.Experiment.alter(prompt=False, context=COMBINED_CONTEXT) altered = schema.connection.query( "SHOW CREATE TABLE " + schema_alter_module.Experiment.full_table_name ).fetchone()[1] assert original != altered schema_alter_module.Experiment.definition = schema_alter_module.Experiment.original_definition - schema_alter_module.Experiment().alter(prompt=False) + schema_alter_module.Experiment().alter(prompt=False, context=COMBINED_CONTEXT) restored = schema.connection.query( "SHOW CREATE TABLE " + schema_alter_module.Experiment.full_table_name ).fetchone()[1] From 8a9d0ee0838cb2d58f7ee675cae80579b9dd6838 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Sat, 9 Dec 2023 15:56:14 -0600 Subject: [PATCH 079/212] Clean up --- tests/test_alter.py | 84 +++++++++++++++++++-------------------------- 1 file changed, 35 insertions(+), 49 deletions(-) diff --git a/tests/test_alter.py b/tests/test_alter.py index f3ce8e648..bdb843581 100644 --- a/tests/test_alter.py +++ b/tests/test_alter.py @@ -2,6 +2,7 @@ import re import datajoint as dj from . 
import schema as schema_any_module, schema_alter as schema_alter_module, PREFIX +from .schema_alter import Parent, Experiment COMBINED_CONTEXT = { **schema_any_module.LOCALS_ANY, @@ -11,12 +12,8 @@ @pytest.fixture def schema_alter(connection_test): - context = { - **schema_any_module.LOCALS_ANY, - # **schema_alter_module.LOCALS_ALTER, - } schema_any = dj.Schema( - PREFIX + "_test1", context=context, connection=connection_test + PREFIX + "_alter", context=schema_any_module.LOCALS_ANY, connection=connection_test ) schema_any(schema_any_module.TTest) schema_any(schema_any_module.TTest2) @@ -55,64 +52,53 @@ def schema_alter(connection_test): schema_any(schema_any_module.Stimulus) schema_any(schema_any_module.Longblob) - schema_any(schema_alter_module.Experiment, context=schema_alter_module.LOCALS_ALTER) - schema_any(schema_alter_module.Parent, context=schema_alter_module.LOCALS_ALTER) + # Add nodes from schema_alter_module + schema_any(Experiment, context=schema_alter_module.LOCALS_ALTER) + schema_any(Parent, context=schema_alter_module.LOCALS_ALTER) yield schema_any schema_any.drop() -# @pytest.fixture -def _schema_alter(schema_any): - context = { - **schema_any_module.LOCALS_ANY, - **schema_alter_module.LOCALS_ALTER, - } - schema_any(schema_alter_module.Experiment, context=context) - schema_any(schema_alter_module.Parent, context=context) - yield schema_any - schema_any.drop() - - - -def test_alter(schema_alter): - schema = schema_alter - original = schema.connection.query( - "SHOW CREATE TABLE " + schema_alter_module.Experiment.full_table_name - ).fetchone()[1] - schema_alter_module.Experiment.definition = schema_alter_module.Experiment.definition1 - schema_alter_module.Experiment.alter(prompt=False, context=COMBINED_CONTEXT) - altered = schema.connection.query( - "SHOW CREATE TABLE " + schema_alter_module.Experiment.full_table_name - ).fetchone()[1] - assert original != altered - schema_alter_module.Experiment.definition = schema_alter_module.Experiment.original_definition - schema_alter_module.Experiment().alter(prompt=False, context=COMBINED_CONTEXT) - restored = schema.connection.query( - "SHOW CREATE TABLE " + schema_alter_module.Experiment.full_table_name - ).fetchone()[1] - assert altered != restored - assert original == restored - +class TestAlter: -def test_alter_part(schema_alter): - # https://github.com/datajoint/datajoint-python/issues/936 - schema = schema_alter + def test_alter(self, schema_alter): + original = schema_alter.connection.query( + "SHOW CREATE TABLE " + Experiment.full_table_name + ).fetchone()[1] + Experiment.definition = Experiment.definition1 + Experiment.alter(prompt=False, context=COMBINED_CONTEXT) + altered = schema_alter.connection.query( + "SHOW CREATE TABLE " + Experiment.full_table_name + ).fetchone()[1] + assert original != altered + Experiment.definition = Experiment.original_definition + Experiment().alter(prompt=False, context=COMBINED_CONTEXT) + restored = schema_alter.connection.query( + "SHOW CREATE TABLE " + Experiment.full_table_name + ).fetchone()[1] + assert altered != restored + assert original == restored - def verify_alter(table, attribute_sql): - definition_original = schema.connection.query( + def verify_alter(self, schema_alter, table, attribute_sql): + definition_original = schema_alter.connection.query( f"SHOW CREATE TABLE {table.full_table_name}" ).fetchone()[1] table.definition = table.definition_new table.alter(prompt=False) - definition_new = schema.connection.query( + definition_new = schema_alter.connection.query( f"SHOW CREATE 
TABLE {table.full_table_name}" ).fetchone()[1] assert ( re.sub(f"{attribute_sql},\n ", "", definition_new) == definition_original ) - verify_alter(table=schema_alter_module.Parent.Child, attribute_sql="`child_id` .* DEFAULT NULL") - verify_alter( - table=schema_alter_module.Parent.Grandchild, attribute_sql="`grandchild_id` .* DEFAULT NULL" - ) + def test_alter_part(self, schema_alter): + """ + https://github.com/datajoint/datajoint-python/issues/936 + """ + self.verify_alter(schema_alter, table=Parent.Child, attribute_sql="`child_id` .* DEFAULT NULL") + self.verify_alter( + schema_alter, + table=Parent.Grandchild, attribute_sql="`grandchild_id` .* DEFAULT NULL" + ) From 6935e870da27545429143a8901e75c5c213e0a01 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Sat, 9 Dec 2023 15:56:28 -0600 Subject: [PATCH 080/212] Format with black --- tests/test_alter.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/tests/test_alter.py b/tests/test_alter.py index bdb843581..fad4b2c33 100644 --- a/tests/test_alter.py +++ b/tests/test_alter.py @@ -13,7 +13,9 @@ @pytest.fixture def schema_alter(connection_test): schema_any = dj.Schema( - PREFIX + "_alter", context=schema_any_module.LOCALS_ANY, connection=connection_test + PREFIX + "_alter", + context=schema_any_module.LOCALS_ANY, + connection=connection_test, ) schema_any(schema_any_module.TTest) schema_any(schema_any_module.TTest2) @@ -61,7 +63,6 @@ def schema_alter(connection_test): class TestAlter: - def test_alter(self, schema_alter): original = schema_alter.connection.query( "SHOW CREATE TABLE " + Experiment.full_table_name @@ -97,8 +98,11 @@ def test_alter_part(self, schema_alter): """ https://github.com/datajoint/datajoint-python/issues/936 """ - self.verify_alter(schema_alter, table=Parent.Child, attribute_sql="`child_id` .* DEFAULT NULL") + self.verify_alter( + schema_alter, table=Parent.Child, attribute_sql="`child_id` .* DEFAULT NULL" + ) self.verify_alter( schema_alter, - table=Parent.Grandchild, attribute_sql="`grandchild_id` .* DEFAULT NULL" + table=Parent.Grandchild, + attribute_sql="`grandchild_id` .* DEFAULT NULL", ) From c6cfc4b0ca43ca3a2034afdad7e3817d497ca136 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Sat, 9 Dec 2023 16:35:03 -0600 Subject: [PATCH 081/212] cp to tests --- tests/test_bypass_serialization.py | 46 ++++++++++++++++++++++++++++++ 1 file changed, 46 insertions(+) create mode 100644 tests/test_bypass_serialization.py diff --git a/tests/test_bypass_serialization.py b/tests/test_bypass_serialization.py new file mode 100644 index 000000000..aa6e89ffc --- /dev/null +++ b/tests/test_bypass_serialization.py @@ -0,0 +1,46 @@ +import datajoint as dj +import numpy as np + +from . 
import PREFIX, CONN_INFO +from numpy.testing import assert_array_equal +from nose.tools import assert_true + + +schema_in = dj.Schema( + PREFIX + "_test_bypass_serialization_in", connection=dj.conn(**CONN_INFO) +) + +schema_out = dj.Schema( + PREFIX + "_test_blob_bypass_serialization_out", connection=dj.conn(**CONN_INFO) +) + + +test_blob = np.array([1, 2, 3]) + + +@schema_in +class Input(dj.Lookup): + definition = """ + id: int + --- + data: blob + """ + contents = [(0, test_blob)] + + +@schema_out +class Output(dj.Manual): + definition = """ + id: int + --- + data: blob + """ + + +def test_bypass_serialization(): + dj.blob.bypass_serialization = True + contents = Input.fetch(as_dict=True) + assert_true(isinstance(contents[0]["data"], bytes)) + Output.insert(contents) + dj.blob.bypass_serialization = False + assert_array_equal(Input.fetch1("data"), Output.fetch1("data")) From fe51002fa7cc6f3a1bc400a6d70216d94a0a65b3 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Sat, 9 Dec 2023 16:39:30 -0600 Subject: [PATCH 082/212] Migrate test_bypass_serialization --- tests/test_bypass_serialization.py | 45 +++++++++++++++++++----------- 1 file changed, 28 insertions(+), 17 deletions(-) diff --git a/tests/test_bypass_serialization.py b/tests/test_bypass_serialization.py index aa6e89ffc..5f73e1d2e 100644 --- a/tests/test_bypass_serialization.py +++ b/tests/test_bypass_serialization.py @@ -1,24 +1,12 @@ +import pytest import datajoint as dj import numpy as np - -from . import PREFIX, CONN_INFO +from . import PREFIX from numpy.testing import assert_array_equal -from nose.tools import assert_true - - -schema_in = dj.Schema( - PREFIX + "_test_bypass_serialization_in", connection=dj.conn(**CONN_INFO) -) - -schema_out = dj.Schema( - PREFIX + "_test_blob_bypass_serialization_out", connection=dj.conn(**CONN_INFO) -) - test_blob = np.array([1, 2, 3]) -@schema_in class Input(dj.Lookup): definition = """ id: int @@ -28,7 +16,6 @@ class Input(dj.Lookup): contents = [(0, test_blob)] -@schema_out class Output(dj.Manual): definition = """ id: int @@ -37,10 +24,34 @@ class Output(dj.Manual): """ -def test_bypass_serialization(): +@pytest.fixture +def schema_in(connection_test): + schema = dj.Schema( + PREFIX + "_test_bypass_serialization_in", + context=dict(Input=Input), + connection=connection_test, + ) + schema(Input) + yield schema + schema.drop() + + +@pytest.fixture +def schema_out(connection_test): + schema = dj.Schema( + PREFIX + "_test_blob_bypass_serialization_out", + context=dict(Output=Output), + connection=connection_test, + ) + schema(Output) + yield schema + schema.drop() + + +def test_bypass_serialization(schema_in, schema_out): dj.blob.bypass_serialization = True contents = Input.fetch(as_dict=True) - assert_true(isinstance(contents[0]["data"], bytes)) + assert isinstance(contents[0]["data"], bytes) Output.insert(contents) dj.blob.bypass_serialization = False assert_array_equal(Input.fetch1("data"), Output.fetch1("data")) From 149555cd12ff6977226e4bcdb5f9d362c98809eb Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Sat, 9 Dec 2023 20:54:19 -0600 Subject: [PATCH 083/212] cp to tests --- tests/test_cascading_delete.py | 124 +++++++++++++++++++++++++++++++++ 1 file changed, 124 insertions(+) create mode 100644 tests/test_cascading_delete.py diff --git a/tests/test_cascading_delete.py b/tests/test_cascading_delete.py new file mode 100644 index 000000000..ca55b0261 --- /dev/null +++ b/tests/test_cascading_delete.py @@ -0,0 +1,124 @@ +from nose.tools import assert_false, assert_true, assert_equal, raises +import 
datajoint as dj +from .schema_simple import A, B, D, E, L, Website, Profile +from .schema import ComplexChild, ComplexParent + + +class TestDelete: + @staticmethod + def setup(): + """ + class-level test setup. Executes before each test method. + """ + A().insert(A.contents, skip_duplicates=True) + L().insert(L.contents, skip_duplicates=True) + B().populate() + D().populate() + E().populate() + + @staticmethod + def test_delete_tree(): + assert_false(dj.config["safemode"], "safemode must be off for testing") + assert_true( + L() and A() and B() and B.C() and D() and E() and E.F(), + "schema is not populated", + ) + A().delete() + assert_false(A() or B() or B.C() or D() or E() or E.F(), "incomplete delete") + + @staticmethod + def test_stepwise_delete(): + assert not dj.config["safemode"], "safemode must be off for testing" + assert L() and A() and B() and B.C(), "schema population failed" + B.C().delete(force=True) + assert not B.C(), "failed to delete child tables" + B().delete() + assert ( + not B() + ), "failed to delete from the parent table following child table deletion" + + @staticmethod + def test_delete_tree_restricted(): + assert not dj.config["safemode"], "safemode must be off for testing" + assert ( + L() and A() and B() and B.C() and D() and E() and E.F() + ), "schema is not populated" + cond = "cond_in_a" + rel = A() & cond + rest = dict( + A=len(A()) - len(rel), + B=len(B() - rel), + C=len(B.C() - rel), + D=len(D() - rel), + E=len(E() - rel), + F=len(E.F() - rel), + ) + rel.delete() + assert not ( + rel or B() & rel or B.C() & rel or D() & rel or E() & rel or (E.F() & rel) + ), "incomplete delete" + assert len(A()) == rest["A"], "invalid delete restriction" + assert len(B()) == rest["B"], "invalid delete restriction" + assert len(B.C()) == rest["C"], "invalid delete restriction" + assert len(D()) == rest["D"], "invalid delete restriction" + assert len(E()) == rest["E"], "invalid delete restriction" + assert len(E.F()) == rest["F"], "invalid delete restriction" + + @staticmethod + def test_delete_lookup(): + assert_false(dj.config["safemode"], "safemode must be off for testing") + assert_true( + bool(L() and A() and B() and B.C() and D() and E() and E.F()), + "schema is not populated", + ) + L().delete() + assert_false(bool(L() or D() or E() or E.F()), "incomplete delete") + A().delete() # delete all is necessary because delete L deletes from subtables. 
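+
+    # A sketch of the cascade these tests rely on (assuming the schema_simple
+    # dependency chain L -> D -> E -> E.F, which the assertions above imply):
+    #
+    #   L().delete()                  # empties L and, by cascade, D, E, E.F
+    #   (L() & "cond_in_l").delete()  # restricted variant, exercised below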
+ + @staticmethod + def test_delete_lookup_restricted(): + assert_false(dj.config["safemode"], "safemode must be off for testing") + assert_true( + L() and A() and B() and B.C() and D() and E() and E.F(), + "schema is not populated", + ) + rel = L() & "cond_in_l" + original_count = len(L()) + deleted_count = len(rel) + rel.delete() + assert_true(len(L()) == original_count - deleted_count) + + @staticmethod + def test_delete_complex_keys(): + # https://github.com/datajoint/datajoint-python/issues/883 + # https://github.com/datajoint/datajoint-python/issues/886 + assert_false(dj.config["safemode"], "safemode must be off for testing") + parent_key_count = 8 + child_key_count = 1 + restriction = dict( + {"parent_id_{}".format(i + 1): i for i in range(parent_key_count)}, + **{ + "child_id_{}".format(i + 1): (i + parent_key_count) + for i in range(child_key_count) + } + ) + assert len(ComplexParent & restriction) == 1, "Parent record missing" + assert len(ComplexChild & restriction) == 1, "Child record missing" + (ComplexParent & restriction).delete() + assert len(ComplexParent & restriction) == 0, "Parent record was not deleted" + assert len(ComplexChild & restriction) == 0, "Child record was not deleted" + + def test_delete_master(self): + Profile().populate_random() + Profile().delete() + + @raises(dj.DataJointError) + def test_delete_parts(self): + """test issue #151""" + Profile().populate_random() + Website().delete() + + @raises(dj.DataJointError) + def test_drop_part(self): + """test issue #374""" + Website().drop() From 0b08225c42d1506963797ee2d9228c786d54fe77 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Sat, 9 Dec 2023 21:11:52 -0600 Subject: [PATCH 084/212] Migrate test_cascading_delete --- tests/test_cascading_delete.py | 82 ++++++++++++++++------------------ 1 file changed, 39 insertions(+), 43 deletions(-) diff --git a/tests/test_cascading_delete.py b/tests/test_cascading_delete.py index ca55b0261..b437a65ad 100644 --- a/tests/test_cascading_delete.py +++ b/tests/test_cascading_delete.py @@ -1,33 +1,31 @@ -from nose.tools import assert_false, assert_true, assert_equal, raises +import pytest import datajoint as dj from .schema_simple import A, B, D, E, L, Website, Profile from .schema import ComplexChild, ComplexParent +@pytest.fixture +def schema_simp_pop(schema_simp): + A().insert(A.contents, skip_duplicates=True) + L().insert(L.contents, skip_duplicates=True) + B().populate() + D().populate() + E().populate() + yield schema_simp + + class TestDelete: - @staticmethod - def setup(): - """ - class-level test setup. Executes before each test method. 
- """ - A().insert(A.contents, skip_duplicates=True) - L().insert(L.contents, skip_duplicates=True) - B().populate() - D().populate() - E().populate() - @staticmethod - def test_delete_tree(): - assert_false(dj.config["safemode"], "safemode must be off for testing") - assert_true( + def test_delete_tree(self, schema_simp_pop): + assert not dj.config["safemode"], "safemode must be off for testing" + assert ( L() and A() and B() and B.C() and D() and E() and E.F(), "schema is not populated", ) A().delete() - assert_false(A() or B() or B.C() or D() or E() or E.F(), "incomplete delete") + assert not A() or B() or B.C() or D() or E() or E.F(), "incomplete delete" - @staticmethod - def test_stepwise_delete(): + def test_stepwise_delete(self, schema_simp_pop): assert not dj.config["safemode"], "safemode must be off for testing" assert L() and A() and B() and B.C(), "schema population failed" B.C().delete(force=True) @@ -37,8 +35,7 @@ def test_stepwise_delete(): not B() ), "failed to delete from the parent table following child table deletion" - @staticmethod - def test_delete_tree_restricted(): + def test_delete_tree_restricted(self, schema_simp_pop): assert not dj.config["safemode"], "safemode must be off for testing" assert ( L() and A() and B() and B.C() and D() and E() and E.F() @@ -64,21 +61,19 @@ def test_delete_tree_restricted(): assert len(E()) == rest["E"], "invalid delete restriction" assert len(E.F()) == rest["F"], "invalid delete restriction" - @staticmethod - def test_delete_lookup(): - assert_false(dj.config["safemode"], "safemode must be off for testing") - assert_true( + def test_delete_lookup(self, schema_simp_pop): + assert not dj.config["safemode"], "safemode must be off for testing" + assert ( bool(L() and A() and B() and B.C() and D() and E() and E.F()), "schema is not populated", ) L().delete() - assert_false(bool(L() or D() or E() or E.F()), "incomplete delete") + assert not bool(L() or D() or E() or E.F()), "incomplete delete" A().delete() # delete all is necessary because delete L deletes from subtables. 
- @staticmethod - def test_delete_lookup_restricted(): - assert_false(dj.config["safemode"], "safemode must be off for testing") - assert_true( + def test_delete_lookup_restricted(self, schema_simp_pop): + assert not dj.config["safemode"], "safemode must be off for testing" + assert ( L() and A() and B() and B.C() and D() and E() and E.F(), "schema is not populated", ) @@ -86,13 +81,14 @@ def test_delete_lookup_restricted(): original_count = len(L()) deleted_count = len(rel) rel.delete() - assert_true(len(L()) == original_count - deleted_count) + assert len(L()) == original_count - deleted_count - @staticmethod - def test_delete_complex_keys(): - # https://github.com/datajoint/datajoint-python/issues/883 - # https://github.com/datajoint/datajoint-python/issues/886 - assert_false(dj.config["safemode"], "safemode must be off for testing") + def test_delete_complex_keys(self, schema_any): + """ + https://github.com/datajoint/datajoint-python/issues/883 + https://github.com/datajoint/datajoint-python/issues/886 + """ + assert not dj.config["safemode"], "safemode must be off for testing" parent_key_count = 8 child_key_count = 1 restriction = dict( @@ -108,17 +104,17 @@ def test_delete_complex_keys(): assert len(ComplexParent & restriction) == 0, "Parent record was not deleted" assert len(ComplexChild & restriction) == 0, "Child record was not deleted" - def test_delete_master(self): + def test_delete_master(self, schema_simp_pop): Profile().populate_random() Profile().delete() - @raises(dj.DataJointError) - def test_delete_parts(self): + def test_delete_parts(self, schema_simp_pop): """test issue #151""" - Profile().populate_random() - Website().delete() + with pytest.raises(dj.DataJointError): + Profile().populate_random() + Website().delete() - @raises(dj.DataJointError) - def test_drop_part(self): + def test_drop_part(self, schema_simp_pop): """test issue #374""" - Website().drop() + with pytest.raises(dj.DataJointError): + Website().drop() From 1b4e23877953b672bca0f3dc90608c7a16f294d5 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Sat, 9 Dec 2023 21:15:59 -0600 Subject: [PATCH 085/212] cp to tests --- tests/test_declare.py | 343 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 343 insertions(+) create mode 100644 tests/test_declare.py diff --git a/tests/test_declare.py b/tests/test_declare.py new file mode 100644 index 000000000..67f532449 --- /dev/null +++ b/tests/test_declare.py @@ -0,0 +1,343 @@ +from nose.tools import ( + assert_true, + assert_false, + assert_equal, + assert_list_equal, + raises, + assert_set_equal, +) +from .schema import * +import datajoint as dj +import inspect +from datajoint.declare import declare + + +auto = Auto() +auto.fill() +user = User() +subject = Subject() +experiment = Experiment() +trial = Trial() +ephys = Ephys() +channel = Ephys.Channel() + + +class TestDeclare: + @staticmethod + def test_schema_decorator(): + assert_true(issubclass(Subject, dj.Lookup)) + assert_true(not issubclass(Subject, dj.Part)) + + @staticmethod + def test_class_help(): + help(TTest) + help(TTest2) + assert_true(TTest.definition in TTest.__doc__) + assert_true(TTest.definition in TTest2.__doc__) + + @staticmethod + def test_instance_help(): + help(TTest()) + help(TTest2()) + assert_true(TTest().definition in TTest().__doc__) + assert_true(TTest2().definition in TTest2().__doc__) + + @staticmethod + def test_describe(): + """real_definition should match original definition""" + rel = Experiment() + context = inspect.currentframe().f_globals + s1 = 
declare(rel.full_table_name, rel.definition, context) + s2 = declare(rel.full_table_name, rel.describe(), context) + assert_equal(s1, s2) + + @staticmethod + def test_describe_indexes(): + """real_definition should match original definition""" + rel = IndexRich() + context = inspect.currentframe().f_globals + s1 = declare(rel.full_table_name, rel.definition, context) + s2 = declare(rel.full_table_name, rel.describe(), context) + assert_equal(s1, s2) + + @staticmethod + def test_describe_dependencies(): + """real_definition should match original definition""" + rel = ThingC() + context = inspect.currentframe().f_globals + s1 = declare(rel.full_table_name, rel.definition, context) + s2 = declare(rel.full_table_name, rel.describe(), context) + assert_equal(s1, s2) + + @staticmethod + def test_part(): + # Lookup and part with the same name. See issue #365 + local_schema = dj.Schema(schema.database) + + @local_schema + class Type(dj.Lookup): + definition = """ + type : varchar(255) + """ + contents = zip(("Type1", "Type2", "Type3")) + + @local_schema + class TypeMaster(dj.Manual): + definition = """ + master_id : int + """ + + class Type(dj.Part): + definition = """ + -> TypeMaster + -> Type + """ + + @staticmethod + def test_attributes(): + # test autoincrement declaration + assert_list_equal(auto.heading.names, ["id", "name"]) + assert_true(auto.heading.attributes["id"].autoincrement) + + # test attribute declarations + assert_list_equal( + subject.heading.names, + ["subject_id", "real_id", "species", "date_of_birth", "subject_notes"], + ) + assert_list_equal(subject.primary_key, ["subject_id"]) + assert_true(subject.heading.attributes["subject_id"].numeric) + assert_false(subject.heading.attributes["real_id"].numeric) + + assert_list_equal( + experiment.heading.names, + [ + "subject_id", + "experiment_id", + "experiment_date", + "username", + "data_path", + "notes", + "entry_time", + ], + ) + assert_list_equal(experiment.primary_key, ["subject_id", "experiment_id"]) + + assert_list_equal( + trial.heading.names, # tests issue #516 + ["animal", "experiment_id", "trial_id", "start_time"], + ) + assert_list_equal(trial.primary_key, ["animal", "experiment_id", "trial_id"]) + + assert_list_equal( + ephys.heading.names, + ["animal", "experiment_id", "trial_id", "sampling_frequency", "duration"], + ) + assert_list_equal(ephys.primary_key, ["animal", "experiment_id", "trial_id"]) + + assert_list_equal( + channel.heading.names, + ["animal", "experiment_id", "trial_id", "channel", "voltage", "current"], + ) + assert_list_equal( + channel.primary_key, ["animal", "experiment_id", "trial_id", "channel"] + ) + assert_true(channel.heading.attributes["voltage"].is_blob) + + @staticmethod + def test_dependencies(): + assert_true(experiment.full_table_name in user.children(primary=False)) + assert_equal(set(experiment.parents(primary=False)), {user.full_table_name}) + assert_true(experiment.full_table_name in user.children(primary=False)) + assert_set_equal(set(experiment.parents(primary=False)), {user.full_table_name}) + assert_set_equal( + set( + s.full_table_name + for s in experiment.parents(primary=False, as_objects=True) + ), + {user.full_table_name}, + ) + + assert_true(experiment.full_table_name in subject.descendants()) + assert_true( + experiment.full_table_name + in {s.full_table_name for s in subject.descendants(as_objects=True)} + ) + assert_true(subject.full_table_name in experiment.ancestors()) + assert_true( + subject.full_table_name + in {s.full_table_name for s in 
experiment.ancestors(as_objects=True)} + ) + + assert_true(trial.full_table_name in experiment.descendants()) + assert_true( + trial.full_table_name + in {s.full_table_name for s in experiment.descendants(as_objects=True)} + ) + assert_true(experiment.full_table_name in trial.ancestors()) + assert_true( + experiment.full_table_name + in {s.full_table_name for s in trial.ancestors(as_objects=True)} + ) + + assert_set_equal( + set(trial.children(primary=True)), + {ephys.full_table_name, trial.Condition.full_table_name}, + ) + assert_set_equal(set(trial.parts()), {trial.Condition.full_table_name}) + assert_set_equal( + set(s.full_table_name for s in trial.parts(as_objects=True)), + {trial.Condition.full_table_name}, + ) + assert_set_equal(set(ephys.parents(primary=True)), {trial.full_table_name}) + assert_set_equal( + set( + s.full_table_name for s in ephys.parents(primary=True, as_objects=True) + ), + {trial.full_table_name}, + ) + assert_set_equal(set(ephys.children(primary=True)), {channel.full_table_name}) + assert_set_equal( + set( + s.full_table_name for s in ephys.children(primary=True, as_objects=True) + ), + {channel.full_table_name}, + ) + assert_set_equal(set(channel.parents(primary=True)), {ephys.full_table_name}) + assert_set_equal( + set( + s.full_table_name + for s in channel.parents(primary=True, as_objects=True) + ), + {ephys.full_table_name}, + ) + + @staticmethod + def test_descendants_only_contain_part_table(): + """issue #927""" + + @schema + class A(dj.Manual): + definition = """ + a: int + """ + + @schema + class B(dj.Manual): + definition = """ + -> A + b: int + """ + + @schema + class Master(dj.Manual): + definition = """ + table_master: int + """ + + class Part(dj.Part): + definition = """ + -> master + -> B + """ + + assert A.descendants() == [ + "`djtest_test1`.`a`", + "`djtest_test1`.`b`", + "`djtest_test1`.`master__part`", + ] + + @staticmethod + @raises(dj.DataJointError) + def test_bad_attribute_name(): + @schema + class BadName(dj.Manual): + definition = """ + Bad_name : int + """ + + @staticmethod + @raises(dj.DataJointError) + def test_bad_fk_rename(): + """issue #381""" + + @schema + class A(dj.Manual): + definition = """ + a : int + """ + + @schema + class B(dj.Manual): + definition = """ + b -> A # invalid, the new syntax is (b) -> A + """ + + @staticmethod + @raises(dj.DataJointError) + def test_primary_nullable_foreign_key(): + @schema + class Q(dj.Manual): + definition = """ + -> [nullable] Experiment + """ + + @staticmethod + @raises(dj.DataJointError) + def test_invalid_foreign_key_option(): + @schema + class R(dj.Manual): + definition = """ + -> Experiment + ---- + -> [optional] User + """ + + @staticmethod + @raises(dj.DataJointError) + def test_unsupported_datatype(): + @schema + class Q(dj.Manual): + definition = """ + experiment : int + --- + description : text + """ + + @staticmethod + def test_int_datatype(): + @schema + class Owner(dj.Manual): + definition = """ + ownerid : int + --- + car_count : integer + """ + + @staticmethod + @raises(dj.DataJointError) + def test_unsupported_int_datatype(): + @schema + class Driver(dj.Manual): + definition = """ + driverid : tinyint + --- + car_count : tinyinteger + """ + + @staticmethod + @raises(dj.DataJointError) + def test_long_table_name(): + """ + test issue #205 -- reject table names over 64 characters in length + """ + + @schema + class WhyWouldAnyoneCreateATableNameThisLong(dj.Manual): + definition = """ + master : int + """ + + class WithSuchALongPartNameThatItCrashesMySQL(dj.Part): + 
definition = """ + -> (master) + """ From 69a793878d1834761e902d8f91c08952de0cdd15 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Sat, 9 Dec 2023 21:21:33 -0600 Subject: [PATCH 086/212] WIP test_declare migration --- tests/test_declare.py | 85 ++++++++++++++++--------------------------- 1 file changed, 31 insertions(+), 54 deletions(-) diff --git a/tests/test_declare.py b/tests/test_declare.py index 67f532449..6005a25ff 100644 --- a/tests/test_declare.py +++ b/tests/test_declare.py @@ -1,49 +1,40 @@ -from nose.tools import ( - assert_true, - assert_false, - assert_equal, - assert_list_equal, - raises, - assert_set_equal, -) +import pytest from .schema import * import datajoint as dj import inspect from datajoint.declare import declare -auto = Auto() -auto.fill() -user = User() -subject = Subject() -experiment = Experiment() -trial = Trial() -ephys = Ephys() -channel = Ephys.Channel() - - class TestDeclare: - @staticmethod - def test_schema_decorator(): + + @classmethod + def setup_class(cls): + cls.auto = Auto() + cls.auto.fill() + cls.user = User() + cls.subject = Subject() + cls.experiment = Experiment() + cls.trial = Trial() + cls.ephys = Ephys() + cls.channel = Ephys.Channel() + + def test_schema_decorator(self, schema_any): assert_true(issubclass(Subject, dj.Lookup)) assert_true(not issubclass(Subject, dj.Part)) - @staticmethod - def test_class_help(): + def test_class_help(self, schema_any): help(TTest) help(TTest2) assert_true(TTest.definition in TTest.__doc__) assert_true(TTest.definition in TTest2.__doc__) - @staticmethod - def test_instance_help(): + def test_instance_help(self, schema_any): help(TTest()) help(TTest2()) assert_true(TTest().definition in TTest().__doc__) assert_true(TTest2().definition in TTest2().__doc__) - @staticmethod - def test_describe(): + def test_describe(self, schema_any): """real_definition should match original definition""" rel = Experiment() context = inspect.currentframe().f_globals @@ -51,8 +42,7 @@ def test_describe(): s2 = declare(rel.full_table_name, rel.describe(), context) assert_equal(s1, s2) - @staticmethod - def test_describe_indexes(): + def test_describe_indexes(self, schema_any): """real_definition should match original definition""" rel = IndexRich() context = inspect.currentframe().f_globals @@ -60,8 +50,7 @@ def test_describe_indexes(): s2 = declare(rel.full_table_name, rel.describe(), context) assert_equal(s1, s2) - @staticmethod - def test_describe_dependencies(): + def test_describe_dependencies(self, schema_any): """real_definition should match original definition""" rel = ThingC() context = inspect.currentframe().f_globals @@ -69,8 +58,7 @@ def test_describe_dependencies(): s2 = declare(rel.full_table_name, rel.describe(), context) assert_equal(s1, s2) - @staticmethod - def test_part(): + def test_part(self, schema_any): # Lookup and part with the same name. 
See issue #365 local_schema = dj.Schema(schema.database) @@ -93,8 +81,7 @@ class Type(dj.Part): -> Type """ - @staticmethod - def test_attributes(): + def test_attributes(self, schema_any): # test autoincrement declaration assert_list_equal(auto.heading.names, ["id", "name"]) assert_true(auto.heading.attributes["id"].autoincrement) @@ -143,8 +130,7 @@ def test_attributes(): ) assert_true(channel.heading.attributes["voltage"].is_blob) - @staticmethod - def test_dependencies(): + def test_dependencies(self, schema_any): assert_true(experiment.full_table_name in user.children(primary=False)) assert_equal(set(experiment.parents(primary=False)), {user.full_table_name}) assert_true(experiment.full_table_name in user.children(primary=False)) @@ -211,8 +197,7 @@ def test_dependencies(): {ephys.full_table_name}, ) - @staticmethod - def test_descendants_only_contain_part_table(): + def test_descendants_only_contain_part_table(self, schema_any): """issue #927""" @schema @@ -246,18 +231,16 @@ class Part(dj.Part): "`djtest_test1`.`master__part`", ] - @staticmethod @raises(dj.DataJointError) - def test_bad_attribute_name(): + def test_bad_attribute_name(self): @schema class BadName(dj.Manual): definition = """ Bad_name : int """ - @staticmethod @raises(dj.DataJointError) - def test_bad_fk_rename(): + def test_bad_fk_rename(self): """issue #381""" @schema @@ -272,18 +255,16 @@ class B(dj.Manual): b -> A # invalid, the new syntax is (b) -> A """ - @staticmethod @raises(dj.DataJointError) - def test_primary_nullable_foreign_key(): + def test_primary_nullable_foreign_key(self): @schema class Q(dj.Manual): definition = """ -> [nullable] Experiment """ - @staticmethod @raises(dj.DataJointError) - def test_invalid_foreign_key_option(): + def test_invalid_foreign_key_option(self): @schema class R(dj.Manual): definition = """ @@ -292,9 +273,8 @@ class R(dj.Manual): -> [optional] User """ - @staticmethod @raises(dj.DataJointError) - def test_unsupported_datatype(): + def test_unsupported_datatype(self): @schema class Q(dj.Manual): definition = """ @@ -303,8 +283,7 @@ class Q(dj.Manual): description : text """ - @staticmethod - def test_int_datatype(): + def test_int_datatype(self): @schema class Owner(dj.Manual): definition = """ @@ -313,9 +292,8 @@ class Owner(dj.Manual): car_count : integer """ - @staticmethod @raises(dj.DataJointError) - def test_unsupported_int_datatype(): + def test_unsupported_int_datatype(self): @schema class Driver(dj.Manual): definition = """ @@ -324,9 +302,8 @@ class Driver(dj.Manual): car_count : tinyinteger """ - @staticmethod @raises(dj.DataJointError) - def test_long_table_name(): + def test_long_table_name(self): """ test issue #205 -- reject table names over 64 characters in length """ From fbecd8a1dd2b1bf3767a3e1c2626a04174b675cd Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Sat, 9 Dec 2023 21:32:20 -0600 Subject: [PATCH 087/212] nose2pytest tests --- tests/test_declare.py | 180 +++++++++++++++++++----------------------- 1 file changed, 80 insertions(+), 100 deletions(-) diff --git a/tests/test_declare.py b/tests/test_declare.py index 6005a25ff..a1c0ed775 100644 --- a/tests/test_declare.py +++ b/tests/test_declare.py @@ -19,20 +19,20 @@ def setup_class(cls): cls.channel = Ephys.Channel() def test_schema_decorator(self, schema_any): - assert_true(issubclass(Subject, dj.Lookup)) - assert_true(not issubclass(Subject, dj.Part)) + assert issubclass(Subject, dj.Lookup) + assert not issubclass(Subject, dj.Part) def test_class_help(self, schema_any): help(TTest) help(TTest2) - 
assert_true(TTest.definition in TTest.__doc__) - assert_true(TTest.definition in TTest2.__doc__) + assert TTest.definition in TTest.__doc__ + assert TTest.definition in TTest2.__doc__ def test_instance_help(self, schema_any): help(TTest()) help(TTest2()) - assert_true(TTest().definition in TTest().__doc__) - assert_true(TTest2().definition in TTest2().__doc__) + assert TTest().definition in TTest().__doc__ + assert TTest2().definition in TTest2().__doc__ def test_describe(self, schema_any): """real_definition should match original definition""" @@ -40,7 +40,7 @@ def test_describe(self, schema_any): context = inspect.currentframe().f_globals s1 = declare(rel.full_table_name, rel.definition, context) s2 = declare(rel.full_table_name, rel.describe(), context) - assert_equal(s1, s2) + assert s1 == s2 def test_describe_indexes(self, schema_any): """real_definition should match original definition""" @@ -48,7 +48,7 @@ def test_describe_indexes(self, schema_any): context = inspect.currentframe().f_globals s1 = declare(rel.full_table_name, rel.definition, context) s2 = declare(rel.full_table_name, rel.describe(), context) - assert_equal(s1, s2) + assert s1 == s2 def test_describe_dependencies(self, schema_any): """real_definition should match original definition""" @@ -56,7 +56,7 @@ def test_describe_dependencies(self, schema_any): context = inspect.currentframe().f_globals s1 = declare(rel.full_table_name, rel.definition, context) s2 = declare(rel.full_table_name, rel.describe(), context) - assert_equal(s1, s2) + assert s1 == s2 def test_part(self, schema_any): # Lookup and part with the same name. See issue #365 @@ -83,20 +83,19 @@ class Type(dj.Part): def test_attributes(self, schema_any): # test autoincrement declaration - assert_list_equal(auto.heading.names, ["id", "name"]) - assert_true(auto.heading.attributes["id"].autoincrement) + assert auto.heading.names == ["id", "name"] + assert auto.heading.attributes["id"].autoincrement # test attribute declarations - assert_list_equal( - subject.heading.names, - ["subject_id", "real_id", "species", "date_of_birth", "subject_notes"], - ) - assert_list_equal(subject.primary_key, ["subject_id"]) - assert_true(subject.heading.attributes["subject_id"].numeric) - assert_false(subject.heading.attributes["real_id"].numeric) - - assert_list_equal( - experiment.heading.names, + assert ( + subject.heading.names == + ["subject_id", "real_id", "species", "date_of_birth", "subject_notes"]) + assert subject.primary_key == ["subject_id"] + assert subject.heading.attributes["subject_id"].numeric + assert not subject.heading.attributes["real_id"].numeric + + assert ( + experiment.heading.names == [ "subject_id", "experiment_id", @@ -105,97 +104,78 @@ def test_attributes(self, schema_any): "data_path", "notes", "entry_time", - ], - ) - assert_list_equal(experiment.primary_key, ["subject_id", "experiment_id"]) - - assert_list_equal( - trial.heading.names, # tests issue #516 - ["animal", "experiment_id", "trial_id", "start_time"], - ) - assert_list_equal(trial.primary_key, ["animal", "experiment_id", "trial_id"]) - - assert_list_equal( - ephys.heading.names, - ["animal", "experiment_id", "trial_id", "sampling_frequency", "duration"], - ) - assert_list_equal(ephys.primary_key, ["animal", "experiment_id", "trial_id"]) - - assert_list_equal( - channel.heading.names, - ["animal", "experiment_id", "trial_id", "channel", "voltage", "current"], - ) - assert_list_equal( - channel.primary_key, ["animal", "experiment_id", "trial_id", "channel"] - ) - 
assert_true(channel.heading.attributes["voltage"].is_blob) + ]) + assert experiment.primary_key == ["subject_id", "experiment_id"] + + assert ( + trial.heading.names == # tests issue #516 + ["animal", "experiment_id", "trial_id", "start_time"]) + assert trial.primary_key == ["animal", "experiment_id", "trial_id"] + + assert ( + ephys.heading.names == + ["animal", "experiment_id", "trial_id", "sampling_frequency", "duration"]) + assert ephys.primary_key == ["animal", "experiment_id", "trial_id"] + + assert ( + channel.heading.names == + ["animal", "experiment_id", "trial_id", "channel", "voltage", "current"]) + assert ( + channel.primary_key == ["animal", "experiment_id", "trial_id", "channel"]) + assert channel.heading.attributes["voltage"].is_blob def test_dependencies(self, schema_any): - assert_true(experiment.full_table_name in user.children(primary=False)) - assert_equal(set(experiment.parents(primary=False)), {user.full_table_name}) - assert_true(experiment.full_table_name in user.children(primary=False)) - assert_set_equal(set(experiment.parents(primary=False)), {user.full_table_name}) - assert_set_equal( + assert experiment.full_table_name in user.children(primary=False) + assert set(experiment.parents(primary=False)) == {user.full_table_name} + assert experiment.full_table_name in user.children(primary=False) + assert set(experiment.parents(primary=False)) == {user.full_table_name} + assert ( set( s.full_table_name for s in experiment.parents(primary=False, as_objects=True) - ), - {user.full_table_name}, - ) - - assert_true(experiment.full_table_name in subject.descendants()) - assert_true( - experiment.full_table_name - in {s.full_table_name for s in subject.descendants(as_objects=True)} - ) - assert_true(subject.full_table_name in experiment.ancestors()) - assert_true( - subject.full_table_name - in {s.full_table_name for s in experiment.ancestors(as_objects=True)} - ) - - assert_true(trial.full_table_name in experiment.descendants()) - assert_true( - trial.full_table_name - in {s.full_table_name for s in experiment.descendants(as_objects=True)} - ) - assert_true(experiment.full_table_name in trial.ancestors()) - assert_true( - experiment.full_table_name - in {s.full_table_name for s in trial.ancestors(as_objects=True)} - ) - - assert_set_equal( - set(trial.children(primary=True)), - {ephys.full_table_name, trial.Condition.full_table_name}, - ) - assert_set_equal(set(trial.parts()), {trial.Condition.full_table_name}) - assert_set_equal( - set(s.full_table_name for s in trial.parts(as_objects=True)), - {trial.Condition.full_table_name}, - ) - assert_set_equal(set(ephys.parents(primary=True)), {trial.full_table_name}) - assert_set_equal( + ) == + {user.full_table_name}) + + assert experiment.full_table_name in subject.descendants() + assert (experiment.full_table_name + in {s.full_table_name for s in subject.descendants(as_objects=True)}) + assert subject.full_table_name in experiment.ancestors() + assert (subject.full_table_name + in {s.full_table_name for s in experiment.ancestors(as_objects=True)}) + + assert trial.full_table_name in experiment.descendants() + assert (trial.full_table_name + in {s.full_table_name for s in experiment.descendants(as_objects=True)}) + assert experiment.full_table_name in trial.ancestors() + assert (experiment.full_table_name + in {s.full_table_name for s in trial.ancestors(as_objects=True)}) + + assert ( + set(trial.children(primary=True)) == + {ephys.full_table_name, trial.Condition.full_table_name}) + assert set(trial.parts()) == 
{trial.Condition.full_table_name} + assert ( + set(s.full_table_name for s in trial.parts(as_objects=True)) == + {trial.Condition.full_table_name}) + assert set(ephys.parents(primary=True)) == {trial.full_table_name} + assert ( set( s.full_table_name for s in ephys.parents(primary=True, as_objects=True) - ), - {trial.full_table_name}, - ) - assert_set_equal(set(ephys.children(primary=True)), {channel.full_table_name}) - assert_set_equal( + ) == + {trial.full_table_name}) + assert set(ephys.children(primary=True)) == {channel.full_table_name} + assert ( set( s.full_table_name for s in ephys.children(primary=True, as_objects=True) - ), - {channel.full_table_name}, - ) - assert_set_equal(set(channel.parents(primary=True)), {ephys.full_table_name}) - assert_set_equal( + ) == + {channel.full_table_name}) + assert set(channel.parents(primary=True)) == {ephys.full_table_name} + assert ( set( s.full_table_name for s in channel.parents(primary=True, as_objects=True) - ), - {ephys.full_table_name}, - ) + ) == + {ephys.full_table_name}) def test_descendants_only_contain_part_table(self, schema_any): """issue #927""" From d815e8293dafe4f7279f608c537585f7bacfe294 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Sat, 9 Dec 2023 21:48:12 -0600 Subject: [PATCH 088/212] First pass at migrating test_declare --- tests/test_declare.py | 97 +++++++++++++++++++++++++------------------ 1 file changed, 57 insertions(+), 40 deletions(-) diff --git a/tests/test_declare.py b/tests/test_declare.py index a1c0ed775..63d5bc5ac 100644 --- a/tests/test_declare.py +++ b/tests/test_declare.py @@ -5,18 +5,20 @@ from datajoint.declare import declare -class TestDeclare: +@pytest.fixture +def schema_any(schema_any): + auto = Auto() + auto.fill() + user = User() + subject = Subject() + experiment = Experiment() + trial = Trial() + ephys = Ephys() + channel = Ephys.Channel() + yield schema_any + - @classmethod - def setup_class(cls): - cls.auto = Auto() - cls.auto.fill() - cls.user = User() - cls.subject = Subject() - cls.experiment = Experiment() - cls.trial = Trial() - cls.ephys = Ephys() - cls.channel = Ephys.Channel() +class TestDeclare: def test_schema_decorator(self, schema_any): assert issubclass(Subject, dj.Lookup) @@ -58,8 +60,10 @@ def test_describe_dependencies(self, schema_any): s2 = declare(rel.full_table_name, rel.describe(), context) assert s1 == s2 - def test_part(self, schema_any): - # Lookup and part with the same name. See issue #365 + def test_part(self): + """ + Lookup and part with the same name. 
See issue #365 + """ local_schema = dj.Schema(schema.database) @local_schema @@ -180,20 +184,20 @@ def test_dependencies(self, schema_any): def test_descendants_only_contain_part_table(self, schema_any): """issue #927""" - @schema + @schema_any class A(dj.Manual): definition = """ a: int """ - @schema + @schema_any class B(dj.Manual): definition = """ -> A b: int """ - @schema + @schema_any class Master(dj.Manual): definition = """ table_master: int @@ -211,41 +215,45 @@ class Part(dj.Part): "`djtest_test1`.`master__part`", ] - @raises(dj.DataJointError) - def test_bad_attribute_name(self): - @schema + def test_bad_attribute_name(self, schema_any): + class BadName(dj.Manual): definition = """ Bad_name : int """ - @raises(dj.DataJointError) - def test_bad_fk_rename(self): + with pytest.raises(dj.DataJointError): + schema_any(BadName) + + def test_bad_fk_rename(self, schema_any): """issue #381""" - @schema class A(dj.Manual): definition = """ a : int """ - @schema class B(dj.Manual): definition = """ b -> A # invalid, the new syntax is (b) -> A """ - @raises(dj.DataJointError) - def test_primary_nullable_foreign_key(self): - @schema + schema_any(A) + with pytest.raises(dj.DataJointError): + schema_any(B) + + def test_primary_nullable_foreign_key(self, schema_any): + class Q(dj.Manual): definition = """ -> [nullable] Experiment """ - @raises(dj.DataJointError) - def test_invalid_foreign_key_option(self): - @schema + with pytest.raises(dj.DataJointError): + schema_any(Q) + + def test_invalid_foreign_key_option(self, schema_any): + class R(dj.Manual): definition = """ -> Experiment @@ -253,9 +261,11 @@ class R(dj.Manual): -> [optional] User """ - @raises(dj.DataJointError) - def test_unsupported_datatype(self): - @schema + with pytest.raises(dj.DataJointError): + schema_any(R) + + def test_unsupported_datatype(self, schema_any): + class Q(dj.Manual): definition = """ experiment : int @@ -263,8 +273,12 @@ class Q(dj.Manual): description : text """ - def test_int_datatype(self): - @schema + with pytest.raises(dj.DataJointError): + schema_any(Q) + + def test_int_datatype(self, schema_any): + + @schema_any class Owner(dj.Manual): definition = """ ownerid : int @@ -272,9 +286,8 @@ class Owner(dj.Manual): car_count : integer """ - @raises(dj.DataJointError) - def test_unsupported_int_datatype(self): - @schema + def test_unsupported_int_datatype(self, schema_any): + class Driver(dj.Manual): definition = """ driverid : tinyint @@ -282,13 +295,14 @@ class Driver(dj.Manual): car_count : tinyinteger """ - @raises(dj.DataJointError) - def test_long_table_name(self): + with pytest.raises(dj.DataJointError): + schema_any(Driver) + + def test_long_table_name(self, schema_any): """ test issue #205 -- reject table names over 64 characters in length """ - @schema class WhyWouldAnyoneCreateATableNameThisLong(dj.Manual): definition = """ master : int @@ -298,3 +312,6 @@ class WithSuchALongPartNameThatItCrashesMySQL(dj.Part): definition = """ -> (master) """ + + with pytest.raises(dj.DataJointError): + schema_any(WhyWouldAnyoneCreateATableNameThisLong) From e97f163a6cf0984f59c27127d454dc453b0318ca Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Sat, 9 Dec 2023 21:32:58 -0700 Subject: [PATCH 089/212] Clean up and format --- tests/test_declare.py | 186 ++++++++++++++++++++++-------------------- 1 file changed, 98 insertions(+), 88 deletions(-) diff --git a/tests/test_declare.py b/tests/test_declare.py index 63d5bc5ac..0398b79fd 100644 --- a/tests/test_declare.py +++ b/tests/test_declare.py @@ -5,21 +5,7 @@ from 
datajoint.declare import declare -@pytest.fixture -def schema_any(schema_any): - auto = Auto() - auto.fill() - user = User() - subject = Subject() - experiment = Experiment() - trial = Trial() - ephys = Ephys() - channel = Ephys.Channel() - yield schema_any - - class TestDeclare: - def test_schema_decorator(self, schema_any): assert issubclass(Subject, dj.Lookup) assert not issubclass(Subject, dj.Part) @@ -60,11 +46,11 @@ def test_describe_dependencies(self, schema_any): s2 = declare(rel.full_table_name, rel.describe(), context) assert s1 == s2 - def test_part(self): + def test_part(self, schema_any): """ Lookup and part with the same name. See issue #365 """ - local_schema = dj.Schema(schema.database) + local_schema = dj.Schema(schema_any.database) @local_schema class Type(dj.Lookup): @@ -86,118 +72,144 @@ class Type(dj.Part): """ def test_attributes(self, schema_any): - # test autoincrement declaration + """ + Test autoincrement declaration + """ + auto = Auto() + auto.fill() + user = User() + subject = Subject() + experiment = Experiment() + trial = Trial() + ephys = Ephys() + channel = Ephys.Channel() + assert auto.heading.names == ["id", "name"] assert auto.heading.attributes["id"].autoincrement # test attribute declarations - assert ( - subject.heading.names == - ["subject_id", "real_id", "species", "date_of_birth", "subject_notes"]) + assert subject.heading.names == [ + "subject_id", + "real_id", + "species", + "date_of_birth", + "subject_notes", + ] assert subject.primary_key == ["subject_id"] assert subject.heading.attributes["subject_id"].numeric assert not subject.heading.attributes["real_id"].numeric - assert ( - experiment.heading.names == - [ - "subject_id", - "experiment_id", - "experiment_date", - "username", - "data_path", - "notes", - "entry_time", - ]) + assert experiment.heading.names == [ + "subject_id", + "experiment_id", + "experiment_date", + "username", + "data_path", + "notes", + "entry_time", + ] assert experiment.primary_key == ["subject_id", "experiment_id"] - assert ( - trial.heading.names == # tests issue #516 - ["animal", "experiment_id", "trial_id", "start_time"]) + assert trial.heading.names == [ # tests issue #516 + "animal", + "experiment_id", + "trial_id", + "start_time", + ] assert trial.primary_key == ["animal", "experiment_id", "trial_id"] - assert ( - ephys.heading.names == - ["animal", "experiment_id", "trial_id", "sampling_frequency", "duration"]) + assert ephys.heading.names == [ + "animal", + "experiment_id", + "trial_id", + "sampling_frequency", + "duration", + ] assert ephys.primary_key == ["animal", "experiment_id", "trial_id"] - assert ( - channel.heading.names == - ["animal", "experiment_id", "trial_id", "channel", "voltage", "current"]) - assert ( - channel.primary_key == ["animal", "experiment_id", "trial_id", "channel"]) + assert channel.heading.names == [ + "animal", + "experiment_id", + "trial_id", + "channel", + "voltage", + "current", + ] + assert channel.primary_key == ["animal", "experiment_id", "trial_id", "channel"] assert channel.heading.attributes["voltage"].is_blob def test_dependencies(self, schema_any): + auto = Auto() + auto.fill() + user = User() + subject = Subject() + experiment = Experiment() + trial = Trial() + ephys = Ephys() + channel = Ephys.Channel() + assert experiment.full_table_name in user.children(primary=False) assert set(experiment.parents(primary=False)) == {user.full_table_name} assert experiment.full_table_name in user.children(primary=False) assert set(experiment.parents(primary=False)) == 
{user.full_table_name} - assert ( - set( - s.full_table_name - for s in experiment.parents(primary=False, as_objects=True) - ) == - {user.full_table_name}) + assert set( + s.full_table_name + for s in experiment.parents(primary=False, as_objects=True) + ) == {user.full_table_name} assert experiment.full_table_name in subject.descendants() - assert (experiment.full_table_name - in {s.full_table_name for s in subject.descendants(as_objects=True)}) + assert experiment.full_table_name in { + s.full_table_name for s in subject.descendants(as_objects=True) + } assert subject.full_table_name in experiment.ancestors() - assert (subject.full_table_name - in {s.full_table_name for s in experiment.ancestors(as_objects=True)}) + assert subject.full_table_name in { + s.full_table_name for s in experiment.ancestors(as_objects=True) + } assert trial.full_table_name in experiment.descendants() - assert (trial.full_table_name - in {s.full_table_name for s in experiment.descendants(as_objects=True)}) + assert trial.full_table_name in { + s.full_table_name for s in experiment.descendants(as_objects=True) + } assert experiment.full_table_name in trial.ancestors() - assert (experiment.full_table_name - in {s.full_table_name for s in trial.ancestors(as_objects=True)}) - - assert ( - set(trial.children(primary=True)) == - {ephys.full_table_name, trial.Condition.full_table_name}) + assert experiment.full_table_name in { + s.full_table_name for s in trial.ancestors(as_objects=True) + } + + assert set(trial.children(primary=True)) == { + ephys.full_table_name, + trial.Condition.full_table_name, + } assert set(trial.parts()) == {trial.Condition.full_table_name} - assert ( - set(s.full_table_name for s in trial.parts(as_objects=True)) == - {trial.Condition.full_table_name}) + assert set(s.full_table_name for s in trial.parts(as_objects=True)) == { + trial.Condition.full_table_name + } assert set(ephys.parents(primary=True)) == {trial.full_table_name} - assert ( - set( - s.full_table_name for s in ephys.parents(primary=True, as_objects=True) - ) == - {trial.full_table_name}) + assert set( + s.full_table_name for s in ephys.parents(primary=True, as_objects=True) + ) == {trial.full_table_name} assert set(ephys.children(primary=True)) == {channel.full_table_name} - assert ( - set( - s.full_table_name for s in ephys.children(primary=True, as_objects=True) - ) == - {channel.full_table_name}) + assert set( + s.full_table_name for s in ephys.children(primary=True, as_objects=True) + ) == {channel.full_table_name} assert set(channel.parents(primary=True)) == {ephys.full_table_name} - assert ( - set( - s.full_table_name - for s in channel.parents(primary=True, as_objects=True) - ) == - {ephys.full_table_name}) + assert set( + s.full_table_name for s in channel.parents(primary=True, as_objects=True) + ) == {ephys.full_table_name} def test_descendants_only_contain_part_table(self, schema_any): """issue #927""" - @schema_any class A(dj.Manual): definition = """ a: int """ - @schema_any class B(dj.Manual): definition = """ -> A b: int """ - @schema_any class Master(dj.Manual): definition = """ table_master: int @@ -209,6 +221,10 @@ class Part(dj.Part): -> B """ + context = dict(A=A, B=B, Master=Master) + schema_any(A, context=context) + schema_any(B, context=context) + schema_any(Master, context=context) assert A.descendants() == [ "`djtest_test1`.`a`", "`djtest_test1`.`b`", @@ -216,7 +232,6 @@ class Part(dj.Part): ] def test_bad_attribute_name(self, schema_any): - class BadName(dj.Manual): definition = """ Bad_name : int @@ 
-243,7 +258,6 @@ class B(dj.Manual): schema_any(B) def test_primary_nullable_foreign_key(self, schema_any): - class Q(dj.Manual): definition = """ -> [nullable] Experiment @@ -253,7 +267,6 @@ class Q(dj.Manual): schema_any(Q) def test_invalid_foreign_key_option(self, schema_any): - class R(dj.Manual): definition = """ -> Experiment @@ -265,7 +278,6 @@ class R(dj.Manual): schema_any(R) def test_unsupported_datatype(self, schema_any): - class Q(dj.Manual): definition = """ experiment : int @@ -277,7 +289,6 @@ class Q(dj.Manual): schema_any(Q) def test_int_datatype(self, schema_any): - @schema_any class Owner(dj.Manual): definition = """ @@ -287,7 +298,6 @@ class Owner(dj.Manual): """ def test_unsupported_int_datatype(self, schema_any): - class Driver(dj.Manual): definition = """ driverid : tinyint From 47cea7db407c3ee69156b9360f92ab516bec3a1d Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Sat, 9 Dec 2023 21:33:34 -0700 Subject: [PATCH 090/212] Format with black --- tests/test_cascading_delete.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/test_cascading_delete.py b/tests/test_cascading_delete.py index b437a65ad..8646edeca 100644 --- a/tests/test_cascading_delete.py +++ b/tests/test_cascading_delete.py @@ -15,7 +15,6 @@ def schema_simp_pop(schema_simp): class TestDelete: - def test_delete_tree(self, schema_simp_pop): assert not dj.config["safemode"], "safemode must be off for testing" assert ( From e2c0d3029c3d4d5237c1ea243fa9295e3e24bdfd Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 11 Dec 2023 11:31:15 -0700 Subject: [PATCH 091/212] Define schema_alter in test module Per @A-Baji suggestion --- tests/schema_alter.py | 58 ------------------------------------- tests/test_alter.py | 66 +++++++++++++++++++++++++++++++++++++++---- 2 files changed, 60 insertions(+), 64 deletions(-) delete mode 100644 tests/schema_alter.py diff --git a/tests/schema_alter.py b/tests/schema_alter.py deleted file mode 100644 index a8375b182..000000000 --- a/tests/schema_alter.py +++ /dev/null @@ -1,58 +0,0 @@ -import random -import numpy as np -import datajoint as dj -import inspect - - -class Experiment(dj.Imported): - original_definition = """ # information about experiments - -> Subject - experiment_id :smallint # experiment number for this subject - --- - experiment_date :date # date when experiment was started - -> [nullable] User - data_path="" :varchar(255) # file path to recorded data - notes="" :varchar(2048) # e.g. purpose of experiment - entry_time=CURRENT_TIMESTAMP :timestamp # automatic timestamp - """ - - definition1 = """ # Experiment - -> Subject - experiment_id :smallint # experiment number for this subject - --- - data_path : int # some number - extra=null : longblob # just testing - -> [nullable] User - subject_notes=null :varchar(2048) # {notes} e.g. 
purpose of experiment - entry_time=CURRENT_TIMESTAMP :timestamp # automatic timestamp - """ - - -class Parent(dj.Manual): - definition = """ - parent_id: int - """ - - class Child(dj.Part): - definition = """ - -> Parent - """ - definition_new = """ - -> master - --- - child_id=null: int - """ - - class Grandchild(dj.Part): - definition = """ - -> master.Child - """ - definition_new = """ - -> master.Child - --- - grandchild_id=null: int - """ - - -LOCALS_ALTER = {k: v for k, v in locals().items() if inspect.isclass(v)} -__all__ = list(LOCALS_ALTER) diff --git a/tests/test_alter.py b/tests/test_alter.py index fad4b2c33..a26306117 100644 --- a/tests/test_alter.py +++ b/tests/test_alter.py @@ -1,12 +1,66 @@ import pytest import re import datajoint as dj -from . import schema as schema_any_module, schema_alter as schema_alter_module, PREFIX -from .schema_alter import Parent, Experiment +from . import schema as schema_any_module, PREFIX + +class Experiment(dj.Imported): + original_definition = """ # information about experiments + -> Subject + experiment_id :smallint # experiment number for this subject + --- + experiment_date :date # date when experiment was started + -> [nullable] User + data_path="" :varchar(255) # file path to recorded data + notes="" :varchar(2048) # e.g. purpose of experiment + entry_time=CURRENT_TIMESTAMP :timestamp # automatic timestamp + """ + + definition1 = """ # Experiment + -> Subject + experiment_id :smallint # experiment number for this subject + --- + data_path : int # some number + extra=null : longblob # just testing + -> [nullable] User + subject_notes=null :varchar(2048) # {notes} e.g. purpose of experiment + entry_time=CURRENT_TIMESTAMP :timestamp # automatic timestamp + """ + + +class Parent(dj.Manual): + definition = """ + parent_id: int + """ + + class Child(dj.Part): + definition = """ + -> Parent + """ + definition_new = """ + -> master + --- + child_id=null: int + """ + + class Grandchild(dj.Part): + definition = """ + -> master.Child + """ + definition_new = """ + -> master.Child + --- + grandchild_id=null: int + """ + + +LOCALS_ALTER = { + "Experiment": Experiment, + "Parent": Parent +} COMBINED_CONTEXT = { **schema_any_module.LOCALS_ANY, - **schema_alter_module.LOCALS_ALTER, + **LOCALS_ALTER, } @@ -54,9 +108,9 @@ def schema_alter(connection_test): schema_any(schema_any_module.Stimulus) schema_any(schema_any_module.Longblob) - # Add nodes from schema_alter_module - schema_any(Experiment, context=schema_alter_module.LOCALS_ALTER) - schema_any(Parent, context=schema_alter_module.LOCALS_ALTER) + # Overwrite Experiment and Parent nodes + schema_any(Experiment, context=LOCALS_ALTER) + schema_any(Parent, context=LOCALS_ALTER) yield schema_any schema_any.drop() From 8a8b0c3e5f33beda3a2d5d4eff57cef007186532 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 11 Dec 2023 11:34:16 -0700 Subject: [PATCH 092/212] Simpler schema_alter fixture Per @A-Baji suggestion --- tests/test_alter.py | 50 ++------------------------------------------- 1 file changed, 2 insertions(+), 48 deletions(-) diff --git a/tests/test_alter.py b/tests/test_alter.py index a26306117..a78a07f26 100644 --- a/tests/test_alter.py +++ b/tests/test_alter.py @@ -54,10 +54,7 @@ class Grandchild(dj.Part): """ -LOCALS_ALTER = { - "Experiment": Experiment, - "Parent": Parent -} +LOCALS_ALTER = {"Experiment": Experiment, "Parent": Parent} COMBINED_CONTEXT = { **schema_any_module.LOCALS_ANY, **LOCALS_ALTER, @@ -65,53 +62,10 @@ class Grandchild(dj.Part): @pytest.fixture -def 
schema_alter(connection_test):
-    schema_any = dj.Schema(
-        PREFIX + "_alter",
-        context=schema_any_module.LOCALS_ANY,
-        connection=connection_test,
-    )
-    schema_any(schema_any_module.TTest)
-    schema_any(schema_any_module.TTest2)
-    schema_any(schema_any_module.TTest3)
-    schema_any(schema_any_module.NullableNumbers)
-    schema_any(schema_any_module.TTestExtra)
-    schema_any(schema_any_module.TTestNoExtra)
-    schema_any(schema_any_module.Auto)
-    schema_any(schema_any_module.User)
-    schema_any(schema_any_module.Subject)
-    schema_any(schema_any_module.Language)
-    schema_any(schema_any_module.Experiment)
-    schema_any(schema_any_module.Trial)
-    schema_any(schema_any_module.Ephys)
-    schema_any(schema_any_module.Image)
-    schema_any(schema_any_module.UberTrash)
-    schema_any(schema_any_module.UnterTrash)
-    schema_any(schema_any_module.SimpleSource)
-    schema_any(schema_any_module.SigIntTable)
-    schema_any(schema_any_module.SigTermTable)
-    schema_any(schema_any_module.DjExceptionName)
-    schema_any(schema_any_module.ErrorClass)
-    schema_any(schema_any_module.DecimalPrimaryKey)
-    schema_any(schema_any_module.IndexRich)
-    schema_any(schema_any_module.ThingA)
-    schema_any(schema_any_module.ThingB)
-    schema_any(schema_any_module.ThingC)
-    schema_any(schema_any_module.Parent)
-    schema_any(schema_any_module.Child)
-    schema_any(schema_any_module.ComplexParent)
-    schema_any(schema_any_module.ComplexChild)
-    schema_any(schema_any_module.SubjectA)
-    schema_any(schema_any_module.SessionA)
-    schema_any(schema_any_module.SessionStatusA)
-    schema_any(schema_any_module.SessionDateA)
-    schema_any(schema_any_module.Stimulus)
-    schema_any(schema_any_module.Longblob)
-
+def schema_alter(connection_test, schema_any):
     # Overwrite Experiment and Parent nodes
     schema_any(Experiment, context=LOCALS_ALTER)
     schema_any(Parent, context=LOCALS_ALTER)
     yield schema_any
     schema_any.drop()

From 23c01f421fa7053ded3d9bc96a44b29b6308cb25 Mon Sep 17 00:00:00 2001
From: Ethan Ho
Date: Mon, 11 Dec 2023 14:31:05 -0700
Subject: [PATCH 093/212] Remove extraneous declarations

Per @A-Baji suggestion
---
 tests/test_declare.py | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/tests/test_declare.py b/tests/test_declare.py
index 0398b79fd..a88d396e7 100644
--- a/tests/test_declare.py
+++ b/tests/test_declare.py
@@ -77,7 +77,6 @@ def test_attributes(self, schema_any):
         """
         auto = Auto()
         auto.fill()
-        user = User()
         subject = Subject()
         experiment = Experiment()
         trial = Trial()
@@ -139,8 +138,6 @@ def test_attributes(self, schema_any):
         assert channel.heading.attributes["voltage"].is_blob
 
     def test_dependencies(self, schema_any):
-        auto = Auto()
-        auto.fill()
         user = User()
         subject = Subject()
         experiment = Experiment()

From 45f99970d99a55350fb041cf425fa689ffac1b37 Mon Sep 17 00:00:00 2001
From: Ethan Ho
Date: Mon, 11 Dec 2023 14:38:09 -0700
Subject: [PATCH 094/212] Fix escape sequence warning

Fix the following warning:

tests/conftest.py:121
  /workspaces/ethho-datajoint-python/tests/conftest.py:121: DeprecationWarning: invalid escape sequence \_
    cur = conn_root.query('SHOW DATABASES LIKE "{}\_%%"'.format(PREFIX))
---
 tests/conftest.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/conftest.py b/tests/conftest.py
index f0a7a58b6..f2fca93af 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -118,7 +118,7 @@ def connection_root(connection_root_bare):
 
     # Teardown
    conn_root.query("SET FOREIGN_KEY_CHECKS=0")
-    cur = conn_root.query('SHOW DATABASES LIKE 
"{}\\_%%"'.format(PREFIX)) for db in cur.fetchall(): conn_root.query("DROP DATABASE `{}`".format(db[0])) conn_root.query("SET FOREIGN_KEY_CHECKS=1") From 6046b81fd8fd297666adbb70210ff3937c7c9e48 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 11 Dec 2023 15:11:32 -0700 Subject: [PATCH 095/212] cp to tests --- tests/test_external.py | 135 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 135 insertions(+) create mode 100644 tests/test_external.py diff --git a/tests/test_external.py b/tests/test_external.py new file mode 100644 index 000000000..fcbb21fce --- /dev/null +++ b/tests/test_external.py @@ -0,0 +1,135 @@ +import numpy as np +from numpy.testing import assert_array_equal +from nose.tools import assert_true, assert_equal +from datajoint.external import ExternalTable +from datajoint.blob import pack, unpack +import datajoint as dj +from .schema_external import stores_config, SimpleRemote, Simple, schema +import os + +current_location_s3 = dj.config["stores"]["share"]["location"] +current_location_local = dj.config["stores"]["local"]["location"] + + +def setUp(self): + dj.config["stores"] = stores_config + + +def tearDown(self): + dj.config["stores"]["share"]["location"] = current_location_s3 + dj.config["stores"]["local"]["location"] = current_location_local + + +def test_external_put(): + """ + external storage put and get and remove + """ + ext = ExternalTable(schema.connection, store="raw", database=schema.database) + initial_length = len(ext) + input_ = np.random.randn(3, 7, 8) + count = 7 + extra = 3 + for i in range(count): + hash1 = ext.put(pack(input_)) + for i in range(extra): + hash2 = ext.put(pack(np.random.randn(4, 3, 2))) + + fetched_hashes = ext.fetch("hash") + assert_true(all(hash in fetched_hashes for hash in (hash1, hash2))) + assert_equal(len(ext), initial_length + 1 + extra) + + output_ = unpack(ext.get(hash1)) + assert_array_equal(input_, output_) + + +def test_s3_leading_slash(index=100, store="share"): + """ + s3 external storage configured with leading slash + """ + + oldConfig = dj.config["stores"][store]["location"] + + value = np.array([1, 2, 3]) + + id = index + dj.config["stores"][store]["location"] = "leading/slash/test" + SimpleRemote.insert([{"simple": id, "item": value}]) + assert_true( + np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) + ) + + id = index + 1 + dj.config["stores"][store]["location"] = "/leading/slash/test" + SimpleRemote.insert([{"simple": id, "item": value}]) + assert_true( + np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) + ) + + id = index + 2 + dj.config["stores"][store]["location"] = "leading\\slash\\test" + SimpleRemote.insert([{"simple": id, "item": value}]) + assert_true( + np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) + ) + + id = index + 3 + dj.config["stores"][store]["location"] = "f:\\leading\\slash\\test" + SimpleRemote.insert([{"simple": id, "item": value}]) + assert_true( + np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) + ) + + id = index + 4 + dj.config["stores"][store]["location"] = "f:\\leading/slash\\test" + SimpleRemote.insert([{"simple": id, "item": value}]) + assert_true( + np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) + ) + + id = index + 5 + dj.config["stores"][store]["location"] = "/" + SimpleRemote.insert([{"simple": id, "item": value}]) + assert_true( + np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) + ) + + id = index 
+ 6 + dj.config["stores"][store]["location"] = "C:\\" + SimpleRemote.insert([{"simple": id, "item": value}]) + assert_true( + np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) + ) + + id = index + 7 + dj.config["stores"][store]["location"] = "" + SimpleRemote.insert([{"simple": id, "item": value}]) + assert_true( + np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) + ) + + dj.config["stores"][store]["location"] = oldConfig + + +def test_file_leading_slash(): + """ + file external storage configured with leading slash + """ + test_s3_leading_slash(index=200, store="local") + + +def test_remove_fail(): + # https://github.com/datajoint/datajoint-python/issues/953 + data = dict(simple=2, item=[1, 2, 3]) + Simple.insert1(data) + path1 = dj.config["stores"]["local"]["location"] + "/djtest_extern/4/c/" + currentMode = int(oct(os.stat(path1).st_mode), 8) + os.chmod(path1, 0o40555) + (Simple & "simple=2").delete() + listOfErrors = schema.external["local"].delete(delete_external_files=True) + assert len(listOfErrors) == 1, "unexpected number of errors" + assert ( + len(schema.external["local"] & dict(hash=listOfErrors[0][0])) == 1 + ), "unexpected number of rows in external table" + # ---------------------CLEAN UP-------------------- + os.chmod(path1, currentMode) + listOfErrors = schema.external["local"].delete(delete_external_files=True) From cdb40525b329814a95839278631e58c4c4e4cbaa Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 11 Dec 2023 15:12:09 -0700 Subject: [PATCH 096/212] nose2pytest test_external --- tests/test_external.py | 36 ++++++++++-------------------------- 1 file changed, 10 insertions(+), 26 deletions(-) diff --git a/tests/test_external.py b/tests/test_external.py index fcbb21fce..b599e8fc7 100644 --- a/tests/test_external.py +++ b/tests/test_external.py @@ -35,8 +35,8 @@ def test_external_put(): hash2 = ext.put(pack(np.random.randn(4, 3, 2))) fetched_hashes = ext.fetch("hash") - assert_true(all(hash in fetched_hashes for hash in (hash1, hash2))) - assert_equal(len(ext), initial_length + 1 + extra) + assert all(hash in fetched_hashes for hash in (hash1, hash2)) + assert len(ext) == initial_length + 1 + extra output_ = unpack(ext.get(hash1)) assert_array_equal(input_, output_) @@ -54,58 +54,42 @@ def test_s3_leading_slash(index=100, store="share"): id = index dj.config["stores"][store]["location"] = "leading/slash/test" SimpleRemote.insert([{"simple": id, "item": value}]) - assert_true( - np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) - ) + assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) id = index + 1 dj.config["stores"][store]["location"] = "/leading/slash/test" SimpleRemote.insert([{"simple": id, "item": value}]) - assert_true( - np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) - ) + assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) id = index + 2 dj.config["stores"][store]["location"] = "leading\\slash\\test" SimpleRemote.insert([{"simple": id, "item": value}]) - assert_true( - np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) - ) + assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) id = index + 3 dj.config["stores"][store]["location"] = "f:\\leading\\slash\\test" SimpleRemote.insert([{"simple": id, "item": value}]) - assert_true( - np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) - ) + assert 
np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) id = index + 4 dj.config["stores"][store]["location"] = "f:\\leading/slash\\test" SimpleRemote.insert([{"simple": id, "item": value}]) - assert_true( - np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) - ) + assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) id = index + 5 dj.config["stores"][store]["location"] = "/" SimpleRemote.insert([{"simple": id, "item": value}]) - assert_true( - np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) - ) + assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) id = index + 6 dj.config["stores"][store]["location"] = "C:\\" SimpleRemote.insert([{"simple": id, "item": value}]) - assert_true( - np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) - ) + assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) id = index + 7 dj.config["stores"][store]["location"] = "" SimpleRemote.insert([{"simple": id, "item": value}]) - assert_true( - np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) - ) + assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) dj.config["stores"][store]["location"] = oldConfig From 49670c953927cd06d631cbcf70499d637fff3a71 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 11 Dec 2023 15:38:45 -0700 Subject: [PATCH 097/212] Use pytest tmpdir fixtures for mocking stores --- tests/conftest.py | 38 ++++++++++++++++++++++++++++---------- 1 file changed, 28 insertions(+), 10 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index f2fca93af..d148e9b35 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -10,7 +10,6 @@ import networkx as nx import json from pathlib import Path -import tempfile from datajoint import errors from datajoint.errors import ADAPTED_TYPE_SWITCH, FILEPATH_FEATURE_SWITCH from . 
import ( @@ -176,16 +175,16 @@ def connection_test(connection_root): @pytest.fixture(scope="session") -def stores_config(): +def stores_config(tmpdir_factory): stores_config = { - "raw": dict(protocol="file", location=tempfile.mkdtemp()), + "raw": dict(protocol="file", location=tmpdir_factory.mktemp("raw")), "repo": dict( - stage=tempfile.mkdtemp(), protocol="file", location=tempfile.mkdtemp() + stage=tmpdir_factory.mktemp("repo"), protocol="file", location=tmpdir_factory.mktemp("repo") ), "repo-s3": dict( - S3_CONN_INFO, protocol="s3", location="dj/repo", stage=tempfile.mkdtemp() + S3_CONN_INFO, protocol="s3", location="dj/repo", stage=tmpdir_factory.mktemp("repo-s3") ), - "local": dict(protocol="file", location=tempfile.mkdtemp(), subfolding=(1, 1)), + "local": dict(protocol="file", location=tmpdir_factory.mktemp("local"), subfolding=(1, 1)), "share": dict( S3_CONN_INFO, protocol="s3", location="dj/store/repo", subfolding=(2, 4) ), @@ -193,6 +192,28 @@ def stores_config(): return stores_config +@pytest.fixture +def mock_stores(stores_config): + og_stores_config = dj.config.get("stores") + dj.config["stores"] = stores_config + yield + if og_stores_config is None: + del dj.config["stores"] + else: + dj.config["stores"] = og_stores_config + + +@pytest.fixture +def mock_cache(tmpdir_factory): + og_cache = dj.config.get("cache") + dj.config["cache"] = tmpdir_factory.mktemp("cache") + yield + if og_cache is None: + del dj.config["cache"] + else: + dj.config["cache"] = og_cache + + @pytest.fixture def schema_any(connection_test): schema_any = dj.Schema( @@ -287,15 +308,12 @@ def schema_adv(connection_test): @pytest.fixture -def schema_ext(connection_test, stores_config, enable_filepath_feature): +def schema_ext(connection_test, enable_filepath_feature, mock_stores, mock_cache): schema = dj.Schema( PREFIX + "_extern", context=schema_external.LOCALS_EXTERNAL, connection=connection_test, ) - dj.config["stores"] = stores_config - dj.config["cache"] = tempfile.mkdtemp() - schema(schema_external.Simple) schema(schema_external.SimpleRemote) schema(schema_external.Seed) From 012e46d6a948a01192f39147aab8989d425b7799 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 11 Dec 2023 16:08:29 -0700 Subject: [PATCH 098/212] Migrate test_external --- tests/test_external.py | 123 +++++++++++++++++++---------------------- 1 file changed, 58 insertions(+), 65 deletions(-) diff --git a/tests/test_external.py b/tests/test_external.py index b599e8fc7..ca235c6d2 100644 --- a/tests/test_external.py +++ b/tests/test_external.py @@ -1,30 +1,17 @@ import numpy as np from numpy.testing import assert_array_equal -from nose.tools import assert_true, assert_equal from datajoint.external import ExternalTable from datajoint.blob import pack, unpack import datajoint as dj -from .schema_external import stores_config, SimpleRemote, Simple, schema +from .schema_external import SimpleRemote, Simple import os -current_location_s3 = dj.config["stores"]["share"]["location"] -current_location_local = dj.config["stores"]["local"]["location"] - -def setUp(self): - dj.config["stores"] = stores_config - - -def tearDown(self): - dj.config["stores"]["share"]["location"] = current_location_s3 - dj.config["stores"]["local"]["location"] = current_location_local - - -def test_external_put(): +def test_external_put(schema_ext, mock_stores, mock_cache): """ external storage put and get and remove """ - ext = ExternalTable(schema.connection, store="raw", database=schema.database) + ext = ExternalTable(schema_ext.connection, store="raw", 
database=schema_ext.database) initial_length = len(ext) input_ = np.random.randn(3, 7, 8) count = 7 @@ -42,78 +29,84 @@ def test_external_put(): assert_array_equal(input_, output_) -def test_s3_leading_slash(index=100, store="share"): - """ - s3 external storage configured with leading slash - """ +class TestLeadingSlash: + + def test_s3_leading_slash(self, schema_ext, mock_stores, mock_cache, minio_client): + """ + s3 external storage configured with leading slash + """ + self._leading_slash(schema_ext, index=100, store="share") - oldConfig = dj.config["stores"][store]["location"] + def test_file_leading_slash(self, schema_ext, mock_stores, mock_cache, minio_client): + """ + File external storage configured with leading slash + """ + self._leading_slash(schema_ext, index=200, store="local") - value = np.array([1, 2, 3]) + def _leading_slash(self, schema_ext, index, store): + oldConfig = dj.config["stores"][store]["location"] + value = np.array([1, 2, 3]) - id = index - dj.config["stores"][store]["location"] = "leading/slash/test" - SimpleRemote.insert([{"simple": id, "item": value}]) - assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) + id = index + dj.config["stores"][store]["location"] = "leading/slash/test" + SimpleRemote.insert([{"simple": id, "item": value}]) + assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) - id = index + 1 - dj.config["stores"][store]["location"] = "/leading/slash/test" - SimpleRemote.insert([{"simple": id, "item": value}]) - assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) + id = index + 1 + dj.config["stores"][store]["location"] = "/leading/slash/test" + SimpleRemote.insert([{"simple": id, "item": value}]) + assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) - id = index + 2 - dj.config["stores"][store]["location"] = "leading\\slash\\test" - SimpleRemote.insert([{"simple": id, "item": value}]) - assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) + id = index + 2 + dj.config["stores"][store]["location"] = "leading\\slash\\test" + SimpleRemote.insert([{"simple": id, "item": value}]) + assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) - id = index + 3 - dj.config["stores"][store]["location"] = "f:\\leading\\slash\\test" - SimpleRemote.insert([{"simple": id, "item": value}]) - assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) + id = index + 3 + dj.config["stores"][store]["location"] = "f:\\leading\\slash\\test" + SimpleRemote.insert([{"simple": id, "item": value}]) + assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) - id = index + 4 - dj.config["stores"][store]["location"] = "f:\\leading/slash\\test" - SimpleRemote.insert([{"simple": id, "item": value}]) - assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) + id = index + 4 + dj.config["stores"][store]["location"] = "f:\\leading/slash\\test" + SimpleRemote.insert([{"simple": id, "item": value}]) + assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) - id = index + 5 - dj.config["stores"][store]["location"] = "/" - SimpleRemote.insert([{"simple": id, "item": value}]) - assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) + id = index + 5 + dj.config["stores"][store]["location"] = "/" + SimpleRemote.insert([{"simple": id, "item": value}]) + assert 
np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) - id = index + 6 - dj.config["stores"][store]["location"] = "C:\\" - SimpleRemote.insert([{"simple": id, "item": value}]) - assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) + id = index + 6 + dj.config["stores"][store]["location"] = "C:\\" + SimpleRemote.insert([{"simple": id, "item": value}]) + assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) - id = index + 7 - dj.config["stores"][store]["location"] = "" - SimpleRemote.insert([{"simple": id, "item": value}]) - assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) + id = index + 7 + dj.config["stores"][store]["location"] = "" + SimpleRemote.insert([{"simple": id, "item": value}]) + assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) - dj.config["stores"][store]["location"] = oldConfig + dj.config["stores"][store]["location"] = oldConfig -def test_file_leading_slash(): +def test_remove_fail(schema_ext, mock_stores, mock_cache, minio_client): """ - file external storage configured with leading slash + https://github.com/datajoint/datajoint-python/issues/953 """ - test_s3_leading_slash(index=200, store="local") + assert dj.config["stores"]["local"]["location"] - -def test_remove_fail(): - # https://github.com/datajoint/datajoint-python/issues/953 data = dict(simple=2, item=[1, 2, 3]) Simple.insert1(data) path1 = dj.config["stores"]["local"]["location"] + "/djtest_extern/4/c/" currentMode = int(oct(os.stat(path1).st_mode), 8) os.chmod(path1, 0o40555) (Simple & "simple=2").delete() - listOfErrors = schema.external["local"].delete(delete_external_files=True) - assert len(listOfErrors) == 1, "unexpected number of errors" + listOfErrors = schema_ext.external["local"].delete(delete_external_files=True) + assert ( - len(schema.external["local"] & dict(hash=listOfErrors[0][0])) == 1 + len(schema_ext.external["local"] & dict(hash=listOfErrors[0][0])) == 1 ), "unexpected number of rows in external table" # ---------------------CLEAN UP-------------------- os.chmod(path1, currentMode) - listOfErrors = schema.external["local"].delete(delete_external_files=True) + listOfErrors = schema_ext.external["local"].delete(delete_external_files=True) From 939295997d316ae129d32411127a9c4ef282de3c Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 11 Dec 2023 16:09:19 -0700 Subject: [PATCH 099/212] Format with black --- tests/conftest.py | 13 ++++++++++--- tests/test_external.py | 41 ++++++++++++++++++++++++++++++----------- 2 files changed, 40 insertions(+), 14 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index d148e9b35..9d697ef47 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -179,12 +179,19 @@ def stores_config(tmpdir_factory): stores_config = { "raw": dict(protocol="file", location=tmpdir_factory.mktemp("raw")), "repo": dict( - stage=tmpdir_factory.mktemp("repo"), protocol="file", location=tmpdir_factory.mktemp("repo") + stage=tmpdir_factory.mktemp("repo"), + protocol="file", + location=tmpdir_factory.mktemp("repo"), ), "repo-s3": dict( - S3_CONN_INFO, protocol="s3", location="dj/repo", stage=tmpdir_factory.mktemp("repo-s3") + S3_CONN_INFO, + protocol="s3", + location="dj/repo", + stage=tmpdir_factory.mktemp("repo-s3"), + ), + "local": dict( + protocol="file", location=tmpdir_factory.mktemp("local"), subfolding=(1, 1) ), - "local": dict(protocol="file", location=tmpdir_factory.mktemp("local"), subfolding=(1, 1)), "share": 
dict( S3_CONN_INFO, protocol="s3", location="dj/store/repo", subfolding=(2, 4) ), diff --git a/tests/test_external.py b/tests/test_external.py index ca235c6d2..1e212b7d9 100644 --- a/tests/test_external.py +++ b/tests/test_external.py @@ -11,7 +11,9 @@ def test_external_put(schema_ext, mock_stores, mock_cache): """ external storage put and get and remove """ - ext = ExternalTable(schema_ext.connection, store="raw", database=schema_ext.database) + ext = ExternalTable( + schema_ext.connection, store="raw", database=schema_ext.database + ) initial_length = len(ext) input_ = np.random.randn(3, 7, 8) count = 7 @@ -30,14 +32,15 @@ def test_external_put(schema_ext, mock_stores, mock_cache): class TestLeadingSlash: - def test_s3_leading_slash(self, schema_ext, mock_stores, mock_cache, minio_client): """ s3 external storage configured with leading slash """ self._leading_slash(schema_ext, index=100, store="share") - def test_file_leading_slash(self, schema_ext, mock_stores, mock_cache, minio_client): + def test_file_leading_slash( + self, schema_ext, mock_stores, mock_cache, minio_client + ): """ File external storage configured with leading slash """ @@ -50,42 +53,58 @@ def _leading_slash(self, schema_ext, index, store): id = index dj.config["stores"][store]["location"] = "leading/slash/test" SimpleRemote.insert([{"simple": id, "item": value}]) - assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) + assert np.array_equal( + value, (SimpleRemote & "simple={}".format(id)).fetch1("item") + ) id = index + 1 dj.config["stores"][store]["location"] = "/leading/slash/test" SimpleRemote.insert([{"simple": id, "item": value}]) - assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) + assert np.array_equal( + value, (SimpleRemote & "simple={}".format(id)).fetch1("item") + ) id = index + 2 dj.config["stores"][store]["location"] = "leading\\slash\\test" SimpleRemote.insert([{"simple": id, "item": value}]) - assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) + assert np.array_equal( + value, (SimpleRemote & "simple={}".format(id)).fetch1("item") + ) id = index + 3 dj.config["stores"][store]["location"] = "f:\\leading\\slash\\test" SimpleRemote.insert([{"simple": id, "item": value}]) - assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) + assert np.array_equal( + value, (SimpleRemote & "simple={}".format(id)).fetch1("item") + ) id = index + 4 dj.config["stores"][store]["location"] = "f:\\leading/slash\\test" SimpleRemote.insert([{"simple": id, "item": value}]) - assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) + assert np.array_equal( + value, (SimpleRemote & "simple={}".format(id)).fetch1("item") + ) id = index + 5 dj.config["stores"][store]["location"] = "/" SimpleRemote.insert([{"simple": id, "item": value}]) - assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) + assert np.array_equal( + value, (SimpleRemote & "simple={}".format(id)).fetch1("item") + ) id = index + 6 dj.config["stores"][store]["location"] = "C:\\" SimpleRemote.insert([{"simple": id, "item": value}]) - assert np.array_equal(value, (SimpleRemote & "simple={}".format(id)).fetch1("item")) + assert np.array_equal( + value, (SimpleRemote & "simple={}".format(id)).fetch1("item") + ) id = index + 7 dj.config["stores"][store]["location"] = "" SimpleRemote.insert([{"simple": id, "item": value}]) - assert np.array_equal(value, (SimpleRemote & 
"simple={}".format(id)).fetch1("item")) + assert np.array_equal( + value, (SimpleRemote & "simple={}".format(id)).fetch1("item") + ) dj.config["stores"][store]["location"] = oldConfig From 071a3c0984e38d3407f976558ef05b33b7695151 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 11 Dec 2023 16:14:38 -0700 Subject: [PATCH 100/212] cp to tests --- tests/test_external_class.py | 54 ++++++++++++++++++++++++++++++++++++ 1 file changed, 54 insertions(+) create mode 100644 tests/test_external_class.py diff --git a/tests/test_external_class.py b/tests/test_external_class.py new file mode 100644 index 000000000..63c1fb994 --- /dev/null +++ b/tests/test_external_class.py @@ -0,0 +1,54 @@ +from nose.tools import assert_true, assert_list_equal +from numpy.testing import assert_almost_equal +import datajoint as dj +from . import schema_external as modu + + +def setUp(self): + dj.config["stores"] = modu.stores_config + + +def test_heading(): + heading = modu.Simple().heading + assert_true("item" in heading) + assert_true(heading["item"].is_external) + + +def test_insert_and_fetch(): + original_list = [1, 3, 8] + modu.Simple().insert1(dict(simple=1, item=original_list)) + # test fetch + q = (modu.Simple() & {"simple": 1}).fetch("item")[0] + assert_list_equal(list(q), original_list) + # test fetch1 as a tuple + q = (modu.Simple() & {"simple": 1}).fetch1("item") + assert_list_equal(list(q), original_list) + # test fetch1 as a dict + q = (modu.Simple() & {"simple": 1}).fetch1() + assert_list_equal(list(q["item"]), original_list) + # test without cache + previous_cache = dj.config["cache"] + dj.config["cache"] = None + q = (modu.Simple() & {"simple": 1}).fetch1() + assert_list_equal(list(q["item"]), original_list) + # test with cache + dj.config["cache"] = previous_cache + q = (modu.Simple() & {"simple": 1}).fetch1() + assert_list_equal(list(q["item"]), original_list) + + +def test_populate(): + image = modu.Image() + image.populate() + remaining, total = image.progress() + assert_true(total == len(modu.Dimension() * modu.Seed()) and remaining == 0) + for img, neg, dimensions in zip( + *(image * modu.Dimension()).fetch("img", "neg", "dimensions") + ): + assert_list_equal(list(img.shape), list(dimensions)) + assert_almost_equal(img, -neg) + image.delete() + dj.errors._switch_filepath_types(True) + for external_table in image.external.values(): + external_table.delete(display_progress=False, delete_external_files=True) + dj.errors._switch_filepath_types(False) From cee3da898d1e60a9b3d0645a3533e10b5980a64a Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 11 Dec 2023 16:31:35 -0700 Subject: [PATCH 101/212] Migrate test_external_class --- tests/test_external_class.py | 51 ++++++++++++++++-------------------- 1 file changed, 22 insertions(+), 29 deletions(-) diff --git a/tests/test_external_class.py b/tests/test_external_class.py index 63c1fb994..b4a4939ba 100644 --- a/tests/test_external_class.py +++ b/tests/test_external_class.py @@ -1,54 +1,47 @@ -from nose.tools import assert_true, assert_list_equal from numpy.testing import assert_almost_equal import datajoint as dj -from . import schema_external as modu +from . 
import schema_external -def setUp(self): - dj.config["stores"] = modu.stores_config +def test_heading(schema_ext, mock_stores): + heading = schema_external.Simple().heading + assert "item" in heading + assert heading["item"].is_external -def test_heading(): - heading = modu.Simple().heading - assert_true("item" in heading) - assert_true(heading["item"].is_external) - - -def test_insert_and_fetch(): +def test_insert_and_fetch(schema_ext, mock_stores, mock_cache): original_list = [1, 3, 8] - modu.Simple().insert1(dict(simple=1, item=original_list)) + schema_external.Simple().insert1(dict(simple=1, item=original_list)) # test fetch - q = (modu.Simple() & {"simple": 1}).fetch("item")[0] - assert_list_equal(list(q), original_list) + q = (schema_external.Simple() & {"simple": 1}).fetch("item")[0] + assert list(q) == original_list # test fetch1 as a tuple - q = (modu.Simple() & {"simple": 1}).fetch1("item") - assert_list_equal(list(q), original_list) + q = (schema_external.Simple() & {"simple": 1}).fetch1("item") + assert list(q) == original_list # test fetch1 as a dict - q = (modu.Simple() & {"simple": 1}).fetch1() - assert_list_equal(list(q["item"]), original_list) + q = (schema_external.Simple() & {"simple": 1}).fetch1() + assert list(q["item"]) == original_list # test without cache previous_cache = dj.config["cache"] dj.config["cache"] = None - q = (modu.Simple() & {"simple": 1}).fetch1() - assert_list_equal(list(q["item"]), original_list) + q = (schema_external.Simple() & {"simple": 1}).fetch1() + assert list(q["item"]) == original_list # test with cache dj.config["cache"] = previous_cache - q = (modu.Simple() & {"simple": 1}).fetch1() - assert_list_equal(list(q["item"]), original_list) + q = (schema_external.Simple() & {"simple": 1}).fetch1() + assert list(q["item"]) == original_list -def test_populate(): - image = modu.Image() +def test_populate(schema_ext, mock_stores): + image = schema_external.Image() image.populate() remaining, total = image.progress() - assert_true(total == len(modu.Dimension() * modu.Seed()) and remaining == 0) + assert total == len(schema_external.Dimension() * schema_external.Seed()) and remaining == 0 for img, neg, dimensions in zip( - *(image * modu.Dimension()).fetch("img", "neg", "dimensions") + *(image * schema_external.Dimension()).fetch("img", "neg", "dimensions") ): - assert_list_equal(list(img.shape), list(dimensions)) + assert list(img.shape) == list(dimensions) assert_almost_equal(img, -neg) image.delete() - dj.errors._switch_filepath_types(True) for external_table in image.external.values(): external_table.delete(display_progress=False, delete_external_files=True) - dj.errors._switch_filepath_types(False) From 7c3c2b7b2fc98710de8cde942220cc34d7133651 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 11 Dec 2023 16:31:53 -0700 Subject: [PATCH 102/212] Format with black --- tests/test_external_class.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/test_external_class.py b/tests/test_external_class.py index b4a4939ba..15136a944 100644 --- a/tests/test_external_class.py +++ b/tests/test_external_class.py @@ -36,7 +36,10 @@ def test_populate(schema_ext, mock_stores): image = schema_external.Image() image.populate() remaining, total = image.progress() - assert total == len(schema_external.Dimension() * schema_external.Seed()) and remaining == 0 + assert ( + total == len(schema_external.Dimension() * schema_external.Seed()) + and remaining == 0 + ) for img, neg, dimensions in zip( *(image * 
schema_external.Dimension()).fetch("img", "neg", "dimensions") ): From e23f9091ab794356343129bedc6201b98803129e Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 11 Dec 2023 16:42:23 -0700 Subject: [PATCH 103/212] cp to tests --- tests/test_fetch.py | 390 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 390 insertions(+) create mode 100644 tests/test_fetch.py diff --git a/tests/test_fetch.py b/tests/test_fetch.py new file mode 100644 index 000000000..1c415cb27 --- /dev/null +++ b/tests/test_fetch.py @@ -0,0 +1,390 @@ +from nose.tools import ( + assert_true, + raises, + assert_equal, + assert_dict_equal, + assert_list_equal, + assert_set_equal, +) +from operator import itemgetter +import itertools +import numpy as np +import decimal +import pandas +import warnings +from . import schema +from .schema import Parent, Stimulus +import datajoint as dj +import os +import logging +import io + +logger = logging.getLogger("datajoint") + + +class TestFetch: + @classmethod + def setup_class(cls): + cls.subject = schema.Subject() + cls.lang = schema.Language() + + def test_getattribute(self): + """Testing Fetch.__call__ with attributes""" + list1 = sorted( + self.subject.proj().fetch(as_dict=True), key=itemgetter("subject_id") + ) + list2 = sorted(self.subject.fetch(dj.key), key=itemgetter("subject_id")) + for l1, l2 in zip(list1, list2): + assert_dict_equal(l1, l2, "Primary key is not returned correctly") + + tmp = self.subject.fetch(order_by="subject_id") + + subject_notes, key, real_id = self.subject.fetch( + "subject_notes", dj.key, "real_id" + ) + + np.testing.assert_array_equal( + sorted(subject_notes), sorted(tmp["subject_notes"]) + ) + np.testing.assert_array_equal(sorted(real_id), sorted(tmp["real_id"])) + list1 = sorted(key, key=itemgetter("subject_id")) + for l1, l2 in zip(list1, list2): + assert_dict_equal(l1, l2, "Primary key is not returned correctly") + + def test_getattribute_for_fetch1(self): + """Testing Fetch1.__call__ with attributes""" + assert_true((self.subject & "subject_id=10").fetch1("subject_id") == 10) + assert_equal( + (self.subject & "subject_id=10").fetch1("subject_id", "species"), + (10, "monkey"), + ) + + def test_order_by(self): + """Tests order_by sorting order""" + languages = schema.Language.contents + + for ord_name, ord_lang in itertools.product(*2 * [["ASC", "DESC"]]): + cur = self.lang.fetch(order_by=("name " + ord_name, "language " + ord_lang)) + languages.sort(key=itemgetter(1), reverse=ord_lang == "DESC") + languages.sort(key=itemgetter(0), reverse=ord_name == "DESC") + for c, l in zip(cur, languages): + assert_true( + np.all(cc == ll for cc, ll in zip(c, l)), + "Sorting order is different", + ) + + def test_order_by_default(self): + """Tests order_by sorting order with defaults""" + languages = schema.Language.contents + cur = self.lang.fetch(order_by=("language", "name DESC")) + languages.sort(key=itemgetter(0), reverse=True) + languages.sort(key=itemgetter(1), reverse=False) + for c, l in zip(cur, languages): + assert_true( + np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" + ) + + def test_limit(self): + """Test the limit kwarg""" + limit = 4 + cur = self.lang.fetch(limit=limit) + assert_equal(len(cur), limit, "Length is not correct") + + def test_order_by_limit(self): + """Test the combination of order by and limit kwargs""" + languages = schema.Language.contents + + cur = self.lang.fetch(limit=4, order_by=["language", "name DESC"]) + languages.sort(key=itemgetter(0), reverse=True) + languages.sort(key=itemgetter(1), 
reverse=False) + assert_equal(len(cur), 4, "Length is not correct") + for c, l in list(zip(cur, languages))[:4]: + assert_true( + np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" + ) + + @staticmethod + def test_head_tail(): + query = schema.User * schema.Language + n = 5 + frame = query.head(n, format="frame") + assert_true(isinstance(frame, pandas.DataFrame)) + array = query.head(n, format="array") + assert_equal(array.size, n) + assert_equal(len(frame), n) + assert_list_equal(query.primary_key, frame.index.names) + + n = 4 + frame = query.tail(n, format="frame") + array = query.tail(n, format="array") + assert_equal(array.size, n) + assert_equal(len(frame), n) + assert_list_equal(query.primary_key, frame.index.names) + + def test_limit_offset(self): + """Test the limit and offset kwargs together""" + languages = schema.Language.contents + + cur = self.lang.fetch(offset=2, limit=4, order_by=["language", "name DESC"]) + languages.sort(key=itemgetter(0), reverse=True) + languages.sort(key=itemgetter(1), reverse=False) + assert_equal(len(cur), 4, "Length is not correct") + for c, l in list(zip(cur, languages[2:6])): + assert_true( + np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" + ) + + def test_iter(self): + """Test iterator""" + languages = schema.Language.contents + cur = self.lang.fetch(order_by=["language", "name DESC"]) + languages.sort(key=itemgetter(0), reverse=True) + languages.sort(key=itemgetter(1), reverse=False) + for (name, lang), (tname, tlang) in list(zip(cur, languages)): + assert_true(name == tname and lang == tlang, "Values are not the same") + # now as dict + cur = self.lang.fetch(as_dict=True, order_by=("language", "name DESC")) + for row, (tname, tlang) in list(zip(cur, languages)): + assert_true( + row["name"] == tname and row["language"] == tlang, + "Values are not the same", + ) + + def test_keys(self): + """test key fetch""" + languages = schema.Language.contents + languages.sort(key=itemgetter(0), reverse=True) + languages.sort(key=itemgetter(1), reverse=False) + + cur = self.lang.fetch("name", "language", order_by=("language", "name DESC")) + cur2 = list(self.lang.fetch("KEY", order_by=["language", "name DESC"])) + + for c, c2 in zip(zip(*cur), cur2): + assert_true(c == tuple(c2.values()), "Values are not the same") + + def test_attributes_as_dict(self): # issue #595 + attrs = ("species", "date_of_birth") + result = self.subject.fetch(*attrs, as_dict=True) + assert_true(bool(result) and len(result) == len(self.subject)) + assert_set_equal(set(result[0]), set(attrs)) + + def test_fetch1_step1(self): + key = {"name": "Edgar", "language": "Japanese"} + true = schema.Language.contents[-1] + dat = (self.lang & key).fetch1() + for k, (ke, c) in zip(true, dat.items()): + assert_true( + k == c == (self.lang & key).fetch1(ke), "Values are not the same" + ) + + @raises(dj.DataJointError) + def test_misspelled_attribute(self): + f = (schema.Language & 'lang = "ENGLISH"').fetch() + + def test_repr(self): + """Test string representation of fetch, returning table preview""" + repr = self.subject.fetch.__repr__() + n = len(repr.strip().split("\n")) + limit = dj.config["display.limit"] + # 3 lines are used for headers (2) and summary statement (1) + assert_true(n - 3 <= limit) + + @raises(dj.DataJointError) + def test_fetch_none(self): + """Test preparing attributes for getitem""" + self.lang.fetch(None) + + def test_asdict(self): + """Test returns as dictionaries""" + d = self.lang.fetch(as_dict=True) + for dd in d: + 
assert_true(isinstance(dd, dict)) + + def test_offset(self): + """Tests offset""" + cur = self.lang.fetch(limit=4, offset=1, order_by=["language", "name DESC"]) + + languages = self.lang.contents + languages.sort(key=itemgetter(0), reverse=True) + languages.sort(key=itemgetter(1), reverse=False) + assert_equal(len(cur), 4, "Length is not correct") + for c, l in list(zip(cur, languages[1:]))[:4]: + assert_true( + np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" + ) + + def test_limit_warning(self): + """Tests whether warning is raised if offset is used without limit.""" + log_capture = io.StringIO() + stream_handler = logging.StreamHandler(log_capture) + log_format = logging.Formatter( + "[%(asctime)s][%(funcName)s][%(levelname)s]: %(message)s" + ) + stream_handler.setFormatter(log_format) + stream_handler.set_name("test_limit_warning") + logger.addHandler(stream_handler) + self.lang.fetch(offset=1) + + log_contents = log_capture.getvalue() + log_capture.close() + + for handler in logger.handlers: # Clean up handler + if handler.name == "test_limit_warning": + logger.removeHandler(handler) + assert "[WARNING]: Offset set, but no limit." in log_contents + + def test_len(self): + """Tests __len__""" + assert_equal( + len(self.lang.fetch()), len(self.lang), "__len__ is not behaving properly" + ) + + @raises(dj.DataJointError) + def test_fetch1_step2(self): + """Tests whether fetch1 raises error""" + self.lang.fetch1() + + @raises(dj.DataJointError) + def test_fetch1_step3(self): + """Tests whether fetch1 raises error""" + self.lang.fetch1("name") + + def test_decimal(self): + """Tests that decimal fields are correctly fetched and used in restrictions, see issue #334""" + rel = schema.DecimalPrimaryKey() + rel.insert1([decimal.Decimal("3.1415926")]) + keys = rel.fetch() + assert_true(len(rel & keys[0]) == 1) + keys = rel.fetch(dj.key) + assert_true(len(rel & keys[1]) == 1) + + def test_nullable_numbers(self): + """test mixture of values and nulls in numeric attributes""" + table = schema.NullableNumbers() + table.insert( + ( + ( + k, + np.random.randn(), + np.random.randint(-1000, 1000), + np.random.randn(), + ) + for k in range(10) + ) + ) + table.insert1((100, None, None, None)) + f, d, i = table.fetch("fvalue", "dvalue", "ivalue") + assert_true(None in i) + assert_true(any(np.isnan(d))) + assert_true(any(np.isnan(f))) + + def test_fetch_format(self): + """test fetch_format='frame'""" + with dj.config(fetch_format="frame"): + # test if lists are both dicts + list1 = sorted( + self.subject.proj().fetch(as_dict=True), key=itemgetter("subject_id") + ) + list2 = sorted(self.subject.fetch(dj.key), key=itemgetter("subject_id")) + for l1, l2 in zip(list1, list2): + assert_dict_equal(l1, l2, "Primary key is not returned correctly") + + # tests if pandas dataframe + tmp = self.subject.fetch(order_by="subject_id") + assert_true(isinstance(tmp, pandas.DataFrame)) + tmp = tmp.to_records() + + subject_notes, key, real_id = self.subject.fetch( + "subject_notes", dj.key, "real_id" + ) + + np.testing.assert_array_equal( + sorted(subject_notes), sorted(tmp["subject_notes"]) + ) + np.testing.assert_array_equal(sorted(real_id), sorted(tmp["real_id"])) + list1 = sorted(key, key=itemgetter("subject_id")) + for l1, l2 in zip(list1, list2): + assert_dict_equal(l1, l2, "Primary key is not returned correctly") + + def test_key_fetch1(self): + """test KEY fetch1 - issue #976""" + with dj.config(fetch_format="array"): + k1 = (self.subject & "subject_id=10").fetch1("KEY") + with 
dj.config(fetch_format="frame"): + k2 = (self.subject & "subject_id=10").fetch1("KEY") + assert_equal(k1, k2) + + def test_same_secondary_attribute(self): + children = (schema.Child * schema.Parent().proj()).fetch()["name"] + assert len(children) == 1 + assert children[0] == "Dan" + + def test_query_caching(self): + # initialize cache directory + os.mkdir(os.path.expanduser("~/dj_query_cache")) + + with dj.config(query_cache=os.path.expanduser("~/dj_query_cache")): + conn = schema.TTest3.connection + # insert sample data and load cache + schema.TTest3.insert([dict(key=100 + i, value=200 + i) for i in range(2)]) + conn.set_query_cache(query_cache="main") + cached_res = schema.TTest3().fetch() + # attempt to insert while caching enabled + try: + schema.TTest3.insert( + [dict(key=200 + i, value=400 + i) for i in range(2)] + ) + assert False, "Insert allowed while query caching enabled" + except dj.DataJointError: + conn.set_query_cache() + # insert new data + schema.TTest3.insert([dict(key=600 + i, value=800 + i) for i in range(2)]) + # re-enable cache to access old results + conn.set_query_cache(query_cache="main") + previous_cache = schema.TTest3().fetch() + # verify properly cached and how to refresh results + assert all([c == p for c, p in zip(cached_res, previous_cache)]) + conn.set_query_cache() + uncached_res = schema.TTest3().fetch() + assert len(uncached_res) > len(cached_res) + # purge query cache + conn.purge_query_cache() + + # reset cache directory state (will fail if purge was unsuccessful) + os.rmdir(os.path.expanduser("~/dj_query_cache")) + + def test_fetch_group_by(self): + # https://github.com/datajoint/datajoint-python/issues/914 + + assert Parent().fetch("KEY", order_by="name") == [{"parent_id": 1}] + + def test_dj_u_distinct(self): + # Test developed to see if removing DISTINCT from the select statement + # generation breaks the dj.U universal set implementation + + # Contents to be inserted + contents = [(1, 2, 3), (2, 2, 3), (3, 3, 2), (4, 5, 5)] + Stimulus.insert(contents) + + # Query the whole table + test_query = Stimulus() + + # Use dj.U to create a list of unique contrast and brightness combinations + result = dj.U("contrast", "brightness") & test_query + expected_result = [ + {"contrast": 2, "brightness": 3}, + {"contrast": 3, "brightness": 2}, + {"contrast": 5, "brightness": 5}, + ] + + fetched_result = result.fetch(as_dict=True, order_by=("contrast", "brightness")) + Stimulus.delete_quick() + assert fetched_result == expected_result + + def test_backslash(self): + # https://github.com/datajoint/datajoint-python/issues/999 + expected = "She\Hulk" + Parent.insert([(2, expected)]) + q = Parent & dict(name=expected) + assert q.fetch1("name") == expected + q.delete() From e0a0bdbb051b2029603e098f5f174b15cb1467fb Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 11 Dec 2023 16:58:15 -0700 Subject: [PATCH 104/212] First pass at migrating test_fetch --- tests/test_fetch.py | 291 ++++++++++++++++++++++---------------------- 1 file changed, 146 insertions(+), 145 deletions(-) diff --git a/tests/test_fetch.py b/tests/test_fetch.py index 1c415cb27..ec95cf9c7 100644 --- a/tests/test_fetch.py +++ b/tests/test_fetch.py @@ -1,11 +1,4 @@ -from nose.tools import ( - assert_true, - raises, - assert_equal, - assert_dict_equal, - assert_list_equal, - assert_set_equal, -) +import pytest from operator import itemgetter import itertools import numpy as np @@ -13,33 +6,26 @@ import pandas import warnings from . 
import schema -from .schema import Parent, Stimulus import datajoint as dj import os import logging import io -logger = logging.getLogger("datajoint") - class TestFetch: - @classmethod - def setup_class(cls): - cls.subject = schema.Subject() - cls.lang = schema.Language() - - def test_getattribute(self): + def test_getattribute(self, schema_any): """Testing Fetch.__call__ with attributes""" + subject = schema.Subject() list1 = sorted( - self.subject.proj().fetch(as_dict=True), key=itemgetter("subject_id") + subject.proj().fetch(as_dict=True), key=itemgetter("subject_id") ) - list2 = sorted(self.subject.fetch(dj.key), key=itemgetter("subject_id")) + list2 = sorted(subject.fetch(dj.key), key=itemgetter("subject_id")) for l1, l2 in zip(list1, list2): - assert_dict_equal(l1, l2, "Primary key is not returned correctly") + assert l1 == l2, "Primary key is not returned correctly" - tmp = self.subject.fetch(order_by="subject_id") + tmp = subject.fetch(order_by="subject_id") - subject_notes, key, real_id = self.subject.fetch( + subject_notes, key, real_id = subject.fetch( "subject_notes", dj.key, "real_id" ) @@ -49,172 +35,177 @@ def test_getattribute(self): np.testing.assert_array_equal(sorted(real_id), sorted(tmp["real_id"])) list1 = sorted(key, key=itemgetter("subject_id")) for l1, l2 in zip(list1, list2): - assert_dict_equal(l1, l2, "Primary key is not returned correctly") + assert l1 == l2, "Primary key is not returned correctly" - def test_getattribute_for_fetch1(self): + def test_getattribute_for_fetch1(self, schema_any): """Testing Fetch1.__call__ with attributes""" - assert_true((self.subject & "subject_id=10").fetch1("subject_id") == 10) - assert_equal( - (self.subject & "subject_id=10").fetch1("subject_id", "species"), - (10, "monkey"), - ) + subject = schema.Subject() + assert (subject & "subject_id=10").fetch1("subject_id") == 10 + assert ( + (subject & "subject_id=10").fetch1("subject_id", "species") == + (10, "monkey")) - def test_order_by(self): + def test_order_by(self, schema_any): """Tests order_by sorting order""" + lang = schema.Language() languages = schema.Language.contents for ord_name, ord_lang in itertools.product(*2 * [["ASC", "DESC"]]): - cur = self.lang.fetch(order_by=("name " + ord_name, "language " + ord_lang)) + cur = lang.fetch(order_by=("name " + ord_name, "language " + ord_lang)) languages.sort(key=itemgetter(1), reverse=ord_lang == "DESC") languages.sort(key=itemgetter(0), reverse=ord_name == "DESC") for c, l in zip(cur, languages): - assert_true( + assert ( np.all(cc == ll for cc, ll in zip(c, l)), "Sorting order is different", ) - def test_order_by_default(self): + def test_order_by_default(self, schema_any): """Tests order_by sorting order with defaults""" + lang = schema.Language() languages = schema.Language.contents - cur = self.lang.fetch(order_by=("language", "name DESC")) + cur = lang.fetch(order_by=("language", "name DESC")) languages.sort(key=itemgetter(0), reverse=True) languages.sort(key=itemgetter(1), reverse=False) for c, l in zip(cur, languages): - assert_true( - np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" - ) + assert np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" - def test_limit(self): + def test_limit(self, schema_any): """Test the limit kwarg""" + lang = schema.Language() limit = 4 - cur = self.lang.fetch(limit=limit) - assert_equal(len(cur), limit, "Length is not correct") + cur = lang.fetch(limit=limit) + assert len(cur) == limit, "Length is not correct" - def test_order_by_limit(self): 
+ def test_order_by_limit(self, schema_any): """Test the combination of order by and limit kwargs""" + lang = schema.Language() languages = schema.Language.contents - cur = self.lang.fetch(limit=4, order_by=["language", "name DESC"]) + cur = lang.fetch(limit=4, order_by=["language", "name DESC"]) languages.sort(key=itemgetter(0), reverse=True) languages.sort(key=itemgetter(1), reverse=False) - assert_equal(len(cur), 4, "Length is not correct") + assert len(cur) == 4, "Length is not correct" for c, l in list(zip(cur, languages))[:4]: - assert_true( - np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" - ) + assert np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" @staticmethod - def test_head_tail(): - query = schema.User * schema.Language + def test_head_tail(self, schema_any): + query = schema_any.User * schema.Language n = 5 frame = query.head(n, format="frame") - assert_true(isinstance(frame, pandas.DataFrame)) + assert isinstance(frame, pandas.DataFrame) array = query.head(n, format="array") - assert_equal(array.size, n) - assert_equal(len(frame), n) - assert_list_equal(query.primary_key, frame.index.names) + assert array.size == n + assert len(frame) == n + assert query.primary_key == frame.index.names n = 4 frame = query.tail(n, format="frame") array = query.tail(n, format="array") - assert_equal(array.size, n) - assert_equal(len(frame), n) - assert_list_equal(query.primary_key, frame.index.names) + assert array.size == n + assert len(frame) == n + assert query.primary_key == frame.index.names - def test_limit_offset(self): + def test_limit_offset(self, schema_any): """Test the limit and offset kwargs together""" + lang = schema.Language() languages = schema.Language.contents - cur = self.lang.fetch(offset=2, limit=4, order_by=["language", "name DESC"]) + cur = lang.fetch(offset=2, limit=4, order_by=["language", "name DESC"]) languages.sort(key=itemgetter(0), reverse=True) languages.sort(key=itemgetter(1), reverse=False) - assert_equal(len(cur), 4, "Length is not correct") + assert len(cur) == 4, "Length is not correct" for c, l in list(zip(cur, languages[2:6])): - assert_true( - np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" - ) + assert np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" - def test_iter(self): + def test_iter(self, schema_any): """Test iterator""" + lang = schema.Language() languages = schema.Language.contents - cur = self.lang.fetch(order_by=["language", "name DESC"]) + cur = lang.fetch(order_by=["language", "name DESC"]) languages.sort(key=itemgetter(0), reverse=True) languages.sort(key=itemgetter(1), reverse=False) for (name, lang), (tname, tlang) in list(zip(cur, languages)): - assert_true(name == tname and lang == tlang, "Values are not the same") + assert name == tname and lang == tlang, "Values are not the same" # now as dict - cur = self.lang.fetch(as_dict=True, order_by=("language", "name DESC")) + cur = lang.fetch(as_dict=True, order_by=("language", "name DESC")) for row, (tname, tlang) in list(zip(cur, languages)): - assert_true( + assert ( row["name"] == tname and row["language"] == tlang, "Values are not the same", ) - def test_keys(self): + def test_keys(self, schema_any): """test key fetch""" + lang = schema.Language() languages = schema.Language.contents languages.sort(key=itemgetter(0), reverse=True) languages.sort(key=itemgetter(1), reverse=False) - cur = self.lang.fetch("name", "language", order_by=("language", "name DESC")) - cur2 = 
list(self.lang.fetch("KEY", order_by=["language", "name DESC"])) + cur = lang.fetch("name", "language", order_by=("language", "name DESC")) + cur2 = list(lang.fetch("KEY", order_by=["language", "name DESC"])) for c, c2 in zip(zip(*cur), cur2): - assert_true(c == tuple(c2.values()), "Values are not the same") + assert c == tuple(c2.values()), "Values are not the same" - def test_attributes_as_dict(self): # issue #595 + def test_attributes_as_dict(self, schema_any): # issue #595 + subject = schema.Subject() attrs = ("species", "date_of_birth") - result = self.subject.fetch(*attrs, as_dict=True) - assert_true(bool(result) and len(result) == len(self.subject)) - assert_set_equal(set(result[0]), set(attrs)) + result = subject.fetch(*attrs, as_dict=True) + assert bool(result) and len(result) == len(subject) + assert set(result[0]) == set(attrs) - def test_fetch1_step1(self): + def test_fetch1_step1(self, schema_any): + lang = schema.Language() key = {"name": "Edgar", "language": "Japanese"} true = schema.Language.contents[-1] - dat = (self.lang & key).fetch1() + dat = (lang & key).fetch1() for k, (ke, c) in zip(true, dat.items()): - assert_true( - k == c == (self.lang & key).fetch1(ke), "Values are not the same" - ) + assert k == c == (lang & key).fetch1(ke), "Values are not the same" - @raises(dj.DataJointError) - def test_misspelled_attribute(self): - f = (schema.Language & 'lang = "ENGLISH"').fetch() + def test_misspelled_attribute(self, schema_any): + with pytest.raises(dj.DataJointError): + f = (schema.Language & 'lang = "ENGLISH"').fetch() - def test_repr(self): + def test_repr(self, schema_any): """Test string representation of fetch, returning table preview""" - repr = self.subject.fetch.__repr__() + subject = schema.Subject() + repr = subject.fetch.__repr__() n = len(repr.strip().split("\n")) limit = dj.config["display.limit"] # 3 lines are used for headers (2) and summary statement (1) - assert_true(n - 3 <= limit) + assert n - 3 <= limit - @raises(dj.DataJointError) - def test_fetch_none(self): + def test_fetch_none(self, schema_any): """Test preparing attributes for getitem""" - self.lang.fetch(None) + lang = schema.Language() + with pytest.raises(dj.DataJointError): + lang.fetch(None) - def test_asdict(self): + def test_asdict(self, schema_any): """Test returns as dictionaries""" - d = self.lang.fetch(as_dict=True) + lang = schema.Language() + d = lang.fetch(as_dict=True) for dd in d: - assert_true(isinstance(dd, dict)) + assert isinstance(dd, dict) - def test_offset(self): + def test_offset(self, schema_any): """Tests offset""" - cur = self.lang.fetch(limit=4, offset=1, order_by=["language", "name DESC"]) + lang = schema.Language() + cur = lang.fetch(limit=4, offset=1, order_by=["language", "name DESC"]) - languages = self.lang.contents + languages = lang.contents languages.sort(key=itemgetter(0), reverse=True) languages.sort(key=itemgetter(1), reverse=False) - assert_equal(len(cur), 4, "Length is not correct") + assert len(cur) == 4, "Length is not correct" for c, l in list(zip(cur, languages[1:]))[:4]: - assert_true( - np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" - ) + assert np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" - def test_limit_warning(self): + def test_limit_warning(self, schema_any): """Tests whether warning is raised if offset is used without limit.""" + lang = schema.Language() + logger = logging.getLogger("datajoint") log_capture = io.StringIO() stream_handler = logging.StreamHandler(log_capture) log_format = 
logging.Formatter( @@ -223,7 +214,7 @@ def test_limit_warning(self): stream_handler.setFormatter(log_format) stream_handler.set_name("test_limit_warning") logger.addHandler(stream_handler) - self.lang.fetch(offset=1) + lang.fetch(offset=1) log_contents = log_capture.getvalue() log_capture.close() @@ -233,32 +224,34 @@ def test_limit_warning(self): logger.removeHandler(handler) assert "[WARNING]: Offset set, but no limit." in log_contents - def test_len(self): + def test_len(self, schema_any): """Tests __len__""" - assert_equal( - len(self.lang.fetch()), len(self.lang), "__len__ is not behaving properly" - ) + lang = schema.Language() + assert ( + len(lang.fetch()) == len(lang)), "__len__ is not behaving properly" - @raises(dj.DataJointError) - def test_fetch1_step2(self): + def test_fetch1_step2(self, schema_any): """Tests whether fetch1 raises error""" - self.lang.fetch1() + lang = schema.Language() + with pytest.raises(dj.DataJointError): + lang.fetch1() - @raises(dj.DataJointError) - def test_fetch1_step3(self): + def test_fetch1_step3(self, schema_any): """Tests whether fetch1 raises error""" - self.lang.fetch1("name") + lang = schema.Language() + with pytest.raises(dj.DataJointError): + lang.fetch1("name") - def test_decimal(self): + def test_decimal(self, schema_any): """Tests that decimal fields are correctly fetched and used in restrictions, see issue #334""" rel = schema.DecimalPrimaryKey() rel.insert1([decimal.Decimal("3.1415926")]) keys = rel.fetch() - assert_true(len(rel & keys[0]) == 1) + assert len(rel & keys[0]) == 1 keys = rel.fetch(dj.key) - assert_true(len(rel & keys[1]) == 1) + assert len(rel & keys[1]) == 1 - def test_nullable_numbers(self): + def test_nullable_numbers(self, schema_any): """test mixture of values and nulls in numeric attributes""" table = schema.NullableNumbers() table.insert( @@ -274,27 +267,28 @@ def test_nullable_numbers(self): ) table.insert1((100, None, None, None)) f, d, i = table.fetch("fvalue", "dvalue", "ivalue") - assert_true(None in i) - assert_true(any(np.isnan(d))) - assert_true(any(np.isnan(f))) + assert None in i + assert any(np.isnan(d)) + assert any(np.isnan(f)) - def test_fetch_format(self): + def test_fetch_format(self, schema_any): """test fetch_format='frame'""" + subject = schema.Subject() with dj.config(fetch_format="frame"): # test if lists are both dicts list1 = sorted( - self.subject.proj().fetch(as_dict=True), key=itemgetter("subject_id") + subject.proj().fetch(as_dict=True), key=itemgetter("subject_id") ) - list2 = sorted(self.subject.fetch(dj.key), key=itemgetter("subject_id")) + list2 = sorted(subject.fetch(dj.key), key=itemgetter("subject_id")) for l1, l2 in zip(list1, list2): - assert_dict_equal(l1, l2, "Primary key is not returned correctly") + assert l1 == l2, "Primary key is not returned correctly" # tests if pandas dataframe - tmp = self.subject.fetch(order_by="subject_id") - assert_true(isinstance(tmp, pandas.DataFrame)) + tmp = subject.fetch(order_by="subject_id") + assert isinstance(tmp, pandas.DataFrame) tmp = tmp.to_records() - subject_notes, key, real_id = self.subject.fetch( + subject_notes, key, real_id = subject.fetch( "subject_notes", dj.key, "real_id" ) @@ -304,22 +298,23 @@ def test_fetch_format(self): np.testing.assert_array_equal(sorted(real_id), sorted(tmp["real_id"])) list1 = sorted(key, key=itemgetter("subject_id")) for l1, l2 in zip(list1, list2): - assert_dict_equal(l1, l2, "Primary key is not returned correctly") + assert l1 == l2, "Primary key is not returned correctly" - def 
test_key_fetch1(self): + def test_key_fetch1(self, schema_any): """test KEY fetch1 - issue #976""" + subject = schema.Subject() with dj.config(fetch_format="array"): - k1 = (self.subject & "subject_id=10").fetch1("KEY") + k1 = (subject & "subject_id=10").fetch1("KEY") with dj.config(fetch_format="frame"): - k2 = (self.subject & "subject_id=10").fetch1("KEY") - assert_equal(k1, k2) + k2 = (subject & "subject_id=10").fetch1("KEY") + assert k1 == k2 - def test_same_secondary_attribute(self): + def test_same_secondary_attribute(self, schema_any): children = (schema.Child * schema.Parent().proj()).fetch()["name"] assert len(children) == 1 assert children[0] == "Dan" - def test_query_caching(self): + def test_query_caching(self, schema_any): # initialize cache directory os.mkdir(os.path.expanduser("~/dj_query_cache")) @@ -353,21 +348,25 @@ def test_query_caching(self): # reset cache directory state (will fail if purge was unsuccessful) os.rmdir(os.path.expanduser("~/dj_query_cache")) - def test_fetch_group_by(self): - # https://github.com/datajoint/datajoint-python/issues/914 + def test_fetch_group_by(self, schema_any): + """ + https://github.com/datajoint/datajoint-python/issues/914 + """ - assert Parent().fetch("KEY", order_by="name") == [{"parent_id": 1}] + assert schema.Parent().fetch("KEY", order_by="name") == [{"parent_id": 1}] - def test_dj_u_distinct(self): - # Test developed to see if removing DISTINCT from the select statement - # generation breaks the dj.U universal set implementation + def test_dj_u_distinct(self, schema_any): + """ + Test developed to see if removing DISTINCT from the select statement + generation breaks the dj.U universal set implementation + """ # Contents to be inserted contents = [(1, 2, 3), (2, 2, 3), (3, 3, 2), (4, 5, 5)] - Stimulus.insert(contents) + schema.Stimulus.insert(contents) # Query the whole table - test_query = Stimulus() + test_query = schema.Stimulus() # Use dj.U to create a list of unique contrast and brightness combinations result = dj.U("contrast", "brightness") & test_query @@ -378,13 +377,15 @@ def test_dj_u_distinct(self): ] fetched_result = result.fetch(as_dict=True, order_by=("contrast", "brightness")) - Stimulus.delete_quick() + schema.Stimulus.delete_quick() assert fetched_result == expected_result - def test_backslash(self): - # https://github.com/datajoint/datajoint-python/issues/999 - expected = "She\Hulk" - Parent.insert([(2, expected)]) - q = Parent & dict(name=expected) + def test_backslash(self, schema_any): + """ + https://github.com/datajoint/datajoint-python/issues/999 + """ + expected = "She\\Hulk" + schema.Parent.insert([(2, expected)]) + q = schema.Parent & dict(name=expected) assert q.fetch1("name") == expected q.delete() From 97d5bf3e86b1acce81b85a6323a544368a15d6dd Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 12 Dec 2023 15:14:47 -0700 Subject: [PATCH 105/212] All but one test_fetch passing --- tests/test_fetch.py | 117 ++++++++++++++++++++++---------------------- 1 file changed, 58 insertions(+), 59 deletions(-) diff --git a/tests/test_fetch.py b/tests/test_fetch.py index ec95cf9c7..2ff213e6b 100644 --- a/tests/test_fetch.py +++ b/tests/test_fetch.py @@ -1,4 +1,5 @@ import pytest +from typing import List from operator import itemgetter import itertools import numpy as np @@ -12,10 +13,28 @@ import io +@pytest.fixture +def lang(): + yield schema.Language() + + +@pytest.fixture +def languages(lang) -> List: + og_contents = lang.contents + languages = og_contents.copy() + yield languages + lang.contents = og_contents 
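+    # (Editor's note, added comment — not in the original patch.) The copy
+    # above matters: several tests sort `languages` in place, and restoring
+    # `lang.contents` keeps the class-level contents intact for tests that
+    # run later in the session.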
+ + +@pytest.fixture +def subject(): + yield schema.Subject() + + + class TestFetch: - def test_getattribute(self, schema_any): + def test_getattribute(self, schema_any, subject): """Testing Fetch.__call__ with attributes""" - subject = schema.Subject() list1 = sorted( subject.proj().fetch(as_dict=True), key=itemgetter("subject_id") ) @@ -37,19 +56,15 @@ def test_getattribute(self, schema_any): for l1, l2 in zip(list1, list2): assert l1 == l2, "Primary key is not returned correctly" - def test_getattribute_for_fetch1(self, schema_any): + def test_getattribute_for_fetch1(self, schema_any, subject): """Testing Fetch1.__call__ with attributes""" - subject = schema.Subject() assert (subject & "subject_id=10").fetch1("subject_id") == 10 assert ( (subject & "subject_id=10").fetch1("subject_id", "species") == (10, "monkey")) - def test_order_by(self, schema_any): + def test_order_by(self, schema_any, lang, languages): """Tests order_by sorting order""" - lang = schema.Language() - languages = schema.Language.contents - for ord_name, ord_lang in itertools.product(*2 * [["ASC", "DESC"]]): cur = lang.fetch(order_by=("name " + ord_name, "language " + ord_lang)) languages.sort(key=itemgetter(1), reverse=ord_lang == "DESC") @@ -60,28 +75,22 @@ def test_order_by(self, schema_any): "Sorting order is different", ) - def test_order_by_default(self, schema_any): + def test_order_by_default(self, schema_any, lang, languages): """Tests order_by sorting order with defaults""" - lang = schema.Language() - languages = schema.Language.contents cur = lang.fetch(order_by=("language", "name DESC")) languages.sort(key=itemgetter(0), reverse=True) languages.sort(key=itemgetter(1), reverse=False) for c, l in zip(cur, languages): assert np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" - def test_limit(self, schema_any): + def test_limit(self, schema_any, lang): """Test the limit kwarg""" - lang = schema.Language() limit = 4 cur = lang.fetch(limit=limit) assert len(cur) == limit, "Length is not correct" - def test_order_by_limit(self, schema_any): + def test_order_by_limit(self, schema_any, lang, languages): """Test the combination of order by and limit kwargs""" - lang = schema.Language() - languages = schema.Language.contents - cur = lang.fetch(limit=4, order_by=["language", "name DESC"]) languages.sort(key=itemgetter(0), reverse=True) languages.sort(key=itemgetter(1), reverse=False) @@ -89,9 +98,8 @@ def test_order_by_limit(self, schema_any): for c, l in list(zip(cur, languages))[:4]: assert np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" - @staticmethod def test_head_tail(self, schema_any): - query = schema_any.User * schema.Language + query = schema.User * schema.Language n = 5 frame = query.head(n, format="frame") assert isinstance(frame, pandas.DataFrame) @@ -107,11 +115,8 @@ def test_head_tail(self, schema_any): assert len(frame) == n assert query.primary_key == frame.index.names - def test_limit_offset(self, schema_any): + def test_limit_offset(self, schema_any, lang, languages): """Test the limit and offset kwargs together""" - lang = schema.Language() - languages = schema.Language.contents - cur = lang.fetch(offset=2, limit=4, order_by=["language", "name DESC"]) languages.sort(key=itemgetter(0), reverse=True) languages.sort(key=itemgetter(1), reverse=False) @@ -119,15 +124,13 @@ def test_limit_offset(self, schema_any): for c, l in list(zip(cur, languages[2:6])): assert np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" - def test_iter(self, 
schema_any): + def test_iter(self, schema_any, lang, languages): """Test iterator""" - lang = schema.Language() - languages = schema.Language.contents cur = lang.fetch(order_by=["language", "name DESC"]) languages.sort(key=itemgetter(0), reverse=True) languages.sort(key=itemgetter(1), reverse=False) - for (name, lang), (tname, tlang) in list(zip(cur, languages)): - assert name == tname and lang == tlang, "Values are not the same" + for (name, lang_val), (tname, tlang) in list(zip(cur, languages)): + assert name == tname and lang_val == tlang, "Values are not the same" # now as dict cur = lang.fetch(as_dict=True, order_by=("language", "name DESC")) for row, (tname, tlang) in list(zip(cur, languages)): @@ -136,30 +139,38 @@ def test_iter(self, schema_any): "Values are not the same", ) - def test_keys(self, schema_any): + def test_keys(self, schema_any, lang, languages): """test key fetch""" - lang = schema.Language() - languages = schema.Language.contents languages.sort(key=itemgetter(0), reverse=True) languages.sort(key=itemgetter(1), reverse=False) + lang = schema.Language() cur = lang.fetch("name", "language", order_by=("language", "name DESC")) cur2 = list(lang.fetch("KEY", order_by=["language", "name DESC"])) for c, c2 in zip(zip(*cur), cur2): assert c == tuple(c2.values()), "Values are not the same" - def test_attributes_as_dict(self, schema_any): # issue #595 - subject = schema.Subject() + def test_attributes_as_dict(self, schema_any, subject): + """ + Issue #595 + """ attrs = ("species", "date_of_birth") result = subject.fetch(*attrs, as_dict=True) assert bool(result) and len(result) == len(subject) assert set(result[0]) == set(attrs) - def test_fetch1_step1(self, schema_any): - lang = schema.Language() + def test_fetch1_step1(self, schema_any, lang, languages): + assert lang.contents == languages == [ + ("Fabian", "English"), + ("Edgar", "English"), + ("Dimitri", "English"), + ("Dimitri", "Ukrainian"), + ("Fabian", "German"), + ("Edgar", "Japanese"), + ], "Unexpected contents in Language table" key = {"name": "Edgar", "language": "Japanese"} - true = schema.Language.contents[-1] + true = languages[-1] dat = (lang & key).fetch1() for k, (ke, c) in zip(true, dat.items()): assert k == c == (lang & key).fetch1(ke), "Values are not the same" @@ -168,43 +179,37 @@ def test_misspelled_attribute(self, schema_any): with pytest.raises(dj.DataJointError): f = (schema.Language & 'lang = "ENGLISH"').fetch() - def test_repr(self, schema_any): + def test_repr(self, schema_any, subject): """Test string representation of fetch, returning table preview""" - subject = schema.Subject() repr = subject.fetch.__repr__() n = len(repr.strip().split("\n")) limit = dj.config["display.limit"] # 3 lines are used for headers (2) and summary statement (1) assert n - 3 <= limit - def test_fetch_none(self, schema_any): + def test_fetch_none(self, schema_any, lang): """Test preparing attributes for getitem""" - lang = schema.Language() with pytest.raises(dj.DataJointError): lang.fetch(None) - def test_asdict(self, schema_any): + def test_asdict(self, schema_any, lang): """Test returns as dictionaries""" - lang = schema.Language() d = lang.fetch(as_dict=True) for dd in d: assert isinstance(dd, dict) - def test_offset(self, schema_any): + def test_offset(self, schema_any, lang, languages): """Tests offset""" - lang = schema.Language() cur = lang.fetch(limit=4, offset=1, order_by=["language", "name DESC"]) - languages = lang.contents languages.sort(key=itemgetter(0), reverse=True) languages.sort(key=itemgetter(1), 
reverse=False) assert len(cur) == 4, "Length is not correct" for c, l in list(zip(cur, languages[1:]))[:4]: assert np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" - def test_limit_warning(self, schema_any): + def test_limit_warning(self, schema_any, lang): """Tests whether warning is raised if offset is used without limit.""" - lang = schema.Language() logger = logging.getLogger("datajoint") log_capture = io.StringIO() stream_handler = logging.StreamHandler(log_capture) @@ -224,21 +229,17 @@ def test_limit_warning(self, schema_any): logger.removeHandler(handler) assert "[WARNING]: Offset set, but no limit." in log_contents - def test_len(self, schema_any): + def test_len(self, schema_any, lang): """Tests __len__""" - lang = schema.Language() - assert ( - len(lang.fetch()) == len(lang)), "__len__ is not behaving properly" + assert len(lang.fetch()) == len(lang), "__len__ is not behaving properly" - def test_fetch1_step2(self, schema_any): + def test_fetch1_step2(self, schema_any, lang): """Tests whether fetch1 raises error""" - lang = schema.Language() with pytest.raises(dj.DataJointError): lang.fetch1() - def test_fetch1_step3(self, schema_any): + def test_fetch1_step3(self, schema_any, lang): """Tests whether fetch1 raises error""" - lang = schema.Language() with pytest.raises(dj.DataJointError): lang.fetch1("name") @@ -271,9 +272,8 @@ def test_nullable_numbers(self, schema_any): assert any(np.isnan(d)) assert any(np.isnan(f)) - def test_fetch_format(self, schema_any): + def test_fetch_format(self, schema_any, subject): """test fetch_format='frame'""" - subject = schema.Subject() with dj.config(fetch_format="frame"): # test if lists are both dicts list1 = sorted( @@ -300,9 +300,8 @@ def test_fetch_format(self, schema_any): for l1, l2 in zip(list1, list2): assert l1 == l2, "Primary key is not returned correctly" - def test_key_fetch1(self, schema_any): + def test_key_fetch1(self, schema_any, subject): """test KEY fetch1 - issue #976""" - subject = schema.Subject() with dj.config(fetch_format="array"): k1 = (subject & "subject_id=10").fetch1("KEY") with dj.config(fetch_format="frame"): From 30174bce9daf3674a001ac54be2b9b8699e501e5 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 12 Dec 2023 16:04:39 -0700 Subject: [PATCH 106/212] Skip nonbreaking tests --- tests/test_fetch.py | 32 +++++++++++++++++++++++--------- 1 file changed, 23 insertions(+), 9 deletions(-) diff --git a/tests/test_fetch.py b/tests/test_fetch.py index 2ff213e6b..9e418bd44 100644 --- a/tests/test_fetch.py +++ b/tests/test_fetch.py @@ -31,8 +31,8 @@ def subject(): yield schema.Subject() - class TestFetch: + @pytest.mark.skip(reason='temp') def test_getattribute(self, schema_any, subject): """Testing Fetch.__call__ with attributes""" list1 = sorted( @@ -56,6 +56,7 @@ def test_getattribute(self, schema_any, subject): for l1, l2 in zip(list1, list2): assert l1 == l2, "Primary key is not returned correctly" + @pytest.mark.skip(reason='temp') def test_getattribute_for_fetch1(self, schema_any, subject): """Testing Fetch1.__call__ with attributes""" assert (subject & "subject_id=10").fetch1("subject_id") == 10 @@ -63,6 +64,7 @@ def test_getattribute_for_fetch1(self, schema_any, subject): (subject & "subject_id=10").fetch1("subject_id", "species") == (10, "monkey")) + @pytest.mark.skip(reason='temp') def test_order_by(self, schema_any, lang, languages): """Tests order_by sorting order""" for ord_name, ord_lang in itertools.product(*2 * [["ASC", "DESC"]]): @@ -70,11 +72,9 @@ def test_order_by(self, 
schema_any, lang, languages): languages.sort(key=itemgetter(1), reverse=ord_lang == "DESC") languages.sort(key=itemgetter(0), reverse=ord_name == "DESC") for c, l in zip(cur, languages): - assert ( - np.all(cc == ll for cc, ll in zip(c, l)), - "Sorting order is different", - ) + assert np.all(cc == ll for cc, ll in zip(c, l)), "Sorting order is different" + @pytest.mark.skip(reason='temp') def test_order_by_default(self, schema_any, lang, languages): """Tests order_by sorting order with defaults""" cur = lang.fetch(order_by=("language", "name DESC")) @@ -83,12 +83,14 @@ def test_order_by_default(self, schema_any, lang, languages): for c, l in zip(cur, languages): assert np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" + @pytest.mark.skip(reason='temp') def test_limit(self, schema_any, lang): """Test the limit kwarg""" limit = 4 cur = lang.fetch(limit=limit) assert len(cur) == limit, "Length is not correct" + @pytest.mark.skip(reason='temp') def test_order_by_limit(self, schema_any, lang, languages): """Test the combination of order by and limit kwargs""" cur = lang.fetch(limit=4, order_by=["language", "name DESC"]) @@ -98,6 +100,7 @@ def test_order_by_limit(self, schema_any, lang, languages): for c, l in list(zip(cur, languages))[:4]: assert np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" + @pytest.mark.skip(reason='temp') def test_head_tail(self, schema_any): query = schema.User * schema.Language n = 5 @@ -115,6 +118,7 @@ def test_head_tail(self, schema_any): assert len(frame) == n assert query.primary_key == frame.index.names + @pytest.mark.skip(reason='temp') def test_limit_offset(self, schema_any, lang, languages): """Test the limit and offset kwargs together""" cur = lang.fetch(offset=2, limit=4, order_by=["language", "name DESC"]) @@ -124,6 +128,7 @@ def test_limit_offset(self, schema_any, lang, languages): for c, l in list(zip(cur, languages[2:6])): assert np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" + @pytest.mark.skip(reason='temp') def test_iter(self, schema_any, lang, languages): """Test iterator""" cur = lang.fetch(order_by=["language", "name DESC"]) @@ -134,11 +139,9 @@ def test_iter(self, schema_any, lang, languages): # now as dict cur = lang.fetch(as_dict=True, order_by=("language", "name DESC")) for row, (tname, tlang) in list(zip(cur, languages)): - assert ( - row["name"] == tname and row["language"] == tlang, - "Values are not the same", - ) + assert row["name"] == tname and row["language"] == tlang, "Values are not the same" + @pytest.mark.skip(reason='temp') def test_keys(self, schema_any, lang, languages): """test key fetch""" languages.sort(key=itemgetter(0), reverse=True) @@ -151,6 +154,7 @@ def test_keys(self, schema_any, lang, languages): for c, c2 in zip(zip(*cur), cur2): assert c == tuple(c2.values()), "Values are not the same" + @pytest.mark.skip(reason='temp') def test_attributes_as_dict(self, schema_any, subject): """ Issue #595 @@ -160,6 +164,7 @@ def test_attributes_as_dict(self, schema_any, subject): assert bool(result) and len(result) == len(subject) assert set(result[0]) == set(attrs) + @pytest.mark.skip(reason='temp') def test_fetch1_step1(self, schema_any, lang, languages): assert lang.contents == languages == [ ("Fabian", "English"), @@ -175,10 +180,12 @@ def test_fetch1_step1(self, schema_any, lang, languages): for k, (ke, c) in zip(true, dat.items()): assert k == c == (lang & key).fetch1(ke), "Values are not the same" + @pytest.mark.skip(reason='temp') def 
test_misspelled_attribute(self, schema_any): with pytest.raises(dj.DataJointError): f = (schema.Language & 'lang = "ENGLISH"').fetch() + @pytest.mark.skip(reason='temp') def test_repr(self, schema_any, subject): """Test string representation of fetch, returning table preview""" repr = subject.fetch.__repr__() @@ -187,6 +194,7 @@ def test_repr(self, schema_any, subject): # 3 lines are used for headers (2) and summary statement (1) assert n - 3 <= limit + @pytest.mark.skip(reason='temp') def test_fetch_none(self, schema_any, lang): """Test preparing attributes for getitem""" with pytest.raises(dj.DataJointError): @@ -229,15 +237,18 @@ def test_limit_warning(self, schema_any, lang): logger.removeHandler(handler) assert "[WARNING]: Offset set, but no limit." in log_contents + @pytest.mark.skip(reason='temp') def test_len(self, schema_any, lang): """Tests __len__""" assert len(lang.fetch()) == len(lang), "__len__ is not behaving properly" + @pytest.mark.skip(reason='temp') def test_fetch1_step2(self, schema_any, lang): """Tests whether fetch1 raises error""" with pytest.raises(dj.DataJointError): lang.fetch1() + @pytest.mark.skip(reason='temp') def test_fetch1_step3(self, schema_any, lang): """Tests whether fetch1 raises error""" with pytest.raises(dj.DataJointError): @@ -246,10 +257,13 @@ def test_fetch1_step3(self, schema_any, lang): def test_decimal(self, schema_any): """Tests that decimal fields are correctly fetched and used in restrictions, see issue #334""" rel = schema.DecimalPrimaryKey() + assert bool(schema.DecimalPrimaryKey().fetch()), "Table DecimalPrimaryKey is empty" rel.insert1([decimal.Decimal("3.1415926")]) keys = rel.fetch() + assert len(keys) > 0 assert len(rel & keys[0]) == 1 keys = rel.fetch(dj.key) + assert len(keys) >= 2 assert len(rel & keys[1]) == 1 def test_nullable_numbers(self, schema_any): From 115c8bdaf77f1419272f7ca29016fcde4beee7f0 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 12 Dec 2023 16:07:50 -0700 Subject: [PATCH 107/212] Show breaking test test_decimal fails because its contents, a zip object, are exhausted by a previous test. 
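For context, the failure is plain Python iterator semantics rather than
anything DataJoint-specific; a minimal sketch, with no DataJoint involved:

    contents = zip((0.1, 0.25, 3.99))   # one-shot iterator, as in DecimalPrimaryKey
    assert list(contents) == [(0.1,), (0.25,), (3.99,)]   # first consumer drains it
    assert list(contents) == []                           # later consumers see an empty table
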
Reproduce by seeing a pass then a fail when running pytest -k 'test_offset or test_decimal' tests/test_fetch.py --- tests/test_fetch.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/test_fetch.py b/tests/test_fetch.py index 9e418bd44..070c879df 100644 --- a/tests/test_fetch.py +++ b/tests/test_fetch.py @@ -215,6 +215,7 @@ def test_offset(self, schema_any, lang, languages): assert len(cur) == 4, "Length is not correct" for c, l in list(zip(cur, languages[1:]))[:4]: assert np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" + assert len(schema.DecimalPrimaryKey().fetch()), "Table DecimalPrimaryKey is empty" def test_limit_warning(self, schema_any, lang): """Tests whether warning is raised if offset is used without limit.""" @@ -257,7 +258,7 @@ def test_fetch1_step3(self, schema_any, lang): def test_decimal(self, schema_any): """Tests that decimal fields are correctly fetched and used in restrictions, see issue #334""" rel = schema.DecimalPrimaryKey() - assert bool(schema.DecimalPrimaryKey().fetch()), "Table DecimalPrimaryKey is empty" + assert len(rel.fetch()), "Table DecimalPrimaryKey contents are empty" rel.insert1([decimal.Decimal("3.1415926")]) keys = rel.fetch() assert len(keys) > 0 From 17cc5648cbfd939846afdd972e9be0d069a2b23d Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 12 Dec 2023 16:10:09 -0700 Subject: [PATCH 108/212] Fix breaking test The following command now passes, as do all tests in this module: pytest -k 'test_offset or test_decimal' tests/test_fetch.py --- tests/schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/schema.py b/tests/schema.py index 140a34bba..5a60b1c0b 100644 --- a/tests/schema.py +++ b/tests/schema.py @@ -307,7 +307,7 @@ class DecimalPrimaryKey(dj.Lookup): definition = """ id : decimal(4,3) """ - contents = zip((0.1, 0.25, 3.99)) + contents = list(zip((0.1, 0.25, 3.99))) class IndexRich(dj.Manual): From b1db688674491ebb43c62813815303bc08c467a8 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 12 Dec 2023 16:23:15 -0700 Subject: [PATCH 109/212] Unskip tests --- tests/test_fetch.py | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/tests/test_fetch.py b/tests/test_fetch.py index 070c879df..68745120b 100644 --- a/tests/test_fetch.py +++ b/tests/test_fetch.py @@ -32,7 +32,6 @@ def subject(): class TestFetch: - @pytest.mark.skip(reason='temp') def test_getattribute(self, schema_any, subject): """Testing Fetch.__call__ with attributes""" list1 = sorted( @@ -56,7 +55,6 @@ def test_getattribute(self, schema_any, subject): for l1, l2 in zip(list1, list2): assert l1 == l2, "Primary key is not returned correctly" - @pytest.mark.skip(reason='temp') def test_getattribute_for_fetch1(self, schema_any, subject): """Testing Fetch1.__call__ with attributes""" assert (subject & "subject_id=10").fetch1("subject_id") == 10 @@ -64,7 +62,6 @@ def test_getattribute_for_fetch1(self, schema_any, subject): (subject & "subject_id=10").fetch1("subject_id", "species") == (10, "monkey")) - @pytest.mark.skip(reason='temp') def test_order_by(self, schema_any, lang, languages): """Tests order_by sorting order""" for ord_name, ord_lang in itertools.product(*2 * [["ASC", "DESC"]]): @@ -74,7 +71,6 @@ def test_order_by(self, schema_any, lang, languages): for c, l in zip(cur, languages): assert np.all(cc == ll for cc, ll in zip(c, l)), "Sorting order is different" - @pytest.mark.skip(reason='temp') def test_order_by_default(self, schema_any, lang, languages): """Tests order_by sorting order with 
defaults""" cur = lang.fetch(order_by=("language", "name DESC")) @@ -83,14 +79,12 @@ def test_order_by_default(self, schema_any, lang, languages): for c, l in zip(cur, languages): assert np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" - @pytest.mark.skip(reason='temp') def test_limit(self, schema_any, lang): """Test the limit kwarg""" limit = 4 cur = lang.fetch(limit=limit) assert len(cur) == limit, "Length is not correct" - @pytest.mark.skip(reason='temp') def test_order_by_limit(self, schema_any, lang, languages): """Test the combination of order by and limit kwargs""" cur = lang.fetch(limit=4, order_by=["language", "name DESC"]) @@ -100,7 +94,6 @@ def test_order_by_limit(self, schema_any, lang, languages): for c, l in list(zip(cur, languages))[:4]: assert np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" - @pytest.mark.skip(reason='temp') def test_head_tail(self, schema_any): query = schema.User * schema.Language n = 5 @@ -118,7 +111,6 @@ def test_head_tail(self, schema_any): assert len(frame) == n assert query.primary_key == frame.index.names - @pytest.mark.skip(reason='temp') def test_limit_offset(self, schema_any, lang, languages): """Test the limit and offset kwargs together""" cur = lang.fetch(offset=2, limit=4, order_by=["language", "name DESC"]) @@ -128,7 +120,6 @@ def test_limit_offset(self, schema_any, lang, languages): for c, l in list(zip(cur, languages[2:6])): assert np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" - @pytest.mark.skip(reason='temp') def test_iter(self, schema_any, lang, languages): """Test iterator""" cur = lang.fetch(order_by=["language", "name DESC"]) @@ -141,7 +132,6 @@ def test_iter(self, schema_any, lang, languages): for row, (tname, tlang) in list(zip(cur, languages)): assert row["name"] == tname and row["language"] == tlang, "Values are not the same" - @pytest.mark.skip(reason='temp') def test_keys(self, schema_any, lang, languages): """test key fetch""" languages.sort(key=itemgetter(0), reverse=True) @@ -154,7 +144,6 @@ def test_keys(self, schema_any, lang, languages): for c, c2 in zip(zip(*cur), cur2): assert c == tuple(c2.values()), "Values are not the same" - @pytest.mark.skip(reason='temp') def test_attributes_as_dict(self, schema_any, subject): """ Issue #595 @@ -164,7 +153,6 @@ def test_attributes_as_dict(self, schema_any, subject): assert bool(result) and len(result) == len(subject) assert set(result[0]) == set(attrs) - @pytest.mark.skip(reason='temp') def test_fetch1_step1(self, schema_any, lang, languages): assert lang.contents == languages == [ ("Fabian", "English"), @@ -180,12 +168,10 @@ def test_fetch1_step1(self, schema_any, lang, languages): for k, (ke, c) in zip(true, dat.items()): assert k == c == (lang & key).fetch1(ke), "Values are not the same" - @pytest.mark.skip(reason='temp') def test_misspelled_attribute(self, schema_any): with pytest.raises(dj.DataJointError): f = (schema.Language & 'lang = "ENGLISH"').fetch() - @pytest.mark.skip(reason='temp') def test_repr(self, schema_any, subject): """Test string representation of fetch, returning table preview""" repr = subject.fetch.__repr__() @@ -194,7 +180,6 @@ def test_repr(self, schema_any, subject): # 3 lines are used for headers (2) and summary statement (1) assert n - 3 <= limit - @pytest.mark.skip(reason='temp') def test_fetch_none(self, schema_any, lang): """Test preparing attributes for getitem""" with pytest.raises(dj.DataJointError): @@ -238,18 +223,15 @@ def test_limit_warning(self, schema_any, lang): 
logger.removeHandler(handler) assert "[WARNING]: Offset set, but no limit." in log_contents - @pytest.mark.skip(reason='temp') def test_len(self, schema_any, lang): """Tests __len__""" assert len(lang.fetch()) == len(lang), "__len__ is not behaving properly" - @pytest.mark.skip(reason='temp') def test_fetch1_step2(self, schema_any, lang): """Tests whether fetch1 raises error""" with pytest.raises(dj.DataJointError): lang.fetch1() - @pytest.mark.skip(reason='temp') def test_fetch1_step3(self, schema_any, lang): """Tests whether fetch1 raises error""" with pytest.raises(dj.DataJointError): From 8885795945e29107a09e67439fe7cd4c1870ab31 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 12 Dec 2023 16:23:18 -0700 Subject: [PATCH 110/212] Format with black --- tests/test_fetch.py | 63 ++++++++++++++++++++++++++++----------------- 1 file changed, 39 insertions(+), 24 deletions(-) diff --git a/tests/test_fetch.py b/tests/test_fetch.py index 68745120b..8125a3c6d 100644 --- a/tests/test_fetch.py +++ b/tests/test_fetch.py @@ -34,18 +34,14 @@ def subject(): class TestFetch: def test_getattribute(self, schema_any, subject): """Testing Fetch.__call__ with attributes""" - list1 = sorted( - subject.proj().fetch(as_dict=True), key=itemgetter("subject_id") - ) + list1 = sorted(subject.proj().fetch(as_dict=True), key=itemgetter("subject_id")) list2 = sorted(subject.fetch(dj.key), key=itemgetter("subject_id")) for l1, l2 in zip(list1, list2): assert l1 == l2, "Primary key is not returned correctly" tmp = subject.fetch(order_by="subject_id") - subject_notes, key, real_id = subject.fetch( - "subject_notes", dj.key, "real_id" - ) + subject_notes, key, real_id = subject.fetch("subject_notes", dj.key, "real_id") np.testing.assert_array_equal( sorted(subject_notes), sorted(tmp["subject_notes"]) @@ -58,9 +54,10 @@ def test_getattribute(self, schema_any, subject): def test_getattribute_for_fetch1(self, schema_any, subject): """Testing Fetch1.__call__ with attributes""" assert (subject & "subject_id=10").fetch1("subject_id") == 10 - assert ( - (subject & "subject_id=10").fetch1("subject_id", "species") == - (10, "monkey")) + assert (subject & "subject_id=10").fetch1("subject_id", "species") == ( + 10, + "monkey", + ) def test_order_by(self, schema_any, lang, languages): """Tests order_by sorting order""" @@ -69,7 +66,9 @@ def test_order_by(self, schema_any, lang, languages): languages.sort(key=itemgetter(1), reverse=ord_lang == "DESC") languages.sort(key=itemgetter(0), reverse=ord_name == "DESC") for c, l in zip(cur, languages): - assert np.all(cc == ll for cc, ll in zip(c, l)), "Sorting order is different" + assert np.all( + cc == ll for cc, ll in zip(c, l) + ), "Sorting order is different" def test_order_by_default(self, schema_any, lang, languages): """Tests order_by sorting order with defaults""" @@ -77,7 +76,9 @@ def test_order_by_default(self, schema_any, lang, languages): languages.sort(key=itemgetter(0), reverse=True) languages.sort(key=itemgetter(1), reverse=False) for c, l in zip(cur, languages): - assert np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" + assert np.all( + [cc == ll for cc, ll in zip(c, l)] + ), "Sorting order is different" def test_limit(self, schema_any, lang): """Test the limit kwarg""" @@ -92,7 +93,9 @@ def test_order_by_limit(self, schema_any, lang, languages): languages.sort(key=itemgetter(1), reverse=False) assert len(cur) == 4, "Length is not correct" for c, l in list(zip(cur, languages))[:4]: - assert np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order 
is different" + assert np.all( + [cc == ll for cc, ll in zip(c, l)] + ), "Sorting order is different" def test_head_tail(self, schema_any): query = schema.User * schema.Language @@ -118,7 +121,9 @@ def test_limit_offset(self, schema_any, lang, languages): languages.sort(key=itemgetter(1), reverse=False) assert len(cur) == 4, "Length is not correct" for c, l in list(zip(cur, languages[2:6])): - assert np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" + assert np.all( + [cc == ll for cc, ll in zip(c, l)] + ), "Sorting order is different" def test_iter(self, schema_any, lang, languages): """Test iterator""" @@ -130,7 +135,9 @@ def test_iter(self, schema_any, lang, languages): # now as dict cur = lang.fetch(as_dict=True, order_by=("language", "name DESC")) for row, (tname, tlang) in list(zip(cur, languages)): - assert row["name"] == tname and row["language"] == tlang, "Values are not the same" + assert ( + row["name"] == tname and row["language"] == tlang + ), "Values are not the same" def test_keys(self, schema_any, lang, languages): """test key fetch""" @@ -154,14 +161,18 @@ def test_attributes_as_dict(self, schema_any, subject): assert set(result[0]) == set(attrs) def test_fetch1_step1(self, schema_any, lang, languages): - assert lang.contents == languages == [ - ("Fabian", "English"), - ("Edgar", "English"), - ("Dimitri", "English"), - ("Dimitri", "Ukrainian"), - ("Fabian", "German"), - ("Edgar", "Japanese"), - ], "Unexpected contents in Language table" + assert ( + lang.contents + == languages + == [ + ("Fabian", "English"), + ("Edgar", "English"), + ("Dimitri", "English"), + ("Dimitri", "Ukrainian"), + ("Fabian", "German"), + ("Edgar", "Japanese"), + ] + ), "Unexpected contents in Language table" key = {"name": "Edgar", "language": "Japanese"} true = languages[-1] dat = (lang & key).fetch1() @@ -199,8 +210,12 @@ def test_offset(self, schema_any, lang, languages): languages.sort(key=itemgetter(1), reverse=False) assert len(cur) == 4, "Length is not correct" for c, l in list(zip(cur, languages[1:]))[:4]: - assert np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" - assert len(schema.DecimalPrimaryKey().fetch()), "Table DecimalPrimaryKey is empty" + assert np.all( + [cc == ll for cc, ll in zip(c, l)] + ), "Sorting order is different" + assert len( + schema.DecimalPrimaryKey().fetch() + ), "Table DecimalPrimaryKey is empty" def test_limit_warning(self, schema_any, lang): """Tests whether warning is raised if offset is used without limit.""" From 761c1663f4a2be45e521e84f3d33ef79b34b3ba4 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 12 Dec 2023 17:11:28 -0700 Subject: [PATCH 111/212] Remove unnecessary assert --- tests/test_fetch.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/tests/test_fetch.py b/tests/test_fetch.py index 8125a3c6d..b1480fa7d 100644 --- a/tests/test_fetch.py +++ b/tests/test_fetch.py @@ -213,9 +213,6 @@ def test_offset(self, schema_any, lang, languages): assert np.all( [cc == ll for cc, ll in zip(c, l)] ), "Sorting order is different" - assert len( - schema.DecimalPrimaryKey().fetch() - ), "Table DecimalPrimaryKey is empty" def test_limit_warning(self, schema_any, lang): """Tests whether warning is raised if offset is used without limit.""" From 0a498a65f55ed68eb886bc072aeb439cb14fe9b1 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 12 Dec 2023 18:27:52 -0700 Subject: [PATCH 112/212] cp to tests --- tests/test_fetch_same.py | 62 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 62 insertions(+) create mode 
100644 tests/test_fetch_same.py diff --git a/tests/test_fetch_same.py b/tests/test_fetch_same.py new file mode 100644 index 000000000..d42d88b1a --- /dev/null +++ b/tests/test_fetch_same.py @@ -0,0 +1,62 @@ +from nose.tools import assert_equal +from . import PREFIX, CONN_INFO +import numpy as np +import datajoint as dj + +schema = dj.Schema(PREFIX + "_fetch_same", connection=dj.conn(**CONN_INFO)) + + +class TestFetchSame: + @classmethod + def setup_class(cls): + @schema + class ProjData(dj.Manual): + definition = """ + id : int + --- + resp : float + sim : float + big : longblob + blah : varchar(10) + """ + + ProjData().insert( + [ + {"id": 0, "resp": 20.33, "sim": 45.324, "big": 3, "blah": "yes"}, + { + "id": 1, + "resp": 94.3, + "sim": 34.23, + "big": {"key1": np.random.randn(20, 10)}, + "blah": "si", + }, + { + "id": 2, + "resp": 1.90, + "sim": 10.23, + "big": np.random.randn(4, 2), + "blah": "sim", + }, + ] + ) + + cls.projdata = ProjData() + + def test_object_conversion_one(self): + new = self.projdata.proj(sub="resp").fetch("sub") + assert_equal(new.dtype, np.float64) + + def test_object_conversion_two(self): + [sub, add] = self.projdata.proj(sub="resp", add="sim").fetch("sub", "add") + assert_equal(sub.dtype, np.float64) + assert_equal(add.dtype, np.float64) + + def test_object_conversion_all(self): + new = self.projdata.proj(sub="resp", add="sim").fetch() + assert_equal(new["sub"].dtype, np.float64) + assert_equal(new["add"].dtype, np.float64) + + def test_object_no_convert(self): + new = self.projdata.fetch() + assert_equal(new["big"].dtype, "object") + assert_equal(new["blah"].dtype, "object") From e2acbeaccb00769763141ffdd40e56f543695902 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 12 Dec 2023 18:34:15 -0700 Subject: [PATCH 113/212] Migrate test_fetch_same --- tests/test_fetch_same.py | 120 +++++++++++++++++++++------------------ 1 file changed, 65 insertions(+), 55 deletions(-) diff --git a/tests/test_fetch_same.py b/tests/test_fetch_same.py index d42d88b1a..4935bb037 100644 --- a/tests/test_fetch_same.py +++ b/tests/test_fetch_same.py @@ -1,62 +1,72 @@ -from nose.tools import assert_equal +import pytest from . 
import PREFIX, CONN_INFO import numpy as np import datajoint as dj -schema = dj.Schema(PREFIX + "_fetch_same", connection=dj.conn(**CONN_INFO)) + +class ProjData(dj.Manual): + definition = """ + id : int + --- + resp : float + sim : float + big : longblob + blah : varchar(10) + """ + + +@pytest.fixture +def schema_fetch_same(connection_root): + schema = dj.Schema( + PREFIX + "_fetch_same", + context=dict(ProjData=ProjData), + connection=connection_root, + ) + schema(ProjData) + ProjData().insert( + [ + {"id": 0, "resp": 20.33, "sim": 45.324, "big": 3, "blah": "yes"}, + { + "id": 1, + "resp": 94.3, + "sim": 34.23, + "big": {"key1": np.random.randn(20, 10)}, + "blah": "si", + }, + { + "id": 2, + "resp": 1.90, + "sim": 10.23, + "big": np.random.randn(4, 2), + "blah": "sim", + }, + ] + ) + yield schema + schema.drop() + + +@pytest.fixture +def projdata(): + yield ProjData() class TestFetchSame: - @classmethod - def setup_class(cls): - @schema - class ProjData(dj.Manual): - definition = """ - id : int - --- - resp : float - sim : float - big : longblob - blah : varchar(10) - """ - - ProjData().insert( - [ - {"id": 0, "resp": 20.33, "sim": 45.324, "big": 3, "blah": "yes"}, - { - "id": 1, - "resp": 94.3, - "sim": 34.23, - "big": {"key1": np.random.randn(20, 10)}, - "blah": "si", - }, - { - "id": 2, - "resp": 1.90, - "sim": 10.23, - "big": np.random.randn(4, 2), - "blah": "sim", - }, - ] - ) - - cls.projdata = ProjData() - - def test_object_conversion_one(self): - new = self.projdata.proj(sub="resp").fetch("sub") - assert_equal(new.dtype, np.float64) - - def test_object_conversion_two(self): - [sub, add] = self.projdata.proj(sub="resp", add="sim").fetch("sub", "add") - assert_equal(sub.dtype, np.float64) - assert_equal(add.dtype, np.float64) - - def test_object_conversion_all(self): - new = self.projdata.proj(sub="resp", add="sim").fetch() - assert_equal(new["sub"].dtype, np.float64) - assert_equal(new["add"].dtype, np.float64) - - def test_object_no_convert(self): - new = self.projdata.fetch() - assert_equal(new["big"].dtype, "object") - assert_equal(new["blah"].dtype, "object") + def test_object_conversion_one(self, schema_fetch_same, projdata): + new = projdata.proj(sub="resp").fetch("sub") + assert new.dtype == np.float64 + + def test_object_conversion_two(self, schema_fetch_same, projdata): + [sub, add] = projdata.proj(sub="resp", add="sim").fetch("sub", "add") + assert sub.dtype == np.float64 + assert add.dtype == np.float64 + + def test_object_conversion_all(self, schema_fetch_same, projdata): + new = projdata.proj(sub="resp", add="sim").fetch() + assert new["sub"].dtype == np.float64 + assert new["add"].dtype == np.float64 + + def test_object_no_convert(self, schema_fetch_same, projdata): + new = projdata.fetch() + assert new["big"].dtype == "object" + assert new["blah"].dtype == "object" From 74402ebeb66ab7f078aa052f2da44346d3b1f02d Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 12 Dec 2023 18:40:00 -0700 Subject: [PATCH 114/212] cp to tests --- tests/test_jobs.py | 168 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 168 insertions(+) create mode 100644 tests/test_jobs.py diff --git a/tests/test_jobs.py b/tests/test_jobs.py new file mode 100644 index 000000000..21fdef940 --- /dev/null +++ b/tests/test_jobs.py @@ -0,0 +1,168 @@ +from nose.tools import assert_true, assert_false, assert_equals +from . 
import schema +from datajoint.jobs import ERROR_MESSAGE_LENGTH, TRUNCATION_APPENDIX +import random +import string +import datajoint as dj + +subjects = schema.Subject() + + +def test_reserve_job(): + schema.schema.jobs.delete() + assert_true(subjects) + table_name = "fake_table" + + # reserve jobs + for key in subjects.fetch("KEY"): + assert_true( + schema.schema.jobs.reserve(table_name, key), "failed to reserve a job" + ) + + # refuse jobs + for key in subjects.fetch("KEY"): + assert_false( + schema.schema.jobs.reserve(table_name, key), "failed to respect reservation" + ) + + # complete jobs + for key in subjects.fetch("KEY"): + schema.schema.jobs.complete(table_name, key) + assert_false(schema.schema.jobs, "failed to free jobs") + + # reserve jobs again + for key in subjects.fetch("KEY"): + assert_true( + schema.schema.jobs.reserve(table_name, key), "failed to reserve new jobs" + ) + + # finish with error + for key in subjects.fetch("KEY"): + schema.schema.jobs.error(table_name, key, "error message") + + # refuse jobs with errors + for key in subjects.fetch("KEY"): + assert_false( + schema.schema.jobs.reserve(table_name, key), "failed to ignore error jobs" + ) + + # clear error jobs + (schema.schema.jobs & dict(status="error")).delete() + assert_false(schema.schema.jobs, "failed to clear error jobs") + + +def test_restrictions(): + jobs = schema.schema.jobs + jobs.delete() + jobs.reserve("a", {"key": "a1"}) + jobs.reserve("a", {"key": "a2"}) + jobs.reserve("b", {"key": "b1"}) + jobs.error("a", {"key": "a2"}, "error") + jobs.error("b", {"key": "b1"}, "error") + + assert_true(len(jobs & {"table_name": "a"}) == 2) + assert_true(len(jobs & {"status": "error"}) == 2) + assert_true(len(jobs & {"table_name": "a", "status": "error"}) == 1) + jobs.delete() + + +def test_sigint(): + # clear out job table + schema.schema.jobs.delete() + try: + schema.SigIntTable().populate(reserve_jobs=True) + except KeyboardInterrupt: + pass + + status, error_message = schema.schema.jobs.fetch1("status", "error_message") + assert_equals(status, "error") + assert_equals(error_message, "KeyboardInterrupt") + schema.schema.jobs.delete() + + +def test_sigterm(): + # clear out job table + schema.schema.jobs.delete() + try: + schema.SigTermTable().populate(reserve_jobs=True) + except SystemExit: + pass + + status, error_message = schema.schema.jobs.fetch1("status", "error_message") + assert_equals(status, "error") + assert_equals(error_message, "SystemExit: SIGTERM received") + schema.schema.jobs.delete() + + +def test_suppress_dj_errors(): + """test_suppress_dj_errors: dj errors suppressible w/o native py blobs""" + schema.schema.jobs.delete() + with dj.config(enable_python_native_blobs=False): + schema.ErrorClass.populate(reserve_jobs=True, suppress_errors=True) + assert_true(len(schema.DjExceptionName()) == len(schema.schema.jobs) > 0) + + +def test_long_error_message(): + # clear out jobs table + schema.schema.jobs.delete() + + # create long error message + long_error_message = "".join( + random.choice(string.ascii_letters) for _ in range(ERROR_MESSAGE_LENGTH + 100) + ) + short_error_message = "".join( + random.choice(string.ascii_letters) for _ in range(ERROR_MESSAGE_LENGTH // 2) + ) + assert_true(subjects) + table_name = "fake_table" + + key = subjects.fetch("KEY")[0] + + # test long error message + schema.schema.jobs.reserve(table_name, key) + schema.schema.jobs.error(table_name, key, long_error_message) + error_message = schema.schema.jobs.fetch1("error_message") + assert_true( + len(error_message) == 
ERROR_MESSAGE_LENGTH, + "error message is longer than max allowed", + ) + assert_true( + error_message.endswith(TRUNCATION_APPENDIX), + "appropriate ending missing for truncated error message", + ) + schema.schema.jobs.delete() + + # test long error message + schema.schema.jobs.reserve(table_name, key) + schema.schema.jobs.error(table_name, key, short_error_message) + error_message = schema.schema.jobs.fetch1("error_message") + assert_true(error_message == short_error_message, "error messages do not agree") + assert_false( + error_message.endswith(TRUNCATION_APPENDIX), + "error message should not be truncated", + ) + schema.schema.jobs.delete() + + +def test_long_error_stack(): + # clear out jobs table + schema.schema.jobs.delete() + + # create long error stack + STACK_SIZE = ( + 89942 # Does not fit into small blob (should be 64k, but found to be higher) + ) + long_error_stack = "".join( + random.choice(string.ascii_letters) for _ in range(STACK_SIZE) + ) + assert subjects + table_name = "fake_table" + + key = subjects.fetch("KEY")[0] + + # test long error stack + schema.schema.jobs.reserve(table_name, key) + schema.schema.jobs.error(table_name, key, "error message", long_error_stack) + error_stack = schema.schema.jobs.fetch1("error_stack") + assert error_stack == long_error_stack, "error stacks do not agree" + schema.schema.jobs.delete() From 66cd1c1258964c34a7967e6dd7e8ba0a7b736b27 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 12 Dec 2023 18:40:15 -0700 Subject: [PATCH 115/212] nose2pytest test_jobs --- tests/test_jobs.py | 42 +++++++++++++++++------------------------- 1 file changed, 17 insertions(+), 25 deletions(-) diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 21fdef940..157875940 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -10,31 +10,25 @@ def test_reserve_job(): schema.schema.jobs.delete() - assert_true(subjects) + assert subjects table_name = "fake_table" # reserve jobs for key in subjects.fetch("KEY"): - assert_true( - schema.schema.jobs.reserve(table_name, key), "failed to reserve a job" - ) + assert schema.schema.jobs.reserve(table_name, key), "failed to reserve a job" # refuse jobs for key in subjects.fetch("KEY"): - assert_false( - schema.schema.jobs.reserve(table_name, key), "failed to respect reservation" - ) + assert not schema.schema.jobs.reserve(table_name, key), "failed to respect reservation" # complete jobs for key in subjects.fetch("KEY"): schema.schema.jobs.complete(table_name, key) - assert_false(schema.schema.jobs, "failed to free jobs") + assert not schema.schema.jobs, "failed to free jobs" # reserve jobs again for key in subjects.fetch("KEY"): - assert_true( - schema.schema.jobs.reserve(table_name, key), "failed to reserve new jobs" - ) + assert schema.schema.jobs.reserve(table_name, key), "failed to reserve new jobs" # finish with error for key in subjects.fetch("KEY"): @@ -42,13 +36,11 @@ def test_reserve_job(): # refuse jobs with errors for key in subjects.fetch("KEY"): - assert_false( - schema.schema.jobs.reserve(table_name, key), "failed to ignore error jobs" - ) + assert not schema.schema.jobs.reserve(table_name, key), "failed to ignore error jobs" # clear error jobs (schema.schema.jobs & dict(status="error")).delete() - assert_false(schema.schema.jobs, "failed to clear error jobs") + assert not schema.schema.jobs, "failed to clear error jobs" def test_restrictions(): @@ -60,9 +52,9 @@ def test_restrictions(): jobs.error("a", {"key": "a2"}, "error") jobs.error("b", {"key": "b1"}, "error") - assert_true(len(jobs & 
{"table_name": "a"}) == 2) - assert_true(len(jobs & {"status": "error"}) == 2) - assert_true(len(jobs & {"table_name": "a", "status": "error"}) == 1) + assert len(jobs & {"table_name": "a"}) == 2 + assert len(jobs & {"status": "error"}) == 2 + assert len(jobs & {"table_name": "a", "status": "error"}) == 1 jobs.delete() @@ -75,8 +67,8 @@ def test_sigint(): pass status, error_message = schema.schema.jobs.fetch1("status", "error_message") - assert_equals(status, "error") - assert_equals(error_message, "KeyboardInterrupt") + assert status == "error" + assert error_message == "KeyboardInterrupt" schema.schema.jobs.delete() @@ -89,8 +81,8 @@ def test_sigterm(): pass status, error_message = schema.schema.jobs.fetch1("status", "error_message") - assert_equals(status, "error") - assert_equals(error_message, "SystemExit: SIGTERM received") + assert status == "error" + assert error_message == "SystemExit: SIGTERM received" schema.schema.jobs.delete() @@ -99,7 +91,7 @@ def test_suppress_dj_errors(): schema.schema.jobs.delete() with dj.config(enable_python_native_blobs=False): schema.ErrorClass.populate(reserve_jobs=True, suppress_errors=True) - assert_true(len(schema.DjExceptionName()) == len(schema.schema.jobs) > 0) + assert len(schema.DjExceptionName()) == len(schema.schema.jobs) > 0 def test_long_error_message(): @@ -113,7 +105,7 @@ def test_long_error_message(): short_error_message = "".join( random.choice(string.ascii_letters) for _ in range(ERROR_MESSAGE_LENGTH // 2) ) - assert_true(subjects) + assert subjects table_name = "fake_table" key = subjects.fetch("KEY")[0] @@ -136,7 +128,7 @@ def test_long_error_message(): schema.schema.jobs.reserve(table_name, key) schema.schema.jobs.error(table_name, key, short_error_message) error_message = schema.schema.jobs.fetch1("error_message") - assert_true(error_message == short_error_message, "error messages do not agree") + assert error_message == short_error_message, "error messages do not agree" assert_false( error_message.endswith(TRUNCATION_APPENDIX), "error message should not be truncated", From b2c1f0e14814f0da3641a504e08721661bcf2477 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 12 Dec 2023 18:53:33 -0700 Subject: [PATCH 116/212] All but two test_jobs passing --- tests/test_jobs.py | 102 +++++++++++++++++++++------------------------ 1 file changed, 48 insertions(+), 54 deletions(-) diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 157875940..0359bc597 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -1,50 +1,53 @@ -from nose.tools import assert_true, assert_false, assert_equals +import pytest from . 
import schema from datajoint.jobs import ERROR_MESSAGE_LENGTH, TRUNCATION_APPENDIX import random import string import datajoint as dj -subjects = schema.Subject() +@pytest.fixture +def subjects(): + yield schema.Subject() -def test_reserve_job(): - schema.schema.jobs.delete() + +def test_reserve_job(schema_any, subjects): + schema_any.jobs.delete() assert subjects table_name = "fake_table" # reserve jobs for key in subjects.fetch("KEY"): - assert schema.schema.jobs.reserve(table_name, key), "failed to reserve a job" + assert schema_any.jobs.reserve(table_name, key), "failed to reserve a job" # refuse jobs for key in subjects.fetch("KEY"): - assert not schema.schema.jobs.reserve(table_name, key), "failed to respect reservation" + assert not schema_any.jobs.reserve(table_name, key), "failed to respect reservation" # complete jobs for key in subjects.fetch("KEY"): - schema.schema.jobs.complete(table_name, key) - assert not schema.schema.jobs, "failed to free jobs" + schema_any.jobs.complete(table_name, key) + assert not schema_any.jobs, "failed to free jobs" # reserve jobs again for key in subjects.fetch("KEY"): - assert schema.schema.jobs.reserve(table_name, key), "failed to reserve new jobs" + assert schema_any.jobs.reserve(table_name, key), "failed to reserve new jobs" # finish with error for key in subjects.fetch("KEY"): - schema.schema.jobs.error(table_name, key, "error message") + schema_any.jobs.error(table_name, key, "error message") # refuse jobs with errors for key in subjects.fetch("KEY"): - assert not schema.schema.jobs.reserve(table_name, key), "failed to ignore error jobs" + assert not schema_any.jobs.reserve(table_name, key), "failed to ignore error jobs" # clear error jobs - (schema.schema.jobs & dict(status="error")).delete() - assert not schema.schema.jobs, "failed to clear error jobs" + (schema_any.jobs & dict(status="error")).delete() + assert not schema_any.jobs, "failed to clear error jobs" -def test_restrictions(): - jobs = schema.schema.jobs +def test_restrictions(schema_any, subjects): + jobs = schema_any.jobs jobs.delete() jobs.reserve("a", {"key": "a1"}) jobs.reserve("a", {"key": "a2"}) @@ -58,45 +61,45 @@ def test_restrictions(): jobs.delete() -def test_sigint(): +def test_sigint(schema_any, subjects): # clear out job table - schema.schema.jobs.delete() + schema_any.jobs.delete() try: schema.SigIntTable().populate(reserve_jobs=True) except KeyboardInterrupt: pass - status, error_message = schema.schema.jobs.fetch1("status", "error_message") + status, error_message = schema_any.jobs.fetch1("status", "error_message") assert status == "error" assert error_message == "KeyboardInterrupt" - schema.schema.jobs.delete() + schema_any.jobs.delete() -def test_sigterm(): +def test_sigterm(schema_any, subjects): # clear out job table - schema.schema.jobs.delete() + schema_any.jobs.delete() try: schema.SigTermTable().populate(reserve_jobs=True) except SystemExit: pass - status, error_message = schema.schema.jobs.fetch1("status", "error_message") + status, error_message = schema_any.jobs.fetch1("status", "error_message") assert status == "error" assert error_message == "SystemExit: SIGTERM received" - schema.schema.jobs.delete() + schema_any.jobs.delete() -def test_suppress_dj_errors(): +def test_suppress_dj_errors(schema_any, subjects): """test_suppress_dj_errors: dj errors suppressible w/o native py blobs""" - schema.schema.jobs.delete() + schema_any.jobs.delete() with dj.config(enable_python_native_blobs=False): schema.ErrorClass.populate(reserve_jobs=True, 
suppress_errors=True) - assert len(schema.DjExceptionName()) == len(schema.schema.jobs) > 0 + assert len(schema.DjExceptionName()) == len(schema_any.jobs) > 0 -def test_long_error_message(): +def test_long_error_message(schema_any, subjects): # clear out jobs table - schema.schema.jobs.delete() + schema_any.jobs.delete() # create long error message long_error_message = "".join( @@ -111,34 +114,25 @@ def test_long_error_message(): key = subjects.fetch("KEY")[0] # test long error message - schema.schema.jobs.reserve(table_name, key) - schema.schema.jobs.error(table_name, key, long_error_message) - error_message = schema.schema.jobs.fetch1("error_message") - assert_true( - len(error_message) == ERROR_MESSAGE_LENGTH, - "error message is longer than max allowed", - ) - assert_true( - error_message.endswith(TRUNCATION_APPENDIX), - "appropriate ending missing for truncated error message", - ) - schema.schema.jobs.delete() + schema_any.jobs.reserve(table_name, key) + schema_any.jobs.error(table_name, key, long_error_message) + error_message = schema_any.jobs.fetch1("error_message") + assert len(error_message) == ERROR_MESSAGE_LENGTH, "error message is longer than max allowed" + assert error_message.endswith(TRUNCATION_APPENDIX), "appropriate ending missing for truncated error message" + schema_any.jobs.delete() # test long error message - schema.schema.jobs.reserve(table_name, key) - schema.schema.jobs.error(table_name, key, short_error_message) - error_message = schema.schema.jobs.fetch1("error_message") + schema_any.jobs.reserve(table_name, key) + schema_any.jobs.error(table_name, key, short_error_message) + error_message = schema_any.jobs.fetch1("error_message") assert error_message == short_error_message, "error messages do not agree" - assert_false( - error_message.endswith(TRUNCATION_APPENDIX), - "error message should not be truncated", - ) - schema.schema.jobs.delete() + assert not error_message.endswith(TRUNCATION_APPENDIX), "error message should not be truncated" + schema_any.jobs.delete() -def test_long_error_stack(): +def test_long_error_stack(schema_any, subjects): # clear out jobs table - schema.schema.jobs.delete() + schema_any.jobs.delete() # create long error stack STACK_SIZE = ( @@ -153,8 +147,8 @@ def test_long_error_stack(): key = subjects.fetch("KEY")[0] # test long error stack - schema.schema.jobs.reserve(table_name, key) - schema.schema.jobs.error(table_name, key, "error message", long_error_stack) - error_stack = schema.schema.jobs.fetch1("error_stack") + schema_any.jobs.reserve(table_name, key) + schema_any.jobs.error(table_name, key, "error message", long_error_stack) + error_stack = schema_any.jobs.fetch1("error_stack") assert error_stack == long_error_stack, "error stacks do not agree" - schema.schema.jobs.delete() + schema_any.jobs.delete() From 4fa05a234da3c2b2b073dbb98829aea45f635540 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 12 Dec 2023 19:11:50 -0700 Subject: [PATCH 117/212] Clean jobs table in fixture --- tests/conftest.py | 2 ++ tests/test_jobs.py | 17 ++--------------- 2 files changed, 4 insertions(+), 15 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 9d697ef47..e3c71353e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -263,7 +263,9 @@ def schema_any(connection_test): schema_any(schema.SessionDateA) schema_any(schema.Stimulus) schema_any(schema.Longblob) + schema_any.jobs.delete() yield schema_any + schema_any.jobs.delete() schema_any.drop() diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 0359bc597..c78de3922 
100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -12,7 +12,6 @@ def subjects(): def test_reserve_job(schema_any, subjects): - schema_any.jobs.delete() assert subjects table_name = "fake_table" @@ -62,45 +61,37 @@ def test_restrictions(schema_any, subjects): def test_sigint(schema_any, subjects): - # clear out job table - schema_any.jobs.delete() try: schema.SigIntTable().populate(reserve_jobs=True) except KeyboardInterrupt: pass + assert len(schema_any.jobs.fetch()), "SigInt jobs table is empty" status, error_message = schema_any.jobs.fetch1("status", "error_message") assert status == "error" assert error_message == "KeyboardInterrupt" - schema_any.jobs.delete() def test_sigterm(schema_any, subjects): - # clear out job table - schema_any.jobs.delete() try: schema.SigTermTable().populate(reserve_jobs=True) except SystemExit: pass + assert len(schema_any.jobs.fetch()), "SigTermjobs table is empty" status, error_message = schema_any.jobs.fetch1("status", "error_message") assert status == "error" assert error_message == "SystemExit: SIGTERM received" - schema_any.jobs.delete() def test_suppress_dj_errors(schema_any, subjects): """test_suppress_dj_errors: dj errors suppressible w/o native py blobs""" - schema_any.jobs.delete() with dj.config(enable_python_native_blobs=False): schema.ErrorClass.populate(reserve_jobs=True, suppress_errors=True) assert len(schema.DjExceptionName()) == len(schema_any.jobs) > 0 def test_long_error_message(schema_any, subjects): - # clear out jobs table - schema_any.jobs.delete() - # create long error message long_error_message = "".join( random.choice(string.ascii_letters) for _ in range(ERROR_MESSAGE_LENGTH + 100) @@ -131,9 +122,6 @@ def test_long_error_message(schema_any, subjects): def test_long_error_stack(schema_any, subjects): - # clear out jobs table - schema_any.jobs.delete() - # create long error stack STACK_SIZE = ( 89942 # Does not fit into small blob (should be 64k, but found to be higher) @@ -151,4 +139,3 @@ def test_long_error_stack(schema_any, subjects): schema_any.jobs.error(table_name, key, "error message", long_error_stack) error_stack = schema_any.jobs.fetch1("error_stack") assert error_stack == long_error_stack, "error stacks do not agree" - schema_any.jobs.delete() From c7a30364a78f36b770a837596dba9b9a56b926c2 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 12 Dec 2023 19:12:02 -0700 Subject: [PATCH 118/212] Change from generator to list Later tests in test_jobs would fail because SimpleSource contents generator exhausted, so the child tables' populate reserves no jobs due to no keys. 
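For illustration, a minimal sketch of the mechanism in plain Python, with no
DataJoint required (the class names here are hypothetical stand-ins): a
generator expression bound at class definition time is shared by every
instance and can be drained exactly once.

    class SimpleSourceSketch:                    # hypothetical stand-in for SimpleSource
        contents = ((x,) for x in range(10))     # created once, shared by all instances

    assert len(list(SimpleSourceSketch.contents)) == 10  # the first test drains it
    assert list(SimpleSourceSketch.contents) == []       # later populate() finds no keys

    class FixedSourceSketch:                     # the change below: a re-readable list
        contents = [(x,) for x in range(10)]

    assert list(FixedSourceSketch.contents) == list(FixedSourceSketch.contents)
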
--- tests/schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/schema.py b/tests/schema.py index 5a60b1c0b..81e5ac44c 100644 --- a/tests/schema.py +++ b/tests/schema.py @@ -258,7 +258,7 @@ class SimpleSource(dj.Lookup): definition = """ id : int # id """ - contents = ((x,) for x in range(10)) + contents = [(x,) for x in range(10)] class SigIntTable(dj.Computed): From dfdb805800dc7ebf537646825e8ba269409fb6aa Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 12 Dec 2023 19:35:03 -0700 Subject: [PATCH 119/212] Tolerate error when cleaning up schema_any.jobs --- tests/conftest.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index e3c71353e..22104a750 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -11,7 +11,7 @@ import json from pathlib import Path from datajoint import errors -from datajoint.errors import ADAPTED_TYPE_SWITCH, FILEPATH_FEATURE_SWITCH +from datajoint.errors import ADAPTED_TYPE_SWITCH, FILEPATH_FEATURE_SWITCH, DataJointError from . import ( PREFIX, CONN_INFO, @@ -227,6 +227,10 @@ def schema_any(connection_test): PREFIX + "_test1", schema.LOCALS_ANY, connection=connection_test ) assert schema.LOCALS_ANY, "LOCALS_ANY is empty" + try: + schema_any.jobs.delete() + except DataJointError: + pass schema_any(schema.TTest) schema_any(schema.TTest2) schema_any(schema.TTest3) @@ -263,9 +267,11 @@ def schema_any(connection_test): schema_any(schema.SessionDateA) schema_any(schema.Stimulus) schema_any(schema.Longblob) - schema_any.jobs.delete() yield schema_any - schema_any.jobs.delete() + try: + schema_any.jobs.delete() + except DataJointError: + pass schema_any.drop() From a4ea5ffc78c276c70cf44c78edaa02f14a21bc7c Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 12 Dec 2023 19:36:28 -0700 Subject: [PATCH 120/212] Format with black --- tests/conftest.py | 6 +++++- tests/test_jobs.py | 20 +++++++++++++++----- 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 22104a750..3979efe50 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -11,7 +11,11 @@ import json from pathlib import Path from datajoint import errors -from datajoint.errors import ADAPTED_TYPE_SWITCH, FILEPATH_FEATURE_SWITCH, DataJointError +from datajoint.errors import ( + ADAPTED_TYPE_SWITCH, + FILEPATH_FEATURE_SWITCH, + DataJointError, +) from . 
import ( PREFIX, CONN_INFO, diff --git a/tests/test_jobs.py b/tests/test_jobs.py index c78de3922..8f09135cb 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -21,7 +21,9 @@ def test_reserve_job(schema_any, subjects): # refuse jobs for key in subjects.fetch("KEY"): - assert not schema_any.jobs.reserve(table_name, key), "failed to respect reservation" + assert not schema_any.jobs.reserve( + table_name, key + ), "failed to respect reservation" # complete jobs for key in subjects.fetch("KEY"): @@ -38,7 +40,9 @@ def test_reserve_job(schema_any, subjects): # refuse jobs with errors for key in subjects.fetch("KEY"): - assert not schema_any.jobs.reserve(table_name, key), "failed to ignore error jobs" + assert not schema_any.jobs.reserve( + table_name, key + ), "failed to ignore error jobs" # clear error jobs (schema_any.jobs & dict(status="error")).delete() @@ -108,8 +112,12 @@ def test_long_error_message(schema_any, subjects): schema_any.jobs.reserve(table_name, key) schema_any.jobs.error(table_name, key, long_error_message) error_message = schema_any.jobs.fetch1("error_message") - assert len(error_message) == ERROR_MESSAGE_LENGTH, "error message is longer than max allowed" - assert error_message.endswith(TRUNCATION_APPENDIX), "appropriate ending missing for truncated error message" + assert ( + len(error_message) == ERROR_MESSAGE_LENGTH + ), "error message is longer than max allowed" + assert error_message.endswith( + TRUNCATION_APPENDIX + ), "appropriate ending missing for truncated error message" schema_any.jobs.delete() # test long error message @@ -117,7 +125,9 @@ def test_long_error_message(schema_any, subjects): schema_any.jobs.error(table_name, key, short_error_message) error_message = schema_any.jobs.fetch1("error_message") assert error_message == short_error_message, "error messages do not agree" - assert not error_message.endswith(TRUNCATION_APPENDIX), "error message should not be truncated" + assert not error_message.endswith( + TRUNCATION_APPENDIX + ), "error message should not be truncated" schema_any.jobs.delete() From 69061937b3f8cb81279e5b9e3e0afe97e8e5d483 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 12 Dec 2023 19:41:25 -0700 Subject: [PATCH 121/212] Remove unnecessary fixture usage --- tests/test_jobs.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 8f09135cb..03f56791a 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -49,7 +49,7 @@ def test_reserve_job(schema_any, subjects): assert not schema_any.jobs, "failed to clear error jobs" -def test_restrictions(schema_any, subjects): +def test_restrictions(schema_any): jobs = schema_any.jobs jobs.delete() jobs.reserve("a", {"key": "a1"}) @@ -64,7 +64,7 @@ def test_restrictions(schema_any, subjects): jobs.delete() -def test_sigint(schema_any, subjects): +def test_sigint(schema_any): try: schema.SigIntTable().populate(reserve_jobs=True) except KeyboardInterrupt: @@ -76,7 +76,7 @@ def test_sigint(schema_any, subjects): assert error_message == "KeyboardInterrupt" -def test_sigterm(schema_any, subjects): +def test_sigterm(schema_any): try: schema.SigTermTable().populate(reserve_jobs=True) except SystemExit: @@ -88,7 +88,7 @@ def test_sigterm(schema_any, subjects): assert error_message == "SystemExit: SIGTERM received" -def test_suppress_dj_errors(schema_any, subjects): +def test_suppress_dj_errors(schema_any): """test_suppress_dj_errors: dj errors suppressible w/o native py blobs""" with dj.config(enable_python_native_blobs=False): 
schema.ErrorClass.populate(reserve_jobs=True, suppress_errors=True) @@ -131,7 +131,7 @@ def test_long_error_message(schema_any, subjects): schema_any.jobs.delete() -def test_long_error_stack(schema_any, subjects): +def test_long_error_stack(schema_any): # create long error stack STACK_SIZE = ( 89942 # Does not fit into small blob (should be 64k, but found to be higher) From 4026a56a62b1038378172d4f8e9fd72152d04d4a Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 12 Dec 2023 19:43:00 -0700 Subject: [PATCH 122/212] Fix typo --- tests/test_jobs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 03f56791a..3dfa0d682 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -82,7 +82,7 @@ def test_sigterm(schema_any): except SystemExit: pass - assert len(schema_any.jobs.fetch()), "SigTermjobs table is empty" + assert len(schema_any.jobs.fetch()), "SigTerm jobs table is empty" status, error_message = schema_any.jobs.fetch1("status", "error_message") assert status == "error" assert error_message == "SystemExit: SIGTERM received" From 52e78bbc9509a6ac69465f6a8674df047681237a Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 12 Dec 2023 19:51:37 -0700 Subject: [PATCH 123/212] test_long_error_stack requires subjects fixture --- tests/test_jobs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 3dfa0d682..37974ac86 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -131,7 +131,7 @@ def test_long_error_message(schema_any, subjects): schema_any.jobs.delete() -def test_long_error_stack(schema_any): +def test_long_error_stack(schema_any, subjects): # create long error stack STACK_SIZE = ( 89942 # Does not fit into small blob (should be 64k, but found to be higher) From ecbcac1314dbf42f290930abc2423e3f3d820c65 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Wed, 13 Dec 2023 10:00:17 -0700 Subject: [PATCH 124/212] cp to tests --- tests/schema_privileges.py | 35 ++++++++++++ tests/test_privileges.py | 109 +++++++++++++++++++++++++++++++++++++ 2 files changed, 144 insertions(+) create mode 100644 tests/schema_privileges.py create mode 100644 tests/test_privileges.py diff --git a/tests/schema_privileges.py b/tests/schema_privileges.py new file mode 100644 index 000000000..8b39e4aa1 --- /dev/null +++ b/tests/schema_privileges.py @@ -0,0 +1,35 @@ +import datajoint as dj + +schema = dj.Schema() + + +@schema +class Parent(dj.Lookup): + definition = """ + id: int + """ + contents = [(1,)] + + +@schema +class Child(dj.Computed): + definition = """ + -> Parent + """ + + def make(self, key): + self.insert1(key) + + +@schema +class NoAccess(dj.Lookup): + definition = """ + string: varchar(10) + """ + + +@schema +class NoAccessAgain(dj.Manual): + definition = """ + -> NoAccess + """ diff --git a/tests/test_privileges.py b/tests/test_privileges.py new file mode 100644 index 000000000..f32a1103f --- /dev/null +++ b/tests/test_privileges.py @@ -0,0 +1,109 @@ +import importlib +import datajoint as dj +from . import schema, CONN_INFO_ROOT, PREFIX +from . 
import schema_privileges as pipeline +from nose.tools import assert_true, raises + +namespace = locals() + + +class TestUnprivileged: + @classmethod + def setup_class(cls): + """A connection with only SELECT privilege to djtest schemas""" + cls.connection = dj.conn( + host=CONN_INFO_ROOT["host"], user="djview", password="djview", reset=True + ) + + @raises(dj.DataJointError) + def test_fail_create_schema(self): + """creating a schema with no CREATE privilege""" + return dj.Schema("forbidden_schema", namespace, connection=self.connection) + + @raises(dj.DataJointError) + def test_insert_failure(self): + unprivileged = dj.Schema( + schema.schema.database, namespace, connection=self.connection + ) + unprivileged.spawn_missing_classes() + assert_true( + issubclass(Language, dj.Lookup) + and len(Language()) == len(schema.Language()), + "failed to spawn missing classes", + ) + Language().insert1(("Socrates", "Greek")) + + @raises(dj.DataJointError) + def test_failure_to_create_table(self): + unprivileged = dj.Schema( + schema.schema.database, namespace, connection=self.connection + ) + + @unprivileged + class Try(dj.Manual): + definition = """ # should not matter really + id : int + --- + value : float + """ + + Try().insert1((1, 1.5)) + + +class TestSubset: + USER = "djsubset" + + @classmethod + def setup_class(cls): + conn = dj.conn( + host=CONN_INFO_ROOT["host"], + user=CONN_INFO_ROOT["user"], + password=CONN_INFO_ROOT["password"], + reset=True, + ) + pipeline.schema.activate(f"{PREFIX}_pipeline") + conn.query( + f""" + CREATE USER IF NOT EXISTS '{cls.USER}'@'%%' + IDENTIFIED BY '{cls.USER}' + """ + ) + conn.query( + f""" + GRANT SELECT, INSERT, UPDATE, DELETE + ON `{PREFIX}_pipeline`.`#parent` + TO '{cls.USER}'@'%%' + """ + ) + conn.query( + f""" + GRANT SELECT, INSERT, UPDATE, DELETE + ON `{PREFIX}_pipeline`.`__child` + TO '{cls.USER}'@'%%' + """ + ) + cls.connection = dj.conn( + host=CONN_INFO_ROOT["host"], + user=cls.USER, + password=cls.USER, + reset=True, + ) + + @classmethod + def teardown_class(cls): + conn = dj.conn( + host=CONN_INFO_ROOT["host"], + user=CONN_INFO_ROOT["user"], + password=CONN_INFO_ROOT["password"], + reset=True, + ) + conn.query(f"DROP USER {cls.USER}") + conn.query(f"DROP DATABASE {PREFIX}_pipeline") + + def test_populate_activate(self): + importlib.reload(pipeline) + pipeline.schema.activate( + f"{PREFIX}_pipeline", create_schema=True, create_tables=False + ) + pipeline.Child.populate() + assert pipeline.Child.progress(display=False)[0] == 0 From fdd435288e4268689e8f466575cc2cc612a36c8c Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Wed, 13 Dec 2023 10:14:58 -0700 Subject: [PATCH 125/212] Migrate TestUnprivileged --- tests/schema_privileges.py | 11 +++--- tests/test_privileges.py | 72 ++++++++++++++++++++------------------ 2 files changed, 43 insertions(+), 40 deletions(-) diff --git a/tests/schema_privileges.py b/tests/schema_privileges.py index 8b39e4aa1..57f8ebb74 100644 --- a/tests/schema_privileges.py +++ b/tests/schema_privileges.py @@ -1,9 +1,7 @@ import datajoint as dj +import inspect -schema = dj.Schema() - -@schema class Parent(dj.Lookup): definition = """ id: int @@ -11,7 +9,6 @@ class Parent(dj.Lookup): contents = [(1,)] -@schema class Child(dj.Computed): definition = """ -> Parent @@ -21,15 +18,17 @@ def make(self, key): self.insert1(key) -@schema class NoAccess(dj.Lookup): definition = """ string: varchar(10) """ -@schema class NoAccessAgain(dj.Manual): definition = """ -> NoAccess """ + + +LOCALS_PRIV = {k: v for k, v in locals().items() if 
inspect.isclass(v)} +__all__ = list(LOCALS_PRIV) \ No newline at end of file diff --git a/tests/test_privileges.py b/tests/test_privileges.py index f32a1103f..35ca35958 100644 --- a/tests/test_privileges.py +++ b/tests/test_privileges.py @@ -1,42 +1,45 @@ +import os +import pytest import importlib import datajoint as dj from . import schema, CONN_INFO_ROOT, PREFIX -from . import schema_privileges as pipeline -from nose.tools import assert_true, raises +from . import schema_privileges namespace = locals() +@pytest.fixture +def connection_djview(connection_root): + """ + A connection with only SELECT privilege to djtest schemas. + Requires connection_root fixture so that `djview` user exists. + """ + connection = dj.conn( + host=os.getenv("DJ_HOST"), + user="djview", + password="djview", + reset=True, + ) + yield connection -class TestUnprivileged: - @classmethod - def setup_class(cls): - """A connection with only SELECT privilege to djtest schemas""" - cls.connection = dj.conn( - host=CONN_INFO_ROOT["host"], user="djview", password="djview", reset=True - ) - @raises(dj.DataJointError) - def test_fail_create_schema(self): +class TestUnprivileged: + def test_fail_create_schema(self, connection_djview): """creating a schema with no CREATE privilege""" - return dj.Schema("forbidden_schema", namespace, connection=self.connection) + with pytest.raises(dj.DataJointError): + return dj.Schema("forbidden_schema", namespace, connection=connection_djview) - @raises(dj.DataJointError) - def test_insert_failure(self): + def test_insert_failure(self, connection_djview, schema_any): unprivileged = dj.Schema( - schema.schema.database, namespace, connection=self.connection + schema_any.database, namespace, connection=connection_djview ) unprivileged.spawn_missing_classes() - assert_true( - issubclass(Language, dj.Lookup) - and len(Language()) == len(schema.Language()), - "failed to spawn missing classes", - ) - Language().insert1(("Socrates", "Greek")) + assert issubclass(Language, dj.Lookup) and len(Language()) == len(schema.Language()), "failed to spawn missing classes" + with pytest.raises(dj.DataJointError): + Language().insert1(("Socrates", "Greek")) - @raises(dj.DataJointError) - def test_failure_to_create_table(self): + def test_failure_to_create_table(self, connection_djview, schema_any): unprivileged = dj.Schema( - schema.schema.database, namespace, connection=self.connection + schema_any.database, namespace, connection=connection_djview ) @unprivileged @@ -47,7 +50,8 @@ class Try(dj.Manual): value : float """ - Try().insert1((1, 1.5)) + with pytest.raises(dj.DataJointError): + Try().insert1((1, 1.5)) class TestSubset: @@ -61,7 +65,7 @@ def setup_class(cls): password=CONN_INFO_ROOT["password"], reset=True, ) - pipeline.schema.activate(f"{PREFIX}_pipeline") + schema_privileges.schema.activate(f"{PREFIX}_schema_privileges") conn.query( f""" CREATE USER IF NOT EXISTS '{cls.USER}'@'%%' @@ -71,14 +75,14 @@ def setup_class(cls): conn.query( f""" GRANT SELECT, INSERT, UPDATE, DELETE - ON `{PREFIX}_pipeline`.`#parent` + ON `{PREFIX}_schema_privileges`.`#parent` TO '{cls.USER}'@'%%' """ ) conn.query( f""" GRANT SELECT, INSERT, UPDATE, DELETE - ON `{PREFIX}_pipeline`.`__child` + ON `{PREFIX}_schema_privileges`.`__child` TO '{cls.USER}'@'%%' """ ) @@ -98,12 +102,12 @@ def teardown_class(cls): reset=True, ) conn.query(f"DROP USER {cls.USER}") - conn.query(f"DROP DATABASE {PREFIX}_pipeline") + conn.query(f"DROP DATABASE {PREFIX}_schema_privileges") def test_populate_activate(self): - importlib.reload(pipeline) - 
pipeline.schema.activate( - f"{PREFIX}_pipeline", create_schema=True, create_tables=False + importlib.reload(schema_privileges) + schema_privileges.schema.activate( + f"{PREFIX}_schema_privileges", create_schema=True, create_tables=False ) - pipeline.Child.populate() - assert pipeline.Child.progress(display=False)[0] == 0 + schema_privileges.Child.populate() + assert schema_privileges.Child.progress(display=False)[0] == 0 From 37248ff60d633085b4534c98d0f81e82bd508b31 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Wed, 13 Dec 2023 10:15:02 -0700 Subject: [PATCH 126/212] Format with black --- tests/schema_privileges.py | 2 +- tests/test_privileges.py | 9 +++++++-- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/tests/schema_privileges.py b/tests/schema_privileges.py index 57f8ebb74..b53d6b264 100644 --- a/tests/schema_privileges.py +++ b/tests/schema_privileges.py @@ -31,4 +31,4 @@ class NoAccessAgain(dj.Manual): LOCALS_PRIV = {k: v for k, v in locals().items() if inspect.isclass(v)} -__all__ = list(LOCALS_PRIV) \ No newline at end of file +__all__ = list(LOCALS_PRIV) diff --git a/tests/test_privileges.py b/tests/test_privileges.py index 35ca35958..0cb807a6b 100644 --- a/tests/test_privileges.py +++ b/tests/test_privileges.py @@ -7,6 +7,7 @@ namespace = locals() + @pytest.fixture def connection_djview(connection_root): """ @@ -26,14 +27,18 @@ class TestUnprivileged: def test_fail_create_schema(self, connection_djview): """creating a schema with no CREATE privilege""" with pytest.raises(dj.DataJointError): - return dj.Schema("forbidden_schema", namespace, connection=connection_djview) + return dj.Schema( + "forbidden_schema", namespace, connection=connection_djview + ) def test_insert_failure(self, connection_djview, schema_any): unprivileged = dj.Schema( schema_any.database, namespace, connection=connection_djview ) unprivileged.spawn_missing_classes() - assert issubclass(Language, dj.Lookup) and len(Language()) == len(schema.Language()), "failed to spawn missing classes" + assert issubclass(Language, dj.Lookup) and len(Language()) == len( + schema.Language() + ), "failed to spawn missing classes" with pytest.raises(dj.DataJointError): Language().insert1(("Socrates", "Greek")) From 08fcac056c1d3207db160d908ba3dc3023805afe Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Wed, 13 Dec 2023 10:30:56 -0700 Subject: [PATCH 127/212] Separate fixture for DB creds dict --- tests/conftest.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 9d697ef47..470c2f440 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,5 +1,6 @@ import datajoint as dj from packaging import version +from typing import Dict import os from os import environ, remove import minio @@ -52,12 +53,17 @@ def enable_filepath_feature(monkeypatch): @pytest.fixture(scope="session") -def connection_root_bare(): - connection = dj.Connection( +def db_creds_root() -> Dict: + return dict( host=os.getenv("DJ_HOST"), user=os.getenv("DJ_USER"), password=os.getenv("DJ_PASS"), ) + + +@pytest.fixture(scope="session") +def connection_root_bare(db_creds_root): + connection = dj.Connection(**db_creds_root) yield connection From 156d22968dc80df9d23e599dd0900f91979d66f3 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Wed, 13 Dec 2023 10:31:18 -0700 Subject: [PATCH 128/212] First pass at migrating test_privileges --- tests/test_privileges.py | 113 ++++++++++++++++++++------------------- 1 file changed, 58 insertions(+), 55 deletions(-) diff --git 
a/tests/test_privileges.py b/tests/test_privileges.py index 0cb807a6b..23e1dc327 100644 --- a/tests/test_privileges.py +++ b/tests/test_privileges.py @@ -1,21 +1,74 @@ import os import pytest -import importlib import datajoint as dj from . import schema, CONN_INFO_ROOT, PREFIX from . import schema_privileges namespace = locals() +USER = "djsubset" + + +@pytest.fixture +def schema_priv(connection_test): + schema_priv = dj.Schema( + # PREFIX + "_schema_privileges", + context=schema_privileges.LOCALS_PRIV, + connection=connection_test, + ) + schema_priv(schema_privileges.Parent) + schema_priv(schema_privileges.Child) + schema_priv(schema_privileges.NoAccess) + schema_priv(schema_privileges.NoAccessAgain) + yield schema_priv + if schema_priv.is_activated(): + schema_priv.drop() + + +@pytest.fixture +def connection_djsubset(connection_root, db_creds_root, schema_priv): + user = "djsubset" + conn = dj.conn(**db_creds_root, reset=True) + schema_priv.activate(f"{PREFIX}_schema_privileges") + conn.query( + f""" + CREATE USER IF NOT EXISTS '{user}'@'%%' + IDENTIFIED BY '{user}' + """ + ) + conn.query( + f""" + GRANT SELECT, INSERT, UPDATE, DELETE + ON `{PREFIX}_schema_privileges`.`#parent` + TO '{user}'@'%%' + """ + ) + conn.query( + f""" + GRANT SELECT, INSERT, UPDATE, DELETE + ON `{PREFIX}_schema_privileges`.`__child` + TO '{user}'@'%%' + """ + ) + conn_djsubset = dj.conn( + host=db_creds_root["host"], + user=user, + password=user, + reset=True, + ) + yield conn_djsubset + conn.query(f"DROP USER {user}") + conn.query(f"DROP DATABASE {PREFIX}_schema_privileges") + @pytest.fixture -def connection_djview(connection_root): +def connection_djview(connection_root, db_creds_root): """ A connection with only SELECT privilege to djtest schemas. Requires connection_root fixture so that `djview` user exists. 
""" connection = dj.conn( - host=os.getenv("DJ_HOST"), + host=db_creds_root["host"], user="djview", password="djview", reset=True, @@ -60,58 +113,8 @@ class Try(dj.Manual): class TestSubset: - USER = "djsubset" - - @classmethod - def setup_class(cls): - conn = dj.conn( - host=CONN_INFO_ROOT["host"], - user=CONN_INFO_ROOT["user"], - password=CONN_INFO_ROOT["password"], - reset=True, - ) - schema_privileges.schema.activate(f"{PREFIX}_schema_privileges") - conn.query( - f""" - CREATE USER IF NOT EXISTS '{cls.USER}'@'%%' - IDENTIFIED BY '{cls.USER}' - """ - ) - conn.query( - f""" - GRANT SELECT, INSERT, UPDATE, DELETE - ON `{PREFIX}_schema_privileges`.`#parent` - TO '{cls.USER}'@'%%' - """ - ) - conn.query( - f""" - GRANT SELECT, INSERT, UPDATE, DELETE - ON `{PREFIX}_schema_privileges`.`__child` - TO '{cls.USER}'@'%%' - """ - ) - cls.connection = dj.conn( - host=CONN_INFO_ROOT["host"], - user=cls.USER, - password=cls.USER, - reset=True, - ) - - @classmethod - def teardown_class(cls): - conn = dj.conn( - host=CONN_INFO_ROOT["host"], - user=CONN_INFO_ROOT["user"], - password=CONN_INFO_ROOT["password"], - reset=True, - ) - conn.query(f"DROP USER {cls.USER}") - conn.query(f"DROP DATABASE {PREFIX}_schema_privileges") - - def test_populate_activate(self): - importlib.reload(schema_privileges) - schema_privileges.schema.activate( + def test_populate_activate(self, connection_djsubset, schema_priv): + schema_priv.activate( f"{PREFIX}_schema_privileges", create_schema=True, create_tables=False ) schema_privileges.Child.populate() From 05b3b002495738aee74c36341cd7ca90b88eb6d4 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Wed, 13 Dec 2023 10:45:46 -0700 Subject: [PATCH 129/212] Clean up --- tests/test_privileges.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/test_privileges.py b/tests/test_privileges.py index 23e1dc327..4670e8e81 100644 --- a/tests/test_privileges.py +++ b/tests/test_privileges.py @@ -6,8 +6,6 @@ namespace = locals() -USER = "djsubset" - @pytest.fixture def schema_priv(connection_test): From 9b0df13058ab5fcfaeb2fa5f3723e0fdcd50824e Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Wed, 13 Dec 2023 11:03:02 -0700 Subject: [PATCH 130/212] rm commented code --- tests/test_privileges.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/test_privileges.py b/tests/test_privileges.py index 4670e8e81..949dbc8aa 100644 --- a/tests/test_privileges.py +++ b/tests/test_privileges.py @@ -10,7 +10,6 @@ @pytest.fixture def schema_priv(connection_test): schema_priv = dj.Schema( - # PREFIX + "_schema_privileges", context=schema_privileges.LOCALS_PRIV, connection=connection_test, ) From 9621733aa1febffee86cd6677e735b314853f225 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Wed, 13 Dec 2023 11:06:27 -0700 Subject: [PATCH 131/212] cp to tests --- tests/test_reconnection.py | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 tests/test_reconnection.py diff --git a/tests/test_reconnection.py b/tests/test_reconnection.py new file mode 100644 index 000000000..b275766ae --- /dev/null +++ b/tests/test_reconnection.py @@ -0,0 +1,35 @@ +""" +Collection of test cases to test connection module. +""" + +from nose.tools import assert_true, assert_false, raises +import datajoint as dj +from datajoint import DataJointError +from . 
import CONN_INFO + + +class TestReconnect: + """ + test reconnection + """ + + def setup(self): + self.conn = dj.conn(reset=True, **CONN_INFO) + + def test_close(self): + assert_true(self.conn.is_connected, "Connection should be alive") + self.conn.close() + assert_false(self.conn.is_connected, "Connection should now be closed") + + def test_reconnect(self): + assert_true(self.conn.is_connected, "Connection should be alive") + self.conn.close() + self.conn.query("SHOW DATABASES;", reconnect=True).fetchall() + assert_true(self.conn.is_connected, "Connection should be alive") + + @raises(DataJointError) + def test_reconnect_throws_error_in_transaction(self): + assert_true(self.conn.is_connected, "Connection should be alive") + with self.conn.transaction: + self.conn.close() + self.conn.query("SHOW DATABASES;", reconnect=True).fetchall() From f3a5dd1f36d761a0bef720c402c03c2ff27e3c85 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Wed, 13 Dec 2023 11:06:49 -0700 Subject: [PATCH 132/212] nose2pytest test_reconnection --- tests/test_reconnection.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/test_reconnection.py b/tests/test_reconnection.py index b275766ae..6eb0343b5 100644 --- a/tests/test_reconnection.py +++ b/tests/test_reconnection.py @@ -17,19 +17,19 @@ def setup(self): self.conn = dj.conn(reset=True, **CONN_INFO) def test_close(self): - assert_true(self.conn.is_connected, "Connection should be alive") + assert self.conn.is_connected, "Connection should be alive" self.conn.close() - assert_false(self.conn.is_connected, "Connection should now be closed") + assert not self.conn.is_connected, "Connection should now be closed" def test_reconnect(self): - assert_true(self.conn.is_connected, "Connection should be alive") + assert self.conn.is_connected, "Connection should be alive" self.conn.close() self.conn.query("SHOW DATABASES;", reconnect=True).fetchall() - assert_true(self.conn.is_connected, "Connection should be alive") + assert self.conn.is_connected, "Connection should be alive" @raises(DataJointError) def test_reconnect_throws_error_in_transaction(self): - assert_true(self.conn.is_connected, "Connection should be alive") + assert self.conn.is_connected, "Connection should be alive" with self.conn.transaction: self.conn.close() self.conn.query("SHOW DATABASES;", reconnect=True).fetchall() From e3672f60e68913a3bde1267350abc8bfdf39a1c6 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Wed, 13 Dec 2023 11:11:17 -0700 Subject: [PATCH 133/212] Migrate test_reconnection --- tests/test_reconnection.py | 45 +++++++++++++++++++------------------- 1 file changed, 23 insertions(+), 22 deletions(-) diff --git a/tests/test_reconnection.py b/tests/test_reconnection.py index 6eb0343b5..262531243 100644 --- a/tests/test_reconnection.py +++ b/tests/test_reconnection.py @@ -2,34 +2,35 @@ Collection of test cases to test connection module. """ -from nose.tools import assert_true, assert_false, raises +import pytest import datajoint as dj from datajoint import DataJointError from . 
import CONN_INFO +@pytest.fixture +def conn(connection_root): + return dj.conn(reset=True, **CONN_INFO) + + class TestReconnect: """ - test reconnection + Test reconnection """ - def setup(self): - self.conn = dj.conn(reset=True, **CONN_INFO) - - def test_close(self): - assert self.conn.is_connected, "Connection should be alive" - self.conn.close() - assert not self.conn.is_connected, "Connection should now be closed" - - def test_reconnect(self): - assert self.conn.is_connected, "Connection should be alive" - self.conn.close() - self.conn.query("SHOW DATABASES;", reconnect=True).fetchall() - assert self.conn.is_connected, "Connection should be alive" - - @raises(DataJointError) - def test_reconnect_throws_error_in_transaction(self): - assert self.conn.is_connected, "Connection should be alive" - with self.conn.transaction: - self.conn.close() - self.conn.query("SHOW DATABASES;", reconnect=True).fetchall() + def test_close(self, conn): + assert conn.is_connected, "Connection should be alive" + conn.close() + assert not conn.is_connected, "Connection should now be closed" + + def test_reconnect(self, conn): + assert conn.is_connected, "Connection should be alive" + conn.close() + conn.query("SHOW DATABASES;", reconnect=True).fetchall() + assert conn.is_connected, "Connection should be alive" + + def test_reconnect_throws_error_in_transaction(self, conn): + assert conn.is_connected, "Connection should be alive" + with conn.transaction, pytest.raises(DataJointError): + conn.close() + conn.query("SHOW DATABASES;", reconnect=True).fetchall() From 394cad97a9a10e6732038d8c822d98baf528f583 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Wed, 13 Dec 2023 11:24:07 -0700 Subject: [PATCH 134/212] Add default values for db_creds_root --- tests/conftest.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 470c2f440..249080601 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -55,9 +55,9 @@ def enable_filepath_feature(monkeypatch): @pytest.fixture(scope="session") def db_creds_root() -> Dict: return dict( - host=os.getenv("DJ_HOST"), - user=os.getenv("DJ_USER"), - password=os.getenv("DJ_PASS"), + host=os.getenv("DJ_HOST", "fakeservices.datajoint.io"), + user=os.getenv("DJ_USER", "root"), + password=os.getenv("DJ_PASS", "password"), ) From 36623f1cb7fcd25429d064c8788648e781fc0949 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Wed, 13 Dec 2023 12:39:12 -0700 Subject: [PATCH 135/212] Format with black --- tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 249080601..d4e4a23c3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -55,7 +55,7 @@ def enable_filepath_feature(monkeypatch): @pytest.fixture(scope="session") def db_creds_root() -> Dict: return dict( - host=os.getenv("DJ_HOST", "fakeservices.datajoint.io"), + host=os.getenv("DJ_HOST", "fakeservices.datajoint.io"), user=os.getenv("DJ_USER", "root"), password=os.getenv("DJ_PASS", "password"), ) From 37b8fb3a32f99be6457f41b8e4a3696b2d954156 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Wed, 13 Dec 2023 12:40:52 -0700 Subject: [PATCH 136/212] cp to tests --- tests/test_relation.py | 311 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 311 insertions(+) create mode 100644 tests/test_relation.py diff --git a/tests/test_relation.py b/tests/test_relation.py new file mode 100644 index 000000000..a5f5da3af --- /dev/null +++ b/tests/test_relation.py @@ -0,0 +1,311 @@ +from inspect import getmembers 
+import re +import pandas +import numpy as np +from nose.tools import ( + assert_equal, + assert_not_equal, + assert_true, + assert_list_equal, + raises, +) +import datajoint as dj +from datajoint.table import Table +from unittest.mock import patch + +from . import schema + + +def relation_selector(attr): + try: + return issubclass(attr, Table) + except TypeError: + return False + + +class TestRelation: + """ + Test base relations: insert, delete + """ + + @classmethod + def setup_class(cls): + cls.test = schema.TTest() + cls.test_extra = schema.TTestExtra() + cls.test_no_extra = schema.TTestNoExtra() + cls.user = schema.User() + cls.subject = schema.Subject() + cls.experiment = schema.Experiment() + cls.trial = schema.Trial() + cls.ephys = schema.Ephys() + cls.channel = schema.Ephys.Channel() + cls.img = schema.Image() + cls.trash = schema.UberTrash() + + def test_contents(self): + """ + test the ability of tables to self-populate using the contents property + """ + # test contents + assert_true(self.user) + assert_true(len(self.user) == len(self.user.contents)) + u = self.user.fetch(order_by=["username"]) + assert_list_equal( + list(u["username"]), sorted([s[0] for s in self.user.contents]) + ) + + # test prepare + assert_true(self.subject) + assert_true(len(self.subject) == len(self.subject.contents)) + u = self.subject.fetch(order_by=["subject_id"]) + assert_list_equal( + list(u["subject_id"]), sorted([s[0] for s in self.subject.contents]) + ) + + @raises(dj.DataJointError) + def test_misnamed_attribute1(self): + self.user.insert([dict(username="Bob"), dict(user="Alice")]) + + @raises(KeyError) + def test_misnamed_attribute2(self): + self.user.insert1(dict(user="Bob")) + + @raises(KeyError) + def test_extra_attribute1(self): + self.user.insert1(dict(username="Robert", spouse="Alice")) + + def test_extra_attribute2(self): + self.user.insert1( + dict(username="Robert", spouse="Alice"), ignore_extra_fields=True + ) + + @raises(NotImplementedError) + def test_missing_definition(self): + @schema.schema + class MissingDefinition(dj.Manual): + definitions = """ # misspelled definition + id : int + --- + comment : varchar(16) # otherwise everything's normal + """ + + @raises(dj.DataJointError) + def test_empty_insert1(self): + self.user.insert1(()) + + @raises(dj.DataJointError) + def test_empty_insert(self): + self.user.insert([()]) + + @raises(dj.DataJointError) + def test_wrong_arguments_insert(self): + self.user.insert1(("First", "Second")) + + @raises(dj.DataJointError) + def test_wrong_insert_type(self): + self.user.insert1(3) + + def test_insert_select(self): + schema.TTest2.delete() + schema.TTest2.insert(schema.TTest) + assert_equal(len(schema.TTest2()), len(schema.TTest())) + + original_length = len(self.subject) + elements = self.subject.proj(..., s="subject_id") + elements = elements.proj( + "real_id", + "date_of_birth", + "subject_notes", + subject_id="s+1000", + species='"human"', + ) + self.subject.insert(elements, ignore_extra_fields=True) + assert_equal(len(self.subject), 2 * original_length) + + def test_insert_pandas_roundtrip(self): + """ensure fetched frames can be inserted""" + schema.TTest2.delete() + n = len(schema.TTest()) + assert_true(n > 0) + df = schema.TTest.fetch(format="frame") + assert_true(isinstance(df, pandas.DataFrame)) + assert_equal(len(df), n) + schema.TTest2.insert(df) + assert_equal(len(schema.TTest2()), n) + + def test_insert_pandas_userframe(self): + """ + ensure simple user-created frames (1 field, non-custom index) + can be inserted without extra 
index adjustment + """ + schema.TTest2.delete() + n = len(schema.TTest()) + assert_true(n > 0) + df = pandas.DataFrame(schema.TTest.fetch()) + assert_true(isinstance(df, pandas.DataFrame)) + assert_equal(len(df), n) + schema.TTest2.insert(df) + assert_equal(len(schema.TTest2()), n) + + @raises(dj.DataJointError) + def test_insert_select_ignore_extra_fields0(self): + """need ignore extra fields for insert select""" + self.test_extra.insert1((self.test.fetch("key").max() + 1, 0, 0)) + self.test.insert(self.test_extra) + + def test_insert_select_ignore_extra_fields1(self): + """make sure extra fields works in insert select""" + self.test_extra.delete() + keyno = self.test.fetch("key").max() + 1 + self.test_extra.insert1((keyno, 0, 0)) + self.test.insert(self.test_extra, ignore_extra_fields=True) + assert keyno in self.test.fetch("key") + + def test_insert_select_ignore_extra_fields2(self): + """make sure insert select still works when ignoring extra fields when there are none""" + self.test_no_extra.delete() + self.test_no_extra.insert(self.test, ignore_extra_fields=True) + + def test_insert_select_ignore_extra_fields3(self): + """make sure insert select works for from query result""" + self.test_no_extra.delete() + keystr = str(self.test_extra.fetch("key").max()) + self.test_no_extra.insert( + (self.test_extra & "`key`=" + keystr), ignore_extra_fields=True + ) + + def test_skip_duplicates(self): + """test that skip_duplicates works when inserting from another table""" + self.test_no_extra.delete() + self.test_no_extra.insert( + self.test, ignore_extra_fields=True, skip_duplicates=True + ) + self.test_no_extra.insert( + self.test, ignore_extra_fields=True, skip_duplicates=True + ) + + def test_replace(self): + """ + Test replacing or ignoring duplicate entries + """ + key = dict(subject_id=7) + date = "2015-01-01" + self.subject.insert1(dict(key, real_id=7, date_of_birth=date, subject_notes="")) + assert_equal( + date, str((self.subject & key).fetch1("date_of_birth")), "incorrect insert" + ) + date = "2015-01-02" + self.subject.insert1( + dict(key, real_id=7, date_of_birth=date, subject_notes=""), + skip_duplicates=True, + ) + assert_not_equal( + date, + str((self.subject & key).fetch1("date_of_birth")), + "inappropriate replace", + ) + self.subject.insert1( + dict(key, real_id=7, date_of_birth=date, subject_notes=""), replace=True + ) + assert_equal( + date, str((self.subject & key).fetch1("date_of_birth")), "replace failed" + ) + + def test_delete_quick(self): + """Tests quick deletion""" + tmp = np.array( + [ + (2, "Klara", "monkey", "2010-01-01", ""), + (1, "Peter", "mouse", "2015-01-01", ""), + ], + dtype=self.subject.heading.as_dtype, + ) + self.subject.insert(tmp) + s = self.subject & ( + "subject_id in (%s)" % ",".join(str(r) for r in tmp["subject_id"]) + ) + assert_true(len(s) == 2, "insert did not work.") + s.delete_quick() + assert_true(len(s) == 0, "delete did not work.") + + def test_skip_duplicate(self): + """Tests if duplicates are properly skipped.""" + tmp = np.array( + [ + (2, "Klara", "monkey", "2010-01-01", ""), + (1, "Peter", "mouse", "2015-01-01", ""), + ], + dtype=self.subject.heading.as_dtype, + ) + self.subject.insert(tmp) + tmp = np.array( + [ + (2, "Klara", "monkey", "2010-01-01", ""), + (1, "Peter", "mouse", "2015-01-01", ""), + ], + dtype=self.subject.heading.as_dtype, + ) + self.subject.insert(tmp, skip_duplicates=True) + + @raises(dj.errors.DuplicateError) + def test_not_skip_duplicate(self): + """Tests if duplicates are not skipped.""" + tmp = np.array( + [ + 
(2, "Klara", "monkey", "2010-01-01", ""), + (2, "Klara", "monkey", "2010-01-01", ""), + (1, "Peter", "mouse", "2015-01-01", ""), + ], + dtype=self.subject.heading.as_dtype, + ) + self.subject.insert(tmp, skip_duplicates=False) + + @raises(dj.errors.MissingAttributeError) + def test_no_error_suppression(self): + """skip_duplicates=True should not suppress other errors""" + self.test.insert([dict(key=100)], skip_duplicates=True) + + def test_blob_insert(self): + """Tests inserting and retrieving blobs.""" + X = np.random.randn(20, 10) + self.img.insert1((1, X)) + Y = self.img.fetch()[0]["img"] + assert_true(np.all(X == Y), "Inserted and retrieved image are not identical") + + def test_drop(self): + """Tests dropping tables""" + dj.config["safemode"] = True + with patch.object(dj.utils, "input", create=True, return_value="yes"): + self.trash.drop() + try: + self.trash.fetch() + raise Exception("Fetched after table dropped.") + except dj.DataJointError: + pass + finally: + dj.config["safemode"] = False + + def test_table_regexp(self): + """Test whether table names are matched by regular expressions""" + tiers = [dj.Imported, dj.Manual, dj.Lookup, dj.Computed] + for name, rel in getmembers(schema, relation_selector): + assert_true( + re.match(rel.tier_regexp, rel.table_name), + "Regular expression does not match for {name}".format(name=name), + ) + for tier in tiers: + assert_true( + issubclass(rel, tier) + or not re.match(tier.tier_regexp, rel.table_name), + "Regular expression matches for {name} but should not".format( + name=name + ), + ) + + def test_table_size(self): + """test getting the size of the table and its indices in bytes""" + number_of_bytes = self.experiment.size_on_disk + assert_true(isinstance(number_of_bytes, int) and number_of_bytes > 100) + + def test_repr_html(self): + assert_true(self.ephys._repr_html_().strip().startswith(" Date: Wed, 13 Dec 2023 12:42:06 -0700 Subject: [PATCH 137/212] nose2pytest test_reconnection --- tests/test_relation.py | 58 ++++++++++++++++++++---------------------- 1 file changed, 27 insertions(+), 31 deletions(-) diff --git a/tests/test_relation.py b/tests/test_relation.py index a5f5da3af..0a6e2f436 100644 --- a/tests/test_relation.py +++ b/tests/test_relation.py @@ -47,20 +47,18 @@ def test_contents(self): test the ability of tables to self-populate using the contents property """ # test contents - assert_true(self.user) - assert_true(len(self.user) == len(self.user.contents)) + assert self.user + assert len(self.user) == len(self.user.contents) u = self.user.fetch(order_by=["username"]) - assert_list_equal( - list(u["username"]), sorted([s[0] for s in self.user.contents]) - ) + assert ( + list(u["username"]) == sorted([s[0] for s in self.user.contents])) # test prepare - assert_true(self.subject) - assert_true(len(self.subject) == len(self.subject.contents)) + assert self.subject + assert len(self.subject) == len(self.subject.contents) u = self.subject.fetch(order_by=["subject_id"]) - assert_list_equal( - list(u["subject_id"]), sorted([s[0] for s in self.subject.contents]) - ) + assert ( + list(u["subject_id"]) == sorted([s[0] for s in self.subject.contents])) @raises(dj.DataJointError) def test_misnamed_attribute1(self): @@ -108,7 +106,7 @@ def test_wrong_insert_type(self): def test_insert_select(self): schema.TTest2.delete() schema.TTest2.insert(schema.TTest) - assert_equal(len(schema.TTest2()), len(schema.TTest())) + assert len(schema.TTest2()) == len(schema.TTest()) original_length = len(self.subject) elements = self.subject.proj(..., 
s="subject_id") @@ -120,18 +118,18 @@ def test_insert_select(self): species='"human"', ) self.subject.insert(elements, ignore_extra_fields=True) - assert_equal(len(self.subject), 2 * original_length) + assert len(self.subject) == 2 * original_length def test_insert_pandas_roundtrip(self): """ensure fetched frames can be inserted""" schema.TTest2.delete() n = len(schema.TTest()) - assert_true(n > 0) + assert n > 0 df = schema.TTest.fetch(format="frame") - assert_true(isinstance(df, pandas.DataFrame)) - assert_equal(len(df), n) + assert isinstance(df, pandas.DataFrame) + assert len(df) == n schema.TTest2.insert(df) - assert_equal(len(schema.TTest2()), n) + assert len(schema.TTest2()) == n def test_insert_pandas_userframe(self): """ @@ -140,12 +138,12 @@ def test_insert_pandas_userframe(self): """ schema.TTest2.delete() n = len(schema.TTest()) - assert_true(n > 0) + assert n > 0 df = pandas.DataFrame(schema.TTest.fetch()) - assert_true(isinstance(df, pandas.DataFrame)) - assert_equal(len(df), n) + assert isinstance(df, pandas.DataFrame) + assert len(df) == n schema.TTest2.insert(df) - assert_equal(len(schema.TTest2()), n) + assert len(schema.TTest2()) == n @raises(dj.DataJointError) def test_insert_select_ignore_extra_fields0(self): @@ -191,9 +189,8 @@ def test_replace(self): key = dict(subject_id=7) date = "2015-01-01" self.subject.insert1(dict(key, real_id=7, date_of_birth=date, subject_notes="")) - assert_equal( - date, str((self.subject & key).fetch1("date_of_birth")), "incorrect insert" - ) + assert ( + date == str((self.subject & key).fetch1("date_of_birth"))), "incorrect insert" date = "2015-01-02" self.subject.insert1( dict(key, real_id=7, date_of_birth=date, subject_notes=""), @@ -207,9 +204,8 @@ def test_replace(self): self.subject.insert1( dict(key, real_id=7, date_of_birth=date, subject_notes=""), replace=True ) - assert_equal( - date, str((self.subject & key).fetch1("date_of_birth")), "replace failed" - ) + assert ( + date == str((self.subject & key).fetch1("date_of_birth"))), "replace failed" def test_delete_quick(self): """Tests quick deletion""" @@ -224,9 +220,9 @@ def test_delete_quick(self): s = self.subject & ( "subject_id in (%s)" % ",".join(str(r) for r in tmp["subject_id"]) ) - assert_true(len(s) == 2, "insert did not work.") + assert len(s) == 2, "insert did not work." s.delete_quick() - assert_true(len(s) == 0, "delete did not work.") + assert len(s) == 0, "delete did not work." 
def test_skip_duplicate(self, subject):
         """Tests if duplicates are properly skipped."""
@@ -270,7 +266,7 @@ def test_blob_insert(self):
         X = np.random.randn(20, 10)
         self.img.insert1((1, X))
         Y = self.img.fetch()[0]["img"]
-        assert_true(np.all(X == Y), "Inserted and retrieved image are not identical")
+        assert np.all(X == Y), "Inserted and retrieved image are not identical"

     def test_drop(self):
         """Tests dropping tables"""
@@ -305,7 +301,7 @@ def test_table_regexp(self):
     def test_table_size(self):
         """test getting the size of the table and its indices in bytes"""
         number_of_bytes = self.experiment.size_on_disk
-        assert_true(isinstance(number_of_bytes, int) and number_of_bytes > 100)
+        assert isinstance(number_of_bytes, int) and number_of_bytes > 100

     def test_repr_html(self):
-        assert_true(self.ephys._repr_html_().strip().startswith("<style"))
+        assert self.ephys._repr_html_().strip().startswith("<style")

From: Ethan Ho
Date: Wed, 13 Dec 2023 14:16:28 -0700
Subject: [PATCH 138/212] WIP migrate test_relation

---
 tests/test_relation.py | 334 ++++++++++++++++++++++------------------
 1 file changed, 178 insertions(+), 156 deletions(-)

diff --git a/tests/test_relation.py b/tests/test_relation.py
index 0a6e2f436..e5e4a0ba0 100644
--- a/tests/test_relation.py
+++ b/tests/test_relation.py
@@ -1,14 +1,8 @@
+import pytest
 from inspect import getmembers
 import re
 import pandas
 import numpy as np
-from nose.tools import (
-    assert_equal,
-    assert_not_equal,
-    assert_true,
-    assert_list_equal,
-    raises,
-)
 import datajoint as dj
 from datajoint.table import Table
 from unittest.mock import patch
@@ -16,11 +10,55 @@
 from . import schema


-def relation_selector(attr):
-    try:
-        return issubclass(attr, Table)
-    except TypeError:
-        return False
+@pytest.fixture
+def test(schema_any):
+    assert len(schema.TTest.contents)
+    yield schema.TTest()
+    assert len(schema.TTest.contents)
+
+
+@pytest.fixture
+def test_extra(schema_any):
+    assert len(schema.TTest.contents)
+    yield schema.TTestExtra()
+    assert len(schema.TTest.contents)
+
+
+@pytest.fixture
+def test_no_extra(schema_any):
+    assert len(schema.TTest.contents)
+    yield schema.TTestNoExtra()
+    assert len(schema.TTest.contents)
+
+
+@pytest.fixture
+def user(schema_any):
+    return schema.User()
+
+
+@pytest.fixture
+def subject(schema_any):
+    return schema.Subject()
+
+
+@pytest.fixture
+def experiment(schema_any):
+    return schema.Experiment()
+
+
+@pytest.fixture
+def ephys(schema_any):
+    return schema.Ephys()
+
+
+@pytest.fixture
+def img(schema_any):
+    return schema.Image()
+
+
+@pytest.fixture
+def trash(schema_any):
+    return schema.UberTrash()


 class TestRelation:
@@ -28,58 +66,38 @@ class TestRelation:
     Test base relations: insert, delete
     """

-    @classmethod
-    def setup_class(cls):
-        cls.test = schema.TTest()
-        cls.test_extra = schema.TTestExtra()
-        cls.test_no_extra = schema.TTestNoExtra()
-        cls.user = schema.User()
-        cls.subject = schema.Subject()
-        cls.experiment = schema.Experiment()
-        cls.trial = schema.Trial()
-        cls.ephys = schema.Ephys()
-        cls.channel = schema.Ephys.Channel()
-        cls.img = schema.Image()
-        cls.trash = schema.UberTrash()
-
-    def test_contents(self):
+    def test_contents(self, user, subject):
         """
         test the ability of tables to self-populate using the contents property
         """
         # test contents
-        assert self.user
-        assert len(self.user) == len(self.user.contents)
-        u = self.user.fetch(order_by=["username"])
-        assert (
-            list(u["username"]) == sorted([s[0] for s in self.user.contents]))
+        assert user
+        assert len(user) == len(user.contents)
+        u = user.fetch(order_by=["username"])
+        assert list(u["username"]) == sorted([s[0] for s in user.contents])

         # 
test prepare - assert self.subject - assert len(self.subject) == len(self.subject.contents) - u = self.subject.fetch(order_by=["subject_id"]) - assert ( - list(u["subject_id"]) == sorted([s[0] for s in self.subject.contents])) - - @raises(dj.DataJointError) - def test_misnamed_attribute1(self): - self.user.insert([dict(username="Bob"), dict(user="Alice")]) - - @raises(KeyError) - def test_misnamed_attribute2(self): - self.user.insert1(dict(user="Bob")) - - @raises(KeyError) - def test_extra_attribute1(self): - self.user.insert1(dict(username="Robert", spouse="Alice")) - - def test_extra_attribute2(self): - self.user.insert1( - dict(username="Robert", spouse="Alice"), ignore_extra_fields=True - ) + assert subject + assert len(subject) == len(subject.contents) + u = subject.fetch(order_by=["subject_id"]) + assert list(u["subject_id"]) == sorted([s[0] for s in subject.contents]) + + def test_misnamed_attribute1(self, user): + with pytest.raises(dj.DataJointError): + user.insert([dict(username="Bob"), dict(user="Alice")]) + + def test_misnamed_attribute2(self, user): + with pytest.raises(KeyError): + user.insert1(dict(user="Bob")) + + def test_extra_attribute1(self, user): + with pytest.raises(KeyError): + user.insert1(dict(username="Robert", spouse="Alice")) - @raises(NotImplementedError) - def test_missing_definition(self): - @schema.schema + def test_extra_attribute2(self, user): + user.insert1(dict(username="Robert", spouse="Alice"), ignore_extra_fields=True) + + def test_missing_definition(self, schema_any): class MissingDefinition(dj.Manual): definitions = """ # misspelled definition id : int @@ -87,29 +105,34 @@ class MissingDefinition(dj.Manual): comment : varchar(16) # otherwise everything's normal """ - @raises(dj.DataJointError) - def test_empty_insert1(self): - self.user.insert1(()) + with pytest.raises(NotImplementedError): + schema_any( + MissingDefinition, context=dict(MissingDefinition=MissingDefinition) + ) + + def test_empty_insert1(self, user): + with pytest.raises(dj.DataJointError): + user.insert1(()) - @raises(dj.DataJointError) - def test_empty_insert(self): - self.user.insert([()]) + def test_empty_insert(self, user): + with pytest.raises(dj.DataJointError): + user.insert([()]) - @raises(dj.DataJointError) - def test_wrong_arguments_insert(self): - self.user.insert1(("First", "Second")) + def test_wrong_arguments_insert(self, user): + with pytest.raises(dj.DataJointError): + user.insert1(("First", "Second")) - @raises(dj.DataJointError) - def test_wrong_insert_type(self): - self.user.insert1(3) + def test_wrong_insert_type(self, user): + with pytest.raises(dj.DataJointError): + user.insert1(3) - def test_insert_select(self): + def test_insert_select(self, subject): schema.TTest2.delete() schema.TTest2.insert(schema.TTest) assert len(schema.TTest2()) == len(schema.TTest()) - original_length = len(self.subject) - elements = self.subject.proj(..., s="subject_id") + original_length = len(subject) + elements = subject.proj(..., s="subject_id") elements = elements.proj( "real_id", "date_of_birth", @@ -117,10 +140,10 @@ def test_insert_select(self): subject_id="s+1000", species='"human"', ) - self.subject.insert(elements, ignore_extra_fields=True) - assert len(self.subject) == 2 * original_length + subject.insert(elements, ignore_extra_fields=True) + assert len(subject) == 2 * original_length - def test_insert_pandas_roundtrip(self): + def test_insert_pandas_roundtrip(self, schema_any): """ensure fetched frames can be inserted""" schema.TTest2.delete() n = len(schema.TTest()) 
@@ -131,7 +154,7 @@ def test_insert_pandas_roundtrip(self): schema.TTest2.insert(df) assert len(schema.TTest2()) == n - def test_insert_pandas_userframe(self): + def test_insert_pandas_userframe(self, schema_any): """ ensure simple user-created frames (1 field, non-custom index) can be inserted without extra index adjustment @@ -145,106 +168,102 @@ def test_insert_pandas_userframe(self): schema.TTest2.insert(df) assert len(schema.TTest2()) == n - @raises(dj.DataJointError) - def test_insert_select_ignore_extra_fields0(self): + def test_insert_select_ignore_extra_fields0(self, test, test_extra): """need ignore extra fields for insert select""" - self.test_extra.insert1((self.test.fetch("key").max() + 1, 0, 0)) - self.test.insert(self.test_extra) + test_extra.insert1((test.fetch("key").max() + 1, 0, 0)) + with pytest.raises(dj.DataJointError): + test.insert(test_extra) - def test_insert_select_ignore_extra_fields1(self): + def test_insert_select_ignore_extra_fields1(self, test, test_extra): """make sure extra fields works in insert select""" - self.test_extra.delete() - keyno = self.test.fetch("key").max() + 1 - self.test_extra.insert1((keyno, 0, 0)) - self.test.insert(self.test_extra, ignore_extra_fields=True) - assert keyno in self.test.fetch("key") + test_extra.delete() + keyno = test.fetch("key").max() + 1 + test_extra.insert1((keyno, 0, 0)) + test.insert(test_extra, ignore_extra_fields=True) + assert keyno in test.fetch("key") - def test_insert_select_ignore_extra_fields2(self): + def test_insert_select_ignore_extra_fields2(self, test_no_extra, test): """make sure insert select still works when ignoring extra fields when there are none""" - self.test_no_extra.delete() - self.test_no_extra.insert(self.test, ignore_extra_fields=True) + test_no_extra.delete() + test_no_extra.insert(test, ignore_extra_fields=True) - def test_insert_select_ignore_extra_fields3(self): + def test_insert_select_ignore_extra_fields3(self, test, test_no_extra, test_extra): """make sure insert select works for from query result""" - self.test_no_extra.delete() - keystr = str(self.test_extra.fetch("key").max()) - self.test_no_extra.insert( - (self.test_extra & "`key`=" + keystr), ignore_extra_fields=True - ) - - def test_skip_duplicates(self): + # Recreate table state from previous tests + keyno = test.fetch("key").max() + 1 + test_extra.insert1((keyno, 0, 0)) + test.insert(test_extra, ignore_extra_fields=True) + + assert len(test_extra.fetch("key")), "test_extra is empty" + test_no_extra.delete() + assert len(test_extra.fetch("key")), "test_extra is empty" + keystr = str(test_extra.fetch("key").max()) + test_no_extra.insert((test_extra & "`key`=" + keystr), ignore_extra_fields=True) + + def test_skip_duplicates(self, test_no_extra, test): """test that skip_duplicates works when inserting from another table""" - self.test_no_extra.delete() - self.test_no_extra.insert( - self.test, ignore_extra_fields=True, skip_duplicates=True - ) - self.test_no_extra.insert( - self.test, ignore_extra_fields=True, skip_duplicates=True - ) + test_no_extra.delete() + test_no_extra.insert(test, ignore_extra_fields=True, skip_duplicates=True) + test_no_extra.insert(test, ignore_extra_fields=True, skip_duplicates=True) - def test_replace(self): + def test_replace(self, subject): """ Test replacing or ignoring duplicate entries """ key = dict(subject_id=7) date = "2015-01-01" - self.subject.insert1(dict(key, real_id=7, date_of_birth=date, subject_notes="")) - assert ( - date == str((self.subject & key).fetch1("date_of_birth"))), 
"incorrect insert" + subject.insert1(dict(key, real_id=7, date_of_birth=date, subject_notes="")) + assert date == str((subject & key).fetch1("date_of_birth")), "incorrect insert" date = "2015-01-02" - self.subject.insert1( + subject.insert1( dict(key, real_id=7, date_of_birth=date, subject_notes=""), skip_duplicates=True, ) - assert_not_equal( - date, - str((self.subject & key).fetch1("date_of_birth")), - "inappropriate replace", - ) - self.subject.insert1( + assert date != str( + (subject & key).fetch1("date_of_birth") + ), "inappropriate replace" + subject.insert1( dict(key, real_id=7, date_of_birth=date, subject_notes=""), replace=True ) - assert ( - date == str((self.subject & key).fetch1("date_of_birth"))), "replace failed" + assert date == str((subject & key).fetch1("date_of_birth")), "replace failed" - def test_delete_quick(self): + def test_delete_quick(self, subject): """Tests quick deletion""" tmp = np.array( [ (2, "Klara", "monkey", "2010-01-01", ""), (1, "Peter", "mouse", "2015-01-01", ""), ], - dtype=self.subject.heading.as_dtype, + dtype=subject.heading.as_dtype, ) - self.subject.insert(tmp) - s = self.subject & ( + subject.insert(tmp) + s = subject & ( "subject_id in (%s)" % ",".join(str(r) for r in tmp["subject_id"]) ) assert len(s) == 2, "insert did not work." s.delete_quick() assert len(s) == 0, "delete did not work." - def test_skip_duplicate(self): + def test_skip_duplicate(self, subject): """Tests if duplicates are properly skipped.""" tmp = np.array( [ (2, "Klara", "monkey", "2010-01-01", ""), (1, "Peter", "mouse", "2015-01-01", ""), ], - dtype=self.subject.heading.as_dtype, + dtype=subject.heading.as_dtype, ) - self.subject.insert(tmp) + subject.insert(tmp) tmp = np.array( [ (2, "Klara", "monkey", "2010-01-01", ""), (1, "Peter", "mouse", "2015-01-01", ""), ], - dtype=self.subject.heading.as_dtype, + dtype=subject.heading.as_dtype, ) - self.subject.insert(tmp, skip_duplicates=True) + subject.insert(tmp, skip_duplicates=True) - @raises(dj.errors.DuplicateError) - def test_not_skip_duplicate(self): + def test_not_skip_duplicate(self, subject): """Tests if duplicates are not skipped.""" tmp = np.array( [ @@ -252,56 +271,59 @@ def test_not_skip_duplicate(self): (2, "Klara", "monkey", "2010-01-01", ""), (1, "Peter", "mouse", "2015-01-01", ""), ], - dtype=self.subject.heading.as_dtype, + dtype=subject.heading.as_dtype, ) - self.subject.insert(tmp, skip_duplicates=False) + with pytest.raises(dj.errors.DuplicateError): + subject.insert(tmp, skip_duplicates=False) - @raises(dj.errors.MissingAttributeError) - def test_no_error_suppression(self): + def test_no_error_suppression(self, test): """skip_duplicates=True should not suppress other errors""" - self.test.insert([dict(key=100)], skip_duplicates=True) + with pytest.raises(dj.errors.MissingAttributeError): + test.insert([dict(key=100)], skip_duplicates=True) - def test_blob_insert(self): + def test_blob_insert(self, img): """Tests inserting and retrieving blobs.""" X = np.random.randn(20, 10) - self.img.insert1((1, X)) - Y = self.img.fetch()[0]["img"] + img.insert1((1, X)) + Y = img.fetch()[0]["img"] assert np.all(X == Y), "Inserted and retrieved image are not identical" - def test_drop(self): + def test_drop(self, trash): """Tests dropping tables""" dj.config["safemode"] = True with patch.object(dj.utils, "input", create=True, return_value="yes"): - self.trash.drop() + trash.drop() try: - self.trash.fetch() + trash.fetch() raise Exception("Fetched after table dropped.") except dj.DataJointError: pass finally: 
dj.config["safemode"] = False

-    def test_table_regexp(self):
+    def test_table_regexp(self, schema_any):
         """Test whether table names are matched by regular expressions"""
+
+        def relation_selector(attr):
+            try:
+                return issubclass(attr, Table)
+            except TypeError:
+                return False
+
         tiers = [dj.Imported, dj.Manual, dj.Lookup, dj.Computed]
         for name, rel in getmembers(schema, relation_selector):
-            assert_true(
-                re.match(rel.tier_regexp, rel.table_name),
-                "Regular expression does not match for {name}".format(name=name),
-            )
+            assert re.match(
+                rel.tier_regexp, rel.table_name
+            ) == "Regular expression does not match for {name}".format(name=name)
             for tier in tiers:
-                assert_true(
-                    issubclass(rel, tier)
-                    or not re.match(tier.tier_regexp, rel.table_name),
-                    "Regular expression matches for {name} but should not".format(
-                        name=name
-                    ),
-                )
+                assert issubclass(rel, tier) or not re.match(
+                    tier.tier_regexp, rel.table_name
+                ), "Regular expression matches for {name} but should not".format(name=name)

-    def test_table_size(self):
+    def test_table_size(self, experiment):
         """test getting the size of the table and its indices in bytes"""
-        number_of_bytes = self.experiment.size_on_disk
+        number_of_bytes = experiment.size_on_disk
         assert isinstance(number_of_bytes, int) and number_of_bytes > 100

-    def test_repr_html(self):
-        assert self.ephys._repr_html_().strip().startswith("<style")
+    def test_repr_html(self, ephys):
+        assert ephys._repr_html_().strip().startswith("<style")

From: Ethan Ho
Date: Wed, 13 Dec 2023 15:04:59 -0700
Subject: [PATCH 139/212] Move tests to top level

---
 tests/test_relation.py | 517 ++++++++++++++++++++---------------------
 1 file changed, 256 insertions(+), 261 deletions(-)

diff --git a/tests/test_relation.py b/tests/test_relation.py
index e5e4a0ba0..f03328886 100644
--- a/tests/test_relation.py
+++ b/tests/test_relation.py
@@ -61,269 +61,264 @@ def trash(schema_any):
     return schema.UberTrash()


-class TestRelation:
+def test_contents(user, subject):
     """
-    Test base relations: insert, delete
+    test the ability of tables to self-populate using the contents property
     """
-
-    def test_contents(self, user, subject):
-        """
-        test the ability of tables to self-populate using the contents property
-        """
-        # test contents
-        assert user
-        assert len(user) == len(user.contents)
-        u = user.fetch(order_by=["username"])
-        assert list(u["username"]) == sorted([s[0] for s in user.contents])
-
-        # test prepare
-        assert subject
-        assert len(subject) == len(subject.contents)
-        u = subject.fetch(order_by=["subject_id"])
-        assert list(u["subject_id"]) == sorted([s[0] for s in subject.contents])
-
-    def test_misnamed_attribute1(self, user):
-        with pytest.raises(dj.DataJointError):
-            user.insert([dict(username="Bob"), dict(user="Alice")])
-
-    def test_misnamed_attribute2(self, user):
-        with pytest.raises(KeyError):
-            user.insert1(dict(user="Bob"))
-
-    def test_extra_attribute1(self, user):
-        with pytest.raises(KeyError):
-            user.insert1(dict(username="Robert", spouse="Alice"))
-
-    def test_extra_attribute2(self, user):
-        user.insert1(dict(username="Robert", spouse="Alice"), ignore_extra_fields=True)
-
-    def test_missing_definition(self, schema_any):
-        class MissingDefinition(dj.Manual):
-            definitions = """ # misspelled definition
-            id : int
-            ---
-            comment : varchar(16) # otherwise everything's normal
-            """
-
-        with pytest.raises(NotImplementedError):
-            schema_any(
-                MissingDefinition, context=dict(MissingDefinition=MissingDefinition)
-            )
-
-    def test_empty_insert1(self, user):
-        with pytest.raises(dj.DataJointError):
-            user.insert1(())
-
-    def test_empty_insert(self, user):
-        with 
pytest.raises(dj.DataJointError): - user.insert([()]) - - def test_wrong_arguments_insert(self, user): - with pytest.raises(dj.DataJointError): - user.insert1(("First", "Second")) - - def test_wrong_insert_type(self, user): - with pytest.raises(dj.DataJointError): - user.insert1(3) - - def test_insert_select(self, subject): - schema.TTest2.delete() - schema.TTest2.insert(schema.TTest) - assert len(schema.TTest2()) == len(schema.TTest()) - - original_length = len(subject) - elements = subject.proj(..., s="subject_id") - elements = elements.proj( - "real_id", - "date_of_birth", - "subject_notes", - subject_id="s+1000", - species='"human"', - ) - subject.insert(elements, ignore_extra_fields=True) - assert len(subject) == 2 * original_length - - def test_insert_pandas_roundtrip(self, schema_any): - """ensure fetched frames can be inserted""" - schema.TTest2.delete() - n = len(schema.TTest()) - assert n > 0 - df = schema.TTest.fetch(format="frame") - assert isinstance(df, pandas.DataFrame) - assert len(df) == n - schema.TTest2.insert(df) - assert len(schema.TTest2()) == n - - def test_insert_pandas_userframe(self, schema_any): - """ - ensure simple user-created frames (1 field, non-custom index) - can be inserted without extra index adjustment + # test contents + assert user + assert len(user) == len(user.contents) + u = user.fetch(order_by=["username"]) + assert list(u["username"]) == sorted([s[0] for s in user.contents]) + + # test prepare + assert subject + assert len(subject) == len(subject.contents) + u = subject.fetch(order_by=["subject_id"]) + assert list(u["subject_id"]) == sorted([s[0] for s in subject.contents]) + +def test_misnamed_attribute1(user): + with pytest.raises(dj.DataJointError): + user.insert([dict(username="Bob"), dict(user="Alice")]) + +def test_misnamed_attribute2(user): + with pytest.raises(KeyError): + user.insert1(dict(user="Bob")) + +def test_extra_attribute1(user): + with pytest.raises(KeyError): + user.insert1(dict(username="Robert", spouse="Alice")) + +def test_extra_attribute2(user): + user.insert1(dict(username="Robert", spouse="Alice"), ignore_extra_fields=True) + +def test_missing_definition(schema_any): + class MissingDefinition(dj.Manual): + definitions = """ # misspelled definition + id : int + --- + comment : varchar(16) # otherwise everything's normal """ - schema.TTest2.delete() - n = len(schema.TTest()) - assert n > 0 - df = pandas.DataFrame(schema.TTest.fetch()) - assert isinstance(df, pandas.DataFrame) - assert len(df) == n - schema.TTest2.insert(df) - assert len(schema.TTest2()) == n - - def test_insert_select_ignore_extra_fields0(self, test, test_extra): - """need ignore extra fields for insert select""" - test_extra.insert1((test.fetch("key").max() + 1, 0, 0)) - with pytest.raises(dj.DataJointError): - test.insert(test_extra) - - def test_insert_select_ignore_extra_fields1(self, test, test_extra): - """make sure extra fields works in insert select""" - test_extra.delete() - keyno = test.fetch("key").max() + 1 - test_extra.insert1((keyno, 0, 0)) - test.insert(test_extra, ignore_extra_fields=True) - assert keyno in test.fetch("key") - - def test_insert_select_ignore_extra_fields2(self, test_no_extra, test): - """make sure insert select still works when ignoring extra fields when there are none""" - test_no_extra.delete() - test_no_extra.insert(test, ignore_extra_fields=True) - - def test_insert_select_ignore_extra_fields3(self, test, test_no_extra, test_extra): - """make sure insert select works for from query result""" - # Recreate table state from 
previous tests - keyno = test.fetch("key").max() + 1 - test_extra.insert1((keyno, 0, 0)) - test.insert(test_extra, ignore_extra_fields=True) - - assert len(test_extra.fetch("key")), "test_extra is empty" - test_no_extra.delete() - assert len(test_extra.fetch("key")), "test_extra is empty" - keystr = str(test_extra.fetch("key").max()) - test_no_extra.insert((test_extra & "`key`=" + keystr), ignore_extra_fields=True) - - def test_skip_duplicates(self, test_no_extra, test): - """test that skip_duplicates works when inserting from another table""" - test_no_extra.delete() - test_no_extra.insert(test, ignore_extra_fields=True, skip_duplicates=True) - test_no_extra.insert(test, ignore_extra_fields=True, skip_duplicates=True) - - def test_replace(self, subject): - """ - Test replacing or ignoring duplicate entries - """ - key = dict(subject_id=7) - date = "2015-01-01" - subject.insert1(dict(key, real_id=7, date_of_birth=date, subject_notes="")) - assert date == str((subject & key).fetch1("date_of_birth")), "incorrect insert" - date = "2015-01-02" - subject.insert1( - dict(key, real_id=7, date_of_birth=date, subject_notes=""), - skip_duplicates=True, - ) - assert date != str( - (subject & key).fetch1("date_of_birth") - ), "inappropriate replace" - subject.insert1( - dict(key, real_id=7, date_of_birth=date, subject_notes=""), replace=True - ) - assert date == str((subject & key).fetch1("date_of_birth")), "replace failed" - - def test_delete_quick(self, subject): - """Tests quick deletion""" - tmp = np.array( - [ - (2, "Klara", "monkey", "2010-01-01", ""), - (1, "Peter", "mouse", "2015-01-01", ""), - ], - dtype=subject.heading.as_dtype, - ) - subject.insert(tmp) - s = subject & ( - "subject_id in (%s)" % ",".join(str(r) for r in tmp["subject_id"]) - ) - assert len(s) == 2, "insert did not work." - s.delete_quick() - assert len(s) == 0, "delete did not work." 
- - def test_skip_duplicate(self, subject): - """Tests if duplicates are properly skipped.""" - tmp = np.array( - [ - (2, "Klara", "monkey", "2010-01-01", ""), - (1, "Peter", "mouse", "2015-01-01", ""), - ], - dtype=subject.heading.as_dtype, - ) - subject.insert(tmp) - tmp = np.array( - [ - (2, "Klara", "monkey", "2010-01-01", ""), - (1, "Peter", "mouse", "2015-01-01", ""), - ], - dtype=subject.heading.as_dtype, - ) - subject.insert(tmp, skip_duplicates=True) - - def test_not_skip_duplicate(self, subject): - """Tests if duplicates are not skipped.""" - tmp = np.array( - [ - (2, "Klara", "monkey", "2010-01-01", ""), - (2, "Klara", "monkey", "2010-01-01", ""), - (1, "Peter", "mouse", "2015-01-01", ""), - ], - dtype=subject.heading.as_dtype, + + with pytest.raises(NotImplementedError): + schema_any( + MissingDefinition, context=dict(MissingDefinition=MissingDefinition) ) - with pytest.raises(dj.errors.DuplicateError): - subject.insert(tmp, skip_duplicates=False) - - def test_no_error_suppression(self, test): - """skip_duplicates=True should not suppress other errors""" - with pytest.raises(dj.errors.MissingAttributeError): - test.insert([dict(key=100)], skip_duplicates=True) - - def test_blob_insert(self, img): - """Tests inserting and retrieving blobs.""" - X = np.random.randn(20, 10) - img.insert1((1, X)) - Y = img.fetch()[0]["img"] - assert np.all(X == Y), "Inserted and retrieved image are not identical" - - def test_drop(self, trash): - """Tests dropping tables""" - dj.config["safemode"] = True - with patch.object(dj.utils, "input", create=True, return_value="yes"): - trash.drop() + +def test_empty_insert1(user): + with pytest.raises(dj.DataJointError): + user.insert1(()) + +def test_empty_insert(user): + with pytest.raises(dj.DataJointError): + user.insert([()]) + +def test_wrong_arguments_insert(user): + with pytest.raises(dj.DataJointError): + user.insert1(("First", "Second")) + +def test_wrong_insert_type(user): + with pytest.raises(dj.DataJointError): + user.insert1(3) + +def test_insert_select(subject): + schema.TTest2.delete() + schema.TTest2.insert(schema.TTest) + assert len(schema.TTest2()) == len(schema.TTest()) + + original_length = len(subject) + elements = subject.proj(..., s="subject_id") + elements = elements.proj( + "real_id", + "date_of_birth", + "subject_notes", + subject_id="s+1000", + species='"human"', + ) + subject.insert(elements, ignore_extra_fields=True) + assert len(subject) == 2 * original_length + +def test_insert_pandas_roundtrip(schema_any): + """ensure fetched frames can be inserted""" + schema.TTest2.delete() + n = len(schema.TTest()) + assert n > 0 + df = schema.TTest.fetch(format="frame") + assert isinstance(df, pandas.DataFrame) + assert len(df) == n + schema.TTest2.insert(df) + assert len(schema.TTest2()) == n + +def test_insert_pandas_userframe(schema_any): + """ + ensure simple user-created frames (1 field, non-custom index) + can be inserted without extra index adjustment + """ + schema.TTest2.delete() + n = len(schema.TTest()) + assert n > 0 + df = pandas.DataFrame(schema.TTest.fetch()) + assert isinstance(df, pandas.DataFrame) + assert len(df) == n + schema.TTest2.insert(df) + assert len(schema.TTest2()) == n + +def test_insert_select_ignore_extra_fields0(test, test_extra): + """need ignore extra fields for insert select""" + test_extra.insert1((test.fetch("key").max() + 1, 0, 0)) + with pytest.raises(dj.DataJointError): + test.insert(test_extra) + +def test_insert_select_ignore_extra_fields1(test, test_extra): + """make sure extra fields works in 
insert select""" + test_extra.delete() + keyno = test.fetch("key").max() + 1 + test_extra.insert1((keyno, 0, 0)) + test.insert(test_extra, ignore_extra_fields=True) + assert keyno in test.fetch("key") + +def test_insert_select_ignore_extra_fields2(test_no_extra, test): + """make sure insert select still works when ignoring extra fields when there are none""" + test_no_extra.delete() + test_no_extra.insert(test, ignore_extra_fields=True) + +def test_insert_select_ignore_extra_fields3(test, test_no_extra, test_extra): + """make sure insert select works for from query result""" + # Recreate table state from previous tests + keyno = test.fetch("key").max() + 1 + test_extra.insert1((keyno, 0, 0)) + test.insert(test_extra, ignore_extra_fields=True) + + assert len(test_extra.fetch("key")), "test_extra is empty" + test_no_extra.delete() + assert len(test_extra.fetch("key")), "test_extra is empty" + keystr = str(test_extra.fetch("key").max()) + test_no_extra.insert((test_extra & "`key`=" + keystr), ignore_extra_fields=True) + +def test_skip_duplicates(test_no_extra, test): + """test that skip_duplicates works when inserting from another table""" + test_no_extra.delete() + test_no_extra.insert(test, ignore_extra_fields=True, skip_duplicates=True) + test_no_extra.insert(test, ignore_extra_fields=True, skip_duplicates=True) + +def test_replace(subject): + """ + Test replacing or ignoring duplicate entries + """ + key = dict(subject_id=7) + date = "2015-01-01" + subject.insert1(dict(key, real_id=7, date_of_birth=date, subject_notes="")) + assert date == str((subject & key).fetch1("date_of_birth")), "incorrect insert" + date = "2015-01-02" + subject.insert1( + dict(key, real_id=7, date_of_birth=date, subject_notes=""), + skip_duplicates=True, + ) + assert date != str( + (subject & key).fetch1("date_of_birth") + ), "inappropriate replace" + subject.insert1( + dict(key, real_id=7, date_of_birth=date, subject_notes=""), replace=True + ) + assert date == str((subject & key).fetch1("date_of_birth")), "replace failed" + +def test_delete_quick(subject): + """Tests quick deletion""" + tmp = np.array( + [ + (2, "Klara", "monkey", "2010-01-01", ""), + (1, "Peter", "mouse", "2015-01-01", ""), + ], + dtype=subject.heading.as_dtype, + ) + subject.insert(tmp) + s = subject & ( + "subject_id in (%s)" % ",".join(str(r) for r in tmp["subject_id"]) + ) + assert len(s) == 2, "insert did not work." + s.delete_quick() + assert len(s) == 0, "delete did not work." 
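# Aside: a minimal sketch of the duplicate-handling semantics exercised by
# test_replace above and test_skip_duplicate / test_not_skip_duplicate below,
# assuming `subject` is the same fixture instance; the row values here are
# hypothetical, not taken from the test data:
#
#     row = dict(subject_id=99, real_id="99", species="mouse",
#                date_of_birth="2020-01-01", subject_notes="")
#     subject.insert1(row)                               # plain insert
#     subject.insert1(row, skip_duplicates=True)         # duplicate key: skipped silently
#     subject.insert1(dict(row, subject_notes="edited"),
#                     replace=True)                      # duplicate key: row overwritten
#     with pytest.raises(dj.errors.DuplicateError):
#         subject.insert1(row)                           # default: duplicate key raises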
+ +def test_skip_duplicate(subject): + """Tests if duplicates are properly skipped.""" + tmp = np.array( + [ + (2, "Klara", "monkey", "2010-01-01", ""), + (1, "Peter", "mouse", "2015-01-01", ""), + ], + dtype=subject.heading.as_dtype, + ) + subject.insert(tmp) + tmp = np.array( + [ + (2, "Klara", "monkey", "2010-01-01", ""), + (1, "Peter", "mouse", "2015-01-01", ""), + ], + dtype=subject.heading.as_dtype, + ) + subject.insert(tmp, skip_duplicates=True) + +def test_not_skip_duplicate(subject): + """Tests if duplicates are not skipped.""" + tmp = np.array( + [ + (2, "Klara", "monkey", "2010-01-01", ""), + (2, "Klara", "monkey", "2010-01-01", ""), + (1, "Peter", "mouse", "2015-01-01", ""), + ], + dtype=subject.heading.as_dtype, + ) + with pytest.raises(dj.errors.DuplicateError): + subject.insert(tmp, skip_duplicates=False) + +def test_no_error_suppression(test): + """skip_duplicates=True should not suppress other errors""" + with pytest.raises(dj.errors.MissingAttributeError): + test.insert([dict(key=100)], skip_duplicates=True) + +def test_blob_insert(img): + """Tests inserting and retrieving blobs.""" + X = np.random.randn(20, 10) + img.insert1((1, X)) + Y = img.fetch()[0]["img"] + assert np.all(X == Y), "Inserted and retrieved image are not identical" + +def test_drop(trash): + """Tests dropping tables""" + dj.config["safemode"] = True + with patch.object(dj.utils, "input", create=True, return_value="yes"): + trash.drop() + try: + trash.fetch() + raise Exception("Fetched after table dropped.") + except dj.DataJointError: + pass + finally: + dj.config["safemode"] = False + +def test_table_regexp(schema_any): + """Test whether table names are matched by regular expressions""" + + def relation_selector(attr): try: - trash.fetch() - raise Exception("Fetched after table dropped.") - except dj.DataJointError: - pass - finally: - dj.config["safemode"] = False - - def test_table_regexp(self, schema_any): - """Test whether table names are matched by regular expressions""" - - def relation_selector(attr): - try: - return issubclass(attr, Table) - except TypeError: - return False - - tiers = [dj.Imported, dj.Manual, dj.Lookup, dj.Computed] - for name, rel in getmembers(schema, relation_selector): - assert re.match( - rel.tier_regexp, rel.table_name - ) == "Regular expression does not match for {name}".format(name=name) - for tier in tiers: - assert issubclass(rel, tier) or not re.match( - tier.tier_regexp, rel.table_name - ), "Regular expression matches for {name} but should not".format(name=name) - - def test_table_size(self, experiment): - """test getting the size of the table and its indices in bytes""" - number_of_bytes = experiment.size_on_disk - assert isinstance(number_of_bytes, int) and number_of_bytes > 100 - - def test_repr_html(self, ephys): - assert ephys._repr_html_().strip().startswith(" 100 + +def test_repr_html(ephys): + assert ephys._repr_html_().strip().startswith(" Date: Wed, 13 Dec 2023 20:16:02 -0700 Subject: [PATCH 140/212] Fix typo in test --- tests/test_relation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_relation.py b/tests/test_relation.py index f03328886..6ef5de3c4 100644 --- a/tests/test_relation.py +++ b/tests/test_relation.py @@ -309,7 +309,7 @@ def relation_selector(attr): for name, rel in getmembers(schema, relation_selector): assert re.match( rel.tier_regexp, rel.table_name - ) == "Regular expression does not match for {name}".format(name=name) + ), "Regular expression does not match for {name}".format(name=name) for tier in tiers: 
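# Why the one-character fix above (`==` -> `,`) matters: with `==`, the assert
# compares re.match's return value (a Match object or None) against the message
# string, which is False in either case, so the assertion could never pass;
# with `,`, the string becomes the assertion's failure message, as intended.
# A minimal illustration with a hypothetical pattern/string pair:
#
#     assert re.match(r"\d+", "42") == "no digits"   # always fails: Match != str
#     assert re.match(r"\d+", "42"), "no digits"     # passes; message shown only
#                                                    # when the pattern does not match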
assert issubclass(rel, tier) or not re.match( tier.tier_regexp, rel.table_name From b7ce66879ee198bbe498d4b3ce9114bb3db7ba9e Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Wed, 13 Dec 2023 20:16:22 -0700 Subject: [PATCH 141/212] Format with black --- tests/test_relation.py | 39 ++++++++++++++++++++++++++++++--------- 1 file changed, 30 insertions(+), 9 deletions(-) diff --git a/tests/test_relation.py b/tests/test_relation.py index 6ef5de3c4..05f6fe7c8 100644 --- a/tests/test_relation.py +++ b/tests/test_relation.py @@ -77,21 +77,26 @@ def test_contents(user, subject): u = subject.fetch(order_by=["subject_id"]) assert list(u["subject_id"]) == sorted([s[0] for s in subject.contents]) + def test_misnamed_attribute1(user): with pytest.raises(dj.DataJointError): user.insert([dict(username="Bob"), dict(user="Alice")]) + def test_misnamed_attribute2(user): with pytest.raises(KeyError): user.insert1(dict(user="Bob")) + def test_extra_attribute1(user): with pytest.raises(KeyError): user.insert1(dict(username="Robert", spouse="Alice")) + def test_extra_attribute2(user): user.insert1(dict(username="Robert", spouse="Alice"), ignore_extra_fields=True) + def test_missing_definition(schema_any): class MissingDefinition(dj.Manual): definitions = """ # misspelled definition @@ -101,26 +106,29 @@ class MissingDefinition(dj.Manual): """ with pytest.raises(NotImplementedError): - schema_any( - MissingDefinition, context=dict(MissingDefinition=MissingDefinition) - ) + schema_any(MissingDefinition, context=dict(MissingDefinition=MissingDefinition)) + def test_empty_insert1(user): with pytest.raises(dj.DataJointError): user.insert1(()) + def test_empty_insert(user): with pytest.raises(dj.DataJointError): user.insert([()]) + def test_wrong_arguments_insert(user): with pytest.raises(dj.DataJointError): user.insert1(("First", "Second")) + def test_wrong_insert_type(user): with pytest.raises(dj.DataJointError): user.insert1(3) + def test_insert_select(subject): schema.TTest2.delete() schema.TTest2.insert(schema.TTest) @@ -138,6 +146,7 @@ def test_insert_select(subject): subject.insert(elements, ignore_extra_fields=True) assert len(subject) == 2 * original_length + def test_insert_pandas_roundtrip(schema_any): """ensure fetched frames can be inserted""" schema.TTest2.delete() @@ -149,6 +158,7 @@ def test_insert_pandas_roundtrip(schema_any): schema.TTest2.insert(df) assert len(schema.TTest2()) == n + def test_insert_pandas_userframe(schema_any): """ ensure simple user-created frames (1 field, non-custom index) @@ -163,12 +173,14 @@ def test_insert_pandas_userframe(schema_any): schema.TTest2.insert(df) assert len(schema.TTest2()) == n + def test_insert_select_ignore_extra_fields0(test, test_extra): """need ignore extra fields for insert select""" test_extra.insert1((test.fetch("key").max() + 1, 0, 0)) with pytest.raises(dj.DataJointError): test.insert(test_extra) + def test_insert_select_ignore_extra_fields1(test, test_extra): """make sure extra fields works in insert select""" test_extra.delete() @@ -177,11 +189,13 @@ def test_insert_select_ignore_extra_fields1(test, test_extra): test.insert(test_extra, ignore_extra_fields=True) assert keyno in test.fetch("key") + def test_insert_select_ignore_extra_fields2(test_no_extra, test): """make sure insert select still works when ignoring extra fields when there are none""" test_no_extra.delete() test_no_extra.insert(test, ignore_extra_fields=True) + def test_insert_select_ignore_extra_fields3(test, test_no_extra, test_extra): """make sure insert select works for from query 
result""" # Recreate table state from previous tests @@ -195,12 +209,14 @@ def test_insert_select_ignore_extra_fields3(test, test_no_extra, test_extra): keystr = str(test_extra.fetch("key").max()) test_no_extra.insert((test_extra & "`key`=" + keystr), ignore_extra_fields=True) + def test_skip_duplicates(test_no_extra, test): """test that skip_duplicates works when inserting from another table""" test_no_extra.delete() test_no_extra.insert(test, ignore_extra_fields=True, skip_duplicates=True) test_no_extra.insert(test, ignore_extra_fields=True, skip_duplicates=True) + def test_replace(subject): """ Test replacing or ignoring duplicate entries @@ -214,14 +230,13 @@ def test_replace(subject): dict(key, real_id=7, date_of_birth=date, subject_notes=""), skip_duplicates=True, ) - assert date != str( - (subject & key).fetch1("date_of_birth") - ), "inappropriate replace" + assert date != str((subject & key).fetch1("date_of_birth")), "inappropriate replace" subject.insert1( dict(key, real_id=7, date_of_birth=date, subject_notes=""), replace=True ) assert date == str((subject & key).fetch1("date_of_birth")), "replace failed" + def test_delete_quick(subject): """Tests quick deletion""" tmp = np.array( @@ -232,13 +247,12 @@ def test_delete_quick(subject): dtype=subject.heading.as_dtype, ) subject.insert(tmp) - s = subject & ( - "subject_id in (%s)" % ",".join(str(r) for r in tmp["subject_id"]) - ) + s = subject & ("subject_id in (%s)" % ",".join(str(r) for r in tmp["subject_id"])) assert len(s) == 2, "insert did not work." s.delete_quick() assert len(s) == 0, "delete did not work." + def test_skip_duplicate(subject): """Tests if duplicates are properly skipped.""" tmp = np.array( @@ -258,6 +272,7 @@ def test_skip_duplicate(subject): ) subject.insert(tmp, skip_duplicates=True) + def test_not_skip_duplicate(subject): """Tests if duplicates are not skipped.""" tmp = np.array( @@ -271,11 +286,13 @@ def test_not_skip_duplicate(subject): with pytest.raises(dj.errors.DuplicateError): subject.insert(tmp, skip_duplicates=False) + def test_no_error_suppression(test): """skip_duplicates=True should not suppress other errors""" with pytest.raises(dj.errors.MissingAttributeError): test.insert([dict(key=100)], skip_duplicates=True) + def test_blob_insert(img): """Tests inserting and retrieving blobs.""" X = np.random.randn(20, 10) @@ -283,6 +300,7 @@ def test_blob_insert(img): Y = img.fetch()[0]["img"] assert np.all(X == Y), "Inserted and retrieved image are not identical" + def test_drop(trash): """Tests dropping tables""" dj.config["safemode"] = True @@ -296,6 +314,7 @@ def test_drop(trash): finally: dj.config["safemode"] = False + def test_table_regexp(schema_any): """Test whether table names are matched by regular expressions""" @@ -315,10 +334,12 @@ def relation_selector(attr): tier.tier_regexp, rel.table_name ), "Regular expression matches for {name} but should not".format(name=name) + def test_table_size(experiment): """test getting the size of the table and its indices in bytes""" number_of_bytes = experiment.size_on_disk assert isinstance(number_of_bytes, int) and number_of_bytes > 100 + def test_repr_html(ephys): assert ephys._repr_html_().strip().startswith(" Date: Wed, 13 Dec 2023 20:19:31 -0700 Subject: [PATCH 142/212] Clean up --- tests/test_relation.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/tests/test_relation.py b/tests/test_relation.py index 05f6fe7c8..5f60b88eb 100644 --- a/tests/test_relation.py +++ b/tests/test_relation.py @@ -12,23 +12,17 @@ @pytest.fixture def test(schema_any): 
- assert len(schema.TTest.contents) yield schema.TTest() - assert len(schema.TTest.contents) @pytest.fixture def test_extra(schema_any): - assert len(schema.TTest.contents) yield schema.TTestExtra() - assert len(schema.TTest.contents) @pytest.fixture def test_no_extra(schema_any): - assert len(schema.TTest.contents) yield schema.TTestNoExtra() - assert len(schema.TTest.contents) @pytest.fixture From 3998cb48017b67640245a85bf2b257ad024a39fd Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 08:37:05 -0700 Subject: [PATCH 143/212] Use fixture for TTest2 --- tests/test_relation.py | 29 +++++++++++++++++------------ 1 file changed, 17 insertions(+), 12 deletions(-) diff --git a/tests/test_relation.py b/tests/test_relation.py index 5f60b88eb..4a13df448 100644 --- a/tests/test_relation.py +++ b/tests/test_relation.py @@ -15,6 +15,11 @@ def test(schema_any): yield schema.TTest() +@pytest.fixture +def test2(schema_any): + yield schema.TTest2() + + @pytest.fixture def test_extra(schema_any): yield schema.TTestExtra() @@ -123,10 +128,10 @@ def test_wrong_insert_type(user): user.insert1(3) -def test_insert_select(subject): - schema.TTest2.delete() - schema.TTest2.insert(schema.TTest) - assert len(schema.TTest2()) == len(schema.TTest()) +def test_insert_select(subject, test2): + test2.delete() + test2.insert(schema.TTest) + assert len(test2) == len(schema.TTest()) original_length = len(subject) elements = subject.proj(..., s="subject_id") @@ -141,31 +146,31 @@ def test_insert_select(subject): assert len(subject) == 2 * original_length -def test_insert_pandas_roundtrip(schema_any): +def test_insert_pandas_roundtrip(test2): """ensure fetched frames can be inserted""" - schema.TTest2.delete() + test2.delete() n = len(schema.TTest()) assert n > 0 df = schema.TTest.fetch(format="frame") assert isinstance(df, pandas.DataFrame) assert len(df) == n - schema.TTest2.insert(df) - assert len(schema.TTest2()) == n + test2.insert(df) + assert len(test2) == n -def test_insert_pandas_userframe(schema_any): +def test_insert_pandas_userframe(test2): """ ensure simple user-created frames (1 field, non-custom index) can be inserted without extra index adjustment """ - schema.TTest2.delete() + test2.delete() n = len(schema.TTest()) assert n > 0 df = pandas.DataFrame(schema.TTest.fetch()) assert isinstance(df, pandas.DataFrame) assert len(df) == n - schema.TTest2.insert(df) - assert len(schema.TTest2()) == n + test2.insert(df) + assert len(test2) == n def test_insert_select_ignore_extra_fields0(test, test_extra): From 3830c5589d789a04de6d5924b45d486a44323959 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 08:48:18 -0700 Subject: [PATCH 144/212] Use fixture for TTest --- tests/test_relation.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/tests/test_relation.py b/tests/test_relation.py index 4a13df448..2011a1901 100644 --- a/tests/test_relation.py +++ b/tests/test_relation.py @@ -128,10 +128,10 @@ def test_wrong_insert_type(user): user.insert1(3) -def test_insert_select(subject, test2): +def test_insert_select(subject, test, test2): test2.delete() - test2.insert(schema.TTest) - assert len(test2) == len(schema.TTest()) + test2.insert(test) + assert len(test2) == len(test) original_length = len(subject) elements = subject.proj(..., s="subject_id") @@ -146,27 +146,27 @@ def test_insert_select(subject, test2): assert len(subject) == 2 * original_length -def test_insert_pandas_roundtrip(test2): +def test_insert_pandas_roundtrip(test, test2): """ensure fetched 
frames can be inserted""" test2.delete() - n = len(schema.TTest()) + n = len(test) assert n > 0 - df = schema.TTest.fetch(format="frame") + df = test.fetch(format="frame") assert isinstance(df, pandas.DataFrame) assert len(df) == n test2.insert(df) assert len(test2) == n -def test_insert_pandas_userframe(test2): +def test_insert_pandas_userframe(test, test2): """ ensure simple user-created frames (1 field, non-custom index) can be inserted without extra index adjustment """ test2.delete() - n = len(schema.TTest()) + n = len(test) assert n > 0 - df = pandas.DataFrame(schema.TTest.fetch()) + df = pandas.DataFrame(test.fetch()) assert isinstance(df, pandas.DataFrame) assert len(df) == n test2.insert(df) From aa84da95fa417a717183db1b9fa3950479ca28fc Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 08:55:57 -0700 Subject: [PATCH 145/212] cp to tests --- tests/test_relational_operand.py | 672 +++++++++++++++++++++++++++++++ 1 file changed, 672 insertions(+) create mode 100644 tests/test_relational_operand.py diff --git a/tests/test_relational_operand.py b/tests/test_relational_operand.py new file mode 100644 index 000000000..0611ab267 --- /dev/null +++ b/tests/test_relational_operand.py @@ -0,0 +1,672 @@ +import random +import string +import pandas +import datetime + +import numpy as np +from nose.tools import ( + assert_equal, + assert_false, + assert_true, + raises, + assert_set_equal, + assert_list_equal, +) + +import datajoint as dj +from .schema_simple import ( + A, + B, + D, + E, + F, + L, + DataA, + DataB, + TTestUpdate, + IJ, + JI, + ReservedWord, + OutfitLaunch, +) +from .schema import ( + Experiment, + TTest3, + Trial, + Ephys, + Child, + Parent, + SubjectA, + SessionA, + SessionStatusA, + SessionDateA, +) + +from . import PREFIX, CONN_INFO + + +def setup(): + """ + module-level test setup + """ + A.insert(A.contents, skip_duplicates=True) + L.insert(L.contents, skip_duplicates=True) + B.populate() + D.populate() + E.populate() + Experiment.populate() + + +class TestRelational: + @staticmethod + def test_populate(): + assert_false(B().progress(display=False)[0], "B incompletely populated") + assert_false(D().progress(display=False)[0], "D incompletely populated") + assert_false(E().progress(display=False)[0], "E incompletely populated") + + assert_true(len(B()) == 40, "B populated incorrectly") + assert_true(len(B.C()) > 0, "C populated incorrectly") + assert_true(len(D()) == 40, "D populated incorrectly") + assert_true( + len(E()) == len(B()) * len(D()) / len(A()), "E populated incorrectly" + ) + assert_true(len(E.F()) > 0, "F populated incorrectly") + + @staticmethod + def test_free_relation(): + b = B() + free = dj.FreeTable(b.connection, b.full_table_name) + assert_true( + repr(free).startswith("FreeTable") and b.full_table_name in repr(free) + ) + r = "n>5" + assert_equal((B() & r).make_sql(), (free & r).make_sql()) + + @staticmethod + def test_rename(): + # test renaming + x = B().proj(i="id_a") & "i in (1,2,3,4)" + lenx = len(x) + assert_equal( + len(x), + len(B() & "id_a in (1,2,3,4)"), + "incorrect restriction of renamed attributes", + ) + assert_equal( + len(x & "id_b in (1,2)"), + len(B() & "id_b in (1,2) and id_a in (1,2,3,4)"), + "incorrect restriction of renamed restriction", + ) + assert_equal(len(x), lenx, "restriction modified original") + y = x.proj(j="i") + assert_equal( + len(y), + len(B() & "id_a in (1,2,3,4)"), + "incorrect projection of restriction", + ) + z = y & "j in (3, 4, 5, 6)" + assert_equal(len(z), len(B() & "id_a in (3,4)"), "incorrect 
nested subqueries") + + @staticmethod + def test_rename_order(): + """ + Renaming projection should not change the order of the primary key attributes. + See issues #483 and #516. + """ + pk1 = D.primary_key + pk2 = D.proj(a="id_a").primary_key + assert_list_equal(["a" if i == "id_a" else i for i in pk1], pk2) + + @staticmethod + def test_join(): + # Test cartesian product + x = A() + y = L() + rel = x * y + assert_equal(len(rel), len(x) * len(y), "incorrect join") + assert_equal( + set(x.heading.names).union(y.heading.names), + set(rel.heading.names), + "incorrect join heading", + ) + assert_equal( + set(x.primary_key).union(y.primary_key), + set(rel.primary_key), + "incorrect join primary_key", + ) + + # Test cartesian product of restricted relations + x = A() & "cond_in_a=1" + y = L() & "cond_in_l=1" + rel = x * y + assert_equal(len(rel), len(x) * len(y), "incorrect join") + assert_equal( + set(x.heading.names).union(y.heading.names), + set(rel.heading.names), + "incorrect join heading", + ) + assert_equal( + set(x.primary_key).union(y.primary_key), + set(rel.primary_key), + "incorrect join primary_key", + ) + + # Test join with common attributes + cond = A() & "cond_in_a=1" + x = B() & cond + y = D() + rel = x * y + assert_true(len(rel) >= len(x) and len(rel) >= len(y), "incorrect join") + assert_false(rel - cond, "incorrect join, restriction, or antijoin") + assert_equal( + set(x.heading.names).union(y.heading.names), + set(rel.heading.names), + "incorrect join heading", + ) + assert_equal( + set(x.primary_key).union(y.primary_key), + set(rel.primary_key), + "incorrect join primary_key", + ) + + # test renamed join + x = B().proj( + i="id_a" + ) # rename the common attribute to achieve full cartesian product + y = D() + rel = x * y + assert_equal(len(rel), len(x) * len(y), "incorrect join") + assert_equal( + set(x.heading.names).union(y.heading.names), + set(rel.heading.names), + "incorrect join heading", + ) + assert_equal( + set(x.primary_key).union(y.primary_key), + set(rel.primary_key), + "incorrect join primary_key", + ) + x = B().proj(a="id_a") + y = D() + rel = x * y + assert_equal(len(rel), len(x) * len(y), "incorrect join") + assert_equal( + set(x.heading.names).union(y.heading.names), + set(rel.heading.names), + "incorrect join heading", + ) + assert_equal( + set(x.primary_key).union(y.primary_key), + set(rel.primary_key), + "incorrect join primary_key", + ) + + # test pairing + # Approach 1: join then restrict + x = A.proj(a1="id_a", c1="cond_in_a") + y = A.proj(a2="id_a", c2="cond_in_a") + rel = x * y & "c1=0" & "c2=1" + lenx = len(x & "c1=0") + leny = len(y & "c2=1") + assert_equal(lenx + leny, len(A()), "incorrect restriction") + assert_equal(len(rel), len(x & "c1=0") * len(y & "c2=1"), "incorrect pairing") + # Approach 2: restrict then join + x = (A & "cond_in_a=0").proj(a1="id_a") + y = (A & "cond_in_a=1").proj(a2="id_a") + assert_equal(len(rel), len(x * y)) + + @staticmethod + def test_issue_376(): + tab = TTest3() + tab.delete_quick() + tab.insert(((1, "%%%"), (2, "one%"), (3, "one"))) + assert_equal(len(tab & 'value="%%%"'), 1) + assert_equal(len(tab & {"value": "%%%"}), 1) + assert_equal(len(tab & 'value like "o%"'), 2) + assert_equal(len(tab & 'value like "o%%"'), 2) + + @staticmethod + def test_issue_463(): + assert_equal(((A & B) * B).fetch().size, len(A * B)) + + @staticmethod + def test_project(): + x = A().proj(a="id_a") # rename + assert_equal(x.heading.names, ["a"], "renaming does not work") + x = A().proj(a="(id_a)") # extend + 
assert_equal(set(x.heading.names), set(("id_a", "a")), "extend does not work") + + # projection after restriction + cond = L() & "cond_in_l" + assert_equal( + len(D() & cond) + len(D() - cond), len(D()), "failed semijoin or antijoin" + ) + assert_equal( + len((D() & cond).proj()), + len((D() & cond)), + "projection failed: altered its argument" "s cardinality", + ) + + @staticmethod + def test_rename_non_dj_attribute(): + schema = PREFIX + "_test1" + connection = dj.conn(**CONN_INFO) + connection.query( + f"CREATE TABLE {schema}.test_table (oldID int PRIMARY KEY)" + ).fetchall() + mySchema = dj.VirtualModule(schema, schema) + assert ( + "oldID" + not in mySchema.TestTable.proj(new_name="oldID").heading.attributes.keys() + ), "Failed to rename attribute correctly" + connection.query(f"DROP TABLE {schema}.test_table") + + @staticmethod + def test_union(): + x = set(zip(*IJ.fetch("i", "j"))) + y = set(zip(*JI.fetch("i", "j"))) + assert_true( + len(x) > 0 and len(y) > 0 and len(IJ() * JI()) < len(x) + ) # ensure the IJ and JI are non-trivial + z = set(zip(*(IJ + JI).fetch("i", "j"))) # union + assert_set_equal(x.union(y), z) + assert_equal(len(IJ + JI), len(z)) + + @staticmethod + @raises(dj.DataJointError) + def test_outer_union_fail(): + """Union of two tables with different primary keys raises an error.""" + A() + B() + + @staticmethod + def test_outer_union_fail(): + """Union of two tables with different primary keys raises an error.""" + t = Trial + Ephys + t.fetch() + assert_set_equal( + set(t.heading.names), set(Trial.heading.names) | set(Ephys.heading.names) + ) + len(t) + + @staticmethod + def test_preview(): + with dj.config(display__limit=7): + x = A().proj(a="id_a") + s = x.preview() + assert_equal(len(s.split("\n")), len(x) + 2) + + @staticmethod + def test_heading_repr(): + x = A * D + s = repr(x.heading) + assert_equal( + len( + list( + 1 + for g in s.split("\n") + if g.strip() and not g.strip().startswith(("-", "#")) + ) + ), + len(x.heading.attributes), + ) + + @staticmethod + def test_aggregate(): + x = B().aggregate(B.C()) + assert_equal(len(x), len(B() & B.C())) + + x = B().aggregate(B.C(), keep_all_rows=True) + assert_equal(len(x), len(B())) # test LEFT join + + assert_equal( + len((x & "id_b=0").fetch()), len(B() & "id_b=0") + ) # test restricted aggregation + + x = B().aggregate( + B.C(), + "n", + count="count(id_c)", + mean="avg(value)", + max="max(value)", + keep_all_rows=True, + ) + assert_equal(len(x), len(B())) + y = x & "mean>0" # restricted aggregation + assert_true(len(y) > 0) + assert_true(all(y.fetch("mean") > 0)) + for n, count, mean, max_, key in zip( + *x.fetch("n", "count", "mean", "max", dj.key) + ): + assert_equal(n, count, "aggregation failed (count)") + values = (B.C() & key).fetch("value") + assert_true( + bool(len(values)) == bool(n), "aggregation failed (restriction)" + ) + if n: + assert_true( + np.isclose(mean, values.mean(), rtol=1e-4, atol=1e-5), + "aggregation failed (mean)", + ) + assert_true( + np.isclose(max_, values.max(), rtol=1e-4, atol=1e-5), + "aggregation failed (max)", + ) + + @staticmethod + def test_aggr(): + x = B.aggr(B.C) + l1 = len(x) + l2 = len(B & B.C) + assert_equal(l1, l2) + + x = B().aggr(B.C(), keep_all_rows=True) + assert_equal(len(x), len(B())) # test LEFT join + + assert_equal( + len((x & "id_b=0").fetch()), len(B() & "id_b=0") + ) # test restricted aggregation + + x = B().aggr( + B.C(), + "n", + count="count(id_c)", + mean="avg(value)", + max="max(value)", + keep_all_rows=True, + ) + assert_equal(len(x), len(B())) + y = 
x & "mean>0" # restricted aggregation + assert_true(len(y) > 0) + assert_true(all(y.fetch("mean") > 0)) + for n, count, mean, max_, key in zip( + *x.fetch("n", "count", "mean", "max", dj.key) + ): + assert_equal(n, count, "aggregation failed (count)") + values = (B.C() & key).fetch("value") + assert_true( + bool(len(values)) == bool(n), "aggregation failed (restriction)" + ) + if n: + assert_true( + np.isclose(mean, values.mean(), rtol=1e-4, atol=1e-5), + "aggregation failed (mean)", + ) + assert_true( + np.isclose(max_, values.max(), rtol=1e-4, atol=1e-5), + "aggregation failed (max)", + ) + + @staticmethod + def test_semijoin(): + """ + test that semijoins and antijoins are formed correctly + """ + x = IJ() + y = JI() + n = len(x & y.fetch(as_dict=True)) + m = len(x - y.fetch(as_dict=True)) + assert_true(n > 0 and m > 0) + assert_true(len(x) == m + n) + assert_true(len(x & y.fetch()) == n) + assert_true(len(x - y.fetch()) == m) + semi = x & y + anti = x - y + assert_true(len(semi) == n) + assert_true(len(anti) == m) + + @staticmethod + def test_pandas_fetch_and_restriction(): + q = L & "cond_in_l = 0" + df = q.fetch(format="frame") # pandas dataframe + assert_true(isinstance(df, pandas.DataFrame)) + assert_equal(len(E & q), len(E & df)) + + @staticmethod + def test_restriction_by_null(): + assert_true(len(Experiment & "username is null") > 0) + assert_true(len(Experiment & "username is not null") > 0) + + @staticmethod + def test_restriction_between(): # see issue + assert_true( + len(Experiment & 'username between "S" and "Z"') < len(Experiment()) + ) + + @staticmethod + def test_restrictions_by_lists(): + x = D() + y = L() & "cond_in_l" + + lenx = len(x) + assert_true( + lenx > 0 and len(y) > 0 and len(x & y) < len(x), "incorrect test setup" + ) + + assert_equal(len(D()), len(D & dj.AndList([]))) + assert_true(len(D & []) == 0) + assert_true(len(D & [[]]) == 0) # an OR-list of OR-list + + lenx = len(x) + assert_true( + lenx > 0 and len(y) > 0 and len(x & y) < len(x), "incorrect test setup" + ) + assert_equal(len(x & y), len(D * L & "cond_in_l"), "incorrect semijoin") + assert_equal(len(x - y), len(x) - len(x & y), "incorrect antijoin") + assert_equal(len(y - x), len(y) - len(y & x), "incorrect antijoin") + assert_true(len(x & []) == 0, "incorrect restriction by an empty list") + assert_true(len(x & ()) == 0, "incorrect restriction by an empty tuple") + assert_true(len(x & set()) == 0, "incorrect restriction by an empty set") + assert_equal(len(x - []), lenx, "incorrect restriction by an empty list") + assert_equal(len(x - ()), lenx, "incorrect restriction by an empty tuple") + assert_equal(len(x - set()), lenx, "incorrect restriction by an empty set") + assert_equal( + len(x & {}), lenx, "incorrect restriction by a tuple with no attributes" + ) + assert_true( + len(x - {}) == 0, "incorrect restriction by a tuple with no attributes" + ) + assert_equal( + len(x & {"foo": 0}), + lenx, + "incorrect restriction by a tuple with no matching attributes", + ) + assert_true( + len(x - {"foo": 0}) == 0, + "incorrect restriction by a tuple with no matching attributes", + ) + assert_equal(len(x & y), len(x & y.fetch()), "incorrect restriction by a list") + assert_equal(len(x - y), len(x - y.fetch()), "incorrect restriction by a list") + w = A() + assert_true(len(w) > 0, "incorrect test setup: w is empty") + assert_false( + bool(set(w.heading.names) & set(y.heading.names)), + "incorrect test setup: w and y should have no common attributes", + ) + assert_equal( + len(w), len(w & y), "incorrect 
restriction without common attributes" + ) + assert_true(len(w - y) == 0, "incorrect restriction without common attributes") + + @staticmethod + def test_datetime(): + """Test date retrieval""" + date = Experiment().fetch("experiment_date")[0] + e1 = Experiment() & dict(experiment_date=str(date)) + e2 = Experiment() & dict(experiment_date=date) + assert_true( + len(e1) == len(e2) > 0, "Two date restriction do not yield the same result" + ) + + @staticmethod + def test_date(): + """Test date update""" + # https://github.com/datajoint/datajoint-python/issues/664 + F.insert1((2, "2019-09-25")) + + new_value = None + F.update1(dict((F & "id=2").fetch1("KEY"), date=new_value)) + assert_equal((F & "id=2").fetch1("date"), new_value) + + new_value = datetime.date(2019, 10, 25) + F.update1(dict((F & "id=2").fetch1("KEY"), date=new_value)) + assert_equal((F & "id=2").fetch1("date"), new_value) + + F.update1(dict((F & "id=2").fetch1("KEY"), date=None)) + assert_equal((F & "id=2").fetch1("date"), None) + + @staticmethod + def test_join_project(): + """Test join of projected relations with matching non-primary key""" + q = DataA.proj() * DataB.proj() + assert_true( + len(q) == len(DataA()) == len(DataB()), + "Join of projected relations does not work", + ) + + @staticmethod + def test_ellipsis(): + r = Experiment.proj(..., "- data_path").head(1, as_dict=True) + assert_set_equal(set(Experiment.heading).difference(r[0]), {"data_path"}) + + @staticmethod + @raises(dj.DataJointError) + def test_update_single_key(): + """Test that only one row can be updated""" + TTestUpdate.update1( + dict(TTestUpdate.fetch1("KEY"), string_attr="my new string") + ) + + @staticmethod + @raises(dj.DataJointError) + def test_update_no_primary(): + """Test that no primary key can be updated""" + TTestUpdate.update1(dict(TTestUpdate.fetch1("KEY"), primary_key=2)) + + @staticmethod + @raises(dj.DataJointError) + def test_update_missing_attribute(): + """Test that attribute is in table""" + TTestUpdate.update1(dict(TTestUpdate.fetch1("KEY"), not_existing=2)) + + @staticmethod + def test_update_string_attribute(): + """Test replacing a string value""" + rel = TTestUpdate() & dict(primary_key=0) + s = "".join( + random.choice(string.ascii_uppercase + string.digits) for _ in range(10) + ) + TTestUpdate.update1(dict(rel.fetch1("KEY"), string_attr=s)) + assert_equal(s, rel.fetch1("string_attr"), "Updated string does not match") + + @staticmethod + def test_update_numeric_attribute(): + """Test replacing a string value""" + rel = TTestUpdate() & dict(primary_key=0) + s = random.randint(0, 10) + TTestUpdate.update1(dict(rel.fetch1("KEY"), num_attr=s)) + assert_equal(s, rel.fetch1("num_attr"), "Updated integer does not match") + TTestUpdate.update1(dict(rel.fetch1("KEY"), num_attr=None)) + assert_true(np.isnan(rel.fetch1("num_attr")), "Numeric value is not NaN") + + @staticmethod + def test_update_blob_attribute(): + """Test replacing a string value""" + rel = TTestUpdate() & dict(primary_key=0) + s = rel.fetch1("blob_attr") + TTestUpdate.update1(dict(rel.fetch1("KEY"), blob_attr=s.T)) + assert_equal( + s.T.shape, rel.fetch1("blob_attr").shape, "Array dimensions do not match" + ) + + @staticmethod + def test_reserved_words(): + """Test the user of SQL reserved words as attributes""" + rel = ReservedWord() + rel.insert1( + {"key": 1, "in": "ouch", "from": "bummer", "int": 3, "select": "major pain"} + ) + assert_true( + (rel & {"key": 1, "in": "ouch", "from": "bummer"}).fetch1("int") == 3 + ) + assert_true( + (rel.proj("int", 
double="from") & {"double": "bummer"}).fetch1("int") == 3 + ) + (rel & {"key": 1}).delete() + + @staticmethod + @raises(dj.DataJointError) + def test_reserved_words2(): + """Test the user of SQL reserved words as attributes""" + rel = ReservedWord() + rel.insert1( + {"key": 1, "in": "ouch", "from": "bummer", "int": 3, "select": "major pain"} + ) + (rel & "key=1").fetch( + "in" + ) # error because reserved word `key` is not in backquotes. See issue #249 + + @staticmethod + def test_permissive_join_basic(): + """Verify join compatibility check is skipped for join""" + Child @ Parent + + @staticmethod + def test_permissive_restriction_basic(): + """Verify join compatibility check is skipped for restriction""" + Child ^ Parent + + @staticmethod + def test_complex_date_restriction(): + # https://github.com/datajoint/datajoint-python/issues/892 + """Test a complex date restriction""" + q = OutfitLaunch & "day between curdate() - interval 30 day and curdate()" + assert len(q) == 1 + q = OutfitLaunch & "day between curdate() - interval 4 week and curdate()" + assert len(q) == 1 + q = OutfitLaunch & "day between curdate() - interval 1 month and curdate()" + assert len(q) == 1 + q = OutfitLaunch & "day between curdate() - interval 1 year and curdate()" + assert len(q) == 1 + q = OutfitLaunch & "`day` between curdate() - interval 30 day and curdate()" + assert len(q) == 1 + q.delete() + + @staticmethod + def test_null_dict_restriction(): + # https://github.com/datajoint/datajoint-python/issues/824 + """Test a restriction for null using dict""" + F.insert([dict(id=5)]) + q = F & dj.AndList([dict(id=5), "date is NULL"]) + assert len(q) == 1 + q = F & dict(id=5, date=None) + assert len(q) == 1 + + @staticmethod + def test_joins_with_aggregation(): + # https://github.com/datajoint/datajoint-python/issues/898 + # https://github.com/datajoint/datajoint-python/issues/899 + subjects = SubjectA.aggr( + SessionStatusA & 'status="trained_1a" or status="trained_1b"', + date_trained="min(date(session_start_time))", + ) + assert len(SessionDateA * subjects) == 4 + assert len(subjects * SessionDateA) == 4 + + subj_query = SubjectA.aggr( + SessionA * SessionStatusA & 'status="trained_1a" or status="trained_1b"', + date_trained="min(date(session_start_time))", + ) + session_dates = ( + SessionDateA * (subj_query & 'date_trained<"2020-12-21"') + ) & "session_date Date: Thu, 14 Dec 2023 08:56:12 -0700 Subject: [PATCH 146/212] nose2pytest test_relational_operand --- tests/test_relational_operand.py | 240 ++++++++++++++----------------- 1 file changed, 104 insertions(+), 136 deletions(-) diff --git a/tests/test_relational_operand.py b/tests/test_relational_operand.py index 0611ab267..63ecf3409 100644 --- a/tests/test_relational_operand.py +++ b/tests/test_relational_operand.py @@ -60,27 +60,23 @@ def setup(): class TestRelational: @staticmethod def test_populate(): - assert_false(B().progress(display=False)[0], "B incompletely populated") - assert_false(D().progress(display=False)[0], "D incompletely populated") - assert_false(E().progress(display=False)[0], "E incompletely populated") + assert not B().progress(display=False)[0], "B incompletely populated" + assert not D().progress(display=False)[0], "D incompletely populated" + assert not E().progress(display=False)[0], "E incompletely populated" - assert_true(len(B()) == 40, "B populated incorrectly") - assert_true(len(B.C()) > 0, "C populated incorrectly") - assert_true(len(D()) == 40, "D populated incorrectly") - assert_true( - len(E()) == len(B()) * len(D()) / 
len(A()), "E populated incorrectly" - ) - assert_true(len(E.F()) > 0, "F populated incorrectly") + assert len(B()) == 40, "B populated incorrectly" + assert len(B.C()) > 0, "C populated incorrectly" + assert len(D()) == 40, "D populated incorrectly" + assert len(E()) == len(B()) * len(D()) / len(A()), "E populated incorrectly" + assert len(E.F()) > 0, "F populated incorrectly" @staticmethod def test_free_relation(): b = B() free = dj.FreeTable(b.connection, b.full_table_name) - assert_true( - repr(free).startswith("FreeTable") and b.full_table_name in repr(free) - ) + assert repr(free).startswith("FreeTable") and b.full_table_name in repr(free) r = "n>5" - assert_equal((B() & r).make_sql(), (free & r).make_sql()) + assert (B() & r).make_sql() == (free & r).make_sql() @staticmethod def test_rename(): @@ -97,7 +93,7 @@ def test_rename(): len(B() & "id_b in (1,2) and id_a in (1,2,3,4)"), "incorrect restriction of renamed restriction", ) - assert_equal(len(x), lenx, "restriction modified original") + assert len(x) == lenx, "restriction modified original" y = x.proj(j="i") assert_equal( len(y), @@ -105,7 +101,7 @@ def test_rename(): "incorrect projection of restriction", ) z = y & "j in (3, 4, 5, 6)" - assert_equal(len(z), len(B() & "id_a in (3,4)"), "incorrect nested subqueries") + assert len(z) == len(B() & "id_a in (3,4)"), "incorrect nested subqueries" @staticmethod def test_rename_order(): @@ -115,7 +111,7 @@ def test_rename_order(): """ pk1 = D.primary_key pk2 = D.proj(a="id_a").primary_key - assert_list_equal(["a" if i == "id_a" else i for i in pk1], pk2) + assert ["a" if i == "id_a" else i for i in pk1] == pk2 @staticmethod def test_join(): @@ -123,7 +119,7 @@ def test_join(): x = A() y = L() rel = x * y - assert_equal(len(rel), len(x) * len(y), "incorrect join") + assert len(rel) == len(x) * len(y), "incorrect join" assert_equal( set(x.heading.names).union(y.heading.names), set(rel.heading.names), @@ -139,7 +135,7 @@ def test_join(): x = A() & "cond_in_a=1" y = L() & "cond_in_l=1" rel = x * y - assert_equal(len(rel), len(x) * len(y), "incorrect join") + assert len(rel) == len(x) * len(y), "incorrect join" assert_equal( set(x.heading.names).union(y.heading.names), set(rel.heading.names), @@ -156,8 +152,8 @@ def test_join(): x = B() & cond y = D() rel = x * y - assert_true(len(rel) >= len(x) and len(rel) >= len(y), "incorrect join") - assert_false(rel - cond, "incorrect join, restriction, or antijoin") + assert len(rel) >= len(x) and len(rel) >= len(y), "incorrect join" + assert not rel - cond, "incorrect join, restriction, or antijoin" assert_equal( set(x.heading.names).union(y.heading.names), set(rel.heading.names), @@ -175,7 +171,7 @@ def test_join(): ) # rename the common attribute to achieve full cartesian product y = D() rel = x * y - assert_equal(len(rel), len(x) * len(y), "incorrect join") + assert len(rel) == len(x) * len(y), "incorrect join" assert_equal( set(x.heading.names).union(y.heading.names), set(rel.heading.names), @@ -189,7 +185,7 @@ def test_join(): x = B().proj(a="id_a") y = D() rel = x * y - assert_equal(len(rel), len(x) * len(y), "incorrect join") + assert len(rel) == len(x) * len(y), "incorrect join" assert_equal( set(x.heading.names).union(y.heading.names), set(rel.heading.names), @@ -208,39 +204,38 @@ def test_join(): rel = x * y & "c1=0" & "c2=1" lenx = len(x & "c1=0") leny = len(y & "c2=1") - assert_equal(lenx + leny, len(A()), "incorrect restriction") - assert_equal(len(rel), len(x & "c1=0") * len(y & "c2=1"), "incorrect pairing") + assert lenx + leny == 
len(A()), "incorrect restriction" + assert len(rel) == len(x & "c1=0") * len(y & "c2=1"), "incorrect pairing" # Approach 2: restrict then join x = (A & "cond_in_a=0").proj(a1="id_a") y = (A & "cond_in_a=1").proj(a2="id_a") - assert_equal(len(rel), len(x * y)) + assert len(rel) == len(x * y) @staticmethod def test_issue_376(): tab = TTest3() tab.delete_quick() tab.insert(((1, "%%%"), (2, "one%"), (3, "one"))) - assert_equal(len(tab & 'value="%%%"'), 1) - assert_equal(len(tab & {"value": "%%%"}), 1) - assert_equal(len(tab & 'value like "o%"'), 2) - assert_equal(len(tab & 'value like "o%%"'), 2) + assert len(tab & 'value="%%%"') == 1 + assert len(tab & {"value": "%%%"}) == 1 + assert len(tab & 'value like "o%"') == 2 + assert len(tab & 'value like "o%%"') == 2 @staticmethod def test_issue_463(): - assert_equal(((A & B) * B).fetch().size, len(A * B)) + assert ((A & B) * B).fetch().size == len(A * B) @staticmethod def test_project(): x = A().proj(a="id_a") # rename - assert_equal(x.heading.names, ["a"], "renaming does not work") + assert x.heading.names == ["a"], "renaming does not work" x = A().proj(a="(id_a)") # extend - assert_equal(set(x.heading.names), set(("id_a", "a")), "extend does not work") + assert set(x.heading.names) == set(("id_a", "a")), "extend does not work" # projection after restriction cond = L() & "cond_in_l" - assert_equal( - len(D() & cond) + len(D() - cond), len(D()), "failed semijoin or antijoin" - ) + assert ( + len(D() & cond) + len(D() - cond) == len(D())), "failed semijoin or antijoin" assert_equal( len((D() & cond).proj()), len((D() & cond)), @@ -265,12 +260,10 @@ def test_rename_non_dj_attribute(): def test_union(): x = set(zip(*IJ.fetch("i", "j"))) y = set(zip(*JI.fetch("i", "j"))) - assert_true( - len(x) > 0 and len(y) > 0 and len(IJ() * JI()) < len(x) - ) # ensure the IJ and JI are non-trivial + assert len(x) > 0 and len(y) > 0 and len(IJ() * JI()) < len(x) # ensure the IJ and JI are non-trivial z = set(zip(*(IJ + JI).fetch("i", "j"))) # union - assert_set_equal(x.union(y), z) - assert_equal(len(IJ + JI), len(z)) + assert x.union(y) == z + assert len(IJ + JI) == len(z) @staticmethod @raises(dj.DataJointError) @@ -283,9 +276,8 @@ def test_outer_union_fail(): """Union of two tables with different primary keys raises an error.""" t = Trial + Ephys t.fetch() - assert_set_equal( - set(t.heading.names), set(Trial.heading.names) | set(Ephys.heading.names) - ) + assert ( + set(t.heading.names) == set(Trial.heading.names) | set(Ephys.heading.names)) len(t) @staticmethod @@ -293,34 +285,32 @@ def test_preview(): with dj.config(display__limit=7): x = A().proj(a="id_a") s = x.preview() - assert_equal(len(s.split("\n")), len(x) + 2) + assert len(s.split("\n")) == len(x) + 2 @staticmethod def test_heading_repr(): x = A * D s = repr(x.heading) - assert_equal( + assert ( len( list( 1 for g in s.split("\n") if g.strip() and not g.strip().startswith(("-", "#")) ) - ), - len(x.heading.attributes), - ) + ) == + len(x.heading.attributes)) @staticmethod def test_aggregate(): x = B().aggregate(B.C()) - assert_equal(len(x), len(B() & B.C())) + assert len(x) == len(B() & B.C()) x = B().aggregate(B.C(), keep_all_rows=True) - assert_equal(len(x), len(B())) # test LEFT join + assert len(x) == len(B()) # test LEFT join - assert_equal( - len((x & "id_b=0").fetch()), len(B() & "id_b=0") - ) # test restricted aggregation + assert ( + len((x & "id_b=0").fetch()) == len(B() & "id_b=0")) # test restricted aggregation x = B().aggregate( B.C(), @@ -330,18 +320,16 @@ def test_aggregate(): 
max="max(value)", keep_all_rows=True, ) - assert_equal(len(x), len(B())) + assert len(x) == len(B()) y = x & "mean>0" # restricted aggregation - assert_true(len(y) > 0) - assert_true(all(y.fetch("mean") > 0)) + assert len(y) > 0 + assert all(y.fetch("mean") > 0) for n, count, mean, max_, key in zip( *x.fetch("n", "count", "mean", "max", dj.key) ): - assert_equal(n, count, "aggregation failed (count)") + assert n == count, "aggregation failed (count)" values = (B.C() & key).fetch("value") - assert_true( - bool(len(values)) == bool(n), "aggregation failed (restriction)" - ) + assert bool(len(values)) == bool(n), "aggregation failed (restriction)" if n: assert_true( np.isclose(mean, values.mean(), rtol=1e-4, atol=1e-5), @@ -357,14 +345,13 @@ def test_aggr(): x = B.aggr(B.C) l1 = len(x) l2 = len(B & B.C) - assert_equal(l1, l2) + assert l1 == l2 x = B().aggr(B.C(), keep_all_rows=True) - assert_equal(len(x), len(B())) # test LEFT join + assert len(x) == len(B()) # test LEFT join - assert_equal( - len((x & "id_b=0").fetch()), len(B() & "id_b=0") - ) # test restricted aggregation + assert ( + len((x & "id_b=0").fetch()) == len(B() & "id_b=0")) # test restricted aggregation x = B().aggr( B.C(), @@ -374,18 +361,16 @@ def test_aggr(): max="max(value)", keep_all_rows=True, ) - assert_equal(len(x), len(B())) + assert len(x) == len(B()) y = x & "mean>0" # restricted aggregation - assert_true(len(y) > 0) - assert_true(all(y.fetch("mean") > 0)) + assert len(y) > 0 + assert all(y.fetch("mean") > 0) for n, count, mean, max_, key in zip( *x.fetch("n", "count", "mean", "max", dj.key) ): - assert_equal(n, count, "aggregation failed (count)") + assert n == count, "aggregation failed (count)" values = (B.C() & key).fetch("value") - assert_true( - bool(len(values)) == bool(n), "aggregation failed (restriction)" - ) + assert bool(len(values)) == bool(n), "aggregation failed (restriction)" if n: assert_true( np.isclose(mean, values.mean(), rtol=1e-4, atol=1e-5), @@ -405,32 +390,30 @@ def test_semijoin(): y = JI() n = len(x & y.fetch(as_dict=True)) m = len(x - y.fetch(as_dict=True)) - assert_true(n > 0 and m > 0) - assert_true(len(x) == m + n) - assert_true(len(x & y.fetch()) == n) - assert_true(len(x - y.fetch()) == m) + assert n > 0 and m > 0 + assert len(x) == m + n + assert len(x & y.fetch()) == n + assert len(x - y.fetch()) == m semi = x & y anti = x - y - assert_true(len(semi) == n) - assert_true(len(anti) == m) + assert len(semi) == n + assert len(anti) == m @staticmethod def test_pandas_fetch_and_restriction(): q = L & "cond_in_l = 0" df = q.fetch(format="frame") # pandas dataframe - assert_true(isinstance(df, pandas.DataFrame)) - assert_equal(len(E & q), len(E & df)) + assert isinstance(df, pandas.DataFrame) + assert len(E & q) == len(E & df) @staticmethod def test_restriction_by_null(): - assert_true(len(Experiment & "username is null") > 0) - assert_true(len(Experiment & "username is not null") > 0) + assert len(Experiment & "username is null") > 0 + assert len(Experiment & "username is not null") > 0 @staticmethod def test_restriction_between(): # see issue - assert_true( - len(Experiment & 'username between "S" and "Z"') < len(Experiment()) - ) + assert len(Experiment & 'username between "S" and "Z"') < len(Experiment()) @staticmethod def test_restrictions_by_lists(): @@ -438,33 +421,26 @@ def test_restrictions_by_lists(): y = L() & "cond_in_l" lenx = len(x) - assert_true( - lenx > 0 and len(y) > 0 and len(x & y) < len(x), "incorrect test setup" - ) + assert lenx > 0 and len(y) > 0 and len(x & y) < 
len(x), "incorrect test setup" - assert_equal(len(D()), len(D & dj.AndList([]))) - assert_true(len(D & []) == 0) - assert_true(len(D & [[]]) == 0) # an OR-list of OR-list + assert len(D()) == len(D & dj.AndList([])) + assert len(D & []) == 0 + assert len(D & [[]]) == 0 # an OR-list of OR-list lenx = len(x) - assert_true( - lenx > 0 and len(y) > 0 and len(x & y) < len(x), "incorrect test setup" - ) - assert_equal(len(x & y), len(D * L & "cond_in_l"), "incorrect semijoin") - assert_equal(len(x - y), len(x) - len(x & y), "incorrect antijoin") - assert_equal(len(y - x), len(y) - len(y & x), "incorrect antijoin") - assert_true(len(x & []) == 0, "incorrect restriction by an empty list") - assert_true(len(x & ()) == 0, "incorrect restriction by an empty tuple") - assert_true(len(x & set()) == 0, "incorrect restriction by an empty set") - assert_equal(len(x - []), lenx, "incorrect restriction by an empty list") - assert_equal(len(x - ()), lenx, "incorrect restriction by an empty tuple") - assert_equal(len(x - set()), lenx, "incorrect restriction by an empty set") - assert_equal( - len(x & {}), lenx, "incorrect restriction by a tuple with no attributes" - ) - assert_true( - len(x - {}) == 0, "incorrect restriction by a tuple with no attributes" - ) + assert lenx > 0 and len(y) > 0 and len(x & y) < len(x), "incorrect test setup" + assert len(x & y) == len(D * L & "cond_in_l"), "incorrect semijoin" + assert len(x - y) == len(x) - len(x & y), "incorrect antijoin" + assert len(y - x) == len(y) - len(y & x), "incorrect antijoin" + assert len(x & []) == 0, "incorrect restriction by an empty list" + assert len(x & ()) == 0, "incorrect restriction by an empty tuple" + assert len(x & set()) == 0, "incorrect restriction by an empty set" + assert len(x - []) == lenx, "incorrect restriction by an empty list" + assert len(x - ()) == lenx, "incorrect restriction by an empty tuple" + assert len(x - set()) == lenx, "incorrect restriction by an empty set" + assert ( + len(x & {}) == lenx), "incorrect restriction by a tuple with no attributes" + assert len(x - {}) == 0, "incorrect restriction by a tuple with no attributes" assert_equal( len(x & {"foo": 0}), lenx, @@ -474,18 +450,17 @@ def test_restrictions_by_lists(): len(x - {"foo": 0}) == 0, "incorrect restriction by a tuple with no matching attributes", ) - assert_equal(len(x & y), len(x & y.fetch()), "incorrect restriction by a list") - assert_equal(len(x - y), len(x - y.fetch()), "incorrect restriction by a list") + assert len(x & y) == len(x & y.fetch()), "incorrect restriction by a list" + assert len(x - y) == len(x - y.fetch()), "incorrect restriction by a list" w = A() - assert_true(len(w) > 0, "incorrect test setup: w is empty") + assert len(w) > 0, "incorrect test setup: w is empty" assert_false( bool(set(w.heading.names) & set(y.heading.names)), "incorrect test setup: w and y should have no common attributes", ) - assert_equal( - len(w), len(w & y), "incorrect restriction without common attributes" - ) - assert_true(len(w - y) == 0, "incorrect restriction without common attributes") + assert ( + len(w) == len(w & y)), "incorrect restriction without common attributes" + assert len(w - y) == 0, "incorrect restriction without common attributes" @staticmethod def test_datetime(): @@ -493,9 +468,7 @@ def test_datetime(): date = Experiment().fetch("experiment_date")[0] e1 = Experiment() & dict(experiment_date=str(date)) e2 = Experiment() & dict(experiment_date=date) - assert_true( - len(e1) == len(e2) > 0, "Two date restriction do not yield the same result" - 
) + assert len(e1) == len(e2) > 0, "Two date restriction do not yield the same result" @staticmethod def test_date(): @@ -505,14 +478,14 @@ def test_date(): new_value = None F.update1(dict((F & "id=2").fetch1("KEY"), date=new_value)) - assert_equal((F & "id=2").fetch1("date"), new_value) + assert (F & "id=2").fetch1("date") == new_value new_value = datetime.date(2019, 10, 25) F.update1(dict((F & "id=2").fetch1("KEY"), date=new_value)) - assert_equal((F & "id=2").fetch1("date"), new_value) + assert (F & "id=2").fetch1("date") == new_value F.update1(dict((F & "id=2").fetch1("KEY"), date=None)) - assert_equal((F & "id=2").fetch1("date"), None) + assert (F & "id=2").fetch1("date") == None @staticmethod def test_join_project(): @@ -526,7 +499,7 @@ def test_join_project(): @staticmethod def test_ellipsis(): r = Experiment.proj(..., "- data_path").head(1, as_dict=True) - assert_set_equal(set(Experiment.heading).difference(r[0]), {"data_path"}) + assert set(Experiment.heading).difference(r[0]) == {"data_path"} @staticmethod @raises(dj.DataJointError) @@ -556,7 +529,7 @@ def test_update_string_attribute(): random.choice(string.ascii_uppercase + string.digits) for _ in range(10) ) TTestUpdate.update1(dict(rel.fetch1("KEY"), string_attr=s)) - assert_equal(s, rel.fetch1("string_attr"), "Updated string does not match") + assert s == rel.fetch1("string_attr"), "Updated string does not match" @staticmethod def test_update_numeric_attribute(): @@ -564,9 +537,9 @@ def test_update_numeric_attribute(): rel = TTestUpdate() & dict(primary_key=0) s = random.randint(0, 10) TTestUpdate.update1(dict(rel.fetch1("KEY"), num_attr=s)) - assert_equal(s, rel.fetch1("num_attr"), "Updated integer does not match") + assert s == rel.fetch1("num_attr"), "Updated integer does not match" TTestUpdate.update1(dict(rel.fetch1("KEY"), num_attr=None)) - assert_true(np.isnan(rel.fetch1("num_attr")), "Numeric value is not NaN") + assert np.isnan(rel.fetch1("num_attr")), "Numeric value is not NaN" @staticmethod def test_update_blob_attribute(): @@ -574,9 +547,8 @@ def test_update_blob_attribute(): rel = TTestUpdate() & dict(primary_key=0) s = rel.fetch1("blob_attr") TTestUpdate.update1(dict(rel.fetch1("KEY"), blob_attr=s.T)) - assert_equal( - s.T.shape, rel.fetch1("blob_attr").shape, "Array dimensions do not match" - ) + assert ( + s.T.shape == rel.fetch1("blob_attr").shape), "Array dimensions do not match" @staticmethod def test_reserved_words(): @@ -585,12 +557,8 @@ def test_reserved_words(): rel.insert1( {"key": 1, "in": "ouch", "from": "bummer", "int": 3, "select": "major pain"} ) - assert_true( - (rel & {"key": 1, "in": "ouch", "from": "bummer"}).fetch1("int") == 3 - ) - assert_true( - (rel.proj("int", double="from") & {"double": "bummer"}).fetch1("int") == 3 - ) + assert (rel & {"key": 1, "in": "ouch", "from": "bummer"}).fetch1("int") == 3 + assert (rel.proj("int", double="from") & {"double": "bummer"}).fetch1("int") == 3 (rel & {"key": 1}).delete() @staticmethod From bad9a22f0dbfe4e0103bac78471515ca3373c147 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 09:56:12 -0700 Subject: [PATCH 147/212] First pass at migrating test_relational_operand --- tests/schema.py | 2 +- tests/test_relational_operand.py | 1046 +++++++++++++----------------- 2 files changed, 464 insertions(+), 584 deletions(-) diff --git a/tests/schema.py b/tests/schema.py index 81e5ac44c..3f31649cc 100644 --- a/tests/schema.py +++ b/tests/schema.py @@ -147,7 +147,7 @@ def make(self, key): from datetime import date, timedelta users = [None, None] + 
list(User().fetch()["username"]) - random.seed("Amazing Seed") + random.seed("Amazing Seed4") self.insert( dict( key, diff --git a/tests/test_relational_operand.py b/tests/test_relational_operand.py index 63ecf3409..635a5df9b 100644 --- a/tests/test_relational_operand.py +++ b/tests/test_relational_operand.py @@ -1,18 +1,9 @@ +import pytest import random import string import pandas import datetime - import numpy as np -from nose.tools import ( - assert_equal, - assert_false, - assert_true, - raises, - assert_set_equal, - assert_list_equal, -) - import datajoint as dj from .schema_simple import ( A, @@ -41,600 +32,489 @@ SessionStatusA, SessionDateA, ) - from . import PREFIX, CONN_INFO -def setup(): +@pytest.fixture +def schema_simp_pop(schema_simp): """ - module-level test setup + Schema simple with data populated. """ - A.insert(A.contents, skip_duplicates=True) - L.insert(L.contents, skip_duplicates=True) + og_a_contents = A.contents.copy() + og_l_contents = L.contents.copy() B.populate() D.populate() E.populate() - Experiment.populate() - + yield schema_simp + A.contents = og_a_contents + L.contents = og_l_contents -class TestRelational: - @staticmethod - def test_populate(): - assert not B().progress(display=False)[0], "B incompletely populated" - assert not D().progress(display=False)[0], "D incompletely populated" - assert not E().progress(display=False)[0], "E incompletely populated" - - assert len(B()) == 40, "B populated incorrectly" - assert len(B.C()) > 0, "C populated incorrectly" - assert len(D()) == 40, "D populated incorrectly" - assert len(E()) == len(B()) * len(D()) / len(A()), "E populated incorrectly" - assert len(E.F()) > 0, "F populated incorrectly" - - @staticmethod - def test_free_relation(): - b = B() - free = dj.FreeTable(b.connection, b.full_table_name) - assert repr(free).startswith("FreeTable") and b.full_table_name in repr(free) - r = "n>5" - assert (B() & r).make_sql() == (free & r).make_sql() - - @staticmethod - def test_rename(): - # test renaming - x = B().proj(i="id_a") & "i in (1,2,3,4)" - lenx = len(x) - assert_equal( - len(x), - len(B() & "id_a in (1,2,3,4)"), - "incorrect restriction of renamed attributes", - ) - assert_equal( - len(x & "id_b in (1,2)"), - len(B() & "id_b in (1,2) and id_a in (1,2,3,4)"), - "incorrect restriction of renamed restriction", - ) - assert len(x) == lenx, "restriction modified original" - y = x.proj(j="i") - assert_equal( - len(y), - len(B() & "id_a in (1,2,3,4)"), - "incorrect projection of restriction", - ) - z = y & "j in (3, 4, 5, 6)" - assert len(z) == len(B() & "id_a in (3,4)"), "incorrect nested subqueries" - - @staticmethod - def test_rename_order(): - """ - Renaming projection should not change the order of the primary key attributes. - See issues #483 and #516. 
- """ - pk1 = D.primary_key - pk2 = D.proj(a="id_a").primary_key - assert ["a" if i == "id_a" else i for i in pk1] == pk2 - - @staticmethod - def test_join(): - # Test cartesian product - x = A() - y = L() - rel = x * y - assert len(rel) == len(x) * len(y), "incorrect join" - assert_equal( - set(x.heading.names).union(y.heading.names), - set(rel.heading.names), - "incorrect join heading", - ) - assert_equal( - set(x.primary_key).union(y.primary_key), - set(rel.primary_key), - "incorrect join primary_key", - ) - # Test cartesian product of restricted relations - x = A() & "cond_in_a=1" - y = L() & "cond_in_l=1" - rel = x * y - assert len(rel) == len(x) * len(y), "incorrect join" - assert_equal( - set(x.heading.names).union(y.heading.names), - set(rel.heading.names), - "incorrect join heading", - ) - assert_equal( - set(x.primary_key).union(y.primary_key), - set(rel.primary_key), - "incorrect join primary_key", - ) - - # Test join with common attributes - cond = A() & "cond_in_a=1" - x = B() & cond - y = D() - rel = x * y - assert len(rel) >= len(x) and len(rel) >= len(y), "incorrect join" - assert not rel - cond, "incorrect join, restriction, or antijoin" - assert_equal( - set(x.heading.names).union(y.heading.names), - set(rel.heading.names), - "incorrect join heading", - ) - assert_equal( - set(x.primary_key).union(y.primary_key), - set(rel.primary_key), - "incorrect join primary_key", - ) - - # test renamed join - x = B().proj( - i="id_a" - ) # rename the common attribute to achieve full cartesian product - y = D() - rel = x * y - assert len(rel) == len(x) * len(y), "incorrect join" - assert_equal( - set(x.heading.names).union(y.heading.names), - set(rel.heading.names), - "incorrect join heading", - ) - assert_equal( - set(x.primary_key).union(y.primary_key), - set(rel.primary_key), - "incorrect join primary_key", - ) - x = B().proj(a="id_a") - y = D() - rel = x * y - assert len(rel) == len(x) * len(y), "incorrect join" - assert_equal( - set(x.heading.names).union(y.heading.names), - set(rel.heading.names), - "incorrect join heading", - ) - assert_equal( - set(x.primary_key).union(y.primary_key), - set(rel.primary_key), - "incorrect join primary_key", - ) - - # test pairing - # Approach 1: join then restrict - x = A.proj(a1="id_a", c1="cond_in_a") - y = A.proj(a2="id_a", c2="cond_in_a") - rel = x * y & "c1=0" & "c2=1" - lenx = len(x & "c1=0") - leny = len(y & "c2=1") - assert lenx + leny == len(A()), "incorrect restriction" - assert len(rel) == len(x & "c1=0") * len(y & "c2=1"), "incorrect pairing" - # Approach 2: restrict then join - x = (A & "cond_in_a=0").proj(a1="id_a") - y = (A & "cond_in_a=1").proj(a2="id_a") - assert len(rel) == len(x * y) - - @staticmethod - def test_issue_376(): - tab = TTest3() - tab.delete_quick() - tab.insert(((1, "%%%"), (2, "one%"), (3, "one"))) - assert len(tab & 'value="%%%"') == 1 - assert len(tab & {"value": "%%%"}) == 1 - assert len(tab & 'value like "o%"') == 2 - assert len(tab & 'value like "o%%"') == 2 - - @staticmethod - def test_issue_463(): - assert ((A & B) * B).fetch().size == len(A * B) - - @staticmethod - def test_project(): - x = A().proj(a="id_a") # rename - assert x.heading.names == ["a"], "renaming does not work" - x = A().proj(a="(id_a)") # extend - assert set(x.heading.names) == set(("id_a", "a")), "extend does not work" - - # projection after restriction - cond = L() & "cond_in_l" - assert ( - len(D() & cond) + len(D() - cond) == len(D())), "failed semijoin or antijoin" - assert_equal( - len((D() & cond).proj()), - len((D() & cond)), - 
"projection failed: altered its argument" "s cardinality", - ) - - @staticmethod - def test_rename_non_dj_attribute(): - schema = PREFIX + "_test1" - connection = dj.conn(**CONN_INFO) - connection.query( - f"CREATE TABLE {schema}.test_table (oldID int PRIMARY KEY)" - ).fetchall() - mySchema = dj.VirtualModule(schema, schema) - assert ( - "oldID" - not in mySchema.TestTable.proj(new_name="oldID").heading.attributes.keys() - ), "Failed to rename attribute correctly" - connection.query(f"DROP TABLE {schema}.test_table") - - @staticmethod - def test_union(): - x = set(zip(*IJ.fetch("i", "j"))) - y = set(zip(*JI.fetch("i", "j"))) - assert len(x) > 0 and len(y) > 0 and len(IJ() * JI()) < len(x) # ensure the IJ and JI are non-trivial - z = set(zip(*(IJ + JI).fetch("i", "j"))) # union - assert x.union(y) == z - assert len(IJ + JI) == len(z) - - @staticmethod - @raises(dj.DataJointError) - def test_outer_union_fail(): - """Union of two tables with different primary keys raises an error.""" +@pytest.fixture +def schema_any_pop(schema_any): + """ + Schema any with data populated. + """ + Experiment.populate() + yield schema_any + + +def test_populate(schema_simp_pop): + assert not B().progress(display=False)[0], "B incompletely populated" + assert not D().progress(display=False)[0], "D incompletely populated" + assert not E().progress(display=False)[0], "E incompletely populated" + + assert len(B()) == 40, "B populated incorrectly" + assert len(B.C()) > 0, "C populated incorrectly" + assert len(D()) == 40, "D populated incorrectly" + assert len(E()) == len(B()) * len(D()) / len(A()), "E populated incorrectly" + assert len(E.F()) > 0, "F populated incorrectly" + +def test_free_relation(schema_simp_pop): + b = B() + free = dj.FreeTable(b.connection, b.full_table_name) + assert repr(free).startswith("FreeTable") and b.full_table_name in repr(free) + r = "n>5" + assert (B() & r).make_sql() == (free & r).make_sql() + +def test_rename(schema_simp_pop): + # test renaming + x = B().proj(i="id_a") & "i in (1,2,3,4)" + lenx = len(x) + assert len(x) == len(B() & "id_a in (1,2,3,4)"), "incorrect restriction of renamed attributes" + assert len(x & "id_b in (1,2)") == len(B() & "id_b in (1,2) and id_a in (1,2,3,4)"), "incorrect restriction of renamed restriction" + assert len(x) == lenx, "restriction modified original" + y = x.proj(j="i") + assert len(y) == len(B() & "id_a in (1,2,3,4)"), "incorrect projection of restriction" + z = y & "j in (3, 4, 5, 6)" + assert len(z) == len(B() & "id_a in (3,4)"), "incorrect nested subqueries" + +def test_rename_order(schema_simp_pop): + """ + Renaming projection should not change the order of the primary key attributes. + See issues #483 and #516. 
+ """ + pk1 = D.primary_key + pk2 = D.proj(a="id_a").primary_key + assert ["a" if i == "id_a" else i for i in pk1] == pk2 + +def test_join(schema_simp_pop): + # Test cartesian product + x = A() + y = L() + rel = x * y + assert len(rel) == len(x) * len(y), "incorrect join" + assert set(x.heading.names).union(y.heading.names) == set(rel.heading.names), "incorrect join heading" + assert set(x.primary_key).union(y.primary_key) == set(rel.primary_key), "incorrect join primary_key" + + # Test cartesian product of restricted relations + x = A() & "cond_in_a=1" + y = L() & "cond_in_l=1" + rel = x * y + assert len(rel) == len(x) * len(y), "incorrect join" + assert set(x.heading.names).union(y.heading.names) == set(rel.heading.names), "incorrect join heading" + assert set(x.primary_key).union(y.primary_key) == set(rel.primary_key), "incorrect join primary_key" + + # Test join with common attributes + cond = A() & "cond_in_a=1" + x = B() & cond + y = D() + rel = x * y + assert len(rel) >= len(x) and len(rel) >= len(y), "incorrect join" + assert not rel - cond, "incorrect join, restriction, or antijoin" + assert set(x.heading.names).union(y.heading.names) == set(rel.heading.names), "incorrect join heading" + assert set(x.primary_key).union(y.primary_key) == set(rel.primary_key), "incorrect join primary_key" + + # test renamed join + x = B().proj( + i="id_a" + ) # rename the common attribute to achieve full cartesian product + y = D() + rel = x * y + assert len(rel) == len(x) * len(y), "incorrect join" + assert set(x.heading.names).union(y.heading.names) == set(rel.heading.names), "incorrect join heading" + assert set(x.primary_key).union(y.primary_key) == set(rel.primary_key), "incorrect join primary_key" + x = B().proj(a="id_a") + y = D() + rel = x * y + assert len(rel) == len(x) * len(y), "incorrect join" + assert set(x.heading.names).union(y.heading.names) == set(rel.heading.names), "incorrect join heading" + assert set(x.primary_key).union(y.primary_key) == set(rel.primary_key), "incorrect join primary_key" + + # test pairing + # Approach 1: join then restrict + x = A.proj(a1="id_a", c1="cond_in_a") + y = A.proj(a2="id_a", c2="cond_in_a") + rel = x * y & "c1=0" & "c2=1" + lenx = len(x & "c1=0") + leny = len(y & "c2=1") + assert lenx + leny == len(A()), "incorrect restriction" + assert len(rel) == len(x & "c1=0") * len(y & "c2=1"), "incorrect pairing" + # Approach 2: restrict then join + x = (A & "cond_in_a=0").proj(a1="id_a") + y = (A & "cond_in_a=1").proj(a2="id_a") + assert len(rel) == len(x * y) + +def test_issue_376(schema_any_pop): + tab = TTest3() + tab.delete_quick() + tab.insert(((1, "%%%"), (2, "one%"), (3, "one"))) + assert len(tab & 'value="%%%"') == 1 + assert len(tab & {"value": "%%%"}) == 1 + assert len(tab & 'value like "o%"') == 2 + assert len(tab & 'value like "o%%"') == 2 + +def test_issue_463(schema_simp_pop): + assert ((A & B) * B).fetch().size == len(A * B) + +def test_project(schema_simp_pop): + x = A().proj(a="id_a") # rename + assert x.heading.names == ["a"], "renaming does not work" + x = A().proj(a="(id_a)") # extend + assert set(x.heading.names) == set(("id_a", "a")), "extend does not work" + + # projection after restriction + cond = L() & "cond_in_l" + assert ( + len(D() & cond) + len(D() - cond) == len(D())), "failed semijoin or antijoin" + assert len((D() & cond).proj()) == len((D() & cond)), "projection failed: altered its argument" "s cardinality" + +def test_rename_non_dj_attribute(connection_test, schema_simp_pop, schema_any_pop): + schema = PREFIX + "_test1" + 
connection_test.query( + f"CREATE TABLE {schema}.test_table (oldID int PRIMARY KEY)" + ).fetchall() + mySchema = dj.VirtualModule(schema, schema) + assert ( + "oldID" + not in mySchema.TestTable.proj(new_name="oldID").heading.attributes.keys() + ), "Failed to rename attribute correctly" + connection_test.query(f"DROP TABLE {schema}.test_table") + +def test_union(schema_simp_pop): + x = set(zip(*IJ.fetch("i", "j"))) + y = set(zip(*JI.fetch("i", "j"))) + assert len(x) > 0 and len(y) > 0 and len(IJ() * JI()) < len(x) # ensure the IJ and JI are non-trivial + z = set(zip(*(IJ + JI).fetch("i", "j"))) # union + assert x.union(y) == z + assert len(IJ + JI) == len(z) + +def test_outer_union_fail(schema_simp_pop): + """Union of two tables with different primary keys raises an error.""" + with pytest.raises(dj.DataJointError): A() + B() - @staticmethod - def test_outer_union_fail(): - """Union of two tables with different primary keys raises an error.""" - t = Trial + Ephys - t.fetch() - assert ( - set(t.heading.names) == set(Trial.heading.names) | set(Ephys.heading.names)) - len(t) - - @staticmethod - def test_preview(): - with dj.config(display__limit=7): - x = A().proj(a="id_a") - s = x.preview() - assert len(s.split("\n")) == len(x) + 2 - - @staticmethod - def test_heading_repr(): - x = A * D - s = repr(x.heading) - assert ( - len( - list( - 1 - for g in s.split("\n") - if g.strip() and not g.strip().startswith(("-", "#")) - ) - ) == - len(x.heading.attributes)) - - @staticmethod - def test_aggregate(): - x = B().aggregate(B.C()) - assert len(x) == len(B() & B.C()) - - x = B().aggregate(B.C(), keep_all_rows=True) - assert len(x) == len(B()) # test LEFT join - - assert ( - len((x & "id_b=0").fetch()) == len(B() & "id_b=0")) # test restricted aggregation - - x = B().aggregate( - B.C(), - "n", - count="count(id_c)", - mean="avg(value)", - max="max(value)", - keep_all_rows=True, - ) - assert len(x) == len(B()) - y = x & "mean>0" # restricted aggregation - assert len(y) > 0 - assert all(y.fetch("mean") > 0) - for n, count, mean, max_, key in zip( - *x.fetch("n", "count", "mean", "max", dj.key) - ): - assert n == count, "aggregation failed (count)" - values = (B.C() & key).fetch("value") - assert bool(len(values)) == bool(n), "aggregation failed (restriction)" - if n: - assert_true( - np.isclose(mean, values.mean(), rtol=1e-4, atol=1e-5), - "aggregation failed (mean)", - ) - assert_true( - np.isclose(max_, values.max(), rtol=1e-4, atol=1e-5), - "aggregation failed (max)", - ) - - @staticmethod - def test_aggr(): - x = B.aggr(B.C) - l1 = len(x) - l2 = len(B & B.C) - assert l1 == l2 - - x = B().aggr(B.C(), keep_all_rows=True) - assert len(x) == len(B()) # test LEFT join - - assert ( - len((x & "id_b=0").fetch()) == len(B() & "id_b=0")) # test restricted aggregation - - x = B().aggr( - B.C(), - "n", - count="count(id_c)", - mean="avg(value)", - max="max(value)", - keep_all_rows=True, - ) - assert len(x) == len(B()) - y = x & "mean>0" # restricted aggregation - assert len(y) > 0 - assert all(y.fetch("mean") > 0) - for n, count, mean, max_, key in zip( - *x.fetch("n", "count", "mean", "max", dj.key) - ): - assert n == count, "aggregation failed (count)" - values = (B.C() & key).fetch("value") - assert bool(len(values)) == bool(n), "aggregation failed (restriction)" - if n: - assert_true( - np.isclose(mean, values.mean(), rtol=1e-4, atol=1e-5), - "aggregation failed (mean)", - ) - assert_true( - np.isclose(max_, values.max(), rtol=1e-4, atol=1e-5), - "aggregation failed (max)", - ) - - @staticmethod - def 
test_semijoin(): - """ - test that semijoins and antijoins are formed correctly - """ - x = IJ() - y = JI() - n = len(x & y.fetch(as_dict=True)) - m = len(x - y.fetch(as_dict=True)) - assert n > 0 and m > 0 - assert len(x) == m + n - assert len(x & y.fetch()) == n - assert len(x - y.fetch()) == m - semi = x & y - anti = x - y - assert len(semi) == n - assert len(anti) == m - - @staticmethod - def test_pandas_fetch_and_restriction(): - q = L & "cond_in_l = 0" - df = q.fetch(format="frame") # pandas dataframe - assert isinstance(df, pandas.DataFrame) - assert len(E & q) == len(E & df) - - @staticmethod - def test_restriction_by_null(): - assert len(Experiment & "username is null") > 0 - assert len(Experiment & "username is not null") > 0 - - @staticmethod - def test_restriction_between(): # see issue - assert len(Experiment & 'username between "S" and "Z"') < len(Experiment()) - - @staticmethod - def test_restrictions_by_lists(): - x = D() - y = L() & "cond_in_l" - - lenx = len(x) - assert lenx > 0 and len(y) > 0 and len(x & y) < len(x), "incorrect test setup" - - assert len(D()) == len(D & dj.AndList([])) - assert len(D & []) == 0 - assert len(D & [[]]) == 0 # an OR-list of OR-list - - lenx = len(x) - assert lenx > 0 and len(y) > 0 and len(x & y) < len(x), "incorrect test setup" - assert len(x & y) == len(D * L & "cond_in_l"), "incorrect semijoin" - assert len(x - y) == len(x) - len(x & y), "incorrect antijoin" - assert len(y - x) == len(y) - len(y & x), "incorrect antijoin" - assert len(x & []) == 0, "incorrect restriction by an empty list" - assert len(x & ()) == 0, "incorrect restriction by an empty tuple" - assert len(x & set()) == 0, "incorrect restriction by an empty set" - assert len(x - []) == lenx, "incorrect restriction by an empty list" - assert len(x - ()) == lenx, "incorrect restriction by an empty tuple" - assert len(x - set()) == lenx, "incorrect restriction by an empty set" - assert ( - len(x & {}) == lenx), "incorrect restriction by a tuple with no attributes" - assert len(x - {}) == 0, "incorrect restriction by a tuple with no attributes" - assert_equal( - len(x & {"foo": 0}), - lenx, - "incorrect restriction by a tuple with no matching attributes", - ) - assert_true( - len(x - {"foo": 0}) == 0, - "incorrect restriction by a tuple with no matching attributes", - ) - assert len(x & y) == len(x & y.fetch()), "incorrect restriction by a list" - assert len(x - y) == len(x - y.fetch()), "incorrect restriction by a list" - w = A() - assert len(w) > 0, "incorrect test setup: w is empty" - assert_false( - bool(set(w.heading.names) & set(y.heading.names)), - "incorrect test setup: w and y should have no common attributes", - ) - assert ( - len(w) == len(w & y)), "incorrect restriction without common attributes" - assert len(w - y) == 0, "incorrect restriction without common attributes" - - @staticmethod - def test_datetime(): - """Test date retrieval""" - date = Experiment().fetch("experiment_date")[0] - e1 = Experiment() & dict(experiment_date=str(date)) - e2 = Experiment() & dict(experiment_date=date) - assert len(e1) == len(e2) > 0, "Two date restriction do not yield the same result" - - @staticmethod - def test_date(): - """Test date update""" - # https://github.com/datajoint/datajoint-python/issues/664 - F.insert1((2, "2019-09-25")) - - new_value = None - F.update1(dict((F & "id=2").fetch1("KEY"), date=new_value)) - assert (F & "id=2").fetch1("date") == new_value - - new_value = datetime.date(2019, 10, 25) - F.update1(dict((F & "id=2").fetch1("KEY"), date=new_value)) - assert 
(F & "id=2").fetch1("date") == new_value - - F.update1(dict((F & "id=2").fetch1("KEY"), date=None)) - assert (F & "id=2").fetch1("date") == None - - @staticmethod - def test_join_project(): - """Test join of projected relations with matching non-primary key""" - q = DataA.proj() * DataB.proj() - assert_true( - len(q) == len(DataA()) == len(DataB()), - "Join of projected relations does not work", - ) - - @staticmethod - def test_ellipsis(): - r = Experiment.proj(..., "- data_path").head(1, as_dict=True) - assert set(Experiment.heading).difference(r[0]) == {"data_path"} - - @staticmethod - @raises(dj.DataJointError) - def test_update_single_key(): - """Test that only one row can be updated""" +def test_outer_union_fail(schema_any_pop): + """Union of two tables with different primary keys raises an error.""" + t = Trial + Ephys + t.fetch() + assert ( + set(t.heading.names) == set(Trial.heading.names) | set(Ephys.heading.names)) + len(t) + +def test_preview(schema_simp_pop): + with dj.config(display__limit=7): + x = A().proj(a="id_a") + s = x.preview() + assert len(s.split("\n")) == len(x) + 2 + +def test_heading_repr(schema_simp_pop): + x = A * D + s = repr(x.heading) + assert ( + len( + list( + 1 + for g in s.split("\n") + if g.strip() and not g.strip().startswith(("-", "#")) + ) + ) == + len(x.heading.attributes)) + +def test_aggregate(schema_simp_pop): + x = B().aggregate(B.C()) + assert len(x) == len(B() & B.C()) + + x = B().aggregate(B.C(), keep_all_rows=True) + assert len(x) == len(B()) # test LEFT join + + assert ( + len((x & "id_b=0").fetch()) == len(B() & "id_b=0")) # test restricted aggregation + + x = B().aggregate( + B.C(), + "n", + count="count(id_c)", + mean="avg(value)", + max="max(value)", + keep_all_rows=True, + ) + assert len(x) == len(B()) + y = x & "mean>0" # restricted aggregation + assert len(y) > 0 + assert all(y.fetch("mean") > 0) + for n, count, mean, max_, key in zip( + *x.fetch("n", "count", "mean", "max", dj.key) + ): + assert n == count, "aggregation failed (count)" + values = (B.C() & key).fetch("value") + assert bool(len(values)) == bool(n), "aggregation failed (restriction)" + if n: + assert np.isclose(mean, values.mean(), rtol=1e-4, atol=1e-5), "aggregation failed (mean)" + assert np.isclose(max_, values.max(), rtol=1e-4, atol=1e-5), "aggregation failed (max)" + +def test_aggr(schema_simp_pop): + x = B.aggr(B.C) + l1 = len(x) + l2 = len(B & B.C) + assert l1 == l2 + + x = B().aggr(B.C(), keep_all_rows=True) + assert len(x) == len(B()) # test LEFT join + + assert ( + len((x & "id_b=0").fetch()) == len(B() & "id_b=0")) # test restricted aggregation + + x = B().aggr( + B.C(), + "n", + count="count(id_c)", + mean="avg(value)", + max="max(value)", + keep_all_rows=True, + ) + assert len(x) == len(B()) + y = x & "mean>0" # restricted aggregation + assert len(y) > 0 + assert all(y.fetch("mean") > 0) + for n, count, mean, max_, key in zip( + *x.fetch("n", "count", "mean", "max", dj.key) + ): + assert n == count, "aggregation failed (count)" + values = (B.C() & key).fetch("value") + assert bool(len(values)) == bool(n), "aggregation failed (restriction)" + if n: + assert np.isclose(mean, values.mean(), rtol=1e-4, atol=1e-5), "aggregation failed (mean)" + assert np.isclose(max_, values.max(), rtol=1e-4, atol=1e-5), "aggregation failed (max)" + +def test_semijoin(schema_simp_pop): + """ + test that semijoins and antijoins are formed correctly + """ + x = IJ() + y = JI() + n = len(x & y.fetch(as_dict=True)) + m = len(x - y.fetch(as_dict=True)) + assert n > 0 and m > 0 + 
assert len(x) == m + n
+    assert len(x & y.fetch()) == n
+    assert len(x - y.fetch()) == m
+    semi = x & y
+    anti = x - y
+    assert len(semi) == n
+    assert len(anti) == m
+
+def test_pandas_fetch_and_restriction(schema_simp_pop):
+    q = L & "cond_in_l = 0"
+    df = q.fetch(format="frame")  # pandas dataframe
+    assert isinstance(df, pandas.DataFrame)
+    assert len(E & q) == len(E & df)
+
+def test_restriction_by_null(schema_any_pop):
+    assert len(Experiment & "username is null") > 0
+    assert len(Experiment & "username is not null") > 0
+
+def test_restriction_between(schema_any_pop):  # see issue
+    assert len(Experiment & 'username between "S" and "Z"') < len(Experiment())
+
+def test_restrictions_by_lists(schema_simp_pop):
+    x = D()
+    y = L() & "cond_in_l"
+
+    lenx = len(x)
+    assert lenx > 0 and len(y) > 0 and len(x & y) < len(x), "incorrect test setup"
+
+    assert len(D()) == len(D & dj.AndList([]))
+    assert len(D & []) == 0
+    assert len(D & [[]]) == 0  # an OR-list of OR-lists
+
+    lenx = len(x)
+    assert lenx > 0 and len(y) > 0 and len(x & y) < len(x), "incorrect test setup"
+    assert len(x & y) == len(D * L & "cond_in_l"), "incorrect semijoin"
+    assert len(x - y) == len(x) - len(x & y), "incorrect antijoin"
+    assert len(y - x) == len(y) - len(y & x), "incorrect antijoin"
+    assert len(x & []) == 0, "incorrect restriction by an empty list"
+    assert len(x & ()) == 0, "incorrect restriction by an empty tuple"
+    assert len(x & set()) == 0, "incorrect restriction by an empty set"
+    assert len(x - []) == lenx, "incorrect restriction by an empty list"
+    assert len(x - ()) == lenx, "incorrect restriction by an empty tuple"
+    assert len(x - set()) == lenx, "incorrect restriction by an empty set"
+    assert (
+        len(x & {}) == lenx), "incorrect restriction by a tuple with no attributes"
+    assert len(x - {}) == 0, "incorrect restriction by a tuple with no attributes"
+    assert len(x & {"foo": 0}) == lenx, "incorrect restriction by a tuple with no matching attributes"
+    assert len(x - {"foo": 0}) == 0, "incorrect restriction by a tuple with no matching attributes"
+    assert len(x & y) == len(x & y.fetch()), "incorrect restriction by a list"
+    assert len(x - y) == len(x - y.fetch()), "incorrect restriction by a list"
+    w = A()
+    assert len(w) > 0, "incorrect test setup: w is empty"
+    assert not (set(w.heading.names) & set(y.heading.names)), "incorrect test setup: w and y should have no common attributes"
+    assert (
+        len(w) == len(w & y)), "incorrect restriction without common attributes"
+    assert len(w - y) == 0, "incorrect restriction without common attributes"
+
+def test_datetime(schema_any_pop):
+    """Test date retrieval"""
+    date = Experiment().fetch("experiment_date")[0]
+    e1 = Experiment() & dict(experiment_date=str(date))
+    e2 = Experiment() & dict(experiment_date=date)
+    assert len(e1) == len(e2) > 0, "Two date restrictions do not yield the same result"
+
+def test_date(schema_simp_pop):
+    """Test date update"""
+    # https://github.com/datajoint/datajoint-python/issues/664
+    F.insert1((2, "2019-09-25"))
+
+    new_value = None
+    F.update1(dict((F & "id=2").fetch1("KEY"), date=new_value))
+    assert (F & "id=2").fetch1("date") == new_value
+
+    new_value = datetime.date(2019, 10, 25)
+    F.update1(dict((F & "id=2").fetch1("KEY"), date=new_value))
+    assert (F & "id=2").fetch1("date") == new_value
+
+    F.update1(dict((F & "id=2").fetch1("KEY"), date=None))
+    assert (F & "id=2").fetch1("date") is None
+
+def test_join_project(schema_simp_pop):
+    """Test join of projected relations with matching non-primary key"""
+    q =
DataA.proj() * DataB.proj() + assert len(q) == len(DataA()) == len(DataB()), "Join of projected relations does not work" + +def test_ellipsis(schema_any_pop): + r = Experiment.proj(..., "- data_path").head(1, as_dict=True) + assert set(Experiment.heading).difference(r[0]) == {"data_path"} + +def test_update_single_key(schema_simp_pop): + """Test that only one row can be updated""" + with pytest.raises(dj.DataJointError): TTestUpdate.update1( dict(TTestUpdate.fetch1("KEY"), string_attr="my new string") ) - @staticmethod - @raises(dj.DataJointError) - def test_update_no_primary(): - """Test that no primary key can be updated""" +def test_update_no_primary(schema_simp_pop): + """Test that no primary key can be updated""" + with pytest.raises(dj.DataJointError): TTestUpdate.update1(dict(TTestUpdate.fetch1("KEY"), primary_key=2)) - @staticmethod - @raises(dj.DataJointError) - def test_update_missing_attribute(): - """Test that attribute is in table""" +def test_update_missing_attribute(schema_simp_pop): + """Test that attribute is in table""" + with pytest.raises(dj.DataJointError): TTestUpdate.update1(dict(TTestUpdate.fetch1("KEY"), not_existing=2)) - @staticmethod - def test_update_string_attribute(): - """Test replacing a string value""" - rel = TTestUpdate() & dict(primary_key=0) - s = "".join( - random.choice(string.ascii_uppercase + string.digits) for _ in range(10) - ) - TTestUpdate.update1(dict(rel.fetch1("KEY"), string_attr=s)) - assert s == rel.fetch1("string_attr"), "Updated string does not match" - - @staticmethod - def test_update_numeric_attribute(): - """Test replacing a string value""" - rel = TTestUpdate() & dict(primary_key=0) - s = random.randint(0, 10) - TTestUpdate.update1(dict(rel.fetch1("KEY"), num_attr=s)) - assert s == rel.fetch1("num_attr"), "Updated integer does not match" - TTestUpdate.update1(dict(rel.fetch1("KEY"), num_attr=None)) - assert np.isnan(rel.fetch1("num_attr")), "Numeric value is not NaN" - - @staticmethod - def test_update_blob_attribute(): - """Test replacing a string value""" - rel = TTestUpdate() & dict(primary_key=0) - s = rel.fetch1("blob_attr") - TTestUpdate.update1(dict(rel.fetch1("KEY"), blob_attr=s.T)) - assert ( - s.T.shape == rel.fetch1("blob_attr").shape), "Array dimensions do not match" - - @staticmethod - def test_reserved_words(): - """Test the user of SQL reserved words as attributes""" - rel = ReservedWord() - rel.insert1( - {"key": 1, "in": "ouch", "from": "bummer", "int": 3, "select": "major pain"} - ) - assert (rel & {"key": 1, "in": "ouch", "from": "bummer"}).fetch1("int") == 3 - assert (rel.proj("int", double="from") & {"double": "bummer"}).fetch1("int") == 3 - (rel & {"key": 1}).delete() - - @staticmethod - @raises(dj.DataJointError) - def test_reserved_words2(): - """Test the user of SQL reserved words as attributes""" - rel = ReservedWord() - rel.insert1( - {"key": 1, "in": "ouch", "from": "bummer", "int": 3, "select": "major pain"} - ) +def test_update_string_attribute(schema_simp_pop): + """Test replacing a string value""" + rel = TTestUpdate() & dict(primary_key=0) + s = "".join( + random.choice(string.ascii_uppercase + string.digits) for _ in range(10) + ) + TTestUpdate.update1(dict(rel.fetch1("KEY"), string_attr=s)) + assert s == rel.fetch1("string_attr"), "Updated string does not match" + +def test_update_numeric_attribute(schema_simp_pop): + """Test replacing a string value""" + rel = TTestUpdate() & dict(primary_key=0) + s = random.randint(0, 10) + TTestUpdate.update1(dict(rel.fetch1("KEY"), num_attr=s)) + assert s == 
rel.fetch1("num_attr"), "Updated integer does not match" + TTestUpdate.update1(dict(rel.fetch1("KEY"), num_attr=None)) + assert np.isnan(rel.fetch1("num_attr")), "Numeric value is not NaN" + +def test_update_blob_attribute(schema_simp_pop): + """Test replacing a string value""" + rel = TTestUpdate() & dict(primary_key=0) + s = rel.fetch1("blob_attr") + TTestUpdate.update1(dict(rel.fetch1("KEY"), blob_attr=s.T)) + assert ( + s.T.shape == rel.fetch1("blob_attr").shape), "Array dimensions do not match" + +def test_reserved_words(schema_simp_pop): + """Test the user of SQL reserved words as attributes""" + rel = ReservedWord() + rel.insert1( + {"key": 1, "in": "ouch", "from": "bummer", "int": 3, "select": "major pain"} + ) + assert (rel & {"key": 1, "in": "ouch", "from": "bummer"}).fetch1("int") == 3 + assert (rel.proj("int", double="from") & {"double": "bummer"}).fetch1("int") == 3 + (rel & {"key": 1}).delete() + +def test_reserved_words2(schema_simp_pop): + """Test the user of SQL reserved words as attributes""" + rel = ReservedWord() + rel.insert1( + {"key": 1, "in": "ouch", "from": "bummer", "int": 3, "select": "major pain"} + ) + with pytest.raises(dj.DataJointError): (rel & "key=1").fetch( "in" ) # error because reserved word `key` is not in backquotes. See issue #249 - @staticmethod - def test_permissive_join_basic(): - """Verify join compatibility check is skipped for join""" - Child @ Parent - - @staticmethod - def test_permissive_restriction_basic(): - """Verify join compatibility check is skipped for restriction""" - Child ^ Parent - - @staticmethod - def test_complex_date_restriction(): - # https://github.com/datajoint/datajoint-python/issues/892 - """Test a complex date restriction""" - q = OutfitLaunch & "day between curdate() - interval 30 day and curdate()" - assert len(q) == 1 - q = OutfitLaunch & "day between curdate() - interval 4 week and curdate()" - assert len(q) == 1 - q = OutfitLaunch & "day between curdate() - interval 1 month and curdate()" - assert len(q) == 1 - q = OutfitLaunch & "day between curdate() - interval 1 year and curdate()" - assert len(q) == 1 - q = OutfitLaunch & "`day` between curdate() - interval 30 day and curdate()" - assert len(q) == 1 - q.delete() - - @staticmethod - def test_null_dict_restriction(): - # https://github.com/datajoint/datajoint-python/issues/824 - """Test a restriction for null using dict""" - F.insert([dict(id=5)]) - q = F & dj.AndList([dict(id=5), "date is NULL"]) - assert len(q) == 1 - q = F & dict(id=5, date=None) - assert len(q) == 1 - - @staticmethod - def test_joins_with_aggregation(): - # https://github.com/datajoint/datajoint-python/issues/898 - # https://github.com/datajoint/datajoint-python/issues/899 - subjects = SubjectA.aggr( - SessionStatusA & 'status="trained_1a" or status="trained_1b"', - date_trained="min(date(session_start_time))", - ) - assert len(SessionDateA * subjects) == 4 - assert len(subjects * SessionDateA) == 4 - - subj_query = SubjectA.aggr( - SessionA * SessionStatusA & 'status="trained_1a" or status="trained_1b"', - date_trained="min(date(session_start_time))", - ) - session_dates = ( - SessionDateA * (subj_query & 'date_trained<"2020-12-21"') - ) & "session_date Date: Thu, 14 Dec 2023 10:02:23 -0700 Subject: [PATCH 148/212] Format with black --- tests/test_relational_operand.py | 178 ++++++++++++++++++++++--------- 1 file changed, 127 insertions(+), 51 deletions(-) diff --git a/tests/test_relational_operand.py b/tests/test_relational_operand.py index 635a5df9b..06adee5c8 100644 --- 
a/tests/test_relational_operand.py +++ b/tests/test_relational_operand.py @@ -70,6 +70,7 @@ def test_populate(schema_simp_pop): assert len(E()) == len(B()) * len(D()) / len(A()), "E populated incorrectly" assert len(E.F()) > 0, "F populated incorrectly" + def test_free_relation(schema_simp_pop): b = B() free = dj.FreeTable(b.connection, b.full_table_name) @@ -77,18 +78,26 @@ def test_free_relation(schema_simp_pop): r = "n>5" assert (B() & r).make_sql() == (free & r).make_sql() + def test_rename(schema_simp_pop): # test renaming x = B().proj(i="id_a") & "i in (1,2,3,4)" lenx = len(x) - assert len(x) == len(B() & "id_a in (1,2,3,4)"), "incorrect restriction of renamed attributes" - assert len(x & "id_b in (1,2)") == len(B() & "id_b in (1,2) and id_a in (1,2,3,4)"), "incorrect restriction of renamed restriction" + assert len(x) == len( + B() & "id_a in (1,2,3,4)" + ), "incorrect restriction of renamed attributes" + assert len(x & "id_b in (1,2)") == len( + B() & "id_b in (1,2) and id_a in (1,2,3,4)" + ), "incorrect restriction of renamed restriction" assert len(x) == lenx, "restriction modified original" y = x.proj(j="i") - assert len(y) == len(B() & "id_a in (1,2,3,4)"), "incorrect projection of restriction" + assert len(y) == len( + B() & "id_a in (1,2,3,4)" + ), "incorrect projection of restriction" z = y & "j in (3, 4, 5, 6)" assert len(z) == len(B() & "id_a in (3,4)"), "incorrect nested subqueries" + def test_rename_order(schema_simp_pop): """ Renaming projection should not change the order of the primary key attributes. @@ -98,22 +107,31 @@ def test_rename_order(schema_simp_pop): pk2 = D.proj(a="id_a").primary_key assert ["a" if i == "id_a" else i for i in pk1] == pk2 + def test_join(schema_simp_pop): # Test cartesian product x = A() y = L() rel = x * y assert len(rel) == len(x) * len(y), "incorrect join" - assert set(x.heading.names).union(y.heading.names) == set(rel.heading.names), "incorrect join heading" - assert set(x.primary_key).union(y.primary_key) == set(rel.primary_key), "incorrect join primary_key" + assert set(x.heading.names).union(y.heading.names) == set( + rel.heading.names + ), "incorrect join heading" + assert set(x.primary_key).union(y.primary_key) == set( + rel.primary_key + ), "incorrect join primary_key" # Test cartesian product of restricted relations x = A() & "cond_in_a=1" y = L() & "cond_in_l=1" rel = x * y assert len(rel) == len(x) * len(y), "incorrect join" - assert set(x.heading.names).union(y.heading.names) == set(rel.heading.names), "incorrect join heading" - assert set(x.primary_key).union(y.primary_key) == set(rel.primary_key), "incorrect join primary_key" + assert set(x.heading.names).union(y.heading.names) == set( + rel.heading.names + ), "incorrect join heading" + assert set(x.primary_key).union(y.primary_key) == set( + rel.primary_key + ), "incorrect join primary_key" # Test join with common attributes cond = A() & "cond_in_a=1" @@ -122,8 +140,12 @@ def test_join(schema_simp_pop): rel = x * y assert len(rel) >= len(x) and len(rel) >= len(y), "incorrect join" assert not rel - cond, "incorrect join, restriction, or antijoin" - assert set(x.heading.names).union(y.heading.names) == set(rel.heading.names), "incorrect join heading" - assert set(x.primary_key).union(y.primary_key) == set(rel.primary_key), "incorrect join primary_key" + assert set(x.heading.names).union(y.heading.names) == set( + rel.heading.names + ), "incorrect join heading" + assert set(x.primary_key).union(y.primary_key) == set( + rel.primary_key + ), "incorrect join primary_key" # test 
renamed join x = B().proj( @@ -132,14 +154,22 @@ def test_join(schema_simp_pop): y = D() rel = x * y assert len(rel) == len(x) * len(y), "incorrect join" - assert set(x.heading.names).union(y.heading.names) == set(rel.heading.names), "incorrect join heading" - assert set(x.primary_key).union(y.primary_key) == set(rel.primary_key), "incorrect join primary_key" + assert set(x.heading.names).union(y.heading.names) == set( + rel.heading.names + ), "incorrect join heading" + assert set(x.primary_key).union(y.primary_key) == set( + rel.primary_key + ), "incorrect join primary_key" x = B().proj(a="id_a") y = D() rel = x * y assert len(rel) == len(x) * len(y), "incorrect join" - assert set(x.heading.names).union(y.heading.names) == set(rel.heading.names), "incorrect join heading" - assert set(x.primary_key).union(y.primary_key) == set(rel.primary_key), "incorrect join primary_key" + assert set(x.heading.names).union(y.heading.names) == set( + rel.heading.names + ), "incorrect join heading" + assert set(x.primary_key).union(y.primary_key) == set( + rel.primary_key + ), "incorrect join primary_key" # test pairing # Approach 1: join then restrict @@ -155,6 +185,7 @@ def test_join(schema_simp_pop): y = (A & "cond_in_a=1").proj(a2="id_a") assert len(rel) == len(x * y) + def test_issue_376(schema_any_pop): tab = TTest3() tab.delete_quick() @@ -164,9 +195,11 @@ def test_issue_376(schema_any_pop): assert len(tab & 'value like "o%"') == 2 assert len(tab & 'value like "o%%"') == 2 + def test_issue_463(schema_simp_pop): assert ((A & B) * B).fetch().size == len(A * B) + def test_project(schema_simp_pop): x = A().proj(a="id_a") # rename assert x.heading.names == ["a"], "renaming does not work" @@ -175,9 +208,11 @@ def test_project(schema_simp_pop): # projection after restriction cond = L() & "cond_in_l" - assert ( - len(D() & cond) + len(D() - cond) == len(D())), "failed semijoin or antijoin" - assert len((D() & cond).proj()) == len((D() & cond)), "projection failed: altered its argument" "s cardinality" + assert len(D() & cond) + len(D() - cond) == len(D()), "failed semijoin or antijoin" + assert len((D() & cond).proj()) == len((D() & cond)), ( + "projection failed: altered its argument" "s cardinality" + ) + def test_rename_non_dj_attribute(connection_test, schema_simp_pop, schema_any_pop): schema = PREFIX + "_test1" @@ -191,45 +226,50 @@ def test_rename_non_dj_attribute(connection_test, schema_simp_pop, schema_any_po ), "Failed to rename attribute correctly" connection_test.query(f"DROP TABLE {schema}.test_table") + def test_union(schema_simp_pop): x = set(zip(*IJ.fetch("i", "j"))) y = set(zip(*JI.fetch("i", "j"))) - assert len(x) > 0 and len(y) > 0 and len(IJ() * JI()) < len(x) # ensure the IJ and JI are non-trivial + assert ( + len(x) > 0 and len(y) > 0 and len(IJ() * JI()) < len(x) + ) # ensure the IJ and JI are non-trivial z = set(zip(*(IJ + JI).fetch("i", "j"))) # union assert x.union(y) == z assert len(IJ + JI) == len(z) + def test_outer_union_fail(schema_simp_pop): """Union of two tables with different primary keys raises an error.""" with pytest.raises(dj.DataJointError): A() + B() + def test_outer_union_fail(schema_any_pop): """Union of two tables with different primary keys raises an error.""" t = Trial + Ephys t.fetch() - assert ( - set(t.heading.names) == set(Trial.heading.names) | set(Ephys.heading.names)) + assert set(t.heading.names) == set(Trial.heading.names) | set(Ephys.heading.names) len(t) + def test_preview(schema_simp_pop): with dj.config(display__limit=7): x = A().proj(a="id_a") s = 
x.preview() assert len(s.split("\n")) == len(x) + 2 + def test_heading_repr(schema_simp_pop): x = A * D s = repr(x.heading) - assert ( - len( - list( - 1 - for g in s.split("\n") - if g.strip() and not g.strip().startswith(("-", "#")) - ) - ) == - len(x.heading.attributes)) + assert len( + list( + 1 + for g in s.split("\n") + if g.strip() and not g.strip().startswith(("-", "#")) + ) + ) == len(x.heading.attributes) + def test_aggregate(schema_simp_pop): x = B().aggregate(B.C()) @@ -238,8 +278,9 @@ def test_aggregate(schema_simp_pop): x = B().aggregate(B.C(), keep_all_rows=True) assert len(x) == len(B()) # test LEFT join - assert ( - len((x & "id_b=0").fetch()) == len(B() & "id_b=0")) # test restricted aggregation + assert len((x & "id_b=0").fetch()) == len( + B() & "id_b=0" + ) # test restricted aggregation x = B().aggregate( B.C(), @@ -253,15 +294,18 @@ def test_aggregate(schema_simp_pop): y = x & "mean>0" # restricted aggregation assert len(y) > 0 assert all(y.fetch("mean") > 0) - for n, count, mean, max_, key in zip( - *x.fetch("n", "count", "mean", "max", dj.key) - ): + for n, count, mean, max_, key in zip(*x.fetch("n", "count", "mean", "max", dj.key)): assert n == count, "aggregation failed (count)" values = (B.C() & key).fetch("value") assert bool(len(values)) == bool(n), "aggregation failed (restriction)" if n: - assert np.isclose(mean, values.mean(), rtol=1e-4, atol=1e-5), "aggregation failed (mean)" - assert np.isclose(max_, values.max(), rtol=1e-4, atol=1e-5), "aggregation failed (max)" + assert np.isclose( + mean, values.mean(), rtol=1e-4, atol=1e-5 + ), "aggregation failed (mean)" + assert np.isclose( + max_, values.max(), rtol=1e-4, atol=1e-5 + ), "aggregation failed (max)" + def test_aggr(schema_simp_pop): x = B.aggr(B.C) @@ -272,8 +316,9 @@ def test_aggr(schema_simp_pop): x = B().aggr(B.C(), keep_all_rows=True) assert len(x) == len(B()) # test LEFT join - assert ( - len((x & "id_b=0").fetch()) == len(B() & "id_b=0")) # test restricted aggregation + assert len((x & "id_b=0").fetch()) == len( + B() & "id_b=0" + ) # test restricted aggregation x = B().aggr( B.C(), @@ -287,15 +332,18 @@ def test_aggr(schema_simp_pop): y = x & "mean>0" # restricted aggregation assert len(y) > 0 assert all(y.fetch("mean") > 0) - for n, count, mean, max_, key in zip( - *x.fetch("n", "count", "mean", "max", dj.key) - ): + for n, count, mean, max_, key in zip(*x.fetch("n", "count", "mean", "max", dj.key)): assert n == count, "aggregation failed (count)" values = (B.C() & key).fetch("value") assert bool(len(values)) == bool(n), "aggregation failed (restriction)" if n: - assert np.isclose(mean, values.mean(), rtol=1e-4, atol=1e-5), "aggregation failed (mean)" - assert np.isclose(max_, values.max(), rtol=1e-4, atol=1e-5), "aggregation failed (max)" + assert np.isclose( + mean, values.mean(), rtol=1e-4, atol=1e-5 + ), "aggregation failed (mean)" + assert np.isclose( + max_, values.max(), rtol=1e-4, atol=1e-5 + ), "aggregation failed (max)" + def test_semijoin(schema_simp_pop): """ @@ -314,19 +362,23 @@ def test_semijoin(schema_simp_pop): assert len(semi) == n assert len(anti) == m + def test_pandas_fetch_and_restriction(schema_simp_pop): q = L & "cond_in_l = 0" df = q.fetch(format="frame") # pandas dataframe assert isinstance(df, pandas.DataFrame) assert len(E & q) == len(E & df) + def test_restriction_by_null(schema_any_pop): assert len(Experiment & "username is null") > 0 assert len(Experiment & "username is not null") > 0 + def test_restriction_between(schema_any_pop): # see issue assert 
len(Experiment & 'username between "S" and "Z"') < len(Experiment())
 
+
 def test_restrictions_by_lists(schema_simp_pop):
     x = D()
     y = L() & "cond_in_l"
@@ -349,20 +401,26 @@ def test_restrictions_by_lists(schema_simp_pop):
     assert len(x - []) == lenx, "incorrect restriction by an empty list"
     assert len(x - ()) == lenx, "incorrect restriction by an empty tuple"
     assert len(x - set()) == lenx, "incorrect restriction by an empty set"
-    assert (
-        len(x & {}) == lenx), "incorrect restriction by a tuple with no attributes"
+    assert len(x & {}) == lenx, "incorrect restriction by a tuple with no attributes"
     assert len(x - {}) == 0, "incorrect restriction by a tuple with no attributes"
-    assert len(x & {"foo": 0}) == lenx, "incorrect restriction by a tuple with no matching attributes"
-    assert len(x - {"foo": 0}) == 0, "incorrect restriction by a tuple with no matching attributes"
+    assert (
+        len(x & {"foo": 0}) == lenx
+    ), "incorrect restriction by a tuple with no matching attributes"
+    assert (
+        len(x - {"foo": 0}) == 0
+    ), "incorrect restriction by a tuple with no matching attributes"
     assert len(x & y) == len(x & y.fetch()), "incorrect restriction by a list"
     assert len(x - y) == len(x - y.fetch()), "incorrect restriction by a list"
     w = A()
     assert len(w) > 0, "incorrect test setup: w is empty"
-    assert not (set(w.heading.names) & set(y.heading.names)), "incorrect test setup: w and y should have no common attributes"
-    assert (
-        len(w) == len(w & y)), "incorrect restriction without common attributes"
+    assert not (
+        set(w.heading.names) & set(y.heading.names)
+    ), "incorrect test setup: w and y should have no common attributes"
+    assert len(w) == len(w & y), "incorrect restriction without common attributes"
     assert len(w - y) == 0, "incorrect restriction without common attributes"
 
+
 def test_datetime(schema_any_pop):
     """Test date retrieval"""
     date = Experiment().fetch("experiment_date")[0]
@@ -370,6 +428,7 @@ def test_datetime(schema_any_pop):
     e2 = Experiment() & dict(experiment_date=date)
     assert len(e1) == len(e2) > 0, "Two date restrictions do not yield the same result"
 
+
 def test_date(schema_simp_pop):
     """Test date update"""
     # https://github.com/datajoint/datajoint-python/issues/664
@@ -386,15 +445,20 @@ def test_date(schema_simp_pop):
     F.update1(dict((F & "id=2").fetch1("KEY"), date=None))
     assert (F & "id=2").fetch1("date") is None
 
+
 def test_join_project(schema_simp_pop):
     """Test join of projected relations with matching non-primary key"""
     q = DataA.proj() * DataB.proj()
-    assert len(q) == len(DataA()) == len(DataB()), "Join of projected relations does not work"
+    assert (
+        len(q) == len(DataA()) == len(DataB())
+    ), "Join of projected relations does not work"
+
 
 def test_ellipsis(schema_any_pop):
     r = Experiment.proj(..., "- data_path").head(1, as_dict=True)
     assert set(Experiment.heading).difference(r[0]) == {"data_path"}
 
+
 def test_update_single_key(schema_simp_pop):
     """Test that only one row can be updated"""
     with pytest.raises(dj.DataJointError):
@@ -402,16 +466,19 @@ def test_update_single_key(schema_simp_pop):
         dict(TTestUpdate.fetch1("KEY"), string_attr="my new string")
     )
 
+
 def test_update_no_primary(schema_simp_pop):
     """Test that no primary key can be updated"""
     with pytest.raises(dj.DataJointError):
         TTestUpdate.update1(dict(TTestUpdate.fetch1("KEY"), primary_key=2))
 
+
 def test_update_missing_attribute(schema_simp_pop):
     """Test that attribute is in table"""
     with pytest.raises(dj.DataJointError):
         TTestUpdate.update1(dict(TTestUpdate.fetch1("KEY"), not_existing=2))
 
+
+def
test_update_string_attribute(schema_simp_pop): """Test replacing a string value""" rel = TTestUpdate() & dict(primary_key=0) @@ -421,6 +488,7 @@ def test_update_string_attribute(schema_simp_pop): TTestUpdate.update1(dict(rel.fetch1("KEY"), string_attr=s)) assert s == rel.fetch1("string_attr"), "Updated string does not match" + def test_update_numeric_attribute(schema_simp_pop): """Test replacing a string value""" rel = TTestUpdate() & dict(primary_key=0) @@ -430,13 +498,14 @@ def test_update_numeric_attribute(schema_simp_pop): TTestUpdate.update1(dict(rel.fetch1("KEY"), num_attr=None)) assert np.isnan(rel.fetch1("num_attr")), "Numeric value is not NaN" + def test_update_blob_attribute(schema_simp_pop): """Test replacing a string value""" rel = TTestUpdate() & dict(primary_key=0) s = rel.fetch1("blob_attr") TTestUpdate.update1(dict(rel.fetch1("KEY"), blob_attr=s.T)) - assert ( - s.T.shape == rel.fetch1("blob_attr").shape), "Array dimensions do not match" + assert s.T.shape == rel.fetch1("blob_attr").shape, "Array dimensions do not match" + def test_reserved_words(schema_simp_pop): """Test the user of SQL reserved words as attributes""" @@ -448,6 +517,7 @@ def test_reserved_words(schema_simp_pop): assert (rel.proj("int", double="from") & {"double": "bummer"}).fetch1("int") == 3 (rel & {"key": 1}).delete() + def test_reserved_words2(schema_simp_pop): """Test the user of SQL reserved words as attributes""" rel = ReservedWord() @@ -459,14 +529,17 @@ def test_reserved_words2(schema_simp_pop): "in" ) # error because reserved word `key` is not in backquotes. See issue #249 + def test_permissive_join_basic(schema_any_pop): """Verify join compatibility check is skipped for join""" Child @ Parent + def test_permissive_restriction_basic(schema_any_pop): """Verify join compatibility check is skipped for restriction""" Child ^ Parent + def test_complex_date_restriction(schema_simp_pop): # https://github.com/datajoint/datajoint-python/issues/892 """Test a complex date restriction""" @@ -482,6 +555,7 @@ def test_complex_date_restriction(schema_simp_pop): assert len(q) == 1 q.delete() + def test_null_dict_restriction(schema_simp_pop): # https://github.com/datajoint/datajoint-python/issues/824 """Test a restriction for null using dict""" @@ -491,6 +565,7 @@ def test_null_dict_restriction(schema_simp_pop): q = F & dict(id=5, date=None) assert len(q) == 1 + def test_joins_with_aggregation(schema_any_pop): # https://github.com/datajoint/datajoint-python/issues/898 # https://github.com/datajoint/datajoint-python/issues/899 @@ -510,6 +585,7 @@ def test_joins_with_aggregation(schema_any_pop): ) & "session_date Date: Thu, 14 Dec 2023 10:25:32 -0700 Subject: [PATCH 149/212] cp to tests --- tests/test_schema.py | 190 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 190 insertions(+) create mode 100644 tests/test_schema.py diff --git a/tests/test_schema.py b/tests/test_schema.py new file mode 100644 index 000000000..8ec24fc49 --- /dev/null +++ b/tests/test_schema.py @@ -0,0 +1,190 @@ +from nose.tools import assert_false, assert_true, raises +import datajoint as dj +from inspect import getmembers +from . import schema +from . import schema_empty +from . 
import PREFIX, CONN_INFO, CONN_INFO_ROOT +from .schema_simple import schema as schema_simple + + +def relation_selector(attr): + try: + return issubclass(attr, dj.Table) + except TypeError: + return False + + +def part_selector(attr): + try: + return issubclass(attr, dj.Part) + except TypeError: + return False + + +def test_schema_size_on_disk(): + number_of_bytes = schema.schema.size_on_disk + assert_true(isinstance(number_of_bytes, int)) + + +def test_schema_list(): + schemas = dj.list_schemas() + assert_true(schema.schema.database in schemas) + + +@raises(dj.errors.AccessError) +def test_drop_unauthorized(): + info_schema = dj.schema("information_schema") + info_schema.drop() + + +def test_namespace_population(): + for name, rel in getmembers(schema, relation_selector): + assert_true( + hasattr(schema_empty, name), + "{name} not found in schema_empty".format(name=name), + ) + assert_true( + rel.__base__ is getattr(schema_empty, name).__base__, + "Wrong tier for {name}".format(name=name), + ) + + for name_part in dir(rel): + if name_part[0].isupper() and part_selector(getattr(rel, name_part)): + assert_true( + getattr(rel, name_part).__base__ is dj.Part, + "Wrong tier for {name}".format(name=name_part), + ) + + +@raises(dj.DataJointError) +def test_undecorated_table(): + """ + Undecorated user table classes should raise an informative exception upon first use + """ + + class UndecoratedClass(dj.Manual): + definition = "" + + a = UndecoratedClass() + print(a.full_table_name) + + +@raises(dj.DataJointError) +def test_reject_decorated_part(): + """ + Decorating a dj.Part table should raise an informative exception. + """ + + @schema.schema + class A(dj.Manual): + definition = ... + + @schema.schema + class B(dj.Part): + definition = ... + + +@raises(dj.DataJointError) +def test_unauthorized_database(): + """ + an attempt to create a database to which user has no privileges should raise an informative exception. 
+ """ + dj.Schema("unauthorized_schema", connection=dj.conn(reset=True, **CONN_INFO)) + + +def test_drop_database(): + schema = dj.Schema( + PREFIX + "_drop_test", connection=dj.conn(reset=True, **CONN_INFO) + ) + assert_true(schema.exists) + schema.drop() + assert_false(schema.exists) + schema.drop() # should do nothing + + +def test_overlapping_name(): + test_schema = dj.Schema( + PREFIX + "_overlapping_schema", connection=dj.conn(**CONN_INFO) + ) + + @test_schema + class Unit(dj.Manual): + definition = """ + id: int # simple id + """ + + # hack to update the locals dictionary + locals() + + @test_schema + class Cell(dj.Manual): + definition = """ + type: varchar(32) # type of cell + """ + + class Unit(dj.Part): + definition = """ + -> master + -> Unit + """ + + test_schema.drop() + + +def test_list_tables(): + # https://github.com/datajoint/datajoint-python/issues/838 + assert set( + [ + "reserved_word", + "#l", + "#a", + "__d", + "__b", + "__b__c", + "__e", + "__e__f", + "#outfit_launch", + "#outfit_launch__outfit_piece", + "#i_j", + "#j_i", + "#t_test_update", + "#data_a", + "#data_b", + "f", + "#argmax_test", + "#website", + "profile", + "profile__website", + ] + ) == set(schema_simple.list_tables()) + + +def test_schema_save(): + assert_true("class Experiment(dj.Imported)" in schema.schema.code) + assert_true("class Experiment(dj.Imported)" in schema_empty.schema.code) + + +def test_uppercase_schema(): + # https://github.com/datajoint/datajoint-python/issues/564 + dj.conn(**CONN_INFO_ROOT, reset=True) + schema1 = dj.Schema("Schema_A") + + @schema1 + class Subject(dj.Manual): + definition = """ + name: varchar(32) + """ + + Schema_A = dj.VirtualModule("Schema_A", "Schema_A") + + schema2 = dj.Schema("schema_b") + + @schema2 + class Recording(dj.Manual): + definition = """ + -> Schema_A.Subject + id: smallint + """ + + schema2.drop() + schema1.drop() From 75984419cad06298fee47a679b6d0e84b2c91ce6 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 10:25:48 -0700 Subject: [PATCH 150/212] nose2pytest test_schema --- tests/test_schema.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/test_schema.py b/tests/test_schema.py index 8ec24fc49..0e88a134a 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -23,12 +23,12 @@ def part_selector(attr): def test_schema_size_on_disk(): number_of_bytes = schema.schema.size_on_disk - assert_true(isinstance(number_of_bytes, int)) + assert isinstance(number_of_bytes, int) def test_schema_list(): schemas = dj.list_schemas() - assert_true(schema.schema.database in schemas) + assert schema.schema.database in schemas @raises(dj.errors.AccessError) @@ -96,9 +96,9 @@ def test_drop_database(): schema = dj.Schema( PREFIX + "_drop_test", connection=dj.conn(reset=True, **CONN_INFO) ) - assert_true(schema.exists) + assert schema.exists schema.drop() - assert_false(schema.exists) + assert not schema.exists schema.drop() # should do nothing @@ -160,8 +160,8 @@ def test_list_tables(): def test_schema_save(): - assert_true("class Experiment(dj.Imported)" in schema.schema.code) - assert_true("class Experiment(dj.Imported)" in schema_empty.schema.code) + assert "class Experiment(dj.Imported)" in schema.schema.code + assert "class Experiment(dj.Imported)" in schema_empty.schema.code def test_uppercase_schema(): From 55bf4ea716a541d579443155f2fd0746ca2a7e8b Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 14:00:50 -0700 Subject: [PATCH 151/212] Add db_creds_test fixture --- tests/conftest.py | 28 
+++++++++++++++++----------- 1 file changed, 17 insertions(+), 11 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 5a38eef90..a9474b502 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -56,6 +56,15 @@ def enable_filepath_feature(monkeypatch): monkeypatch.delenv(FILEPATH_FEATURE_SWITCH, raising=True) +@pytest.fixture(scope="session") +def db_creds_test() -> Dict: + return dict( + host=os.getenv("DJ_TEST_HOST", "fakeservices.datajoint.io"), + user=os.getenv("DJ_TEST_USER", "datajoint"), + password=os.getenv("DJ_TEST_PASSWORD", "datajoint"), + ) + + @pytest.fixture(scope="session") def db_creds_root() -> Dict: return dict( @@ -142,12 +151,9 @@ def connection_root(connection_root_bare): @pytest.fixture(scope="session") -def connection_test(connection_root): +def connection_test(connection_root, db_creds_test): """Test user database connection.""" database = f"{PREFIX}%%" - credentials = dict( - host=os.getenv("DJ_HOST"), user="datajoint", password="datajoint" - ) permission = "ALL PRIVILEGES" # Create MySQL users @@ -157,14 +163,14 @@ def connection_test(connection_root): # create user if necessary on mysql8 connection_root.query( f""" - CREATE USER IF NOT EXISTS '{credentials["user"]}'@'%%' - IDENTIFIED BY '{credentials["password"]}'; + CREATE USER IF NOT EXISTS '{db_creds_test["user"]}'@'%%' + IDENTIFIED BY '{db_creds_test["password"]}'; """ ) connection_root.query( f""" GRANT {permission} ON `{database}`.* - TO '{credentials["user"]}'@'%%'; + TO '{db_creds_test["user"]}'@'%%'; """ ) else: @@ -173,14 +179,14 @@ def connection_test(connection_root): connection_root.query( f""" GRANT {permission} ON `{database}`.* - TO '{credentials["user"]}'@'%%' - IDENTIFIED BY '{credentials["password"]}'; + TO '{db_creds_test["user"]}'@'%%' + IDENTIFIED BY '{db_creds_test["password"]}'; """ ) - connection = dj.Connection(**credentials) + connection = dj.Connection(**db_creds_test) yield connection - connection_root.query(f"""DROP USER `{credentials["user"]}`""") + connection_root.query(f"""DROP USER `{db_creds_test["user"]}`""") connection.close() From 5fed6a515176e2cd20043f292fcc6d06d4358992 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 14:01:03 -0700 Subject: [PATCH 152/212] First pass at migrating test_schema --- tests/test_schema.py | 108 +++++++++++++++++++++++++------------------ 1 file changed, 63 insertions(+), 45 deletions(-) diff --git a/tests/test_schema.py b/tests/test_schema.py index 0e88a134a..31825bc5d 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -1,10 +1,13 @@ -from nose.tools import assert_false, assert_true, raises +import pytest import datajoint as dj from inspect import getmembers from . import schema -from . import schema_empty -from . import PREFIX, CONN_INFO, CONN_INFO_ROOT -from .schema_simple import schema as schema_simple +from . 
import PREFIX + + +class Ephys(dj.Imported): + definition = """ # This is already declared in ./schema.py + """ def relation_selector(attr): @@ -21,42 +24,47 @@ def part_selector(attr): return False -def test_schema_size_on_disk(): - number_of_bytes = schema.schema.size_on_disk +@pytest.fixture +def schema_empty(connection_test, schema_any): + context = { + **schema.LOCALS_ANY, + "Ephys": Ephys + } + schema_emp = dj.Schema(PREFIX + "_test1", context=context, connection=connection_test) + schema_emp(Ephys) + # load the rest of the classes + schema_emp.spawn_missing_classes() + breakpoint() + yield schema_emp + schema_emp.drop() + + +def test_schema_size_on_disk(schema_any): + number_of_bytes = schema_any.size_on_disk assert isinstance(number_of_bytes, int) -def test_schema_list(): +def test_schema_list(schema_any): schemas = dj.list_schemas() - assert schema.schema.database in schemas + assert schema_any.database in schemas -@raises(dj.errors.AccessError) def test_drop_unauthorized(): info_schema = dj.schema("information_schema") - info_schema.drop() + with pytest.raises(dj.errors.AccessError): + info_schema.drop() -def test_namespace_population(): +def test_namespace_population(schema_empty, schema_any): for name, rel in getmembers(schema, relation_selector): - assert_true( - hasattr(schema_empty, name), - "{name} not found in schema_empty".format(name=name), - ) - assert_true( - rel.__base__ is getattr(schema_empty, name).__base__, - "Wrong tier for {name}".format(name=name), - ) + assert hasattr(schema_empty, name), "{name} not found in schema_empty".format(name=name) + assert rel.__base__ is getattr(schema_empty, name).__base__, "Wrong tier for {name}".format(name=name) for name_part in dir(rel): if name_part[0].isupper() and part_selector(getattr(rel, name_part)): - assert_true( - getattr(rel, name_part).__base__ is dj.Part, - "Wrong tier for {name}".format(name=name_part), - ) + assert getattr(rel, name_part).__base__ is dj.Part, "Wrong tier for {name}".format(name=name_part) -@raises(dj.DataJointError) def test_undecorated_table(): """ Undecorated user table classes should raise an informative exception upon first use @@ -66,35 +74,38 @@ class UndecoratedClass(dj.Manual): definition = "" a = UndecoratedClass() - print(a.full_table_name) + with pytest.raises(dj.DataJointError): + print(a.full_table_name) -@raises(dj.DataJointError) -def test_reject_decorated_part(): +def test_reject_decorated_part(schema_any): """ Decorating a dj.Part table should raise an informative exception. """ - @schema.schema class A(dj.Manual): definition = ... - @schema.schema class B(dj.Part): definition = ... -@raises(dj.DataJointError) -def test_unauthorized_database(): + with pytest.raises(dj.DataJointError): + schema_any(A.B) + schema_any(A) + + +def test_unauthorized_database(db_creds_test): """ an attempt to create a database to which user has no privileges should raise an informative exception. 
""" - dj.Schema("unauthorized_schema", connection=dj.conn(reset=True, **CONN_INFO)) + with pytest.raises(dj.DataJointError): + dj.Schema("unauthorized_schema", connection=dj.conn(reset=True, **db_creds_test)) -def test_drop_database(): +def test_drop_database(db_creds_test): schema = dj.Schema( - PREFIX + "_drop_test", connection=dj.conn(reset=True, **CONN_INFO) + PREFIX + "_drop_test", connection=dj.conn(reset=True, **db_creds_test) ) assert schema.exists schema.drop() @@ -102,9 +113,9 @@ def test_drop_database(): schema.drop() # should do nothing -def test_overlapping_name(): +def test_overlapping_name(connection_test): test_schema = dj.Schema( - PREFIX + "_overlapping_schema", connection=dj.conn(**CONN_INFO) + PREFIX + "_overlapping_schema", connection=connection_test ) @test_schema @@ -131,8 +142,10 @@ class Unit(dj.Part): test_schema.drop() -def test_list_tables(): - # https://github.com/datajoint/datajoint-python/issues/838 +def test_list_tables(schema_simp): + """ + https://github.com/datajoint/datajoint-python/issues/838 + """ assert set( [ "reserved_word", @@ -156,17 +169,22 @@ def test_list_tables(): "profile", "profile__website", ] - ) == set(schema_simple.list_tables()) + ) == set(schema_simp.list_tables()) + +def test_schema_save_any(schema_any): + assert "class Experiment(dj.Imported)" in schema_any.code -def test_schema_save(): - assert "class Experiment(dj.Imported)" in schema.schema.code - assert "class Experiment(dj.Imported)" in schema_empty.schema.code +def test_schema_save_empty(schema_empty): + assert "class Experiment(dj.Imported)" in schema_empty.code -def test_uppercase_schema(): - # https://github.com/datajoint/datajoint-python/issues/564 - dj.conn(**CONN_INFO_ROOT, reset=True) + +def test_uppercase_schema(db_creds_root): + """ + https://github.com/datajoint/datajoint-python/issues/564 + """ + dj.conn(**db_creds_root, reset=True) schema1 = dj.Schema("Schema_A") @schema1 From 58c6103f52c21995692d83b1fe62b25f3b635a5a Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 15:07:50 -0700 Subject: [PATCH 153/212] Mock schema_empty module --- tests/test_schema.py | 46 ++++++++++++++++++++++++++++++++++---------- 1 file changed, 36 insertions(+), 10 deletions(-) diff --git a/tests/test_schema.py b/tests/test_schema.py index 31825bc5d..88b7422cf 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -1,12 +1,15 @@ +import types import pytest +import inspect import datajoint as dj +from unittest.mock import patch from inspect import getmembers from . import schema from . import PREFIX class Ephys(dj.Imported): - definition = """ # This is already declared in ./schema.py + definition = """ # This is already declare in ./schema.py """ @@ -24,19 +27,42 @@ def part_selector(attr): return False +@pytest.fixture +def schema_empty_module(schema_any, schema_empty): + # Mimic tests_old/schema_empty + namespace_dict = { + '_': schema_any, + 'schema': schema_empty, + 'Ephys': Ephys, + } + module = types.ModuleType('schema_empty') + + # Add classes to the module's namespace + for k, v in namespace_dict.items(): + setattr(module, k, v) + + # Spawn missing classes in the caller's (self) namespace. + # Then add them to the mock module's namespace. 
+ module.schema.context = None + module.schema.spawn_missing_classes(context=None) + for k, v in locals().items(): + if inspect.isclass(v): + setattr(module, k, v) + return module + + @pytest.fixture def schema_empty(connection_test, schema_any): context = { **schema.LOCALS_ANY, "Ephys": Ephys } - schema_emp = dj.Schema(PREFIX + "_test1", context=context, connection=connection_test) - schema_emp(Ephys) + schema_empty = dj.Schema(PREFIX + "_test1", context=context, connection=connection_test) + schema_empty(Ephys) # load the rest of the classes - schema_emp.spawn_missing_classes() - breakpoint() - yield schema_emp - schema_emp.drop() + schema_empty.spawn_missing_classes(context=context) + yield schema_empty + schema_empty.drop() def test_schema_size_on_disk(schema_any): @@ -55,10 +81,10 @@ def test_drop_unauthorized(): info_schema.drop() -def test_namespace_population(schema_empty, schema_any): +def test_namespace_population(schema_empty_module): for name, rel in getmembers(schema, relation_selector): - assert hasattr(schema_empty, name), "{name} not found in schema_empty".format(name=name) - assert rel.__base__ is getattr(schema_empty, name).__base__, "Wrong tier for {name}".format(name=name) + assert hasattr(schema_empty_module, name), "{name} not found in schema_empty".format(name=name) + assert rel.__base__ is getattr(schema_empty_module, name).__base__, "Wrong tier for {name}".format(name=name) for name_part in dir(rel): if name_part[0].isupper() and part_selector(getattr(rel, name_part)): From b9ccb4fc23a431c4861a605347f44dabb331333c Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 15:18:18 -0700 Subject: [PATCH 154/212] Move call to spawn_missing_classes to test --- tests/test_schema.py | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/tests/test_schema.py b/tests/test_schema.py index 88b7422cf..09ae46ca0 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -29,7 +29,11 @@ def part_selector(attr): @pytest.fixture def schema_empty_module(schema_any, schema_empty): - # Mimic tests_old/schema_empty + """ + Mock the module tests_old.schema_empty. + The test `test_namespace_population` will check that the module contains all the + classes in schema_any, after running `spawn_missing_classes`. + """ namespace_dict = { '_': schema_any, 'schema': schema_empty, @@ -41,13 +45,6 @@ def schema_empty_module(schema_any, schema_empty): for k, v in namespace_dict.items(): setattr(module, k, v) - # Spawn missing classes in the caller's (self) namespace. - # Then add them to the mock module's namespace. - module.schema.context = None - module.schema.spawn_missing_classes(context=None) - for k, v in locals().items(): - if inspect.isclass(v): - setattr(module, k, v) return module @@ -82,6 +79,19 @@ def test_drop_unauthorized(): def test_namespace_population(schema_empty_module): + """ + With the schema_empty_module fixture, this test + mimics the behavior of `spawn_missing_classes`, as if the schema + was declared in a separate module and `spawn_missing_classes` was called in that namespace. + """ + # Spawn missing classes in the caller's (self) namespace. + schema_empty_module.schema.context = None + schema_empty_module.schema.spawn_missing_classes(context=None) + # Then add them to the mock module's namespace. 
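+    # At this point locals() should hold the spawned table classes alongside
+    # the test's own names; the inspect.isclass filter keeps only the classes.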
+ for k, v in locals().items(): + if inspect.isclass(v): + setattr(schema_empty_module, k, v) + for name, rel in getmembers(schema, relation_selector): assert hasattr(schema_empty_module, name), "{name} not found in schema_empty".format(name=name) assert rel.__base__ is getattr(schema_empty_module, name).__base__, "Wrong tier for {name}".format(name=name) From 7bf18f0c5562e9185e3468119c80c1f6bee36bc6 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 15:26:53 -0700 Subject: [PATCH 155/212] Format with black --- tests/test_schema.py | 38 +++++++++++++++++++++----------------- 1 file changed, 21 insertions(+), 17 deletions(-) diff --git a/tests/test_schema.py b/tests/test_schema.py index 09ae46ca0..7b262204f 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -35,11 +35,11 @@ def schema_empty_module(schema_any, schema_empty): classes in schema_any, after running `spawn_missing_classes`. """ namespace_dict = { - '_': schema_any, - 'schema': schema_empty, - 'Ephys': Ephys, + "_": schema_any, + "schema": schema_empty, + "Ephys": Ephys, } - module = types.ModuleType('schema_empty') + module = types.ModuleType("schema_empty") # Add classes to the module's namespace for k, v in namespace_dict.items(): @@ -50,11 +50,10 @@ def schema_empty_module(schema_any, schema_empty): @pytest.fixture def schema_empty(connection_test, schema_any): - context = { - **schema.LOCALS_ANY, - "Ephys": Ephys - } - schema_empty = dj.Schema(PREFIX + "_test1", context=context, connection=connection_test) + context = {**schema.LOCALS_ANY, "Ephys": Ephys} + schema_empty = dj.Schema( + PREFIX + "_test1", context=context, connection=connection_test + ) schema_empty(Ephys) # load the rest of the classes schema_empty.spawn_missing_classes(context=context) @@ -93,12 +92,18 @@ def test_namespace_population(schema_empty_module): setattr(schema_empty_module, k, v) for name, rel in getmembers(schema, relation_selector): - assert hasattr(schema_empty_module, name), "{name} not found in schema_empty".format(name=name) - assert rel.__base__ is getattr(schema_empty_module, name).__base__, "Wrong tier for {name}".format(name=name) + assert hasattr( + schema_empty_module, name + ), "{name} not found in schema_empty".format(name=name) + assert ( + rel.__base__ is getattr(schema_empty_module, name).__base__ + ), "Wrong tier for {name}".format(name=name) for name_part in dir(rel): if name_part[0].isupper() and part_selector(getattr(rel, name_part)): - assert getattr(rel, name_part).__base__ is dj.Part, "Wrong tier for {name}".format(name=name_part) + assert ( + getattr(rel, name_part).__base__ is dj.Part + ), "Wrong tier for {name}".format(name=name_part) def test_undecorated_table(): @@ -125,7 +130,6 @@ class A(dj.Manual): class B(dj.Part): definition = ... - with pytest.raises(dj.DataJointError): schema_any(A.B) schema_any(A) @@ -136,7 +140,9 @@ def test_unauthorized_database(db_creds_test): an attempt to create a database to which user has no privileges should raise an informative exception. 
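    Note: dj.conn(reset=True, ...) is used to replace the cached module-level
    connection, so the call below runs as the unprivileged test user instead
    of reusing a previously cached (possibly privileged) connection.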
""" with pytest.raises(dj.DataJointError): - dj.Schema("unauthorized_schema", connection=dj.conn(reset=True, **db_creds_test)) + dj.Schema( + "unauthorized_schema", connection=dj.conn(reset=True, **db_creds_test) + ) def test_drop_database(db_creds_test): @@ -150,9 +156,7 @@ def test_drop_database(db_creds_test): def test_overlapping_name(connection_test): - test_schema = dj.Schema( - PREFIX + "_overlapping_schema", connection=connection_test - ) + test_schema = dj.Schema(PREFIX + "_overlapping_schema", connection=connection_test) @test_schema class Unit(dj.Manual): From acb2ab35c235fc8beee3c8b5abdf4666880068eb Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 15:33:16 -0700 Subject: [PATCH 156/212] Checkout conftest from dev-tests-plat-166-schema --- tests/conftest.py | 28 +++++++++++++++++----------- 1 file changed, 17 insertions(+), 11 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 5a38eef90..a9474b502 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -56,6 +56,15 @@ def enable_filepath_feature(monkeypatch): monkeypatch.delenv(FILEPATH_FEATURE_SWITCH, raising=True) +@pytest.fixture(scope="session") +def db_creds_test() -> Dict: + return dict( + host=os.getenv("DJ_TEST_HOST", "fakeservices.datajoint.io"), + user=os.getenv("DJ_TEST_USER", "datajoint"), + password=os.getenv("DJ_TEST_PASSWORD", "datajoint"), + ) + + @pytest.fixture(scope="session") def db_creds_root() -> Dict: return dict( @@ -142,12 +151,9 @@ def connection_root(connection_root_bare): @pytest.fixture(scope="session") -def connection_test(connection_root): +def connection_test(connection_root, db_creds_test): """Test user database connection.""" database = f"{PREFIX}%%" - credentials = dict( - host=os.getenv("DJ_HOST"), user="datajoint", password="datajoint" - ) permission = "ALL PRIVILEGES" # Create MySQL users @@ -157,14 +163,14 @@ def connection_test(connection_root): # create user if necessary on mysql8 connection_root.query( f""" - CREATE USER IF NOT EXISTS '{credentials["user"]}'@'%%' - IDENTIFIED BY '{credentials["password"]}'; + CREATE USER IF NOT EXISTS '{db_creds_test["user"]}'@'%%' + IDENTIFIED BY '{db_creds_test["password"]}'; """ ) connection_root.query( f""" GRANT {permission} ON `{database}`.* - TO '{credentials["user"]}'@'%%'; + TO '{db_creds_test["user"]}'@'%%'; """ ) else: @@ -173,14 +179,14 @@ def connection_test(connection_root): connection_root.query( f""" GRANT {permission} ON `{database}`.* - TO '{credentials["user"]}'@'%%' - IDENTIFIED BY '{credentials["password"]}'; + TO '{db_creds_test["user"]}'@'%%' + IDENTIFIED BY '{db_creds_test["password"]}'; """ ) - connection = dj.Connection(**credentials) + connection = dj.Connection(**db_creds_test) yield connection - connection_root.query(f"""DROP USER `{credentials["user"]}`""") + connection_root.query(f"""DROP USER `{db_creds_test["user"]}`""") connection.close() From ffc61b8bd36342097cdf64297d9a020af608bfa4 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 15:34:09 -0700 Subject: [PATCH 157/212] cp to tests --- tests/test_tls.py | 37 +++++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 tests/test_tls.py diff --git a/tests/test_tls.py b/tests/test_tls.py new file mode 100644 index 000000000..1bac17e7e --- /dev/null +++ b/tests/test_tls.py @@ -0,0 +1,37 @@ +from nose.tools import ( + assert_true, + assert_false, + assert_equal, + assert_list_equal, + raises, +) +import datajoint as dj +from . 
import CONN_INFO +from pymysql.err import OperationalError + + +class TestTLS: + @staticmethod + def test_secure_connection(): + result = ( + dj.conn(reset=True, **CONN_INFO) + .query("SHOW STATUS LIKE 'Ssl_cipher';") + .fetchone()[1] + ) + assert_true(len(result) > 0) + + @staticmethod + def test_insecure_connection(): + result = ( + dj.conn(use_tls=False, reset=True, **CONN_INFO) + .query("SHOW STATUS LIKE 'Ssl_cipher';") + .fetchone()[1] + ) + assert_equal(result, "") + + @staticmethod + @raises(OperationalError) + def test_reject_insecure(): + dj.conn( + CONN_INFO["host"], user="djssl", password="djssl", use_tls=False, reset=True + ).query("SHOW STATUS LIKE 'Ssl_cipher';").fetchone()[1] From 9974818e3838940666107036611035ba9aa7dc37 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 15:34:42 -0700 Subject: [PATCH 158/212] nose2pytest test_tls --- tests/test_tls.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/tests/test_tls.py b/tests/test_tls.py index 1bac17e7e..a974bc354 100644 --- a/tests/test_tls.py +++ b/tests/test_tls.py @@ -1,8 +1,4 @@ from nose.tools import ( - assert_true, - assert_false, - assert_equal, - assert_list_equal, raises, ) import datajoint as dj @@ -18,7 +14,7 @@ def test_secure_connection(): .query("SHOW STATUS LIKE 'Ssl_cipher';") .fetchone()[1] ) - assert_true(len(result) > 0) + assert len(result) > 0 @staticmethod def test_insecure_connection(): @@ -27,7 +23,7 @@ def test_insecure_connection(): .query("SHOW STATUS LIKE 'Ssl_cipher';") .fetchone()[1] ) - assert_equal(result, "") + assert result == "" @staticmethod @raises(OperationalError) From 0a94b204c03dd67c0731a7abbafe7049d19b2d24 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 15:36:33 -0700 Subject: [PATCH 159/212] Migrate test_tls --- tests/test_tls.py | 45 ++++++++++++++++++++------------------------- 1 file changed, 20 insertions(+), 25 deletions(-) diff --git a/tests/test_tls.py b/tests/test_tls.py index a974bc354..0032a79a7 100644 --- a/tests/test_tls.py +++ b/tests/test_tls.py @@ -1,33 +1,28 @@ -from nose.tools import ( - raises, -) +import pytest import datajoint as dj -from . 
import CONN_INFO from pymysql.err import OperationalError -class TestTLS: - @staticmethod - def test_secure_connection(): - result = ( - dj.conn(reset=True, **CONN_INFO) - .query("SHOW STATUS LIKE 'Ssl_cipher';") - .fetchone()[1] - ) - assert len(result) > 0 +def test_secure_connection(db_creds_test, connection_test): + result = ( + dj.conn(reset=True, **db_creds_test) + .query("SHOW STATUS LIKE 'Ssl_cipher';") + .fetchone()[1] + ) + assert len(result) > 0 - @staticmethod - def test_insecure_connection(): - result = ( - dj.conn(use_tls=False, reset=True, **CONN_INFO) - .query("SHOW STATUS LIKE 'Ssl_cipher';") - .fetchone()[1] - ) - assert result == "" - @staticmethod - @raises(OperationalError) - def test_reject_insecure(): +def test_insecure_connection(db_creds_test, connection_test): + result = ( + dj.conn(use_tls=False, reset=True, **db_creds_test) + .query("SHOW STATUS LIKE 'Ssl_cipher';") + .fetchone()[1] + ) + assert result == "" + + +def test_reject_insecure(db_creds_test, connection_test): + with pytest.raises(OperationalError): dj.conn( - CONN_INFO["host"], user="djssl", password="djssl", use_tls=False, reset=True + db_creds_test["host"], user="djssl", password="djssl", use_tls=False, reset=True ).query("SHOW STATUS LIKE 'Ssl_cipher';").fetchone()[1] From 1e3ba5c3a239bcb93c0fe940a08ba807cd4a793a Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 15:36:40 -0700 Subject: [PATCH 160/212] Format with black --- tests/test_tls.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/test_tls.py b/tests/test_tls.py index 0032a79a7..22558af5b 100644 --- a/tests/test_tls.py +++ b/tests/test_tls.py @@ -24,5 +24,9 @@ def test_insecure_connection(db_creds_test, connection_test): def test_reject_insecure(db_creds_test, connection_test): with pytest.raises(OperationalError): dj.conn( - db_creds_test["host"], user="djssl", password="djssl", use_tls=False, reset=True + db_creds_test["host"], + user="djssl", + password="djssl", + use_tls=False, + reset=True, ).query("SHOW STATUS LIKE 'Ssl_cipher';").fetchone()[1] From baf74a6bd833a329406e7a10844701660b49bb3c Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 15:42:13 -0700 Subject: [PATCH 161/212] cp to tests --- tests/schema_university.py | 119 ++++++++++++++++++++++++++++++ tests/test_university.py | 145 +++++++++++++++++++++++++++++++++++++ 2 files changed, 264 insertions(+) create mode 100644 tests/schema_university.py create mode 100644 tests/test_university.py diff --git a/tests/schema_university.py b/tests/schema_university.py new file mode 100644 index 000000000..619ea459f --- /dev/null +++ b/tests/schema_university.py @@ -0,0 +1,119 @@ +import datajoint as dj + +schema = dj.Schema() + + +@schema +class Student(dj.Manual): + definition = """ + student_id : int unsigned # university-wide ID number + --- + first_name : varchar(40) + last_name : varchar(40) + sex : enum('F', 'M', 'U') + date_of_birth : date + home_address : varchar(120) # mailing street address + home_city : varchar(60) # mailing address + home_state : char(2) # US state acronym: e.g. OH + home_zip : char(10) # zipcode e.g. 93979-4979 + home_phone : varchar(20) # e.g. 414.657.6883x0881 + """ + + +@schema +class Department(dj.Manual): + definition = """ + dept : varchar(6) # abbreviated department name, e.g. 
BIOL + --- + dept_name : varchar(200) # full department name + dept_address : varchar(200) # mailing address + dept_phone : varchar(20) + """ + + +@schema +class StudentMajor(dj.Manual): + definition = """ + -> Student + --- + -> Department + declare_date : date # when student declared her major + """ + + +@schema +class Course(dj.Manual): + definition = """ + -> Department + course : int unsigned # course number, e.g. 1010 + --- + course_name : varchar(200) # e.g. "Neurobiology of Sensation and Movement." + credits : decimal(3,1) # number of credits earned by completing the course + """ + + +@schema +class Term(dj.Manual): + definition = """ + term_year : year + term : enum('Spring', 'Summer', 'Fall') + """ + + +@schema +class Section(dj.Manual): + definition = """ + -> Course + -> Term + section : char(1) + --- + auditorium : varchar(12) + """ + + +@schema +class CurrentTerm(dj.Manual): + definition = """ + omega=0 : tinyint + --- + -> Term + """ + + +@schema +class Enroll(dj.Manual): + definition = """ + -> Student + -> Section + """ + + +@schema +class LetterGrade(dj.Lookup): + definition = """ + grade : char(2) + --- + points : decimal(3,2) + """ + contents = [ + ["A", 4.00], + ["A-", 3.67], + ["B+", 3.33], + ["B", 3.00], + ["B-", 2.67], + ["C+", 2.33], + ["C", 2.00], + ["C-", 1.67], + ["D+", 1.33], + ["D", 1.00], + ["F", 0.00], + ] + + +@schema +class Grade(dj.Manual): + definition = """ + -> Enroll + --- + -> LetterGrade + """ diff --git a/tests/test_university.py b/tests/test_university.py new file mode 100644 index 000000000..34380d37c --- /dev/null +++ b/tests/test_university.py @@ -0,0 +1,145 @@ +from nose.tools import assert_true, assert_list_equal, assert_false, raises +import hashlib +from datajoint import DataJointError +from .schema_university import * +from . import PREFIX, CONN_INFO + + +def _hash4(table): + """hash of table contents""" + data = table.fetch(order_by="KEY", as_dict=True) + blob = dj.blob.pack(data, compress=False) + return hashlib.md5(blob).digest().hex()[:4] + + +@raises(DataJointError) +def test_activate_unauthorized(): + schema.activate("unauthorized", connection=dj.conn(**CONN_INFO)) + + +def test_activate(): + schema.activate( + PREFIX + "_university", connection=dj.conn(**CONN_INFO) + ) # deferred activation + # --------------- Fill University ------------------- + for table in ( + Student, + Department, + StudentMajor, + Course, + Term, + CurrentTerm, + Section, + Enroll, + Grade, + ): + from pathlib import Path + + table().insert(Path("./data/" + table.__name__ + ".csv")) + + +def test_fill(): + """check that the randomized tables are consistently defined""" + # check randomized tables + assert_true(len(Student()) == 300 and _hash4(Student) == "1e1a") + assert_true(len(StudentMajor()) == 226 and _hash4(StudentMajor) == "3129") + assert_true(len(Section()) == 756 and _hash4(Section) == "dc7e") + assert_true(len(Enroll()) == 3364 and _hash4(Enroll) == "177d") + assert_true(len(Grade()) == 3027 and _hash4(Grade) == "4a9d") + + +def test_restrict(): + """ + test diverse restrictions from the university database. + This test relies on a specific instantiation of the database. 
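+
+    A short sketch of the restriction operators exercised below (all names
+    come from this schema; the expected counts depend on that instantiation):
+
+        Student & {"home_state": "UT"}      # restrict by mapping
+        Student & 'home_state="UT"'         # restrict by condition string
+        Student - (Enroll & CurrentTerm)    # antijoin: students not enrolled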
+ """ + utahns1 = Student & {"home_state": "UT"} + utahns2 = Student & 'home_state="UT"' + assert_true(len(utahns1) == len(utahns2.fetch("KEY")) == 7) + + # male nonutahns + sex1, state1 = ((Student & 'sex="M"') - {"home_state": "UT"}).fetch( + "sex", "home_state", order_by="student_id" + ) + sex2, state2 = ((Student & 'sex="M"') - {"home_state": "UT"}).fetch( + "sex", "home_state", order_by="student_id" + ) + assert_true(len(set(state1)) == len(set(state2)) == 44) + assert_true(set(sex1).pop() == set(sex2).pop() == "M") + + # students from OK, NM, TX + s1 = (Student & [{"home_state": s} for s in ("OK", "NM", "TX")]).fetch( + "KEY", order_by="student_id" + ) + s2 = (Student & 'home_state in ("OK", "NM", "TX")').fetch( + "KEY", order_by="student_id" + ) + assert_true(len(s1) == 11) + assert_list_equal(s1, s2) + + millennials = Student & 'date_of_birth between "1981-01-01" and "1996-12-31"' + assert_true(len(millennials) == 170) + millennials_no_math = millennials - (Enroll & 'dept="MATH"') + assert_true(len(millennials_no_math) == 53) + + inactive_students = Student - (Enroll & CurrentTerm) + assert_true(len(inactive_students) == 204) + + # Females who are active or major in non-math + special = Student & [Enroll, StudentMajor - {"dept": "MATH"}] & {"sex": "F"} + assert_true(len(special) == 158) + + +def test_advanced_join(): + """test advanced joins""" + # Students with ungraded courses in current term + ungraded = Enroll * CurrentTerm - Grade + assert_true(len(ungraded) == 34) + + # add major + major = StudentMajor.proj(..., major="dept") + assert_true(len(ungraded.join(major, left=True)) == len(ungraded) == 34) + assert_true(len(ungraded.join(major)) == len(ungraded & major) == 31) + + +def test_union(): + # effective left join Enroll with Major + q1 = (Enroll & "student_id=101") + (Enroll & "student_id=102") + q2 = Enroll & "student_id in (101, 102)" + assert_true(len(q1) == len(q2) == 41) + + +def test_aggr(): + avg_grade_per_course = Course.aggr( + Grade * LetterGrade, avg_grade="round(avg(points), 2)" + ) + assert_true(len(avg_grade_per_course) == 45) + + # GPA + student_gpa = Student.aggr( + Course * Grade * LetterGrade, gpa="round(sum(points*credits)/sum(credits), 2)" + ) + gpa = student_gpa.fetch("gpa") + assert_true(len(gpa) == 261) + assert_true(2 < gpa.mean() < 3) + + # Sections in biology department with zero students in them + section = (Section & {"dept": "BIOL"}).aggr( + Enroll, n="count(student_id)", keep_all_rows=True + ) & "n=0" + assert_true(len(set(section.fetch("dept"))) == 1) + assert_true(len(section) == 17) + assert_true(bool(section)) + + # Test correct use of ellipses in a similar query + section = (Section & {"dept": "BIOL"}).aggr( + Grade, ..., n="count(student_id)", keep_all_rows=True + ) & "n>1" + assert_false( + any( + name in section.heading.names for name in Grade.heading.secondary_attributes + ) + ) + assert_true(len(set(section.fetch("dept"))) == 1) + assert_true(len(section) == 168) + assert_true(bool(section)) From a66da4d1db82853dceb55cc3d8dcdbe88989d98c Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 15:43:28 -0700 Subject: [PATCH 162/212] nose2pytest test_university --- tests/test_university.py | 58 +++++++++++++++++++--------------------- 1 file changed, 28 insertions(+), 30 deletions(-) diff --git a/tests/test_university.py b/tests/test_university.py index 34380d37c..02520a4b8 100644 --- a/tests/test_university.py +++ b/tests/test_university.py @@ -41,11 +41,11 @@ def test_activate(): def test_fill(): """check that the randomized 
tables are consistently defined""" # check randomized tables - assert_true(len(Student()) == 300 and _hash4(Student) == "1e1a") - assert_true(len(StudentMajor()) == 226 and _hash4(StudentMajor) == "3129") - assert_true(len(Section()) == 756 and _hash4(Section) == "dc7e") - assert_true(len(Enroll()) == 3364 and _hash4(Enroll) == "177d") - assert_true(len(Grade()) == 3027 and _hash4(Grade) == "4a9d") + assert len(Student()) == 300 and _hash4(Student) == "1e1a" + assert len(StudentMajor()) == 226 and _hash4(StudentMajor) == "3129" + assert len(Section()) == 756 and _hash4(Section) == "dc7e" + assert len(Enroll()) == 3364 and _hash4(Enroll) == "177d" + assert len(Grade()) == 3027 and _hash4(Grade) == "4a9d" def test_restrict(): @@ -55,7 +55,7 @@ def test_restrict(): """ utahns1 = Student & {"home_state": "UT"} utahns2 = Student & 'home_state="UT"' - assert_true(len(utahns1) == len(utahns2.fetch("KEY")) == 7) + assert len(utahns1) == len(utahns2.fetch("KEY")) == 7 # male nonutahns sex1, state1 = ((Student & 'sex="M"') - {"home_state": "UT"}).fetch( @@ -64,8 +64,8 @@ def test_restrict(): sex2, state2 = ((Student & 'sex="M"') - {"home_state": "UT"}).fetch( "sex", "home_state", order_by="student_id" ) - assert_true(len(set(state1)) == len(set(state2)) == 44) - assert_true(set(sex1).pop() == set(sex2).pop() == "M") + assert len(set(state1)) == len(set(state2)) == 44 + assert set(sex1).pop() == set(sex2).pop() == "M" # students from OK, NM, TX s1 = (Student & [{"home_state": s} for s in ("OK", "NM", "TX")]).fetch( @@ -74,72 +74,70 @@ def test_restrict(): s2 = (Student & 'home_state in ("OK", "NM", "TX")').fetch( "KEY", order_by="student_id" ) - assert_true(len(s1) == 11) - assert_list_equal(s1, s2) + assert len(s1) == 11 + assert s1 == s2 millennials = Student & 'date_of_birth between "1981-01-01" and "1996-12-31"' - assert_true(len(millennials) == 170) + assert len(millennials) == 170 millennials_no_math = millennials - (Enroll & 'dept="MATH"') - assert_true(len(millennials_no_math) == 53) + assert len(millennials_no_math) == 53 inactive_students = Student - (Enroll & CurrentTerm) - assert_true(len(inactive_students) == 204) + assert len(inactive_students) == 204 # Females who are active or major in non-math special = Student & [Enroll, StudentMajor - {"dept": "MATH"}] & {"sex": "F"} - assert_true(len(special) == 158) + assert len(special) == 158 def test_advanced_join(): """test advanced joins""" # Students with ungraded courses in current term ungraded = Enroll * CurrentTerm - Grade - assert_true(len(ungraded) == 34) + assert len(ungraded) == 34 # add major major = StudentMajor.proj(..., major="dept") - assert_true(len(ungraded.join(major, left=True)) == len(ungraded) == 34) - assert_true(len(ungraded.join(major)) == len(ungraded & major) == 31) + assert len(ungraded.join(major, left=True)) == len(ungraded) == 34 + assert len(ungraded.join(major)) == len(ungraded & major) == 31 def test_union(): # effective left join Enroll with Major q1 = (Enroll & "student_id=101") + (Enroll & "student_id=102") q2 = Enroll & "student_id in (101, 102)" - assert_true(len(q1) == len(q2) == 41) + assert len(q1) == len(q2) == 41 def test_aggr(): avg_grade_per_course = Course.aggr( Grade * LetterGrade, avg_grade="round(avg(points), 2)" ) - assert_true(len(avg_grade_per_course) == 45) + assert len(avg_grade_per_course) == 45 # GPA student_gpa = Student.aggr( Course * Grade * LetterGrade, gpa="round(sum(points*credits)/sum(credits), 2)" ) gpa = student_gpa.fetch("gpa") - assert_true(len(gpa) == 261) - assert_true(2 < 
gpa.mean() < 3) + assert len(gpa) == 261 + assert 2 < gpa.mean() < 3 # Sections in biology department with zero students in them section = (Section & {"dept": "BIOL"}).aggr( Enroll, n="count(student_id)", keep_all_rows=True ) & "n=0" - assert_true(len(set(section.fetch("dept"))) == 1) - assert_true(len(section) == 17) - assert_true(bool(section)) + assert len(set(section.fetch("dept"))) == 1 + assert len(section) == 17 + assert bool(section) # Test correct use of ellipses in a similar query section = (Section & {"dept": "BIOL"}).aggr( Grade, ..., n="count(student_id)", keep_all_rows=True ) & "n>1" - assert_false( - any( + assert not any( name in section.heading.names for name in Grade.heading.secondary_attributes ) - ) - assert_true(len(set(section.fetch("dept"))) == 1) - assert_true(len(section) == 168) - assert_true(bool(section)) + assert len(set(section.fetch("dept"))) == 1 + assert len(section) == 168 + assert bool(section) From 7577cfed2942ed7a6e3d047f8f1e313147b8bf15 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 15:44:34 -0700 Subject: [PATCH 163/212] Checkout conftest from dev-tests-plat-166-schema --- tests/conftest.py | 28 +++++++++++++++++----------- 1 file changed, 17 insertions(+), 11 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 5a38eef90..a9474b502 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -56,6 +56,15 @@ def enable_filepath_feature(monkeypatch): monkeypatch.delenv(FILEPATH_FEATURE_SWITCH, raising=True) +@pytest.fixture(scope="session") +def db_creds_test() -> Dict: + return dict( + host=os.getenv("DJ_TEST_HOST", "fakeservices.datajoint.io"), + user=os.getenv("DJ_TEST_USER", "datajoint"), + password=os.getenv("DJ_TEST_PASSWORD", "datajoint"), + ) + + @pytest.fixture(scope="session") def db_creds_root() -> Dict: return dict( @@ -142,12 +151,9 @@ def connection_root(connection_root_bare): @pytest.fixture(scope="session") -def connection_test(connection_root): +def connection_test(connection_root, db_creds_test): """Test user database connection.""" database = f"{PREFIX}%%" - credentials = dict( - host=os.getenv("DJ_HOST"), user="datajoint", password="datajoint" - ) permission = "ALL PRIVILEGES" # Create MySQL users @@ -157,14 +163,14 @@ def connection_test(connection_root): # create user if necessary on mysql8 connection_root.query( f""" - CREATE USER IF NOT EXISTS '{credentials["user"]}'@'%%' - IDENTIFIED BY '{credentials["password"]}'; + CREATE USER IF NOT EXISTS '{db_creds_test["user"]}'@'%%' + IDENTIFIED BY '{db_creds_test["password"]}'; """ ) connection_root.query( f""" GRANT {permission} ON `{database}`.* - TO '{credentials["user"]}'@'%%'; + TO '{db_creds_test["user"]}'@'%%'; """ ) else: @@ -173,14 +179,14 @@ def connection_test(connection_root): connection_root.query( f""" GRANT {permission} ON `{database}`.* - TO '{credentials["user"]}'@'%%' - IDENTIFIED BY '{credentials["password"]}'; + TO '{db_creds_test["user"]}'@'%%' + IDENTIFIED BY '{db_creds_test["password"]}'; """ ) - connection = dj.Connection(**credentials) + connection = dj.Connection(**db_creds_test) yield connection - connection_root.query(f"""DROP USER `{credentials["user"]}`""") + connection_root.query(f"""DROP USER `{db_creds_test["user"]}`""") connection.close() From b40d4ecb10629ea7e77365c82f5502552fe37801 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 15:57:06 -0700 Subject: [PATCH 164/212] cp tests_old/data to tests --- tests/data/Course.csv | 46 + tests/data/CurrentTerm.csv | 2 + tests/data/Department.csv | 9 + 
tests/data/Enroll.csv | 3365 +++++++++++++++++++++++++++++++++++ tests/data/Grade.csv | 3028 +++++++++++++++++++++++++++++++ tests/data/Section.csv | 757 ++++++++ tests/data/Student.csv | 301 ++++ tests/data/StudentMajor.csv | 227 +++ tests/data/Term.csv | 19 + 9 files changed, 7754 insertions(+) create mode 100644 tests/data/Course.csv create mode 100644 tests/data/CurrentTerm.csv create mode 100644 tests/data/Department.csv create mode 100644 tests/data/Enroll.csv create mode 100644 tests/data/Grade.csv create mode 100644 tests/data/Section.csv create mode 100644 tests/data/Student.csv create mode 100644 tests/data/StudentMajor.csv create mode 100644 tests/data/Term.csv diff --git a/tests/data/Course.csv b/tests/data/Course.csv new file mode 100644 index 000000000..a308d8d6a --- /dev/null +++ b/tests/data/Course.csv @@ -0,0 +1,46 @@ +dept,course,course_name,credits +BIOL,1006,World of Dinosaurs,3.0 +BIOL,1010,Biology in the 21st Century,3.0 +BIOL,1030,Human Biology,3.0 +BIOL,1210,Principles of Biology,4.0 +BIOL,2010,Evolution & Diversity of Life,3.0 +BIOL,2020,Principles of Cell Biology,3.0 +BIOL,2021,Principles of Cell Science,4.0 +BIOL,2030,Principles of Genetics,3.0 +BIOL,2210,Human Genetics,3.0 +BIOL,2325,Human Anatomy,4.0 +BIOL,2330,Plants & Society,3.0 +BIOL,2355,Field Botany,2.0 +BIOL,2420,Human Physiology,4.0 +CS,1030,Foundations of Computer Science,3.0 +CS,1410,Introduction to Object-Oriented Programming,4.0 +CS,2100,Discrete Structures,3.0 +CS,2420,Introduction to Algorithms & Data Structures,4.0 +CS,3100,Models of Computation,3.0 +CS,3200,Introduction to Scientific Computing,3.0 +CS,3500,Software Practice,4.0 +CS,3505,Software Practice II,3.0 +CS,3810,Computer Organization,4.0 +CS,4000,Senior Capstone Project - Design Phase,3.0 +CS,4150,Algorithms,3.0 +CS,4400,Computer Systems,4.0 +CS,4500,Senior Capstone Project,3.0 +CS,4940,Undergraduate Research,3.0 +CS,4970,Computer Science Bachelors Thesis,3.0 +MATH,1210,Calculus I,4.0 +MATH,1220,Calculus II,4.0 +MATH,1250,Calculus for AP Students I,4.0 +MATH,1260,Calculus for AP Students II,4.0 +MATH,2210,Calculus III,3.0 +MATH,2270,Linear Algebra,4.0 +MATH,2280,Introduction to Differential Equations,4.0 +MATH,3210,Foundations of Analysis I,4.0 +MATH,3220,Foundations of Analysis II,4.0 +PHYS,2040,Classical Theoretical Physics II,4.0 +PHYS,2060,Quantum Mechanics,3.0 +PHYS,2100,General Relativity and Cosmology,3.0 +PHYS,2140,Statistical Mechanics,4.0 +PHYS,2210,Physics for Scientists and Engineers I,4.0 +PHYS,2220,Physics for Scientists and Engineers II,4.0 +PHYS,3210,Physics for Scientists I (Honors),4.0 +PHYS,3220,Physics for Scientists II (Honors),4.0 diff --git a/tests/data/CurrentTerm.csv b/tests/data/CurrentTerm.csv new file mode 100644 index 000000000..037d9b344 --- /dev/null +++ b/tests/data/CurrentTerm.csv @@ -0,0 +1,2 @@ +omega,term_year,term +1,2020,Fall diff --git a/tests/data/Department.csv b/tests/data/Department.csv new file mode 100644 index 000000000..5a7857eef --- /dev/null +++ b/tests/data/Department.csv @@ -0,0 +1,9 @@ +dept,dept_name,dept_address,dept_phone +BIOL,Life Sciences,"931 Eric Trail Suite 331 +Lake Scott, CT 53527",(238)497-9162x0223 +CS,Computer Science,"0104 Santos Hill Apt. 
497 +Michelleland, MT 94473",3828723244 +MATH,Mathematics,"8358 Bryan Ports +Lake Matthew, SC 36983",+1-461-767-9298x842 +PHYS,Physics,"7744 Haley Meadows Suite 661 +Lake Eddie, CT 51544",4097052774 diff --git a/tests/data/Enroll.csv b/tests/data/Enroll.csv new file mode 100644 index 000000000..fc9a6b2a0 --- /dev/null +++ b/tests/data/Enroll.csv @@ -0,0 +1,3365 @@ +student_id,dept,course,term_year,term,section +394,BIOL,1006,2015,Spring,b +138,BIOL,1006,2015,Summer,a +182,BIOL,1006,2015,Summer,a +246,BIOL,1006,2015,Summer,a +249,BIOL,1006,2015,Summer,b +290,BIOL,1006,2015,Summer,b +115,BIOL,1006,2016,Spring,a +160,BIOL,1006,2016,Spring,a +176,BIOL,1006,2016,Spring,a +276,BIOL,1006,2016,Spring,a +285,BIOL,1006,2016,Spring,a +123,BIOL,1006,2016,Spring,b +312,BIOL,1006,2016,Summer,a +179,BIOL,1006,2016,Summer,b +214,BIOL,1006,2016,Summer,d +389,BIOL,1006,2016,Summer,d +124,BIOL,1006,2017,Fall,a +128,BIOL,1006,2017,Fall,a +199,BIOL,1006,2017,Fall,a +262,BIOL,1006,2017,Fall,a +288,BIOL,1006,2017,Fall,a +321,BIOL,1006,2017,Fall,a +326,BIOL,1006,2017,Fall,a +345,BIOL,1006,2017,Fall,a +392,BIOL,1006,2017,Fall,a +165,BIOL,1006,2017,Fall,b +229,BIOL,1006,2017,Fall,b +318,BIOL,1006,2017,Fall,b +107,BIOL,1006,2018,Spring,a +117,BIOL,1006,2018,Spring,a +164,BIOL,1006,2018,Spring,a +362,BIOL,1006,2018,Spring,a +366,BIOL,1006,2018,Spring,a +397,BIOL,1006,2018,Spring,a +227,BIOL,1006,2018,Spring,b +261,BIOL,1006,2018,Spring,b +270,BIOL,1006,2018,Spring,b +292,BIOL,1006,2018,Spring,b +294,BIOL,1006,2018,Spring,b +348,BIOL,1006,2018,Spring,b +373,BIOL,1006,2018,Spring,b +375,BIOL,1006,2018,Spring,b +102,BIOL,1006,2018,Fall,a +113,BIOL,1006,2018,Fall,a +131,BIOL,1006,2018,Fall,a +296,BIOL,1006,2018,Fall,a +391,BIOL,1006,2018,Fall,a +127,BIOL,1006,2019,Spring,a +139,BIOL,1006,2019,Summer,a +143,BIOL,1006,2019,Summer,a +178,BIOL,1006,2019,Summer,a +234,BIOL,1006,2019,Summer,a +247,BIOL,1006,2019,Summer,a +259,BIOL,1006,2019,Summer,a +303,BIOL,1006,2019,Summer,a +329,BIOL,1006,2019,Summer,a +356,BIOL,1006,2019,Summer,a +109,BIOL,1006,2019,Fall,a +173,BIOL,1006,2019,Fall,a +187,BIOL,1006,2019,Fall,a +364,BIOL,1006,2019,Fall,a +169,BIOL,1006,2019,Fall,b +332,BIOL,1006,2019,Fall,b +398,BIOL,1006,2019,Fall,b +142,BIOL,1006,2020,Spring,a +194,BIOL,1006,2020,Spring,a +267,BIOL,1006,2020,Spring,a +330,BIOL,1006,2020,Spring,a +340,BIOL,1006,2020,Spring,a +365,BIOL,1006,2020,Spring,a +129,BIOL,1006,2020,Fall,a +222,BIOL,1006,2020,Fall,a +241,BIOL,1006,2020,Fall,a +297,BIOL,1006,2020,Fall,a +313,BIOL,1006,2020,Fall,a +333,BIOL,1006,2020,Fall,a +376,BIOL,1006,2020,Fall,a +379,BIOL,1006,2020,Fall,a +390,BIOL,1006,2020,Fall,a +220,BIOL,1006,2020,Fall,b +255,BIOL,1006,2020,Fall,b +272,BIOL,1006,2020,Fall,b +277,BIOL,1006,2020,Fall,b +313,BIOL,1006,2020,Fall,b +371,BIOL,1006,2020,Fall,b +378,BIOL,1006,2020,Fall,b +118,BIOL,1006,2020,Fall,c +235,BIOL,1006,2020,Fall,c +271,BIOL,1006,2020,Fall,c +289,BIOL,1006,2020,Fall,c +313,BIOL,1006,2020,Fall,c +378,BIOL,1006,2020,Fall,c +182,BIOL,1010,2015,Summer,a +276,BIOL,1010,2015,Summer,a +277,BIOL,1010,2015,Summer,a +382,BIOL,1010,2015,Summer,a +123,BIOL,1010,2015,Summer,b +177,BIOL,1010,2015,Summer,b +382,BIOL,1010,2015,Summer,b +277,BIOL,1010,2015,Summer,c +301,BIOL,1010,2015,Summer,c +163,BIOL,1010,2015,Summer,d +179,BIOL,1010,2015,Fall,a +210,BIOL,1010,2015,Fall,a +211,BIOL,1010,2015,Fall,b +290,BIOL,1010,2015,Fall,b +211,BIOL,1010,2015,Fall,c +176,BIOL,1010,2016,Summer,a +192,BIOL,1010,2016,Summer,a +195,BIOL,1010,2016,Summer,a +282,BIOL,1010,2016,Summer,a 
+317,BIOL,1010,2016,Summer,a +249,BIOL,1010,2017,Spring,a +278,BIOL,1010,2017,Spring,a +312,BIOL,1010,2017,Spring,a +373,BIOL,1010,2017,Spring,a +391,BIOL,1010,2017,Spring,a +397,BIOL,1010,2017,Spring,a +151,BIOL,1010,2017,Summer,a +321,BIOL,1010,2017,Summer,a +353,BIOL,1010,2017,Summer,a +102,BIOL,1010,2018,Summer,a +105,BIOL,1010,2018,Summer,a +214,BIOL,1010,2018,Summer,a +260,BIOL,1010,2018,Summer,a +294,BIOL,1010,2018,Summer,a +318,BIOL,1010,2018,Summer,a +368,BIOL,1010,2018,Summer,a +392,BIOL,1010,2018,Summer,a +399,BIOL,1010,2018,Summer,a +133,BIOL,1010,2018,Summer,b +173,BIOL,1010,2018,Summer,b +197,BIOL,1010,2018,Summer,b +238,BIOL,1010,2018,Summer,b +275,BIOL,1010,2018,Summer,b +285,BIOL,1010,2018,Summer,b +292,BIOL,1010,2018,Summer,b +311,BIOL,1010,2018,Summer,b +313,BIOL,1010,2018,Summer,b +366,BIOL,1010,2018,Summer,b +378,BIOL,1010,2018,Summer,b +259,BIOL,1010,2018,Summer,c +262,BIOL,1010,2018,Summer,c +309,BIOL,1010,2018,Summer,c +313,BIOL,1010,2018,Summer,c +329,BIOL,1010,2018,Summer,c +342,BIOL,1010,2018,Summer,c +374,BIOL,1010,2018,Summer,c +169,BIOL,1010,2018,Fall,a +239,BIOL,1010,2018,Fall,a +252,BIOL,1010,2018,Fall,a +258,BIOL,1010,2018,Fall,a +345,BIOL,1010,2018,Fall,a +362,BIOL,1010,2018,Fall,a +164,BIOL,1010,2018,Fall,b +298,BIOL,1010,2018,Fall,b +139,BIOL,1010,2019,Spring,a +372,BIOL,1010,2019,Spring,a +375,BIOL,1010,2019,Spring,a +109,BIOL,1010,2019,Spring,b +165,BIOL,1010,2019,Spring,b +217,BIOL,1010,2019,Spring,b +228,BIOL,1010,2019,Spring,b +231,BIOL,1010,2019,Spring,b +240,BIOL,1010,2019,Spring,c +332,BIOL,1010,2019,Spring,c +247,BIOL,1010,2019,Spring,d +314,BIOL,1010,2019,Spring,d +379,BIOL,1010,2019,Spring,d +113,BIOL,1010,2020,Summer,a +122,BIOL,1010,2020,Summer,a +148,BIOL,1010,2020,Summer,a +153,BIOL,1010,2020,Summer,a +178,BIOL,1010,2020,Summer,a +200,BIOL,1010,2020,Summer,a +256,BIOL,1010,2020,Summer,a +270,BIOL,1010,2020,Summer,a +340,BIOL,1010,2020,Summer,a +108,BIOL,1010,2020,Summer,b +118,BIOL,1010,2020,Summer,b +122,BIOL,1010,2020,Summer,b +175,BIOL,1010,2020,Summer,b +244,BIOL,1010,2020,Summer,b +257,BIOL,1010,2020,Summer,b +270,BIOL,1010,2020,Summer,b +306,BIOL,1010,2020,Summer,b +348,BIOL,1010,2020,Summer,b +384,BIOL,1010,2020,Summer,b +112,BIOL,1010,2020,Summer,c +131,BIOL,1010,2020,Summer,c +146,BIOL,1010,2020,Summer,c +185,BIOL,1010,2020,Summer,c +270,BIOL,1010,2020,Summer,c +348,BIOL,1010,2020,Summer,c +371,BIOL,1010,2020,Summer,c +390,BIOL,1010,2020,Summer,c +398,BIOL,1010,2020,Summer,c +100,BIOL,1010,2020,Summer,d +121,BIOL,1010,2020,Summer,d +244,BIOL,1010,2020,Summer,d +254,BIOL,1010,2020,Summer,d +263,BIOL,1010,2020,Summer,d +270,BIOL,1010,2020,Summer,d +300,BIOL,1010,2020,Summer,d +323,BIOL,1010,2020,Summer,d +340,BIOL,1010,2020,Summer,d +371,BIOL,1010,2020,Summer,d +211,BIOL,1030,2015,Spring,c +379,BIOL,1030,2015,Spring,d +204,BIOL,1030,2015,Summer,a +246,BIOL,1030,2015,Summer,a +321,BIOL,1030,2015,Summer,a +117,BIOL,1030,2016,Spring,a +273,BIOL,1030,2016,Spring,a +282,BIOL,1030,2016,Spring,a +392,BIOL,1030,2016,Spring,a +160,BIOL,1030,2016,Summer,a +195,BIOL,1030,2016,Summer,a +270,BIOL,1030,2016,Summer,a +277,BIOL,1030,2016,Summer,a +290,BIOL,1030,2016,Summer,a +329,BIOL,1030,2016,Summer,a +395,BIOL,1030,2016,Summer,a +120,BIOL,1030,2016,Fall,a +176,BIOL,1030,2016,Fall,a +213,BIOL,1030,2016,Fall,a +276,BIOL,1030,2016,Fall,a +115,BIOL,1030,2017,Spring,a +257,BIOL,1030,2017,Spring,a +299,BIOL,1030,2017,Spring,a +313,BIOL,1030,2017,Spring,a +214,BIOL,1030,2017,Spring,b +243,BIOL,1030,2017,Spring,b +374,BIOL,1030,2017,Spring,b 
+151,BIOL,1030,2017,Spring,c +215,BIOL,1030,2017,Spring,c +257,BIOL,1030,2017,Spring,c +335,BIOL,1030,2017,Spring,c +348,BIOL,1030,2017,Spring,c +388,BIOL,1030,2017,Spring,c +132,BIOL,1030,2018,Summer,a +197,BIOL,1030,2018,Summer,a +285,BIOL,1030,2018,Summer,a +372,BIOL,1030,2018,Summer,a +378,BIOL,1030,2018,Summer,a +102,BIOL,1030,2018,Fall,a +183,BIOL,1030,2018,Fall,a +199,BIOL,1030,2018,Fall,a +230,BIOL,1030,2018,Fall,a +253,BIOL,1030,2018,Fall,a +259,BIOL,1030,2018,Fall,a +275,BIOL,1030,2018,Fall,a +387,BIOL,1030,2018,Fall,a +391,BIOL,1030,2018,Fall,a +179,BIOL,1030,2019,Spring,a +333,BIOL,1030,2019,Spring,a +139,BIOL,1030,2019,Spring,b +217,BIOL,1030,2019,Spring,b +258,BIOL,1030,2019,Spring,b +143,BIOL,1030,2019,Spring,c +177,BIOL,1030,2019,Spring,c +248,BIOL,1030,2019,Spring,c +256,BIOL,1030,2019,Spring,c +258,BIOL,1030,2019,Spring,c +298,BIOL,1030,2019,Spring,c +307,BIOL,1030,2019,Spring,c +318,BIOL,1030,2019,Spring,c +375,BIOL,1030,2019,Spring,c +397,BIOL,1030,2019,Spring,c +231,BIOL,1030,2019,Spring,d +384,BIOL,1030,2019,Spring,d +128,BIOL,1030,2019,Summer,a +167,BIOL,1030,2019,Summer,a +260,BIOL,1030,2019,Summer,a +314,BIOL,1030,2019,Summer,a +347,BIOL,1030,2019,Summer,a +380,BIOL,1030,2019,Summer,a +100,BIOL,1030,2020,Spring,a +135,BIOL,1030,2020,Spring,a +153,BIOL,1030,2020,Spring,a +254,BIOL,1030,2020,Spring,a +292,BIOL,1030,2020,Spring,a +325,BIOL,1030,2020,Spring,a +341,BIOL,1030,2020,Spring,a +109,BIOL,1030,2020,Summer,a +113,BIOL,1030,2020,Summer,a +123,BIOL,1030,2020,Summer,a +131,BIOL,1030,2020,Summer,a +164,BIOL,1030,2020,Summer,a +170,BIOL,1030,2020,Summer,a +185,BIOL,1030,2020,Summer,a +332,BIOL,1030,2020,Summer,a +340,BIOL,1030,2020,Summer,a +360,BIOL,1030,2020,Summer,a +371,BIOL,1030,2020,Summer,a +386,BIOL,1030,2020,Summer,a +144,BIOL,1210,2016,Spring,a +182,BIOL,1210,2016,Spring,a +270,BIOL,1210,2016,Spring,a +301,BIOL,1210,2016,Spring,a +115,BIOL,1210,2017,Spring,a +117,BIOL,1210,2017,Spring,a +210,BIOL,1210,2017,Spring,a +278,BIOL,1210,2017,Spring,a +299,BIOL,1210,2017,Spring,a +372,BIOL,1210,2017,Spring,a +377,BIOL,1210,2017,Spring,a +275,BIOL,1210,2017,Summer,a +282,BIOL,1210,2017,Summer,a +120,BIOL,1210,2018,Spring,a +131,BIOL,1210,2018,Spring,a +134,BIOL,1210,2018,Spring,a +177,BIOL,1210,2018,Spring,a +332,BIOL,1210,2018,Spring,a +220,BIOL,1210,2018,Fall,a +255,BIOL,1210,2018,Fall,a +151,BIOL,1210,2018,Fall,b +179,BIOL,1210,2018,Fall,b +366,BIOL,1210,2018,Fall,b +173,BIOL,1210,2019,Spring,a +230,BIOL,1210,2019,Spring,a +256,BIOL,1210,2019,Spring,a +305,BIOL,1210,2019,Spring,a +307,BIOL,1210,2019,Spring,a +342,BIOL,1210,2019,Spring,a +356,BIOL,1210,2019,Spring,a +193,BIOL,2010,2015,Spring,a +182,BIOL,2010,2015,Summer,a +195,BIOL,2010,2015,Summer,a +377,BIOL,2010,2015,Summer,a +336,BIOL,2010,2015,Fall,a +123,BIOL,2010,2017,Summer,a +127,BIOL,2010,2017,Summer,a +173,BIOL,2010,2017,Summer,a +259,BIOL,2010,2017,Summer,a +277,BIOL,2010,2017,Summer,a +120,BIOL,2010,2017,Fall,a +208,BIOL,2010,2017,Fall,a +262,BIOL,2010,2017,Fall,a +304,BIOL,2010,2017,Fall,a +355,BIOL,2010,2017,Fall,a +372,BIOL,2010,2017,Fall,a +391,BIOL,2010,2017,Fall,a +134,BIOL,2010,2018,Spring,a +197,BIOL,2010,2018,Spring,a +210,BIOL,2010,2018,Spring,a +214,BIOL,2010,2018,Spring,a +255,BIOL,2010,2018,Spring,a +270,BIOL,2010,2018,Spring,a +285,BIOL,2010,2018,Spring,a +348,BIOL,2010,2018,Spring,a +373,BIOL,2010,2018,Spring,a +385,BIOL,2010,2018,Spring,a +309,BIOL,2010,2019,Fall,a +312,BIOL,2010,2019,Fall,a +313,BIOL,2010,2019,Fall,a +316,BIOL,2010,2019,Fall,a +109,BIOL,2010,2020,Spring,a 
+113,BIOL,2010,2020,Spring,a +135,BIOL,2010,2020,Spring,a +169,BIOL,2010,2020,Spring,a +223,BIOL,2010,2020,Spring,a +231,BIOL,2010,2020,Spring,a +384,BIOL,2010,2020,Spring,a +386,BIOL,2010,2020,Spring,a +108,BIOL,2010,2020,Spring,b +164,BIOL,2010,2020,Spring,b +178,BIOL,2010,2020,Spring,b +179,BIOL,2010,2020,Spring,b +292,BIOL,2010,2020,Spring,b +146,BIOL,2010,2020,Summer,a +166,BIOL,2010,2020,Summer,a +167,BIOL,2010,2020,Summer,a +170,BIOL,2010,2020,Summer,a +175,BIOL,2010,2020,Summer,a +221,BIOL,2010,2020,Summer,a +228,BIOL,2010,2020,Summer,a +242,BIOL,2010,2020,Summer,a +248,BIOL,2010,2020,Summer,a +250,BIOL,2010,2020,Summer,a +251,BIOL,2010,2020,Summer,a +256,BIOL,2010,2020,Summer,a +311,BIOL,2010,2020,Summer,a +333,BIOL,2010,2020,Summer,a +364,BIOL,2010,2020,Summer,a +375,BIOL,2010,2020,Summer,a +378,BIOL,2010,2020,Summer,a +128,BIOL,2010,2020,Summer,b +177,BIOL,2010,2020,Summer,b +228,BIOL,2010,2020,Summer,b +235,BIOL,2010,2020,Summer,b +293,BIOL,2010,2020,Summer,b +296,BIOL,2010,2020,Summer,b +306,BIOL,2010,2020,Summer,b +363,BIOL,2010,2020,Summer,b +390,BIOL,2010,2020,Summer,b +120,BIOL,2020,2015,Summer,a +144,BIOL,2020,2015,Summer,a +210,BIOL,2020,2015,Summer,a +126,BIOL,2020,2015,Fall,a +140,BIOL,2020,2015,Fall,a +374,BIOL,2020,2015,Fall,b +392,BIOL,2020,2015,Fall,b +176,BIOL,2020,2015,Fall,c +182,BIOL,2020,2015,Fall,c +295,BIOL,2020,2015,Fall,c +377,BIOL,2020,2015,Fall,c +192,BIOL,2020,2015,Fall,d +115,BIOL,2020,2016,Spring,a +117,BIOL,2020,2016,Spring,a +212,BIOL,2020,2016,Spring,a +214,BIOL,2020,2016,Spring,a +313,BIOL,2020,2016,Spring,a +357,BIOL,2020,2016,Spring,a +123,BIOL,2020,2018,Spring,a +129,BIOL,2020,2018,Spring,a +139,BIOL,2020,2018,Spring,a +285,BIOL,2020,2018,Spring,a +292,BIOL,2020,2018,Spring,a +321,BIOL,2020,2018,Spring,a +332,BIOL,2020,2018,Spring,a +152,BIOL,2020,2018,Fall,a +158,BIOL,2020,2018,Fall,a +163,BIOL,2020,2018,Fall,a +165,BIOL,2020,2018,Fall,a +177,BIOL,2020,2018,Fall,a +183,BIOL,2020,2018,Fall,a +199,BIOL,2020,2018,Fall,a +255,BIOL,2020,2018,Fall,a +257,BIOL,2020,2018,Fall,a +261,BIOL,2020,2018,Fall,a +270,BIOL,2020,2018,Fall,a +274,BIOL,2020,2018,Fall,a +276,BIOL,2020,2018,Fall,a +399,BIOL,2020,2018,Fall,a +100,BIOL,2020,2018,Fall,b +113,BIOL,2020,2018,Fall,b +260,BIOL,2020,2018,Fall,b +262,BIOL,2020,2018,Fall,b +267,BIOL,2020,2018,Fall,b +344,BIOL,2020,2018,Fall,b +345,BIOL,2020,2018,Fall,b +373,BIOL,2020,2018,Fall,b +378,BIOL,2020,2018,Fall,b +362,BIOL,2020,2018,Fall,c +387,BIOL,2020,2018,Fall,c +101,BIOL,2020,2018,Fall,d +231,BIOL,2020,2018,Fall,d +288,BIOL,2020,2018,Fall,d +325,BIOL,2020,2018,Fall,d +342,BIOL,2020,2018,Fall,d +379,BIOL,2020,2018,Fall,d +102,BIOL,2020,2019,Summer,a +119,BIOL,2020,2019,Summer,a +289,BIOL,2020,2019,Summer,a +293,BIOL,2020,2019,Summer,a +307,BIOL,2020,2019,Summer,a +282,BIOL,2021,2015,Spring,a +377,BIOL,2021,2015,Spring,a +394,BIOL,2021,2015,Spring,a +249,BIOL,2021,2015,Summer,b +290,BIOL,2021,2015,Summer,c +179,BIOL,2021,2016,Fall,a +243,BIOL,2021,2016,Fall,a +268,BIOL,2021,2016,Fall,a +270,BIOL,2021,2016,Fall,a +379,BIOL,2021,2016,Fall,a +115,BIOL,2021,2017,Summer,a +182,BIOL,2021,2017,Summer,a +348,BIOL,2021,2017,Summer,a +388,BIOL,2021,2017,Summer,a +207,BIOL,2021,2017,Fall,a +264,BIOL,2021,2017,Fall,a +292,BIOL,2021,2017,Fall,a +345,BIOL,2021,2017,Fall,a +102,BIOL,2021,2018,Spring,a +177,BIOL,2021,2018,Spring,a +311,BIOL,2021,2018,Spring,a +361,BIOL,2021,2018,Spring,a +373,BIOL,2021,2018,Spring,a +117,BIOL,2021,2018,Summer,a +169,BIOL,2021,2018,Summer,a +257,BIOL,2021,2018,Summer,a 
+312,BIOL,2021,2018,Summer,a +318,BIOL,2021,2018,Summer,a +344,BIOL,2021,2018,Summer,a +356,BIOL,2021,2018,Summer,a +366,BIOL,2021,2018,Summer,a +378,BIOL,2021,2018,Summer,a +127,BIOL,2021,2018,Fall,a +152,BIOL,2021,2018,Fall,a +199,BIOL,2021,2018,Fall,a +239,BIOL,2021,2018,Fall,a +256,BIOL,2021,2018,Fall,a +152,BIOL,2021,2018,Fall,b +309,BIOL,2021,2018,Fall,b +397,BIOL,2021,2018,Fall,b +248,BIOL,2021,2018,Fall,c +296,BIOL,2021,2018,Fall,c +342,BIOL,2021,2018,Fall,c +384,BIOL,2021,2018,Fall,c +133,BIOL,2021,2018,Fall,d +296,BIOL,2021,2018,Fall,d +196,BIOL,2021,2019,Spring,a +399,BIOL,2021,2019,Spring,a +139,BIOL,2021,2019,Spring,b +178,BIOL,2021,2019,Spring,b +238,BIOL,2021,2019,Spring,b +313,BIOL,2021,2019,Spring,b +107,BIOL,2021,2019,Fall,a +164,BIOL,2021,2019,Fall,a +300,BIOL,2021,2019,Fall,a +303,BIOL,2021,2019,Fall,a +340,BIOL,2021,2019,Fall,a +364,BIOL,2021,2019,Fall,a +140,BIOL,2030,2015,Fall,a +212,BIOL,2030,2015,Fall,a +215,BIOL,2030,2015,Fall,a +249,BIOL,2030,2015,Fall,a +379,BIOL,2030,2015,Fall,a +119,BIOL,2030,2016,Summer,a +163,BIOL,2030,2016,Summer,b +207,BIOL,2030,2016,Summer,b +392,BIOL,2030,2016,Summer,b +151,BIOL,2030,2016,Fall,a +213,BIOL,2030,2016,Fall,a +277,BIOL,2030,2016,Fall,a +314,BIOL,2030,2016,Fall,a +397,BIOL,2030,2016,Fall,a +123,BIOL,2030,2017,Spring,a +179,BIOL,2030,2017,Spring,a +182,BIOL,2030,2017,Spring,a +257,BIOL,2030,2017,Spring,a +313,BIOL,2030,2017,Spring,a +374,BIOL,2030,2017,Spring,a +377,BIOL,2030,2017,Spring,a +243,BIOL,2030,2017,Spring,b +246,BIOL,2030,2017,Spring,b +285,BIOL,2030,2017,Spring,b +348,BIOL,2030,2017,Spring,b +372,BIOL,2030,2017,Spring,b +378,BIOL,2030,2017,Spring,c +120,BIOL,2030,2017,Spring,d +285,BIOL,2030,2017,Spring,d +355,BIOL,2030,2017,Spring,d +393,BIOL,2030,2017,Spring,d +230,BIOL,2030,2018,Summer,a +342,BIOL,2030,2018,Summer,a +373,BIOL,2030,2018,Summer,a +101,BIOL,2030,2018,Summer,b +132,BIOL,2030,2018,Summer,b +214,BIOL,2030,2018,Summer,b +276,BIOL,2030,2018,Summer,b +371,BIOL,2030,2018,Summer,b +312,BIOL,2030,2019,Summer,a +318,BIOL,2030,2019,Summer,a +100,BIOL,2030,2019,Summer,b +113,BIOL,2030,2019,Summer,b +173,BIOL,2030,2019,Summer,b +228,BIOL,2030,2019,Summer,b +270,BIOL,2030,2019,Summer,b +309,BIOL,2030,2019,Summer,b +362,BIOL,2030,2019,Summer,b +396,BIOL,2030,2019,Summer,b +109,BIOL,2030,2019,Summer,c +135,BIOL,2030,2019,Summer,c +188,BIOL,2030,2019,Summer,c +247,BIOL,2030,2019,Summer,c +270,BIOL,2030,2019,Summer,c +296,BIOL,2030,2019,Summer,c +320,BIOL,2030,2019,Summer,c +399,BIOL,2030,2019,Summer,c +131,BIOL,2030,2019,Summer,d +143,BIOL,2030,2019,Summer,d +241,BIOL,2030,2019,Summer,d +300,BIOL,2030,2019,Summer,d +345,BIOL,2030,2019,Summer,d +164,BIOL,2030,2020,Spring,a +171,BIOL,2030,2020,Spring,a +366,BIOL,2030,2020,Spring,a +102,BIOL,2030,2020,Spring,b +199,BIOL,2030,2020,Spring,b +311,BIOL,2030,2020,Spring,b +347,BIOL,2030,2020,Spring,b +375,BIOL,2030,2020,Spring,b +243,BIOL,2210,2016,Summer,a +278,BIOL,2210,2016,Summer,a +312,BIOL,2210,2016,Summer,a +356,BIOL,2210,2016,Summer,a +392,BIOL,2210,2016,Summer,a +115,BIOL,2210,2017,Spring,a +231,BIOL,2210,2017,Spring,a +182,BIOL,2210,2017,Spring,b +215,BIOL,2210,2017,Spring,b +255,BIOL,2210,2017,Spring,b +309,BIOL,2210,2017,Spring,b +348,BIOL,2210,2017,Spring,b +107,BIOL,2210,2017,Spring,c +177,BIOL,2210,2017,Spring,c +215,BIOL,2210,2017,Spring,c +277,BIOL,2210,2017,Spring,c +393,BIOL,2210,2017,Spring,c +397,BIOL,2210,2017,Spring,c +151,BIOL,2210,2017,Summer,a +187,BIOL,2210,2017,Summer,a +214,BIOL,2210,2017,Summer,a +257,BIOL,2210,2017,Summer,a 
+120,BIOL,2210,2017,Summer,b +164,BIOL,2210,2017,Summer,b +259,BIOL,2210,2017,Summer,b +270,BIOL,2210,2017,Summer,b +342,BIOL,2210,2017,Summer,b +378,BIOL,2210,2017,Summer,b +387,BIOL,2210,2017,Summer,b +285,BIOL,2210,2017,Summer,c +374,BIOL,2210,2017,Summer,c +375,BIOL,2210,2017,Summer,c +128,BIOL,2210,2018,Spring,a +275,BIOL,2210,2018,Spring,a +276,BIOL,2210,2018,Spring,a +391,BIOL,2210,2018,Spring,a +131,BIOL,2210,2018,Summer,a +143,BIOL,2210,2018,Summer,a +169,BIOL,2210,2018,Summer,a +174,BIOL,2210,2018,Summer,a +239,BIOL,2210,2018,Summer,a +260,BIOL,2210,2018,Summer,a +298,BIOL,2210,2018,Summer,a +369,BIOL,2210,2018,Summer,a +227,BIOL,2210,2018,Summer,b +230,BIOL,2210,2018,Summer,b +311,BIOL,2210,2018,Summer,b +313,BIOL,2210,2018,Summer,b +173,BIOL,2210,2018,Summer,c +210,BIOL,2210,2018,Summer,c +258,BIOL,2210,2018,Summer,c +102,BIOL,2210,2019,Summer,a +179,BIOL,2210,2019,Summer,a +314,BIOL,2210,2019,Summer,a +329,BIOL,2210,2019,Summer,a +368,BIOL,2210,2019,Summer,a +377,BIOL,2210,2019,Summer,a +119,BIOL,2210,2019,Summer,b +228,BIOL,2210,2019,Summer,b +318,BIOL,2210,2019,Summer,b +386,BIOL,2210,2019,Summer,b +293,BIOL,2210,2019,Fall,a +380,BIOL,2210,2019,Fall,a +289,BIOL,2210,2019,Fall,b +293,BIOL,2210,2019,Fall,b +121,BIOL,2210,2020,Fall,a +185,BIOL,2210,2020,Fall,a +219,BIOL,2210,2020,Fall,a +220,BIOL,2210,2020,Fall,a +240,BIOL,2210,2020,Fall,a +271,BIOL,2210,2020,Fall,a +297,BIOL,2210,2020,Fall,a +347,BIOL,2210,2020,Fall,a +360,BIOL,2210,2020,Fall,a +366,BIOL,2210,2020,Fall,a +371,BIOL,2210,2020,Fall,a +373,BIOL,2210,2020,Fall,a +321,BIOL,2325,2015,Spring,a +182,BIOL,2325,2015,Fall,a +277,BIOL,2325,2015,Fall,b +290,BIOL,2325,2015,Fall,b +379,BIOL,2325,2015,Fall,b +149,BIOL,2325,2015,Fall,c +163,BIOL,2325,2015,Fall,c +192,BIOL,2325,2015,Fall,c +204,BIOL,2325,2015,Fall,c +312,BIOL,2325,2015,Fall,c +138,BIOL,2325,2016,Summer,a +357,BIOL,2325,2016,Summer,a +369,BIOL,2325,2016,Summer,a +394,BIOL,2325,2016,Summer,a +127,BIOL,2325,2017,Fall,a +385,BIOL,2325,2017,Fall,a +102,BIOL,2325,2017,Fall,b +123,BIOL,2325,2017,Fall,b +260,BIOL,2325,2017,Fall,b +296,BIOL,2325,2017,Fall,b +387,BIOL,2325,2017,Fall,b +100,BIOL,2325,2018,Spring,a +105,BIOL,2325,2018,Spring,a +119,BIOL,2325,2018,Spring,a +214,BIOL,2325,2018,Spring,a +332,BIOL,2325,2018,Spring,a +373,BIOL,2325,2018,Spring,a +374,BIOL,2325,2018,Spring,a +132,BIOL,2325,2018,Summer,a +151,BIOL,2325,2018,Summer,a +255,BIOL,2325,2018,Summer,a +262,BIOL,2325,2018,Summer,a +275,BIOL,2325,2018,Summer,a +318,BIOL,2325,2018,Summer,a +386,BIOL,2325,2018,Summer,a +393,BIOL,2325,2018,Summer,a +397,BIOL,2325,2018,Summer,a +124,BIOL,2325,2018,Fall,a +133,BIOL,2325,2018,Fall,a +164,BIOL,2325,2018,Fall,a +220,BIOL,2325,2018,Fall,a +247,BIOL,2325,2018,Fall,a +309,BIOL,2325,2018,Fall,a +129,BIOL,2325,2018,Fall,b +131,BIOL,2325,2018,Fall,b +167,BIOL,2325,2018,Fall,b +129,BIOL,2325,2018,Fall,c +217,BIOL,2325,2018,Fall,c +239,BIOL,2325,2018,Fall,c +274,BIOL,2325,2018,Fall,c +356,BIOL,2325,2018,Fall,c +399,BIOL,2325,2018,Fall,c +152,BIOL,2325,2019,Spring,a +292,BIOL,2325,2019,Spring,a +329,BIOL,2325,2019,Spring,a +333,BIOL,2325,2019,Spring,a +342,BIOL,2325,2019,Spring,a +377,BIOL,2325,2019,Spring,a +391,BIOL,2325,2019,Spring,a +270,BIOL,2325,2019,Spring,b +313,BIOL,2325,2019,Spring,b +314,BIOL,2325,2019,Spring,b +342,BIOL,2325,2019,Spring,b +120,BIOL,2325,2019,Summer,a +135,BIOL,2325,2019,Summer,a +139,BIOL,2325,2019,Summer,a +179,BIOL,2325,2019,Summer,a +276,BIOL,2325,2019,Summer,a +285,BIOL,2325,2019,Summer,a +325,BIOL,2325,2019,Summer,a 
+290,BIOL,2330,2015,Fall,a +138,BIOL,2330,2015,Fall,b +204,BIOL,2330,2015,Fall,d +312,BIOL,2330,2015,Fall,d +120,BIOL,2330,2016,Spring,a +123,BIOL,2330,2016,Spring,a +195,BIOL,2330,2016,Spring,a +282,BIOL,2330,2016,Spring,a +357,BIOL,2330,2016,Spring,a +377,BIOL,2330,2016,Spring,a +177,BIOL,2330,2016,Fall,a +270,BIOL,2330,2016,Fall,a +291,BIOL,2330,2016,Fall,a +335,BIOL,2330,2016,Fall,a +369,BIOL,2330,2016,Fall,a +393,BIOL,2330,2016,Fall,a +214,BIOL,2330,2017,Summer,a +229,BIOL,2330,2017,Summer,a +277,BIOL,2330,2017,Summer,a +309,BIOL,2330,2017,Summer,a +155,BIOL,2330,2017,Fall,a +165,BIOL,2330,2017,Fall,a +208,BIOL,2330,2017,Fall,a +342,BIOL,2330,2017,Fall,a +355,BIOL,2330,2017,Fall,a +387,BIOL,2330,2017,Fall,a +391,BIOL,2330,2017,Fall,a +187,BIOL,2330,2017,Fall,b +199,BIOL,2330,2017,Fall,b +266,BIOL,2330,2017,Fall,b +288,BIOL,2330,2017,Fall,b +392,BIOL,2330,2017,Fall,b +106,BIOL,2330,2019,Fall,a +125,BIOL,2330,2019,Fall,a +227,BIOL,2330,2019,Fall,a +240,BIOL,2330,2019,Fall,a +307,BIOL,2330,2019,Fall,a +378,BIOL,2330,2019,Fall,a +380,BIOL,2330,2019,Fall,a +183,BIOL,2330,2020,Spring,a +210,BIOL,2330,2020,Spring,a +300,BIOL,2330,2020,Spring,a +340,BIOL,2330,2020,Spring,a +348,BIOL,2330,2020,Spring,a +211,BIOL,2355,2015,Spring,a +192,BIOL,2355,2015,Summer,a +246,BIOL,2355,2015,Summer,a +377,BIOL,2355,2015,Summer,a +144,BIOL,2355,2016,Spring,a +395,BIOL,2355,2016,Spring,a +215,BIOL,2355,2016,Spring,b +321,BIOL,2355,2016,Spring,b +392,BIOL,2355,2016,Spring,b +395,BIOL,2355,2016,Spring,b +105,BIOL,2355,2017,Spring,a +145,BIOL,2355,2017,Spring,a +278,BIOL,2355,2017,Spring,a +290,BIOL,2355,2017,Spring,a +312,BIOL,2355,2017,Spring,a +105,BIOL,2355,2017,Spring,b +270,BIOL,2355,2017,Spring,b +329,BIOL,2355,2017,Spring,b +282,BIOL,2355,2017,Spring,c +299,BIOL,2355,2017,Spring,c +369,BIOL,2355,2017,Spring,c +397,BIOL,2355,2017,Spring,c +102,BIOL,2355,2017,Spring,d +163,BIOL,2355,2017,Spring,d +179,BIOL,2355,2017,Spring,d +243,BIOL,2355,2017,Spring,d +285,BIOL,2355,2017,Spring,d +329,BIOL,2355,2017,Spring,d +374,BIOL,2355,2017,Spring,d +378,BIOL,2355,2017,Spring,d +123,BIOL,2355,2017,Summer,a +318,BIOL,2355,2017,Summer,a +375,BIOL,2355,2017,Summer,a +237,BIOL,2355,2017,Fall,a +335,BIOL,2355,2017,Fall,a +366,BIOL,2355,2017,Fall,a +155,BIOL,2355,2017,Fall,b +182,BIOL,2355,2017,Fall,b +256,BIOL,2355,2017,Fall,b +264,BIOL,2355,2017,Fall,b +373,BIOL,2355,2017,Fall,b +169,BIOL,2355,2018,Spring,a +214,BIOL,2355,2018,Spring,a +230,BIOL,2355,2018,Spring,a +277,BIOL,2355,2018,Spring,a +393,BIOL,2355,2018,Spring,a +119,BIOL,2355,2018,Summer,a +128,BIOL,2355,2018,Summer,a +131,BIOL,2355,2018,Summer,a +185,BIOL,2355,2018,Summer,a +227,BIOL,2355,2018,Summer,a +262,BIOL,2355,2018,Summer,a +332,BIOL,2355,2018,Summer,a +342,BIOL,2355,2018,Summer,a +187,BIOL,2355,2018,Summer,b +276,BIOL,2355,2018,Summer,b +311,BIOL,2355,2018,Summer,b +348,BIOL,2355,2018,Summer,b +379,BIOL,2355,2018,Summer,b +391,BIOL,2355,2018,Summer,b +398,BIOL,2355,2018,Summer,b +113,BIOL,2355,2018,Summer,c +129,BIOL,2355,2018,Summer,c +274,BIOL,2355,2018,Summer,c +275,BIOL,2355,2018,Summer,c +332,BIOL,2355,2018,Summer,c +119,BIOL,2355,2018,Summer,d +207,BIOL,2355,2018,Summer,d +276,BIOL,2355,2018,Summer,d +347,BIOL,2355,2018,Summer,d +379,BIOL,2355,2018,Summer,d +387,BIOL,2355,2018,Summer,d +127,BIOL,2355,2018,Fall,a +292,BIOL,2355,2018,Fall,a +313,BIOL,2355,2018,Fall,a +314,BIOL,2355,2018,Fall,a +359,BIOL,2355,2018,Fall,a +380,BIOL,2355,2018,Fall,a +178,BIOL,2355,2019,Spring,a +247,BIOL,2355,2019,Spring,a +356,BIOL,2355,2019,Spring,a 
+151,BIOL,2355,2019,Spring,b +372,BIOL,2355,2019,Spring,b +146,BIOL,2355,2019,Spring,c +248,BIOL,2355,2019,Spring,c +255,BIOL,2355,2019,Spring,c +345,BIOL,2355,2019,Spring,c +109,BIOL,2355,2019,Spring,d +107,BIOL,2355,2020,Spring,a +118,BIOL,2355,2020,Spring,a +309,BIOL,2355,2020,Spring,a +362,BIOL,2355,2020,Spring,a +106,BIOL,2355,2020,Summer,a +122,BIOL,2355,2020,Summer,a +221,BIOL,2355,2020,Summer,a +258,BIOL,2355,2020,Summer,a +323,BIOL,2355,2020,Summer,a +333,BIOL,2355,2020,Summer,a +106,BIOL,2355,2020,Summer,b +137,BIOL,2355,2020,Summer,b +177,BIOL,2355,2020,Summer,b +244,BIOL,2355,2020,Summer,b +307,BIOL,2355,2020,Summer,b +325,BIOL,2355,2020,Summer,b +363,BIOL,2355,2020,Summer,b +120,BIOL,2355,2020,Fall,a +124,BIOL,2355,2020,Fall,a +135,BIOL,2355,2020,Fall,a +142,BIOL,2355,2020,Fall,a +167,BIOL,2355,2020,Fall,a +175,BIOL,2355,2020,Fall,a +181,BIOL,2355,2020,Fall,a +186,BIOL,2355,2020,Fall,a +220,BIOL,2355,2020,Fall,a +233,BIOL,2355,2020,Fall,a +271,BIOL,2355,2020,Fall,a +390,BIOL,2355,2020,Fall,a +177,BIOL,2420,2015,Spring,a +246,BIOL,2420,2015,Spring,b +140,BIOL,2420,2015,Spring,c +192,BIOL,2420,2015,Spring,d +374,BIOL,2420,2015,Summer,a +290,BIOL,2420,2015,Fall,a +119,BIOL,2420,2016,Spring,a +162,BIOL,2420,2016,Spring,a +115,BIOL,2420,2017,Summer,a +117,BIOL,2420,2017,Summer,a +132,BIOL,2420,2017,Summer,a +164,BIOL,2420,2017,Summer,a +182,BIOL,2420,2017,Summer,a +229,BIOL,2420,2017,Summer,a +264,BIOL,2420,2017,Summer,a +107,BIOL,2420,2017,Summer,b +123,BIOL,2420,2017,Summer,b +207,BIOL,2420,2017,Summer,b +309,BIOL,2420,2017,Summer,b +348,BIOL,2420,2017,Summer,b +169,BIOL,2420,2018,Spring,a +185,BIOL,2420,2018,Spring,a +270,BIOL,2420,2018,Spring,a +375,BIOL,2420,2018,Spring,a +120,BIOL,2420,2020,Spring,a +210,BIOL,2420,2020,Spring,a +235,BIOL,2420,2020,Spring,a +242,BIOL,2420,2020,Spring,a +248,BIOL,2420,2020,Spring,a +285,BIOL,2420,2020,Spring,a +373,BIOL,2420,2020,Spring,a +397,BIOL,2420,2020,Spring,a +121,BIOL,2420,2020,Spring,b +183,BIOL,2420,2020,Spring,b +230,BIOL,2420,2020,Spring,b +241,BIOL,2420,2020,Spring,b +248,BIOL,2420,2020,Spring,b +365,BIOL,2420,2020,Spring,b +124,BIOL,2420,2020,Summer,a +128,BIOL,2420,2020,Summer,a +131,BIOL,2420,2020,Summer,a +151,BIOL,2420,2020,Summer,a +189,BIOL,2420,2020,Summer,a +200,BIOL,2420,2020,Summer,a +292,BIOL,2420,2020,Summer,a +311,BIOL,2420,2020,Summer,a +313,BIOL,2420,2020,Summer,a +323,BIOL,2420,2020,Summer,a +333,BIOL,2420,2020,Summer,a +347,BIOL,2420,2020,Summer,a +363,BIOL,2420,2020,Summer,a +368,BIOL,2420,2020,Summer,a +122,BIOL,2420,2020,Fall,a +146,BIOL,2420,2020,Fall,a +175,BIOL,2420,2020,Fall,a +224,BIOL,2420,2020,Fall,a +255,BIOL,2420,2020,Fall,a +272,BIOL,2420,2020,Fall,a +321,BIOL,2420,2020,Fall,a +329,BIOL,2420,2020,Fall,a +342,BIOL,2420,2020,Fall,a +391,BIOL,2420,2020,Fall,a +138,CS,1030,2016,Spring,a +149,CS,1030,2016,Spring,a +162,CS,1030,2016,Spring,a +290,CS,1030,2016,Spring,a +291,CS,1030,2016,Spring,a +312,CS,1030,2016,Spring,a +348,CS,1030,2016,Spring,a +395,CS,1030,2016,Spring,a +123,CS,1030,2016,Summer,a +214,CS,1030,2016,Summer,a +245,CS,1030,2016,Summer,a +277,CS,1030,2016,Summer,a +385,CS,1030,2016,Summer,a +393,CS,1030,2016,Summer,a +102,CS,1030,2016,Fall,a +116,CS,1030,2016,Fall,a +243,CS,1030,2016,Fall,a +262,CS,1030,2016,Fall,a +321,CS,1030,2016,Fall,a +128,CS,1030,2018,Fall,a +238,CS,1030,2018,Fall,a +256,CS,1030,2018,Fall,a +305,CS,1030,2018,Fall,a +344,CS,1030,2018,Fall,a +366,CS,1030,2018,Fall,a +387,CS,1030,2018,Fall,a +143,CS,1030,2019,Fall,a +260,CS,1030,2019,Fall,a +285,CS,1030,2019,Fall,a 
+398,CS,1030,2019,Fall,a +173,CS,1030,2019,Fall,b +185,CS,1030,2019,Fall,b +210,CS,1030,2019,Fall,b +247,CS,1030,2019,Fall,b +303,CS,1030,2019,Fall,b +329,CS,1030,2019,Fall,b +359,CS,1030,2019,Fall,b +100,CS,1030,2020,Spring,a +122,CS,1030,2020,Spring,a +175,CS,1030,2020,Spring,a +221,CS,1030,2020,Spring,a +307,CS,1030,2020,Spring,a +170,CS,1030,2020,Spring,b +332,CS,1030,2020,Spring,b +391,CS,1030,2020,Spring,b +118,CS,1030,2020,Spring,c +120,CS,1030,2020,Spring,c +124,CS,1030,2020,Spring,c +135,CS,1030,2020,Spring,c +309,CS,1030,2020,Spring,c +119,CS,1030,2020,Fall,a +131,CS,1030,2020,Fall,a +167,CS,1030,2020,Fall,a +181,CS,1030,2020,Fall,a +202,CS,1030,2020,Fall,a +227,CS,1030,2020,Fall,a +255,CS,1030,2020,Fall,a +271,CS,1030,2020,Fall,a +342,CS,1030,2020,Fall,a +347,CS,1030,2020,Fall,a +215,CS,1410,2015,Summer,b +276,CS,1410,2015,Summer,b +182,CS,1410,2015,Summer,c +172,CS,1410,2015,Summer,d +270,CS,1410,2015,Summer,d +301,CS,1410,2015,Summer,d +382,CS,1410,2015,Summer,d +216,CS,1410,2016,Spring,a +335,CS,1410,2016,Spring,a +355,CS,1410,2016,Spring,a +216,CS,1410,2016,Spring,b +273,CS,1410,2016,Spring,b +291,CS,1410,2016,Spring,b +335,CS,1410,2016,Spring,b +207,CS,1410,2016,Summer,a +389,CS,1410,2016,Summer,a +394,CS,1410,2016,Summer,a +290,CS,1410,2017,Spring,a +391,CS,1410,2017,Spring,a +120,CS,1410,2018,Spring,a +231,CS,1410,2018,Spring,a +348,CS,1410,2018,Spring,a +100,CS,1410,2018,Spring,b +107,CS,1410,2018,Spring,b +109,CS,1410,2018,Spring,b +120,CS,1410,2018,Spring,b +164,CS,1410,2018,Spring,b +199,CS,1410,2018,Spring,b +203,CS,1410,2018,Spring,b +229,CS,1410,2018,Spring,b +109,CS,1410,2018,Spring,c +388,CS,1410,2018,Spring,c +199,CS,1410,2018,Spring,d +275,CS,1410,2018,Spring,d +307,CS,1410,2018,Spring,d +366,CS,1410,2018,Spring,d +392,CS,1410,2018,Spring,d +121,CS,1410,2020,Spring,a +122,CS,1410,2020,Spring,a +267,CS,1410,2020,Spring,a +312,CS,1410,2020,Spring,a +200,CS,1410,2020,Spring,b +277,CS,1410,2020,Spring,b +329,CS,1410,2020,Spring,b +375,CS,1410,2020,Spring,b +277,CS,2100,2015,Summer,a +313,CS,2100,2015,Summer,a +214,CS,2100,2016,Spring,a +276,CS,2100,2016,Spring,a +295,CS,2100,2016,Spring,a +123,CS,2100,2016,Summer,a +179,CS,2100,2016,Summer,a +160,CS,2100,2016,Summer,b +179,CS,2100,2016,Summer,b +262,CS,2100,2016,Summer,b +335,CS,2100,2016,Summer,b +374,CS,2100,2016,Summer,b +388,CS,2100,2016,Summer,b +134,CS,2100,2016,Summer,c +278,CS,2100,2016,Summer,c +256,CS,2100,2017,Spring,a +377,CS,2100,2017,Spring,a +378,CS,2100,2017,Spring,a +143,CS,2100,2017,Fall,a +163,CS,2100,2017,Fall,a +215,CS,2100,2017,Fall,a +311,CS,2100,2017,Fall,a +348,CS,2100,2017,Fall,a +356,CS,2100,2017,Fall,a +366,CS,2100,2017,Fall,a +101,CS,2100,2018,Spring,a +185,CS,2100,2018,Spring,a +255,CS,2100,2018,Spring,a +361,CS,2100,2018,Spring,a +387,CS,2100,2018,Spring,a +258,CS,2100,2018,Summer,a +261,CS,2100,2018,Summer,a +270,CS,2100,2018,Summer,a +369,CS,2100,2018,Summer,a +133,CS,2100,2018,Summer,b +182,CS,2100,2018,Summer,b +285,CS,2100,2018,Summer,b +329,CS,2100,2018,Summer,b +139,CS,2100,2018,Summer,c +258,CS,2100,2018,Summer,c +298,CS,2100,2018,Summer,c +329,CS,2100,2018,Summer,c +332,CS,2100,2018,Summer,c +345,CS,2100,2018,Summer,c +371,CS,2100,2018,Summer,c +381,CS,2100,2018,Summer,c +392,CS,2100,2018,Summer,c +393,CS,2100,2018,Summer,c +158,CS,2100,2018,Fall,a +230,CS,2100,2018,Fall,a +292,CS,2100,2018,Fall,a +373,CS,2100,2018,Fall,a +257,CS,2100,2018,Fall,b +309,CS,2100,2018,Fall,b +344,CS,2100,2018,Fall,b +384,CS,2100,2018,Fall,b +124,CS,2100,2018,Fall,c +196,CS,2100,2018,Fall,c 
+217,CS,2100,2018,Fall,c +231,CS,2100,2018,Fall,c +252,CS,2100,2018,Fall,c +257,CS,2100,2018,Fall,c +164,CS,2100,2018,Fall,d +199,CS,2100,2018,Fall,d +253,CS,2100,2018,Fall,d +259,CS,2100,2018,Fall,d +391,CS,2100,2018,Fall,d +399,CS,2100,2018,Fall,d +107,CS,2100,2019,Spring,a +240,CS,2100,2019,Spring,a +307,CS,2100,2019,Spring,a +379,CS,2100,2019,Spring,a +156,CS,2100,2019,Spring,b +312,CS,2100,2019,Spring,b +241,CS,2100,2019,Summer,a +293,CS,2100,2019,Summer,a +296,CS,2100,2019,Summer,a +314,CS,2100,2019,Summer,a +347,CS,2100,2019,Summer,a +390,CS,2100,2019,Summer,a +106,CS,2100,2019,Summer,b +131,CS,2100,2019,Summer,b +169,CS,2100,2019,Summer,b +194,CS,2100,2019,Summer,b +238,CS,2100,2019,Summer,b +359,CS,2100,2019,Summer,b +368,CS,2100,2019,Summer,b +118,CS,2100,2019,Fall,a +181,CS,2100,2019,Fall,a +223,CS,2100,2019,Fall,a +386,CS,2100,2019,Fall,a +118,CS,2100,2019,Fall,b +178,CS,2100,2019,Fall,b +235,CS,2100,2019,Fall,b +321,CS,2100,2019,Fall,b +397,CS,2100,2019,Fall,b +118,CS,2100,2019,Fall,c +146,CS,2100,2019,Fall,c +220,CS,2100,2019,Fall,c +260,CS,2100,2019,Fall,c +318,CS,2100,2019,Fall,c +397,CS,2100,2019,Fall,c +120,CS,2100,2019,Fall,d +146,CS,2100,2019,Fall,d +181,CS,2100,2019,Fall,d +183,CS,2100,2019,Fall,d +316,CS,2100,2019,Fall,d +152,CS,2100,2020,Spring,a +167,CS,2100,2020,Spring,a +228,CS,2100,2020,Spring,a +122,CS,2100,2020,Fall,a +171,CS,2100,2020,Fall,a +177,CS,2100,2020,Fall,a +191,CS,2100,2020,Fall,a +219,CS,2100,2020,Fall,a +247,CS,2100,2020,Fall,a +289,CS,2100,2020,Fall,a +333,CS,2100,2020,Fall,a +138,CS,2420,2015,Spring,a +277,CS,2420,2015,Spring,a +377,CS,2420,2015,Spring,a +160,CS,2420,2015,Summer,a +204,CS,2420,2015,Summer,a +140,CS,2420,2015,Summer,c +302,CS,2420,2015,Summer,c +276,CS,2420,2015,Fall,a +115,CS,2420,2016,Spring,a +312,CS,2420,2016,Spring,a +348,CS,2420,2016,Spring,a +385,CS,2420,2016,Spring,a +389,CS,2420,2016,Spring,a +172,CS,2420,2016,Summer,a +195,CS,2420,2016,Summer,a +314,CS,2420,2016,Summer,a +321,CS,2420,2016,Summer,a +163,CS,2420,2016,Fall,a +177,CS,2420,2016,Fall,a +229,CS,2420,2016,Fall,a +245,CS,2420,2016,Fall,a +282,CS,2420,2016,Fall,a +313,CS,2420,2016,Fall,a +369,CS,2420,2016,Fall,a +392,CS,2420,2016,Fall,a +105,CS,2420,2016,Fall,b +117,CS,2420,2016,Fall,b +151,CS,2420,2016,Fall,b +215,CS,2420,2016,Fall,b +262,CS,2420,2016,Fall,b +268,CS,2420,2016,Fall,b +295,CS,2420,2016,Fall,b +329,CS,2420,2016,Fall,b +243,CS,2420,2016,Fall,c +270,CS,2420,2016,Fall,c +397,CS,2420,2016,Fall,c +119,CS,2420,2017,Summer,a +353,CS,2420,2017,Summer,a +361,CS,2420,2017,Summer,a +132,CS,2420,2017,Summer,b +285,CS,2420,2017,Summer,b +299,CS,2420,2017,Summer,b +309,CS,2420,2017,Summer,b +179,CS,2420,2017,Summer,c +208,CS,2420,2017,Summer,c +261,CS,2420,2017,Summer,c +288,CS,2420,2017,Summer,c +311,CS,2420,2017,Summer,c +372,CS,2420,2017,Summer,c +120,CS,2420,2017,Fall,a +123,CS,2420,2017,Fall,a +128,CS,2420,2017,Fall,a +326,CS,2420,2017,Fall,a +387,CS,2420,2017,Fall,a +107,CS,2420,2018,Spring,a +296,CS,2420,2018,Spring,a +124,CS,2420,2019,Summer,a +131,CS,2420,2019,Summer,a +199,CS,2420,2019,Summer,a +356,CS,2420,2019,Summer,a +390,CS,2420,2019,Summer,a +133,CS,2420,2020,Summer,a +153,CS,2420,2020,Summer,a +167,CS,2420,2020,Summer,a +219,CS,2420,2020,Summer,a +220,CS,2420,2020,Summer,a +231,CS,2420,2020,Summer,a +233,CS,2420,2020,Summer,a +263,CS,2420,2020,Summer,a +365,CS,2420,2020,Summer,a +368,CS,2420,2020,Summer,a +168,CS,2420,2020,Fall,a +222,CS,2420,2020,Fall,a +225,CS,2420,2020,Fall,a +230,CS,2420,2020,Fall,a +345,CS,2420,2020,Fall,a 
+163,CS,3100,2015,Summer,a +172,CS,3100,2015,Summer,a +276,CS,3100,2015,Summer,a +302,CS,3100,2015,Summer,a +215,CS,3100,2015,Summer,b +214,CS,3100,2016,Spring,a +243,CS,3100,2016,Spring,a +120,CS,3100,2016,Spring,b +138,CS,3100,2016,Spring,b +285,CS,3100,2016,Spring,b +374,CS,3100,2016,Spring,b +134,CS,3100,2016,Spring,d +138,CS,3100,2016,Spring,d +192,CS,3100,2016,Spring,d +195,CS,3100,2016,Spring,d +207,CS,3100,2016,Summer,a +182,CS,3100,2016,Fall,a +213,CS,3100,2016,Fall,a +277,CS,3100,2016,Fall,a +314,CS,3100,2016,Fall,a +378,CS,3100,2016,Fall,a +392,CS,3100,2016,Fall,a +210,CS,3100,2017,Spring,a +261,CS,3100,2017,Spring,a +210,CS,3100,2017,Spring,b +255,CS,3100,2017,Spring,b +355,CS,3100,2017,Spring,b +385,CS,3100,2017,Spring,b +393,CS,3100,2017,Summer,a +123,CS,3100,2017,Fall,a +124,CS,3100,2017,Fall,a +139,CS,3100,2017,Fall,a +237,CS,3100,2017,Fall,a +260,CS,3100,2017,Fall,a +264,CS,3100,2017,Fall,a +296,CS,3100,2017,Fall,a +391,CS,3100,2017,Fall,a +397,CS,3100,2017,Fall,a +196,CS,3100,2019,Spring,a +129,CS,3100,2019,Spring,b +288,CS,3100,2019,Spring,b +348,CS,3100,2019,Spring,b +366,CS,3100,2019,Spring,b +399,CS,3100,2019,Spring,b +211,CS,3200,2015,Spring,b +138,CS,3200,2015,Fall,a +249,CS,3200,2015,Fall,a +134,CS,3200,2015,Fall,b +179,CS,3200,2015,Fall,b +312,CS,3200,2015,Fall,c +336,CS,3200,2015,Fall,c +282,CS,3200,2015,Fall,d +295,CS,3200,2015,Fall,d +182,CS,3200,2016,Summer,a +246,CS,3200,2016,Summer,a +270,CS,3200,2016,Summer,a +290,CS,3200,2016,Summer,a +357,CS,3200,2016,Summer,a +373,CS,3200,2016,Summer,a +379,CS,3200,2016,Summer,a +176,CS,3200,2016,Summer,b +207,CS,3200,2016,Summer,b +246,CS,3200,2016,Summer,b +120,CS,3200,2016,Fall,a +268,CS,3200,2016,Fall,a +102,CS,3200,2016,Fall,b +313,CS,3200,2016,Fall,b +348,CS,3200,2016,Fall,b +123,CS,3200,2016,Fall,c +229,CS,3200,2016,Fall,c +291,CS,3200,2016,Fall,c +105,CS,3200,2016,Fall,d +107,CS,3200,2016,Fall,d +151,CS,3200,2016,Fall,d +369,CS,3200,2016,Fall,d +385,CS,3200,2016,Fall,d +116,CS,3200,2017,Spring,a +264,CS,3200,2017,Spring,a +377,CS,3200,2017,Spring,a +397,CS,3200,2017,Spring,a +133,CS,3200,2018,Spring,a +165,CS,3200,2018,Spring,a +197,CS,3200,2018,Spring,a +257,CS,3200,2018,Spring,a +274,CS,3200,2018,Spring,a +255,CS,3200,2018,Spring,b +276,CS,3200,2018,Spring,b +391,CS,3200,2018,Spring,b +109,CS,3200,2018,Spring,c +285,CS,3200,2018,Spring,c +388,CS,3200,2018,Spring,c +139,CS,3200,2019,Spring,a +164,CS,3200,2019,Spring,a +277,CS,3200,2019,Spring,a +372,CS,3200,2019,Spring,a +131,CS,3200,2020,Spring,a +194,CS,3200,2020,Spring,a +228,CS,3200,2020,Spring,a +303,CS,3200,2020,Spring,a +342,CS,3200,2020,Spring,a +187,CS,3200,2020,Spring,b +108,CS,3200,2020,Spring,c +248,CS,3200,2020,Spring,c +325,CS,3200,2020,Spring,c +332,CS,3200,2020,Spring,c +378,CS,3200,2020,Spring,c +398,CS,3200,2020,Spring,c +112,CS,3200,2020,Summer,a +113,CS,3200,2020,Summer,a +177,CS,3200,2020,Summer,a +185,CS,3200,2020,Summer,a +231,CS,3200,2020,Summer,a +242,CS,3200,2020,Summer,a +254,CS,3200,2020,Summer,a +260,CS,3200,2020,Summer,a +292,CS,3200,2020,Summer,a +306,CS,3200,2020,Summer,a +311,CS,3200,2020,Summer,a +375,CS,3200,2020,Summer,a +124,CS,3200,2020,Fall,a +135,CS,3200,2020,Fall,a +161,CS,3200,2020,Fall,a +178,CS,3200,2020,Fall,a +230,CS,3200,2020,Fall,a +345,CS,3200,2020,Fall,a +376,CS,3200,2020,Fall,a +149,CS,3500,2015,Fall,b +246,CS,3500,2015,Fall,b +313,CS,3500,2015,Fall,b +123,CS,3500,2016,Spring,a +229,CS,3500,2016,Spring,a +277,CS,3500,2016,Spring,a +374,CS,3500,2016,Spring,a +395,CS,3500,2016,Spring,a +107,CS,3500,2016,Summer,a 
+282,CS,3500,2016,Summer,a +288,CS,3500,2016,Summer,a +379,CS,3500,2016,Summer,a +292,CS,3500,2017,Summer,a +311,CS,3500,2017,Summer,a +182,CS,3500,2017,Fall,a +314,CS,3500,2017,Fall,a +335,CS,3500,2017,Fall,a +391,CS,3500,2017,Fall,a +109,CS,3500,2017,Fall,b +131,CS,3500,2017,Fall,b +355,CS,3500,2017,Fall,b +203,CS,3500,2017,Fall,c +275,CS,3500,2017,Fall,c +294,CS,3500,2017,Fall,c +309,CS,3500,2017,Fall,c +385,CS,3500,2017,Fall,c +392,CS,3500,2017,Fall,c +118,CS,3500,2019,Summer,a +152,CS,3500,2019,Summer,a +179,CS,3500,2019,Summer,a +228,CS,3500,2019,Summer,a +258,CS,3500,2019,Summer,a +276,CS,3500,2019,Summer,a +396,CS,3500,2019,Summer,a +180,CS,3500,2019,Fall,a +255,CS,3500,2019,Fall,a +332,CS,3500,2019,Fall,a +377,CS,3500,2019,Fall,a +380,CS,3500,2019,Fall,a +397,CS,3500,2019,Fall,a +108,CS,3500,2019,Fall,b +133,CS,3500,2019,Fall,b +171,CS,3500,2019,Fall,b +199,CS,3500,2019,Fall,b +223,CS,3500,2019,Fall,b +270,CS,3500,2019,Fall,b +321,CS,3500,2019,Fall,b +375,CS,3500,2019,Fall,b +143,CS,3500,2019,Fall,c +363,CS,3500,2019,Fall,c +112,CS,3500,2020,Summer,a +124,CS,3500,2020,Summer,a +127,CS,3500,2020,Summer,a +142,CS,3500,2020,Summer,a +164,CS,3500,2020,Summer,a +166,CS,3500,2020,Summer,a +247,CS,3500,2020,Summer,a +260,CS,3500,2020,Summer,a +281,CS,3500,2020,Summer,a +312,CS,3500,2020,Summer,a +325,CS,3500,2020,Summer,a +329,CS,3500,2020,Summer,a +331,CS,3500,2020,Summer,a +333,CS,3500,2020,Summer,a +347,CS,3500,2020,Summer,a +348,CS,3500,2020,Summer,a +364,CS,3500,2020,Summer,a +365,CS,3500,2020,Summer,a +373,CS,3500,2020,Summer,a +386,CS,3500,2020,Summer,a +192,CS,3505,2015,Spring,a +282,CS,3505,2015,Spring,a +211,CS,3505,2015,Fall,a +313,CS,3505,2015,Fall,a +182,CS,3505,2015,Fall,b +335,CS,3505,2015,Fall,b +392,CS,3505,2015,Fall,b +126,CS,3505,2015,Fall,c +162,CS,3505,2015,Fall,c +348,CS,3505,2015,Fall,d +107,CS,3505,2016,Summer,a +163,CS,3505,2016,Summer,a +290,CS,3505,2016,Summer,a +378,CS,3505,2016,Summer,a +393,CS,3505,2016,Summer,a +123,CS,3505,2016,Fall,a +379,CS,3505,2016,Fall,a +116,CS,3505,2016,Fall,b +249,CS,3505,2016,Fall,b +329,CS,3505,2016,Fall,b +151,CS,3505,2017,Summer,a +260,CS,3505,2017,Summer,a +312,CS,3505,2017,Summer,a +124,CS,3505,2017,Fall,a +128,CS,3505,2017,Fall,a +199,CS,3505,2017,Fall,a +214,CS,3505,2017,Fall,a +355,CS,3505,2017,Fall,a +397,CS,3505,2017,Fall,a +102,CS,3505,2017,Fall,b +131,CS,3505,2017,Fall,b +177,CS,3505,2017,Fall,b +199,CS,3505,2017,Fall,b +208,CS,3505,2017,Fall,b +294,CS,3505,2017,Fall,b +321,CS,3505,2017,Fall,b +385,CS,3505,2017,Fall,b +100,CS,3505,2018,Summer,a +101,CS,3505,2018,Summer,a +197,CS,3505,2018,Summer,a +247,CS,3505,2018,Summer,a +255,CS,3505,2018,Summer,a +368,CS,3505,2018,Summer,a +374,CS,3505,2018,Summer,a +377,CS,3505,2018,Summer,a +386,CS,3505,2018,Summer,a +127,CS,3505,2018,Summer,b +143,CS,3505,2018,Summer,b +173,CS,3505,2018,Summer,b +185,CS,3505,2018,Summer,b +247,CS,3505,2018,Summer,b +259,CS,3505,2018,Summer,b +262,CS,3505,2018,Summer,b +288,CS,3505,2018,Summer,b +156,CS,3505,2018,Fall,a +179,CS,3505,2018,Fall,a +240,CS,3505,2018,Fall,a +256,CS,3505,2018,Fall,a +258,CS,3505,2018,Fall,a +305,CS,3505,2018,Fall,a +345,CS,3505,2018,Fall,a +371,CS,3505,2018,Fall,a +252,CS,3505,2018,Fall,b +285,CS,3505,2018,Fall,c +371,CS,3505,2018,Fall,c +396,CS,3505,2018,Fall,c +152,CS,3505,2019,Spring,a +228,CS,3505,2019,Spring,a +241,CS,3505,2019,Spring,a +276,CS,3505,2019,Spring,a +320,CS,3505,2019,Spring,a +187,CS,3505,2019,Spring,b +230,CS,3505,2019,Spring,b +314,CS,3505,2019,Spring,b +358,CS,3505,2019,Spring,b 
+119,CS,3505,2019,Summer,a +169,CS,3505,2019,Summer,a +220,CS,3505,2019,Summer,a +296,CS,3505,2019,Summer,a +307,CS,3505,2019,Summer,a +129,CS,3505,2019,Summer,b +223,CS,3505,2019,Summer,b +238,CS,3505,2019,Summer,b +296,CS,3505,2019,Summer,b +298,CS,3505,2019,Summer,b +300,CS,3505,2019,Summer,b +340,CS,3505,2019,Summer,b +372,CS,3505,2019,Summer,b +373,CS,3505,2019,Summer,b +380,CS,3505,2019,Summer,b +129,CS,3505,2019,Summer,c +300,CS,3505,2019,Summer,c +384,CS,3505,2019,Summer,c +113,CS,3505,2019,Summer,d +133,CS,3505,2019,Summer,d +270,CS,3505,2019,Summer,d +292,CS,3505,2019,Summer,d +318,CS,3505,2019,Summer,d +356,CS,3505,2019,Summer,d +362,CS,3505,2019,Summer,d +178,CS,3505,2019,Fall,a +284,CS,3505,2019,Fall,a +391,CS,3505,2019,Fall,a +118,CS,3505,2019,Fall,b +289,CS,3505,2019,Fall,b +309,CS,3505,2019,Fall,b +399,CS,3505,2019,Fall,b +194,CS,3505,2019,Fall,c +235,CS,3505,2019,Fall,c +248,CS,3505,2019,Fall,c +311,CS,3505,2019,Fall,c +391,CS,3505,2019,Fall,c +146,CS,3505,2020,Spring,a +164,CS,3505,2020,Spring,a +277,CS,3505,2020,Spring,a +332,CS,3505,2020,Spring,a +137,CS,3505,2020,Summer,a +200,CS,3505,2020,Summer,a +219,CS,3505,2020,Summer,a +257,CS,3505,2020,Summer,a +267,CS,3505,2020,Summer,a +306,CS,3505,2020,Summer,a +365,CS,3505,2020,Summer,a +142,CS,3505,2020,Fall,a +339,CS,3505,2020,Fall,a +398,CS,3505,2020,Fall,a +106,CS,3505,2020,Fall,b +110,CS,3505,2020,Fall,b +121,CS,3505,2020,Fall,b +333,CS,3505,2020,Fall,b +109,CS,3505,2020,Fall,c +120,CS,3505,2020,Fall,c +171,CS,3505,2020,Fall,c +250,CS,3505,2020,Fall,c +293,CS,3505,2020,Fall,c +390,CS,3505,2020,Fall,c +140,CS,3810,2015,Spring,a +276,CS,3810,2015,Spring,a +123,CS,3810,2016,Summer,a +160,CS,3810,2016,Summer,a +314,CS,3810,2016,Summer,a +393,CS,3810,2016,Summer,a +107,CS,3810,2016,Fall,a +195,CS,3810,2016,Fall,a +213,CS,3810,2016,Fall,a +282,CS,3810,2016,Fall,a +285,CS,3810,2016,Fall,a +348,CS,3810,2016,Fall,a +105,CS,3810,2016,Fall,b +116,CS,3810,2016,Fall,b +245,CS,3810,2016,Fall,b +264,CS,3810,2016,Fall,b +329,CS,3810,2016,Fall,b +335,CS,3810,2016,Fall,b +173,CS,3810,2018,Spring,a +179,CS,3810,2018,Spring,a +230,CS,3810,2018,Spring,a +237,CS,3810,2018,Spring,a +255,CS,3810,2018,Spring,a +305,CS,3810,2018,Spring,a +313,CS,3810,2018,Spring,a +372,CS,3810,2018,Spring,a +388,CS,3810,2018,Spring,a +129,CS,3810,2018,Summer,a +177,CS,3810,2018,Summer,a +260,CS,3810,2018,Summer,a +374,CS,3810,2018,Summer,a +386,CS,3810,2018,Summer,a +177,CS,3810,2018,Summer,b +214,CS,3810,2018,Summer,b +231,CS,3810,2018,Summer,b +270,CS,3810,2018,Summer,b +288,CS,3810,2018,Summer,b +344,CS,3810,2018,Summer,b +377,CS,3810,2018,Summer,b +399,CS,3810,2018,Summer,b +128,CS,3810,2018,Summer,c +129,CS,3810,2018,Summer,c +133,CS,3810,2018,Summer,c +151,CS,3810,2018,Summer,c +240,CS,3810,2018,Summer,c +257,CS,3810,2018,Summer,c +311,CS,3810,2018,Summer,c +182,CS,3810,2018,Summer,d +210,CS,3810,2018,Summer,d +252,CS,3810,2018,Summer,d +270,CS,3810,2018,Summer,d +312,CS,3810,2018,Summer,d +356,CS,3810,2018,Summer,d +379,CS,3810,2018,Summer,d +127,CS,3810,2019,Fall,a +131,CS,3810,2019,Fall,a +241,CS,3810,2019,Fall,a +258,CS,3810,2019,Fall,a +333,CS,3810,2019,Fall,a +102,CS,3810,2019,Fall,b +359,CS,3810,2019,Fall,b +113,CS,3810,2020,Fall,a +124,CS,3810,2020,Fall,a +171,CS,3810,2020,Fall,a +187,CS,3810,2020,Fall,a +220,CS,3810,2020,Fall,a +225,CS,3810,2020,Fall,a +233,CS,3810,2020,Fall,a +340,CS,3810,2020,Fall,a +347,CS,3810,2020,Fall,a +193,CS,4000,2015,Spring,a +160,CS,4000,2015,Summer,a +282,CS,4000,2015,Fall,a +307,CS,4000,2015,Fall,a 
+138,CS,4000,2016,Fall,a +276,CS,4000,2016,Fall,a +321,CS,4000,2016,Fall,a +378,CS,4000,2016,Fall,a +393,CS,4000,2016,Fall,a +151,CS,4000,2017,Spring,a +187,CS,4000,2017,Spring,a +207,CS,4000,2017,Spring,a +255,CS,4000,2017,Spring,a +134,CS,4000,2017,Summer,a +139,CS,4000,2017,Summer,a +179,CS,4000,2017,Summer,a +259,CS,4000,2017,Summer,a +318,CS,4000,2017,Summer,a +373,CS,4000,2017,Summer,a +107,CS,4000,2017,Fall,a +163,CS,4000,2017,Fall,a +252,CS,4000,2017,Fall,a +262,CS,4000,2017,Fall,a +291,CS,4000,2017,Fall,a +342,CS,4000,2017,Fall,a +361,CS,4000,2017,Fall,a +163,CS,4000,2017,Fall,b +329,CS,4000,2017,Fall,b +345,CS,4000,2017,Fall,b +361,CS,4000,2017,Fall,b +164,CS,4000,2018,Spring,a +173,CS,4000,2018,Spring,a +203,CS,4000,2018,Spring,a +275,CS,4000,2018,Spring,a +313,CS,4000,2018,Spring,a +385,CS,4000,2018,Spring,a +127,CS,4000,2019,Spring,a +256,CS,4000,2019,Spring,a +169,CS,4000,2020,Spring,a +181,CS,4000,2020,Spring,a +254,CS,4000,2020,Spring,a +257,CS,4000,2020,Spring,a +285,CS,4000,2020,Spring,a +312,CS,4000,2020,Spring,a +364,CS,4000,2020,Spring,a +375,CS,4000,2020,Spring,a +386,CS,4000,2020,Spring,a +123,CS,4000,2020,Spring,b +152,CS,4000,2020,Spring,b +181,CS,4000,2020,Spring,b +257,CS,4000,2020,Spring,b +309,CS,4000,2020,Spring,b +311,CS,4000,2020,Spring,b +371,CS,4000,2020,Spring,b +109,CS,4000,2020,Fall,a +110,CS,4000,2020,Fall,a +118,CS,4000,2020,Fall,a +120,CS,4000,2020,Fall,a +131,CS,4000,2020,Fall,a +161,CS,4000,2020,Fall,a +185,CS,4000,2020,Fall,a +277,CS,4000,2020,Fall,a +292,CS,4000,2020,Fall,a +341,CS,4000,2020,Fall,a +348,CS,4000,2020,Fall,a +366,CS,4000,2020,Fall,a +368,CS,4000,2020,Fall,a +376,CS,4000,2020,Fall,a +397,CS,4000,2020,Fall,a +162,CS,4150,2015,Summer,a +176,CS,4150,2015,Summer,a +192,CS,4150,2015,Summer,a +204,CS,4150,2015,Summer,a +348,CS,4150,2015,Summer,b +163,CS,4150,2016,Summer,a +245,CS,4150,2016,Summer,a +249,CS,4150,2016,Summer,a +378,CS,4150,2016,Summer,a +249,CS,4150,2016,Summer,b +264,CS,4150,2016,Summer,b +285,CS,4150,2016,Summer,b +288,CS,4150,2016,Summer,b +131,CS,4150,2018,Fall,a +240,CS,4150,2018,Fall,a +270,CS,4150,2018,Fall,a +292,CS,4150,2018,Fall,a +362,CS,4150,2018,Fall,a +391,CS,4150,2018,Fall,a +255,CS,4150,2018,Fall,b +371,CS,4150,2018,Fall,b +102,CS,4150,2019,Spring,a +210,CS,4150,2019,Spring,a +260,CS,4150,2019,Spring,a +106,CS,4150,2020,Spring,a +120,CS,4150,2020,Spring,a +123,CS,4150,2020,Spring,a +125,CS,4150,2020,Spring,a +179,CS,4150,2020,Spring,a +277,CS,4150,2020,Spring,a +314,CS,4150,2020,Spring,a +396,CS,4150,2020,Spring,a +397,CS,4150,2020,Spring,a +135,CS,4150,2020,Fall,a +148,CS,4150,2020,Fall,a +235,CS,4150,2020,Fall,a +309,CS,4150,2020,Fall,a +329,CS,4150,2020,Fall,a +339,CS,4150,2020,Fall,a +347,CS,4150,2020,Fall,a +386,CS,4150,2020,Fall,a +120,CS,4400,2015,Summer,a +140,CS,4400,2015,Summer,a +215,CS,4400,2015,Summer,a +277,CS,4400,2015,Summer,a +290,CS,4400,2015,Summer,a +392,CS,4400,2015,Fall,b +282,CS,4400,2015,Fall,c +373,CS,4400,2015,Fall,c +149,CS,4400,2016,Spring,a +307,CS,4400,2016,Spring,a +179,CS,4400,2016,Summer,a +262,CS,4400,2016,Summer,a +138,CS,4400,2016,Fall,a +102,CS,4400,2017,Spring,a +246,CS,4400,2017,Spring,a +249,CS,4400,2017,Spring,a +329,CS,4400,2017,Spring,a +369,CS,4400,2017,Spring,a +231,CS,4400,2017,Spring,b +255,CS,4400,2017,Spring,b +309,CS,4400,2017,Spring,b +276,CS,4400,2017,Spring,c +313,CS,4400,2017,Spring,c +388,CS,4400,2017,Spring,c +321,CS,4400,2019,Spring,a +333,CS,4400,2019,Spring,a +379,CS,4400,2019,Spring,a +109,CS,4400,2019,Spring,b +128,CS,4400,2019,Spring,b 
+151,CS,4400,2019,Spring,b +275,CS,4400,2019,Spring,b +169,CS,4400,2019,Spring,c +187,CS,4400,2019,Spring,c +248,CS,4400,2019,Spring,c +257,CS,4400,2019,Spring,d +312,CS,4400,2019,Spring,d +345,CS,4400,2019,Spring,d +146,CS,4400,2019,Summer,a +167,CS,4400,2019,Summer,a +173,CS,4400,2019,Summer,a +234,CS,4400,2019,Summer,a +285,CS,4400,2019,Summer,a +287,CS,4400,2019,Summer,a +294,CS,4400,2019,Summer,a +325,CS,4400,2019,Summer,a +397,CS,4400,2019,Summer,a +398,CS,4400,2019,Summer,a +135,CS,4400,2019,Summer,b +143,CS,4400,2019,Summer,b +177,CS,4400,2019,Summer,b +267,CS,4400,2019,Summer,b +285,CS,4400,2019,Summer,b +298,CS,4400,2019,Summer,b +332,CS,4400,2019,Summer,b +368,CS,4400,2019,Summer,b +391,CS,4400,2019,Summer,b +183,CS,4400,2019,Fall,a +241,CS,4400,2019,Fall,a +124,CS,4400,2019,Fall,b +259,CS,4400,2019,Fall,b +364,CS,4400,2019,Fall,b +377,CS,4400,2019,Fall,b +113,CS,4400,2020,Spring,a +170,CS,4400,2020,Spring,a +199,CS,4400,2020,Spring,a +228,CS,4400,2020,Spring,a +348,CS,4400,2020,Spring,a +390,CS,4400,2020,Spring,a +119,CS,4400,2020,Fall,a +123,CS,4400,2020,Fall,a +131,CS,4400,2020,Fall,a +152,CS,4400,2020,Fall,a +230,CS,4400,2020,Fall,a +258,CS,4400,2020,Fall,a +272,CS,4400,2020,Fall,a +378,CS,4400,2020,Fall,a +106,CS,4400,2020,Fall,b +127,CS,4400,2020,Fall,b +185,CS,4400,2020,Fall,b +202,CS,4400,2020,Fall,b +235,CS,4400,2020,Fall,b +292,CS,4400,2020,Fall,b +340,CS,4400,2020,Fall,b +276,CS,4500,2015,Summer,a +290,CS,4500,2015,Summer,b +215,CS,4500,2016,Spring,a +317,CS,4500,2016,Spring,a +119,CS,4500,2016,Spring,b +138,CS,4500,2016,Spring,b +149,CS,4500,2016,Spring,b +162,CS,4500,2016,Spring,b +179,CS,4500,2016,Spring,b +215,CS,4500,2016,Spring,b +285,CS,4500,2016,Spring,b +301,CS,4500,2016,Spring,b +307,CS,4500,2016,Spring,b +321,CS,4500,2016,Spring,b +357,CS,4500,2016,Spring,b +117,CS,4500,2016,Fall,a +176,CS,4500,2016,Fall,a +177,CS,4500,2016,Fall,a +309,CS,4500,2016,Fall,a +139,CS,4500,2017,Summer,a +207,CS,4500,2017,Summer,a +335,CS,4500,2017,Summer,a +348,CS,4500,2017,Summer,a +378,CS,4500,2017,Summer,a +101,CS,4500,2018,Spring,a +128,CS,4500,2018,Spring,a +132,CS,4500,2018,Spring,a +182,CS,4500,2018,Spring,a +203,CS,4500,2018,Spring,a +231,CS,4500,2018,Spring,a +294,CS,4500,2018,Spring,a +329,CS,4500,2018,Spring,a +361,CS,4500,2018,Spring,a +132,CS,4500,2018,Spring,b +270,CS,4500,2018,Spring,b +305,CS,4500,2018,Spring,b +318,CS,4500,2018,Spring,b +379,CS,4500,2018,Spring,b +133,CS,4500,2018,Spring,c +164,CS,4500,2018,Spring,c +312,CS,4500,2018,Spring,c +369,CS,4500,2018,Spring,c +128,CS,4500,2018,Spring,d +313,CS,4500,2018,Spring,d +345,CS,4500,2018,Spring,d +366,CS,4500,2018,Spring,d +391,CS,4500,2018,Spring,d +107,CS,4500,2019,Summer,a +123,CS,4500,2019,Summer,a +185,CS,4500,2019,Summer,a +248,CS,4500,2019,Summer,a +333,CS,4500,2019,Summer,a +340,CS,4500,2019,Summer,a +371,CS,4500,2019,Summer,a +386,CS,4500,2019,Summer,a +256,CS,4500,2019,Fall,a +260,CS,4500,2019,Fall,a +293,CS,4500,2019,Fall,a +303,CS,4500,2019,Fall,a +131,CS,4500,2019,Fall,b +173,CS,4500,2019,Fall,b +250,CS,4500,2019,Fall,b +255,CS,4500,2019,Fall,b +300,CS,4500,2019,Fall,b +398,CS,4500,2019,Fall,b +131,CS,4500,2019,Fall,c +143,CS,4500,2019,Fall,c +256,CS,4500,2019,Fall,c +274,CS,4500,2019,Fall,c +316,CS,4500,2019,Fall,c +109,CS,4500,2019,Fall,d +194,CS,4500,2019,Fall,d +220,CS,4500,2019,Fall,d +254,CS,4500,2019,Fall,d +255,CS,4500,2019,Fall,d +296,CS,4500,2019,Fall,d +341,CS,4500,2019,Fall,d +365,CS,4500,2019,Fall,d +108,CS,4500,2020,Spring,a +142,CS,4500,2020,Spring,a +169,CS,4500,2020,Spring,a 
+200,CS,4500,2020,Spring,a +364,CS,4500,2020,Spring,a +373,CS,4500,2020,Spring,a +127,CS,4500,2020,Summer,a +152,CS,4500,2020,Summer,a +167,CS,4500,2020,Summer,a +240,CS,4500,2020,Summer,a +368,CS,4500,2020,Summer,a +397,CS,4500,2020,Summer,a +138,CS,4940,2015,Summer,a +117,CS,4940,2017,Fall,a +143,CS,4940,2017,Fall,a +260,CS,4940,2017,Fall,a +294,CS,4940,2017,Fall,a +311,CS,4940,2017,Fall,a +326,CS,4940,2017,Fall,a +119,CS,4940,2017,Fall,b +379,CS,4940,2017,Fall,b +167,CS,4940,2019,Fall,a +220,CS,4940,2019,Fall,a +255,CS,4940,2019,Fall,a +256,CS,4940,2019,Fall,a +285,CS,4940,2019,Fall,a +314,CS,4940,2019,Fall,a +398,CS,4940,2019,Fall,a +100,CS,4940,2020,Summer,a +170,CS,4940,2020,Summer,a +200,CS,4940,2020,Summer,a +228,CS,4940,2020,Summer,a +251,CS,4940,2020,Summer,a +258,CS,4940,2020,Summer,a +277,CS,4940,2020,Summer,a +292,CS,4940,2020,Summer,a +313,CS,4940,2020,Summer,a +331,CS,4940,2020,Summer,a +362,CS,4940,2020,Summer,a +378,CS,4940,2020,Summer,a +386,CS,4940,2020,Summer,a +391,CS,4940,2020,Summer,a +397,CS,4940,2020,Summer,a +100,CS,4940,2020,Summer,b +123,CS,4940,2020,Summer,b +127,CS,4940,2020,Summer,b +171,CS,4940,2020,Summer,b +177,CS,4940,2020,Summer,b +194,CS,4940,2020,Summer,b +231,CS,4940,2020,Summer,b +233,CS,4940,2020,Summer,b +247,CS,4940,2020,Summer,b +250,CS,4940,2020,Summer,b +251,CS,4940,2020,Summer,b +258,CS,4940,2020,Summer,b +271,CS,4940,2020,Summer,b +277,CS,4940,2020,Summer,b +300,CS,4940,2020,Summer,b +312,CS,4940,2020,Summer,b +321,CS,4940,2020,Summer,b +339,CS,4940,2020,Summer,b +345,CS,4940,2020,Summer,b +391,CS,4940,2020,Summer,b +397,CS,4940,2020,Summer,b +107,CS,4970,2016,Fall,a +123,CS,4970,2016,Fall,a +145,CS,4970,2016,Fall,a +268,CS,4970,2016,Fall,a +276,CS,4970,2016,Fall,a +285,CS,4970,2016,Fall,a +335,CS,4970,2016,Fall,a +394,CS,4970,2016,Fall,a +177,CS,4970,2016,Fall,b +179,CS,4970,2016,Fall,b +249,CS,4970,2016,Fall,b +276,CS,4970,2016,Fall,b +285,CS,4970,2016,Fall,b +291,CS,4970,2016,Fall,b +312,CS,4970,2016,Fall,b +313,CS,4970,2016,Fall,b +397,CS,4970,2016,Fall,b +116,CS,4970,2017,Spring,a +120,CS,4970,2017,Spring,a +282,CS,4970,2017,Spring,a +295,CS,4970,2017,Spring,a +314,CS,4970,2017,Spring,a +393,CS,4970,2017,Spring,a +117,CS,4970,2017,Summer,a +261,CS,4970,2017,Summer,a +288,CS,4970,2017,Summer,a +231,CS,4970,2018,Summer,a +270,CS,4970,2018,Summer,a +277,CS,4970,2018,Summer,a +344,CS,4970,2018,Summer,a +398,CS,4970,2018,Summer,a +100,CS,4970,2018,Summer,b +105,CS,4970,2018,Summer,b +132,CS,4970,2018,Summer,b +227,CS,4970,2018,Summer,b +277,CS,4970,2018,Summer,b +348,CS,4970,2018,Summer,b +133,CS,4970,2018,Summer,c +163,CS,4970,2018,Summer,c +185,CS,4970,2018,Summer,c +214,CS,4970,2018,Summer,c +220,CS,4970,2018,Summer,c +372,CS,4970,2018,Summer,c +387,CS,4970,2018,Summer,c +392,CS,4970,2018,Summer,c +274,CS,4970,2018,Fall,a +128,CS,4970,2018,Fall,b +247,CS,4970,2018,Fall,b +262,CS,4970,2018,Fall,b +267,CS,4970,2018,Fall,b +386,CS,4970,2018,Fall,b +121,CS,4970,2018,Fall,c +143,CS,4970,2018,Fall,c +196,CS,4970,2018,Fall,c +102,CS,4970,2018,Fall,d +121,CS,4970,2018,Fall,d +178,CS,4970,2018,Fall,d +255,CS,4970,2018,Fall,d +267,CS,4970,2018,Fall,d +342,CS,4970,2018,Fall,d +356,CS,4970,2018,Fall,d +165,CS,4970,2019,Spring,a +275,CS,4970,2019,Spring,a +351,CS,4970,2019,Spring,a +366,CS,4970,2019,Spring,a +311,CS,4970,2019,Spring,b +345,CS,4970,2019,Spring,b +364,CS,4970,2019,Spring,b +124,CS,4970,2019,Summer,a +199,CS,4970,2019,Summer,a +289,CS,4970,2019,Summer,a +300,CS,4970,2019,Summer,a +368,CS,4970,2019,Summer,a +378,CS,4970,2019,Summer,a 
+113,CS,4970,2019,Summer,b +164,CS,4970,2019,Summer,b +298,CS,4970,2019,Summer,b +325,CS,4970,2019,Summer,b +359,CS,4970,2019,Summer,b +378,CS,4970,2019,Summer,b +391,CS,4970,2019,Summer,b +173,CS,4970,2019,Summer,c +333,CS,4970,2019,Summer,c +363,CS,4970,2019,Summer,c +119,CS,4970,2019,Summer,d +135,CS,4970,2019,Summer,d +164,CS,4970,2019,Summer,d +294,CS,4970,2019,Summer,d +303,CS,4970,2019,Summer,d +329,CS,4970,2019,Summer,d +362,CS,4970,2019,Summer,d +399,CS,4970,2019,Summer,d +194,CS,4970,2019,Fall,a +235,CS,4970,2019,Fall,a +250,CS,4970,2019,Fall,a +127,CS,4970,2019,Fall,b +131,CS,4970,2019,Fall,b +293,CS,4970,2019,Fall,b +321,CS,4970,2019,Fall,b +152,CS,4970,2019,Fall,c +200,CS,4970,2019,Fall,c +259,CS,4970,2019,Fall,c +318,CS,4970,2019,Fall,d +340,CS,4970,2019,Fall,d +347,CS,4970,2019,Fall,d +112,CS,4970,2020,Summer,a +221,CS,4970,2020,Summer,a +242,CS,4970,2020,Summer,a +251,CS,4970,2020,Summer,a +257,CS,4970,2020,Summer,a +118,CS,4970,2020,Summer,b +151,CS,4970,2020,Summer,b +187,CS,4970,2020,Summer,b +219,CS,4970,2020,Summer,b +221,CS,4970,2020,Summer,b +222,CS,4970,2020,Summer,b +309,CS,4970,2020,Summer,b +373,CS,4970,2020,Summer,b +379,CS,4970,2020,Summer,b +146,CS,4970,2020,Summer,c +233,CS,4970,2020,Summer,c +257,CS,4970,2020,Summer,c +260,CS,4970,2020,Summer,c +292,CS,4970,2020,Summer,c +339,CS,4970,2020,Summer,c +379,CS,4970,2020,Summer,c +384,CS,4970,2020,Summer,c +109,CS,4970,2020,Summer,d +146,CS,4970,2020,Summer,d +151,CS,4970,2020,Summer,d +171,CS,4970,2020,Summer,d +228,CS,4970,2020,Summer,d +254,CS,4970,2020,Summer,d +307,CS,4970,2020,Summer,d +309,CS,4970,2020,Summer,d +379,CS,4970,2020,Summer,d +390,CS,4970,2020,Summer,d +122,CS,4970,2020,Fall,a +191,CS,4970,2020,Fall,a +136,CS,4970,2020,Fall,b +283,CS,4970,2020,Fall,b +130,CS,4970,2020,Fall,c +148,CS,4970,2020,Fall,c +281,CS,4970,2020,Fall,c +186,CS,4970,2020,Fall,d +202,CS,4970,2020,Fall,d +323,CS,4970,2020,Fall,d +341,CS,4970,2020,Fall,d +120,MATH,1210,2015,Summer,a +138,MATH,1210,2015,Summer,a +117,MATH,1210,2016,Spring,a +119,MATH,1210,2016,Spring,a +144,MATH,1210,2016,Spring,a +270,MATH,1210,2016,Spring,a +276,MATH,1210,2016,Spring,a +229,MATH,1210,2016,Spring,b +295,MATH,1210,2016,Spring,b +335,MATH,1210,2016,Spring,b +182,MATH,1210,2016,Spring,c +277,MATH,1210,2016,Spring,c +179,MATH,1210,2016,Spring,d +273,MATH,1210,2016,Spring,d +277,MATH,1210,2016,Spring,d +295,MATH,1210,2016,Spring,d +214,MATH,1210,2016,Fall,a +249,MATH,1210,2016,Fall,a +397,MATH,1210,2016,Fall,a +215,MATH,1210,2016,Fall,b +278,MATH,1210,2016,Fall,b +357,MATH,1210,2016,Fall,b +378,MATH,1210,2016,Fall,b +107,MATH,1210,2016,Fall,c +195,MATH,1210,2016,Fall,c +285,MATH,1210,2016,Fall,c +369,MATH,1210,2016,Fall,c +379,MATH,1210,2016,Fall,c +195,MATH,1210,2016,Fall,d +385,MATH,1210,2016,Fall,d +356,MATH,1210,2017,Spring,a +394,MATH,1210,2017,Spring,a +345,MATH,1210,2017,Summer,a +230,MATH,1210,2017,Summer,b +210,MATH,1210,2017,Summer,c +342,MATH,1210,2017,Summer,c +387,MATH,1210,2017,Summer,c +392,MATH,1210,2017,Summer,c +102,MATH,1210,2018,Spring,a +199,MATH,1210,2018,Spring,a +372,MATH,1210,2018,Spring,a +257,MATH,1210,2018,Summer,a +279,MATH,1210,2018,Summer,a +288,MATH,1210,2018,Summer,a +368,MATH,1210,2018,Summer,a +371,MATH,1210,2018,Summer,a +398,MATH,1210,2018,Summer,a +167,MATH,1210,2018,Fall,a +177,MATH,1210,2018,Fall,a +185,MATH,1210,2018,Fall,a +231,MATH,1210,2018,Fall,a +311,MATH,1210,2018,Fall,a +312,MATH,1210,2018,Fall,a +384,MATH,1210,2018,Fall,a +104,MATH,1210,2018,Fall,b +128,MATH,1210,2018,Fall,b +163,MATH,1210,2018,Fall,b 
+178,MATH,1210,2018,Fall,b +133,MATH,1210,2019,Spring,a +294,MATH,1210,2019,Spring,a +307,MATH,1210,2019,Spring,a +332,MATH,1210,2019,Spring,a +333,MATH,1210,2019,Spring,a +348,MATH,1210,2019,Spring,a +351,MATH,1210,2019,Spring,a +275,MATH,1210,2019,Spring,b +123,MATH,1210,2019,Summer,a +124,MATH,1210,2019,Summer,a +228,MATH,1210,2019,Summer,a +255,MATH,1210,2019,Summer,a +313,MATH,1210,2019,Summer,a +135,MATH,1210,2020,Spring,a +220,MATH,1210,2020,Spring,a +310,MATH,1210,2020,Spring,a +373,MATH,1210,2020,Spring,a +390,MATH,1210,2020,Spring,a +106,MATH,1210,2020,Spring,b +108,MATH,1210,2020,Spring,b +260,MATH,1210,2020,Spring,b +386,MATH,1210,2020,Spring,b +192,MATH,1220,2015,Summer,a +211,MATH,1220,2015,Summer,a +162,MATH,1220,2015,Summer,b +270,MATH,1220,2015,Summer,b +280,MATH,1220,2015,Summer,b +195,MATH,1220,2015,Summer,c +245,MATH,1220,2015,Summer,c +282,MATH,1220,2015,Summer,c +377,MATH,1220,2015,Summer,c +210,MATH,1220,2016,Spring,a +307,MATH,1220,2016,Spring,a +313,MATH,1220,2016,Spring,a +357,MATH,1220,2016,Spring,a +389,MATH,1220,2016,Spring,a +116,MATH,1220,2017,Spring,a +187,MATH,1220,2017,Spring,a +256,MATH,1220,2017,Spring,a +299,MATH,1220,2017,Spring,a +117,MATH,1220,2017,Spring,b +163,MATH,1220,2017,Spring,b +179,MATH,1220,2017,Spring,b +182,MATH,1220,2017,Spring,b +259,MATH,1220,2017,Spring,b +260,MATH,1220,2017,Spring,b +285,MATH,1220,2017,Spring,b +314,MATH,1220,2017,Spring,b +388,MATH,1220,2017,Spring,b +393,MATH,1220,2017,Spring,b +117,MATH,1220,2017,Spring,c +145,MATH,1220,2017,Spring,c +277,MATH,1220,2017,Spring,c +355,MATH,1220,2017,Spring,c +385,MATH,1220,2017,Spring,c +105,MATH,1220,2017,Spring,d +260,MATH,1220,2017,Spring,d +378,MATH,1220,2017,Spring,d +215,MATH,1220,2017,Summer,a +165,MATH,1220,2018,Spring,a +173,MATH,1220,2018,Spring,a +276,MATH,1220,2018,Spring,a +312,MATH,1220,2018,Spring,a +332,MATH,1220,2018,Spring,a +375,MATH,1220,2018,Spring,a +131,MATH,1220,2018,Spring,b +169,MATH,1220,2018,Spring,b +309,MATH,1220,2018,Spring,b +362,MATH,1220,2018,Spring,b +139,MATH,1220,2018,Summer,a +185,MATH,1220,2018,Summer,a +348,MATH,1220,2018,Summer,a +127,MATH,1220,2019,Fall,a +133,MATH,1220,2019,Fall,a +181,MATH,1220,2019,Fall,a +231,MATH,1220,2019,Fall,a +234,MATH,1220,2019,Fall,a +248,MATH,1220,2019,Fall,a +254,MATH,1220,2019,Fall,a +323,MATH,1220,2019,Fall,a +341,MATH,1220,2019,Fall,a +102,MATH,1220,2019,Fall,b +120,MATH,1220,2019,Fall,b +123,MATH,1220,2019,Fall,b +152,MATH,1220,2019,Fall,b +180,MATH,1220,2019,Fall,b +274,MATH,1220,2019,Fall,b +321,MATH,1220,2019,Fall,b +366,MATH,1220,2019,Fall,b +135,MATH,1220,2019,Fall,c +247,MATH,1220,2019,Fall,c +358,MATH,1220,2019,Fall,c +390,MATH,1220,2019,Fall,c +396,MATH,1220,2019,Fall,c +100,MATH,1220,2020,Spring,a +151,MATH,1220,2020,Spring,a +178,MATH,1220,2020,Spring,a +228,MATH,1220,2020,Spring,a +118,MATH,1220,2020,Summer,a +164,MATH,1220,2020,Summer,a +281,MATH,1220,2020,Summer,a +293,MATH,1220,2020,Summer,a +329,MATH,1220,2020,Summer,a +397,MATH,1220,2020,Summer,a +211,MATH,1250,2015,Spring,c +276,MATH,1250,2015,Spring,c +149,MATH,1250,2015,Fall,a +172,MATH,1250,2015,Fall,a +335,MATH,1250,2015,Fall,a +214,MATH,1250,2016,Spring,a +290,MATH,1250,2016,Spring,a +377,MATH,1250,2016,Spring,a +270,MATH,1250,2016,Summer,a +285,MATH,1250,2016,Summer,a +373,MATH,1250,2016,Summer,a +215,MATH,1250,2016,Fall,a +138,MATH,1250,2016,Fall,b +182,MATH,1250,2016,Fall,b +120,MATH,1250,2016,Fall,c +374,MATH,1250,2016,Fall,c +127,MATH,1250,2017,Summer,a +173,MATH,1250,2017,Summer,a +292,MATH,1250,2017,Summer,a 
+355,MATH,1250,2017,Summer,a +127,MATH,1250,2017,Summer,b +210,MATH,1250,2017,Summer,b +311,MATH,1250,2017,Summer,b +230,MATH,1250,2017,Summer,c +257,MATH,1250,2017,Summer,c +117,MATH,1250,2017,Summer,d +208,MATH,1250,2017,Summer,d +109,MATH,1250,2018,Spring,a +123,MATH,1250,2018,Spring,a +260,MATH,1250,2018,Spring,a +274,MATH,1250,2018,Spring,a +345,MATH,1250,2018,Spring,a +361,MATH,1250,2018,Spring,a +379,MATH,1250,2018,Spring,a +385,MATH,1250,2018,Spring,a +392,MATH,1250,2018,Spring,a +102,MATH,1250,2018,Summer,a +247,MATH,1250,2018,Summer,a +255,MATH,1250,2018,Summer,a +312,MATH,1250,2018,Summer,a +332,MATH,1250,2018,Summer,a +356,MATH,1250,2018,Summer,a +372,MATH,1250,2018,Summer,a +101,MATH,1250,2018,Summer,b +119,MATH,1250,2018,Summer,b +239,MATH,1250,2018,Summer,b +313,MATH,1250,2018,Summer,b +321,MATH,1250,2018,Summer,b +368,MATH,1250,2018,Summer,b +100,MATH,1250,2018,Summer,c +139,MATH,1250,2018,Summer,c +158,MATH,1250,2018,Summer,c +197,MATH,1250,2018,Summer,c +207,MATH,1250,2018,Summer,c +261,MATH,1250,2018,Summer,c +277,MATH,1250,2018,Summer,c +288,MATH,1250,2018,Summer,c +321,MATH,1250,2018,Summer,c +362,MATH,1250,2018,Summer,c +106,MATH,1250,2020,Summer,a +108,MATH,1250,2020,Summer,a +133,MATH,1250,2020,Summer,a +135,MATH,1250,2020,Summer,a +151,MATH,1250,2020,Summer,a +167,MATH,1250,2020,Summer,a +185,MATH,1250,2020,Summer,a +231,MATH,1250,2020,Summer,a +281,MATH,1250,2020,Summer,a +289,MATH,1250,2020,Summer,a +309,MATH,1250,2020,Summer,a +342,MATH,1250,2020,Summer,a +378,MATH,1250,2020,Summer,a +384,MATH,1250,2020,Summer,a +386,MATH,1250,2020,Summer,a +391,MATH,1250,2020,Summer,a +177,MATH,1260,2015,Spring,c +144,MATH,1260,2015,Summer,a +162,MATH,1260,2015,Summer,a +211,MATH,1260,2015,Summer,a +229,MATH,1260,2016,Fall,a +278,MATH,1260,2016,Fall,a +304,MATH,1260,2017,Summer,a +353,MATH,1260,2017,Summer,a +361,MATH,1260,2017,Summer,a +252,MATH,1260,2017,Fall,a +260,MATH,1260,2017,Fall,a +291,MATH,1260,2017,Fall,a +133,MATH,1260,2019,Spring,a +256,MATH,1260,2019,Spring,a +347,MATH,1260,2019,Spring,a +152,MATH,1260,2019,Spring,b +169,MATH,1260,2019,Spring,b +179,MATH,1260,2019,Spring,b +187,MATH,1260,2019,Spring,b +247,MATH,1260,2019,Spring,b +277,MATH,1260,2019,Spring,b +285,MATH,1260,2019,Spring,b +313,MATH,1260,2019,Spring,b +356,MATH,1260,2019,Spring,b +102,MATH,1260,2019,Spring,c +165,MATH,1260,2019,Spring,c +293,MATH,1260,2019,Spring,c +321,MATH,1260,2019,Spring,c +113,MATH,1260,2019,Summer,a +118,MATH,1260,2019,Summer,a +124,MATH,1260,2019,Summer,a +131,MATH,1260,2019,Summer,a +185,MATH,1260,2019,Summer,a +257,MATH,1260,2019,Summer,a +276,MATH,1260,2019,Summer,a +318,MATH,1260,2019,Summer,a +391,MATH,1260,2019,Summer,a +397,MATH,1260,2019,Summer,a +120,MATH,1260,2019,Summer,b +123,MATH,1260,2019,Summer,b +194,MATH,1260,2019,Summer,b +276,MATH,1260,2019,Summer,b +303,MATH,1260,2019,Summer,b +314,MATH,1260,2019,Summer,b +377,MATH,1260,2019,Summer,b +100,MATH,1260,2019,Fall,a +108,MATH,1260,2019,Fall,a +258,MATH,1260,2019,Fall,a +309,MATH,1260,2019,Fall,a +364,MATH,1260,2019,Fall,a +375,MATH,1260,2019,Fall,a +164,MATH,1260,2020,Spring,a +173,MATH,1260,2020,Spring,a +231,MATH,1260,2020,Spring,a +235,MATH,1260,2020,Spring,a +242,MATH,1260,2020,Spring,a +276,MATH,2210,2015,Spring,b +120,MATH,2210,2015,Summer,c +212,MATH,2210,2015,Summer,c +348,MATH,2210,2015,Summer,c +172,MATH,2210,2015,Fall,a +182,MATH,2210,2015,Fall,a +373,MATH,2210,2015,Fall,a +176,MATH,2210,2017,Spring,a +208,MATH,2210,2017,Spring,a +215,MATH,2210,2017,Spring,a +249,MATH,2210,2017,Spring,a 
+261,MATH,2210,2017,Spring,a +270,MATH,2210,2017,Spring,a +314,MATH,2210,2017,Spring,a +128,MATH,2210,2017,Summer,a +277,MATH,2210,2017,Summer,a +361,MATH,2210,2017,Summer,a +387,MATH,2210,2017,Summer,a +392,MATH,2210,2017,Summer,a +117,MATH,2210,2018,Spring,a +123,MATH,2210,2018,Spring,a +262,MATH,2210,2018,Spring,a +391,MATH,2210,2018,Spring,a +131,MATH,2210,2018,Spring,b +185,MATH,2210,2018,Spring,b +197,MATH,2210,2018,Spring,b +199,MATH,2210,2018,Spring,b +229,MATH,2210,2018,Spring,b +230,MATH,2210,2018,Spring,b +231,MATH,2210,2018,Spring,b +239,MATH,2210,2018,Spring,b +256,MATH,2210,2018,Spring,b +275,MATH,2210,2018,Spring,b +309,MATH,2210,2018,Spring,b +369,MATH,2210,2018,Spring,b +102,MATH,2210,2019,Spring,a +169,MATH,2210,2019,Spring,a +285,MATH,2210,2019,Spring,a +119,MATH,2210,2019,Spring,b +173,MATH,2210,2019,Spring,b +228,MATH,2210,2019,Spring,b +285,MATH,2210,2019,Spring,b +296,MATH,2210,2019,Spring,b +305,MATH,2210,2019,Spring,b +342,MATH,2210,2019,Spring,b +375,MATH,2210,2019,Spring,b +113,MATH,2210,2020,Spring,a +255,MATH,2210,2020,Spring,a +274,MATH,2210,2020,Spring,a +347,MATH,2210,2020,Spring,a +124,MATH,2210,2020,Spring,b +170,MATH,2210,2020,Spring,b +200,MATH,2210,2020,Spring,b +241,MATH,2210,2020,Spring,c +251,MATH,2210,2020,Spring,c +274,MATH,2210,2020,Spring,c +122,MATH,2210,2020,Fall,a +136,MATH,2210,2020,Fall,a +167,MATH,2210,2020,Fall,a +175,MATH,2210,2020,Fall,a +179,MATH,2210,2020,Fall,a +225,MATH,2210,2020,Fall,a +272,MATH,2210,2020,Fall,a +281,MATH,2210,2020,Fall,a +329,MATH,2210,2020,Fall,a +345,MATH,2210,2020,Fall,a +378,MATH,2210,2020,Fall,a +384,MATH,2210,2020,Fall,a +397,MATH,2210,2020,Fall,a +179,MATH,2270,2015,Fall,a +212,MATH,2270,2015,Fall,a +210,MATH,2270,2015,Fall,b +313,MATH,2270,2015,Fall,b +132,MATH,2270,2017,Summer,a +143,MATH,2270,2017,Summer,a +277,MATH,2270,2017,Summer,a +304,MATH,2270,2017,Summer,a +318,MATH,2270,2017,Summer,a +107,MATH,2270,2017,Fall,a +109,MATH,2270,2017,Fall,a +292,MATH,2270,2017,Fall,a +329,MATH,2270,2017,Fall,a +246,MATH,2270,2017,Fall,b +259,MATH,2270,2017,Fall,b +342,MATH,2270,2017,Fall,b +356,MATH,2270,2017,Fall,b +120,MATH,2270,2017,Fall,c +131,MATH,2270,2017,Fall,c +182,MATH,2270,2017,Fall,c +394,MATH,2270,2017,Fall,c +102,MATH,2270,2017,Fall,d +107,MATH,2270,2017,Fall,d +123,MATH,2270,2017,Fall,d +124,MATH,2270,2017,Fall,d +128,MATH,2270,2017,Fall,d +182,MATH,2270,2017,Fall,d +276,MATH,2270,2017,Fall,d +291,MATH,2270,2017,Fall,d +312,MATH,2270,2017,Fall,d +314,MATH,2270,2017,Fall,d +397,MATH,2270,2017,Fall,d +255,MATH,2270,2019,Spring,a +285,MATH,2270,2019,Spring,a +366,MATH,2270,2019,Spring,a +379,MATH,2270,2019,Spring,a +139,MATH,2270,2019,Summer,a +146,MATH,2270,2019,Summer,a +173,MATH,2270,2019,Summer,a +248,MATH,2270,2019,Summer,a +377,MATH,2270,2019,Summer,a +194,MATH,2270,2019,Summer,b +303,MATH,2270,2019,Summer,b +325,MATH,2270,2019,Summer,b +378,MATH,2270,2019,Summer,b +183,MATH,2270,2019,Summer,c +345,MATH,2270,2019,Summer,c +396,MATH,2270,2019,Summer,c +399,MATH,2270,2019,Summer,c +254,MATH,2270,2019,Fall,a +333,MATH,2270,2019,Fall,a +175,MATH,2270,2020,Spring,a +178,MATH,2270,2020,Spring,a +223,MATH,2270,2020,Spring,a +258,MATH,2270,2020,Spring,a +270,MATH,2270,2020,Spring,a +309,MATH,2270,2020,Spring,a +130,MATH,2270,2020,Fall,a +152,MATH,2270,2020,Fall,a +177,MATH,2270,2020,Fall,a +181,MATH,2270,2020,Fall,a +230,MATH,2270,2020,Fall,a +240,MATH,2270,2020,Fall,a +331,MATH,2270,2020,Fall,a +348,MATH,2270,2020,Fall,a +360,MATH,2270,2020,Fall,a +373,MATH,2270,2020,Fall,a +391,MATH,2270,2020,Fall,a 
+398,MATH,2270,2020,Fall,a +119,MATH,2270,2020,Fall,b +127,MATH,2270,2020,Fall,b +129,MATH,2270,2020,Fall,b +135,MATH,2270,2020,Fall,b +167,MATH,2270,2020,Fall,b +186,MATH,2270,2020,Fall,b +260,MATH,2270,2020,Fall,b +321,MATH,2270,2020,Fall,b +331,MATH,2270,2020,Fall,b +348,MATH,2270,2020,Fall,b +371,MATH,2270,2020,Fall,b +391,MATH,2270,2020,Fall,b +204,MATH,2280,2015,Summer,a +249,MATH,2280,2015,Summer,a +123,MATH,2280,2015,Fall,a +276,MATH,2280,2015,Fall,a +393,MATH,2280,2016,Fall,a +182,MATH,2280,2018,Spring,a +230,MATH,2280,2018,Spring,a +238,MATH,2280,2018,Spring,a +256,MATH,2280,2018,Spring,a +262,MATH,2280,2018,Spring,a +307,MATH,2280,2018,Spring,a +387,MATH,2280,2018,Spring,a +173,MATH,2280,2018,Fall,a +220,MATH,2280,2018,Fall,a +259,MATH,2280,2018,Fall,a +342,MATH,2280,2018,Fall,a +104,MATH,2280,2018,Fall,b +119,MATH,2280,2018,Fall,b +165,MATH,2280,2018,Fall,b +227,MATH,2280,2018,Fall,b +359,MATH,2280,2018,Fall,b +119,MATH,2280,2018,Fall,c +120,MATH,2280,2018,Fall,c +178,MATH,2280,2018,Fall,c +196,MATH,2280,2018,Fall,c +309,MATH,2280,2018,Fall,c +345,MATH,2280,2018,Fall,c +100,MATH,2280,2019,Fall,a +102,MATH,2280,2019,Fall,a +270,MATH,2280,2019,Fall,a +314,MATH,2280,2019,Fall,a +133,MATH,2280,2019,Fall,b +247,MATH,2280,2019,Fall,b +267,MATH,2280,2019,Fall,b +318,MATH,2280,2019,Fall,b +379,MATH,2280,2019,Fall,b +390,MATH,2280,2019,Fall,b +146,MATH,2280,2019,Fall,c +223,MATH,2280,2019,Fall,c +234,MATH,2280,2019,Fall,c +248,MATH,2280,2019,Fall,c +270,MATH,2280,2019,Fall,c +292,MATH,2280,2019,Fall,c +107,MATH,2280,2020,Spring,a +183,MATH,2280,2020,Spring,a +210,MATH,2280,2020,Spring,a +255,MATH,2280,2020,Spring,a +285,MATH,2280,2020,Spring,a +313,MATH,2280,2020,Spring,a +106,MATH,2280,2020,Spring,b +169,MATH,2280,2020,Spring,b +285,MATH,2280,2020,Spring,b +398,MATH,2280,2020,Spring,b +177,MATH,3210,2015,Spring,b +282,MATH,3210,2015,Spring,b +394,MATH,3210,2015,Spring,b +144,MATH,3210,2015,Summer,a +210,MATH,3210,2015,Summer,a +215,MATH,3210,2015,Summer,a +301,MATH,3210,2015,Summer,a +126,MATH,3210,2015,Fall,a +172,MATH,3210,2015,Fall,a +246,MATH,3210,2015,Fall,a +307,MATH,3210,2015,Fall,a +313,MATH,3210,2015,Fall,a +374,MATH,3210,2015,Fall,a +138,MATH,3210,2015,Fall,b +192,MATH,3210,2015,Fall,c +172,MATH,3210,2015,Fall,d +335,MATH,3210,2015,Fall,d +149,MATH,3210,2016,Spring,a +229,MATH,3210,2016,Spring,a +276,MATH,3210,2016,Spring,a +102,MATH,3210,2016,Fall,a +134,MATH,3210,2016,Fall,a +195,MATH,3210,2016,Fall,a +277,MATH,3210,2016,Fall,a +120,MATH,3210,2017,Spring,a +207,MATH,3210,2017,Spring,a +304,MATH,3210,2017,Spring,a +107,MATH,3210,2017,Summer,a +292,MATH,3210,2017,Summer,a +309,MATH,3210,2017,Summer,a +372,MATH,3210,2017,Summer,a +270,MATH,3210,2019,Spring,a +348,MATH,3210,2019,Spring,a +364,MATH,3210,2019,Spring,a +378,MATH,3210,2019,Spring,a +399,MATH,3210,2019,Spring,a +259,MATH,3210,2019,Spring,b +314,MATH,3210,2019,Spring,b +321,MATH,3210,2019,Spring,b +124,MATH,3210,2019,Fall,a +223,MATH,3210,2019,Fall,a +230,MATH,3210,2019,Fall,a +248,MATH,3210,2019,Fall,a +284,MATH,3210,2019,Fall,a +285,MATH,3210,2019,Fall,a +358,MATH,3210,2019,Fall,a +123,MATH,3210,2020,Spring,a +146,MATH,3210,2020,Spring,a +181,MATH,3210,2020,Spring,a +251,MATH,3210,2020,Spring,a +113,MATH,3210,2020,Summer,a +135,MATH,3210,2020,Summer,a +166,MATH,3210,2020,Summer,a +171,MATH,3210,2020,Summer,a +187,MATH,3210,2020,Summer,a +260,MATH,3210,2020,Summer,a +312,MATH,3210,2020,Summer,a +368,MATH,3210,2020,Summer,a +391,MATH,3210,2020,Summer,a +109,MATH,3210,2020,Fall,a +200,MATH,3210,2020,Fall,a 
+227,MATH,3210,2020,Fall,a +255,MATH,3210,2020,Fall,a +256,MATH,3210,2020,Fall,a +289,MATH,3210,2020,Fall,a +329,MATH,3210,2020,Fall,a +365,MATH,3210,2020,Fall,a +386,MATH,3210,2020,Fall,a +397,MATH,3210,2020,Fall,a +210,MATH,3220,2016,Spring,a +285,MATH,3220,2016,Spring,a +373,MATH,3220,2016,Spring,a +195,MATH,3220,2016,Spring,b +301,MATH,3220,2016,Spring,b +392,MATH,3220,2016,Spring,b +119,MATH,3220,2016,Spring,c +216,MATH,3220,2016,Spring,c +374,MATH,3220,2016,Spring,c +192,MATH,3220,2016,Spring,d +210,MATH,3220,2016,Spring,d +290,MATH,3220,2016,Spring,d +394,MATH,3220,2016,Spring,d +163,MATH,3220,2016,Summer,a +214,MATH,3220,2016,Summer,a +270,MATH,3220,2016,Summer,a +276,MATH,3220,2016,Summer,a +278,MATH,3220,2016,Summer,a +246,MATH,3220,2016,Fall,a +277,MATH,3220,2016,Fall,a +385,MATH,3220,2016,Fall,a +134,MATH,3220,2016,Fall,b +245,MATH,3220,2016,Fall,b +264,MATH,3220,2016,Fall,b +329,MATH,3220,2016,Fall,b +123,MATH,3220,2017,Spring,a +176,MATH,3220,2017,Spring,a +391,MATH,3220,2017,Spring,a +102,MATH,3220,2017,Fall,a +107,MATH,3220,2017,Fall,a +207,MATH,3220,2017,Fall,a +266,MATH,3220,2017,Fall,a +311,MATH,3220,2017,Fall,a +377,MATH,3220,2017,Fall,a +139,MATH,3220,2017,Fall,b +261,MATH,3220,2017,Fall,b +326,MATH,3220,2017,Fall,b +366,MATH,3220,2017,Fall,b +237,MATH,3220,2018,Spring,a +292,MATH,3220,2018,Spring,a +296,MATH,3220,2018,Spring,a +345,MATH,3220,2018,Spring,a +362,MATH,3220,2018,Spring,a +379,MATH,3220,2018,Spring,a +101,MATH,3220,2018,Spring,b +132,MATH,3220,2018,Spring,b +312,MATH,3220,2018,Spring,b +387,MATH,3220,2018,Spring,b +127,MATH,3220,2018,Spring,c +131,MATH,3220,2018,Spring,c +165,MATH,3220,2018,Spring,c +229,MATH,3220,2018,Spring,c +305,MATH,3220,2018,Spring,c +309,MATH,3220,2018,Spring,c +312,MATH,3220,2018,Spring,c +129,MATH,3220,2018,Spring,d +179,MATH,3220,2018,Spring,d +203,MATH,3220,2018,Spring,d +238,MATH,3220,2018,Spring,d +177,PHYS,2040,2015,Spring,a +192,PHYS,2040,2015,Spring,a +245,PHYS,2040,2015,Fall,a +149,PHYS,2040,2015,Fall,b +295,PHYS,2040,2015,Fall,b +312,PHYS,2040,2015,Fall,b +373,PHYS,2040,2015,Fall,b +374,PHYS,2040,2015,Fall,b +210,PHYS,2040,2015,Fall,c +212,PHYS,2040,2015,Fall,c +307,PHYS,2040,2015,Fall,c +387,PHYS,2040,2015,Fall,c +321,PHYS,2040,2016,Spring,a +389,PHYS,2040,2016,Spring,a +292,PHYS,2040,2017,Summer,a +203,PHYS,2040,2017,Fall,a +237,PHYS,2040,2017,Fall,a +259,PHYS,2040,2017,Fall,a +314,PHYS,2040,2017,Fall,a +379,PHYS,2040,2017,Fall,a +119,PHYS,2040,2017,Fall,b +256,PHYS,2040,2017,Fall,b +285,PHYS,2040,2017,Fall,b +132,PHYS,2040,2017,Fall,c +187,PHYS,2040,2017,Fall,c +214,PHYS,2040,2017,Fall,c +230,PHYS,2040,2017,Fall,c +266,PHYS,2040,2017,Fall,c +270,PHYS,2040,2017,Fall,c +314,PHYS,2040,2017,Fall,c +348,PHYS,2040,2017,Fall,c +101,PHYS,2040,2018,Spring,a +105,PHYS,2040,2018,Spring,a +123,PHYS,2040,2018,Spring,a +169,PHYS,2040,2018,Spring,a +227,PHYS,2040,2018,Spring,a +342,PHYS,2040,2018,Spring,a +178,PHYS,2040,2019,Spring,a +275,PHYS,2040,2019,Spring,a +296,PHYS,2040,2019,Spring,a +372,PHYS,2040,2019,Spring,a +391,PHYS,2040,2019,Spring,a +399,PHYS,2040,2019,Spring,a +152,PHYS,2040,2019,Spring,b +305,PHYS,2040,2019,Spring,b +120,PHYS,2040,2020,Spring,a +125,PHYS,2040,2020,Spring,a +128,PHYS,2040,2020,Spring,a +131,PHYS,2040,2020,Spring,a +194,PHYS,2040,2020,Spring,a +267,PHYS,2040,2020,Spring,a +313,PHYS,2040,2020,Spring,a +377,PHYS,2060,2015,Spring,a +115,PHYS,2060,2016,Spring,a +195,PHYS,2060,2016,Spring,a +229,PHYS,2060,2016,Spring,a +355,PHYS,2060,2016,Spring,a +379,PHYS,2060,2016,Spring,a +392,PHYS,2060,2016,Spring,a 
+163,PHYS,2060,2016,Spring,b +290,PHYS,2060,2016,Spring,b +262,PHYS,2060,2016,Summer,a +264,PHYS,2060,2016,Summer,a +278,PHYS,2060,2016,Summer,a +373,PHYS,2060,2016,Summer,a +393,PHYS,2060,2016,Summer,a +276,PHYS,2060,2016,Summer,b +282,PHYS,2060,2016,Summer,b +285,PHYS,2060,2016,Summer,b +348,PHYS,2060,2016,Summer,b +374,PHYS,2060,2016,Summer,b +102,PHYS,2060,2018,Summer,a +131,PHYS,2060,2018,Summer,a +120,PHYS,2060,2018,Fall,a +156,PHYS,2060,2018,Fall,a +239,PHYS,2060,2018,Fall,a +298,PHYS,2060,2018,Fall,a +399,PHYS,2060,2018,Fall,a +127,PHYS,2060,2018,Fall,b +158,PHYS,2060,2018,Fall,b +247,PHYS,2060,2018,Fall,b +248,PHYS,2060,2018,Fall,b +257,PHYS,2060,2018,Fall,b +261,PHYS,2060,2018,Fall,b +270,PHYS,2060,2018,Fall,b +275,PHYS,2060,2018,Fall,b +311,PHYS,2060,2018,Fall,b +329,PHYS,2060,2018,Fall,b +127,PHYS,2060,2018,Fall,c +165,PHYS,2060,2018,Fall,c +217,PHYS,2060,2018,Fall,c +275,PHYS,2060,2018,Fall,c +311,PHYS,2060,2018,Fall,c +318,PHYS,2060,2018,Fall,c +329,PHYS,2060,2018,Fall,c +231,PHYS,2060,2018,Fall,d +252,PHYS,2060,2018,Fall,d +259,PHYS,2060,2018,Fall,d +288,PHYS,2060,2018,Fall,d +311,PHYS,2060,2018,Fall,d +230,PHYS,2060,2019,Summer,a +238,PHYS,2060,2019,Summer,a +277,PHYS,2060,2019,Summer,a +307,PHYS,2060,2019,Summer,a +312,PHYS,2060,2019,Summer,a +398,PHYS,2060,2019,Summer,a +106,PHYS,2060,2019,Summer,b +121,PHYS,2060,2019,Summer,b +179,PHYS,2060,2019,Summer,b +194,PHYS,2060,2019,Summer,b +294,PHYS,2060,2019,Summer,b +313,PHYS,2060,2019,Summer,b +366,PHYS,2060,2019,Summer,b +384,PHYS,2060,2019,Summer,b +397,PHYS,2060,2019,Summer,b +108,PHYS,2060,2019,Fall,a +185,PHYS,2060,2019,Fall,a +210,PHYS,2060,2019,Fall,a +359,PHYS,2060,2019,Fall,a +380,PHYS,2060,2019,Fall,a +171,PHYS,2060,2019,Fall,b +241,PHYS,2060,2019,Fall,b +274,PHYS,2060,2019,Fall,b +341,PHYS,2060,2019,Fall,b +368,PHYS,2060,2019,Fall,b +100,PHYS,2060,2019,Fall,c +123,PHYS,2060,2019,Fall,c +151,PHYS,2060,2019,Fall,c +177,PHYS,2060,2019,Fall,c +375,PHYS,2060,2019,Fall,c +122,PHYS,2060,2020,Spring,a +167,PHYS,2060,2020,Spring,a +223,PHYS,2060,2020,Spring,a +255,PHYS,2060,2020,Spring,a +310,PHYS,2060,2020,Spring,a +321,PHYS,2060,2020,Spring,a +153,PHYS,2060,2020,Spring,b +221,PHYS,2060,2020,Spring,b +240,PHYS,2060,2020,Spring,b +269,PHYS,2060,2020,Spring,b +292,PHYS,2060,2020,Spring,b +293,PHYS,2060,2020,Spring,b +321,PHYS,2060,2020,Spring,b +391,PHYS,2060,2020,Spring,b +112,PHYS,2060,2020,Fall,a +142,PHYS,2060,2020,Fall,a +178,PHYS,2060,2020,Fall,a +181,PHYS,2060,2020,Fall,a +187,PHYS,2060,2020,Fall,a +250,PHYS,2060,2020,Fall,a +371,PHYS,2060,2020,Fall,a +376,PHYS,2060,2020,Fall,a +390,PHYS,2060,2020,Fall,a +193,PHYS,2100,2015,Spring,a +277,PHYS,2100,2015,Spring,b +321,PHYS,2100,2015,Spring,b +120,PHYS,2100,2016,Fall,a +312,PHYS,2100,2016,Fall,a +314,PHYS,2100,2016,Fall,a +392,PHYS,2100,2016,Fall,a +176,PHYS,2100,2016,Fall,b +179,PHYS,2100,2016,Fall,b +278,PHYS,2100,2016,Fall,b +177,PHYS,2100,2017,Summer,a +262,PHYS,2100,2017,Summer,a +276,PHYS,2100,2017,Summer,a +375,PHYS,2100,2017,Summer,a +117,PHYS,2100,2017,Summer,b +177,PHYS,2100,2017,Summer,b +215,PHYS,2100,2017,Summer,b +307,PHYS,2100,2017,Summer,b +377,PHYS,2100,2017,Summer,b +378,PHYS,2100,2017,Summer,b +151,PHYS,2100,2017,Summer,c +173,PHYS,2100,2017,Summer,c +215,PHYS,2100,2017,Summer,c +264,PHYS,2100,2017,Summer,c +353,PHYS,2100,2017,Summer,c +355,PHYS,2100,2017,Summer,c +246,PHYS,2100,2017,Fall,a +374,PHYS,2100,2017,Fall,a +387,PHYS,2100,2017,Fall,a +128,PHYS,2100,2018,Fall,a +158,PHYS,2100,2018,Fall,a +185,PHYS,2100,2018,Fall,a +285,PHYS,2100,2018,Fall,a 
+288,PHYS,2100,2018,Fall,a +366,PHYS,2100,2019,Summer,a +386,PHYS,2100,2019,Summer,a +399,PHYS,2100,2019,Summer,a +282,PHYS,2140,2015,Spring,a +192,PHYS,2140,2015,Spring,b +394,PHYS,2140,2015,Spring,b +140,PHYS,2140,2015,Summer,a +172,PHYS,2140,2015,Summer,b +176,PHYS,2140,2015,Summer,b +270,PHYS,2140,2015,Summer,b +138,PHYS,2140,2015,Summer,c +246,PHYS,2140,2015,Summer,c +373,PHYS,2140,2015,Summer,c +120,PHYS,2140,2015,Fall,a +276,PHYS,2140,2015,Fall,a +123,PHYS,2140,2016,Spring,a +117,PHYS,2140,2016,Spring,b +313,PHYS,2140,2016,Spring,b +134,PHYS,2140,2016,Spring,c +215,PHYS,2140,2016,Spring,c +307,PHYS,2140,2016,Spring,c +312,PHYS,2140,2016,Summer,a +317,PHYS,2140,2016,Summer,a +277,PHYS,2140,2016,Summer,b +392,PHYS,2140,2016,Summer,b +116,PHYS,2140,2016,Fall,a +335,PHYS,2140,2016,Fall,a +387,PHYS,2140,2016,Fall,a +177,PHYS,2140,2017,Summer,a +255,PHYS,2140,2017,Summer,a +285,PHYS,2140,2017,Summer,a +314,PHYS,2140,2017,Summer,a +187,PHYS,2140,2017,Fall,a +259,PHYS,2140,2017,Fall,a +361,PHYS,2140,2017,Fall,b +379,PHYS,2140,2017,Fall,b +101,PHYS,2140,2018,Summer,a +105,PHYS,2140,2018,Summer,a +113,PHYS,2140,2018,Summer,a +128,PHYS,2140,2018,Summer,a +143,PHYS,2140,2018,Summer,a +151,PHYS,2140,2018,Summer,a +231,PHYS,2140,2018,Summer,a +298,PHYS,2140,2018,Summer,a +199,PHYS,2140,2018,Summer,b +305,PHYS,2140,2018,Summer,b +369,PHYS,2140,2018,Summer,b +163,PHYS,2140,2018,Fall,a +253,PHYS,2140,2018,Fall,a +386,PHYS,2140,2018,Fall,a +129,PHYS,2140,2019,Fall,a +167,PHYS,2140,2019,Fall,a +227,PHYS,2140,2019,Fall,a +329,PHYS,2140,2019,Fall,a +366,PHYS,2140,2019,Fall,a +371,PHYS,2140,2019,Fall,a +289,PHYS,2140,2019,Fall,b +318,PHYS,2140,2019,Fall,b +362,PHYS,2140,2019,Fall,b +377,PHYS,2140,2019,Fall,b +119,PHYS,2140,2020,Fall,a +131,PHYS,2140,2020,Fall,a +136,PHYS,2140,2020,Fall,a +146,PHYS,2140,2020,Fall,a +175,PHYS,2140,2020,Fall,a +185,PHYS,2140,2020,Fall,a +222,PHYS,2140,2020,Fall,a +235,PHYS,2140,2020,Fall,a +267,PHYS,2140,2020,Fall,a +292,PHYS,2140,2020,Fall,a +297,PHYS,2140,2020,Fall,a +309,PHYS,2140,2020,Fall,a +345,PHYS,2140,2020,Fall,a +391,PHYS,2140,2020,Fall,a +246,PHYS,2210,2015,Fall,a +374,PHYS,2210,2015,Fall,b +392,PHYS,2210,2015,Fall,b +379,PHYS,2210,2015,Fall,c +177,PHYS,2210,2017,Summer,a +230,PHYS,2210,2017,Summer,a +231,PHYS,2210,2017,Summer,a +373,PHYS,2210,2017,Summer,a +179,PHYS,2210,2017,Summer,b +285,PHYS,2210,2017,Summer,b +326,PHYS,2210,2017,Summer,b +127,PHYS,2210,2017,Summer,c +342,PHYS,2210,2017,Summer,c +208,PHYS,2210,2017,Summer,d +261,PHYS,2210,2017,Summer,d +304,PHYS,2210,2017,Summer,d +373,PHYS,2210,2017,Summer,d +101,PHYS,2210,2018,Fall,a +113,PHYS,2210,2018,Fall,a +183,PHYS,2210,2018,Fall,a +296,PHYS,2210,2018,Fall,a +329,PHYS,2210,2018,Fall,a +113,PHYS,2210,2018,Fall,b +120,PHYS,2210,2018,Fall,b +133,PHYS,2210,2018,Fall,b +151,PHYS,2210,2018,Fall,b +270,PHYS,2210,2018,Fall,b +274,PHYS,2210,2018,Fall,b +288,PHYS,2210,2018,Fall,b +378,PHYS,2210,2018,Fall,b +120,PHYS,2210,2018,Fall,c +124,PHYS,2210,2018,Fall,c +332,PHYS,2210,2018,Fall,c +362,PHYS,2210,2018,Fall,c +119,PHYS,2210,2019,Spring,a +238,PHYS,2210,2019,Spring,a +255,PHYS,2210,2019,Spring,a +305,PHYS,2210,2019,Spring,a +311,PHYS,2210,2019,Spring,a +157,PHYS,2210,2019,Spring,b +199,PHYS,2210,2019,Spring,b +238,PHYS,2210,2019,Spring,b +102,PHYS,2210,2019,Spring,c +165,PHYS,2210,2019,Spring,c +253,PHYS,2210,2019,Spring,c +292,PHYS,2210,2019,Spring,c +368,PHYS,2210,2019,Spring,c +391,PHYS,2210,2019,Spring,c +187,PHYS,2210,2019,Spring,d +255,PHYS,2210,2019,Spring,d +257,PHYS,2210,2019,Spring,d 
+391,PHYS,2210,2019,Spring,d +128,PHYS,2210,2019,Summer,a +256,PHYS,2210,2019,Summer,a +289,PHYS,2210,2019,Summer,a +359,PHYS,2210,2019,Summer,a +397,PHYS,2210,2019,Summer,a +123,PHYS,2210,2019,Fall,a +135,PHYS,2210,2019,Fall,a +143,PHYS,2210,2019,Fall,a +241,PHYS,2210,2019,Fall,a +340,PHYS,2210,2019,Fall,a +108,PHYS,2210,2019,Fall,b +171,PHYS,2210,2019,Fall,b +200,PHYS,2210,2019,Fall,b +309,PHYS,2210,2019,Fall,b +312,PHYS,2210,2019,Fall,b +333,PHYS,2210,2019,Fall,b +345,PHYS,2210,2019,Fall,b +363,PHYS,2210,2019,Fall,b +366,PHYS,2210,2019,Fall,b +396,PHYS,2210,2019,Fall,b +123,PHYS,2210,2019,Fall,c +221,PHYS,2210,2019,Fall,c +276,PHYS,2210,2019,Fall,c +347,PHYS,2210,2019,Fall,c +371,PHYS,2210,2019,Fall,c +390,PHYS,2210,2019,Fall,c +303,PHYS,2210,2019,Fall,d +374,PHYS,2220,2015,Spring,a +179,PHYS,2220,2015,Fall,a +276,PHYS,2220,2015,Fall,a +321,PHYS,2220,2015,Fall,a +282,PHYS,2220,2015,Fall,b +172,PHYS,2220,2016,Summer,a +317,PHYS,2220,2016,Summer,a +378,PHYS,2220,2016,Summer,a +391,PHYS,2220,2016,Summer,a +245,PHYS,2220,2016,Fall,a +295,PHYS,2220,2016,Fall,a +356,PHYS,2220,2016,Fall,a +385,PHYS,2220,2016,Fall,a +119,PHYS,2220,2017,Spring,a +176,PHYS,2220,2017,Spring,a +187,PHYS,2220,2017,Spring,a +256,PHYS,2220,2017,Spring,a +313,PHYS,2220,2017,Spring,a +372,PHYS,2220,2017,Spring,a +120,PHYS,2220,2017,Spring,b +312,PHYS,2220,2017,Spring,b +355,PHYS,2220,2017,Spring,b +151,PHYS,2220,2017,Spring,c +187,PHYS,2220,2017,Spring,c +270,PHYS,2220,2017,Spring,c +277,PHYS,2220,2017,Spring,c +119,PHYS,2220,2017,Spring,d +163,PHYS,2220,2017,Spring,d +249,PHYS,2220,2017,Spring,d +288,PHYS,2220,2017,Spring,d +312,PHYS,2220,2017,Spring,d +102,PHYS,2220,2018,Spring,a +105,PHYS,2220,2018,Spring,a +107,PHYS,2220,2018,Spring,a +128,PHYS,2220,2018,Spring,a +132,PHYS,2220,2018,Spring,a +134,PHYS,2220,2018,Spring,a +210,PHYS,2220,2018,Spring,a +214,PHYS,2220,2018,Spring,a +227,PHYS,2220,2018,Spring,a +237,PHYS,2220,2018,Spring,a +239,PHYS,2220,2018,Spring,a +305,PHYS,2220,2018,Spring,a +231,PHYS,2220,2018,Summer,a +255,PHYS,2220,2018,Summer,a +257,PHYS,2220,2018,Summer,a +342,PHYS,2220,2018,Summer,a +344,PHYS,2220,2018,Summer,a +373,PHYS,2220,2018,Summer,a +393,PHYS,2220,2018,Summer,a +123,PHYS,2220,2018,Fall,a +133,PHYS,2220,2018,Fall,a +177,PHYS,2220,2018,Fall,a +178,PHYS,2220,2018,Fall,a +196,PHYS,2220,2018,Fall,a +267,PHYS,2220,2018,Fall,a +285,PHYS,2220,2018,Fall,a +292,PHYS,2220,2018,Fall,a +332,PHYS,2220,2018,Fall,a +241,PHYS,2220,2019,Spring,a +113,PHYS,2220,2020,Spring,a +124,PHYS,2220,2020,Spring,a +175,PHYS,2220,2020,Spring,a +235,PHYS,2220,2020,Spring,a +106,PHYS,2220,2020,Summer,a +118,PHYS,2220,2020,Summer,a +121,PHYS,2220,2020,Summer,a +127,PHYS,2220,2020,Summer,a +194,PHYS,2220,2020,Summer,a +247,PHYS,2220,2020,Summer,a +293,PHYS,2220,2020,Summer,a +296,PHYS,2220,2020,Summer,a +309,PHYS,2220,2020,Summer,a +311,PHYS,2220,2020,Summer,a +339,PHYS,2220,2020,Summer,a +345,PHYS,2220,2020,Summer,a +164,PHYS,2220,2020,Summer,b +242,PHYS,2220,2020,Summer,b +289,PHYS,2220,2020,Summer,b +300,PHYS,2220,2020,Summer,b +323,PHYS,2220,2020,Summer,b +390,PHYS,2220,2020,Summer,b +109,PHYS,2220,2020,Fall,a +228,PHYS,2220,2020,Fall,a +386,PHYS,2220,2020,Fall,a +107,PHYS,3210,2016,Summer,a +249,PHYS,3210,2016,Summer,a +134,PHYS,3210,2016,Summer,b +172,PHYS,3210,2016,Summer,b +249,PHYS,3210,2016,Summer,b +314,PHYS,3210,2016,Summer,b +123,PHYS,3210,2016,Fall,a +260,PHYS,3210,2016,Fall,a +321,PHYS,3210,2016,Fall,a +139,PHYS,3210,2017,Summer,a +179,PHYS,3210,2017,Summer,a +230,PHYS,3210,2017,Summer,a 
+246,PHYS,3210,2017,Summer,a +373,PHYS,3210,2017,Summer,a +378,PHYS,3210,2017,Summer,a +391,PHYS,3210,2017,Summer,a +393,PHYS,3210,2017,Summer,a +208,PHYS,3210,2017,Summer,b +264,PHYS,3210,2017,Summer,b +379,PHYS,3210,2017,Summer,b +155,PHYS,3210,2017,Fall,a +262,PHYS,3210,2017,Fall,a +270,PHYS,3210,2017,Fall,a +335,PHYS,3210,2017,Fall,a +377,PHYS,3210,2017,Fall,a +397,PHYS,3210,2017,Fall,a +119,PHYS,3210,2018,Spring,a +229,PHYS,3210,2018,Spring,a +277,PHYS,3210,2018,Spring,a +294,PHYS,3210,2018,Spring,a +385,PHYS,3210,2018,Spring,a +274,PHYS,3210,2018,Spring,b +372,PHYS,3210,2018,Spring,b +102,PHYS,3210,2018,Spring,c +105,PHYS,3210,2018,Spring,c +197,PHYS,3210,2018,Spring,c +209,PHYS,3210,2018,Spring,c +374,PHYS,3210,2018,Spring,c +381,PHYS,3210,2018,Spring,c +101,PHYS,3210,2018,Fall,a +109,PHYS,3210,2018,Fall,a +227,PHYS,3210,2018,Fall,a +276,PHYS,3210,2018,Fall,a +285,PHYS,3210,2018,Fall,a +113,PHYS,3210,2019,Spring,a +258,PHYS,3210,2019,Spring,a +329,PHYS,3210,2019,Spring,a +351,PHYS,3210,2019,Spring,a +356,PHYS,3210,2019,Spring,a +384,PHYS,3210,2019,Spring,a +217,PHYS,3210,2019,Spring,b +312,PHYS,3210,2019,Spring,b +351,PHYS,3210,2019,Spring,b +231,PHYS,3210,2019,Spring,c +258,PHYS,3210,2019,Spring,c +292,PHYS,3210,2019,Spring,c +329,PHYS,3210,2019,Spring,c +375,PHYS,3210,2019,Spring,c +156,PHYS,3210,2019,Spring,d +173,PHYS,3210,2019,Spring,d +128,PHYS,3210,2019,Summer,a +133,PHYS,3210,2019,Summer,a +146,PHYS,3210,2019,Summer,a +177,PHYS,3210,2019,Summer,a +199,PHYS,3210,2019,Summer,a +133,PHYS,3210,2019,Summer,b +152,PHYS,3210,2019,Summer,b +255,PHYS,3210,2019,Summer,b +287,PHYS,3210,2019,Summer,b +313,PHYS,3210,2019,Summer,b +362,PHYS,3210,2019,Summer,b +366,PHYS,3210,2019,Summer,b +106,PHYS,3210,2019,Summer,c +152,PHYS,3210,2019,Summer,c +167,PHYS,3210,2019,Summer,c +188,PHYS,3210,2019,Summer,c +307,PHYS,3210,2019,Summer,c +309,PHYS,3210,2019,Summer,c +333,PHYS,3210,2019,Summer,c +345,PHYS,3210,2019,Summer,c +100,PHYS,3210,2019,Fall,a +178,PHYS,3210,2019,Fall,a +125,PHYS,3210,2020,Spring,a +131,PHYS,3210,2020,Spring,a +183,PHYS,3210,2020,Spring,a +185,PHYS,3210,2020,Spring,a +254,PHYS,3210,2020,Spring,a +310,PHYS,3210,2020,Spring,a +348,PHYS,3210,2020,Spring,a +390,PHYS,3210,2020,Spring,a +175,PHYS,3210,2020,Summer,a +187,PHYS,3210,2020,Summer,a +240,PHYS,3210,2020,Summer,a +300,PHYS,3210,2020,Summer,a +136,PHYS,3210,2020,Fall,a +153,PHYS,3210,2020,Fall,a +228,PHYS,3210,2020,Fall,a +289,PHYS,3210,2020,Fall,a +293,PHYS,3210,2020,Fall,a +297,PHYS,3210,2020,Fall,a +306,PHYS,3210,2020,Fall,a +339,PHYS,3210,2020,Fall,a +342,PHYS,3210,2020,Fall,a +121,PHYS,3210,2020,Fall,b +129,PHYS,3210,2020,Fall,b +200,PHYS,3210,2020,Fall,b +228,PHYS,3210,2020,Fall,b +256,PHYS,3210,2020,Fall,b +130,PHYS,3210,2020,Fall,c +331,PHYS,3210,2020,Fall,c +115,PHYS,3220,2016,Summer,a +195,PHYS,3220,2016,Summer,a +285,PHYS,3220,2016,Summer,a +312,PHYS,3220,2016,Summer,a +107,PHYS,3220,2016,Summer,b +123,PHYS,3220,2016,Summer,b +277,PHYS,3220,2016,Summer,b +119,PHYS,3220,2017,Summer,a +139,PHYS,3220,2017,Summer,a +215,PHYS,3220,2017,Summer,a +329,PHYS,3220,2017,Summer,a +392,PHYS,3220,2017,Summer,a +120,PHYS,3220,2017,Fall,a +131,PHYS,3220,2017,Fall,a +155,PHYS,3220,2017,Fall,a +214,PHYS,3220,2017,Fall,a +237,PHYS,3220,2017,Fall,a +109,PHYS,3220,2017,Fall,b +203,PHYS,3220,2017,Fall,b +345,PHYS,3220,2017,Fall,b +213,PHYS,3220,2017,Fall,c +230,PHYS,3220,2017,Fall,c +307,PHYS,3220,2017,Fall,c +127,PHYS,3220,2017,Fall,d +187,PHYS,3220,2017,Fall,d +252,PHYS,3220,2017,Fall,d +270,PHYS,3220,2017,Fall,d 
+276,PHYS,3220,2017,Fall,d +288,PHYS,3220,2017,Fall,d +128,PHYS,3220,2018,Summer,a +143,PHYS,3220,2018,Summer,a +260,PHYS,3220,2018,Summer,a +377,PHYS,3220,2018,Summer,a +379,PHYS,3220,2018,Summer,a +398,PHYS,3220,2018,Summer,a +102,PHYS,3220,2020,Spring,a +133,PHYS,3220,2020,Spring,a +170,PHYS,3220,2020,Spring,a +267,PHYS,3220,2020,Spring,a +310,PHYS,3220,2020,Spring,a +227,PHYS,3220,2020,Spring,b +241,PHYS,3220,2020,Spring,b +251,PHYS,3220,2020,Spring,b +255,PHYS,3220,2020,Spring,b +269,PHYS,3220,2020,Spring,b +321,PHYS,3220,2020,Spring,b +348,PHYS,3220,2020,Spring,b +106,PHYS,3220,2020,Spring,c +152,PHYS,3220,2020,Spring,c +185,PHYS,3220,2020,Spring,c +194,PHYS,3220,2020,Spring,c +200,PHYS,3220,2020,Spring,c +241,PHYS,3220,2020,Spring,c +251,PHYS,3220,2020,Spring,c +271,PHYS,3220,2020,Spring,c +296,PHYS,3220,2020,Spring,c +325,PHYS,3220,2020,Spring,c +365,PHYS,3220,2020,Spring,c +124,PHYS,3220,2020,Spring,d +167,PHYS,3220,2020,Spring,d +185,PHYS,3220,2020,Spring,d +227,PHYS,3220,2020,Spring,d +303,PHYS,3220,2020,Spring,d +341,PHYS,3220,2020,Spring,d +342,PHYS,3220,2020,Spring,d +373,PHYS,3220,2020,Spring,d diff --git a/tests/data/Grade.csv b/tests/data/Grade.csv new file mode 100644 index 000000000..8ba592194 --- /dev/null +++ b/tests/data/Grade.csv @@ -0,0 +1,3028 @@ +student_id,dept,course,term_year,term,section,grade +100,CS,1030,2020,Spring,a,A +101,PHYS,2040,2018,Spring,a,A +102,BIOL,1006,2018,Fall,a,A +104,MATH,2280,2018,Fall,b,A +105,PHYS,3210,2018,Spring,c,A +107,MATH,3210,2017,Summer,a,A +107,PHYS,2220,2018,Spring,a,A +109,BIOL,2355,2019,Spring,d,A +113,CS,3200,2020,Summer,a,A +113,CS,3505,2019,Summer,d,A +115,BIOL,1030,2017,Spring,a,A +118,CS,2100,2019,Fall,b,A +119,BIOL,2355,2018,Summer,d,A +119,CS,3505,2019,Summer,a,A +119,CS,4940,2017,Fall,b,A +119,MATH,2280,2018,Fall,c,A +119,PHYS,3210,2018,Spring,a,A +120,PHYS,2060,2018,Fall,a,A +122,CS,4970,2020,Fall,a,A +123,BIOL,2030,2017,Spring,a,A +123,BIOL,2325,2017,Fall,b,A +123,BIOL,2355,2017,Summer,a,A +123,CS,4940,2020,Summer,b,A +123,MATH,3220,2017,Spring,a,A +124,CS,2100,2018,Fall,c,A +124,CS,2420,2019,Summer,a,A +124,MATH,3210,2019,Fall,a,A +125,BIOL,2330,2019,Fall,a,A +127,BIOL,2355,2018,Fall,a,A +127,PHYS,2060,2018,Fall,c,A +127,PHYS,2220,2020,Summer,a,A +128,BIOL,1006,2017,Fall,a,A +128,BIOL,2010,2020,Summer,b,A +128,CS,3505,2017,Fall,a,A +128,CS,4500,2018,Spring,a,A +132,BIOL,1030,2018,Summer,a,A +132,CS,4500,2018,Spring,b,A +132,CS,4970,2018,Summer,b,A +135,CS,4400,2019,Summer,b,A +139,BIOL,1006,2019,Summer,a,A +139,CS,4000,2017,Summer,a,A +140,CS,3810,2015,Spring,a,A +140,CS,4400,2015,Summer,a,A +143,CS,2100,2017,Fall,a,A +145,MATH,1220,2017,Spring,c,A +146,CS,4970,2020,Summer,c,A +146,PHYS,2140,2020,Fall,a,A +149,BIOL,2325,2015,Fall,c,A +149,PHYS,2040,2015,Fall,b,A +151,BIOL,2355,2019,Spring,b,A +151,CS,4970,2020,Summer,b,A +151,MATH,1220,2020,Spring,a,A +152,BIOL,2021,2018,Fall,b,A +155,PHYS,3210,2017,Fall,a,A +155,PHYS,3220,2017,Fall,a,A +165,BIOL,2330,2017,Fall,a,A +165,MATH,1260,2019,Spring,c,A +166,CS,3500,2020,Summer,a,A +167,BIOL,2355,2020,Fall,a,A +167,PHYS,3220,2020,Spring,d,A +168,CS,2420,2020,Fall,a,A +169,CS,2100,2019,Summer,b,A +169,MATH,2280,2020,Spring,b,A +169,PHYS,2040,2018,Spring,a,A +170,CS,4940,2020,Summer,a,A +173,BIOL,1006,2019,Fall,a,A +173,MATH,2210,2019,Spring,b,A +175,PHYS,3210,2020,Summer,a,A +176,BIOL,1006,2016,Spring,a,A +176,PHYS,2140,2015,Summer,b,A +177,BIOL,2330,2016,Fall,a,A +177,BIOL,2420,2015,Spring,a,A +177,CS,3810,2018,Summer,b,A +177,MATH,1260,2015,Spring,c,A 
+179,CS,2100,2016,Summer,a,A +179,PHYS,2060,2019,Summer,b,A +185,MATH,1250,2020,Summer,a,A +185,MATH,1260,2019,Summer,a,A +186,MATH,2270,2020,Fall,b,A +187,CS,4970,2020,Summer,b,A +187,PHYS,3210,2020,Summer,a,A +191,CS,4970,2020,Fall,a,A +192,BIOL,2020,2015,Fall,d,A +200,PHYS,3220,2020,Spring,c,A +203,PHYS,3220,2017,Fall,b,A +207,BIOL,2355,2018,Summer,d,A +207,CS,1410,2016,Summer,a,A +207,MATH,1250,2018,Summer,c,A +210,MATH,3220,2016,Spring,d,A +214,MATH,3220,2016,Summer,a,A +215,CS,4500,2016,Spring,b,A +215,PHYS,2140,2016,Spring,c,A +216,CS,1410,2016,Spring,b,A +217,BIOL,1010,2019,Spring,b,A +217,PHYS,2060,2018,Fall,c,A +223,PHYS,2060,2020,Spring,a,A +224,BIOL,2420,2020,Fall,a,A +227,BIOL,2330,2019,Fall,a,A +228,CS,4970,2020,Summer,d,A +229,CS,2420,2016,Fall,a,A +230,CS,3505,2019,Spring,b,A +230,MATH,1250,2017,Summer,c,A +230,PHYS,2210,2017,Summer,a,A +231,BIOL,2210,2017,Spring,a,A +231,CS,2100,2018,Fall,c,A +231,MATH,1220,2019,Fall,a,A +234,CS,4400,2019,Summer,a,A +237,CS,3810,2018,Spring,a,A +238,BIOL,2021,2019,Spring,b,A +240,MATH,2270,2020,Fall,a,A +241,CS,2100,2019,Summer,a,A +242,CS,4970,2020,Summer,a,A +246,BIOL,2420,2015,Spring,b,A +247,CS,3505,2018,Summer,a,A +249,BIOL,1006,2015,Summer,b,A +249,CS,4150,2016,Summer,a,A +249,CS,4150,2016,Summer,b,A +249,PHYS,3210,2016,Summer,b,A +252,CS,3810,2018,Summer,d,A +255,CS,2100,2018,Spring,a,A +255,CS,4400,2017,Spring,b,A +255,CS,4500,2019,Fall,d,A +256,CS,4500,2019,Fall,a,A +257,BIOL,1030,2017,Spring,c,A +257,CS,3505,2020,Summer,a,A +257,MATH,1250,2017,Summer,c,A +260,CS,4150,2019,Spring,a,A +262,CS,2420,2016,Fall,b,A +262,CS,4400,2016,Summer,a,A +262,CS,4970,2018,Fall,b,A +264,BIOL,2420,2017,Summer,a,A +264,PHYS,3210,2017,Summer,b,A +267,PHYS,2040,2020,Spring,a,A +269,PHYS,2060,2020,Spring,b,A +270,PHYS,2060,2018,Fall,b,A +271,CS,1030,2020,Fall,a,A +273,BIOL,1030,2016,Spring,a,A +274,PHYS,2060,2019,Fall,b,A +275,BIOL,1210,2017,Summer,a,A +275,BIOL,2210,2018,Spring,a,A +275,MATH,2210,2018,Spring,b,A +276,CS,3200,2018,Spring,b,A +276,CS,4970,2016,Fall,b,A +277,BIOL,2330,2017,Summer,a,A +277,CS,4000,2020,Fall,a,A +277,CS,4970,2018,Summer,a,A +277,PHYS,2100,2015,Spring,b,A +277,PHYS,2140,2016,Summer,b,A +282,CS,4970,2017,Spring,a,A +283,CS,4970,2020,Fall,b,A +285,BIOL,1010,2018,Summer,b,A +285,BIOL,2020,2018,Spring,a,A +285,BIOL,2030,2017,Spring,d,A +285,BIOL,2420,2020,Spring,a,A +285,CS,4400,2019,Summer,a,A +285,MATH,2280,2020,Spring,a,A +285,PHYS,2220,2018,Fall,a,A +288,MATH,1250,2018,Summer,c,A +289,PHYS,2140,2019,Fall,b,A +290,BIOL,1030,2016,Summer,a,A +292,BIOL,2010,2020,Spring,b,A +292,BIOL,2021,2017,Fall,a,A +292,CS,3200,2020,Summer,a,A +292,MATH,1250,2017,Summer,a,A +292,PHYS,2140,2020,Fall,a,A +293,BIOL,2210,2019,Fall,a,A +293,CS,2100,2019,Summer,a,A +293,PHYS,3210,2020,Fall,a,A +295,MATH,1210,2016,Spring,b,A +299,CS,2420,2017,Summer,b,A +300,CS,3505,2019,Summer,c,A +302,CS,2420,2015,Summer,c,A +307,CS,4400,2016,Spring,a,A +307,MATH,2280,2018,Spring,a,A +307,PHYS,2060,2019,Summer,a,A +310,PHYS,3220,2020,Spring,a,A +311,BIOL,2030,2020,Spring,b,A +311,BIOL,2420,2020,Summer,a,A +311,CS,3810,2018,Summer,c,A +312,BIOL,2330,2015,Fall,d,A +312,PHYS,2060,2019,Summer,a,A +313,BIOL,2420,2020,Summer,a,A +313,PHYS,2220,2017,Spring,a,A +314,BIOL,2030,2016,Fall,a,A +314,CS,3810,2016,Summer,a,A +314,MATH,1260,2019,Summer,b,A +314,MATH,2210,2017,Spring,a,A +318,BIOL,2355,2017,Summer,a,A +321,CS,3500,2019,Fall,b,A +321,CS,4400,2019,Spring,a,A +321,MATH,1220,2019,Fall,b,A +321,MATH,3210,2019,Spring,b,A +323,PHYS,2220,2020,Summer,b,A 
+329,BIOL,1006,2019,Summer,a,A +329,CS,4400,2017,Spring,a,A +331,PHYS,3210,2020,Fall,c,A +333,CS,3500,2020,Summer,a,A +333,CS,3810,2019,Fall,a,A +335,PHYS,2140,2016,Fall,a,A +336,BIOL,2010,2015,Fall,a,A +340,BIOL,1010,2020,Summer,d,A +340,BIOL,2021,2019,Fall,a,A +342,BIOL,2030,2018,Summer,a,A +342,PHYS,3220,2020,Spring,d,A +345,CS,4400,2019,Spring,d,A +345,PHYS,2210,2019,Fall,b,A +347,BIOL,2210,2020,Fall,a,A +347,BIOL,2420,2020,Summer,a,A +348,BIOL,2355,2018,Summer,b,A +348,CS,3200,2016,Fall,b,A +348,MATH,1220,2018,Summer,a,A +351,CS,4970,2019,Spring,a,A +353,BIOL,1010,2017,Summer,a,A +353,MATH,1260,2017,Summer,a,A +356,MATH,1210,2017,Spring,a,A +357,BIOL,2325,2016,Summer,a,A +359,MATH,2280,2018,Fall,b,A +362,BIOL,1006,2018,Spring,a,A +362,BIOL,2030,2019,Summer,b,A +362,PHYS,2140,2019,Fall,b,A +364,MATH,3210,2019,Spring,a,A +366,BIOL,2355,2017,Fall,a,A +366,CS,1410,2018,Spring,d,A +366,MATH,3220,2017,Fall,b,A +366,PHYS,3210,2019,Summer,b,A +368,CS,4500,2020,Summer,a,A +369,CS,2420,2016,Fall,a,A +369,CS,4400,2017,Spring,a,A +371,CS,3505,2018,Fall,c,A +372,MATH,1210,2018,Spring,a,A +373,BIOL,2355,2017,Fall,b,A +373,PHYS,2220,2018,Summer,a,A +374,PHYS,2100,2017,Fall,a,A +375,BIOL,2355,2017,Summer,a,A +377,BIOL,1210,2017,Spring,a,A +377,BIOL,2030,2017,Spring,a,A +378,PHYS,2210,2018,Fall,b,A +379,BIOL,2355,2018,Summer,b,A +379,CS,4970,2020,Summer,b,A +380,PHYS,2060,2019,Fall,a,A +384,CS,4970,2020,Summer,c,A +384,PHYS,3210,2019,Spring,a,A +386,BIOL,2325,2018,Summer,a,A +386,MATH,1250,2020,Summer,a,A +387,BIOL,2020,2018,Fall,c,A +387,MATH,2280,2018,Spring,a,A +387,PHYS,2100,2017,Fall,a,A +391,CS,4940,2020,Summer,a,A +391,CS,4940,2020,Summer,b,A +391,PHYS,2040,2019,Spring,a,A +391,PHYS,2140,2020,Fall,a,A +391,PHYS,2210,2019,Spring,d,A +392,BIOL,1006,2017,Fall,a,A +393,CS,3100,2017,Summer,a,A +394,MATH,2270,2017,Fall,c,A +394,PHYS,2140,2015,Spring,b,A +396,CS,3500,2019,Summer,a,A +397,BIOL,1010,2017,Spring,a,A +397,CS,3500,2019,Fall,a,A +397,CS,4940,2020,Summer,a,A +397,PHYS,3210,2017,Fall,a,A +399,PHYS,2060,2018,Fall,a,A +399,PHYS,2100,2019,Summer,a,A +100,MATH,1220,2020,Spring,a,A- +102,BIOL,1030,2018,Fall,a,A- +102,BIOL,2020,2019,Summer,a,A- +102,BIOL,2021,2018,Spring,a,A- +102,BIOL,2210,2019,Summer,a,A- +102,CS,4150,2019,Spring,a,A- +102,MATH,1250,2018,Summer,a,A- +107,BIOL,2021,2019,Fall,a,A- +107,CS,3505,2016,Summer,a,A- +107,PHYS,3220,2016,Summer,b,A- +108,BIOL,1010,2020,Summer,b,A- +109,BIOL,1030,2020,Summer,a,A- +109,CS,4970,2020,Summer,d,A- +110,CS,3505,2020,Fall,b,A- +113,BIOL,2030,2019,Summer,b,A- +113,MATH,2210,2020,Spring,a,A- +113,PHYS,2210,2018,Fall,a,A- +113,PHYS,2210,2018,Fall,b,A- +118,CS,4970,2020,Summer,b,A- +120,CS,4970,2017,Spring,a,A- +120,PHYS,2210,2018,Fall,c,A- +120,PHYS,3220,2017,Fall,a,A- +123,BIOL,1010,2015,Summer,b,A- +123,CS,2100,2016,Summer,a,A- +123,MATH,1250,2018,Spring,a,A- +123,MATH,1260,2019,Summer,b,A- +123,MATH,2270,2017,Fall,d,A- +123,MATH,3210,2020,Spring,a,A- +123,PHYS,2040,2018,Spring,a,A- +123,PHYS,3220,2016,Summer,b,A- +124,BIOL,2420,2020,Summer,a,A- +124,MATH,1260,2019,Summer,a,A- +126,BIOL,2020,2015,Fall,a,A- +126,MATH,3210,2015,Fall,a,A- +127,BIOL,2021,2018,Fall,a,A- +127,PHYS,3220,2017,Fall,d,A- +128,CS,1030,2018,Fall,a,A- +128,CS,2420,2017,Fall,a,A- +129,BIOL,2020,2018,Spring,a,A- +130,CS,4970,2020,Fall,c,A- +131,BIOL,1210,2018,Spring,a,A- +131,MATH,2210,2018,Spring,b,A- +133,MATH,1250,2020,Summer,a,A- +138,CS,4940,2015,Summer,a,A- +138,MATH,3210,2015,Fall,b,A- +142,BIOL,1006,2020,Spring,a,A- +142,CS,3500,2020,Summer,a,A- 
+143,CS,3500,2019,Fall,c,A- +143,CS,3505,2018,Summer,b,A- +143,PHYS,2140,2018,Summer,a,A- +144,BIOL,2020,2015,Summer,a,A- +151,BIOL,1010,2017,Summer,a,A- +151,CS,2420,2016,Fall,b,A- +160,CS,2420,2015,Summer,a,A- +162,MATH,1220,2015,Summer,b,A- +169,CS,3505,2019,Summer,a,A- +170,CS,4400,2020,Spring,a,A- +171,CS,4940,2020,Summer,b,A- +172,CS,2420,2016,Summer,a,A- +173,BIOL,1210,2019,Spring,a,A- +173,BIOL,2010,2017,Summer,a,A- +173,CS,4500,2019,Fall,b,A- +175,BIOL,2420,2020,Fall,a,A- +178,BIOL,2010,2020,Spring,b,A- +179,BIOL,1030,2019,Spring,a,A- +179,CS,4500,2016,Spring,b,A- +181,CS,4000,2020,Spring,b,A- +181,MATH,3210,2020,Spring,a,A- +182,MATH,1250,2016,Fall,b,A- +183,CS,2100,2019,Fall,d,A- +185,CS,1030,2019,Fall,b,A- +185,PHYS,2100,2018,Fall,a,A- +187,BIOL,2210,2017,Summer,a,A- +187,CS,3810,2020,Fall,a,A- +187,CS,4000,2017,Spring,a,A- +187,PHYS,2220,2017,Spring,c,A- +192,CS,3505,2015,Spring,a,A- +193,BIOL,2010,2015,Spring,a,A- +193,PHYS,2100,2015,Spring,a,A- +194,CS,4970,2019,Fall,a,A- +194,PHYS,2220,2020,Summer,a,A- +195,CS,3100,2016,Spring,d,A- +196,CS,3100,2019,Spring,a,A- +197,MATH,1250,2018,Summer,c,A- +199,BIOL,2020,2018,Fall,a,A- +199,CS,2100,2018,Fall,d,A- +202,CS,4400,2020,Fall,b,A- +203,CS,4500,2018,Spring,a,A- +204,CS,2420,2015,Summer,a,A- +208,BIOL,2010,2017,Fall,a,A- +208,MATH,2210,2017,Spring,a,A- +210,PHYS,2220,2018,Spring,a,A- +212,BIOL,2030,2015,Fall,a,A- +212,PHYS,2040,2015,Fall,c,A- +214,CS,4970,2018,Summer,c,A- +215,CS,4400,2015,Summer,a,A- +215,MATH,1250,2016,Fall,a,A- +215,MATH,2210,2017,Spring,a,A- +221,PHYS,2210,2019,Fall,c,A- +228,BIOL,2210,2019,Summer,b,A- +228,MATH,2210,2019,Spring,b,A- +229,MATH,3220,2018,Spring,c,A- +230,CS,4400,2020,Fall,a,A- +231,BIOL,1010,2019,Spring,b,A- +233,CS,4940,2020,Summer,b,A- +235,CS,3505,2019,Fall,c,A- +237,BIOL,2355,2017,Fall,a,A- +237,PHYS,2220,2018,Spring,a,A- +240,CS,3810,2018,Summer,c,A- +240,CS,4150,2018,Fall,a,A- +241,CS,3505,2019,Spring,a,A- +243,BIOL,2030,2017,Spring,b,A- +243,BIOL,2210,2016,Summer,a,A- +243,BIOL,2355,2017,Spring,d,A- +245,CS,3810,2016,Fall,b,A- +246,MATH,3220,2016,Fall,a,A- +247,BIOL,1006,2019,Summer,a,A- +247,BIOL,2355,2019,Spring,a,A- +248,CS,3505,2019,Fall,c,A- +248,MATH,3210,2019,Fall,a,A- +250,CS,4940,2020,Summer,b,A- +252,CS,3505,2018,Fall,b,A- +254,PHYS,3210,2020,Spring,a,A- +255,CS,4150,2018,Fall,b,A- +255,PHYS,2220,2018,Summer,a,A- +257,CS,3200,2018,Spring,a,A- +258,CS,4400,2020,Fall,a,A- +260,CS,3100,2017,Fall,a,A- +260,MATH,3210,2020,Summer,a,A- +261,CS,2100,2018,Summer,a,A- +261,MATH,3220,2017,Fall,b,A- +262,BIOL,2010,2017,Fall,a,A- +262,CS,3505,2018,Summer,b,A- +262,PHYS,2060,2016,Summer,a,A- +270,BIOL,2010,2018,Spring,a,A- +270,BIOL,2021,2016,Fall,a,A- +270,CS,3500,2019,Fall,b,A- +271,CS,4940,2020,Summer,b,A- +272,MATH,2210,2020,Fall,a,A- +275,BIOL,2325,2018,Summer,a,A- +276,BIOL,2020,2018,Fall,a,A- +276,CS,4000,2016,Fall,a,A- +276,PHYS,2060,2016,Summer,b,A- +276,PHYS,2100,2017,Summer,a,A- +277,BIOL,2355,2018,Spring,a,A- +277,CS,1030,2016,Summer,a,A- +277,CS,1410,2020,Spring,b,A- +277,CS,2420,2015,Spring,a,A- +277,CS,4150,2020,Spring,a,A- +277,MATH,3210,2016,Fall,a,A- +278,CS,2100,2016,Summer,c,A- +279,MATH,1210,2018,Summer,a,A- +282,BIOL,1030,2016,Spring,a,A- +282,CS,2420,2016,Fall,a,A- +282,PHYS,2060,2016,Summer,b,A- +285,MATH,1250,2016,Summer,a,A- +285,MATH,1260,2019,Spring,b,A- +285,PHYS,3210,2018,Fall,a,A- +287,PHYS,3210,2019,Summer,b,A- +288,BIOL,2020,2018,Fall,d,A- +288,CS,3100,2019,Spring,b,A- +288,PHYS,3220,2017,Fall,d,A- +289,PHYS,2220,2020,Summer,b,A- 
+289,PHYS,3210,2020,Fall,a,A- +290,BIOL,2330,2015,Fall,a,A- +290,CS,3200,2016,Summer,a,A- +292,CS,4400,2020,Fall,b,A- +292,CS,4970,2020,Summer,c,A- +292,PHYS,2220,2018,Fall,a,A- +293,CS,4970,2019,Fall,b,A- +293,PHYS,2060,2020,Spring,b,A- +294,CS,4500,2018,Spring,a,A- +295,CS,3200,2015,Fall,d,A- +296,BIOL,2021,2018,Fall,c,A- +296,MATH,2210,2019,Spring,b,A- +296,PHYS,2220,2020,Summer,a,A- +298,CS,4970,2019,Summer,b,A- +300,BIOL,2330,2020,Spring,a,A- +300,CS,4500,2019,Fall,b,A- +300,CS,4940,2020,Summer,b,A- +300,PHYS,3210,2020,Summer,a,A- +301,MATH,3220,2016,Spring,b,A- +305,BIOL,1210,2019,Spring,a,A- +305,MATH,3220,2018,Spring,c,A- +305,PHYS,2220,2018,Spring,a,A- +307,PHYS,2140,2016,Spring,c,A- +307,PHYS,3210,2019,Summer,c,A- +311,CS,4000,2020,Spring,b,A- +311,MATH,1250,2017,Summer,b,A- +311,MATH,3220,2017,Fall,a,A- +311,PHYS,2220,2020,Summer,a,A- +312,BIOL,2021,2018,Summer,a,A- +313,BIOL,1006,2020,Fall,b,A- +313,CS,3505,2015,Fall,a,A- +313,MATH,1250,2018,Summer,b,A- +314,MATH,1220,2017,Spring,b,A- +317,BIOL,1010,2016,Summer,a,A- +317,PHYS,2220,2016,Summer,a,A- +318,CS,4970,2019,Fall,d,A- +321,MATH,1250,2018,Summer,b,A- +321,MATH,1250,2018,Summer,c,A- +325,CS,3200,2020,Spring,c,A- +329,MATH,1220,2020,Summer,a,A- +329,MATH,3220,2016,Fall,b,A- +330,BIOL,1006,2020,Spring,a,A- +332,BIOL,2355,2018,Summer,c,A- +333,PHYS,2210,2019,Fall,b,A- +335,BIOL,1030,2017,Spring,c,A- +335,MATH,3210,2015,Fall,d,A- +339,CS,3505,2020,Fall,a,A- +340,BIOL,2330,2020,Spring,a,A- +342,BIOL,2325,2019,Spring,b,A- +342,BIOL,2355,2018,Summer,a,A- +342,PHYS,3210,2020,Fall,a,A- +344,CS,1030,2018,Fall,a,A- +345,CS,2100,2018,Summer,c,A- +345,CS,2420,2020,Fall,a,A- +345,PHYS,2220,2020,Summer,a,A- +347,CS,4150,2020,Fall,a,A- +348,CS,1410,2018,Spring,a,A- +348,CS,3500,2020,Summer,a,A- +357,CS,4500,2016,Spring,b,A- +359,CS,3810,2019,Fall,b,A- +359,PHYS,2060,2019,Fall,a,A- +361,CS,4500,2018,Spring,a,A- +361,MATH,2210,2017,Summer,a,A- +362,PHYS,3210,2019,Summer,b,A- +363,CS,4970,2019,Summer,c,A- +363,PHYS,2210,2019,Fall,b,A- +366,CS,3100,2019,Spring,b,A- +368,CS,2100,2019,Summer,b,A- +369,BIOL,2325,2016,Summer,a,A- +369,MATH,2210,2018,Spring,b,A- +371,CS,2100,2018,Summer,c,A- +372,BIOL,2355,2019,Spring,b,A- +373,BIOL,2420,2020,Spring,a,A- +373,CS,3200,2016,Summer,a,A- +373,CS,4400,2015,Fall,c,A- +373,PHYS,2060,2016,Summer,a,A- +374,BIOL,2325,2018,Spring,a,A- +374,CS,3100,2016,Spring,b,A- +374,MATH,3220,2016,Spring,c,A- +374,PHYS,2040,2015,Fall,b,A- +377,CS,3810,2018,Summer,b,A- +377,MATH,1260,2019,Summer,b,A- +378,BIOL,2030,2017,Spring,c,A- +378,PHYS,2220,2016,Summer,a,A- +379,BIOL,2021,2016,Fall,a,A- +379,CS,4940,2017,Fall,b,A- +379,CS,4970,2020,Summer,d,A- +379,PHYS,3220,2018,Summer,a,A- +380,BIOL,2330,2019,Fall,a,A- +384,MATH,1250,2020,Summer,a,A- +385,PHYS,3210,2018,Spring,a,A- +386,CS,3810,2018,Summer,a,A- +386,CS,4500,2019,Summer,a,A- +388,CS,3810,2018,Spring,a,A- +391,BIOL,2420,2020,Fall,a,A- +391,CS,3505,2019,Fall,a,A- +392,CS,4970,2018,Summer,c,A- +392,MATH,1210,2017,Summer,c,A- +392,PHYS,2060,2016,Spring,a,A- +393,BIOL,2355,2018,Spring,a,A- +393,CS,3505,2016,Summer,a,A- +395,CS,3500,2016,Spring,a,A- +396,MATH,2270,2019,Summer,c,A- +397,BIOL,1006,2018,Spring,a,A- +397,BIOL,2030,2016,Fall,a,A- +397,CS,3200,2017,Spring,a,A- +398,BIOL,1006,2019,Fall,b,A- +398,CS,4940,2019,Fall,a,A- +398,MATH,1210,2018,Summer,a,A- +399,CS,3810,2018,Summer,b,A- +100,CS,4970,2018,Summer,b,B +100,PHYS,3210,2019,Fall,a,B +102,BIOL,1010,2018,Summer,a,B +102,BIOL,2325,2017,Fall,b,B +105,BIOL,2355,2017,Spring,b,B +105,MATH,1220,2017,Spring,d,B 
+105,PHYS,2140,2018,Summer,a,B +106,MATH,1210,2020,Spring,b,B +106,MATH,1250,2020,Summer,a,B +106,PHYS,3220,2020,Spring,c,B +107,CS,3500,2016,Summer,a,B +107,PHYS,3210,2016,Summer,a,B +108,CS,3200,2020,Spring,c,B +108,MATH,1260,2019,Fall,a,B +109,BIOL,1006,2019,Fall,a,B +112,CS,3200,2020,Summer,a,B +113,CS,3810,2020,Fall,a,B +115,BIOL,2020,2016,Spring,a,B +117,BIOL,1006,2018,Spring,a,B +117,BIOL,2021,2018,Summer,a,B +118,CS,2100,2019,Fall,a,B +119,MATH,1210,2016,Spring,a,B +119,MATH,3220,2016,Spring,c,B +120,CS,1410,2018,Spring,b,B +121,PHYS,2060,2019,Summer,b,B +122,BIOL,1010,2020,Summer,b,B +122,CS,2100,2020,Fall,a,B +123,MATH,1210,2019,Summer,a,B +123,MATH,2210,2018,Spring,a,B +124,BIOL,2355,2020,Fall,a,B +124,CS,4970,2019,Summer,a,B +124,MATH,2270,2017,Fall,d,B +127,CS,4970,2019,Fall,b,B +127,MATH,1250,2017,Summer,a,B +127,MATH,3220,2018,Spring,c,B +128,BIOL,2210,2018,Spring,a,B +128,BIOL,2420,2020,Summer,a,B +129,CS,3505,2019,Summer,b,B +131,MATH,3220,2018,Spring,c,B +132,CS,4500,2018,Spring,a,B +133,BIOL,2021,2018,Fall,d,B +133,CS,3810,2018,Summer,c,B +134,CS,4000,2017,Summer,a,B +135,CS,3200,2020,Fall,a,B +135,MATH,1220,2019,Fall,c,B +139,MATH,1220,2018,Summer,a,B +143,CS,4970,2018,Fall,c,B +144,MATH,3210,2015,Summer,a,B +146,CS,2100,2019,Fall,c,B +149,CS,3500,2015,Fall,b,B +151,BIOL,2325,2018,Summer,a,B +151,BIOL,2420,2020,Summer,a,B +151,CS,4400,2019,Spring,b,B +151,MATH,1250,2020,Summer,a,B +152,MATH,1260,2019,Spring,b,B +153,BIOL,1010,2020,Summer,a,B +158,MATH,1250,2018,Summer,c,B +162,CS,4150,2015,Summer,a,B +163,MATH,1210,2018,Fall,b,B +164,BIOL,2030,2020,Spring,a,B +164,CS,3500,2020,Summer,a,B +164,CS,3505,2020,Spring,a,B +167,CS,4500,2020,Summer,a,B +169,CS,4000,2020,Spring,a,B +169,CS,4500,2020,Spring,a,B +170,MATH,2210,2020,Spring,b,B +170,PHYS,3220,2020,Spring,a,B +171,CS,3500,2019,Fall,b,B +171,CS,3810,2020,Fall,a,B +173,BIOL,1010,2018,Summer,b,B +173,CS,3505,2018,Summer,b,B +173,MATH,1250,2017,Summer,a,B +176,BIOL,1010,2016,Summer,a,B +176,BIOL,1030,2016,Fall,a,B +177,BIOL,1010,2015,Summer,b,B +177,CS,3810,2018,Summer,a,B +178,PHYS,2040,2019,Spring,a,B +179,CS,3500,2019,Summer,a,B +179,CS,3810,2018,Spring,a,B +179,MATH,1210,2016,Spring,d,B +180,MATH,1220,2019,Fall,b,B +181,CS,2100,2019,Fall,a,B +181,CS,2100,2019,Fall,d,B +181,CS,4000,2020,Spring,a,B +182,BIOL,1010,2015,Summer,a,B +185,BIOL,1010,2020,Summer,c,B +185,BIOL,2210,2020,Fall,a,B +187,PHYS,2040,2017,Fall,c,B +192,CS,3100,2016,Spring,d,B +199,BIOL,1006,2017,Fall,a,B +199,BIOL,2330,2017,Fall,b,B +199,CS,1410,2018,Spring,b,B +199,CS,3500,2019,Fall,b,B +200,BIOL,1010,2020,Summer,a,B +200,CS,3505,2020,Summer,a,B +204,BIOL,2325,2015,Fall,c,B +207,BIOL,2030,2016,Summer,b,B +207,CS,3200,2016,Summer,b,B +207,MATH,3220,2017,Fall,a,B +210,MATH,1220,2016,Spring,a,B +210,MATH,1250,2017,Summer,b,B +210,MATH,3220,2016,Spring,a,B +211,MATH,1260,2015,Summer,a,B +212,MATH,2210,2015,Summer,c,B +214,BIOL,2355,2018,Spring,a,B +214,MATH,1210,2016,Fall,a,B +215,CS,4500,2016,Spring,a,B +215,MATH,1210,2016,Fall,b,B +215,PHYS,2100,2017,Summer,b,B +216,CS,1410,2016,Spring,a,B +221,PHYS,2060,2020,Spring,b,B +227,BIOL,2210,2018,Summer,b,B +229,CS,1410,2018,Spring,b,B +229,CS,3500,2016,Spring,a,B +230,MATH,2270,2020,Fall,a,B +231,MATH,2210,2018,Spring,b,B +231,PHYS,2210,2017,Summer,a,B +234,BIOL,1006,2019,Summer,a,B +235,CS,4150,2020,Fall,a,B +238,MATH,2280,2018,Spring,a,B +240,BIOL,1010,2019,Spring,c,B +240,CS,3505,2018,Fall,a,B +241,BIOL,2420,2020,Spring,b,B +241,CS,3810,2019,Fall,a,B +241,MATH,2210,2020,Spring,c,B 
+246,CS,3200,2016,Summer,a,B +246,MATH,3210,2015,Fall,a,B +247,CS,4970,2018,Fall,b,B +247,MATH,1250,2018,Summer,a,B +248,BIOL,2021,2018,Fall,c,B +248,MATH,1220,2019,Fall,a,B +248,MATH,2270,2019,Summer,a,B +249,BIOL,1010,2017,Spring,a,B +249,BIOL,2030,2015,Fall,a,B +251,CS,4970,2020,Summer,a,B +251,MATH,2210,2020,Spring,c,B +255,CS,3810,2018,Spring,a,B +255,CS,4000,2017,Spring,a,B +255,MATH,2270,2019,Spring,a,B +255,PHYS,3210,2019,Summer,b,B +257,BIOL,1030,2017,Spring,a,B +258,BIOL,2355,2020,Summer,a,B +258,CS,3505,2018,Fall,a,B +258,CS,3810,2019,Fall,a,B +258,PHYS,3210,2019,Spring,a,B +260,BIOL,2210,2018,Summer,a,B +260,CS,2100,2019,Fall,c,B +264,PHYS,2060,2016,Summer,a,B +264,PHYS,2100,2017,Summer,c,B +267,CS,4400,2019,Summer,b,B +267,PHYS,2140,2020,Fall,a,B +267,PHYS,2220,2018,Fall,a,B +268,CS,2420,2016,Fall,b,B +270,BIOL,1210,2016,Spring,a,B +270,CS,3200,2016,Summer,a,B +270,CS,3810,2018,Summer,b,B +270,MATH,2270,2020,Spring,a,B +270,PHYS,2220,2017,Spring,c,B +274,BIOL,2355,2018,Summer,c,B +274,CS,3200,2018,Spring,a,B +276,BIOL,2325,2019,Summer,a,B +276,CS,1410,2015,Summer,b,B +276,CS,2100,2016,Spring,a,B +276,CS,2420,2015,Fall,a,B +276,CS,4500,2015,Summer,a,B +276,MATH,3220,2016,Summer,a,B +277,MATH,1220,2017,Spring,c,B +277,MATH,3220,2016,Fall,a,B +277,PHYS,2220,2017,Spring,c,B +277,PHYS,3210,2018,Spring,a,B +278,MATH,1210,2016,Fall,b,B +282,BIOL,2355,2017,Spring,c,B +285,BIOL,2030,2017,Spring,b,B +285,PHYS,2040,2017,Fall,b,B +288,CS,3500,2016,Summer,a,B +289,BIOL,1006,2020,Fall,c,B +289,MATH,1250,2020,Summer,a,B +290,BIOL,2021,2015,Summer,c,B +290,CS,1410,2017,Spring,a,B +292,CS,4150,2018,Fall,a,B +292,PHYS,2060,2020,Spring,b,B +292,PHYS,3210,2019,Spring,c,B +293,CS,4500,2019,Fall,a,B +294,CS,4970,2019,Summer,d,B +296,BIOL,2021,2018,Fall,d,B +296,CS,2100,2019,Summer,a,B +296,CS,3505,2019,Summer,b,B +297,BIOL,2210,2020,Fall,a,B +305,CS,3810,2018,Spring,a,B +306,PHYS,3210,2020,Fall,a,B +307,BIOL,1210,2019,Spring,a,B +307,MATH,1220,2016,Spring,a,B +309,BIOL,2330,2017,Summer,a,B +309,CS,4970,2020,Summer,d,B +309,MATH,2270,2020,Spring,a,B +309,MATH,3220,2018,Spring,c,B +309,PHYS,2210,2019,Fall,b,B +311,CS,3505,2019,Fall,c,B +312,BIOL,1010,2017,Spring,a,B +312,PHYS,2140,2016,Summer,a,B +312,PHYS,2220,2017,Spring,b,B +312,PHYS,2220,2017,Spring,d,B +312,PHYS,3210,2019,Spring,b,B +313,BIOL,1010,2018,Summer,b,B +314,BIOL,2355,2018,Fall,a,B +314,CS,2100,2019,Summer,a,B +314,MATH,3210,2019,Spring,b,B +314,PHYS,2140,2017,Summer,a,B +316,CS,2100,2019,Fall,d,B +318,BIOL,1030,2019,Spring,c,B +318,BIOL,2325,2018,Summer,a,B +318,CS,4500,2018,Spring,b,B +321,BIOL,1030,2015,Summer,a,B +321,CS,1030,2016,Fall,a,B +321,CS,4000,2016,Fall,a,B +321,CS,4500,2016,Spring,b,B +321,CS,4970,2019,Fall,b,B +321,PHYS,2040,2016,Spring,a,B +321,PHYS,3220,2020,Spring,b,B +323,BIOL,2355,2020,Summer,a,B +326,MATH,3220,2017,Fall,b,B +329,BIOL,2355,2017,Spring,b,B +329,CS,2100,2018,Summer,b,B +329,CS,3810,2016,Fall,b,B +329,PHYS,2060,2018,Fall,b,B +332,BIOL,2325,2018,Spring,a,B +332,MATH,1210,2019,Spring,a,B +333,BIOL,2355,2020,Summer,a,B +333,CS,2100,2020,Fall,a,B +333,MATH,2270,2019,Fall,a,B +335,CS,1410,2016,Spring,b,B +335,MATH,1250,2015,Fall,a,B +341,CS,4000,2020,Fall,a,B +342,MATH,1250,2020,Summer,a,B +344,CS,4970,2018,Summer,a,B +345,BIOL,2021,2017,Fall,a,B +345,BIOL,2030,2019,Summer,d,B +345,CS,4970,2019,Spring,b,B +348,BIOL,1010,2020,Summer,b,B +348,BIOL,2030,2017,Spring,b,B +348,CS,2100,2017,Fall,a,B +348,MATH,3210,2019,Spring,a,B +351,MATH,1210,2019,Spring,a,B +356,BIOL,2355,2019,Spring,a,B 
+357,BIOL,2020,2016,Spring,a,B +358,MATH,3210,2019,Fall,a,B +360,MATH,2270,2020,Fall,a,B +363,BIOL,2010,2020,Summer,b,B +364,CS,3500,2020,Summer,a,B +365,BIOL,2420,2020,Spring,b,B +366,BIOL,2021,2018,Summer,a,B +366,MATH,1220,2019,Fall,b,B +368,BIOL,1010,2018,Summer,a,B +368,CS,4000,2020,Fall,a,B +368,PHYS,2210,2019,Spring,c,B +369,BIOL,2210,2018,Summer,a,B +371,BIOL,1010,2020,Summer,d,B +372,CS,3810,2018,Spring,a,B +372,CS,4970,2018,Summer,c,B +373,PHYS,2040,2015,Fall,b,B +373,PHYS,2210,2017,Summer,d,B +375,BIOL,2210,2017,Summer,c,B +378,BIOL,1030,2018,Summer,a,B +378,BIOL,2330,2019,Fall,a,B +378,MATH,1250,2020,Summer,a,B +378,MATH,3210,2019,Spring,a,B +379,CS,4500,2018,Spring,b,B +379,MATH,2270,2019,Spring,a,B +380,CS,3500,2019,Fall,a,B +382,CS,1410,2015,Summer,d,B +384,CS,2100,2018,Fall,b,B +384,MATH,1210,2018,Fall,a,B +385,CS,4000,2018,Spring,a,B +386,CS,3500,2020,Summer,a,B +387,CS,1030,2018,Fall,a,B +390,CS,2100,2019,Summer,a,B +390,CS,2420,2019,Summer,a,B +390,CS,3505,2020,Fall,c,B +390,MATH,1220,2019,Fall,c,B +390,PHYS,2060,2020,Fall,a,B +390,PHYS,2210,2019,Fall,c,B +390,PHYS,2220,2020,Summer,b,B +391,CS,2100,2018,Fall,d,B +392,CS,4400,2015,Fall,b,B +392,MATH,2210,2017,Summer,a,B +397,MATH,1260,2019,Summer,a,B +398,PHYS,2060,2019,Summer,a,B +100,BIOL,2020,2018,Fall,b,B+ +100,MATH,1260,2019,Fall,a,B+ +101,PHYS,2140,2018,Summer,a,B+ +102,MATH,2270,2017,Fall,d,B+ +102,PHYS,2220,2018,Spring,a,B+ +105,CS,3200,2016,Fall,d,B+ +106,CS,3505,2020,Fall,b,B+ +107,BIOL,2355,2020,Spring,a,B+ +107,MATH,3220,2017,Fall,a,B+ +109,BIOL,2010,2020,Spring,a,B+ +110,CS,4000,2020,Fall,a,B+ +115,BIOL,1006,2016,Spring,a,B+ +115,BIOL,1210,2017,Spring,a,B+ +116,CS,3810,2016,Fall,b,B+ +117,MATH,1220,2017,Spring,c,B+ +117,MATH,2210,2018,Spring,a,B+ +118,CS,1030,2020,Spring,c,B+ +120,BIOL,2210,2017,Summer,b,B+ +120,CS,4400,2015,Summer,a,B+ +120,PHYS,2100,2016,Fall,a,B+ +120,PHYS,2140,2015,Fall,a,B+ +122,BIOL,1010,2020,Summer,a,B+ +123,BIOL,2420,2017,Summer,b,B+ +123,MATH,2280,2015,Fall,a,B+ +123,PHYS,2060,2019,Fall,c,B+ +124,CS,4400,2019,Fall,b,B+ +124,PHYS,2210,2018,Fall,c,B+ +127,CS,4000,2019,Spring,a,B+ +128,MATH,2210,2017,Summer,a,B+ +129,CS,3100,2019,Spring,b,B+ +129,CS,3505,2019,Summer,c,B+ +129,CS,3810,2018,Summer,c,B+ +131,CS,3200,2020,Spring,a,B+ +131,CS,3810,2019,Fall,a,B+ +131,CS,4500,2019,Fall,b,B+ +132,CS,2420,2017,Summer,b,B+ +134,CS,2100,2016,Summer,c,B+ +134,MATH,3220,2016,Fall,b,B+ +135,CS,4150,2020,Fall,a,B+ +135,MATH,3210,2020,Summer,a,B+ +140,BIOL,2030,2015,Fall,a,B+ +143,CS,4500,2019,Fall,c,B+ +143,CS,4940,2017,Fall,a,B+ +148,CS,4150,2020,Fall,a,B+ +151,BIOL,1210,2018,Fall,b,B+ +151,PHYS,2140,2018,Summer,a,B+ +152,CS,4970,2019,Fall,c,B+ +152,PHYS,3210,2019,Summer,b,B+ +153,PHYS,3210,2020,Fall,a,B+ +158,CS,2100,2018,Fall,a,B+ +160,BIOL,1030,2016,Summer,a,B+ +160,CS,3810,2016,Summer,a,B+ +163,BIOL,2325,2015,Fall,c,B+ +163,CS,4150,2016,Summer,a,B+ +163,MATH,3220,2016,Summer,a,B+ +166,BIOL,2010,2020,Summer,a,B+ +166,MATH,3210,2020,Summer,a,B+ +174,BIOL,2210,2018,Summer,a,B+ +176,CS,4150,2015,Summer,a,B+ +176,CS,4500,2016,Fall,a,B+ +177,BIOL,2021,2018,Spring,a,B+ +177,BIOL,2355,2020,Summer,b,B+ +179,CS,2420,2017,Summer,c,B+ +179,CS,4400,2016,Summer,a,B+ +179,MATH,3220,2018,Spring,d,B+ +179,PHYS,2100,2016,Fall,b,B+ +180,CS,3500,2019,Fall,a,B+ +181,MATH,1220,2019,Fall,a,B+ +182,BIOL,2020,2015,Fall,c,B+ +182,MATH,2270,2017,Fall,c,B+ +183,PHYS,2210,2018,Fall,a,B+ +185,PHYS,2060,2019,Fall,a,B+ +186,BIOL,2355,2020,Fall,a,B+ +187,BIOL,1006,2019,Fall,a,B+ +192,BIOL,2325,2015,Fall,c,B+ 
+192,CS,4150,2015,Summer,a,B+ +196,MATH,2280,2018,Fall,c,B+ +196,PHYS,2220,2018,Fall,a,B+ +197,CS,3200,2018,Spring,a,B+ +197,PHYS,3210,2018,Spring,c,B+ +200,MATH,3210,2020,Fall,a,B+ +207,CS,4500,2017,Summer,a,B+ +208,BIOL,2330,2017,Fall,a,B+ +210,MATH,2270,2015,Fall,b,B+ +210,MATH,2280,2020,Spring,a,B+ +210,PHYS,2040,2015,Fall,c,B+ +214,BIOL,1010,2018,Summer,a,B+ +214,BIOL,2020,2016,Spring,a,B+ +214,CS,1030,2016,Summer,a,B+ +214,MATH,1250,2016,Spring,a,B+ +215,BIOL,2210,2017,Spring,b,B+ +215,BIOL,2210,2017,Spring,c,B+ +217,BIOL,2325,2018,Fall,c,B+ +219,CS,2100,2020,Fall,a,B+ +220,CS,3810,2020,Fall,a,B+ +222,BIOL,1006,2020,Fall,a,B+ +222,CS,4970,2020,Summer,b,B+ +225,MATH,2210,2020,Fall,a,B+ +227,PHYS,2220,2018,Spring,a,B+ +227,PHYS,3220,2020,Spring,b,B+ +228,CS,4400,2020,Spring,a,B+ +228,MATH,1210,2019,Summer,a,B+ +228,PHYS,3210,2020,Fall,a,B+ +229,BIOL,2330,2017,Summer,a,B+ +229,PHYS,2060,2016,Spring,a,B+ +230,BIOL,2355,2018,Spring,a,B+ +231,BIOL,2020,2018,Fall,d,B+ +234,MATH,2280,2019,Fall,c,B+ +240,PHYS,3210,2020,Summer,a,B+ +243,CS,1030,2016,Fall,a,B+ +245,PHYS,2040,2015,Fall,a,B+ +246,BIOL,2030,2017,Spring,b,B+ +246,CS,4400,2017,Spring,a,B+ +246,PHYS,3210,2017,Summer,a,B+ +247,BIOL,1010,2019,Spring,d,B+ +247,CS,2100,2020,Fall,a,B+ +248,PHYS,2060,2018,Fall,b,B+ +249,CS,4400,2017,Spring,a,B+ +249,MATH,2210,2017,Spring,a,B+ +249,PHYS,3210,2016,Summer,a,B+ +254,BIOL,1010,2020,Summer,d,B+ +254,CS,3200,2020,Summer,a,B+ +255,CS,3200,2018,Spring,b,B+ +256,BIOL,1010,2020,Summer,a,B+ +256,CS,4000,2019,Spring,a,B+ +257,BIOL,1010,2020,Summer,b,B+ +257,CS,4000,2020,Spring,b,B+ +258,MATH,1260,2019,Fall,a,B+ +259,BIOL,1006,2019,Summer,a,B+ +259,MATH,3210,2019,Spring,b,B+ +259,PHYS,2040,2017,Fall,a,B+ +260,MATH,1210,2020,Spring,b,B+ +260,MATH,1250,2018,Spring,a,B+ +262,BIOL,2325,2018,Summer,a,B+ +262,MATH,2280,2018,Spring,a,B+ +263,CS,2420,2020,Summer,a,B+ +264,BIOL,2355,2017,Fall,b,B+ +264,CS,3100,2017,Fall,a,B+ +267,BIOL,1006,2020,Spring,a,B+ +269,PHYS,3220,2020,Spring,b,B+ +270,BIOL,1006,2018,Spring,b,B+ +270,BIOL,1010,2020,Summer,c,B+ +270,BIOL,1030,2016,Summer,a,B+ +270,BIOL,2020,2018,Fall,a,B+ +270,BIOL,2330,2016,Fall,a,B+ +270,BIOL,2420,2018,Spring,a,B+ +270,MATH,1220,2015,Summer,b,B+ +270,PHYS,2040,2017,Fall,c,B+ +270,PHYS,3210,2017,Fall,a,B+ +270,PHYS,3220,2017,Fall,d,B+ +271,BIOL,1006,2020,Fall,c,B+ +274,MATH,1220,2019,Fall,b,B+ +274,MATH,2210,2020,Spring,a,B+ +276,MATH,1210,2016,Spring,a,B+ +276,MATH,1220,2018,Spring,a,B+ +276,MATH,1260,2019,Summer,b,B+ +276,MATH,2210,2015,Spring,b,B+ +277,BIOL,1030,2016,Summer,a,B+ +277,BIOL,2010,2017,Summer,a,B+ +277,CS,4940,2020,Summer,a,B+ +278,BIOL,1210,2017,Spring,a,B+ +278,BIOL,2355,2017,Spring,a,B+ +281,MATH,2210,2020,Fall,a,B+ +282,BIOL,1210,2017,Summer,a,B+ +284,MATH,3210,2019,Fall,a,B+ +285,BIOL,2010,2018,Spring,a,B+ +285,CS,4150,2016,Summer,b,B+ +285,PHYS,2140,2017,Summer,a,B+ +288,PHYS,2210,2018,Fall,b,B+ +290,PHYS,2060,2016,Spring,b,B+ +292,MATH,3220,2018,Spring,a,B+ +293,BIOL,2020,2019,Summer,a,B+ +293,BIOL,2210,2019,Fall,b,B+ +293,MATH,1220,2020,Summer,a,B+ +294,PHYS,2060,2019,Summer,b,B+ +296,BIOL,1006,2018,Fall,a,B+ +296,BIOL,2010,2020,Summer,b,B+ +296,PHYS,3220,2020,Spring,c,B+ +300,BIOL,1010,2020,Summer,d,B+ +301,CS,4500,2016,Spring,b,B+ +301,MATH,3210,2015,Summer,a,B+ +303,MATH,1260,2019,Summer,b,B+ +304,MATH,2270,2017,Summer,a,B+ +306,CS,3200,2020,Summer,a,B+ +307,BIOL,2020,2019,Summer,a,B+ +309,BIOL,2021,2018,Fall,b,B+ +309,BIOL,2325,2018,Fall,a,B+ +309,CS,1030,2020,Spring,c,B+ +309,CS,2100,2018,Fall,b,B+ 
+310,PHYS,3210,2020,Spring,a,B+ +311,CS,2100,2017,Fall,a,B+ +311,PHYS,2210,2019,Spring,a,B+ +312,BIOL,1006,2016,Summer,a,B+ +312,CS,1030,2016,Spring,a,B+ +312,CS,1410,2020,Spring,a,B+ +312,CS,2100,2019,Spring,b,B+ +312,CS,3810,2018,Summer,d,B+ +312,MATH,1220,2018,Spring,a,B+ +312,MATH,3210,2020,Summer,a,B+ +313,CS,3810,2018,Spring,a,B+ +313,CS,4400,2017,Spring,c,B+ +313,PHYS,2140,2016,Spring,b,B+ +314,BIOL,1010,2019,Spring,d,B+ +314,CS,3505,2019,Spring,b,B+ +314,PHYS,2040,2017,Fall,c,B+ +317,PHYS,2140,2016,Summer,a,B+ +318,MATH,2280,2019,Fall,b,B+ +318,PHYS,2140,2019,Fall,b,B+ +321,PHYS,2100,2015,Spring,b,B+ +323,BIOL,1010,2020,Summer,d,B+ +326,BIOL,1006,2017,Fall,a,B+ +326,CS,2420,2017,Fall,a,B+ +329,CS,1410,2020,Spring,b,B+ +332,BIOL,1030,2020,Summer,a,B+ +332,PHYS,2210,2018,Fall,c,B+ +333,CS,3505,2020,Fall,b,B+ +333,PHYS,3210,2019,Summer,c,B+ +339,CS,4970,2020,Summer,c,B+ +340,CS,4970,2019,Fall,d,B+ +344,PHYS,2220,2018,Summer,a,B+ +345,BIOL,1006,2017,Fall,a,B+ +345,BIOL,1010,2018,Fall,a,B+ +345,CS,4500,2018,Spring,d,B+ +345,MATH,2270,2019,Summer,c,B+ +345,PHYS,3220,2017,Fall,b,B+ +348,BIOL,2420,2017,Summer,b,B+ +348,CS,2420,2016,Spring,a,B+ +348,MATH,2210,2015,Summer,c,B+ +355,BIOL,2030,2017,Spring,d,B+ +355,CS,3500,2017,Fall,b,B+ +355,PHYS,2060,2016,Spring,a,B+ +356,BIOL,2325,2018,Fall,c,B+ +357,MATH,1220,2016,Spring,a,B+ +359,CS,2100,2019,Summer,b,B+ +360,BIOL,2210,2020,Fall,a,B+ +361,CS,2100,2018,Spring,a,B+ +362,PHYS,2210,2018,Fall,c,B+ +364,CS,4000,2020,Spring,a,B+ +364,MATH,1260,2019,Fall,a,B+ +366,CS,1030,2018,Fall,a,B+ +366,CS,2100,2017,Fall,a,B+ +366,CS,4970,2019,Spring,a,B+ +368,CS,3505,2018,Summer,a,B+ +369,CS,3200,2016,Fall,d,B+ +371,CS,4000,2020,Spring,b,B+ +372,CS,3200,2019,Spring,a,B+ +372,CS,3505,2019,Summer,b,B+ +373,BIOL,1006,2018,Spring,b,B+ +373,BIOL,2325,2018,Spring,a,B+ +373,PHYS,2140,2015,Summer,c,B+ +374,MATH,3210,2015,Fall,a,B+ +374,PHYS,3210,2018,Spring,c,B+ +377,BIOL,2210,2019,Summer,a,B+ +377,CS,3505,2018,Summer,a,B+ +377,CS,4400,2019,Fall,b,B+ +378,BIOL,1006,2020,Fall,b,B+ +378,BIOL,2020,2018,Fall,b,B+ +378,CS,3100,2016,Fall,a,B+ +378,PHYS,3210,2017,Summer,a,B+ +379,BIOL,1030,2015,Spring,d,B+ +379,CS,3200,2016,Summer,a,B+ +379,MATH,2280,2019,Fall,b,B+ +380,BIOL,1030,2019,Summer,a,B+ +380,BIOL,2210,2019,Fall,a,B+ +384,BIOL,1010,2020,Summer,b,B+ +384,BIOL,2021,2018,Fall,c,B+ +384,MATH,2210,2020,Fall,a,B+ +385,BIOL,2325,2017,Fall,a,B+ +385,CS,3500,2017,Fall,c,B+ +385,MATH,1220,2017,Spring,c,B+ +388,CS,4400,2017,Spring,c,B+ +389,MATH,1220,2016,Spring,a,B+ +390,BIOL,1006,2020,Fall,a,B+ +390,BIOL,2010,2020,Summer,b,B+ +392,BIOL,1010,2018,Summer,a,B+ +392,PHYS,3220,2017,Summer,a,B+ +393,PHYS,3210,2017,Summer,a,B+ +394,BIOL,2021,2015,Spring,a,B+ +395,CS,1030,2016,Spring,a,B+ +396,BIOL,2030,2019,Summer,b,B+ +397,CS,4400,2019,Summer,a,B+ +397,MATH,1220,2020,Summer,a,B+ +397,PHYS,2210,2019,Summer,a,B+ +398,CS,1030,2019,Fall,a,B+ +399,BIOL,2030,2019,Summer,c,B+ +101,PHYS,2210,2018,Fall,a,B- +102,CS,1030,2016,Fall,a,B- +102,CS,3200,2016,Fall,b,B- +106,CS,4400,2020,Fall,b,B- +106,MATH,2280,2020,Spring,b,B- +106,PHYS,2220,2020,Summer,a,B- +107,CS,4970,2016,Fall,a,B- +109,BIOL,2030,2019,Summer,c,B- +109,CS,3200,2018,Spring,c,B- +109,CS,3500,2017,Fall,b,B- +109,MATH,1250,2018,Spring,a,B- +109,MATH,2270,2017,Fall,a,B- +113,BIOL,1006,2018,Fall,a,B- +113,PHYS,2220,2020,Spring,a,B- +115,BIOL,2021,2017,Summer,a,B- +115,PHYS,2060,2016,Spring,a,B- +116,CS,1030,2016,Fall,a,B- +116,CS,4970,2017,Spring,a,B- +117,BIOL,1030,2016,Spring,a,B- +117,MATH,1250,2017,Summer,d,B- 
+118,CS,3500,2019,Summer,a,B- +119,CS,2420,2017,Summer,a,B- +119,CS,4400,2020,Fall,a,B- +119,MATH,2210,2019,Spring,b,B- +120,BIOL,2010,2017,Fall,a,B- +120,MATH,1210,2015,Summer,a,B- +120,MATH,2210,2015,Summer,c,B- +120,MATH,3210,2017,Spring,a,B- +120,PHYS,2210,2018,Fall,b,B- +122,PHYS,2060,2020,Spring,a,B- +123,BIOL,1030,2020,Summer,a,B- +123,CS,1030,2016,Summer,a,B- +123,CS,3100,2017,Fall,a,B- +123,CS,4150,2020,Spring,a,B- +123,PHYS,2210,2019,Fall,c,B- +124,CS,3810,2020,Fall,a,B- +127,MATH,1250,2017,Summer,b,B- +127,PHYS,2210,2017,Summer,c,B- +128,PHYS,2210,2019,Summer,a,B- +128,PHYS,2220,2018,Spring,a,B- +131,CS,1030,2020,Fall,a,B- +131,CS,4400,2020,Fall,a,B- +131,MATH,2270,2017,Fall,c,B- +133,CS,2420,2020,Summer,a,B- +133,PHYS,3210,2019,Summer,a,B- +134,PHYS,2220,2018,Spring,a,B- +134,PHYS,3210,2016,Summer,b,B- +135,BIOL,2010,2020,Spring,a,B- +140,BIOL,2420,2015,Spring,c,B- +144,MATH,1260,2015,Summer,a,B- +146,BIOL,2355,2019,Spring,c,B- +146,CS,4400,2019,Summer,a,B- +151,CS,4000,2017,Spring,a,B- +151,CS,4970,2020,Summer,d,B- +152,BIOL,2325,2019,Spring,a,B- +152,CS,2100,2020,Spring,a,B- +152,CS,3505,2019,Spring,a,B- +152,CS,4400,2020,Fall,a,B- +153,PHYS,2060,2020,Spring,b,B- +155,BIOL,2355,2017,Fall,b,B- +156,CS,3505,2018,Fall,a,B- +163,CS,4970,2018,Summer,c,B- +164,CS,3200,2019,Spring,a,B- +165,MATH,3220,2018,Spring,c,B- +169,BIOL,2210,2018,Summer,a,B- +169,MATH,2210,2019,Spring,a,B- +170,BIOL,1030,2020,Summer,a,B- +171,CS,4970,2020,Summer,d,B- +173,MATH,1260,2020,Spring,a,B- +177,CS,2420,2016,Fall,a,B- +178,CS,2100,2019,Fall,b,B- +179,CS,4970,2016,Fall,b,B- +179,MATH,1220,2017,Spring,b,B- +179,PHYS,2210,2017,Summer,b,B- +182,BIOL,2420,2017,Summer,a,B- +187,BIOL,2330,2017,Fall,b,B- +187,CS,3505,2019,Spring,b,B- +187,MATH,3210,2020,Summer,a,B- +187,PHYS,2140,2017,Fall,a,B- +192,MATH,1220,2015,Summer,a,B- +194,CS,4500,2019,Fall,d,B- +194,MATH,2270,2019,Summer,b,B- +195,BIOL,1030,2016,Summer,a,B- +195,BIOL,2010,2015,Summer,a,B- +197,BIOL,1010,2018,Summer,b,B- +199,BIOL,2021,2018,Fall,a,B- +199,CS,4970,2019,Summer,a,B- +200,CS,4970,2019,Fall,c,B- +208,MATH,1250,2017,Summer,d,B- +208,PHYS,2210,2017,Summer,d,B- +210,BIOL,2420,2020,Spring,a,B- +213,BIOL,1030,2016,Fall,a,B- +213,CS,3100,2016,Fall,a,B- +214,BIOL,2010,2018,Spring,a,B- +215,BIOL,1030,2017,Spring,c,B- +215,MATH,1220,2017,Summer,a,B- +217,BIOL,1030,2019,Spring,b,B- +220,CS,4970,2018,Summer,c,B- +221,CS,4970,2020,Summer,a,B- +223,MATH,2270,2020,Spring,a,B- +228,BIOL,1010,2019,Spring,b,B- +228,BIOL,2030,2019,Summer,b,B- +228,CS,3500,2019,Summer,a,B- +229,CS,3200,2016,Fall,c,B- +229,MATH,1210,2016,Spring,b,B- +230,CS,3810,2018,Spring,a,B- +230,PHYS,2060,2019,Summer,a,B- +230,PHYS,3220,2017,Fall,c,B- +231,CS,1410,2018,Spring,a,B- +231,CS,3200,2020,Summer,a,B- +235,BIOL,2420,2020,Spring,a,B- +235,CS,2100,2019,Fall,b,B- +238,PHYS,2210,2019,Spring,b,B- +239,MATH,1250,2018,Summer,b,B- +239,PHYS,2060,2018,Fall,a,B- +244,BIOL,1010,2020,Summer,d,B- +244,BIOL,2355,2020,Summer,b,B- +246,BIOL,2355,2015,Summer,a,B- +246,CS,3500,2015,Fall,b,B- +247,BIOL,2030,2019,Summer,c,B- +247,PHYS,2220,2020,Summer,a,B- +248,BIOL,2010,2020,Summer,a,B- +248,MATH,2280,2019,Fall,c,B- +252,PHYS,3220,2017,Fall,d,B- +254,CS,4000,2020,Spring,a,B- +255,BIOL,2325,2018,Summer,a,B- +255,CS,4500,2019,Fall,b,B- +256,BIOL,2355,2017,Fall,b,B- +256,CS,4940,2019,Fall,a,B- +256,MATH,1260,2019,Spring,a,B- +258,BIOL,1010,2018,Fall,a,B- +258,BIOL,2210,2018,Summer,c,B- +258,CS,2100,2018,Summer,a,B- +258,CS,4940,2020,Summer,b,B- +259,CS,3505,2018,Summer,b,B- 
+259,PHYS,2060,2018,Fall,d,B- +260,BIOL,1010,2018,Summer,a,B- +260,BIOL,1030,2019,Summer,a,B- +260,CS,3200,2020,Summer,a,B- +261,PHYS,2060,2018,Fall,b,B- +264,BIOL,2021,2017,Fall,a,B- +267,CS,3505,2020,Summer,a,B- +267,PHYS,3220,2020,Spring,a,B- +268,CS,4970,2016,Fall,a,B- +270,BIOL,1010,2020,Summer,a,B- +270,PHYS,2140,2015,Summer,b,B- +271,BIOL,2210,2020,Fall,a,B- +275,CS,4400,2019,Spring,b,B- +276,BIOL,2210,2018,Spring,a,B- +276,PHYS,2140,2015,Fall,a,B- +277,CS,4400,2015,Summer,a,B- +277,MATH,2210,2017,Summer,a,B- +277,PHYS,3220,2016,Summer,b,B- +282,BIOL,2021,2015,Spring,a,B- +282,CS,3810,2016,Fall,a,B- +282,MATH,1220,2015,Summer,c,B- +285,BIOL,2210,2017,Summer,c,B- +288,CS,4150,2016,Summer,b,B- +290,BIOL,1006,2015,Summer,b,B- +290,BIOL,1010,2015,Fall,b,B- +290,BIOL,2420,2015,Fall,a,B- +290,MATH,1250,2016,Spring,a,B- +292,CS,3500,2017,Summer,a,B- +296,CS,2420,2018,Spring,a,B- +296,PHYS,2040,2019,Spring,a,B- +298,CS,4400,2019,Summer,b,B- +299,BIOL,1210,2017,Spring,a,B- +300,CS,3505,2019,Summer,b,B- +303,CS,1030,2019,Fall,b,B- +306,BIOL,1010,2020,Summer,b,B- +306,BIOL,2010,2020,Summer,b,B- +309,MATH,1250,2020,Summer,a,B- +309,MATH,2210,2018,Spring,b,B- +309,PHYS,2220,2020,Summer,a,B- +310,PHYS,2060,2020,Spring,a,B- +312,CS,3500,2020,Summer,a,B- +312,CS,4940,2020,Summer,b,B- +313,CS,2100,2015,Summer,a,B- +313,CS,4000,2018,Spring,a,B- +313,CS,4500,2018,Spring,d,B- +314,CS,3500,2017,Fall,a,B- +314,CS,4150,2020,Spring,a,B- +318,MATH,1260,2019,Summer,a,B- +321,BIOL,2020,2018,Spring,a,B- +321,BIOL,2325,2015,Spring,a,B- +321,BIOL,2355,2016,Spring,b,B- +321,CS,2420,2016,Summer,a,B- +321,PHYS,3210,2016,Fall,a,B- +325,BIOL,1030,2020,Spring,a,B- +329,MATH,3210,2020,Fall,a,B- +329,PHYS,3220,2017,Summer,a,B- +332,BIOL,1010,2019,Spring,c,B- +332,BIOL,1210,2018,Spring,a,B- +332,CS,2100,2018,Summer,c,B- +336,CS,3200,2015,Fall,c,B- +341,CS,4970,2020,Fall,d,B- +341,PHYS,3220,2020,Spring,d,B- +342,BIOL,2020,2018,Fall,d,B- +342,BIOL,2021,2018,Fall,c,B- +342,CS,4000,2017,Fall,a,B- +345,BIOL,2020,2018,Fall,b,B- +345,BIOL,2355,2019,Spring,c,B- +347,BIOL,1030,2019,Summer,a,B- +347,CS,2100,2019,Summer,a,B- +348,BIOL,2021,2017,Summer,a,B- +348,BIOL,2210,2017,Spring,b,B- +348,MATH,1210,2019,Spring,a,B- +348,PHYS,3210,2020,Spring,a,B- +348,PHYS,3220,2020,Spring,b,B- +353,PHYS,2100,2017,Summer,c,B- +355,BIOL,2330,2017,Fall,a,B- +356,BIOL,1006,2019,Summer,a,B- +356,CS,3505,2019,Summer,d,B- +356,MATH,1250,2018,Summer,a,B- +356,MATH,1260,2019,Spring,b,B- +359,CS,4970,2019,Summer,b,B- +360,BIOL,1030,2020,Summer,a,B- +361,CS,4000,2017,Fall,b,B- +361,MATH,1250,2018,Spring,a,B- +362,BIOL,2020,2018,Fall,c,B- +362,CS,4940,2020,Summer,a,B- +362,MATH,1250,2018,Summer,c,B- +364,CS,4500,2020,Spring,a,B- +365,CS,4500,2019,Fall,d,B- +366,BIOL,2210,2020,Fall,a,B- +368,BIOL,2420,2020,Summer,a,B- +369,MATH,1210,2016,Fall,c,B- +371,BIOL,2210,2020,Fall,a,B- +373,BIOL,2010,2018,Spring,a,B- +373,CS,2100,2018,Fall,a,B- +373,CS,4970,2020,Summer,b,B- +374,BIOL,2210,2017,Summer,c,B- +374,CS,2100,2016,Summer,b,B- +374,CS,3505,2018,Summer,a,B- +374,PHYS,2210,2015,Fall,b,B- +375,BIOL,1010,2019,Spring,a,B- +375,CS,3200,2020,Summer,a,B- +375,MATH,1260,2019,Fall,a,B- +376,PHYS,2060,2020,Fall,a,B- +377,MATH,1250,2016,Spring,a,B- +377,PHYS,3220,2018,Summer,a,B- +378,BIOL,1006,2020,Fall,c,B- +378,BIOL,1010,2018,Summer,b,B- +378,BIOL,2210,2017,Summer,b,B- +378,CS,4970,2019,Summer,a,B- +379,BIOL,2020,2018,Fall,d,B- +385,CS,2420,2016,Spring,a,B- +390,CS,4970,2020,Summer,d,B- +391,BIOL,2210,2018,Spring,a,B- +391,CS,3100,2017,Fall,a,B- 
+391,MATH,1260,2019,Summer,a,B- +391,MATH,3210,2020,Summer,a,B- +394,MATH,3220,2016,Spring,d,B- +397,CS,4000,2020,Fall,a,B- +398,CS,3505,2020,Fall,a,B- +398,CS,4970,2018,Summer,a,B- +100,BIOL,1030,2020,Spring,a,C +100,CS,1410,2018,Spring,b,C +102,MATH,1210,2018,Spring,a,C +102,MATH,1260,2019,Spring,c,C +106,BIOL,2355,2020,Summer,a,C +107,CS,3810,2016,Fall,a,C +107,MATH,2270,2017,Fall,a,C +109,CS,4400,2019,Spring,b,C +109,PHYS,2220,2020,Fall,a,C +109,PHYS,3210,2018,Fall,a,C +112,CS,4970,2020,Summer,a,C +115,PHYS,3220,2016,Summer,a,C +116,CS,3200,2017,Spring,a,C +117,CS,4500,2016,Fall,a,C +119,BIOL,2030,2016,Summer,a,C +119,BIOL,2355,2018,Summer,a,C +120,CS,3100,2016,Spring,b,C +120,CS,4000,2020,Fall,a,C +120,MATH,1220,2019,Fall,b,C +123,CS,3200,2016,Fall,c,C +123,CS,4500,2019,Summer,a,C +124,BIOL,2325,2018,Fall,a,C +124,CS,3100,2017,Fall,a,C +124,MATH,1210,2019,Summer,a,C +126,CS,3505,2015,Fall,c,C +127,CS,3505,2018,Summer,b,C +127,CS,3810,2019,Fall,a,C +128,CS,3810,2018,Summer,c,C +130,PHYS,3210,2020,Fall,c,C +131,BIOL,1006,2018,Fall,a,C +131,BIOL,2355,2018,Summer,a,C +131,CS,4970,2019,Fall,b,C +131,PHYS,2140,2020,Fall,a,C +131,PHYS,3220,2017,Fall,a,C +133,BIOL,2325,2018,Fall,a,C +133,CS,3200,2018,Spring,a,C +133,CS,4500,2018,Spring,c,C +133,PHYS,2220,2018,Fall,a,C +134,CS,3100,2016,Spring,d,C +135,BIOL,2030,2019,Summer,c,C +135,MATH,2270,2020,Fall,b,C +135,PHYS,2210,2019,Fall,a,C +136,MATH,2210,2020,Fall,a,C +138,BIOL,1006,2015,Summer,a,C +139,MATH,3220,2017,Fall,b,C +143,MATH,2270,2017,Summer,a,C +146,CS,2100,2019,Fall,d,C +146,MATH,3210,2020,Spring,a,C +151,CS,3810,2018,Summer,c,C +152,CS,3500,2019,Summer,a,C +152,CS,4500,2020,Summer,a,C +153,CS,2420,2020,Summer,a,C +157,PHYS,2210,2019,Spring,b,C +163,BIOL,1010,2015,Summer,d,C +163,CS,2100,2017,Fall,a,C +163,CS,3505,2016,Summer,a,C +163,CS,4000,2017,Fall,b,C +164,BIOL,1006,2018,Spring,a,C +164,BIOL,2010,2020,Spring,b,C +164,BIOL,2420,2017,Summer,a,C +164,CS,4500,2018,Spring,c,C +164,MATH,1260,2020,Spring,a,C +165,BIOL,2020,2018,Fall,a,C +165,MATH,2280,2018,Fall,b,C +167,MATH,1250,2020,Summer,a,C +167,MATH,2210,2020,Fall,a,C +169,BIOL,2010,2020,Spring,a,C +169,BIOL,2021,2018,Summer,a,C +169,CS,4400,2019,Spring,c,C +171,BIOL,2030,2020,Spring,a,C +171,CS,2100,2020,Fall,a,C +171,PHYS,2060,2019,Fall,b,C +172,MATH,1250,2015,Fall,a,C +172,PHYS,2140,2015,Summer,b,C +172,PHYS,2220,2016,Summer,a,C +172,PHYS,3210,2016,Summer,b,C +175,BIOL,2010,2020,Summer,a,C +175,CS,1030,2020,Spring,a,C +177,MATH,3210,2015,Spring,b,C +178,MATH,1210,2018,Fall,b,C +178,MATH,2270,2020,Spring,a,C +179,BIOL,1210,2018,Fall,b,C +179,CS,2100,2016,Summer,b,C +179,MATH,2270,2015,Fall,a,C +181,BIOL,2355,2020,Fall,a,C +181,PHYS,2060,2020,Fall,a,C +182,BIOL,2030,2017,Spring,a,C +182,BIOL,2325,2015,Fall,a,C +182,CS,3500,2017,Fall,a,C +182,MATH,2270,2017,Fall,d,C +183,BIOL,2330,2020,Spring,a,C +185,CS,2100,2018,Spring,a,C +185,MATH,1210,2018,Fall,a,C +186,CS,4970,2020,Fall,d,C +187,MATH,1260,2019,Spring,b,C +187,PHYS,2220,2017,Spring,a,C +191,CS,2100,2020,Fall,a,C +192,BIOL,1010,2016,Summer,a,C +194,MATH,1260,2019,Summer,b,C +195,BIOL,2330,2016,Spring,a,C +202,CS,4970,2020,Fall,d,C +203,CS,4000,2018,Spring,a,C +207,CS,3100,2016,Summer,a,C +210,BIOL,2020,2015,Summer,a,C +210,MATH,3210,2015,Summer,a,C +211,BIOL,1010,2015,Fall,b,C +212,BIOL,2020,2016,Spring,a,C +214,CS,3505,2017,Fall,a,C +214,CS,3810,2018,Summer,b,C +215,BIOL,2030,2015,Fall,a,C +215,PHYS,2100,2017,Summer,c,C +219,BIOL,2210,2020,Fall,a,C +220,CS,4940,2019,Fall,a,C +223,CS,3505,2019,Summer,b,C 
+227,PHYS,3210,2018,Fall,a,C +228,PHYS,2220,2020,Fall,a,C +229,MATH,3210,2016,Spring,a,C +230,MATH,3210,2019,Fall,a,C +230,PHYS,2040,2017,Fall,c,C +231,CS,3810,2018,Summer,b,C +231,MATH,1250,2020,Summer,a,C +237,CS,3100,2017,Fall,a,C +237,PHYS,2040,2017,Fall,a,C +239,BIOL,2210,2018,Summer,a,C +239,MATH,2210,2018,Spring,b,C +240,BIOL,2210,2020,Fall,a,C +241,PHYS,2060,2019,Fall,b,C +241,PHYS,2220,2019,Spring,a,C +241,PHYS,3220,2020,Spring,b,C +242,BIOL,2420,2020,Spring,a,C +248,CS,4500,2019,Summer,a,C +249,MATH,2280,2015,Summer,a,C +250,CS,4970,2019,Fall,a,C +251,BIOL,2010,2020,Summer,a,C +252,CS,2100,2018,Fall,c,C +252,PHYS,2060,2018,Fall,d,C +255,BIOL,2020,2018,Fall,a,C +255,CS,4940,2019,Fall,a,C +255,PHYS,2140,2017,Summer,a,C +256,PHYS,2220,2017,Spring,a,C +258,BIOL,1030,2019,Spring,c,C +259,MATH,2270,2017,Fall,b,C +260,PHYS,3210,2016,Fall,a,C +261,BIOL,1006,2018,Spring,b,C +261,CS,4970,2017,Summer,a,C +263,BIOL,1010,2020,Summer,d,C +267,BIOL,2020,2018,Fall,b,C +270,BIOL,2210,2017,Summer,b,C +270,CS,3810,2018,Summer,d,C +270,CS,4150,2018,Fall,a,C +270,CS,4500,2018,Spring,b,C +270,MATH,1250,2016,Summer,a,C +274,MATH,1250,2018,Spring,a,C +274,MATH,2210,2020,Spring,c,C +275,BIOL,1030,2018,Fall,a,C +275,MATH,1210,2019,Spring,b,C +275,PHYS,2040,2019,Spring,a,C +277,BIOL,2210,2017,Spring,c,C +277,MATH,1210,2016,Spring,d,C +277,PHYS,2060,2019,Summer,a,C +281,MATH,1220,2020,Summer,a,C +282,BIOL,1010,2016,Summer,a,C +282,BIOL,2330,2016,Spring,a,C +282,PHYS,2140,2015,Spring,a,C +285,CS,1030,2019,Fall,a,C +285,CS,4970,2016,Fall,a,C +285,MATH,2210,2019,Spring,b,C +288,CS,2420,2017,Summer,c,C +289,MATH,3210,2020,Fall,a,C +290,BIOL,2355,2017,Spring,a,C +291,CS,4000,2017,Fall,a,C +292,BIOL,2020,2018,Spring,a,C +292,PHYS,2210,2019,Spring,c,C +293,CS,3505,2020,Fall,c,C +293,MATH,1260,2019,Spring,c,C +295,CS,2420,2016,Fall,b,C +295,MATH,1210,2016,Spring,d,C +296,BIOL,2325,2017,Fall,b,C +298,BIOL,1010,2018,Fall,b,C +298,BIOL,1030,2019,Spring,c,C +300,BIOL,2021,2019,Fall,a,C +301,BIOL,1010,2015,Summer,c,C +303,BIOL,2021,2019,Fall,a,C +303,CS,4970,2019,Summer,d,C +307,BIOL,2355,2020,Summer,b,C +307,CS,1030,2020,Spring,a,C +307,CS,3505,2019,Summer,a,C +307,CS,4970,2020,Summer,d,C +307,MATH,1210,2019,Spring,a,C +307,PHYS,3220,2017,Fall,c,C +309,BIOL,2030,2019,Summer,b,C +309,BIOL,2355,2020,Spring,a,C +309,CS,4150,2020,Fall,a,C +309,MATH,2280,2018,Fall,c,C +311,BIOL,2021,2018,Spring,a,C +311,BIOL,2355,2018,Summer,b,C +311,CS,3200,2020,Summer,a,C +311,CS,4940,2017,Fall,a,C +312,CS,3505,2017,Summer,a,C +312,PHYS,2210,2019,Fall,b,C +313,BIOL,1006,2020,Fall,a,C +313,BIOL,1006,2020,Fall,c,C +313,CS,3500,2015,Fall,b,C +313,PHYS,3210,2019,Summer,b,C +318,CS,2100,2019,Fall,c,C +318,CS,3505,2019,Summer,d,C +323,BIOL,2420,2020,Summer,a,C +323,CS,4970,2020,Fall,d,C +325,BIOL,2325,2019,Summer,a,C +329,CS,3505,2016,Fall,b,C +329,CS,4000,2017,Fall,b,C +331,MATH,2270,2020,Fall,a,C +332,CS,3200,2020,Spring,c,C +333,BIOL,1006,2020,Fall,a,C +333,BIOL,2010,2020,Summer,a,C +333,MATH,1210,2019,Spring,a,C +335,CS,2100,2016,Summer,b,C +335,CS,3505,2015,Fall,b,C +340,BIOL,1030,2020,Summer,a,C +340,CS,3505,2019,Summer,b,C +340,CS,3810,2020,Fall,a,C +341,PHYS,2060,2019,Fall,b,C +345,CS,3505,2018,Fall,a,C +345,PHYS,2140,2020,Fall,a,C +348,CS,3810,2016,Fall,a,C +356,BIOL,2021,2018,Summer,a,C +356,CS,2420,2019,Summer,a,C +357,CS,3200,2016,Summer,a,C +361,BIOL,2021,2018,Spring,a,C +362,MATH,1220,2018,Spring,b,C +363,BIOL,2355,2020,Summer,b,C +364,CS,4970,2019,Spring,b,C +365,CS,3500,2020,Summer,a,C +366,BIOL,1010,2018,Summer,b,C 
+369,BIOL,2330,2016,Fall,a,C +371,BIOL,2030,2018,Summer,b,C +371,CS,4150,2018,Fall,b,C +372,BIOL,1030,2018,Summer,a,C +372,BIOL,2030,2017,Spring,b,C +372,MATH,3210,2017,Summer,a,C +372,PHYS,2040,2019,Spring,a,C +373,BIOL,2021,2018,Spring,a,C +373,CS,4000,2017,Summer,a,C +373,CS,4500,2020,Spring,a,C +373,MATH,2270,2020,Fall,a,C +373,PHYS,2210,2017,Summer,a,C +374,BIOL,1010,2018,Summer,c,C +374,CS,3500,2016,Spring,a,C +374,PHYS,2060,2016,Summer,b,C +374,PHYS,2220,2015,Spring,a,C +375,BIOL,1006,2018,Spring,b,C +375,CS,3500,2019,Fall,b,C +377,CS,2100,2017,Spring,a,C +378,BIOL,2010,2020,Summer,a,C +378,CS,3505,2016,Summer,a,C +378,CS,4150,2016,Summer,a,C +378,MATH,1210,2016,Fall,b,C +378,MATH,2270,2019,Summer,b,C +379,CS,3505,2016,Fall,a,C +379,PHYS,2140,2017,Fall,b,C +379,PHYS,2210,2015,Fall,c,C +381,CS,2100,2018,Summer,c,C +382,BIOL,1010,2015,Summer,b,C +385,CS,3100,2017,Spring,b,C +385,MATH,1250,2018,Spring,a,C +386,PHYS,2140,2018,Fall,a,C +387,MATH,2210,2017,Summer,a,C +387,PHYS,2040,2015,Fall,c,C +387,PHYS,2140,2016,Fall,a,C +388,MATH,1220,2017,Spring,b,C +389,CS,2420,2016,Spring,a,C +390,PHYS,3210,2020,Spring,a,C +391,BIOL,1010,2017,Spring,a,C +391,BIOL,1030,2018,Fall,a,C +391,CS,1410,2017,Spring,a,C +391,CS,4400,2019,Summer,b,C +391,MATH,3220,2017,Spring,a,C +392,BIOL,2210,2016,Summer,a,C +392,CS,3505,2015,Fall,b,C +392,PHYS,2210,2015,Fall,b,C +393,CS,4000,2016,Fall,a,C +393,PHYS,2220,2018,Summer,a,C +394,BIOL,2325,2016,Summer,a,C +394,CS,4970,2016,Fall,a,C +396,PHYS,2210,2019,Fall,b,C +397,BIOL,2325,2018,Summer,a,C +397,CS,3505,2017,Fall,a,C +397,MATH,1210,2016,Fall,a,C +398,PHYS,3220,2018,Summer,a,C +399,BIOL,2325,2018,Fall,c,C +399,MATH,2270,2019,Summer,c,C +100,BIOL,1010,2020,Summer,d,C+ +100,MATH,2280,2019,Fall,a,C+ +101,BIOL,2020,2018,Fall,d,C+ +102,BIOL,2030,2020,Spring,b,C+ +102,MATH,2210,2019,Spring,a,C+ +102,MATH,2280,2019,Fall,a,C+ +102,PHYS,3220,2020,Spring,a,C+ +105,BIOL,2325,2018,Spring,a,C+ +105,CS,2420,2016,Fall,b,C+ +105,PHYS,2040,2018,Spring,a,C+ +107,BIOL,2420,2017,Summer,b,C+ +107,CS,2100,2019,Spring,a,C+ +107,MATH,2270,2017,Fall,d,C+ +108,BIOL,2010,2020,Spring,b,C+ +108,MATH,1210,2020,Spring,b,C+ +108,PHYS,2210,2019,Fall,b,C+ +109,CS,4000,2020,Fall,a,C+ +109,PHYS,3220,2017,Fall,b,C+ +113,BIOL,2355,2018,Summer,c,C+ +113,PHYS,3210,2019,Spring,a,C+ +117,MATH,1220,2017,Spring,b,C+ +118,CS,3505,2019,Fall,b,C+ +118,PHYS,2220,2020,Summer,a,C+ +119,CS,4970,2019,Summer,d,C+ +119,PHYS,2220,2017,Spring,d,C+ +119,PHYS,3220,2017,Summer,a,C+ +120,BIOL,2030,2017,Spring,d,C+ +120,BIOL,2355,2020,Fall,a,C+ +120,CS,2420,2017,Fall,a,C+ +122,MATH,2210,2020,Fall,a,C+ +123,BIOL,1006,2016,Spring,b,C+ +123,CS,3500,2016,Spring,a,C+ +123,CS,3810,2016,Summer,a,C+ +123,PHYS,2220,2018,Fall,a,C+ +123,PHYS,3210,2016,Fall,a,C+ +124,PHYS,2220,2020,Spring,a,C+ +124,PHYS,3220,2020,Spring,d,C+ +127,BIOL,2010,2017,Summer,a,C+ +127,CS,3500,2020,Summer,a,C+ +128,MATH,1210,2018,Fall,b,C+ +131,CS,4500,2019,Fall,c,C+ +133,BIOL,1010,2018,Summer,b,C+ +133,MATH,1260,2019,Spring,a,C+ +134,BIOL,1210,2018,Spring,a,C+ +134,CS,3200,2015,Fall,b,C+ +134,PHYS,2140,2016,Spring,c,C+ +135,BIOL,1030,2020,Spring,a,C+ +135,CS,1030,2020,Spring,c,C+ +138,CS,1030,2016,Spring,a,C+ +138,CS,3100,2016,Spring,d,C+ +138,PHYS,2140,2015,Summer,c,C+ +139,CS,3100,2017,Fall,a,C+ +139,MATH,1250,2018,Summer,c,C+ +140,CS,2420,2015,Summer,c,C+ +140,PHYS,2140,2015,Summer,a,C+ +148,BIOL,1010,2020,Summer,a,C+ +149,CS,4400,2016,Spring,a,C+ +151,BIOL,1030,2017,Spring,c,C+ +151,BIOL,2030,2016,Fall,a,C+ +153,BIOL,1030,2020,Spring,a,C+ 
+155,BIOL,2330,2017,Fall,a,C+ +158,PHYS,2060,2018,Fall,b,C+ +163,CS,2420,2016,Fall,a,C+ +163,CS,3100,2015,Summer,a,C+ +164,BIOL,1030,2020,Summer,a,C+ +164,BIOL,2021,2019,Fall,a,C+ +164,CS,1410,2018,Spring,b,C+ +165,BIOL,1006,2017,Fall,b,C+ +165,BIOL,1010,2019,Spring,b,C+ +165,MATH,1220,2018,Spring,a,C+ +167,BIOL,1030,2019,Summer,a,C+ +167,MATH,1210,2018,Fall,a,C+ +169,BIOL,2420,2018,Spring,a,C+ +170,CS,1030,2020,Spring,b,C+ +171,MATH,3210,2020,Summer,a,C+ +173,BIOL,2030,2019,Summer,b,C+ +173,CS,4400,2019,Summer,a,C+ +175,BIOL,2355,2020,Fall,a,C+ +175,MATH,2210,2020,Fall,a,C+ +176,BIOL,2020,2015,Fall,c,C+ +176,PHYS,2100,2016,Fall,b,C+ +177,BIOL,1210,2018,Spring,a,C+ +177,BIOL,2010,2020,Summer,b,C+ +177,MATH,2270,2020,Fall,a,C+ +177,PHYS,2210,2017,Summer,a,C+ +178,BIOL,2355,2019,Spring,a,C+ +178,CS,3200,2020,Fall,a,C+ +178,PHYS,2060,2020,Fall,a,C+ +179,CS,3200,2015,Fall,b,C+ +179,MATH,2210,2020,Fall,a,C+ +182,BIOL,2210,2017,Spring,b,C+ +182,CS,3505,2015,Fall,b,C+ +182,CS,4500,2018,Spring,a,C+ +182,MATH,2280,2018,Spring,a,C+ +183,BIOL,1030,2018,Fall,a,C+ +183,BIOL,2020,2018,Fall,a,C+ +185,BIOL,1030,2020,Summer,a,C+ +185,CS,3505,2018,Summer,b,C+ +185,CS,4500,2019,Summer,a,C+ +187,MATH,1220,2017,Spring,a,C+ +187,PHYS,2060,2020,Fall,a,C+ +187,PHYS,3220,2017,Fall,d,C+ +194,CS,3505,2019,Fall,c,C+ +194,CS,4940,2020,Summer,b,C+ +195,MATH,1210,2016,Fall,c,C+ +196,CS,2100,2018,Fall,c,C+ +197,MATH,2210,2018,Spring,b,C+ +199,CS,2420,2019,Summer,a,C+ +200,PHYS,3210,2020,Fall,b,C+ +203,CS,3500,2017,Fall,c,C+ +204,BIOL,2330,2015,Fall,d,C+ +210,CS,1030,2019,Fall,b,C+ +210,PHYS,2060,2019,Fall,a,C+ +211,CS,3200,2015,Spring,b,C+ +213,BIOL,2030,2016,Fall,a,C+ +214,BIOL,1006,2016,Summer,d,C+ +214,BIOL,2325,2018,Spring,a,C+ +214,CS,2100,2016,Spring,a,C+ +215,CS,2100,2017,Fall,a,C+ +215,CS,2420,2016,Fall,b,C+ +219,CS,3505,2020,Summer,a,C+ +221,CS,1030,2020,Spring,a,C+ +223,BIOL,2010,2020,Spring,a,C+ +225,CS,2420,2020,Fall,a,C+ +225,CS,3810,2020,Fall,a,C+ +227,MATH,2280,2018,Fall,b,C+ +227,PHYS,2140,2019,Fall,a,C+ +227,PHYS,3220,2020,Spring,d,C+ +228,CS,2100,2020,Spring,a,C+ +229,MATH,1260,2016,Fall,a,C+ +229,MATH,2210,2018,Spring,b,C+ +231,BIOL,2010,2020,Spring,a,C+ +231,MATH,1260,2020,Spring,a,C+ +234,MATH,1220,2019,Fall,a,C+ +235,CS,4400,2020,Fall,b,C+ +238,MATH,3220,2018,Spring,d,C+ +241,CS,4400,2019,Fall,a,C+ +241,PHYS,3220,2020,Spring,c,C+ +242,BIOL,2010,2020,Summer,a,C+ +243,CS,2420,2016,Fall,c,C+ +245,CS,4150,2016,Summer,a,C+ +245,MATH,1220,2015,Summer,c,C+ +246,PHYS,2100,2017,Fall,a,C+ +246,PHYS,2210,2015,Fall,a,C+ +247,BIOL,2325,2018,Fall,a,C+ +247,MATH,2280,2019,Fall,b,C+ +248,BIOL,2355,2019,Spring,c,C+ +248,CS,3200,2020,Spring,c,C+ +249,CS,3505,2016,Fall,b,C+ +249,CS,4970,2016,Fall,b,C+ +249,PHYS,2220,2017,Spring,d,C+ +250,CS,3505,2020,Fall,c,C+ +253,CS,2100,2018,Fall,d,C+ +254,CS,4500,2019,Fall,d,C+ +255,BIOL,2010,2018,Spring,a,C+ +255,CS,3500,2019,Fall,a,C+ +255,MATH,1250,2018,Summer,a,C+ +255,PHYS,2210,2019,Spring,d,C+ +256,CS,4500,2019,Fall,c,C+ +256,PHYS,2040,2017,Fall,b,C+ +257,BIOL,2020,2018,Fall,a,C+ +257,BIOL,2021,2018,Summer,a,C+ +257,CS,4000,2020,Spring,a,C+ +257,MATH,1260,2019,Summer,a,C+ +257,PHYS,2060,2018,Fall,b,C+ +258,BIOL,1030,2019,Spring,b,C+ +258,CS,3500,2019,Summer,a,C+ +258,PHYS,3210,2019,Spring,c,C+ +260,BIOL,2325,2017,Fall,b,C+ +261,BIOL,2020,2018,Fall,a,C+ +262,BIOL,2020,2018,Fall,b,C+ +266,BIOL,2330,2017,Fall,b,C+ +270,BIOL,2355,2017,Spring,b,C+ +274,BIOL,2020,2018,Fall,a,C+ +275,CS,4970,2019,Spring,a,C+ +276,BIOL,1006,2016,Spring,a,C+ +276,CS,3100,2015,Summer,a,C+ 
+276,CS,3505,2019,Spring,a,C+ +277,BIOL,1010,2015,Summer,a,C+ +277,MATH,1210,2016,Spring,c,C+ +281,CS,4970,2020,Fall,c,C+ +282,CS,3505,2015,Spring,a,C+ +282,CS,4000,2015,Fall,a,C+ +285,MATH,1220,2017,Spring,b,C+ +285,MATH,3220,2016,Spring,a,C+ +285,PHYS,2210,2017,Summer,b,C+ +287,CS,4400,2019,Summer,a,C+ +289,BIOL,2210,2019,Fall,b,C+ +291,CS,1030,2016,Spring,a,C+ +291,CS,1410,2016,Spring,b,C+ +292,BIOL,1030,2020,Spring,a,C+ +292,MATH,2270,2017,Fall,a,C+ +292,MATH,3210,2017,Summer,a,C+ +295,CS,4970,2017,Spring,a,C+ +297,PHYS,2140,2020,Fall,a,C+ +298,CS,2100,2018,Summer,c,C+ +300,CS,4970,2019,Summer,a,C+ +304,MATH,3210,2017,Spring,a,C+ +307,BIOL,1030,2019,Spring,c,C+ +307,CS,1410,2018,Spring,d,C+ +309,BIOL,2210,2017,Spring,b,C+ +309,CS,2420,2017,Summer,b,C+ +309,CS,3500,2017,Fall,c,C+ +309,CS,4500,2016,Fall,a,C+ +309,MATH,1220,2018,Spring,b,C+ +309,MATH,3210,2017,Summer,a,C+ +311,BIOL,1010,2018,Summer,b,C+ +311,CS,4970,2019,Spring,b,C+ +312,PHYS,2100,2016,Fall,a,C+ +313,BIOL,1010,2018,Summer,c,C+ +313,BIOL,2010,2019,Fall,a,C+ +313,BIOL,2020,2016,Spring,a,C+ +313,MATH,1260,2019,Spring,b,C+ +314,BIOL,1030,2019,Summer,a,C+ +314,BIOL,2210,2019,Summer,a,C+ +314,CS,4970,2017,Spring,a,C+ +314,MATH,2270,2017,Fall,d,C+ +316,BIOL,2010,2019,Fall,a,C+ +318,BIOL,1010,2018,Summer,a,C+ +318,BIOL,2030,2019,Summer,a,C+ +318,BIOL,2210,2019,Summer,b,C+ +321,BIOL,2420,2020,Fall,a,C+ +321,CS,2100,2019,Fall,b,C+ +329,PHYS,3210,2019,Spring,c,C+ +331,MATH,2270,2020,Fall,b,C+ +332,BIOL,2355,2018,Summer,a,C+ +332,CS,4400,2019,Summer,b,C+ +332,MATH,1220,2018,Spring,a,C+ +333,BIOL,2325,2019,Spring,a,C+ +333,CS,4970,2019,Summer,c,C+ +335,CS,4970,2016,Fall,a,C+ +340,BIOL,1010,2020,Summer,a,C+ +342,BIOL,1210,2019,Spring,a,C+ +342,BIOL,2420,2020,Fall,a,C+ +348,BIOL,2330,2020,Spring,a,C+ +348,CS,4500,2017,Summer,a,C+ +348,MATH,2270,2020,Fall,a,C+ +348,PHYS,2040,2017,Fall,c,C+ +355,MATH,1220,2017,Spring,c,C+ +356,MATH,2270,2017,Fall,b,C+ +356,PHYS,2220,2016,Fall,a,C+ +366,BIOL,2030,2020,Spring,a,C+ +368,MATH,3210,2020,Summer,a,C+ +368,PHYS,2060,2019,Fall,b,C+ +369,PHYS,2140,2018,Summer,b,C+ +371,BIOL,1010,2020,Summer,c,C+ +371,PHYS,2140,2019,Fall,a,C+ +372,BIOL,2010,2017,Fall,a,C+ +372,PHYS,2220,2017,Spring,a,C+ +373,BIOL,2210,2020,Fall,a,C+ +374,CS,3810,2018,Summer,a,C+ +375,PHYS,3210,2019,Spring,c,C+ +377,BIOL,2020,2015,Fall,c,C+ +377,PHYS,2100,2017,Summer,b,C+ +378,CS,3200,2020,Spring,c,C+ +378,CS,4000,2016,Fall,a,C+ +378,MATH,1220,2017,Spring,d,C+ +379,BIOL,1006,2020,Fall,a,C+ +379,BIOL,2030,2015,Fall,a,C+ +380,BIOL,2355,2018,Fall,a,C+ +381,PHYS,3210,2018,Spring,c,C+ +382,BIOL,1010,2015,Summer,a,C+ +386,CS,4150,2020,Fall,a,C+ +387,CS,2100,2018,Spring,a,C+ +387,MATH,3220,2018,Spring,b,C+ +388,CS,2100,2016,Summer,b,C+ +389,BIOL,1006,2016,Summer,d,C+ +390,MATH,2280,2019,Fall,b,C+ +391,BIOL,1006,2018,Fall,a,C+ +391,CS,3505,2019,Fall,c,C+ +391,MATH,2210,2018,Spring,a,C+ +391,PHYS,2060,2020,Spring,b,C+ +392,BIOL,1030,2016,Spring,a,C+ +392,BIOL,2330,2017,Fall,b,C+ +392,CS,2100,2018,Summer,c,C+ +394,CS,1410,2016,Summer,a,C+ +395,BIOL,2355,2016,Spring,b,C+ +396,CS,4150,2020,Spring,a,C+ +397,BIOL,2420,2020,Spring,a,C+ +397,CS,2100,2019,Fall,b,C+ +397,CS,2100,2019,Fall,c,C+ +398,MATH,2270,2020,Fall,a,C+ +398,MATH,2280,2020,Spring,b,C+ +399,BIOL,2020,2018,Fall,a,C+ +399,BIOL,2021,2019,Spring,a,C+ +399,CS,2100,2018,Fall,d,C+ +399,MATH,3210,2019,Spring,a,C+ +100,CS,3505,2018,Summer,a,C- +101,BIOL,2030,2018,Summer,b,C- +102,MATH,1220,2019,Fall,b,C- +105,BIOL,1010,2018,Summer,a,C- +106,CS,2100,2019,Summer,b,C- 
+107,BIOL,2210,2017,Spring,c,C- +107,CS,4000,2017,Fall,a,C- +108,CS,4500,2020,Spring,a,C- +109,CS,1410,2018,Spring,b,C- +109,CS,3505,2020,Fall,c,C- +112,BIOL,1010,2020,Summer,c,C- +113,CS,4400,2020,Spring,a,C- +115,BIOL,2420,2017,Summer,a,C- +118,BIOL,2355,2020,Spring,a,C- +118,CS,2100,2019,Fall,c,C- +118,MATH,1220,2020,Summer,a,C- +119,MATH,2280,2018,Fall,b,C- +120,BIOL,2020,2015,Summer,a,C- +120,CS,4150,2020,Spring,a,C- +120,PHYS,2040,2020,Spring,a,C- +121,BIOL,1010,2020,Summer,d,C- +121,BIOL,2420,2020,Spring,b,C- +121,CS,4970,2018,Fall,d,C- +121,PHYS,3210,2020,Fall,b,C- +122,CS,1030,2020,Spring,a,C- +123,BIOL,2010,2017,Summer,a,C- +123,CS,4000,2020,Spring,b,C- +123,MATH,1220,2019,Fall,b,C- +124,CS,1030,2020,Spring,c,C- +124,CS,3200,2020,Fall,a,C- +125,PHYS,3210,2020,Spring,a,C- +127,BIOL,1006,2019,Spring,a,C- +127,PHYS,2060,2018,Fall,b,C- +131,BIOL,2420,2020,Summer,a,C- +133,CS,3500,2019,Fall,b,C- +133,MATH,1220,2019,Fall,a,C- +133,MATH,2280,2019,Fall,b,C- +135,BIOL,2325,2019,Summer,a,C- +136,CS,4970,2020,Fall,b,C- +137,CS,3505,2020,Summer,a,C- +138,CS,4000,2016,Fall,a,C- +138,CS,4400,2016,Fall,a,C- +138,MATH,1210,2015,Summer,a,C- +139,BIOL,2021,2019,Spring,b,C- +139,CS,4500,2017,Summer,a,C- +139,PHYS,3210,2017,Summer,a,C- +143,BIOL,1006,2019,Summer,a,C- +143,CS,1030,2019,Fall,a,C- +143,PHYS,3220,2018,Summer,a,C- +145,CS,4970,2016,Fall,a,C- +146,BIOL,2420,2020,Fall,a,C- +151,CS,3505,2017,Summer,a,C- +151,PHYS,2060,2019,Fall,c,C- +151,PHYS,2100,2017,Summer,c,C- +151,PHYS,2210,2018,Fall,b,C- +151,PHYS,2220,2017,Spring,c,C- +152,BIOL,2020,2018,Fall,a,C- +152,BIOL,2021,2018,Fall,a,C- +152,MATH,1220,2019,Fall,b,C- +152,PHYS,3210,2019,Summer,c,C- +160,BIOL,1006,2016,Spring,a,C- +161,CS,3200,2020,Fall,a,C- +163,CS,4000,2017,Fall,a,C- +164,BIOL,2325,2018,Fall,a,C- +164,CS,4000,2018,Spring,a,C- +164,CS,4970,2019,Summer,d,C- +165,PHYS,2060,2018,Fall,c,C- +167,BIOL,2325,2018,Fall,b,C- +167,PHYS,3210,2019,Summer,c,C- +171,CS,3505,2020,Fall,c,C- +172,MATH,2210,2015,Fall,a,C- +172,MATH,3210,2015,Fall,d,C- +173,CS,4000,2018,Spring,a,C- +173,MATH,1220,2018,Spring,a,C- +176,CS,3200,2016,Summer,b,C- +176,MATH,3220,2017,Spring,a,C- +177,BIOL,2020,2018,Fall,a,C- +177,CS,3200,2020,Summer,a,C- +177,CS,3505,2017,Fall,b,C- +177,CS,4970,2016,Fall,b,C- +177,PHYS,2140,2017,Summer,a,C- +179,BIOL,2325,2019,Summer,a,C- +179,MATH,1260,2019,Spring,b,C- +179,PHYS,2220,2015,Fall,a,C- +181,MATH,2270,2020,Fall,a,C- +182,CS,3810,2018,Summer,d,C- +182,MATH,1220,2017,Spring,b,C- +185,CS,3200,2020,Summer,a,C- +185,PHYS,3210,2020,Spring,a,C- +187,CS,4400,2019,Spring,c,C- +188,PHYS,3210,2019,Summer,c,C- +189,BIOL,2420,2020,Summer,a,C- +192,BIOL,2355,2015,Summer,a,C- +194,CS,3200,2020,Spring,a,C- +195,MATH,1210,2016,Fall,d,C- +195,MATH,3210,2016,Fall,a,C- +195,PHYS,2060,2016,Spring,a,C- +196,BIOL,2021,2019,Spring,a,C- +199,BIOL,1030,2018,Fall,a,C- +200,CS,4940,2020,Summer,a,C- +202,CS,1030,2020,Fall,a,C- +203,MATH,3220,2018,Spring,d,C- +204,BIOL,1030,2015,Summer,a,C- +207,BIOL,2021,2017,Fall,a,C- +210,CS,3100,2017,Spring,b,C- +211,MATH,1220,2015,Summer,a,C- +214,BIOL,2210,2017,Summer,a,C- +217,PHYS,3210,2019,Spring,b,C- +220,BIOL,2325,2018,Fall,a,C- +221,BIOL,2010,2020,Summer,a,C- +222,PHYS,2140,2020,Fall,a,C- +223,CS,2100,2019,Fall,a,C- +223,CS,3500,2019,Fall,b,C- +227,BIOL,1006,2018,Spring,b,C- +227,CS,4970,2018,Summer,b,C- +228,CS,3200,2020,Spring,a,C- +228,PHYS,3210,2020,Fall,b,C- +230,BIOL,2030,2018,Summer,a,C- +230,CS,2420,2020,Fall,a,C- +231,CS,4940,2020,Summer,b,C- +231,CS,4970,2018,Summer,a,C- 
+231,PHYS,3210,2019,Spring,c,C- +233,BIOL,2355,2020,Fall,a,C- +233,CS,2420,2020,Summer,a,C- +235,MATH,1260,2020,Spring,a,C- +235,PHYS,2220,2020,Spring,a,C- +237,PHYS,3220,2017,Fall,a,C- +242,MATH,1260,2020,Spring,a,C- +242,PHYS,2220,2020,Summer,b,C- +243,BIOL,1030,2017,Spring,b,C- +247,CS,4940,2020,Summer,b,C- +248,BIOL,2420,2020,Spring,b,C- +248,CS,4400,2019,Spring,c,C- +249,MATH,1210,2016,Fall,a,C- +251,CS,4940,2020,Summer,a,C- +251,PHYS,3220,2020,Spring,b,C- +253,BIOL,1030,2018,Fall,a,C- +256,BIOL,1030,2019,Spring,c,C- +256,MATH,2280,2018,Spring,a,C- +257,CS,4970,2020,Summer,c,C- +257,PHYS,2220,2018,Summer,a,C- +259,BIOL,2010,2017,Summer,a,C- +259,CS,4000,2017,Summer,a,C- +259,MATH,2280,2018,Fall,a,C- +260,CS,3810,2018,Summer,a,C- +260,MATH,2270,2020,Fall,b,C- +261,CS,2420,2017,Summer,c,C- +261,CS,3100,2017,Spring,a,C- +261,MATH,2210,2017,Spring,a,C- +262,CS,2100,2016,Summer,b,C- +266,MATH,3220,2017,Fall,a,C- +267,MATH,2280,2019,Fall,b,C- +268,CS,3200,2016,Fall,a,C- +270,CS,1410,2015,Summer,d,C- +270,MATH,2210,2017,Spring,a,C- +270,MATH,2280,2019,Fall,a,C- +270,MATH,2280,2019,Fall,c,C- +270,MATH,3220,2016,Summer,a,C- +270,PHYS,2210,2018,Fall,b,C- +271,BIOL,2355,2020,Fall,a,C- +271,PHYS,3220,2020,Spring,c,C- +275,BIOL,1010,2018,Summer,b,C- +275,BIOL,2355,2018,Summer,c,C- +275,CS,1410,2018,Spring,d,C- +275,CS,4000,2018,Spring,a,C- +276,CS,3810,2015,Spring,a,C- +276,MATH,1260,2019,Summer,a,C- +276,MATH,3210,2016,Spring,a,C- +276,PHYS,3210,2018,Fall,a,C- +277,BIOL,1010,2015,Summer,c,C- +277,CS,3100,2016,Fall,a,C- +278,BIOL,2210,2016,Summer,a,C- +278,MATH,1260,2016,Fall,a,C- +281,MATH,1250,2020,Summer,a,C- +285,CS,4970,2016,Fall,b,C- +285,MATH,1210,2016,Fall,c,C- +285,MATH,2270,2019,Spring,a,C- +285,MATH,2280,2020,Spring,b,C- +285,PHYS,2100,2018,Fall,a,C- +285,PHYS,3220,2016,Summer,a,C- +288,MATH,1210,2018,Summer,a,C- +290,CS,3505,2016,Summer,a,C- +290,CS,4400,2015,Summer,a,C- +291,MATH,2270,2017,Fall,d,C- +292,BIOL,1006,2018,Spring,b,C- +294,CS,3500,2017,Fall,c,C- +294,CS,3505,2017,Fall,b,C- +294,CS,4940,2017,Fall,a,C- +295,CS,2100,2016,Spring,a,C- +296,CS,3100,2017,Fall,a,C- +296,MATH,3220,2018,Spring,a,C- +297,PHYS,3210,2020,Fall,a,C- +300,PHYS,2220,2020,Summer,b,C- +305,CS,1030,2018,Fall,a,C- +307,BIOL,2330,2019,Fall,a,C- +307,PHYS,2040,2015,Fall,c,C- +309,MATH,1260,2019,Fall,a,C- +309,PHYS,2140,2020,Fall,a,C- +311,PHYS,2060,2018,Fall,c,C- +311,PHYS,2060,2018,Fall,d,C- +312,BIOL,2325,2015,Fall,c,C- +313,BIOL,2030,2017,Spring,a,C- +313,MATH,1210,2019,Summer,a,C- +313,MATH,2270,2015,Fall,b,C- +313,MATH,3210,2015,Fall,a,C- +314,CS,4940,2019,Fall,a,C- +314,PHYS,2040,2017,Fall,a,C- +314,PHYS,2100,2016,Fall,a,C- +317,CS,4500,2016,Spring,a,C- +318,MATH,2270,2017,Summer,a,C- +321,PHYS,2060,2020,Spring,a,C- +321,PHYS,2060,2020,Spring,b,C- +325,BIOL,2020,2018,Fall,d,C- +325,BIOL,2355,2020,Summer,b,C- +329,CS,1030,2019,Fall,b,C- +329,CS,4500,2018,Spring,a,C- +329,PHYS,2210,2018,Fall,a,C- +332,CS,1030,2020,Spring,b,C- +332,CS,3500,2019,Fall,a,C- +335,CS,3810,2016,Fall,b,C- +340,PHYS,2210,2019,Fall,a,C- +341,CS,4500,2019,Fall,d,C- +342,BIOL,2325,2019,Spring,a,C- +342,BIOL,2330,2017,Fall,a,C- +342,PHYS,2220,2018,Summer,a,C- +344,BIOL,2020,2018,Fall,b,C- +344,BIOL,2021,2018,Summer,a,C- +347,BIOL,2030,2020,Spring,b,C- +347,CS,4970,2019,Fall,d,C- +348,BIOL,1010,2020,Summer,c,C- +348,BIOL,2010,2018,Spring,a,C- +348,CS,1030,2016,Spring,a,C- +348,CS,3100,2019,Spring,b,C- +351,PHYS,3210,2019,Spring,a,C- +355,CS,1410,2016,Spring,a,C- +355,MATH,1250,2017,Summer,a,C- +356,CS,2100,2017,Fall,a,C- 
+362,MATH,3220,2018,Spring,a,C- +364,BIOL,2021,2019,Fall,a,C- +364,CS,4400,2019,Fall,b,C- +365,MATH,3210,2020,Fall,a,C- +366,PHYS,2210,2019,Fall,b,C- +368,CS,2420,2020,Summer,a,C- +368,CS,4400,2019,Summer,b,C- +368,MATH,1250,2018,Summer,b,C- +369,BIOL,2355,2017,Spring,c,C- +371,CS,4500,2019,Summer,a,C- +371,MATH,1210,2018,Summer,a,C- +371,PHYS,2210,2019,Fall,c,C- +372,BIOL,1010,2019,Spring,a,C- +373,BIOL,2030,2018,Summer,a,C- +373,CS,3500,2020,Summer,a,C- +373,MATH,1210,2020,Spring,a,C- +373,MATH,2210,2015,Fall,a,C- +374,BIOL,2420,2015,Summer,a,C- +374,MATH,1250,2016,Fall,c,C- +375,BIOL,1030,2019,Spring,c,C- +375,BIOL,2010,2020,Summer,a,C- +375,BIOL,2030,2020,Spring,b,C- +375,CS,1410,2020,Spring,b,C- +375,PHYS,2100,2017,Summer,a,C- +376,BIOL,1006,2020,Fall,a,C- +377,BIOL,2325,2019,Spring,a,C- +377,BIOL,2355,2015,Summer,a,C- +377,MATH,1220,2015,Summer,c,C- +377,MATH,3220,2017,Fall,a,C- +378,CS,4500,2017,Summer,a,C- +378,CS,4970,2019,Summer,b,C- +378,PHYS,2100,2017,Summer,b,C- +379,CS,3500,2016,Summer,a,C- +379,CS,3810,2018,Summer,d,C- +384,BIOL,2010,2020,Spring,a,C- +385,BIOL,2010,2018,Spring,a,C- +385,CS,1030,2016,Summer,a,C- +385,CS,3505,2017,Fall,b,C- +385,PHYS,2220,2016,Fall,a,C- +388,BIOL,2021,2017,Summer,a,C- +388,CS,3200,2018,Spring,c,C- +390,BIOL,1010,2020,Summer,c,C- +391,BIOL,2325,2019,Spring,a,C- +391,CS,4150,2018,Fall,a,C- +392,BIOL,2355,2016,Spring,b,C- +393,BIOL,2210,2017,Spring,c,C- +394,MATH,1210,2017,Spring,a,C- +396,CS,3505,2018,Fall,c,C- +397,BIOL,1030,2019,Spring,c,C- +397,CS,4970,2016,Fall,b,C- +397,MATH,2210,2020,Fall,a,C- +398,BIOL,1010,2020,Summer,c,C- +398,BIOL,2355,2018,Summer,b,C- +398,CS,4400,2019,Summer,a,C- +399,CS,4970,2019,Summer,d,C- +100,CS,4940,2020,Summer,a,D +101,MATH,1250,2018,Summer,b,D +106,CS,4150,2020,Spring,a,D +107,CS,3200,2016,Fall,d,D +107,MATH,2280,2020,Spring,a,D +109,BIOL,1010,2019,Spring,b,D +109,CS,4500,2019,Fall,d,D +113,BIOL,2020,2018,Fall,b,D +113,PHYS,2140,2018,Summer,a,D +116,MATH,1220,2017,Spring,a,D +117,BIOL,2020,2016,Spring,a,D +117,CS,4940,2017,Fall,a,D +117,PHYS,2140,2016,Spring,b,D +118,CS,4000,2020,Fall,a,D +119,CS,4500,2016,Spring,b,D +119,MATH,1250,2018,Summer,b,D +119,PHYS,2040,2017,Fall,b,D +119,PHYS,2140,2020,Fall,a,D +120,BIOL,2420,2020,Spring,a,D +120,CS,3505,2020,Fall,c,D +120,MATH,2270,2017,Fall,c,D +121,PHYS,2220,2020,Summer,a,D +123,BIOL,2330,2016,Spring,a,D +123,PHYS,2140,2016,Spring,a,D +125,CS,4150,2020,Spring,a,D +129,BIOL,2325,2018,Fall,b,D +129,BIOL,2325,2018,Fall,c,D +131,CS,3500,2017,Fall,b,D +131,PHYS,2060,2018,Summer,a,D +132,BIOL,2420,2017,Summer,a,D +132,MATH,3220,2018,Spring,b,D +132,PHYS,2220,2018,Spring,a,D +133,MATH,1210,2019,Spring,a,D +134,BIOL,2010,2018,Spring,a,D +136,PHYS,2140,2020,Fall,a,D +138,BIOL,2330,2015,Fall,b,D +138,CS,2420,2015,Spring,a,D +138,CS,4500,2016,Spring,b,D +139,BIOL,2325,2019,Summer,a,D +143,CS,4400,2019,Summer,b,D +144,BIOL,2355,2016,Spring,a,D +146,BIOL,2010,2020,Summer,a,D +148,CS,4970,2020,Fall,c,D +151,CS,3200,2016,Fall,d,D +152,CS,4000,2020,Spring,b,D +152,PHYS,2040,2019,Spring,b,D +160,CS,2100,2016,Summer,b,D +162,CS,4500,2016,Spring,b,D +163,BIOL,2020,2018,Fall,a,D +163,BIOL,2355,2017,Spring,d,D +163,MATH,1220,2017,Spring,b,D +165,CS,3200,2018,Spring,a,D +169,MATH,1220,2018,Spring,b,D +170,BIOL,2010,2020,Summer,a,D +171,PHYS,2210,2019,Fall,b,D +172,CS,3100,2015,Summer,a,D +172,MATH,3210,2015,Fall,a,D +173,PHYS,2100,2017,Summer,c,D +173,PHYS,3210,2019,Spring,d,D +175,BIOL,1010,2020,Summer,b,D +176,MATH,2210,2017,Spring,a,D +177,CS,4500,2016,Fall,a,D 
+177,PHYS,2100,2017,Summer,a,D +178,BIOL,1010,2020,Summer,a,D +178,MATH,1220,2020,Spring,a,D +178,MATH,2280,2018,Fall,c,D +179,BIOL,2010,2020,Spring,b,D +179,BIOL,2021,2016,Fall,a,D +182,CS,3100,2016,Fall,a,D +182,MATH,1210,2016,Spring,c,D +183,CS,4400,2019,Fall,a,D +183,MATH,2280,2020,Spring,a,D +183,PHYS,3210,2020,Spring,a,D +185,BIOL,2355,2018,Summer,a,D +185,CS,4400,2020,Fall,b,D +185,MATH,2210,2018,Spring,b,D +185,PHYS,3220,2020,Spring,c,D +187,PHYS,2210,2019,Spring,d,D +188,BIOL,2030,2019,Summer,c,D +193,CS,4000,2015,Spring,a,D +194,BIOL,1006,2020,Spring,a,D +194,PHYS,2040,2020,Spring,a,D +197,BIOL,2010,2018,Spring,a,D +199,PHYS,2140,2018,Summer,b,D +199,PHYS,2210,2019,Spring,b,D +200,CS,4500,2020,Spring,a,D +203,CS,1410,2018,Spring,b,D +204,MATH,2280,2015,Summer,a,D +208,CS,2420,2017,Summer,c,D +208,PHYS,3210,2017,Summer,b,D +210,BIOL,1010,2015,Fall,a,D +214,PHYS,2040,2017,Fall,c,D +214,PHYS,3220,2017,Fall,a,D +216,MATH,3220,2016,Spring,c,D +219,CS,2420,2020,Summer,a,D +219,CS,4970,2020,Summer,b,D +220,CS,4500,2019,Fall,d,D +220,MATH,1210,2020,Spring,a,D +228,BIOL,2010,2020,Summer,a,D +228,BIOL,2010,2020,Summer,b,D +229,BIOL,1006,2017,Fall,b,D +230,BIOL,2420,2020,Spring,b,D +231,CS,2420,2020,Summer,a,D +231,PHYS,2220,2018,Summer,a,D +233,CS,4970,2020,Summer,c,D +235,BIOL,2010,2020,Summer,b,D +235,PHYS,2140,2020,Fall,a,D +238,CS,3505,2019,Summer,b,D +239,BIOL,2325,2018,Fall,c,D +240,BIOL,2330,2019,Fall,a,D +240,PHYS,2060,2020,Spring,b,D +241,BIOL,2030,2019,Summer,d,D +242,CS,3200,2020,Summer,a,D +244,BIOL,1010,2020,Summer,b,D +245,CS,1030,2016,Summer,a,D +245,CS,2420,2016,Fall,a,D +245,MATH,3220,2016,Fall,b,D +245,PHYS,2220,2016,Fall,a,D +246,BIOL,1006,2015,Summer,a,D +246,CS,3200,2016,Summer,b,D +247,PHYS,2060,2018,Fall,b,D +248,BIOL,1030,2019,Spring,c,D +252,MATH,1260,2017,Fall,a,D +253,PHYS,2140,2018,Fall,a,D +253,PHYS,2210,2019,Spring,c,D +254,CS,4970,2020,Summer,d,D +254,MATH,2270,2019,Fall,a,D +256,BIOL,1210,2019,Spring,a,D +256,CS,1030,2018,Fall,a,D +256,MATH,2210,2018,Spring,b,D +257,BIOL,2030,2017,Spring,a,D +257,BIOL,2210,2017,Summer,a,D +257,CS,2100,2018,Fall,b,D +257,CS,2100,2018,Fall,c,D +257,CS,3810,2018,Summer,c,D +257,CS,4400,2019,Spring,d,D +258,MATH,2270,2020,Spring,a,D +259,BIOL,2210,2017,Summer,b,D +259,CS,4400,2019,Fall,b,D +259,CS,4970,2019,Fall,c,D +260,CS,4500,2019,Fall,a,D +260,MATH,1220,2017,Spring,d,D +262,PHYS,3210,2017,Fall,a,D +270,CS,2100,2018,Summer,a,D +274,PHYS,2210,2018,Fall,b,D +274,PHYS,3210,2018,Spring,b,D +276,BIOL,2030,2018,Summer,b,D +276,PHYS,2220,2015,Fall,a,D +277,BIOL,2030,2016,Fall,a,D +277,CS,3500,2016,Spring,a,D +277,MATH,1250,2018,Summer,c,D +278,PHYS,2060,2016,Summer,a,D +284,CS,3505,2019,Fall,a,D +285,BIOL,2355,2017,Spring,d,D +285,CS,4940,2019,Fall,a,D +285,PHYS,2060,2016,Summer,b,D +288,BIOL,1006,2017,Fall,a,D +292,CS,3505,2019,Summer,d,D +294,MATH,1210,2019,Spring,a,D +297,BIOL,1006,2020,Fall,a,D +298,CS,3505,2019,Summer,b,D +298,PHYS,2060,2018,Fall,a,D +301,BIOL,1210,2016,Spring,a,D +303,CS,4500,2019,Fall,a,D +303,PHYS,2210,2019,Fall,d,D +304,PHYS,2210,2017,Summer,d,D +305,CS,3505,2018,Fall,a,D +307,CS,2100,2019,Spring,a,D +307,CS,4500,2016,Spring,b,D +307,MATH,3210,2015,Fall,a,D +309,PHYS,3210,2019,Summer,c,D +311,BIOL,2210,2018,Summer,b,D +312,BIOL,2010,2019,Fall,a,D +312,BIOL,2355,2017,Spring,a,D +312,CS,4400,2019,Spring,d,D +312,MATH,1250,2018,Summer,a,D +313,BIOL,2021,2019,Spring,b,D +313,BIOL,2210,2018,Summer,b,D +313,CS,4940,2020,Summer,a,D +313,MATH,1220,2016,Spring,a,D +320,BIOL,2030,2019,Summer,c,D 
+321,MATH,1260,2019,Spring,c,D +321,PHYS,2220,2015,Fall,a,D +325,MATH,2270,2019,Summer,b,D +325,PHYS,3220,2020,Spring,c,D +329,CS,2100,2018,Summer,c,D +329,CS,2420,2016,Fall,b,D +332,BIOL,1006,2019,Fall,b,D +332,PHYS,2220,2018,Fall,a,D +333,CS,4400,2019,Spring,a,D +335,BIOL,2355,2017,Fall,a,D +335,CS,1410,2016,Spring,a,D +335,CS,4500,2017,Summer,a,D +335,MATH,1210,2016,Spring,b,D +339,PHYS,3210,2020,Fall,a,D +341,BIOL,1030,2020,Spring,a,D +342,MATH,2210,2019,Spring,b,D +342,MATH,2270,2017,Fall,b,D +342,PHYS,2210,2017,Summer,c,D +344,CS,3810,2018,Summer,b,D +345,CS,4940,2020,Summer,b,D +345,MATH,1210,2017,Summer,a,D +345,MATH,2210,2020,Fall,a,D +347,CS,1030,2020,Fall,a,D +347,MATH,1260,2019,Spring,a,D +347,MATH,2210,2020,Spring,a,D +348,CS,3505,2015,Fall,d,D +348,CS,4150,2015,Summer,b,D +348,CS,4400,2020,Spring,a,D +348,CS,4970,2018,Summer,b,D +355,CS,3505,2017,Fall,a,D +358,MATH,1220,2019,Fall,c,D +364,BIOL,1006,2019,Fall,a,D +365,BIOL,1006,2020,Spring,a,D +366,CS,4500,2018,Spring,d,D +372,BIOL,1210,2017,Spring,a,D +373,CS,3505,2019,Summer,b,D +374,BIOL,2030,2017,Spring,a,D +375,CS,4000,2020,Spring,a,D +375,PHYS,2060,2019,Fall,c,D +377,CS,2420,2015,Spring,a,D +377,CS,3200,2017,Spring,a,D +377,PHYS,2060,2015,Spring,a,D +378,CS,2100,2017,Spring,a,D +379,BIOL,1010,2019,Spring,d,D +379,MATH,3220,2018,Spring,a,D +379,PHYS,2060,2016,Spring,a,D +379,PHYS,3210,2017,Summer,b,D +385,CS,3200,2016,Fall,d,D +386,BIOL,1030,2020,Summer,a,D +386,BIOL,2010,2020,Spring,a,D +386,CS,2100,2019,Fall,a,D +386,CS,4000,2020,Spring,a,D +386,CS,4940,2020,Summer,a,D +387,BIOL,2210,2017,Summer,b,D +387,BIOL,2330,2017,Fall,a,D +391,MATH,2270,2020,Fall,b,D +392,CS,1410,2018,Spring,d,D +392,PHYS,2140,2016,Summer,b,D +393,BIOL,2325,2018,Summer,a,D +393,CS,2100,2018,Summer,c,D +393,MATH,2280,2016,Fall,a,D +393,PHYS,2060,2016,Summer,a,D +397,BIOL,2355,2017,Spring,c,D +397,MATH,2270,2017,Fall,d,D +397,PHYS,2060,2019,Summer,b,D +100,PHYS,2060,2019,Fall,c,D+ +102,BIOL,2355,2017,Spring,d,D+ +102,CS,4970,2018,Fall,d,D+ +102,PHYS,2210,2019,Spring,c,D+ +102,PHYS,3210,2018,Spring,c,D+ +105,BIOL,2355,2017,Spring,a,D+ +107,CS,1410,2018,Spring,b,D+ +107,CS,2420,2018,Spring,a,D+ +107,CS,4500,2019,Summer,a,D+ +109,CS,1410,2018,Spring,c,D+ +109,MATH,3210,2020,Fall,a,D+ +113,BIOL,2010,2020,Spring,a,D+ +113,MATH,1260,2019,Summer,a,D+ +118,BIOL,1006,2020,Fall,c,D+ +119,BIOL,2420,2016,Spring,a,D+ +119,PHYS,2210,2019,Spring,a,D+ +119,PHYS,2220,2017,Spring,a,D+ +120,BIOL,2325,2019,Summer,a,D+ +120,CS,1030,2020,Spring,c,D+ +120,CS,2100,2019,Fall,d,D+ +120,MATH,2280,2018,Fall,c,D+ +121,CS,3505,2020,Fall,b,D+ +122,BIOL,2355,2020,Summer,a,D+ +122,BIOL,2420,2020,Fall,a,D+ +124,CS,3500,2020,Summer,a,D+ +124,CS,3505,2017,Fall,a,D+ +125,PHYS,2040,2020,Spring,a,D+ +128,CS,4400,2019,Spring,b,D+ +128,MATH,2270,2017,Fall,d,D+ +128,PHYS,2140,2018,Summer,a,D+ +128,PHYS,3220,2018,Summer,a,D+ +129,MATH,3220,2018,Spring,d,D+ +130,MATH,2270,2020,Fall,a,D+ +131,BIOL,2030,2019,Summer,d,D+ +131,MATH,1220,2018,Spring,b,D+ +131,PHYS,2040,2020,Spring,a,D+ +132,MATH,2270,2017,Summer,a,D+ +132,PHYS,2040,2017,Fall,c,D+ +135,CS,4970,2019,Summer,d,D+ +135,MATH,1210,2020,Spring,a,D+ +135,MATH,1250,2020,Summer,a,D+ +138,CS,3100,2016,Spring,b,D+ +138,CS,3200,2015,Fall,a,D+ +138,MATH,1250,2016,Fall,b,D+ +139,BIOL,1030,2019,Spring,b,D+ +139,CS,3200,2019,Spring,a,D+ +139,PHYS,3220,2017,Summer,a,D+ +142,BIOL,2355,2020,Fall,a,D+ +143,BIOL,1030,2019,Spring,c,D+ +143,BIOL,2210,2018,Summer,a,D+ +144,BIOL,1210,2016,Spring,a,D+ +146,CS,4970,2020,Summer,d,D+ 
+146,PHYS,3210,2019,Summer,a,D+ +149,CS,1030,2016,Spring,a,D+ +156,PHYS,2060,2018,Fall,a,D+ +162,MATH,1260,2015,Summer,a,D+ +163,PHYS,2220,2017,Spring,d,D+ +164,BIOL,2210,2017,Summer,b,D+ +164,CS,2100,2018,Fall,d,D+ +164,CS,4970,2019,Summer,b,D+ +164,MATH,1220,2020,Summer,a,D+ +165,CS,4970,2019,Spring,a,D+ +167,CS,2420,2020,Summer,a,D+ +172,CS,1410,2015,Summer,d,D+ +173,BIOL,2210,2018,Summer,c,D+ +173,CS,4970,2019,Summer,c,D+ +175,MATH,2270,2020,Spring,a,D+ +177,PHYS,2040,2015,Spring,a,D+ +177,PHYS,2060,2019,Fall,c,D+ +178,BIOL,2021,2019,Spring,b,D+ +178,CS,3505,2019,Fall,a,D+ +179,BIOL,1010,2015,Fall,a,D+ +179,CS,4150,2020,Spring,a,D+ +179,PHYS,3210,2017,Summer,a,D+ +182,BIOL,2010,2015,Summer,a,D+ +182,BIOL,2355,2017,Fall,b,D+ +183,MATH,2270,2019,Summer,c,D+ +185,CS,4970,2018,Summer,c,D+ +185,PHYS,3220,2020,Spring,d,D+ +187,BIOL,2355,2018,Summer,b,D+ +192,BIOL,2420,2015,Spring,d,D+ +192,MATH,3220,2016,Spring,d,D+ +192,PHYS,2140,2015,Spring,b,D+ +194,PHYS,2060,2019,Summer,b,D+ +199,CS,3505,2017,Fall,a,D+ +204,CS,4150,2015,Summer,a,D+ +208,CS,3505,2017,Fall,b,D+ +209,PHYS,3210,2018,Spring,c,D+ +210,BIOL,2210,2018,Summer,c,D+ +210,BIOL,2330,2020,Spring,a,D+ +210,CS,3810,2018,Summer,d,D+ +211,CS,3505,2015,Fall,a,D+ +213,CS,3810,2016,Fall,a,D+ +214,BIOL,2030,2018,Summer,b,D+ +214,BIOL,2330,2017,Summer,a,D+ +214,PHYS,2220,2018,Spring,a,D+ +215,PHYS,3220,2017,Summer,a,D+ +217,CS,2100,2018,Fall,c,D+ +220,BIOL,1210,2018,Fall,a,D+ +220,BIOL,2210,2020,Fall,a,D+ +227,BIOL,2355,2018,Summer,a,D+ +227,MATH,3210,2020,Fall,a,D+ +228,CS,3505,2019,Spring,a,D+ +228,MATH,1220,2020,Spring,a,D+ +230,BIOL,2210,2018,Summer,b,D+ +230,MATH,1210,2017,Summer,b,D+ +231,CS,4400,2017,Spring,b,D+ +231,PHYS,2060,2018,Fall,d,D+ +233,CS,3810,2020,Fall,a,D+ +238,CS,2100,2019,Summer,b,D+ +239,BIOL,1010,2018,Fall,a,D+ +240,CS,4500,2020,Summer,a,D+ +241,BIOL,1006,2020,Fall,a,D+ +241,PHYS,2210,2019,Fall,a,D+ +247,MATH,1220,2019,Fall,c,D+ +249,BIOL,2021,2015,Summer,b,D+ +251,CS,4940,2020,Summer,b,D+ +254,BIOL,1030,2020,Spring,a,D+ +254,MATH,1220,2019,Fall,a,D+ +255,BIOL,1006,2020,Fall,b,D+ +255,BIOL,1210,2018,Fall,a,D+ +255,CS,4970,2018,Fall,d,D+ +255,MATH,1210,2019,Summer,a,D+ +255,MATH,3210,2020,Fall,a,D+ +255,PHYS,2060,2020,Spring,a,D+ +255,PHYS,2210,2019,Spring,a,D+ +255,PHYS,3220,2020,Spring,b,D+ +256,CS,2100,2017,Spring,a,D+ +256,CS,3505,2018,Fall,a,D+ +256,PHYS,2210,2019,Summer,a,D+ +257,CS,4970,2020,Summer,a,D+ +259,CS,2100,2018,Fall,d,D+ +259,MATH,1220,2017,Spring,b,D+ +260,CS,1030,2019,Fall,a,D+ +260,CS,3500,2020,Summer,a,D+ +260,MATH,1260,2017,Fall,a,D+ +262,BIOL,1006,2017,Fall,a,D+ +262,BIOL,2355,2018,Summer,a,D+ +262,CS,4000,2017,Fall,a,D+ +264,CS,3810,2016,Fall,b,D+ +264,CS,4150,2016,Summer,b,D+ +264,MATH,3220,2016,Fall,b,D+ +267,CS,4970,2018,Fall,d,D+ +270,BIOL,2325,2019,Spring,b,D+ +270,CS,3505,2019,Summer,d,D+ +270,MATH,1210,2016,Spring,a,D+ +270,MATH,3210,2019,Spring,a,D+ +273,CS,1410,2016,Spring,b,D+ +275,CS,3500,2017,Fall,c,D+ +276,BIOL,2355,2018,Summer,d,D+ +276,CS,4400,2017,Spring,c,D+ +276,MATH,2280,2015,Fall,a,D+ +276,PHYS,3220,2017,Fall,d,D+ +277,BIOL,1006,2020,Fall,b,D+ +277,CS,3505,2020,Spring,a,D+ +277,MATH,2270,2017,Summer,a,D+ +285,BIOL,2325,2019,Summer,a,D+ +285,CS,2100,2018,Summer,b,D+ +285,CS,3810,2016,Fall,a,D+ +285,MATH,2210,2019,Spring,a,D+ +288,CS,4970,2017,Summer,a,D+ +288,PHYS,2100,2018,Fall,a,D+ +288,PHYS,2220,2017,Spring,d,D+ +289,BIOL,2020,2019,Summer,a,D+ +289,CS,2100,2020,Fall,a,D+ +289,CS,3505,2019,Fall,b,D+ +290,CS,1030,2016,Spring,a,D+ +291,BIOL,2330,2016,Fall,a,D+ 
+291,MATH,1260,2017,Fall,a,D+ +292,BIOL,2325,2019,Spring,a,D+ +292,BIOL,2420,2020,Summer,a,D+ +292,CS,4000,2020,Fall,a,D+ +292,MATH,2280,2019,Fall,c,D+ +292,PHYS,2040,2017,Summer,a,D+ +294,BIOL,1006,2018,Spring,b,D+ +294,CS,4400,2019,Summer,a,D+ +295,PHYS,2040,2015,Fall,b,D+ +296,CS,4500,2019,Fall,d,D+ +298,BIOL,2210,2018,Summer,a,D+ +298,PHYS,2140,2018,Summer,a,D+ +299,BIOL,2355,2017,Spring,c,D+ +300,BIOL,2030,2019,Summer,d,D+ +302,CS,3100,2015,Summer,a,D+ +303,BIOL,1006,2019,Summer,a,D+ +305,CS,4500,2018,Spring,b,D+ +305,MATH,2210,2019,Spring,b,D+ +305,PHYS,2040,2019,Spring,b,D+ +305,PHYS,2140,2018,Summer,b,D+ +305,PHYS,2210,2019,Spring,a,D+ +307,PHYS,2100,2017,Summer,b,D+ +309,BIOL,2010,2019,Fall,a,D+ +309,CS,4000,2020,Spring,b,D+ +309,CS,4970,2020,Summer,b,D+ +310,MATH,1210,2020,Spring,a,D+ +311,CS,3500,2017,Summer,a,D+ +312,BIOL,2210,2016,Summer,a,D+ +312,CS,2420,2016,Spring,a,D+ +312,CS,3200,2015,Fall,c,D+ +312,MATH,3220,2018,Spring,b,D+ +312,PHYS,3220,2016,Summer,a,D+ +313,BIOL,1030,2017,Spring,a,D+ +313,CS,4970,2016,Fall,b,D+ +313,PHYS,2060,2019,Summer,b,D+ +314,CS,3100,2016,Fall,a,D+ +318,BIOL,1006,2017,Fall,b,D+ +318,BIOL,2021,2018,Summer,a,D+ +318,CS,4000,2017,Summer,a,D+ +321,BIOL,1006,2017,Fall,a,D+ +321,BIOL,1010,2017,Summer,a,D+ +321,CS,3505,2017,Fall,b,D+ +321,CS,4940,2020,Summer,b,D+ +323,MATH,1220,2019,Fall,a,D+ +325,CS,4970,2019,Summer,b,D+ +326,CS,4940,2017,Fall,a,D+ +326,PHYS,2210,2017,Summer,b,D+ +329,BIOL,1010,2018,Summer,c,D+ +329,BIOL,2355,2017,Spring,d,D+ +329,BIOL,2420,2020,Fall,a,D+ +329,CS,4970,2019,Summer,d,D+ +329,PHYS,2060,2018,Fall,c,D+ +331,CS,3500,2020,Summer,a,D+ +331,CS,4940,2020,Summer,a,D+ +332,BIOL,2020,2018,Spring,a,D+ +332,MATH,1250,2018,Summer,a,D+ +333,BIOL,1030,2019,Spring,a,D+ +333,CS,4500,2019,Summer,a,D+ +335,CS,3500,2017,Fall,a,D+ +339,CS,4150,2020,Fall,a,D+ +342,CS,3200,2020,Spring,a,D+ +345,CS,4000,2017,Fall,b,D+ +345,PHYS,3210,2019,Summer,c,D+ +347,BIOL,2355,2018,Summer,d,D+ +347,CS,3810,2020,Fall,a,D+ +355,PHYS,2220,2017,Spring,b,D+ +356,BIOL,2210,2016,Summer,a,D+ +356,CS,3810,2018,Summer,d,D+ +356,CS,4970,2018,Fall,d,D+ +356,PHYS,3210,2019,Spring,a,D+ +361,CS,4000,2017,Fall,a,D+ +361,MATH,1260,2017,Summer,a,D+ +362,BIOL,1010,2018,Fall,a,D+ +363,BIOL,2420,2020,Summer,a,D+ +365,CS,2420,2020,Summer,a,D+ +366,BIOL,1006,2018,Spring,a,D+ +369,CS,4500,2018,Spring,c,D+ +371,BIOL,1006,2020,Fall,b,D+ +371,CS,3505,2018,Fall,a,D+ +372,CS,2420,2017,Summer,c,D+ +373,MATH,1250,2016,Summer,a,D+ +373,MATH,3220,2016,Spring,a,D+ +373,PHYS,3220,2020,Spring,d,D+ +374,BIOL,2355,2017,Spring,d,D+ +375,MATH,2210,2019,Spring,b,D+ +377,CS,3500,2019,Fall,a,D+ +377,PHYS,2140,2019,Fall,b,D+ +378,BIOL,2021,2018,Summer,a,D+ +378,BIOL,2355,2017,Spring,d,D+ +378,MATH,2210,2020,Fall,a,D+ +379,BIOL,2325,2015,Fall,b,D+ +379,CS,2100,2019,Spring,a,D+ +379,CS,4400,2019,Spring,a,D+ +379,MATH,1210,2016,Fall,c,D+ +380,CS,3505,2019,Summer,b,D+ +386,MATH,1210,2020,Spring,b,D+ +386,MATH,3210,2020,Fall,a,D+ +387,BIOL,1030,2018,Fall,a,D+ +387,BIOL,2355,2018,Summer,d,D+ +387,CS,2420,2017,Fall,a,D+ +388,CS,1410,2018,Spring,c,D+ +389,PHYS,2040,2016,Spring,a,D+ +390,BIOL,2355,2020,Fall,a,D+ +391,CS,4500,2018,Spring,d,D+ +391,PHYS,2210,2019,Spring,c,D+ +392,BIOL,2020,2015,Fall,b,D+ +392,CS,2420,2016,Fall,a,D+ +394,BIOL,1006,2015,Spring,b,D+ +397,BIOL,2021,2018,Fall,b,D+ +397,CS,2420,2016,Fall,c,D+ +397,CS,3100,2017,Fall,a,D+ +397,CS,4500,2020,Summer,a,D+ +397,CS,4940,2020,Summer,b,D+ +398,CS,4500,2019,Fall,b,D+ +399,PHYS,2040,2019,Spring,a,D+ +100,BIOL,2030,2019,Summer,b,F 
+100,CS,4940,2020,Summer,b,F +101,CS,4500,2018,Spring,a,F +101,MATH,3220,2018,Spring,b,F +101,PHYS,3210,2018,Fall,a,F +102,CS,3810,2019,Fall,b,F +102,MATH,3210,2016,Fall,a,F +104,MATH,1210,2018,Fall,b,F +106,BIOL,2355,2020,Summer,b,F +106,PHYS,2060,2019,Summer,b,F +107,MATH,1210,2016,Fall,c,F +108,CS,3500,2019,Fall,b,F +112,PHYS,2060,2020,Fall,a,F +113,BIOL,1010,2020,Summer,a,F +113,MATH,3210,2020,Summer,a,F +115,BIOL,2210,2017,Spring,a,F +116,CS,3505,2016,Fall,b,F +117,BIOL,1210,2017,Spring,a,F +119,BIOL,2210,2019,Summer,b,F +119,BIOL,2325,2018,Spring,a,F +119,CS,1030,2020,Fall,a,F +119,MATH,2270,2020,Fall,b,F +120,BIOL,1030,2016,Fall,a,F +120,BIOL,2330,2016,Spring,a,F +120,CS,1410,2018,Spring,a,F +120,CS,3200,2016,Fall,a,F +120,MATH,1260,2019,Summer,b,F +121,CS,1410,2020,Spring,a,F +121,CS,4970,2018,Fall,c,F +122,CS,1410,2020,Spring,a,F +123,CS,4400,2020,Fall,a,F +127,CS,4400,2020,Fall,b,F +127,CS,4500,2020,Summer,a,F +128,BIOL,1030,2019,Summer,a,F +128,CS,4500,2018,Spring,d,F +129,BIOL,2355,2018,Summer,c,F +129,PHYS,3210,2020,Fall,b,F +131,BIOL,1030,2020,Summer,a,F +131,BIOL,2210,2018,Summer,a,F +131,BIOL,2325,2018,Fall,b,F +131,MATH,1260,2019,Summer,a,F +131,PHYS,3210,2020,Spring,a,F +132,BIOL,2030,2018,Summer,b,F +133,PHYS,3210,2019,Summer,b,F +137,BIOL,2355,2020,Summer,b,F +139,BIOL,1010,2019,Spring,a,F +139,BIOL,2020,2018,Spring,a,F +139,CS,2100,2018,Summer,c,F +142,CS,3505,2020,Fall,a,F +142,CS,4500,2020,Spring,a,F +143,BIOL,2030,2019,Summer,d,F +143,PHYS,2210,2019,Fall,a,F +146,CS,3505,2020,Spring,a,F +146,MATH,2280,2019,Fall,c,F +149,CS,4500,2016,Spring,b,F +149,MATH,1250,2015,Fall,a,F +151,BIOL,2210,2017,Summer,a,F +152,MATH,2270,2020,Fall,a,F +158,BIOL,2020,2018,Fall,a,F +158,PHYS,2100,2018,Fall,a,F +162,CS,1030,2016,Spring,a,F +162,CS,3505,2015,Fall,c,F +163,BIOL,2030,2016,Summer,b,F +163,PHYS,2140,2018,Fall,a,F +164,PHYS,2220,2020,Summer,b,F +167,BIOL,2010,2020,Summer,a,F +167,CS,4940,2019,Fall,a,F +167,PHYS,2060,2020,Spring,a,F +167,PHYS,2140,2019,Fall,a,F +169,BIOL,1006,2019,Fall,b,F +169,BIOL,2355,2018,Spring,a,F +175,PHYS,2220,2020,Spring,a,F +177,BIOL,2210,2017,Spring,c,F +177,CS,2100,2020,Fall,a,F +177,CS,4940,2020,Summer,b,F +177,MATH,1210,2018,Fall,a,F +178,BIOL,1006,2019,Summer,a,F +178,CS,4970,2018,Fall,d,F +178,PHYS,2220,2018,Fall,a,F +179,BIOL,1006,2016,Summer,b,F +179,BIOL,2030,2017,Spring,a,F +179,CS,3505,2018,Fall,a,F +181,CS,1030,2020,Fall,a,F +182,BIOL,1006,2015,Summer,a,F +182,BIOL,1210,2016,Spring,a,F +182,CS,1410,2015,Summer,c,F +182,CS,2100,2018,Summer,b,F +185,BIOL,2420,2018,Spring,a,F +185,CS,4000,2020,Fall,a,F +187,CS,3200,2020,Spring,b,F +192,MATH,3210,2015,Fall,c,F +194,CS,2100,2019,Summer,b,F +195,BIOL,1010,2016,Summer,a,F +195,CS,2420,2016,Summer,a,F +195,PHYS,3220,2016,Summer,a,F +197,BIOL,1030,2018,Summer,a,F +199,BIOL,2030,2020,Spring,b,F +199,CS,3505,2017,Fall,b,F +199,CS,4400,2020,Spring,a,F +200,CS,1410,2020,Spring,b,F +200,MATH,2210,2020,Spring,b,F +207,BIOL,2420,2017,Summer,b,F +210,BIOL,2010,2018,Spring,a,F +210,CS,3100,2017,Spring,a,F +210,CS,4150,2019,Spring,a,F +211,BIOL,1010,2015,Fall,c,F +211,BIOL,1030,2015,Spring,c,F +211,MATH,1250,2015,Spring,c,F +213,PHYS,3220,2017,Fall,c,F +220,BIOL,2355,2020,Fall,a,F +220,CS,3505,2019,Summer,a,F +221,BIOL,2355,2020,Summer,a,F +221,CS,4970,2020,Summer,b,F +223,MATH,3210,2019,Fall,a,F +229,PHYS,3210,2018,Spring,a,F +230,BIOL,1210,2019,Spring,a,F +230,MATH,2280,2018,Spring,a,F +231,BIOL,1030,2019,Spring,d,F +231,MATH,1210,2018,Fall,a,F +231,PHYS,2140,2018,Summer,a,F +237,MATH,3220,2018,Spring,a,F 
+238,BIOL,1010,2018,Summer,b,F +240,CS,2100,2019,Spring,a,F +243,BIOL,2021,2016,Fall,a,F +246,BIOL,1030,2015,Summer,a,F +247,CS,1030,2019,Fall,b,F +247,CS,3500,2020,Summer,a,F +247,CS,3505,2018,Summer,b,F +248,BIOL,2420,2020,Spring,a,F +250,PHYS,2060,2020,Fall,a,F +252,BIOL,1010,2018,Fall,a,F +252,CS,4000,2017,Fall,a,F +255,BIOL,2355,2019,Spring,c,F +255,BIOL,2420,2020,Fall,a,F +255,CS,3505,2018,Summer,a,F +255,MATH,2280,2020,Spring,a,F +256,BIOL,2021,2018,Fall,a,F +256,MATH,3210,2020,Fall,a,F +257,MATH,1210,2018,Summer,a,F +257,PHYS,2210,2019,Spring,d,F +258,CS,2100,2018,Summer,c,F +259,BIOL,1010,2018,Summer,c,F +259,PHYS,2140,2017,Fall,a,F +260,BIOL,2020,2018,Fall,b,F +260,CS,4940,2017,Fall,a,F +260,PHYS,3220,2018,Summer,a,F +261,MATH,1250,2018,Summer,c,F +261,PHYS,2210,2017,Summer,d,F +262,CS,1030,2016,Fall,a,F +267,CS,1410,2020,Spring,a,F +267,CS,4970,2018,Fall,b,F +268,BIOL,2021,2016,Fall,a,F +270,BIOL,1010,2020,Summer,b,F +270,BIOL,2030,2019,Summer,b,F +270,BIOL,2030,2019,Summer,c,F +270,CS,2420,2016,Fall,c,F +272,CS,4400,2020,Fall,a,F +274,CS,4970,2018,Fall,a,F +276,BIOL,1010,2015,Summer,a,F +276,BIOL,2355,2018,Summer,b,F +276,CS,3500,2019,Summer,a,F +276,CS,4970,2016,Fall,a,F +276,MATH,1250,2015,Spring,c,F +278,BIOL,1010,2017,Spring,a,F +278,MATH,3220,2016,Summer,a,F +280,MATH,1220,2015,Summer,b,F +281,CS,3500,2020,Summer,a,F +282,CS,3200,2015,Fall,d,F +282,CS,3500,2016,Summer,a,F +282,PHYS,2220,2015,Fall,b,F +285,CS,4500,2016,Spring,b,F +289,CS,4970,2019,Summer,a,F +290,BIOL,2325,2015,Fall,b,F +290,CS,4500,2015,Summer,b,F +290,MATH,3220,2016,Spring,d,F +292,BIOL,1010,2018,Summer,b,F +292,BIOL,2355,2018,Fall,a,F +292,CS,2100,2018,Fall,a,F +293,PHYS,2220,2020,Summer,a,F +299,MATH,1220,2017,Spring,a,F +301,CS,1410,2015,Summer,d,F +303,CS,3200,2020,Spring,a,F +303,MATH,2270,2019,Summer,b,F +303,PHYS,3220,2020,Spring,d,F +304,BIOL,2010,2017,Fall,a,F +304,MATH,1260,2017,Summer,a,F +307,CS,4000,2015,Fall,a,F +309,CS,3505,2019,Fall,b,F +309,CS,4400,2017,Spring,b,F +311,BIOL,2010,2020,Summer,a,F +311,MATH,1210,2018,Fall,a,F +311,PHYS,2060,2018,Fall,b,F +312,BIOL,2030,2019,Summer,a,F +312,MATH,1210,2018,Fall,a,F +312,PHYS,2040,2015,Fall,b,F +313,CS,3200,2016,Fall,b,F +313,MATH,2280,2020,Spring,a,F +313,PHYS,2040,2020,Spring,a,F +314,PHYS,3210,2016,Summer,b,F +320,CS,3505,2019,Spring,a,F +329,BIOL,1030,2016,Summer,a,F +329,BIOL,2210,2019,Summer,a,F +329,BIOL,2325,2019,Spring,a,F +329,CS,4150,2020,Fall,a,F +329,PHYS,2140,2019,Fall,a,F +332,CS,3505,2020,Spring,a,F +333,BIOL,2420,2020,Summer,a,F +335,BIOL,2330,2016,Fall,a,F +339,CS,4940,2020,Summer,b,F +339,PHYS,2220,2020,Summer,a,F +340,BIOL,1006,2020,Spring,a,F +340,CS,4500,2019,Summer,a,F +341,MATH,1220,2019,Fall,a,F +342,MATH,1210,2017,Summer,c,F +344,CS,2100,2018,Fall,b,F +345,CS,3200,2020,Fall,a,F +345,MATH,2280,2018,Fall,c,F +345,MATH,3220,2018,Spring,a,F +347,PHYS,2210,2019,Fall,c,F +348,PHYS,2060,2016,Summer,b,F +353,CS,2420,2017,Summer,a,F +355,BIOL,2010,2017,Fall,a,F +355,CS,3100,2017,Spring,b,F +355,PHYS,2100,2017,Summer,c,F +356,BIOL,1210,2019,Spring,a,F +358,CS,3505,2019,Spring,b,F +359,PHYS,2210,2019,Summer,a,F +361,CS,2420,2017,Summer,a,F +362,BIOL,2355,2020,Spring,a,F +363,CS,3500,2019,Fall,c,F +365,PHYS,3220,2020,Spring,c,F +366,PHYS,2060,2019,Summer,b,F +366,PHYS,2100,2019,Summer,a,F +368,BIOL,2210,2019,Summer,a,F +368,CS,4970,2019,Summer,a,F +368,MATH,1210,2018,Summer,a,F +371,MATH,2270,2020,Fall,b,F +372,MATH,1250,2018,Summer,a,F +373,BIOL,1010,2017,Spring,a,F +373,BIOL,2020,2018,Fall,b,F +377,PHYS,3210,2017,Fall,a,F 
+378,CS,4940,2020,Summer,a,F +379,BIOL,2355,2018,Summer,d,F +384,BIOL,1030,2019,Spring,d,F +385,MATH,1210,2016,Fall,d,F +386,CS,3505,2018,Summer,a,F +386,PHYS,2100,2019,Summer,a,F +386,PHYS,2220,2020,Fall,a,F +387,BIOL,2325,2017,Fall,b,F +387,MATH,1210,2017,Summer,c,F +389,CS,1410,2016,Summer,a,F +390,MATH,1210,2020,Spring,a,F +391,BIOL,2330,2017,Fall,a,F +391,CS,1030,2020,Spring,b,F +391,CS,4970,2019,Summer,b,F +391,MATH,1250,2020,Summer,a,F +391,MATH,2270,2020,Fall,a,F +391,PHYS,2220,2016,Summer,a,F +392,MATH,3220,2016,Spring,b,F +392,PHYS,2100,2016,Fall,a,F +393,CS,1030,2016,Summer,a,F +396,MATH,1220,2019,Fall,c,F +397,BIOL,2210,2017,Spring,c,F +399,BIOL,1010,2018,Summer,a,F diff --git a/tests/data/Section.csv b/tests/data/Section.csv new file mode 100644 index 000000000..8dc95361b --- /dev/null +++ b/tests/data/Section.csv @@ -0,0 +1,757 @@ +dept,course,term_year,term,section,auditorium +BIOL,1006,2015,Spring,a,C68 +BIOL,1006,2015,Spring,b,C22 +BIOL,1006,2015,Summer,a,D38 +BIOL,1006,2015,Summer,b,C15 +BIOL,1006,2016,Spring,a,B87 +BIOL,1006,2016,Spring,b,D72 +BIOL,1006,2016,Summer,a,A34 +BIOL,1006,2016,Summer,b,D48 +BIOL,1006,2016,Summer,c,F34 +BIOL,1006,2016,Summer,d,F48 +BIOL,1006,2017,Fall,a,E42 +BIOL,1006,2017,Fall,b,B83 +BIOL,1006,2018,Spring,a,F39 +BIOL,1006,2018,Spring,b,A18 +BIOL,1006,2018,Fall,a,A13 +BIOL,1006,2019,Spring,a,D59 +BIOL,1006,2019,Summer,a,F70 +BIOL,1006,2019,Fall,a,B54 +BIOL,1006,2019,Fall,b,D79 +BIOL,1006,2020,Spring,a,A89 +BIOL,1006,2020,Fall,a,C13 +BIOL,1006,2020,Fall,b,C70 +BIOL,1006,2020,Fall,c,F46 +BIOL,1010,2015,Summer,a,D12 +BIOL,1010,2015,Summer,b,F82 +BIOL,1010,2015,Summer,c,A7 +BIOL,1010,2015,Summer,d,B17 +BIOL,1010,2015,Fall,a,B9 +BIOL,1010,2015,Fall,b,E27 +BIOL,1010,2015,Fall,c,B43 +BIOL,1010,2015,Fall,d,E1 +BIOL,1010,2016,Summer,a,B70 +BIOL,1010,2017,Spring,a,A17 +BIOL,1010,2017,Summer,a,B76 +BIOL,1010,2018,Summer,a,E15 +BIOL,1010,2018,Summer,b,D58 +BIOL,1010,2018,Summer,c,E76 +BIOL,1010,2018,Fall,a,E6 +BIOL,1010,2018,Fall,b,F67 +BIOL,1010,2019,Spring,a,A8 +BIOL,1010,2019,Spring,b,D55 +BIOL,1010,2019,Spring,c,D92 +BIOL,1010,2019,Spring,d,A11 +BIOL,1010,2020,Summer,a,E71 +BIOL,1010,2020,Summer,b,D77 +BIOL,1010,2020,Summer,c,D65 +BIOL,1010,2020,Summer,d,A90 +BIOL,1030,2015,Spring,a,E93 +BIOL,1030,2015,Spring,b,D58 +BIOL,1030,2015,Spring,c,D44 +BIOL,1030,2015,Spring,d,D54 +BIOL,1030,2015,Summer,a,C55 +BIOL,1030,2016,Spring,a,F61 +BIOL,1030,2016,Summer,a,A56 +BIOL,1030,2016,Fall,a,B72 +BIOL,1030,2017,Spring,a,E43 +BIOL,1030,2017,Spring,b,D46 +BIOL,1030,2017,Spring,c,D93 +BIOL,1030,2018,Summer,a,B85 +BIOL,1030,2018,Fall,a,C72 +BIOL,1030,2019,Spring,a,E29 +BIOL,1030,2019,Spring,b,E99 +BIOL,1030,2019,Spring,c,E87 +BIOL,1030,2019,Spring,d,A78 +BIOL,1030,2019,Summer,a,F35 +BIOL,1030,2020,Spring,a,C45 +BIOL,1030,2020,Summer,a,E85 +BIOL,1210,2015,Spring,a,A12 +BIOL,1210,2015,Spring,b,B49 +BIOL,1210,2016,Spring,a,E77 +BIOL,1210,2017,Spring,a,F11 +BIOL,1210,2017,Summer,a,D78 +BIOL,1210,2018,Spring,a,A45 +BIOL,1210,2018,Fall,a,D68 +BIOL,1210,2018,Fall,b,A29 +BIOL,1210,2019,Spring,a,A27 +BIOL,2010,2015,Spring,a,B17 +BIOL,2010,2015,Summer,a,E72 +BIOL,2010,2015,Summer,b,C10 +BIOL,2010,2015,Fall,a,D3 +BIOL,2010,2017,Summer,a,C15 +BIOL,2010,2017,Fall,a,B80 +BIOL,2010,2018,Spring,a,C12 +BIOL,2010,2019,Fall,a,F44 +BIOL,2010,2020,Spring,a,A66 +BIOL,2010,2020,Spring,b,E66 +BIOL,2010,2020,Summer,a,C94 +BIOL,2010,2020,Summer,b,F19 +BIOL,2020,2015,Summer,a,F10 +BIOL,2020,2015,Fall,a,D60 +BIOL,2020,2015,Fall,b,E58 +BIOL,2020,2015,Fall,c,E83 +BIOL,2020,2015,Fall,d,E42 
+BIOL,2020,2016,Spring,a,F41 +BIOL,2020,2018,Spring,a,C60 +BIOL,2020,2018,Fall,a,A83 +BIOL,2020,2018,Fall,b,A79 +BIOL,2020,2018,Fall,c,D60 +BIOL,2020,2018,Fall,d,F6 +BIOL,2020,2019,Summer,a,F25 +BIOL,2021,2015,Spring,a,C92 +BIOL,2021,2015,Summer,a,A32 +BIOL,2021,2015,Summer,b,D68 +BIOL,2021,2015,Summer,c,B47 +BIOL,2021,2016,Fall,a,F83 +BIOL,2021,2017,Summer,a,D37 +BIOL,2021,2017,Fall,a,E20 +BIOL,2021,2018,Spring,a,B45 +BIOL,2021,2018,Summer,a,F51 +BIOL,2021,2018,Fall,a,A40 +BIOL,2021,2018,Fall,b,F43 +BIOL,2021,2018,Fall,c,F90 +BIOL,2021,2018,Fall,d,F88 +BIOL,2021,2019,Spring,a,A83 +BIOL,2021,2019,Spring,b,E47 +BIOL,2021,2019,Fall,a,C99 +BIOL,2030,2015,Spring,a,A65 +BIOL,2030,2015,Spring,b,F68 +BIOL,2030,2015,Fall,a,B77 +BIOL,2030,2016,Summer,a,E22 +BIOL,2030,2016,Summer,b,A53 +BIOL,2030,2016,Fall,a,D79 +BIOL,2030,2017,Spring,a,D30 +BIOL,2030,2017,Spring,b,C61 +BIOL,2030,2017,Spring,c,B48 +BIOL,2030,2017,Spring,d,E57 +BIOL,2030,2018,Summer,a,B26 +BIOL,2030,2018,Summer,b,B33 +BIOL,2030,2019,Summer,a,F67 +BIOL,2030,2019,Summer,b,C11 +BIOL,2030,2019,Summer,c,C58 +BIOL,2030,2019,Summer,d,B56 +BIOL,2030,2020,Spring,a,D45 +BIOL,2030,2020,Spring,b,D7 +BIOL,2210,2016,Summer,a,C19 +BIOL,2210,2017,Spring,a,F18 +BIOL,2210,2017,Spring,b,D58 +BIOL,2210,2017,Spring,c,A3 +BIOL,2210,2017,Summer,a,E94 +BIOL,2210,2017,Summer,b,D15 +BIOL,2210,2017,Summer,c,B39 +BIOL,2210,2018,Spring,a,E59 +BIOL,2210,2018,Summer,a,D77 +BIOL,2210,2018,Summer,b,F66 +BIOL,2210,2018,Summer,c,F19 +BIOL,2210,2019,Summer,a,B86 +BIOL,2210,2019,Summer,b,E47 +BIOL,2210,2019,Fall,a,E65 +BIOL,2210,2019,Fall,b,D61 +BIOL,2210,2020,Fall,a,C9 +BIOL,2325,2015,Spring,a,F14 +BIOL,2325,2015,Spring,b,F97 +BIOL,2325,2015,Fall,a,F23 +BIOL,2325,2015,Fall,b,F60 +BIOL,2325,2015,Fall,c,D81 +BIOL,2325,2016,Summer,a,D5 +BIOL,2325,2017,Fall,a,E51 +BIOL,2325,2017,Fall,b,E61 +BIOL,2325,2018,Spring,a,B37 +BIOL,2325,2018,Summer,a,F43 +BIOL,2325,2018,Fall,a,D52 +BIOL,2325,2018,Fall,b,D44 +BIOL,2325,2018,Fall,c,D89 +BIOL,2325,2019,Spring,a,E35 +BIOL,2325,2019,Spring,b,F55 +BIOL,2325,2019,Summer,a,B70 +BIOL,2330,2015,Spring,a,B89 +BIOL,2330,2015,Fall,a,C79 +BIOL,2330,2015,Fall,b,C82 +BIOL,2330,2015,Fall,c,A10 +BIOL,2330,2015,Fall,d,D47 +BIOL,2330,2016,Spring,a,F87 +BIOL,2330,2016,Fall,a,F57 +BIOL,2330,2017,Summer,a,C47 +BIOL,2330,2017,Fall,a,E20 +BIOL,2330,2017,Fall,b,C48 +BIOL,2330,2019,Fall,a,A95 +BIOL,2330,2020,Spring,a,E16 +BIOL,2355,2015,Spring,a,C89 +BIOL,2355,2015,Spring,b,D26 +BIOL,2355,2015,Summer,a,D23 +BIOL,2355,2015,Summer,b,D12 +BIOL,2355,2015,Summer,c,C86 +BIOL,2355,2016,Spring,a,C21 +BIOL,2355,2016,Spring,b,F82 +BIOL,2355,2017,Spring,a,B31 +BIOL,2355,2017,Spring,b,A47 +BIOL,2355,2017,Spring,c,C60 +BIOL,2355,2017,Spring,d,E17 +BIOL,2355,2017,Summer,a,A9 +BIOL,2355,2017,Fall,a,F62 +BIOL,2355,2017,Fall,b,D74 +BIOL,2355,2018,Spring,a,F10 +BIOL,2355,2018,Summer,a,C17 +BIOL,2355,2018,Summer,b,E82 +BIOL,2355,2018,Summer,c,B56 +BIOL,2355,2018,Summer,d,A16 +BIOL,2355,2018,Fall,a,C22 +BIOL,2355,2019,Spring,a,B45 +BIOL,2355,2019,Spring,b,E37 +BIOL,2355,2019,Spring,c,C26 +BIOL,2355,2019,Spring,d,E36 +BIOL,2355,2020,Spring,a,E83 +BIOL,2355,2020,Summer,a,B22 +BIOL,2355,2020,Summer,b,F78 +BIOL,2355,2020,Fall,a,A4 +BIOL,2420,2015,Spring,a,E34 +BIOL,2420,2015,Spring,b,E54 +BIOL,2420,2015,Spring,c,A64 +BIOL,2420,2015,Spring,d,E38 +BIOL,2420,2015,Summer,a,C62 +BIOL,2420,2015,Fall,a,D39 +BIOL,2420,2016,Spring,a,B57 +BIOL,2420,2017,Summer,a,C94 +BIOL,2420,2017,Summer,b,C52 +BIOL,2420,2018,Spring,a,C31 +BIOL,2420,2020,Spring,a,B21 +BIOL,2420,2020,Spring,b,E93 
+BIOL,2420,2020,Summer,a,D66 +BIOL,2420,2020,Fall,a,D3 +CS,1030,2016,Spring,a,A7 +CS,1030,2016,Summer,a,F87 +CS,1030,2016,Fall,a,A56 +CS,1030,2018,Fall,a,C71 +CS,1030,2019,Fall,a,E88 +CS,1030,2019,Fall,b,B13 +CS,1030,2020,Spring,a,C72 +CS,1030,2020,Spring,b,B26 +CS,1030,2020,Spring,c,D65 +CS,1030,2020,Fall,a,D67 +CS,1410,2015,Spring,a,E18 +CS,1410,2015,Summer,a,B51 +CS,1410,2015,Summer,b,F39 +CS,1410,2015,Summer,c,E66 +CS,1410,2015,Summer,d,F73 +CS,1410,2016,Spring,a,C43 +CS,1410,2016,Spring,b,D75 +CS,1410,2016,Summer,a,F81 +CS,1410,2017,Spring,a,E74 +CS,1410,2018,Spring,a,F80 +CS,1410,2018,Spring,b,D19 +CS,1410,2018,Spring,c,B5 +CS,1410,2018,Spring,d,F15 +CS,1410,2020,Spring,a,E61 +CS,1410,2020,Spring,b,F94 +CS,2100,2015,Summer,a,E49 +CS,2100,2016,Spring,a,C70 +CS,2100,2016,Summer,a,F88 +CS,2100,2016,Summer,b,F34 +CS,2100,2016,Summer,c,B32 +CS,2100,2017,Spring,a,C99 +CS,2100,2017,Fall,a,C62 +CS,2100,2018,Spring,a,F36 +CS,2100,2018,Summer,a,E49 +CS,2100,2018,Summer,b,D45 +CS,2100,2018,Summer,c,B38 +CS,2100,2018,Fall,a,A45 +CS,2100,2018,Fall,b,F33 +CS,2100,2018,Fall,c,B26 +CS,2100,2018,Fall,d,C72 +CS,2100,2019,Spring,a,B14 +CS,2100,2019,Spring,b,E31 +CS,2100,2019,Summer,a,E29 +CS,2100,2019,Summer,b,A13 +CS,2100,2019,Fall,a,A88 +CS,2100,2019,Fall,b,A71 +CS,2100,2019,Fall,c,B53 +CS,2100,2019,Fall,d,D62 +CS,2100,2020,Spring,a,C42 +CS,2100,2020,Fall,a,F74 +CS,2420,2015,Spring,a,A23 +CS,2420,2015,Summer,a,A51 +CS,2420,2015,Summer,b,B96 +CS,2420,2015,Summer,c,C5 +CS,2420,2015,Fall,a,A43 +CS,2420,2016,Spring,a,E68 +CS,2420,2016,Summer,a,E60 +CS,2420,2016,Fall,a,C21 +CS,2420,2016,Fall,b,F33 +CS,2420,2016,Fall,c,A95 +CS,2420,2017,Summer,a,B23 +CS,2420,2017,Summer,b,F52 +CS,2420,2017,Summer,c,E42 +CS,2420,2017,Fall,a,B18 +CS,2420,2018,Spring,a,A34 +CS,2420,2019,Summer,a,E2 +CS,2420,2020,Summer,a,D40 +CS,2420,2020,Fall,a,F99 +CS,3100,2015,Summer,a,C48 +CS,3100,2015,Summer,b,B18 +CS,3100,2016,Spring,a,C54 +CS,3100,2016,Spring,b,D97 +CS,3100,2016,Spring,c,F28 +CS,3100,2016,Spring,d,F97 +CS,3100,2016,Summer,a,A68 +CS,3100,2016,Fall,a,A73 +CS,3100,2017,Spring,a,E26 +CS,3100,2017,Spring,b,B22 +CS,3100,2017,Summer,a,A88 +CS,3100,2017,Fall,a,A66 +CS,3100,2019,Spring,a,E60 +CS,3100,2019,Spring,b,C93 +CS,3200,2015,Spring,a,E8 +CS,3200,2015,Spring,b,A61 +CS,3200,2015,Fall,a,F94 +CS,3200,2015,Fall,b,D48 +CS,3200,2015,Fall,c,D58 +CS,3200,2015,Fall,d,D49 +CS,3200,2016,Summer,a,E18 +CS,3200,2016,Summer,b,C16 +CS,3200,2016,Fall,a,E17 +CS,3200,2016,Fall,b,B1 +CS,3200,2016,Fall,c,C60 +CS,3200,2016,Fall,d,E55 +CS,3200,2017,Spring,a,B32 +CS,3200,2018,Spring,a,A5 +CS,3200,2018,Spring,b,D79 +CS,3200,2018,Spring,c,A31 +CS,3200,2019,Spring,a,F7 +CS,3200,2020,Spring,a,A18 +CS,3200,2020,Spring,b,C30 +CS,3200,2020,Spring,c,F74 +CS,3200,2020,Summer,a,F42 +CS,3200,2020,Fall,a,F67 +CS,3500,2015,Fall,a,F23 +CS,3500,2015,Fall,b,D72 +CS,3500,2016,Spring,a,F86 +CS,3500,2016,Summer,a,F54 +CS,3500,2017,Summer,a,B29 +CS,3500,2017,Fall,a,D8 +CS,3500,2017,Fall,b,D72 +CS,3500,2017,Fall,c,D32 +CS,3500,2019,Summer,a,B7 +CS,3500,2019,Fall,a,E6 +CS,3500,2019,Fall,b,B98 +CS,3500,2019,Fall,c,F72 +CS,3500,2020,Summer,a,C2 +CS,3505,2015,Spring,a,F97 +CS,3505,2015,Fall,a,B51 +CS,3505,2015,Fall,b,E42 +CS,3505,2015,Fall,c,D60 +CS,3505,2015,Fall,d,C40 +CS,3505,2016,Summer,a,D60 +CS,3505,2016,Fall,a,D98 +CS,3505,2016,Fall,b,B48 +CS,3505,2017,Summer,a,F19 +CS,3505,2017,Fall,a,E75 +CS,3505,2017,Fall,b,C20 +CS,3505,2018,Summer,a,B64 +CS,3505,2018,Summer,b,F44 +CS,3505,2018,Fall,a,F83 +CS,3505,2018,Fall,b,D22 +CS,3505,2018,Fall,c,C22 
+CS,3505,2019,Spring,a,B70 +CS,3505,2019,Spring,b,A68 +CS,3505,2019,Summer,a,F7 +CS,3505,2019,Summer,b,D18 +CS,3505,2019,Summer,c,B9 +CS,3505,2019,Summer,d,A28 +CS,3505,2019,Fall,a,C8 +CS,3505,2019,Fall,b,F79 +CS,3505,2019,Fall,c,F63 +CS,3505,2020,Spring,a,D2 +CS,3505,2020,Summer,a,E37 +CS,3505,2020,Fall,a,F56 +CS,3505,2020,Fall,b,B14 +CS,3505,2020,Fall,c,E20 +CS,3810,2015,Spring,a,C46 +CS,3810,2016,Summer,a,F29 +CS,3810,2016,Fall,a,A84 +CS,3810,2016,Fall,b,F98 +CS,3810,2018,Spring,a,F22 +CS,3810,2018,Summer,a,F43 +CS,3810,2018,Summer,b,A68 +CS,3810,2018,Summer,c,B28 +CS,3810,2018,Summer,d,F73 +CS,3810,2019,Fall,a,E73 +CS,3810,2019,Fall,b,B41 +CS,3810,2020,Fall,a,D10 +CS,4000,2015,Spring,a,E50 +CS,4000,2015,Spring,b,E43 +CS,4000,2015,Summer,a,F93 +CS,4000,2015,Fall,a,C7 +CS,4000,2016,Fall,a,E77 +CS,4000,2017,Spring,a,A82 +CS,4000,2017,Summer,a,D30 +CS,4000,2017,Fall,a,D24 +CS,4000,2017,Fall,b,F49 +CS,4000,2018,Spring,a,B92 +CS,4000,2019,Spring,a,B95 +CS,4000,2020,Spring,a,D47 +CS,4000,2020,Spring,b,A17 +CS,4000,2020,Fall,a,E53 +CS,4150,2015,Summer,a,E77 +CS,4150,2015,Summer,b,D2 +CS,4150,2016,Summer,a,B74 +CS,4150,2016,Summer,b,F49 +CS,4150,2018,Fall,a,C33 +CS,4150,2018,Fall,b,F81 +CS,4150,2019,Spring,a,D14 +CS,4150,2020,Spring,a,D43 +CS,4150,2020,Fall,a,F77 +CS,4400,2015,Summer,a,B62 +CS,4400,2015,Fall,a,C38 +CS,4400,2015,Fall,b,F63 +CS,4400,2015,Fall,c,B42 +CS,4400,2016,Spring,a,D47 +CS,4400,2016,Summer,a,E70 +CS,4400,2016,Fall,a,A94 +CS,4400,2017,Spring,a,D38 +CS,4400,2017,Spring,b,A53 +CS,4400,2017,Spring,c,B82 +CS,4400,2019,Spring,a,E52 +CS,4400,2019,Spring,b,F54 +CS,4400,2019,Spring,c,C90 +CS,4400,2019,Spring,d,E77 +CS,4400,2019,Summer,a,A14 +CS,4400,2019,Summer,b,F86 +CS,4400,2019,Fall,a,A73 +CS,4400,2019,Fall,b,F83 +CS,4400,2020,Spring,a,D14 +CS,4400,2020,Fall,a,E72 +CS,4400,2020,Fall,b,E29 +CS,4500,2015,Summer,a,E89 +CS,4500,2015,Summer,b,C4 +CS,4500,2016,Spring,a,A15 +CS,4500,2016,Spring,b,F19 +CS,4500,2016,Fall,a,E62 +CS,4500,2017,Summer,a,D41 +CS,4500,2018,Spring,a,A44 +CS,4500,2018,Spring,b,F22 +CS,4500,2018,Spring,c,F32 +CS,4500,2018,Spring,d,E21 +CS,4500,2019,Summer,a,F24 +CS,4500,2019,Fall,a,D4 +CS,4500,2019,Fall,b,B58 +CS,4500,2019,Fall,c,D1 +CS,4500,2019,Fall,d,B36 +CS,4500,2020,Spring,a,A74 +CS,4500,2020,Summer,a,B47 +CS,4940,2015,Summer,a,E82 +CS,4940,2017,Fall,a,C79 +CS,4940,2017,Fall,b,F18 +CS,4940,2019,Fall,a,E50 +CS,4940,2020,Summer,a,F23 +CS,4940,2020,Summer,b,D37 +CS,4970,2016,Fall,a,E65 +CS,4970,2016,Fall,b,D88 +CS,4970,2017,Spring,a,D63 +CS,4970,2017,Summer,a,B38 +CS,4970,2018,Summer,a,E96 +CS,4970,2018,Summer,b,D71 +CS,4970,2018,Summer,c,E15 +CS,4970,2018,Fall,a,C70 +CS,4970,2018,Fall,b,A98 +CS,4970,2018,Fall,c,E28 +CS,4970,2018,Fall,d,A95 +CS,4970,2019,Spring,a,B39 +CS,4970,2019,Spring,b,A58 +CS,4970,2019,Summer,a,A57 +CS,4970,2019,Summer,b,A100 +CS,4970,2019,Summer,c,B95 +CS,4970,2019,Summer,d,C91 +CS,4970,2019,Fall,a,D22 +CS,4970,2019,Fall,b,B27 +CS,4970,2019,Fall,c,E45 +CS,4970,2019,Fall,d,E69 +CS,4970,2020,Summer,a,C38 +CS,4970,2020,Summer,b,E87 +CS,4970,2020,Summer,c,B97 +CS,4970,2020,Summer,d,A36 +CS,4970,2020,Fall,a,B90 +CS,4970,2020,Fall,b,B19 +CS,4970,2020,Fall,c,B98 +CS,4970,2020,Fall,d,D63 +MATH,1210,2015,Summer,a,F54 +MATH,1210,2016,Spring,a,A52 +MATH,1210,2016,Spring,b,C89 +MATH,1210,2016,Spring,c,C59 +MATH,1210,2016,Spring,d,C75 +MATH,1210,2016,Fall,a,F12 +MATH,1210,2016,Fall,b,D82 +MATH,1210,2016,Fall,c,C9 +MATH,1210,2016,Fall,d,D28 +MATH,1210,2017,Spring,a,B64 +MATH,1210,2017,Summer,a,C71 +MATH,1210,2017,Summer,b,E63 
+MATH,1210,2017,Summer,c,F98 +MATH,1210,2018,Spring,a,D3 +MATH,1210,2018,Summer,a,D59 +MATH,1210,2018,Fall,a,B89 +MATH,1210,2018,Fall,b,F39 +MATH,1210,2019,Spring,a,C12 +MATH,1210,2019,Spring,b,C11 +MATH,1210,2019,Summer,a,B7 +MATH,1210,2020,Spring,a,B55 +MATH,1210,2020,Spring,b,F13 +MATH,1220,2015,Summer,a,A2 +MATH,1220,2015,Summer,b,A55 +MATH,1220,2015,Summer,c,D10 +MATH,1220,2016,Spring,a,A41 +MATH,1220,2017,Spring,a,B83 +MATH,1220,2017,Spring,b,B9 +MATH,1220,2017,Spring,c,A79 +MATH,1220,2017,Spring,d,D45 +MATH,1220,2017,Summer,a,F96 +MATH,1220,2018,Spring,a,B12 +MATH,1220,2018,Spring,b,B97 +MATH,1220,2018,Summer,a,C55 +MATH,1220,2019,Fall,a,E93 +MATH,1220,2019,Fall,b,F4 +MATH,1220,2019,Fall,c,F39 +MATH,1220,2020,Spring,a,B96 +MATH,1220,2020,Summer,a,B64 +MATH,1250,2015,Spring,a,A68 +MATH,1250,2015,Spring,b,A47 +MATH,1250,2015,Spring,c,B50 +MATH,1250,2015,Spring,d,E54 +MATH,1250,2015,Fall,a,D99 +MATH,1250,2016,Spring,a,A34 +MATH,1250,2016,Summer,a,D65 +MATH,1250,2016,Fall,a,D55 +MATH,1250,2016,Fall,b,A82 +MATH,1250,2016,Fall,c,E20 +MATH,1250,2017,Summer,a,B20 +MATH,1250,2017,Summer,b,D76 +MATH,1250,2017,Summer,c,F88 +MATH,1250,2017,Summer,d,C90 +MATH,1250,2018,Spring,a,B8 +MATH,1250,2018,Summer,a,A59 +MATH,1250,2018,Summer,b,A40 +MATH,1250,2018,Summer,c,F95 +MATH,1250,2020,Summer,a,F34 +MATH,1260,2015,Spring,a,C94 +MATH,1260,2015,Spring,b,A43 +MATH,1260,2015,Spring,c,C68 +MATH,1260,2015,Summer,a,E81 +MATH,1260,2016,Fall,a,C21 +MATH,1260,2017,Summer,a,F15 +MATH,1260,2017,Fall,a,A2 +MATH,1260,2019,Spring,a,A71 +MATH,1260,2019,Spring,b,F95 +MATH,1260,2019,Spring,c,B42 +MATH,1260,2019,Summer,a,C35 +MATH,1260,2019,Summer,b,E48 +MATH,1260,2019,Fall,a,A23 +MATH,1260,2020,Spring,a,A52 +MATH,2210,2015,Spring,a,C12 +MATH,2210,2015,Spring,b,A48 +MATH,2210,2015,Summer,a,C95 +MATH,2210,2015,Summer,b,D48 +MATH,2210,2015,Summer,c,D99 +MATH,2210,2015,Summer,d,F70 +MATH,2210,2015,Fall,a,B20 +MATH,2210,2017,Spring,a,A43 +MATH,2210,2017,Summer,a,F94 +MATH,2210,2018,Spring,a,D63 +MATH,2210,2018,Spring,b,B92 +MATH,2210,2019,Spring,a,D90 +MATH,2210,2019,Spring,b,D96 +MATH,2210,2020,Spring,a,A76 +MATH,2210,2020,Spring,b,D85 +MATH,2210,2020,Spring,c,B38 +MATH,2210,2020,Fall,a,F95 +MATH,2270,2015,Fall,a,B100 +MATH,2270,2015,Fall,b,A20 +MATH,2270,2017,Summer,a,D40 +MATH,2270,2017,Fall,a,A21 +MATH,2270,2017,Fall,b,C91 +MATH,2270,2017,Fall,c,A28 +MATH,2270,2017,Fall,d,C19 +MATH,2270,2019,Spring,a,F39 +MATH,2270,2019,Summer,a,A52 +MATH,2270,2019,Summer,b,E96 +MATH,2270,2019,Summer,c,A60 +MATH,2270,2019,Fall,a,A2 +MATH,2270,2020,Spring,a,B17 +MATH,2270,2020,Fall,a,F11 +MATH,2270,2020,Fall,b,C10 +MATH,2280,2015,Summer,a,D17 +MATH,2280,2015,Fall,a,C16 +MATH,2280,2016,Fall,a,F51 +MATH,2280,2018,Spring,a,C36 +MATH,2280,2018,Fall,a,E32 +MATH,2280,2018,Fall,b,D53 +MATH,2280,2018,Fall,c,D8 +MATH,2280,2019,Fall,a,E32 +MATH,2280,2019,Fall,b,E3 +MATH,2280,2019,Fall,c,F46 +MATH,2280,2020,Spring,a,C73 +MATH,2280,2020,Spring,b,D35 +MATH,3210,2015,Spring,a,C8 +MATH,3210,2015,Spring,b,D68 +MATH,3210,2015,Summer,a,B21 +MATH,3210,2015,Fall,a,C69 +MATH,3210,2015,Fall,b,F8 +MATH,3210,2015,Fall,c,B74 +MATH,3210,2015,Fall,d,D46 +MATH,3210,2016,Spring,a,B23 +MATH,3210,2016,Fall,a,C76 +MATH,3210,2017,Spring,a,E73 +MATH,3210,2017,Summer,a,D70 +MATH,3210,2019,Spring,a,A43 +MATH,3210,2019,Spring,b,B17 +MATH,3210,2019,Fall,a,C8 +MATH,3210,2020,Spring,a,B100 +MATH,3210,2020,Summer,a,C10 +MATH,3210,2020,Fall,a,D76 +MATH,3220,2016,Spring,a,F63 +MATH,3220,2016,Spring,b,B91 +MATH,3220,2016,Spring,c,F79 +MATH,3220,2016,Spring,d,B86 
+MATH,3220,2016,Summer,a,B49 +MATH,3220,2016,Fall,a,B23 +MATH,3220,2016,Fall,b,F74 +MATH,3220,2017,Spring,a,E5 +MATH,3220,2017,Fall,a,E29 +MATH,3220,2017,Fall,b,A64 +MATH,3220,2018,Spring,a,B45 +MATH,3220,2018,Spring,b,B82 +MATH,3220,2018,Spring,c,A91 +MATH,3220,2018,Spring,d,F43 +PHYS,2040,2015,Spring,a,B53 +PHYS,2040,2015,Fall,a,A62 +PHYS,2040,2015,Fall,b,E84 +PHYS,2040,2015,Fall,c,B21 +PHYS,2040,2016,Spring,a,A38 +PHYS,2040,2017,Summer,a,B94 +PHYS,2040,2017,Fall,a,A44 +PHYS,2040,2017,Fall,b,E62 +PHYS,2040,2017,Fall,c,D84 +PHYS,2040,2018,Spring,a,B7 +PHYS,2040,2019,Spring,a,F94 +PHYS,2040,2019,Spring,b,F37 +PHYS,2040,2020,Spring,a,D20 +PHYS,2060,2015,Spring,a,F77 +PHYS,2060,2016,Spring,a,A61 +PHYS,2060,2016,Spring,b,C51 +PHYS,2060,2016,Summer,a,C12 +PHYS,2060,2016,Summer,b,D24 +PHYS,2060,2018,Summer,a,E8 +PHYS,2060,2018,Fall,a,A11 +PHYS,2060,2018,Fall,b,E53 +PHYS,2060,2018,Fall,c,E30 +PHYS,2060,2018,Fall,d,D67 +PHYS,2060,2019,Summer,a,D74 +PHYS,2060,2019,Summer,b,D39 +PHYS,2060,2019,Fall,a,F5 +PHYS,2060,2019,Fall,b,E74 +PHYS,2060,2019,Fall,c,E19 +PHYS,2060,2020,Spring,a,B22 +PHYS,2060,2020,Spring,b,B17 +PHYS,2060,2020,Fall,a,B81 +PHYS,2100,2015,Spring,a,C94 +PHYS,2100,2015,Spring,b,A12 +PHYS,2100,2016,Fall,a,F80 +PHYS,2100,2016,Fall,b,D15 +PHYS,2100,2017,Summer,a,A14 +PHYS,2100,2017,Summer,b,A37 +PHYS,2100,2017,Summer,c,C53 +PHYS,2100,2017,Fall,a,E78 +PHYS,2100,2018,Fall,a,F89 +PHYS,2100,2019,Summer,a,F31 +PHYS,2140,2015,Spring,a,C36 +PHYS,2140,2015,Spring,b,F88 +PHYS,2140,2015,Summer,a,B39 +PHYS,2140,2015,Summer,b,D100 +PHYS,2140,2015,Summer,c,C94 +PHYS,2140,2015,Fall,a,B57 +PHYS,2140,2016,Spring,a,F63 +PHYS,2140,2016,Spring,b,C8 +PHYS,2140,2016,Spring,c,B9 +PHYS,2140,2016,Summer,a,B100 +PHYS,2140,2016,Summer,b,E4 +PHYS,2140,2016,Fall,a,B8 +PHYS,2140,2017,Summer,a,F26 +PHYS,2140,2017,Fall,a,E51 +PHYS,2140,2017,Fall,b,A88 +PHYS,2140,2018,Summer,a,B61 +PHYS,2140,2018,Summer,b,C45 +PHYS,2140,2018,Fall,a,F89 +PHYS,2140,2019,Fall,a,B29 +PHYS,2140,2019,Fall,b,F27 +PHYS,2140,2020,Fall,a,F2 +PHYS,2210,2015,Fall,a,B33 +PHYS,2210,2015,Fall,b,C92 +PHYS,2210,2015,Fall,c,F36 +PHYS,2210,2017,Summer,a,E51 +PHYS,2210,2017,Summer,b,A66 +PHYS,2210,2017,Summer,c,C72 +PHYS,2210,2017,Summer,d,E37 +PHYS,2210,2018,Fall,a,F42 +PHYS,2210,2018,Fall,b,C84 +PHYS,2210,2018,Fall,c,F39 +PHYS,2210,2019,Spring,a,B8 +PHYS,2210,2019,Spring,b,E52 +PHYS,2210,2019,Spring,c,F18 +PHYS,2210,2019,Spring,d,F64 +PHYS,2210,2019,Summer,a,C54 +PHYS,2210,2019,Fall,a,E91 +PHYS,2210,2019,Fall,b,B44 +PHYS,2210,2019,Fall,c,B88 +PHYS,2210,2019,Fall,d,D86 +PHYS,2220,2015,Spring,a,E24 +PHYS,2220,2015,Fall,a,F72 +PHYS,2220,2015,Fall,b,B88 +PHYS,2220,2015,Fall,c,F12 +PHYS,2220,2016,Summer,a,D43 +PHYS,2220,2016,Fall,a,D16 +PHYS,2220,2017,Spring,a,E75 +PHYS,2220,2017,Spring,b,A61 +PHYS,2220,2017,Spring,c,E16 +PHYS,2220,2017,Spring,d,D68 +PHYS,2220,2018,Spring,a,B26 +PHYS,2220,2018,Summer,a,D19 +PHYS,2220,2018,Fall,a,A63 +PHYS,2220,2019,Spring,a,C82 +PHYS,2220,2020,Spring,a,E98 +PHYS,2220,2020,Summer,a,A17 +PHYS,2220,2020,Summer,b,F55 +PHYS,2220,2020,Fall,a,D1 +PHYS,3210,2016,Summer,a,B3 +PHYS,3210,2016,Summer,b,F94 +PHYS,3210,2016,Fall,a,C40 +PHYS,3210,2017,Summer,a,B9 +PHYS,3210,2017,Summer,b,C38 +PHYS,3210,2017,Fall,a,E44 +PHYS,3210,2018,Spring,a,B44 +PHYS,3210,2018,Spring,b,D46 +PHYS,3210,2018,Spring,c,B52 +PHYS,3210,2018,Fall,a,B94 +PHYS,3210,2019,Spring,a,A47 +PHYS,3210,2019,Spring,b,A49 +PHYS,3210,2019,Spring,c,C99 +PHYS,3210,2019,Spring,d,A77 +PHYS,3210,2019,Summer,a,F14 +PHYS,3210,2019,Summer,b,A7 +PHYS,3210,2019,Summer,c,D57 
+PHYS,3210,2019,Fall,a,D90 +PHYS,3210,2020,Spring,a,F2 +PHYS,3210,2020,Summer,a,F67 +PHYS,3210,2020,Fall,a,B54 +PHYS,3210,2020,Fall,b,A66 +PHYS,3210,2020,Fall,c,A37 +PHYS,3220,2016,Summer,a,B46 +PHYS,3220,2016,Summer,b,C21 +PHYS,3220,2017,Summer,a,C31 +PHYS,3220,2017,Fall,a,A74 +PHYS,3220,2017,Fall,b,B12 +PHYS,3220,2017,Fall,c,A93 +PHYS,3220,2017,Fall,d,C83 +PHYS,3220,2018,Summer,a,C34 +PHYS,3220,2020,Spring,a,C55 +PHYS,3220,2020,Spring,b,A98 +PHYS,3220,2020,Spring,c,A18 +PHYS,3220,2020,Spring,d,B43 diff --git a/tests/data/Student.csv b/tests/data/Student.csv new file mode 100644 index 000000000..bdcf87846 --- /dev/null +++ b/tests/data/Student.csv @@ -0,0 +1,301 @@ +student_id,first_name,last_name,sex,date_of_birth,home_address,home_city,home_state,home_zip,home_phone +100,Allison,Hill,F,1991-05-09,819 Anthony Fields Suite 083,Jacquelinebury,IN,01352,+1-542-351-1615 +101,Lindsey,Roman,F,1995-05-18,618 Courtney Tunnel Apt. 310,Kendrashire,UT,50324,(525)534-1928x327 +102,William,Bowman,M,2005-01-07,030 Morales Centers Suite 953,Randallside,IL,32826,(969)653-2871x01226 +103,Janice,Carlson,F,1989-07-16,0184 Peterson Green,North Jenniferchester,PA,67043,+1-489-325-2880x9570 +104,Sherry,Decker,F,2004-04-08,117 Spence Mountain,New Staceyville,NJ,28261,001-346-578-7133 +105,Alisha,Spencer,F,1994-03-10,031 Heath Circle,New Jasonland,NH,62454,+1-631-165-6670x106 +106,Rebecca,Rodriguez,F,1987-11-30,24731 Michelle Orchard Apt. 801,Allisonville,GA,53066,(064)746-8723 +107,Tracy,Riley,F,2005-02-24,97882 William Summit Apt. 136,Port Johnstad,MA,77004,(435)346-2475x10799 +108,Mr.,Daniel,M,1995-07-04,2784 Archer Ports Apt. 841,Taylorland,NV,36198,534.874.0164x0052 +109,Deborah,Figueroa,F,1994-05-30,12805 Hernandez Creek,Port Laura,VT,28036,586.923.2260x25634 +110,Meredith,Reyes,F,1997-03-09,75433 James Heights,Rasmussenburgh,MD,70783,001-142-940-1965x569 +111,Stephanie,Lee,F,1997-01-06,8356 Elizabeth Highway,Lake Jennifer,IA,54029,482-366-2994x68044 +112,Rachel,Lawson,F,1990-12-07,872 Campbell Prairie,Clarenceshire,IA,26601,3791769367 +113,Brittany,Watts,F,2003-02-04,632 Dominguez Lodge Suite 172,Contrerasshire,WV,58509,872-774-3487x34714 +114,Gabriella,Orozco,F,1998-11-11,2316 Amy Lakes,West Rebeccastad,TX,75957,(546)688-9373x467 +115,Gabriella,Shelton,F,1997-01-15,2980 Vargas Prairie,South Michelleville,KS,60099,646-417-0805x310 +116,Travis,Gonzalez,M,1996-07-14,19374 Jackson Place,Dannyfort,CO,03866,663.193.1491x905 +117,Mary,Jones,F,2002-05-15,7165 Poole Road,Lake Tammy,SD,71040,(945)314-7379x965 +118,Samuel,White,M,1994-03-13,9480 Lee Forest Apt. 837,Travisfort,HI,91174,957.885.6855 +119,Devin,King,M,1986-05-27,82337 Brittany Skyway,Tinafort,LA,40119,+1-240-084-2710 +120,Julie,Alexander,F,1993-08-06,711 Charles Plaza,East Annaburgh,CT,55049,+1-677-496-4990x913 +121,Deborah,Miller,F,1993-07-27,67974 Keith Gateway Suite 134,Weberfurt,MA,71877,421.024.9947x17464 +122,Johnny,Miller,M,1995-05-20,40139 Smith Spring,Johnstonmouth,MT,58464,(967)175-6551 +123,Gary,Steele,M,1987-09-04,807 Johnny Cove Suite 808,North April,MO,58440,(824)771-0932 +124,Adam,Russell,M,2000-01-14,12748 Perry Manors Apt. 782,Port William,UT,36709,840-449-9727x875 +125,Patricia,Williams,F,1988-06-19,627 Martinez Vista Apt. 171,Stephenchester,NC,20733,(459)615-8657x809 +126,Jade,Thomas,F,2004-07-08,221 Reyes Rapid Apt. 
923,East Jonathan,SD,38201,759-464-7436 +127,Ashley,James,F,1997-11-27,064 Michelle Spur,Lozanomouth,VA,30663,(394)210-4709 +128,Carlos,Browning,M,1990-09-16,85884 Scott Stream,Lake Julie,CO,10370,001-368-516-0481 +129,Megan,Chambers,F,2002-09-06,137 Nicole Park Suite 317,Turnerbury,WV,40394,382-675-8692 +130,Matthew,Bass,M,1986-08-24,53773 Garcia Rapids Suite 506,Port Stacy,CA,28302,5329318393 +131,David,Schroeder,M,1998-03-28,22842 Michelle Crescent Apt. 395,East Davidbury,AR,59257,(178)390-8470x0766 +132,John,Browning,M,1989-10-24,1249 Kelley Heights,Schmidtview,CO,92484,+1-836-736-5766x1565 +133,Brittany,Leblanc,F,2002-04-29,15280 Hoffman Highway Apt. 560,Burkeborough,GA,86580,(158)514-9368 +134,Dr.,Louis,M,1993-03-28,402 Kathryn Valleys Apt. 229,Chadmouth,CA,70032,752-545-9910x2290 +135,Denise,Stanley,F,1993-02-08,81561 Erika Meadow,Brandonbury,AL,40008,+1-445-107-6226x838 +136,Michael,Gomez,M,1994-03-14,7159 Richard Port Apt. 605,Port Stevechester,MI,14376,681-645-3521x81883 +137,Hannah,Luna,F,1996-11-30,24329 Katherine Circles Suite 779,Coleside,NY,82358,+1-527-177-4490x5814 +138,Anthony,Decker,M,1997-08-09,998 Betty Villages Suite 079,Marcport,AR,14067,001-182-037-7889x255 +139,George,Harper,M,1988-10-20,18644 Douglas Underpass Suite 519,Sabrinaburgh,NC,17402,652.816.8505 +140,Tiffany,Peterson,F,1998-09-26,214 Garcia Springs,Stephensontown,RI,17677,292-706-5379 +141,Nicole,Cole,F,1990-08-18,735 Hudson Loaf,Stricklandport,DC,26675,+1-075-818-1412x4782 +142,Susan,Velasquez,F,1986-02-05,6853 Christopher Flat Apt. 152,West Mariachester,OH,59300,001-043-289-8614x341 +143,Jennifer,Bauer,F,1988-10-31,980 Andrews Roads,North Michael,FL,88085,(518)888-8067x06540 +144,Austin,Allen,M,2001-06-29,5205 Li Drives,Marshallchester,SD,08771,3030548687 +145,Nicole,Lee,F,2000-05-12,541 Kim Knoll Apt. 652,South Sandra,SC,95801,9284511544 +146,Michelle,Jackson,F,2000-10-29,596 Tina Village,New Michaelfort,WV,19215,1355690927 +147,Jacqueline,Hines,F,2001-04-19,4310 Porter Junctions Suite 447,New Heathershire,CT,10207,(715)518-8442 +148,Timothy,Little,M,1988-06-05,32370 Ashley Loop Suite 291,West Jenniferport,MD,75854,517-785-2892 +149,Carl,Shaw,M,1991-08-28,4225 Perez Village Suite 414,Port Joshuastad,CA,84516,922.995.9001x094 +150,Randall,Butler,M,1996-10-13,4473 Cohen Green,North Scottport,NJ,41471,001-562-588-1537 +151,Jerry,Thomas,M,1994-02-09,632 Peck Roads Apt. 278,Port Tyler,MD,60431,(500)479-7480 +152,Jessica,Khan,F,2004-11-24,6098 Angela Circles Suite 849,Davidshire,SC,44945,001-239-868-0002x578 +153,Jordan,Hicks,M,2005-10-09,0551 Silva Squares Suite 097,New Teresa,HI,07232,(896)230-9130x7562 +154,Christina,Shaw,F,1994-11-30,028 Mark Prairie,Leeville,KY,46938,334.843.4437x5758 +155,Robert,Hill,M,1994-01-22,6524 Stephanie Cliff Suite 473,South Sarahchester,NM,77418,833.016.5712 +156,Krista,Hickman,F,1987-02-26,734 Debbie Union Apt. 938,Melissatown,MA,23541,001-672-400-4991x547 +157,Teresa,Rosales,F,1997-01-28,27420 Gibbs Parks,Thompsonhaven,TN,68039,122-753-0463 +158,Debra,Rivera,F,1998-08-19,53017 Richard Mills Suite 414,East Susan,MN,79896,878-339-1878x51910 +159,Stephanie,Harris,F,2001-08-26,713 Burns Turnpike,North David,NV,73743,406.403.9106x51801 +160,John,Mitchell,M,1986-09-10,656 Sally Isle Apt. 825,Port Phillipland,TN,99614,001-786-863-3752x431 +161,Timothy,Small,M,2005-07-09,7903 Morales Ford,Port Brianport,SD,96382,953.428.3644 +162,Jamie,Webster,F,1998-10-02,27086 Grant Crest Apt. 351,Booneton,FL,35688,901.398.3735x40331 +163,Paul,Rocha,M,1987-06-23,3854 Amanda Island Apt. 
877,Port Terrancefort,LA,54755,320.489.9642x353 +164,Sandra,Porter,F,1993-10-17,77725 Jennifer Meadow Suite 808,Lake Sierrafurt,MA,83168,2038750997 +165,Alexis,Patel,F,2003-10-31,840 Wolfe Lane,Whiteside,ID,81736,546.156.7933 +166,Jonathan,Hamilton,M,1986-06-14,180 Rachel Rest Suite 401,Juanmouth,FL,41721,001-926-142-9396x856 +167,William,Brown,M,1988-06-02,9965 Joshua Well Apt. 586,New Donna,NM,32803,262-655-1104 +168,Philip,Garcia,M,2004-12-15,8610 Angela Pine,Shieldstown,RI,95507,001-398-262-2444x721 +169,Desiree,Evans,F,2000-07-27,799 Daniel Grove,Cookstad,KS,44375,+1-924-593-7526x5479 +170,Erika,Ramirez,F,1999-11-03,398 Katrina Burg,Sherryville,TN,09565,243.426.6179x79688 +171,Sergio,Barnes,M,1989-07-10,891 John Prairie Apt. 909,Byrdbury,WI,56921,4388899375 +172,Patricia,Chapman,F,2001-04-24,14611 Cross Inlet,Lake Adriana,CA,95134,401.051.2382 +173,Gary,Simmons,M,1992-04-12,2660 Ware Locks Apt. 033,New Laura,SC,70872,371-478-5969x6915 +174,Jimmy,Thompson,M,1991-10-25,912 John Cove Apt. 286,North Patrick,NY,91390,(742)257-9050x72368 +175,Jon,Cohen,M,2004-05-12,1903 Joshua Mountains Apt. 797,Danielland,SD,48586,+1-078-361-3407x4517 +176,Autumn,Cain,F,2003-06-04,962 Glover Stravenue Suite 958,South Mario,IN,35542,001-126-042-2325x367 +177,Mark,Brooks,M,1999-06-14,684 Wiley Locks Apt. 901,Stephenfurt,AR,70549,(637)454-5892 +178,Karina,Cooper,F,1989-02-04,70127 Victoria Lane,Blankenshiphaven,UT,36417,415.206.4361x10371 +179,Courtney,Frazier,F,2005-01-31,627 Patrick Row Apt. 554,Lake Karenland,DE,70035,2753269731 +180,Charles,Martinez,M,2003-07-15,2341 Carolyn Roads,Port Anthony,UT,27429,364.037.6137x9180 +181,Timothy,Anderson,M,2000-05-01,710 Smith Field,Frybury,OK,54952,+1-188-924-1418 +182,William,Moore,M,1990-08-03,146 Mathis Center Apt. 617,Brianfurt,DC,02161,+1-275-884-2524 +183,Bruce,Yoder,M,1989-11-04,4917 Michael Mill,Michaelberg,NH,95237,(800)030-7562 +184,Toni,Johnson,F,1996-06-28,3536 Flores Stream Suite 180,Lake Tinashire,MN,37503,870-534-9493x759 +185,Dr.,Patty,F,1989-01-31,60385 Steele Branch Apt. 641,Port Robertshire,DE,37178,3865719182 +186,James,Vargas,M,1996-05-29,44565 Joseph Circles Apt. 912,South Leeland,RI,59734,(112)490-3521x356 +187,Amy,Norman,F,1987-05-16,1994 Jones Wells,New Lisaton,SD,16560,001-029-667-0662x532 +188,Sophia,Johnson,F,1998-02-20,68701 Derrick Extensions,Foxstad,SC,50635,(759)856-4205x930 +189,Whitney,Robinson,F,2002-08-10,2239 Joanna Island Suite 599,Port Maryfort,NE,23511,0393087059 +190,Teresa,Foster,F,1995-12-10,26752 Hoffman Tunnel,Michaelfurt,ME,96707,096-902-9593 +191,Brian,Crawford,M,2000-01-03,5215 Joseph Forges,East Danieltown,OR,22303,(658)617-9327x1040 +192,Trevor,Jones,M,1992-05-20,815 Austin Manors,Port Frederickhaven,CO,27442,884-443-1069x87205 +193,Brandon,Colon,M,1998-06-27,32417 Parker Keys,New Christopher,FL,50497,(047)743-4902 +194,Michael,Miller,M,2005-05-13,938 Paul Mount Suite 793,North Raven,MO,68241,921.722.3320x61632 +195,Lisa,Mills,F,1987-03-12,99119 Floyd Track,Humphreyburgh,NH,62504,(629)960-6530 +196,Thomas,Prince,M,2003-06-14,47132 Julia Springs Apt. 
691,East Madisonmouth,UT,07868,+1-148-628-9023x303 +197,Anthony,Ward,M,1988-12-29,6103 Brooke Drives,Matthewsborough,VT,98668,602.933.3346 +198,Sharon,Coffey,F,2001-10-19,29034 Hahn Road,Joshuaside,MN,29102,896.910.8589 +199,Edwin,Rodriguez,M,1999-09-08,4443 Kathy Turnpike Suite 965,Jenniferfurt,IL,55363,099-353-8758x4282 +200,John,Figueroa,M,1988-05-05,513 Julie Groves Suite 554,Stevenland,NY,76563,(381)684-6022x356 +201,Stephanie,Hatfield,F,2000-07-12,52500 Jason Springs,Ericmouth,CT,57348,760-083-5058x30033 +202,Gregory,Anderson,M,1990-05-20,04478 Morgan Tunnel Suite 575,Martinside,AL,29903,(098)215-0648 +203,Linda,Williams,F,2003-04-29,16761 Wells Dale Suite 046,Elaineburgh,CT,14252,+1-141-173-9348 +204,Mr.,Jason,M,1995-12-29,753 Emily Union Suite 721,Joneschester,NY,60368,012.045.5611 +205,Stefanie,Smith,F,1991-05-06,79415 White Knoll Suite 467,Banksfort,OH,08187,979-729-6590 +206,Sheryl,Acosta,F,1997-06-06,6701 Leon River,Katrinamouth,WI,88298,(916)375-6289x0028 +207,Samuel,Booth,M,2002-11-04,40838 Powell Ford,Lake Shane,MI,16060,001-016-608-8019 +208,Miss,Stefanie,F,1998-01-01,0375 Harvey Mall,Jenniferland,HI,45243,+1-488-510-2726x1493 +209,Tara,Long,F,2005-10-29,160 Monroe Path Suite 779,Taylorport,AZ,57230,(829)221-6995x8669 +210,Stacey,Hunt,F,2000-02-15,83339 Parks Valleys Apt. 288,Marcusland,MS,75295,846.081.0620x03424 +211,Brianna,Brown,F,1987-07-09,5719 Stevenson Trace,Annaberg,SC,38202,001-665-800-4397x359 +212,Craig,Hardy,M,1991-03-10,122 Wilson Camp,East Eugene,AL,61623,5909479851 +213,Evan,Robinson,M,1986-03-21,6886 Jeffrey Field,West Jeffery,NE,74076,573-993-0561 +214,Carol,Huber,F,1997-03-16,36138 Johns Run,Lake Charles,AK,94462,1024819346 +215,Mark,Hamilton,M,2004-01-26,9190 Jones Via Apt. 491,Port Patrick,AK,20990,(684)245-0882 +216,Aaron,Carlson,M,1988-03-18,53682 Jeffrey Street Apt. 290,Randolphshire,NV,38597,397.552.3149 +217,Cheryl,Tucker,F,1998-02-15,299 Leslie Lane Apt. 336,West Erin,MS,58874,+1-781-291-4283x411 +218,Sarah,Welch,F,1998-04-20,308 Patricia Mountains Suite 256,Lake Jessicaburgh,MT,52508,(392)827-2299x2750 +219,Katherine,Brown,F,1991-11-01,56770 Deborah Course,Schultzburgh,NH,75233,659-184-6386x5577 +220,Adriana,Macias,F,1993-02-01,4322 Carolyn Stravenue,Robertborough,ND,63287,603.029.9228x092 +221,Roberto,Valentine,M,1990-06-02,7236 Norton Stravenue Apt. 842,Matthewview,HI,51024,388-629-1279 +222,Sherry,Schmidt,F,2005-07-09,9806 Wood Camp,Jeromefort,ME,77708,247-314-9864 +223,Michelle,Clarke,F,1992-11-06,35651 Denise Fork,Hendersonborough,ND,99456,872-588-7449x56213 +224,Melissa,Martin,F,1988-08-22,8902 Cynthia Squares,Ruizstad,IL,49107,669.849.0277x0384 +225,Richard,Dixon,M,2005-10-02,530 Miller Gardens Apt. 669,North Janeside,OR,73785,439-376-9042x681 +226,Kathy,Morgan,F,1993-09-28,89476 Carrillo Shores Suite 779,Olsonberg,SC,29386,+1-658-804-3416x5182 +227,Hayden,Shannon,M,1987-05-11,373 John Fort Apt. 395,North Samanthafurt,NM,71473,+1-595-794-7284x6392 +228,Jay,Ayers,M,1994-11-11,271 Stevens Rest,East Biancaborough,IL,72402,(795)527-6365 +229,Jennifer,Hayes,F,1996-02-16,143 Chase Extensions Suite 270,South Wendyhaven,OK,64283,906.120.3471 +230,Felicia,Ward,F,2001-09-12,06159 Barbara Ports Apt. 455,Tonychester,ME,38056,225.699.6112x5355 +231,Michael,Jacobs,M,2003-10-01,598 Gutierrez Estates Apt. 341,West Codyside,AZ,52538,+1-114-921-6433x472 +232,Ryan,Johnson,M,1988-12-19,77848 Tara Ridge Apt. 
979,New Amanda,MS,30271,(564)240-0825x478 +233,Thomas,Arroyo,M,1994-11-13,4930 Lopez Trail,East Jennifer,TN,29414,3894484631 +234,Dylan,Walsh,M,1993-04-23,3502 Amanda Estates,East Jenniferchester,DE,65195,475-705-1204x618 +235,Corey,Skinner,M,2003-08-24,36730 Jill Corner Suite 376,Larryborough,AZ,72535,743-503-1365 +236,Rebecca,Richards,F,1987-12-15,979 Kelli Forge,New Matthew,PA,08372,281-273-5857x306 +237,Brandy,Roach,F,1994-11-17,73928 Jessica Garden,Rochamouth,DE,39255,(708)620-9593x51863 +238,Kathleen,Arnold,F,2003-10-23,1181 Sharon Estate,North Jamestown,ME,64714,940.539.1037x1705 +239,Teresa,Perry,F,1992-01-03,480 Davenport Cliff Apt. 811,Amandaville,ID,82463,(861)957-6122x86852 +240,Krista,Garner,F,1995-04-23,004 Holmes Well,West Jeffrey,AK,90903,001-889-921-0752x245 +241,Danielle,Scott,F,2000-02-03,3157 Margaret Rest Suite 194,Lake Patrickmouth,KY,57426,001-139-060-4805x892 +242,Connie,Williams,F,2000-09-13,9981 Keith Key,North Ashleytown,CA,66275,+1-227-837-6938x983 +243,Deborah,Jordan,F,1988-11-02,66553 Brittney Brooks Apt. 597,Scottside,ND,20947,039-240-5147 +244,Evelyn,Singh,F,1986-03-15,879 Thomas Ridges Apt. 980,North James,IL,61444,4510463681 +245,Kari,Harper,F,2002-12-22,800 Alyssa Hill,East Michael,NM,31460,046.084.3256 +246,Jessica,Edwards,F,1988-03-23,29832 Janet Mount,Port Theresaland,VA,42115,(125)205-6647x42312 +247,Pamela,Salazar,F,1995-02-06,33051 Woods Mills Suite 526,North James,PA,02468,001-333-127-9757x366 +248,Roger,Cortez,M,1992-05-18,8808 Stephen Trail Suite 388,Lake Angela,NY,06962,644.726.4908 +249,Julie,Lucas,F,1989-01-08,98266 Angel Locks Suite 371,New Rebecca,OK,16694,751-868-9268 +250,Patricia,Barr,F,2002-09-16,22064 Kayla Lock Suite 123,Lake Alexanderport,SD,80190,(977)671-9903 +251,Donald,Fuller,M,2005-05-23,05020 Massey Greens,Williamsbury,ND,80597,+1-279-501-4556x168 +252,John,Martinez,M,2000-06-13,3390 Jessica Plaza,Webbchester,WY,38143,548.995.2997x8772 +253,Crystal,Roberts,F,1996-02-19,1396 Matthew Park,Alexville,SC,40841,(501)556-9902x3557 +254,Rebecca,Brewer,F,1988-03-04,857 Gutierrez Shoal Suite 495,Andrewmouth,VA,46847,001-405-682-9962x914 +255,Brandon,Wiley,M,2003-06-25,84215 Strickland Unions Apt. 078,West Timothyhaven,KS,13379,230.768.1040x91570 +256,Pamela,Reese,F,2004-08-11,3533 Amanda Springs Suite 422,North Cindy,GA,46417,249.321.4958 +257,Carlos,Ruiz,M,2001-10-06,66299 Vaughn Lock,West James,SD,10796,171.747.7332x945 +258,Michael,Ortega,M,1996-03-13,0171 Steven Drive Suite 992,Richardchester,NV,09797,(696)393-8276x15396 +259,Jessica,Cobb,F,1998-10-24,1971 Ford Oval,Thompsonshire,CO,78673,013-290-2278x469 +260,Christina,Maldonado,F,1989-08-26,465 Aguilar Plain Suite 240,South Brian,SD,47587,+1-036-965-6666x8327 +261,Janice,Middleton,F,2001-06-08,220 Alfred Roads,South Veronica,NY,55008,001-969-278-6876x532 +262,Adam,Jimenez,M,1988-12-05,89500 Bush Courts Apt. 128,Terrellmouth,AR,80464,189.490.5807 +263,Taylor,Berry,M,1995-11-05,442 Sandra Shoals,Anneton,DC,07266,+1-904-712-8144x2944 +264,Adrian,Rodriguez,M,2000-11-23,75243 Lauren Throughway Apt. 129,Mooreport,RI,31689,001-239-504-1027 +265,Eric,Reese,M,1995-03-12,6742 Graham Glen Suite 658,Blakeside,WV,57096,414-967-3938x525 +266,Michael,Decker,M,1990-01-01,75344 Andrew Common,Douglasfort,NY,93309,926-921-2447 +267,Robin,Thompson,F,1985-12-12,62712 Reynolds Plains Apt. 
741,North Jessicamouth,MO,86073,001-642-569-0877x661 +268,Janice,Norris,F,1992-10-30,5546 Wendy Port,Lake Matthew,PA,38506,(063)461-5717 +269,Charles,Lee,M,2001-07-07,1847 Flowers Locks Suite 050,Lake Richard,NC,69067,001-829-310-2707x903 +270,Mark,Conway,M,1990-01-11,9111 Lauren Fields,Simmonsfort,ND,42999,001-982-530-9251x142 +271,Ann,Pearson,F,1996-03-02,723 Joseph Locks,East Heatherstad,NM,12038,083-318-1958x837 +272,Mary,Hill,F,1991-11-27,772 Sandra Causeway Apt. 364,Lake Katherine,OR,70933,078-113-7995 +273,Nicole,Villanueva,F,1992-07-11,36363 Brenda Causeway,East Chelsea,ME,60497,435.209.0421x7762 +274,Daniel,Phillips,M,2000-09-10,298 Miller Terrace Apt. 397,Ramirezchester,ID,43400,929.060.0780x686 +275,Rebecca,Nicholson,F,2001-09-12,0632 John Wells,New Evanview,NH,60117,+1-625-701-6580x464 +276,Logan,Johnston,M,1994-01-14,5085 Rodriguez Islands Suite 552,Janetmouth,DE,44400,(793)355-4864x01557 +277,Kelsey,Martinez,F,1990-12-14,4795 Dougherty Station Suite 137,West Haroldshire,DC,15184,(380)468-2756x7043 +278,John,Wade,M,1991-11-20,9242 Perez Islands Apt. 025,Port Christine,NE,24392,+1-223-105-9274x5238 +279,Mary,Spence,F,1995-12-23,841 Sullivan Mill,South Luketown,WI,43922,(492)975-1702x814 +280,Lisa,Robinson,F,1996-09-24,3983 Wang Extensions,Lake Ericashire,MD,64787,805.626.5650x4554 +281,Shannon,Miller,M,1998-09-15,426 Perry Street Suite 234,Port Valerie,WV,99606,646-287-9232 +282,Donna,Henry,F,1992-01-09,7873 Aaron Fort,Flowersview,VT,55178,(301)471-9597x9647 +283,Dr.,Jacqueline,F,2003-05-28,2572 Brian Island,Stephanietown,NY,10570,(219)285-5445 +284,Lauren,Morrow,F,1989-11-19,7652 Eric Fields Apt. 898,Marquezchester,MA,10514,+1-075-452-7985x2401 +285,Shannon,Thomas,F,1996-03-07,16110 Todd Camp,Lake Williamton,ID,09184,119.393.2501x24955 +286,Kathryn,Chandler,F,1992-01-27,90833 Jackson Shore Apt. 138,Wellschester,ND,14568,+1-663-836-1517x1827 +287,Michele,Hawkins,F,1992-01-08,47947 Richard Way,Lake Patricia,WA,48662,7167811266 +288,William,Figueroa,M,1999-07-16,3539 Powell Ford,South Kathy,NJ,99631,967-842-7114x773 +289,Chad,Garcia,M,2002-11-10,269 Hernandez Plains,North Karenmouth,GA,87282,(485)880-0616x7567 +290,Andrew,Hawkins,M,1991-03-28,762 Paul Skyway,Tracymouth,MN,74196,(647)969-5450x0902 +291,Hannah,Harmon,F,1987-03-11,1655 Brian Forest Apt. 491,Jonesburgh,AK,43245,(698)640-7905x696 +292,Brent,Freeman,M,1996-01-14,5294 Ryan Mews,Cobbfort,IN,06731,001-639-191-9541x987 +293,Angela,Colon,F,1993-03-01,5366 Zachary Ramp,Nicolestad,FL,65932,748.969.0835x72324 +294,Alexis,Robles,M,1986-08-06,603 Derek Forks,Hopkinsville,WI,64181,1594165162 +295,Laura,Mason,F,1994-07-28,8471 David Station Apt. 963,Robinsonland,IN,54027,+1-078-515-8673x4257 +296,Alex,Rasmussen,M,1996-02-27,0348 Danielle Ridges Suite 183,Priceside,WI,33994,343-275-6041 +297,Todd,Ruiz,M,1999-07-21,124 Bell Pines Suite 570,Davidsonville,NY,00904,(459)112-3829 +298,Ricky,Flores,M,1992-08-31,95431 Hunter Trail Suite 930,Leblancfurt,VA,61111,206.969.4215 +299,Keith,Smith,M,1992-01-21,713 Lee Throughway Suite 476,Lake Carolshire,ND,55332,204-439-7359x71072 +300,William,Sanders,M,1987-06-20,9411 Williams Viaduct,West Catherine,SC,93505,8964652809 +301,Christopher,Vasquez,M,1994-11-23,86241 Tiffany Mill,Campbellborough,VA,35001,(625)728-7032x0320 +302,Carla,Mcdonald,F,2005-11-05,7587 Daniel Roads Apt. 513,Whiteville,IL,87419,(089)261-3715 +303,Melanie,Becker,F,2005-04-14,520 Mariah Prairie Apt. 
490,North Cindy,WV,96749,045-018-9616 +304,David,Wise,M,2003-05-13,66421 Laurie Rue,Mckeestad,CA,48664,(767)499-6165 +305,Jessica,Simmons,F,1994-05-19,3278 Warren Glens,Port Tim,CT,39876,(490)810-8186x61794 +306,Lauren,Mack,F,1994-09-28,2601 Janet Harbor Suite 794,Port Lisa,AR,79675,+1-168-006-1027x7697 +307,Valerie,Ward,F,1988-11-06,4122 Daniel Bridge Suite 037,Debraview,SC,25524,727.601.2277 +308,Scott,Richards,M,2002-07-09,050 Melanie Light Apt. 799,Yolandatown,MT,95477,(080)695-8146 +309,Audrey,Dean,F,1995-11-26,2437 Jesse Fields,Morganstad,NC,17692,001-665-729-3417 +310,Christina,Obrien,F,1997-05-30,433 Kidd Island,New Gregg,MO,08845,931-837-4550x84289 +311,Michael,House,M,1991-04-06,119 Garrison Corners,Williamville,GA,47901,001-787-125-5213 +312,Jennifer,Mack,F,1998-03-25,8214 Kari Island Suite 286,Taylorview,VT,68154,001-720-811-5562x606 +313,Margaret,Orr,F,1992-11-24,846 Erin Oval Apt. 550,Mcculloughstad,MD,84895,001-997-563-4108x562 +314,Kimberly,Lewis,F,2003-03-10,2008 Allen Springs,Valerieland,ME,82681,017-490-7539x989 +315,Elizabeth,Estrada,F,1999-08-16,68315 Lee Spur Apt. 266,North Pamelaport,LA,69478,864.976.7762x282 +316,Judith,Faulkner,F,1995-12-03,770 Raymond Islands Suite 961,New Billyland,WY,40249,(229)604-4327x0185 +317,Amanda,Olson,F,1999-11-09,6792 Wagner Lodge,South Michelle,SC,87598,658-074-1209x4818 +318,Tina,Weaver,F,1997-06-27,7801 Schmidt Vista Apt. 339,Lake Catherine,AZ,03550,608-564-1118x24224 +319,Christian,Farley,M,2005-11-10,200 Corey Crossroad,Scottside,AZ,31908,(886)140-5786 +320,Sarah,Mason,F,2002-04-29,2386 Peters Camp,Woodwardstad,DC,08388,465.398.4028 +321,Elizabeth,Foster,F,1996-11-11,4639 Pham Trail,Reidshire,IL,87306,795-020-9700x268 +322,Michele,Farmer,F,2001-01-17,1807 Gomez Station Suite 562,Cainshire,LA,25796,0453194337 +323,Mr.,Johnathan,M,1988-02-18,614 Snyder Oval,Arielfurt,AR,17310,938-430-8948 +324,Aaron,Simmons,M,2005-05-17,566 Erin Lodge Apt. 030,West Shane,FL,11223,+1-361-332-5411x0760 +325,Mark,Cook,M,1998-10-05,50583 Parsons Plains,Garrettmouth,AR,04871,120.704.9611 +326,Kristin,Phillips,F,2003-07-08,399 Patrick Square,Harveyborough,RI,60017,311-091-9392x845 +327,Nathaniel,Wallace,M,2003-03-05,49685 Nicole Springs Apt. 495,Port Zachary,DE,31615,+1-806-533-3153x7795 +328,Kylie,Rogers,F,1992-03-09,07303 Owens Ferry,Lake Lisa,ME,52970,+1-050-150-8124x7395 +329,Allen,Gonzalez,M,1998-08-03,583 Andrew Streets Suite 026,Nicoleborough,MN,48950,896.112.2338x65596 +330,David,Williams,M,2003-03-30,530 Ramirez Creek Suite 973,Kristenfort,DC,51372,872-558-7774x9690 +331,Stephanie,Hayes,F,2000-06-01,6925 Christopher Shore,South Jerry,MT,44590,(665)754-6027x341 +332,Bradley,Kirby,M,2004-05-25,311 Benjamin Fall Apt. 544,Kaylahaven,NJ,18571,001-044-566-9078x263 +333,Paul,Wells,M,1986-04-01,751 Jacob Springs Suite 377,Johnsonland,IA,97206,(553)666-8459x0902 +334,Troy,Rivera,M,1988-04-13,6636 Paul Mall Apt. 741,New Gregoryfort,AK,26584,001-643-348-1705x802 +335,Michelle,Wells,F,2001-06-11,8743 Douglas Centers Apt. 
385,Suarezview,OR,38238,469-263-2967x629 +336,Michael,Williams,M,2003-01-30,841 Bowen Field,Port Angela,AR,14292,+1-567-243-8070x176 +337,Jennifer,Lee,F,1989-05-04,257 Carlos Orchard,Port Donaldfort,DC,02868,(186)210-4275 +338,Michelle,Stafford,F,1986-11-14,81647 Adam Springs,Mcfarlandbury,CA,55771,001-531-312-2068x155 +339,Taylor,Foster,F,1996-03-06,52065 Jason Fields,Joshuastad,VT,54384,+1-718-924-1956x252 +340,Stephen,Stewart,M,2000-07-01,9976 Harmon Mills,Alexandertown,CT,31485,001-910-257-4326 +341,Amanda,Mclean,F,1993-06-27,524 Kristin Bypass Suite 640,Lake Matthewville,VA,33051,685.270.1713x0232 +342,Christina,Coleman,F,1986-08-05,3471 Ward Isle,West Chelsea,DE,63677,+1-614-982-8246x747 +343,Kristina,Castillo,F,1999-01-05,30085 Sara Views Suite 567,Port Charles,WY,16816,001-236-458-7506x633 +344,Robert,Mccoy,M,1992-05-05,4972 Carrie Villages Suite 011,Sabrinabury,VT,68466,+1-264-488-6946x1195 +345,Daniel,Goodman,M,2005-03-19,70116 Pena Row,West Janeville,WV,59570,+1-230-234-6791x2141 +346,Destiny,Peterson,F,1994-12-18,100 Stephanie Prairie,Williamsberg,ME,68668,001-759-655-5535x669 +347,Shane,Drake,M,1999-12-23,209 Alyssa Village,Wrightview,UT,67991,050.505.7397x69156 +348,Todd,Alvarez,M,2001-02-07,64932 Walter Spurs Suite 027,Turnerfurt,UT,22528,001-783-332-1160x256 +349,Greg,Kent,M,1988-01-10,8633 Kelly Courts Apt. 931,Davidburgh,OR,41238,366.552.8993x160 +350,Nicole,Sweeney,F,1993-07-30,81497 Lewis Glens,Brownfort,OK,96531,+1-027-642-0865 +351,John,Bailey,M,2005-07-22,438 David Shore,Lindahaven,MN,21956,742-333-0591 +352,Kara,Landry,F,1986-04-25,6263 John Meadow Suite 261,Hancockfurt,NC,48646,117-830-9997 +353,Nichole,Bauer,F,2003-12-15,6492 Bryan Union,Lopezfort,NV,70810,(898)131-2920x8751 +354,Kenneth,Delgado,M,2004-02-03,118 Tammy Drive,Barrettberg,WV,38957,(975)859-8831x030 +355,Jennifer,Pierce,F,1998-10-24,71462 Jones Row Suite 359,Loristad,DE,57337,9314181861 +356,Brandon,Blankenship,M,1989-03-03,401 Tanya Isle,Port Gregorychester,SD,64676,(948)491-0256x25889 +357,Jennifer,Vargas,F,1995-04-21,226 Adams Valley Suite 539,South Scott,MN,38095,001-834-146-5111x312 +358,Patrick,Spencer,M,1997-08-29,682 Zachary Wells Suite 160,Rhondamouth,OH,98761,890.972.8321 +359,Casey,Gomez,M,1987-02-15,15381 Timothy Fort,New Phillipside,WV,68072,001-970-509-7545x105 +360,Adam,Jordan,M,1991-06-05,617 Kayla Forges Apt. 545,East Lisa,MI,58088,605-313-4026 +361,Erin,Johnson,F,1993-12-19,416 Tyler Rapid Apt. 686,Port Lauraland,AL,90211,5690674471 +362,Danielle,Hernandez,F,1990-12-24,436 Jasmine Station,Wayneville,NJ,83663,(260)432-6093 +363,Anthony,Russell,M,1995-08-17,56708 Brett Court Apt. 563,North Blake,OR,28285,(916)247-5541x108 +364,Carlos,Ward,M,1988-06-19,9534 Patrick Tunnel Apt. 
910,Rhondafurt,OH,13429,001-954-738-2023x684 +365,James,Lawson,M,1994-01-09,9087 Le Forks,Phillipsburgh,HI,70436,242.403.3810 +366,Mackenzie,Compton,F,1989-07-16,426 Phillips Way Suite 053,Joshuaberg,NC,76950,001-649-837-3543 +367,Robert,Mullins,M,1996-06-21,527 Hunter Estates,Lopezport,NC,03259,(269)312-1637 +368,Tracy,Garcia,F,1989-07-15,916 Daniel Bridge Suite 023,Adamsside,SC,01732,(513)279-7245x72308 +369,Mark,Martinez,M,2002-08-27,86203 Ronald Curve,Jeremiahhaven,VT,15234,(131)451-9515 +370,Thomas,Huang,M,1988-07-08,9262 Mcdaniel Plaza,Port Joseph,LA,35287,+1-225-267-7119x642 +371,Wendy,White,F,1988-10-06,6952 Valdez Forge,South Amanda,SD,50914,689.313.5030x587 +372,Tammie,Brown,F,1998-07-26,247 Melissa Walk Suite 333,North Suzannechester,AK,56168,1917920252 +373,Angela,Carroll,F,1986-04-16,28476 Wallace Port,North Brianfurt,DC,21518,678-498-4362x4186 +374,Beth,Lewis,F,1995-02-07,891 Mcdonald Harbor,Margaretville,NY,26024,159-503-4281 +375,Linda,Avila,F,1999-03-18,0341 Cunningham Park Suite 005,West Tinamouth,MO,41719,001-215-681-8209 +376,John,Melton,M,2003-09-22,113 Aguirre Ports,Martinshire,OR,85880,001-572-545-9606x339 +377,Brittany,Burton,F,1990-09-12,48171 Geoffrey Green Apt. 955,East Kelseyberg,IL,58440,001-970-546-6927x589 +378,Michael,Hunter,M,2001-11-10,903 Castro Dale Apt. 629,North Paul,CA,61564,711.216.6365x15597 +379,Natalie,Wilson,F,1988-10-06,235 Huerta Springs Apt. 567,East Andrewmouth,ID,23583,461-476-8342 +380,Anna,Valenzuela,F,1996-12-07,56778 Martin Ridge Apt. 960,Patriciaville,NH,19456,502.727.5164x80727 +381,Kenneth,Johnson,M,2003-01-01,296 Jason Extension,Stephaniebury,IA,40735,+1-177-665-5868x5127 +382,Christopher,Larson,M,2004-06-14,649 Bullock Corners,Lake Christophertown,CO,98797,789-046-3378 +383,Christina,Harrison,F,2003-07-30,660 Casey Mission Apt. 446,Adamside,AK,49575,+1-955-296-3863x9609 +384,Todd,Myers,M,1989-02-03,26312 Welch Spurs,Burtonberg,WV,27208,609-209-8196 +385,Morgan,Lucero,F,1990-02-03,34383 Roman Isle Apt. 041,Burtonfurt,CO,60679,442-117-5361 +386,Joanne,Martin,F,1993-04-12,9015 Webb Plains Suite 284,Leetown,MT,20469,+1-130-523-1244x7315 +387,John,Lamb,M,1996-10-06,423 Clay Gateway Apt. 994,East Jenniferview,NJ,36109,966.395.5172x0849 +388,Charlene,Sanchez,F,1989-06-03,51050 Lewis Parks,East Carl,GA,29004,919.665.5330x770 +389,Jennifer,Martinez,F,2001-11-27,4090 Mitchell Streets,Port Samantha,NY,09604,644-556-1857 +390,Jennifer,Horton,F,1987-09-15,159 Jeffrey Stream Apt. 563,East Rachelbury,WY,90710,010.414.5964 +391,Tammy,Silva,F,1988-09-26,96718 Lane Prairie,Morrischester,IL,39329,331-170-3037x637 +392,Daniel,Garza,M,2005-07-23,472 Garcia Crescent Suite 679,Kimberlyville,DC,40759,271.130.7240x78754 +393,Krista,Gomez,F,2002-09-18,5074 Brandon Junction,Leeville,IN,80120,(103)131-0094x3181 +394,Sonya,Lyons,F,1994-01-14,47323 Keith Pine,Clintonport,MS,40520,(122)572-0765 +395,William,Ibarra,M,2001-04-27,57907 Kennedy Canyon Apt. 438,Karimouth,SC,44498,(584)745-7054x5897 +396,Michael,Chandler,M,2001-03-16,257 Becky Ridge Apt. 313,Grayland,NM,71924,001-824-556-9644x309 +397,Barbara,Pope,F,1990-02-13,1072 Edward Vista Suite 247,Lake Alexis,IN,78236,4065004254 +398,Jonathan,Mullen,M,1991-10-25,236 Miller Fields Apt. 536,Port Corey,IA,41229,592.342.6834x414 +399,Lori,Gardner,F,1996-03-17,2875 Jennings Island Apt. 
766,Port Anthony,CA,18927,+1-985-298-9406x260 diff --git a/tests/data/StudentMajor.csv b/tests/data/StudentMajor.csv new file mode 100644 index 000000000..644a46492 --- /dev/null +++ b/tests/data/StudentMajor.csv @@ -0,0 +1,227 @@ +student_id,dept,declare_date +100,BIOL,2010-01-10 +102,CS,2019-01-13 +103,PHYS,2018-10-04 +104,CS,2010-11-04 +105,CS,2018-11-20 +107,MATH,2020-01-04 +108,PHYS,2012-09-26 +111,MATH,2001-04-19 +112,MATH,2000-07-12 +113,PHYS,2000-01-02 +114,MATH,2004-06-01 +115,BIOL,2006-11-19 +116,CS,2002-04-14 +117,PHYS,2002-08-13 +118,CS,2015-12-29 +120,MATH,2015-03-18 +121,BIOL,2010-01-05 +122,MATH,2006-11-17 +123,PHYS,2007-01-19 +124,MATH,2002-08-03 +125,CS,2004-12-02 +126,PHYS,2012-01-26 +127,CS,2013-04-17 +128,MATH,2001-03-10 +129,BIOL,2001-02-08 +130,CS,2019-10-27 +131,MATH,2007-07-10 +132,PHYS,2002-11-23 +134,CS,2000-04-10 +135,MATH,2001-06-24 +136,MATH,2014-01-09 +137,CS,2011-09-26 +139,CS,2019-08-21 +141,BIOL,2020-06-24 +142,CS,2000-01-02 +143,PHYS,2004-12-03 +144,CS,2009-12-05 +147,CS,2002-08-30 +148,PHYS,2014-04-18 +150,BIOL,2011-11-07 +151,PHYS,2003-07-14 +153,PHYS,2020-09-08 +156,PHYS,2018-07-10 +159,PHYS,2017-12-07 +160,MATH,2005-10-18 +161,MATH,2005-08-29 +162,MATH,2007-08-04 +163,BIOL,2015-09-17 +164,CS,2013-11-20 +165,CS,2008-09-25 +166,BIOL,2006-09-03 +167,MATH,2005-11-05 +168,PHYS,2004-07-07 +169,PHYS,2013-10-08 +171,PHYS,2016-12-25 +172,MATH,2005-07-17 +174,PHYS,2001-12-04 +175,CS,2018-10-22 +176,MATH,1999-10-29 +177,BIOL,2020-05-28 +178,PHYS,2002-04-10 +181,BIOL,2005-12-04 +182,PHYS,2000-02-18 +183,PHYS,2003-10-13 +184,MATH,1999-03-07 +185,CS,2011-03-27 +187,PHYS,2012-11-18 +188,PHYS,2018-05-03 +189,BIOL,2017-08-06 +191,MATH,2001-06-13 +194,CS,2010-08-05 +195,BIOL,2005-04-21 +196,CS,2020-11-07 +197,BIOL,2016-12-20 +198,CS,2015-11-19 +200,CS,2005-06-20 +203,BIOL,2006-01-22 +204,MATH,2018-05-29 +205,PHYS,2015-02-13 +206,CS,2016-01-16 +207,CS,2010-12-24 +210,BIOL,2011-02-17 +211,PHYS,2020-01-17 +212,BIOL,2018-01-04 +213,MATH,2003-09-10 +215,BIOL,2001-04-14 +216,MATH,2013-12-07 +217,PHYS,2013-07-18 +218,PHYS,2020-04-13 +219,MATH,2011-10-19 +220,PHYS,2001-05-30 +221,MATH,2018-05-14 +223,BIOL,2001-08-29 +224,PHYS,2003-04-30 +225,PHYS,2016-08-07 +226,PHYS,2009-02-23 +228,CS,2002-06-08 +230,MATH,2003-01-05 +231,MATH,2015-12-20 +232,CS,2006-11-05 +233,PHYS,2000-10-01 +234,CS,2019-06-20 +235,PHYS,2017-05-23 +236,BIOL,2010-04-05 +237,CS,1999-10-08 +238,CS,2006-08-16 +239,MATH,2008-11-11 +240,MATH,2007-07-22 +241,MATH,2012-04-14 +242,PHYS,2011-03-06 +243,MATH,2001-04-24 +244,CS,2004-05-15 +245,CS,2008-10-19 +246,PHYS,2001-07-18 +248,CS,2017-03-08 +249,MATH,2018-07-30 +250,BIOL,2007-03-19 +251,CS,2016-08-13 +252,BIOL,2019-10-19 +253,CS,2016-01-06 +254,PHYS,2009-08-16 +255,BIOL,2012-08-01 +256,PHYS,2020-01-19 +257,MATH,2000-12-04 +258,BIOL,2017-07-29 +259,PHYS,2002-10-09 +260,BIOL,2018-10-30 +261,BIOL,2015-01-10 +262,BIOL,2007-12-14 +263,MATH,2000-01-08 +264,CS,2000-02-06 +265,PHYS,2010-07-03 +267,PHYS,2013-05-04 +268,PHYS,2007-11-17 +269,PHYS,2005-10-27 +270,BIOL,2010-05-20 +272,CS,2001-01-08 +273,MATH,2003-09-28 +274,CS,2005-12-13 +275,BIOL,2017-08-12 +276,PHYS,2010-03-20 +277,PHYS,2001-02-13 +278,CS,2007-01-07 +279,MATH,2015-10-17 +280,PHYS,2001-06-25 +282,CS,2018-03-09 +283,CS,2019-10-03 +285,BIOL,2000-03-15 +286,MATH,2010-10-08 +287,MATH,2001-05-29 +288,PHYS,2013-02-28 +290,PHYS,2019-05-09 +292,MATH,2019-11-03 +293,BIOL,2001-09-28 +295,MATH,2017-10-05 +296,CS,2015-04-16 +299,PHYS,2003-05-28 +301,PHYS,2008-03-15 +302,MATH,2000-06-02 +304,MATH,2002-07-17 
+305,PHYS,2000-03-18 +307,BIOL,2015-11-24 +308,MATH,2016-04-09 +311,BIOL,2006-08-31 +312,PHYS,2010-12-01 +313,CS,2013-09-06 +314,PHYS,2015-04-02 +315,BIOL,2009-04-28 +318,PHYS,2006-10-01 +319,CS,1999-09-24 +320,MATH,2000-11-18 +321,PHYS,1999-11-24 +322,BIOL,2005-09-03 +323,BIOL,2017-03-05 +324,CS,2019-09-10 +325,MATH,2011-11-28 +326,MATH,1999-08-13 +328,CS,2017-10-19 +329,CS,2015-05-29 +332,PHYS,2000-10-09 +334,MATH,2012-03-04 +336,PHYS,2011-11-02 +337,MATH,2003-04-06 +338,PHYS,2013-08-15 +340,CS,2013-07-10 +342,PHYS,2017-09-12 +343,PHYS,2003-09-09 +344,PHYS,2002-12-07 +345,CS,2013-11-25 +346,BIOL,2003-01-06 +348,PHYS,2019-12-13 +349,PHYS,2011-07-06 +350,CS,2010-12-20 +351,CS,2005-08-03 +352,MATH,2010-09-04 +353,PHYS,2013-11-07 +357,BIOL,2000-12-20 +358,CS,2007-02-07 +360,BIOL,2006-11-23 +362,BIOL,2002-02-17 +364,BIOL,2019-01-11 +365,BIOL,1999-05-05 +366,MATH,2006-09-23 +367,CS,2013-01-20 +368,CS,2017-03-30 +369,BIOL,2018-04-30 +370,PHYS,2000-07-22 +371,CS,1999-07-05 +372,CS,2007-07-03 +373,MATH,2000-12-07 +376,CS,2001-08-10 +378,MATH,2000-12-05 +379,PHYS,2003-04-24 +382,PHYS,2013-12-03 +383,PHYS,2005-02-22 +385,MATH,2008-08-12 +386,PHYS,2000-06-27 +390,CS,2009-09-08 +391,MATH,2010-11-24 +392,CS,2019-07-01 +393,CS,2007-04-24 +394,BIOL,2008-12-12 +395,PHYS,2003-06-01 +396,MATH,2019-08-16 +398,MATH,2012-07-14 +399,CS,2015-04-16 diff --git a/tests/data/Term.csv b/tests/data/Term.csv new file mode 100644 index 000000000..91c3400ae --- /dev/null +++ b/tests/data/Term.csv @@ -0,0 +1,19 @@ +term_year,term +2015,Spring +2015,Summer +2015,Fall +2016,Spring +2016,Summer +2016,Fall +2017,Spring +2017,Summer +2017,Fall +2018,Spring +2018,Summer +2018,Fall +2019,Spring +2019,Summer +2019,Fall +2020,Spring +2020,Summer +2020,Fall From fc363f4e49886420ed1d7e4e71efc56867f9b2d9 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 16:03:33 -0700 Subject: [PATCH 165/212] First pass at migrating test_university --- tests/schema_university.py | 17 +++-------- tests/test_university.py | 62 ++++++++++++++++++++++++++------------ 2 files changed, 48 insertions(+), 31 deletions(-) diff --git a/tests/schema_university.py b/tests/schema_university.py index 619ea459f..c569dbcbf 100644 --- a/tests/schema_university.py +++ b/tests/schema_university.py @@ -1,9 +1,7 @@ import datajoint as dj +import inspect -schema = dj.Schema() - -@schema class Student(dj.Manual): definition = """ student_id : int unsigned # university-wide ID number @@ -20,7 +18,6 @@ class Student(dj.Manual): """ -@schema class Department(dj.Manual): definition = """ dept : varchar(6) # abbreviated department name, e.g. 
BIOL @@ -31,7 +28,6 @@ class Department(dj.Manual): """ -@schema class StudentMajor(dj.Manual): definition = """ -> Student @@ -41,7 +37,6 @@ class StudentMajor(dj.Manual): """ -@schema class Course(dj.Manual): definition = """ -> Department @@ -52,7 +47,6 @@ class Course(dj.Manual): """ -@schema class Term(dj.Manual): definition = """ term_year : year @@ -60,7 +54,6 @@ class Term(dj.Manual): """ -@schema class Section(dj.Manual): definition = """ -> Course @@ -71,7 +64,6 @@ class Section(dj.Manual): """ -@schema class CurrentTerm(dj.Manual): definition = """ omega=0 : tinyint @@ -80,7 +72,6 @@ class CurrentTerm(dj.Manual): """ -@schema class Enroll(dj.Manual): definition = """ -> Student @@ -88,7 +79,6 @@ class Enroll(dj.Manual): """ -@schema class LetterGrade(dj.Lookup): definition = """ grade : char(2) @@ -110,10 +100,13 @@ class LetterGrade(dj.Lookup): ] -@schema class Grade(dj.Manual): definition = """ -> Enroll --- -> LetterGrade """ + + +LOCALS_UNI = {k: v for k, v in locals().items() if inspect.isclass(v)} +__all__ = list(LOCALS_UNI) diff --git a/tests/test_university.py b/tests/test_university.py index 02520a4b8..198ba0e5e 100644 --- a/tests/test_university.py +++ b/tests/test_university.py @@ -1,27 +1,44 @@ -from nose.tools import assert_true, assert_list_equal, assert_false, raises +import pytest import hashlib +from pathlib import Path from datajoint import DataJointError +import datajoint as dj from .schema_university import * -from . import PREFIX, CONN_INFO +from . import PREFIX, schema_university def _hash4(table): - """hash of table contents""" + """Hash of table contents""" data = table.fetch(order_by="KEY", as_dict=True) blob = dj.blob.pack(data, compress=False) return hashlib.md5(blob).digest().hex()[:4] -@raises(DataJointError) -def test_activate_unauthorized(): - schema.activate("unauthorized", connection=dj.conn(**CONN_INFO)) - - -def test_activate(): - schema.activate( - PREFIX + "_university", connection=dj.conn(**CONN_INFO) - ) # deferred activation +@pytest.fixture +def schema_uni_inactive(): + schema = dj.Schema(context=schema_university.LOCALS_UNI) + schema(Student) + schema(Department) + schema(StudentMajor) + schema(Course) + schema(Term) + schema(Section) + schema(CurrentTerm) + schema(Enroll) + schema(LetterGrade) + schema(Grade) + yield schema + schema.drop() + + +@pytest.fixture +def schema_uni(db_creds_test, schema_uni_inactive, connection_test): + # Deferred activation + schema_uni_inactive.activate( + PREFIX + "_university", connection=dj.conn(**db_creds_test) + ) # --------------- Fill University ------------------- + test_data_dir = Path(__file__).parent / "data" for table in ( Student, Department, @@ -33,12 +50,19 @@ def test_activate(): Enroll, Grade, ): - from pathlib import Path + path = test_data_dir / Path(table.__name__ + ".csv") + assert path.is_file(), f"File {path} is not a file" + assert path.exists(), f"File {path} does not exist" + table().insert(path) + return schema_uni_inactive + - table().insert(Path("./data/" + table.__name__ + ".csv")) +def test_activate_unauthorized(schema_uni_inactive, db_creds_test, connection_test): + with pytest.raises(DataJointError): + schema_uni_inactive.activate("unauthorized", connection=dj.conn(**db_creds_test)) -def test_fill(): +def test_fill(schema_uni): """check that the randomized tables are consistently defined""" # check randomized tables assert len(Student()) == 300 and _hash4(Student) == "1e1a" @@ -48,7 +72,7 @@ def test_fill(): assert len(Grade()) == 3027 and _hash4(Grade) == "4a9d" -def 
test_restrict(): +def test_restrict(schema_uni): """ test diverse restrictions from the university database. This test relies on a specific instantiation of the database. @@ -90,7 +114,7 @@ def test_restrict(): assert len(special) == 158 -def test_advanced_join(): +def test_advanced_join(schema_uni): """test advanced joins""" # Students with ungraded courses in current term ungraded = Enroll * CurrentTerm - Grade @@ -102,14 +126,14 @@ def test_advanced_join(): assert len(ungraded.join(major)) == len(ungraded & major) == 31 -def test_union(): +def test_union(schema_uni): # effective left join Enroll with Major q1 = (Enroll & "student_id=101") + (Enroll & "student_id=102") q2 = Enroll & "student_id in (101, 102)" assert len(q1) == len(q2) == 41 -def test_aggr(): +def test_aggr(schema_uni): avg_grade_per_course = Course.aggr( Grade * LetterGrade, avg_grade="round(avg(points), 2)" ) From f307d3b8fb8f9486ef761143d60d30680debd7bc Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 16:03:43 -0700 Subject: [PATCH 166/212] Format with black --- tests/test_university.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/tests/test_university.py b/tests/test_university.py index 198ba0e5e..956cc506f 100644 --- a/tests/test_university.py +++ b/tests/test_university.py @@ -59,7 +59,9 @@ def schema_uni(db_creds_test, schema_uni_inactive, connection_test): def test_activate_unauthorized(schema_uni_inactive, db_creds_test, connection_test): with pytest.raises(DataJointError): - schema_uni_inactive.activate("unauthorized", connection=dj.conn(**db_creds_test)) + schema_uni_inactive.activate( + "unauthorized", connection=dj.conn(**db_creds_test) + ) def test_fill(schema_uni): @@ -160,8 +162,8 @@ def test_aggr(schema_uni): Grade, ..., n="count(student_id)", keep_all_rows=True ) & "n>1" assert not any( - name in section.heading.names for name in Grade.heading.secondary_attributes - ) + name in section.heading.names for name in Grade.heading.secondary_attributes + ) assert len(set(section.fetch("dept"))) == 1 assert len(section) == 168 assert bool(section) From e12e0211af8a9cdd47d91f08891a6c081253b778 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 16:06:22 -0700 Subject: [PATCH 167/212] cp to tests --- tests/test_update1.py | 126 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 126 insertions(+) create mode 100644 tests/test_update1.py diff --git a/tests/test_update1.py b/tests/test_update1.py new file mode 100644 index 000000000..d2f7dc18f --- /dev/null +++ b/tests/test_update1.py @@ -0,0 +1,126 @@ +from nose.tools import assert_true, assert_false, assert_equal, raises +import os +import numpy as np +from pathlib import Path +import tempfile +import datajoint as dj +from . 
import PREFIX, CONN_INFO +from datajoint import DataJointError + +schema = dj.Schema(PREFIX + "_update1", connection=dj.conn(**CONN_INFO)) + +dj.config["stores"]["update_store"] = dict(protocol="file", location=tempfile.mkdtemp()) + +dj.config["stores"]["update_repo"] = dict( + stage=tempfile.mkdtemp(), protocol="file", location=tempfile.mkdtemp() +) + + +scratch_folder = tempfile.mkdtemp() + +dj.errors._switch_filepath_types(True) + + +@schema +class Thing(dj.Manual): + definition = """ + thing : int + --- + number=0 : int + frac : float + picture = null : attach@update_store + params = null : longblob + img_file = null: filepath@update_repo + timestamp = CURRENT_TIMESTAMP : datetime + """ + + +def test_update1(): + """test normal updates""" + + dj.errors._switch_filepath_types(True) + # CHECK 1 -- initial insert + key = dict(thing=1) + Thing.insert1(dict(key, frac=0.5)) + check1 = Thing.fetch1() + + # CHECK 2 -- some updates + # numbers and datetimes + Thing.update1(dict(key, number=3, frac=30, timestamp="2020-01-01 10:00:00")) + # attachment + attach_file = Path(scratch_folder, "attach1.dat") + buffer1 = os.urandom(100) + attach_file.write_bytes(buffer1) + Thing.update1(dict(key, picture=attach_file)) + attach_file.unlink() + assert_false(attach_file.is_file()) + + # filepath + stage_path = dj.config["stores"]["update_repo"]["stage"] + relpath, filename = "one/two/three", "picture.dat" + managed_file = Path(stage_path, relpath, filename) + managed_file.parent.mkdir(parents=True, exist_ok=True) + original_file_data = os.urandom(3000) + with managed_file.open("wb") as f: + f.write(original_file_data) + Thing.update1(dict(key, img_file=managed_file)) + managed_file.unlink() + assert_false(managed_file.is_file()) + + check2 = Thing.fetch1(download_path=scratch_folder) + buffer2 = Path(check2["picture"]).read_bytes() # read attachment + final_file_data = managed_file.read_bytes() # read filepath + + # CHECK 3 -- reset to default values using None + Thing.update1( + dict( + key, + number=None, + timestamp=None, + picture=None, + img_file=None, + params=np.random.randn(3, 3), + ) + ) + check3 = Thing.fetch1() + + assert_true( + check1["number"] == 0 and check1["picture"] is None and check1["params"] is None + ) + + assert_true( + check2["number"] == 3 + and check2["frac"] == 30.0 + and check2["picture"] is not None + and check2["params"] is None + and buffer1 == buffer2 + ) + + assert_true( + check3["number"] == 0 + and check3["frac"] == 30.0 + and check3["picture"] is None + and check3["img_file"] is None + and isinstance(check3["params"], np.ndarray) + ) + + assert_true(check3["timestamp"] > check2["timestamp"]) + assert_equal(buffer1, buffer2) + assert_equal(original_file_data, final_file_data) + + +@raises(DataJointError) +def test_update1_nonexistent(): + Thing.update1(dict(thing=100, frac=0.5)) # updating a non-existent entry + + +@raises(DataJointError) +def test_update1_noprimary(): + Thing.update1(dict(number=None)) # missing primary key + + +@raises(DataJointError) +def test_update1_misspelled_attribute(): + key = dict(thing=17) + Thing.insert1(dict(key, frac=1.5)) + Thing.update1(dict(key, numer=3)) # misspelled attribute From 96146418f681b4b8816347995607856094eb9837 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 16:06:37 -0700 Subject: [PATCH 168/212] nose2pytest test_update1 --- tests/test_update1.py | 26 ++++++++++---------------- 1 file changed, 10 insertions(+), 16 deletions(-) diff --git a/tests/test_update1.py b/tests/test_update1.py index d2f7dc18f..60dfe8c10 
100644 --- a/tests/test_update1.py +++ b/tests/test_update1.py @@ -53,7 +53,7 @@ def test_update1(): attach_file.write_bytes(buffer1) Thing.update1(dict(key, picture=attach_file)) attach_file.unlink() - assert_false(attach_file.is_file()) + assert not attach_file.is_file() # filepath stage_path = dj.config["stores"]["update_repo"]["stage"] @@ -65,7 +65,7 @@ def test_update1(): f.write(original_file_data) Thing.update1(dict(key, img_file=managed_file)) managed_file.unlink() - assert_false(managed_file.is_file()) + assert not managed_file.is_file() check2 = Thing.fetch1(download_path=scratch_folder) buffer2 = Path(check2["picture"]).read_bytes() # read attachment @@ -84,29 +84,23 @@ def test_update1(): ) check3 = Thing.fetch1() - assert_true( - check1["number"] == 0 and check1["picture"] is None and check1["params"] is None - ) + assert check1["number"] == 0 and check1["picture"] is None and check1["params"] is None - assert_true( - check2["number"] == 3 + assert (check2["number"] == 3 and check2["frac"] == 30.0 and check2["picture"] is not None and check2["params"] is None - and buffer1 == buffer2 - ) + and buffer1 == buffer2) - assert_true( - check3["number"] == 0 + assert (check3["number"] == 0 and check3["frac"] == 30.0 and check3["picture"] is None and check3["img_file"] is None - and isinstance(check3["params"], np.ndarray) - ) + and isinstance(check3["params"], np.ndarray)) - assert_true(check3["timestamp"] > check2["timestamp"]) - assert_equal(buffer1, buffer2) - assert_equal(original_file_data, final_file_data) + assert check3["timestamp"] > check2["timestamp"] + assert buffer1 == buffer2 + assert original_file_data == final_file_data @raises(DataJointError) From 20ab185b7dd544c8dff4c7cc744b3879612ec4f0 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 16:26:15 -0700 Subject: [PATCH 169/212] Format with black --- tests/test_update1.py | 97 +++++++++++++++++++++++++++---------------- 1 file changed, 62 insertions(+), 35 deletions(-) diff --git a/tests/test_update1.py b/tests/test_update1.py index 60dfe8c10..07e0e5b80 100644 --- a/tests/test_update1.py +++ b/tests/test_update1.py @@ -1,27 +1,13 @@ -from nose.tools import assert_true, assert_false, assert_equal, raises +import pytest import os import numpy as np from pathlib import Path import tempfile import datajoint as dj -from . import PREFIX, CONN_INFO +from . 
import PREFIX from datajoint import DataJointError -schema = dj.Schema(PREFIX + "_update1", connection=dj.conn(**CONN_INFO)) -dj.config["stores"]["update_store"] = dict(protocol="file", location=tempfile.mkdtemp()) - -dj.config["stores"]["update_repo"] = dict( - stage=tempfile.mkdtemp(), protocol="file", location=tempfile.mkdtemp() -) - - -scratch_folder = tempfile.mkdtemp() - -dj.errors._switch_filepath_types(True) - - -@schema class Thing(dj.Manual): definition = """ thing : int @@ -35,10 +21,38 @@ class Thing(dj.Manual): """ -def test_update1(): - """test normal updates""" +@pytest.fixture(scope="module") +def mock_stores_update(tmpdir_factory): + og_stores_config = dj.config.get("stores") + if "stores" not in dj.config: + dj.config["stores"] = {} + dj.config["stores"]["update_store"] = dict( + protocol="file", location=tmpdir_factory.mktemp("store") + ) + dj.config["stores"]["update_repo"] = dict( + stage=tmpdir_factory.mktemp("repo_stage"), + protocol="file", + location=tmpdir_factory.mktemp("repo_loc"), + ) + yield + if og_stores_config is None: + del dj.config["stores"] + else: + dj.config["stores"] = og_stores_config - dj.errors._switch_filepath_types(True) + +@pytest.fixture +def schema_update1(connection_test): + schema = dj.Schema( + PREFIX + "_update1", context=dict(Thing=Thing), connection=connection_test + ) + schema(Thing) + yield schema + schema.drop() + + +def test_update1(tmpdir, enable_filepath_feature, schema_update1, mock_stores_update): + """Test normal updates""" # CHECK 1 -- initial insert key = dict(thing=1) Thing.insert1(dict(key, frac=0.5)) @@ -48,7 +62,7 @@ def test_update1(): # numbers and datetimes Thing.update1(dict(key, number=3, frac=30, timestamp="2020-01-01 10:00:00")) # attachment - attach_file = Path(scratch_folder, "attach1.dat") + attach_file = Path(tmpdir, "attach1.dat") buffer1 = os.urandom(100) attach_file.write_bytes(buffer1) Thing.update1(dict(key, picture=attach_file)) @@ -67,7 +81,7 @@ def test_update1(): managed_file.unlink() assert not managed_file.is_file() - check2 = Thing.fetch1(download_path=scratch_folder) + check2 = Thing.fetch1(download_path=tmpdir) buffer2 = Path(check2["picture"]).read_bytes() # read attachment final_file_data = managed_file.read_bytes() # read filepath @@ -84,37 +98,50 @@ def test_update1(): ) check3 = Thing.fetch1() - assert check1["number"] == 0 and check1["picture"] is None and check1["params"] is None + assert ( + check1["number"] == 0 and check1["picture"] is None and check1["params"] is None + ) - assert (check2["number"] == 3 + assert ( + check2["number"] == 3 and check2["frac"] == 30.0 and check2["picture"] is not None and check2["params"] is None - and buffer1 == buffer2) + and buffer1 == buffer2 + ) - assert (check3["number"] == 0 + assert ( + check3["number"] == 0 and check3["frac"] == 30.0 and check3["picture"] is None and check3["img_file"] is None - and isinstance(check3["params"], np.ndarray)) + and isinstance(check3["params"], np.ndarray) + ) assert check3["timestamp"] > check2["timestamp"] assert buffer1 == buffer2 assert original_file_data == final_file_data -@raises(DataJointError) -def test_update1_nonexistent(): - Thing.update1(dict(thing=100, frac=0.5)) # updating a non-existent entry +def test_update1_nonexistent( + enable_filepath_feature, schema_update1, mock_stores_update +): + with pytest.raises(DataJointError): + # updating a non-existent entry + Thing.update1(dict(thing=100, frac=0.5)) -@raises(DataJointError) -def test_update1_noprimary(): - Thing.update1(dict(number=None)) # missing 
primary key +def test_update1_noprimary(enable_filepath_feature, schema_update1, mock_stores_update): + with pytest.raises(DataJointError): + # missing primary key + Thing.update1(dict(number=None)) -@raises(DataJointError) -def test_update1_misspelled_attribute(): +def test_update1_misspelled_attribute( + enable_filepath_feature, schema_update1, mock_stores_update +): key = dict(thing=17) Thing.insert1(dict(key, frac=1.5)) - Thing.update1(dict(key, numer=3)) # misspelled attribute + with pytest.raises(DataJointError): + # misspelled attribute + Thing.update1(dict(key, numer=3)) From 164753c4aca6cc1d41dce05d232d509e655a45b9 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 16:32:07 -0700 Subject: [PATCH 170/212] cp to tests --- tests/test_uuid.py | 69 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 69 insertions(+) create mode 100644 tests/test_uuid.py diff --git a/tests/test_uuid.py b/tests/test_uuid.py new file mode 100644 index 000000000..1d5aa0818 --- /dev/null +++ b/tests/test_uuid.py @@ -0,0 +1,69 @@ +from nose.tools import assert_true, assert_equal, raises +import uuid +from .schema_uuid import Basic, Item, Topic +from datajoint import DataJointError +from itertools import count + + +def test_uuid(): + """test inserting and fetching of UUID attributes and restricting by UUID attributes""" + u, n = uuid.uuid4(), -1 + Basic().insert1(dict(item=u, number=n)) + Basic().insert(zip(map(uuid.uuid1, range(20)), count())) + number = (Basic() & {"item": u}).fetch1("number") + assert_equal(number, n) + item = (Basic & {"number": n}).fetch1("item") + assert_equal(u, item) + + +def test_string_uuid(): + """test that only UUID objects are accepted when inserting UUID fields""" + u, n = "00000000-0000-0000-0000-000000000000", 24601 + Basic().insert1(dict(item=u, number=n)) + k, m = (Basic & {"item": u}).fetch1("KEY", "number") + assert_equal(m, n) + assert_true(isinstance(k["item"], uuid.UUID)) + + +@raises(DataJointError) +def test_invalid_uuid_insert1(): + """test that only UUID objects are accepted when inserting UUID fields""" + u, n = 0, 24601 + Basic().insert1(dict(item=u, number=n)) + + +@raises(DataJointError) +def test_invalid_uuid_insert2(): + """test that only UUID objects are accepted when inserting UUID fields""" + u, n = "abc", 24601 + Basic().insert1(dict(item=u, number=n)) + + +@raises(DataJointError) +def test_invalid_uuid_restrict1(): + """test that only UUID objects are accepted when inserting UUID fields""" + u = 0 + k, m = (Basic & {"item": u}).fetch1("KEY", "number") + + +@raises(DataJointError) +def test_invalid_uuid_restrict1(): + """test that only UUID objects are accepted when inserting UUID fields""" + u = "abc" + k, m = (Basic & {"item": u}).fetch1("KEY", "number") + + +def test_uuid_dependencies(): + """test the use of UUID in foreign keys""" + for word in ( + "Neuroscience", + "Knowledge", + "Curiosity", + "Inspiration", + "Science", + "Philosophy", + "Conscience", + ): + Topic().add(word) + Item.populate() + assert_equal(Item().progress(), (0, len(Topic()))) From 5f439c66909df1ee33bc8b1cee224d9c0ef359fa Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 16:32:51 -0700 Subject: [PATCH 171/212] nose2pytest test_uuid --- tests/test_uuid.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/test_uuid.py b/tests/test_uuid.py index 1d5aa0818..100089dbf 100644 --- a/tests/test_uuid.py +++ b/tests/test_uuid.py @@ -11,9 +11,9 @@ def test_uuid(): Basic().insert1(dict(item=u, number=n)) 
Basic().insert(zip(map(uuid.uuid1, range(20)), count()))
     number = (Basic() & {"item": u}).fetch1("number")
-    assert_equal(number, n)
+    assert number == n
     item = (Basic & {"number": n}).fetch1("item")
-    assert_equal(u, item)
+    assert u == item
 
 
 def test_string_uuid():
@@ -21,8 +21,8 @@ def test_string_uuid():
     u, n = "00000000-0000-0000-0000-000000000000", 24601
     Basic().insert1(dict(item=u, number=n))
     k, m = (Basic & {"item": u}).fetch1("KEY", "number")
-    assert_equal(m, n)
-    assert_true(isinstance(k["item"], uuid.UUID))
+    assert m == n
+    assert isinstance(k["item"], uuid.UUID)
 
 
 @raises(DataJointError)
@@ -66,4 +66,4 @@ def test_uuid_dependencies():
     ):
         Topic().add(word)
     Item.populate()
-    assert_equal(Item().progress(), (0, len(Topic())))
+    assert Item().progress() == (0, len(Topic()))

From 5d2b1f7f6efdeebf463946c922469747281971b7 Mon Sep 17 00:00:00 2001
From: Ethan Ho
Date: Thu, 14 Dec 2023 16:34:58 -0700
Subject: [PATCH 172/212] Migrate test_uuid

---
 tests/test_uuid.py | 32 ++++++++++++++++----------------
 1 file changed, 16 insertions(+), 16 deletions(-)

diff --git a/tests/test_uuid.py b/tests/test_uuid.py
index 100089dbf..d99aa6c4c 100644
--- a/tests/test_uuid.py
+++ b/tests/test_uuid.py
@@ -1,11 +1,11 @@
-from nose.tools import assert_true, assert_equal, raises
+import pytest
 import uuid
 from .schema_uuid import Basic, Item, Topic
 from datajoint import DataJointError
 from itertools import count
 
 
-def test_uuid():
+def test_uuid(schema_uuid):
     """test inserting and fetching of UUID attributes and restricting by UUID attributes"""
     u, n = uuid.uuid4(), -1
     Basic().insert1(dict(item=u, number=n))
@@ -16,7 +16,7 @@ def test_uuid():
     assert u == item
 
 
-def test_string_uuid():
+def test_string_uuid(schema_uuid):
     """test that only UUID objects are accepted when inserting UUID fields"""
     u, n = "00000000-0000-0000-0000-000000000000", 24601
     Basic().insert1(dict(item=u, number=n))
@@ -25,35 +25,35 @@ def test_string_uuid():
     assert isinstance(k["item"], uuid.UUID)
 
 
-@raises(DataJointError)
-def test_invalid_uuid_insert1():
+def test_invalid_uuid_insert1(schema_uuid):
     """test that only UUID objects are accepted when inserting UUID fields"""
     u, n = 0, 24601
-    Basic().insert1(dict(item=u, number=n))
+    with pytest.raises(DataJointError):
+        Basic().insert1(dict(item=u, number=n))
 
 
-@raises(DataJointError)
-def test_invalid_uuid_insert2():
+def test_invalid_uuid_insert2(schema_uuid):
     """test that only UUID objects are accepted when inserting UUID fields"""
     u, n = "abc", 24601
-    Basic().insert1(dict(item=u, number=n))
+    with pytest.raises(DataJointError):
+        Basic().insert1(dict(item=u, number=n))
 
 
-@raises(DataJointError)
-def test_invalid_uuid_restrict1():
+def test_invalid_uuid_restrict1(schema_uuid):
     """test that only UUID objects are accepted when inserting UUID fields"""
     u = 0
-    k, m = (Basic & {"item": u}).fetch1("KEY", "number")
+    with pytest.raises(DataJointError):
+        k, m = (Basic & {"item": u}).fetch1("KEY", "number")
 
 
-@raises(DataJointError)
-def test_invalid_uuid_restrict1():
+def test_invalid_uuid_restrict2(schema_uuid):
     """test that only UUID objects are accepted when inserting UUID fields"""
     u = "abc"
-    k, m = (Basic & {"item": u}).fetch1("KEY", "number")
+    with pytest.raises(DataJointError):
+        k, m = (Basic & {"item": u}).fetch1("KEY", "number")
 
 
-def test_uuid_dependencies():
+def test_uuid_dependencies(schema_uuid):
     """test the use of UUID in foreign keys"""
     for word in (
         "Neuroscience",

From 922ab005fa6f9908627c2bffe65f62df58e7de7e Mon Sep 17 00:00:00 2001
From: Ethan Ho
Date: Thu, 14 Dec 2023 
16:44:26 -0700 Subject: [PATCH 173/212] Clean test_adapted_attributes --- tests/test_adapted_attributes.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/tests/test_adapted_attributes.py b/tests/test_adapted_attributes.py index bbe8456f5..455dbd6ca 100644 --- a/tests/test_adapted_attributes.py +++ b/tests/test_adapted_attributes.py @@ -22,24 +22,22 @@ def schema_ad( adapted_graph_instance, enable_adapted_types, enable_filepath_feature, + tmpdir ): - stores_config = { + dj.config["stores"] = { "repo-s3": dict( S3_CONN_INFO, protocol="s3", location="adapted/repo", - stage=tempfile.mkdtemp(), + stage=tmpdir ) } - dj.config["stores"] = stores_config - layout_to_filepath = schema_adapted.LayoutToFilepath() context = { **schema_adapted.LOCALS_ADAPTED, "graph": adapted_graph_instance, - "layout_to_filepath": layout_to_filepath, + "layout_to_filepath": schema_adapted.LayoutToFilepath(), } schema = dj.schema(SCHEMA_NAME, context=context, connection=connection_test) - graph = adapted_graph_instance schema(schema_adapted.Connectivity) schema(schema_adapted.Layout) yield schema @@ -93,7 +91,6 @@ def test_adapted_filepath_type(schema_ad, minio_client): t = Layout() t.insert1((0, layout)) result = t.fetch1("layout") - # TODO: may fail, used to be assert_dict_equal assert result == layout t.delete() c.delete() From 608782f7f3eb52a5a47ed7a99844d5baf5fe1643 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 16:46:02 -0700 Subject: [PATCH 174/212] Clean test_admin --- tests/test_admin.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/tests/test_admin.py b/tests/test_admin.py index 1ab89c1af..43b418f80 100644 --- a/tests/test_admin.py +++ b/tests/test_admin.py @@ -7,19 +7,17 @@ import pymysql import pytest -from . 
import CONN_INFO_ROOT - @pytest.fixture() -def user_alice() -> dict: +def user_alice(db_creds_root) -> dict: # set up - reset config, log in as root, and create a new user alice # reset dj.config manually because its state may be changed by these tests if os.path.exists(dj.settings.LOCALCONFIG): os.remove(dj.settings.LOCALCONFIG) dj.config["database.password"] = os.getenv("DJ_PASS") - root_conn = dj.conn(**CONN_INFO_ROOT, reset=True) + root_conn = dj.conn(**db_creds_root, reset=True) new_credentials = dict( - host=CONN_INFO_ROOT["host"], + host=db_creds_root["host"], user="alice", password="oldpass", ) From 44a9184446df3aeb59be6ece94e0f12e748e68d1 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 16:47:21 -0700 Subject: [PATCH 175/212] Clean test_aggr_regressions --- tests/test_aggr_regressions.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/tests/test_aggr_regressions.py b/tests/test_aggr_regressions.py index b4d4e0802..31ec81faa 100644 --- a/tests/test_aggr_regressions.py +++ b/tests/test_aggr_regressions.py @@ -12,10 +12,9 @@ @pytest.fixture(scope="function") def schema_aggr_reg(connection_test): - context = LOCALS_AGGR_REGRESS schema = dj.Schema( PREFIX + "_aggr_regress", - context=context, + context=LOCALS_AGGR_REGRESS, connection=connection_test, ) schema(R) @@ -27,10 +26,9 @@ def schema_aggr_reg(connection_test): @pytest.fixture(scope="function") def schema_aggr_reg_with_abx(connection_test): - context = LOCALS_AGGR_REGRESS schema = dj.Schema( PREFIX + "_aggr_regress_with_abx", - context=context, + context=LOCALS_AGGR_REGRESS, connection=connection_test, ) schema(R) From 8f85b0237a9a30dbf44d7f4c535da5678d6c96dc Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 16:51:29 -0700 Subject: [PATCH 176/212] Clean test_alter Also move table defs to schema_alter module --- tests/schema_alter.py | 55 ++++++++++++++++++++++++++++++ tests/test_alter.py | 79 ++++++++----------------------------------- 2 files changed, 69 insertions(+), 65 deletions(-) create mode 100644 tests/schema_alter.py diff --git a/tests/schema_alter.py b/tests/schema_alter.py new file mode 100644 index 000000000..53c0bf87f --- /dev/null +++ b/tests/schema_alter.py @@ -0,0 +1,55 @@ +import datajoint as dj + + +class Experiment(dj.Imported): + original_definition = """ # information about experiments + -> Subject + experiment_id :smallint # experiment number for this subject + --- + experiment_date :date # date when experiment was started + -> [nullable] User + data_path="" :varchar(255) # file path to recorded data + notes="" :varchar(2048) # e.g. purpose of experiment + entry_time=CURRENT_TIMESTAMP :timestamp # automatic timestamp + """ + + definition1 = """ # Experiment + -> Subject + experiment_id :smallint # experiment number for this subject + --- + data_path : int # some number + extra=null : longblob # just testing + -> [nullable] User + subject_notes=null :varchar(2048) # {notes} e.g. 
purpose of experiment + entry_time=CURRENT_TIMESTAMP :timestamp # automatic timestamp + """ + + +class Parent(dj.Manual): + definition = """ + parent_id: int + """ + + class Child(dj.Part): + definition = """ + -> Parent + """ + definition_new = """ + -> master + --- + child_id=null: int + """ + + class Grandchild(dj.Part): + definition = """ + -> master.Child + """ + definition_new = """ + -> master.Child + --- + grandchild_id=null: int + """ + + +LOCALS_ALTER = {k: v for k, v in locals().items() if inspect.isclass(v)} +__all__ = list(LOCALS_ALTER) diff --git a/tests/test_alter.py b/tests/test_alter.py index a78a07f26..f2acafb36 100644 --- a/tests/test_alter.py +++ b/tests/test_alter.py @@ -2,59 +2,8 @@ import re import datajoint as dj from . import schema as schema_any_module, PREFIX +from .schema_alter import Experiment, Parent, LOCALS_ALTER - -class Experiment(dj.Imported): - original_definition = """ # information about experiments - -> Subject - experiment_id :smallint # experiment number for this subject - --- - experiment_date :date # date when experiment was started - -> [nullable] User - data_path="" :varchar(255) # file path to recorded data - notes="" :varchar(2048) # e.g. purpose of experiment - entry_time=CURRENT_TIMESTAMP :timestamp # automatic timestamp - """ - - definition1 = """ # Experiment - -> Subject - experiment_id :smallint # experiment number for this subject - --- - data_path : int # some number - extra=null : longblob # just testing - -> [nullable] User - subject_notes=null :varchar(2048) # {notes} e.g. purpose of experiment - entry_time=CURRENT_TIMESTAMP :timestamp # automatic timestamp - """ - - -class Parent(dj.Manual): - definition = """ - parent_id: int - """ - - class Child(dj.Part): - definition = """ - -> Parent - """ - definition_new = """ - -> master - --- - child_id=null: int - """ - - class Grandchild(dj.Part): - definition = """ - -> master.Child - """ - definition_new = """ - -> master.Child - --- - grandchild_id=null: int - """ - - -LOCALS_ALTER = {"Experiment": Experiment, "Parent": Parent} COMBINED_CONTEXT = { **schema_any_module.LOCALS_ANY, **LOCALS_ALTER, @@ -71,6 +20,19 @@ def schema_alter(connection_test, schema_any): class TestAlter: + def verify_alter(self, schema_alter, table, attribute_sql): + definition_original = schema_alter.connection.query( + f"SHOW CREATE TABLE {table.full_table_name}" + ).fetchone()[1] + table.definition = table.definition_new + table.alter(prompt=False) + definition_new = schema_alter.connection.query( + f"SHOW CREATE TABLE {table.full_table_name}" + ).fetchone()[1] + assert ( + re.sub(f"{attribute_sql},\n ", "", definition_new) == definition_original + ) + def test_alter(self, schema_alter): original = schema_alter.connection.query( "SHOW CREATE TABLE " + Experiment.full_table_name @@ -89,19 +51,6 @@ def test_alter(self, schema_alter): assert altered != restored assert original == restored - def verify_alter(self, schema_alter, table, attribute_sql): - definition_original = schema_alter.connection.query( - f"SHOW CREATE TABLE {table.full_table_name}" - ).fetchone()[1] - table.definition = table.definition_new - table.alter(prompt=False) - definition_new = schema_alter.connection.query( - f"SHOW CREATE TABLE {table.full_table_name}" - ).fetchone()[1] - assert ( - re.sub(f"{attribute_sql},\n ", "", definition_new) == definition_original - ) - def test_alter_part(self, schema_alter): """ https://github.com/datajoint/datajoint-python/issues/936 From d40fca65de80ac023855da893efb98cf2ba6a2d0 Mon Sep 17 00:00:00 2001 
From: Ethan Ho Date: Thu, 14 Dec 2023 16:53:29 -0700 Subject: [PATCH 177/212] Clean test_attach --- tests/test_attach.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/tests/test_attach.py b/tests/test_attach.py index 654feef5b..4b6ece9fd 100644 --- a/tests/test_attach.py +++ b/tests/test_attach.py @@ -1,15 +1,14 @@ import pytest -import tempfile from pathlib import Path import os from .schema_external import Attach -def test_attach_attributes(schema_ext, minio_client): +def test_attach_attributes(schema_ext, minio_client, tmpdir_factory): """Test saving files in attachments""" # create a mock file table = Attach() - source_folder = tempfile.mkdtemp() + source_folder = tmpdir_factory.mktemp() for i in range(2): attach1 = Path(source_folder, "attach1.img") data1 = os.urandom(100) @@ -21,7 +20,7 @@ def test_attach_attributes(schema_ext, minio_client): f.write(data2) table.insert1(dict(attach=i, img=attach1, txt=attach2)) - download_folder = Path(tempfile.mkdtemp()) + download_folder = Path(tmpdir_factory.mktemp()) keys, path1, path2 = table.fetch( "KEY", "img", "txt", download_path=download_folder, order_by="KEY" ) @@ -43,11 +42,11 @@ def test_attach_attributes(schema_ext, minio_client): assert p2 == path2[0] -def test_return_string(schema_ext, minio_client): +def test_return_string(schema_ext, minio_client, tmpdir_factory): """Test returning string on fetch""" # create a mock file table = Attach() - source_folder = tempfile.mkdtemp() + source_folder = tmpdir_factory.mktemp() attach1 = Path(source_folder, "attach1.img") data1 = os.urandom(100) @@ -59,7 +58,7 @@ def test_return_string(schema_ext, minio_client): f.write(data2) table.insert1(dict(attach=2, img=attach1, txt=attach2)) - download_folder = Path(tempfile.mkdtemp()) + download_folder = Path(tmpdir_factory.mktemp()) keys, path1, path2 = table.fetch( "KEY", "img", "txt", download_path=download_folder, order_by="KEY" ) From c0d56a623bea3ecfce8f8f0ba90df9e16cb85ed8 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 16:54:44 -0700 Subject: [PATCH 178/212] Fix ImportError --- tests/schema_alter.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/schema_alter.py b/tests/schema_alter.py index 53c0bf87f..d607bc7c4 100644 --- a/tests/schema_alter.py +++ b/tests/schema_alter.py @@ -1,4 +1,5 @@ import datajoint as dj +import inspect class Experiment(dj.Imported): From 95643de6a0c3661b1e1def6a4af200cb67c2b918 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 17:01:33 -0700 Subject: [PATCH 179/212] Make stores config serializable --- tests/test_adapted_attributes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_adapted_attributes.py b/tests/test_adapted_attributes.py index 455dbd6ca..c0fb6b0eb 100644 --- a/tests/test_adapted_attributes.py +++ b/tests/test_adapted_attributes.py @@ -29,7 +29,7 @@ def schema_ad( S3_CONN_INFO, protocol="s3", location="adapted/repo", - stage=tmpdir + stage=str(tmpdir) ) } context = { From 62bd4ccb643b5478039ed5b45aef99df7dd9024c Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 18:13:23 -0700 Subject: [PATCH 180/212] Correct use of tmpdir_factory --- tests/test_attach.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/test_attach.py b/tests/test_attach.py index 4b6ece9fd..b3ecea04e 100644 --- a/tests/test_attach.py +++ b/tests/test_attach.py @@ -8,7 +8,7 @@ def test_attach_attributes(schema_ext, minio_client, tmpdir_factory): """Test saving files in attachments""" # create a mock 
file table = Attach() - source_folder = tmpdir_factory.mktemp() + source_folder = tmpdir_factory.mktemp("source") for i in range(2): attach1 = Path(source_folder, "attach1.img") data1 = os.urandom(100) @@ -20,7 +20,7 @@ def test_attach_attributes(schema_ext, minio_client, tmpdir_factory): f.write(data2) table.insert1(dict(attach=i, img=attach1, txt=attach2)) - download_folder = Path(tmpdir_factory.mktemp()) + download_folder = Path(tmpdir_factory.mktemp("download")) keys, path1, path2 = table.fetch( "KEY", "img", "txt", download_path=download_folder, order_by="KEY" ) @@ -46,7 +46,7 @@ def test_return_string(schema_ext, minio_client, tmpdir_factory): """Test returning string on fetch""" # create a mock file table = Attach() - source_folder = tmpdir_factory.mktemp() + source_folder = tmpdir_factory.mktemp("source") attach1 = Path(source_folder, "attach1.img") data1 = os.urandom(100) @@ -58,7 +58,7 @@ def test_return_string(schema_ext, minio_client, tmpdir_factory): f.write(data2) table.insert1(dict(attach=2, img=attach1, txt=attach2)) - download_folder = Path(tmpdir_factory.mktemp()) + download_folder = Path(tmpdir_factory.mktemp("download")) keys, path1, path2 = table.fetch( "KEY", "img", "txt", download_path=download_folder, order_by="KEY" ) From 32b7c4c66168f1b4bf6e0eb47c643861669f7200 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 18:25:11 -0700 Subject: [PATCH 181/212] Clean up test_autopopulate --- tests/conftest.py | 61 ++++++++ tests/test_autopopulate.py | 290 +++++++++++++++++-------------------- tests/test_relation.py | 50 ------- 3 files changed, 190 insertions(+), 211 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 5a38eef90..e68c8f72e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -414,3 +414,64 @@ def minio_client(minio_client_bare): for o in objs ] minio_client_bare.remove_bucket(S3_CONN_INFO["bucket"]) + + +@pytest.fixture +def test(schema_any): + yield schema.TTest() + + +@pytest.fixture +def test2(schema_any): + yield schema.TTest2() + + +@pytest.fixture +def test_extra(schema_any): + yield schema.TTestExtra() + + +@pytest.fixture +def test_no_extra(schema_any): + yield schema.TTestNoExtra() + + +@pytest.fixture +def user(schema_any): + return schema.User() + + +@pytest.fixture +def subject(schema_any): + return schema.Subject() + + +@pytest.fixture +def experiment(schema_any): + return schema.Experiment() + + +@pytest.fixture +def ephys(schema_any): + return schema.Ephys() + + +@pytest.fixture +def img(schema_any): + return schema.Image() + + +@pytest.fixture +def trial(schema_any): + return schema.Trial() + + +@pytest.fixture +def channel(schema_any): + return schema.Ephys.Channel() + + +@pytest.fixture +def trash(schema_any): + return schema.UberTrash() + diff --git a/tests/test_autopopulate.py b/tests/test_autopopulate.py index 25f8e16ec..d1225a140 100644 --- a/tests/test_autopopulate.py +++ b/tests/test_autopopulate.py @@ -5,164 +5,132 @@ import pymysql -class TestPopulate: - """ - Test base relations: insert, delete - """ - - @classmethod - def setup_class(cls): - cls.user = schema.User() - cls.subject = schema.Subject() - cls.experiment = schema.Experiment() - cls.trial = schema.Trial() - cls.ephys = schema.Ephys() - cls.channel = schema.Ephys.Channel() - - @classmethod - def teardown_class(cls): - """Delete automatic tables just in case""" - for autopop_table in ( - cls.channel, - cls.ephys, - cls.trial.Condition, - cls.trial, - cls.experiment, - ): - try: - autopop_table.delete_quick() - except 
(pymysql.err.OperationalError, dj.errors.MissingTableError): - # Table doesn't exist - pass - - def test_populate(self, schema_any): - # test simple populate - assert self.subject, "root tables are empty" - assert not self.experiment, "table already filled?" - self.experiment.populate() - assert ( - len(self.experiment) - == len(self.subject) * self.experiment.fake_experiments_per_subject - ) - - # test restricted populate - assert not self.trial, "table already filled?" - restriction = self.subject.proj(animal="subject_id").fetch("KEY")[0] - d = self.trial.connection.dependencies - d.load() - self.trial.populate(restriction) - assert self.trial, "table was not populated" - key_source = self.trial.key_source - assert len(key_source & self.trial) == len(key_source & restriction) - assert len(key_source - self.trial) == len(key_source - restriction) - - # test subtable populate - assert not self.ephys - assert not self.channel - self.ephys.populate() - assert self.ephys - assert self.channel - - def test_populate_with_success_count(self, schema_any): - # test simple populate - assert self.subject, "root tables are empty" - assert not self.experiment, "table already filled?" - ret = self.experiment.populate() - success_count = ret["success_count"] - assert len(self.experiment.key_source & self.experiment) == success_count - - # test restricted populate - assert not self.trial, "table already filled?" - restriction = self.subject.proj(animal="subject_id").fetch("KEY")[0] - d = self.trial.connection.dependencies - d.load() - ret = self.trial.populate(restriction, suppress_errors=True) - success_count = ret["success_count"] - assert len(self.trial.key_source & self.trial) == success_count - - def test_populate_exclude_error_and_ignore_jobs(self, schema_any): - # test simple populate - assert self.subject, "root tables are empty" - assert not self.experiment, "table already filled?" - - keys = self.experiment.key_source.fetch("KEY", limit=2) - for idx, key in enumerate(keys): - if idx == 0: - schema_any.jobs.ignore(self.experiment.table_name, key) - else: - schema_any.jobs.error(self.experiment.table_name, key, "") - - self.experiment.populate(reserve_jobs=True) - assert ( - len(self.experiment.key_source & self.experiment) - == len(self.experiment.key_source) - 2 - ) - - def test_allow_direct_insert(self, schema_any): - assert self.subject, "root tables are empty" - key = self.subject.fetch("KEY", limit=1)[0] - key["experiment_id"] = 1000 - key["experiment_date"] = "2018-10-30" - self.experiment.insert1(key, allow_direct_insert=True) - - def test_multi_processing(self, schema_any): - assert self.subject, "root tables are empty" - assert not self.experiment, "table already filled?" - self.experiment.populate(processes=2) - assert ( - len(self.experiment) - == len(self.subject) * self.experiment.fake_experiments_per_subject - ) - - def test_max_multi_processing(self, schema_any): - assert self.subject, "root tables are empty" - assert not self.experiment, "table already filled?" 
- self.experiment.populate(processes=None) - assert ( - len(self.experiment) - == len(self.subject) * self.experiment.fake_experiments_per_subject - ) - - def test_allow_insert(self, schema_any): - assert self.subject, "root tables are empty" - key = self.subject.fetch("KEY")[0] - key["experiment_id"] = 1001 - key["experiment_date"] = "2018-10-30" - with pytest.raises(DataJointError): - self.experiment.insert1(key) - - def test_load_dependencies(self): - schema = dj.Schema(f"{PREFIX}_load_dependencies_populate") - - @schema - class ImageSource(dj.Lookup): - definition = """ - image_source_id: int - """ - contents = [(0,)] - - @schema - class Image(dj.Imported): - definition = """ - -> ImageSource - --- - image_data: longblob - """ - - def make(self, key): - self.insert1(dict(key, image_data=dict())) - - Image.populate() - - @schema - class Crop(dj.Computed): - definition = """ - -> Image - --- - crop_image: longblob - """ - - def make(self, key): - self.insert1(dict(key, crop_image=dict())) - - Crop.populate() +def test_populate(trial, subject, experiment, ephys, channel): + # test simple populate + assert subject, "root tables are empty" + assert not experiment, "table already filled?" + experiment.populate() + assert ( + len(experiment) + == len(subject) * experiment.fake_experiments_per_subject + ) + + # test restricted populate + assert not trial, "table already filled?" + restriction = subject.proj(animal="subject_id").fetch("KEY")[0] + d = trial.connection.dependencies + d.load() + trial.populate(restriction) + assert trial, "table was not populated" + key_source = trial.key_source + assert len(key_source & trial) == len(key_source & restriction) + assert len(key_source - trial) == len(key_source - restriction) + + # test subtable populate + assert not ephys + assert not channel + ephys.populate() + assert ephys + assert channel + + +def test_populate_with_success_count(subject, experiment, trial): + # test simple populate + assert subject, "root tables are empty" + assert not experiment, "table already filled?" + ret = experiment.populate() + success_count = ret["success_count"] + assert len(experiment.key_source & experiment) == success_count + + # test restricted populate + assert not trial, "table already filled?" + restriction = subject.proj(animal="subject_id").fetch("KEY")[0] + d = trial.connection.dependencies + d.load() + ret = trial.populate(restriction, suppress_errors=True) + success_count = ret["success_count"] + assert len(trial.key_source & trial) == success_count + + +def test_populate_exclude_error_and_ignore_jobs(schema_any, subject, experiment): + # test simple populate + assert subject, "root tables are empty" + assert not experiment, "table already filled?" 
+
+    keys = experiment.key_source.fetch("KEY", limit=2)
+    for idx, key in enumerate(keys):
+        if idx == 0:
+            schema_any.jobs.ignore(experiment.table_name, key)
+        else:
+            schema_any.jobs.error(experiment.table_name, key, "")
+
+    experiment.populate(reserve_jobs=True)
+    assert (
+        len(experiment.key_source & experiment)
+        == len(experiment.key_source) - 2
+    )
+
+
+def test_allow_direct_insert(subject, experiment):
+    assert subject, "root tables are empty"
+    key = subject.fetch("KEY", limit=1)[0]
+    key["experiment_id"] = 1000
+    key["experiment_date"] = "2018-10-30"
+    experiment.insert1(key, allow_direct_insert=True)
+
+
+@pytest.mark.parametrize("processes", [None, 2])
+def test_multi_processing(subject, experiment, processes):
+    assert subject, "root tables are empty"
+    assert not experiment, "table already filled?"
+    experiment.populate(processes=processes)
+    assert (
+        len(experiment)
+        == len(subject) * experiment.fake_experiments_per_subject
+    )
+
+
+def test_allow_insert(subject, experiment):
+    assert subject, "root tables are empty"
+    key = subject.fetch("KEY")[0]
+    key["experiment_id"] = 1001
+    key["experiment_date"] = "2018-10-30"
+    with pytest.raises(DataJointError):
+        experiment.insert1(key)
+
+
+def test_load_dependencies():
+    schema = dj.Schema(f"{PREFIX}_load_dependencies_populate")
+
+    @schema
+    class ImageSource(dj.Lookup):
+        definition = """
+        image_source_id: int
+        """
+        contents = [(0,)]
+
+    @schema
+    class Image(dj.Imported):
+        definition = """
+        -> ImageSource
+        ---
+        image_data: longblob
+        """
+
+        def make(self, key):
+            self.insert1(dict(key, image_data=dict()))
+
+    Image.populate()
+
+    @schema
+    class Crop(dj.Computed):
+        definition = """
+        -> Image
+        ---
+        crop_image: longblob
+        """
+
+        def make(self, key):
+            self.insert1(dict(key, crop_image=dict()))
+
+    Crop.populate()
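One detail in the conversion above deserves a note: the old test_multi_processing/test_max_multi_processing pair collapses into a single parametrized test, so the processes argument supplied by @pytest.mark.parametrize has to be forwarded into populate(). A minimal sketch of the pattern, with a hypothetical populate callable standing in for the DataJoint table:

import pytest


def populate(processes=None):
    # hypothetical stand-in for Experiment().populate(); None means "use all cores"
    return {"success_count": 10}


@pytest.mark.parametrize("processes", [None, 2])
def test_populate_processes(processes):
    # forwarding the parametrized value is what makes the two cases differ;
    # hard-coding processes=None would run the same code path twice
    assert populate(processes=processes)["success_count"] == 10

diff --git a/tests/test_relation.py b/tests/test_relation.py
index 2011a1901..a40b17d4e 100644
--- a/tests/test_relation.py
+++ b/tests/test_relation.py
@@ -10,56 +10,6 @@
 from .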
import schema -@pytest.fixture -def test(schema_any): - yield schema.TTest() - - -@pytest.fixture -def test2(schema_any): - yield schema.TTest2() - - -@pytest.fixture -def test_extra(schema_any): - yield schema.TTestExtra() - - -@pytest.fixture -def test_no_extra(schema_any): - yield schema.TTestNoExtra() - - -@pytest.fixture -def user(schema_any): - return schema.User() - - -@pytest.fixture -def subject(schema_any): - return schema.Subject() - - -@pytest.fixture -def experiment(schema_any): - return schema.Experiment() - - -@pytest.fixture -def ephys(schema_any): - return schema.Ephys() - - -@pytest.fixture -def img(schema_any): - return schema.Image() - - -@pytest.fixture -def trash(schema_any): - return schema.UberTrash() - - def test_contents(user, subject): """ test the ability of tables to self-populate using the contents property From 91d7ad6e1ebada0de6af2a8ba797959604af1aff Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 19:28:26 -0700 Subject: [PATCH 182/212] Clean up other modules --- tests/conftest.py | 53 ++- tests/test_blob_matlab.py | 236 +++++----- tests/test_cascading_delete.py | 211 ++++----- tests/test_connection.py | 58 ++- tests/test_declare.py | 565 ++++++++++++------------ tests/test_fetch.py | 713 +++++++++++++++---------------- tests/test_fetch_same.py | 42 +- tests/test_jobs.py | 33 +- tests/test_json.py | 14 +- tests/test_nan.py | 53 +-- tests/test_plugin.py | 6 +- tests/test_privileges.py | 19 +- tests/test_reconnection.py | 36 +- tests/test_relation.py | 1 - tests/test_relation_u.py | 134 +++--- tests/test_relational_operand.py | 31 +- tests/test_s3.py | 66 ++- tests/test_schema_keywords.py | 8 +- 18 files changed, 1113 insertions(+), 1166 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index e68c8f72e..0409565dc 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,6 @@ import datajoint as dj from packaging import version -from typing import Dict +from typing import Dict, List import os from os import environ, remove import minio @@ -19,8 +19,6 @@ ) from . 
import (
     PREFIX,
-    CONN_INFO,
-    S3_CONN_INFO,
     schema,
     schema_simple,
     schema_advanced,
@@ -184,8 +182,18 @@ def connection_test(connection_root):
     connection.close()
 
 
+@pytest.fixture(scope="session")
+def s3_creds() -> Dict:
+    return dict(
+        endpoint=os.environ.get("S3_ENDPOINT", "fakeservices.datajoint.io"),
+        access_key=os.environ.get("S3_ACCESS_KEY", "datajoint"),
+        secret_key=os.environ.get("S3_SECRET_KEY", "datajoint"),
+        bucket=os.environ.get("S3_BUCKET", "datajoint.test"),
+    )
+
+
 @pytest.fixture(scope="session")
-def stores_config(tmpdir_factory):
+def stores_config(s3_creds, tmpdir_factory):
     stores_config = {
         "raw": dict(protocol="file", location=tmpdir_factory.mktemp("raw")),
         "repo": dict(
@@ -194,7 +202,7 @@ def stores_config(tmpdir_factory):
             location=tmpdir_factory.mktemp("repo"),
         ),
         "repo-s3": dict(
-            S3_CONN_INFO,
+            s3_creds,
             protocol="s3",
             location="dj/repo",
             stage=tmpdir_factory.mktemp("repo-s3"),
@@ -203,7 +211,7 @@ def stores_config(tmpdir_factory):
             protocol="file", location=tmpdir_factory.mktemp("local"), subfolding=(1, 1)
         ),
         "share": dict(
-            S3_CONN_INFO, protocol="s3", location="dj/store/repo", subfolding=(2, 4)
+            s3_creds, protocol="s3", location="dj/store/repo", subfolding=(2, 4)
         ),
     }
     return stores_config
@@ -380,12 +388,12 @@ def http_client():
 @pytest.fixture(scope="session")
-def minio_client_bare(http_client):
+def minio_client_bare(s3_creds, http_client):
     """Initialize MinIO with an endpoint and access/secret keys."""
     client = minio.Minio(
-        S3_CONN_INFO["endpoint"],
-        access_key=S3_CONN_INFO["access_key"],
-        secret_key=S3_CONN_INFO["secret_key"],
+        s3_creds["endpoint"],
+        access_key=s3_creds["access_key"],
+        secret_key=s3_creds["secret_key"],
         secure=True,
         http_client=http_client,
     )
@@ -393,12 +401,12 @@ def minio_client_bare(http_client):
 @pytest.fixture(scope="session")
-def minio_client(minio_client_bare):
+def minio_client(s3_creds, minio_client_bare):
     """Initialize a MinIO client and create buckets for testing session."""
     # Setup MinIO bucket
     aws_region = "us-east-1"
     try:
-        minio_client_bare.make_bucket(S3_CONN_INFO["bucket"], location=aws_region)
+        minio_client_bare.make_bucket(s3_creds["bucket"], location=aws_region)
     except minio.error.S3Error as e:
         if e.code != "BucketAlreadyOwnedByYou":
             raise e
@@ -406,14 +414,14 @@ def minio_client(minio_client_bare):
     yield minio_client_bare
 
     # Teardown S3
-    objs = list(minio_client_bare.list_objects(S3_CONN_INFO["bucket"], recursive=True))
+    objs = list(minio_client_bare.list_objects(s3_creds["bucket"], recursive=True))
     objs = [
         minio_client_bare.remove_object(
-            S3_CONN_INFO["bucket"], o.object_name.encode("utf-8")
+            s3_creds["bucket"], o.object_name.encode("utf-8")
         )
         for o in objs
     ]
-    minio_client_bare.remove_bucket(S3_CONN_INFO["bucket"])
+    minio_client_bare.remove_bucket(s3_creds["bucket"])
 
 
 @pytest.fixture
+def lang(schema_any):
+    yield schema.Language()
+
+
+@pytest.fixture
+def languages(lang) -> List:
+    og_contents = lang.contents
+    languages = og_contents.copy()
+    yield languages
+    lang.contents = og_contents
+
+
+@pytest.fixture
 def subject(schema_any):
-    return schema.Subject()
+    yield schema.Subject()
 
 
 @pytest.fixture
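Since stores_config, minio_client_bare, and minio_client are all session-scoped and now request s3_creds, the credentials fixture must be session-scoped as well: pytest only lets a fixture depend on fixtures of equal or broader scope. A minimal sketch of that rule, with hypothetical creds/client names:

import os

import pytest


@pytest.fixture(scope="session")
def creds():
    # hypothetical credentials fixture, read from the environment once per session
    return {"access_key": os.environ.get("ACCESS_KEY", "datajoint")}


@pytest.fixture(scope="session")
def client(creds):
    # OK: equal scopes; if creds were function-scoped, pytest would raise
    # ScopeMismatch as soon as this session-scoped fixture requested it
    return {"authenticated_as": creds["access_key"]}

diff --git a/tests/test_blob_matlab.py b/tests/test_blob_matlab.py
index 575e6b0b8..6d99d4118 100644
--- a/tests/test_blob_matlab.py
+++ b/tests/test_blob_matlab.py
@@ -3,7 +3,6 @@ import datajoint as dj
 from datajoint.blob import pack, unpack
 from numpy.testing import assert_array_equal
-
 from .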
import PREFIX @@ -17,7 +16,7 @@ class Blob(dj.Manual): @pytest.fixture -def schema(connection_test): +def schema_blob(connection_test): schema = dj.Schema(PREFIX + "_test1", dict(Blob=Blob), connection=connection_test) schema(Blob) yield schema @@ -25,8 +24,8 @@ def schema(connection_test): @pytest.fixture -def insert_blobs_func(schema): - def insert_blobs(): +def schema_blob_pop(schema_blob): + def insert_blobs(schema): """ This function inserts blobs resulting from the following datajoint-matlab code: @@ -60,124 +59,119 @@ def insert_blobs(): ) ) - yield insert_blobs - - -@pytest.fixture -def setup_class(schema, insert_blobs_func): assert not dj.config["safemode"], "safemode must be disabled" Blob().delete() - insert_blobs_func() + insert_blobs(schema_blob) + return schema_blob -class TestFetch: - @staticmethod - def test_complex_matlab_blobs(setup_class): - """ - test correct de-serialization of various blob types - """ - blobs = Blob().fetch("blob", order_by="KEY") - - blob = blobs[0] # 'simple string' 'character string' - assert blob[0] == "character string" - - blob = blobs[1] # '1D vector' 1:15:180 - assert_array_equal(blob, np.r_[1:180:15][None, :]) - assert_array_equal(blob, unpack(pack(blob))) - - blob = blobs[2] # 'string array' {'string1' 'string2'} - assert isinstance(blob, dj.MatCell) - assert_array_equal(blob, np.array([["string1", "string2"]])) - assert_array_equal(blob, unpack(pack(blob))) - - blob = blobs[ - 3 - ] # 'struct array' struct('a', {1,2}, 'b', {struct('c', magic(3)), struct('C', magic(5))}) - assert isinstance(blob, dj.MatStruct) - assert tuple(blob.dtype.names) == ("a", "b") - assert_array_equal(blob.a[0, 0], np.array([[1.0]])) - assert_array_equal(blob.a[0, 1], np.array([[2.0]])) - assert isinstance(blob.b[0, 1], dj.MatStruct) - assert tuple(blob.b[0, 1].C[0, 0].shape) == (5, 5) - b = unpack(pack(blob)) - assert_array_equal(b[0, 0].b[0, 0].c, blob[0, 0].b[0, 0].c) - assert_array_equal(b[0, 1].b[0, 0].C, blob[0, 1].b[0, 0].C) - - blob = blobs[4] # '3D double array' reshape(1:24, [2,3,4]) - assert_array_equal(blob, np.r_[1:25].reshape((2, 3, 4), order="F")) - assert blob.dtype == "float64" - assert_array_equal(blob, unpack(pack(blob))) - - blob = blobs[5] # reshape(uint8(1:24), [2,3,4]) - assert np.array_equal(blob, np.r_[1:25].reshape((2, 3, 4), order="F")) - assert blob.dtype == "uint8" - assert_array_equal(blob, unpack(pack(blob))) - - blob = blobs[6] # fftn(reshape(1:24, [2,3,4])) - assert tuple(blob.shape) == (2, 3, 4) - assert blob.dtype == "complex128" - assert_array_equal(blob, unpack(pack(blob))) - - @staticmethod - def test_complex_matlab_squeeze(setup_class): - """ - test correct de-serialization of various blob types - """ - blob = (Blob & "id=1").fetch1( - "blob", squeeze=True - ) # 'simple string' 'character string' - assert blob == "character string" - - blob = (Blob & "id=2").fetch1( - "blob", squeeze=True - ) # '1D vector' 1:15:180 - assert_array_equal(blob, np.r_[1:180:15]) - - blob = (Blob & "id=3").fetch1( - "blob", squeeze=True - ) # 'string array' {'string1' 'string2'} - assert isinstance(blob, dj.MatCell) - assert_array_equal(blob, np.array(["string1", "string2"])) - - blob = (Blob & "id=4").fetch1( - "blob", squeeze=True - ) # 'struct array' struct('a', {1,2}, 'b', {struct('c', magic(3)), struct('C', magic(5))}) - assert isinstance(blob, dj.MatStruct) - assert tuple(blob.dtype.names) == ("a", "b") - assert_array_equal( - blob.a, - np.array( - [ - 1.0, - 2, - ] - ), - ) - assert isinstance(blob[1].b, dj.MatStruct) - assert 
tuple(blob[1].b.C.item().shape) == (5, 5) - - blob = (Blob & "id=5").fetch1( - "blob", squeeze=True - ) # '3D double array' reshape(1:24, [2,3,4]) - assert np.array_equal(blob, np.r_[1:25].reshape((2, 3, 4), order="F")) - assert blob.dtype == "float64" - - blob = (Blob & "id=6").fetch1( - "blob", squeeze=True - ) # reshape(uint8(1:24), [2,3,4]) - assert np.array_equal(blob, np.r_[1:25].reshape((2, 3, 4), order="F")) - assert blob.dtype == "uint8" - - blob = (Blob & "id=7").fetch1( - "blob", squeeze=True - ) # fftn(reshape(1:24, [2,3,4])) - assert tuple(blob.shape) == (2, 3, 4) - assert blob.dtype == "complex128" - - def test_iter(self, setup_class): - """ - test iterator over the entity set - """ - from_iter = {d["id"]: d for d in Blob()} - assert len(from_iter) == len(Blob()) - assert from_iter[1]["blob"] == "character string" +def test_complex_matlab_blobs(schema_blob_pop): + """ + test correct de-serialization of various blob types + """ + blobs = Blob().fetch("blob", order_by="KEY") + + blob = blobs[0] # 'simple string' 'character string' + assert blob[0] == "character string" + + blob = blobs[1] # '1D vector' 1:15:180 + assert_array_equal(blob, np.r_[1:180:15][None, :]) + assert_array_equal(blob, unpack(pack(blob))) + + blob = blobs[2] # 'string array' {'string1' 'string2'} + assert isinstance(blob, dj.MatCell) + assert_array_equal(blob, np.array([["string1", "string2"]])) + assert_array_equal(blob, unpack(pack(blob))) + + blob = blobs[ + 3 + ] # 'struct array' struct('a', {1,2}, 'b', {struct('c', magic(3)), struct('C', magic(5))}) + assert isinstance(blob, dj.MatStruct) + assert tuple(blob.dtype.names) == ("a", "b") + assert_array_equal(blob.a[0, 0], np.array([[1.0]])) + assert_array_equal(blob.a[0, 1], np.array([[2.0]])) + assert isinstance(blob.b[0, 1], dj.MatStruct) + assert tuple(blob.b[0, 1].C[0, 0].shape) == (5, 5) + b = unpack(pack(blob)) + assert_array_equal(b[0, 0].b[0, 0].c, blob[0, 0].b[0, 0].c) + assert_array_equal(b[0, 1].b[0, 0].C, blob[0, 1].b[0, 0].C) + + blob = blobs[4] # '3D double array' reshape(1:24, [2,3,4]) + assert_array_equal(blob, np.r_[1:25].reshape((2, 3, 4), order="F")) + assert blob.dtype == "float64" + assert_array_equal(blob, unpack(pack(blob))) + + blob = blobs[5] # reshape(uint8(1:24), [2,3,4]) + assert np.array_equal(blob, np.r_[1:25].reshape((2, 3, 4), order="F")) + assert blob.dtype == "uint8" + assert_array_equal(blob, unpack(pack(blob))) + + blob = blobs[6] # fftn(reshape(1:24, [2,3,4])) + assert tuple(blob.shape) == (2, 3, 4) + assert blob.dtype == "complex128" + assert_array_equal(blob, unpack(pack(blob))) + + +def test_complex_matlab_squeeze(schema_blob_pop): + """ + test correct de-serialization of various blob types + """ + blob = (Blob & "id=1").fetch1( + "blob", squeeze=True + ) # 'simple string' 'character string' + assert blob == "character string" + + blob = (Blob & "id=2").fetch1( + "blob", squeeze=True + ) # '1D vector' 1:15:180 + assert_array_equal(blob, np.r_[1:180:15]) + + blob = (Blob & "id=3").fetch1( + "blob", squeeze=True + ) # 'string array' {'string1' 'string2'} + assert isinstance(blob, dj.MatCell) + assert_array_equal(blob, np.array(["string1", "string2"])) + + blob = (Blob & "id=4").fetch1( + "blob", squeeze=True + ) # 'struct array' struct('a', {1,2}, 'b', {struct('c', magic(3)), struct('C', magic(5))}) + assert isinstance(blob, dj.MatStruct) + assert tuple(blob.dtype.names) == ("a", "b") + assert_array_equal( + blob.a, + np.array( + [ + 1.0, + 2, + ] + ), + ) + assert isinstance(blob[1].b, dj.MatStruct) + assert 
tuple(blob[1].b.C.item().shape) == (5, 5) + + blob = (Blob & "id=5").fetch1( + "blob", squeeze=True + ) # '3D double array' reshape(1:24, [2,3,4]) + assert np.array_equal(blob, np.r_[1:25].reshape((2, 3, 4), order="F")) + assert blob.dtype == "float64" + + blob = (Blob & "id=6").fetch1( + "blob", squeeze=True + ) # reshape(uint8(1:24), [2,3,4]) + assert np.array_equal(blob, np.r_[1:25].reshape((2, 3, 4), order="F")) + assert blob.dtype == "uint8" + + blob = (Blob & "id=7").fetch1( + "blob", squeeze=True + ) # fftn(reshape(1:24, [2,3,4])) + assert tuple(blob.shape) == (2, 3, 4) + assert blob.dtype == "complex128" + + +def test_iter(schema_blob_pop): + """ + test iterator over the entity set + """ + from_iter = {d["id"]: d for d in Blob()} + assert len(from_iter) == len(Blob()) + assert from_iter[1]["blob"] == "character string" diff --git a/tests/test_cascading_delete.py b/tests/test_cascading_delete.py index 8646edeca..dcaaa86d2 100644 --- a/tests/test_cascading_delete.py +++ b/tests/test_cascading_delete.py @@ -14,106 +14,113 @@ def schema_simp_pop(schema_simp): yield schema_simp -class TestDelete: - def test_delete_tree(self, schema_simp_pop): - assert not dj.config["safemode"], "safemode must be off for testing" - assert ( - L() and A() and B() and B.C() and D() and E() and E.F(), - "schema is not populated", - ) - A().delete() - assert not A() or B() or B.C() or D() or E() or E.F(), "incomplete delete" - - def test_stepwise_delete(self, schema_simp_pop): - assert not dj.config["safemode"], "safemode must be off for testing" - assert L() and A() and B() and B.C(), "schema population failed" - B.C().delete(force=True) - assert not B.C(), "failed to delete child tables" - B().delete() - assert ( - not B() - ), "failed to delete from the parent table following child table deletion" - - def test_delete_tree_restricted(self, schema_simp_pop): - assert not dj.config["safemode"], "safemode must be off for testing" - assert ( - L() and A() and B() and B.C() and D() and E() and E.F() - ), "schema is not populated" - cond = "cond_in_a" - rel = A() & cond - rest = dict( - A=len(A()) - len(rel), - B=len(B() - rel), - C=len(B.C() - rel), - D=len(D() - rel), - E=len(E() - rel), - F=len(E.F() - rel), - ) - rel.delete() - assert not ( - rel or B() & rel or B.C() & rel or D() & rel or E() & rel or (E.F() & rel) - ), "incomplete delete" - assert len(A()) == rest["A"], "invalid delete restriction" - assert len(B()) == rest["B"], "invalid delete restriction" - assert len(B.C()) == rest["C"], "invalid delete restriction" - assert len(D()) == rest["D"], "invalid delete restriction" - assert len(E()) == rest["E"], "invalid delete restriction" - assert len(E.F()) == rest["F"], "invalid delete restriction" - - def test_delete_lookup(self, schema_simp_pop): - assert not dj.config["safemode"], "safemode must be off for testing" - assert ( - bool(L() and A() and B() and B.C() and D() and E() and E.F()), - "schema is not populated", - ) - L().delete() - assert not bool(L() or D() or E() or E.F()), "incomplete delete" - A().delete() # delete all is necessary because delete L deletes from subtables. 
-
-    def test_delete_lookup_restricted(self, schema_simp_pop):
-        assert not dj.config["safemode"], "safemode must be off for testing"
-        assert (
-            L() and A() and B() and B.C() and D() and E() and E.F(),
-            "schema is not populated",
-        )
-        rel = L() & "cond_in_l"
-        original_count = len(L())
-        deleted_count = len(rel)
-        rel.delete()
-        assert len(L()) == original_count - deleted_count
-
-    def test_delete_complex_keys(self, schema_any):
-        """
-        https://github.com/datajoint/datajoint-python/issues/883
-        https://github.com/datajoint/datajoint-python/issues/886
-        """
-        assert not dj.config["safemode"], "safemode must be off for testing"
-        parent_key_count = 8
-        child_key_count = 1
-        restriction = dict(
-            {"parent_id_{}".format(i + 1): i for i in range(parent_key_count)},
-            **{
-                "child_id_{}".format(i + 1): (i + parent_key_count)
-                for i in range(child_key_count)
-            }
-        )
-        assert len(ComplexParent & restriction) == 1, "Parent record missing"
-        assert len(ComplexChild & restriction) == 1, "Child record missing"
-        (ComplexParent & restriction).delete()
-        assert len(ComplexParent & restriction) == 0, "Parent record was not deleted"
-        assert len(ComplexChild & restriction) == 0, "Child record was not deleted"
-
-    def test_delete_master(self, schema_simp_pop):
+def test_delete_tree(schema_simp_pop):
+    assert not dj.config["safemode"], "safemode must be off for testing"
+    assert (
+        L() and A() and B() and B.C() and D() and E() and E.F()
+    ), "schema is not populated"
+    A().delete()
+    assert not A() or B() or B.C() or D() or E() or E.F(), "incomplete delete"
+
+
+def test_stepwise_delete(schema_simp_pop):
+    assert not dj.config["safemode"], "safemode must be off for testing"
+    assert L() and A() and B() and B.C(), "schema population failed"
+    B.C().delete(force=True)
+    assert not B.C(), "failed to delete child tables"
+    B().delete()
+    assert (
+        not B()
+    ), "failed to delete from the parent table following child table deletion"
+
+
+def test_delete_tree_restricted(schema_simp_pop):
+    assert not dj.config["safemode"], "safemode must be off for testing"
+    assert (
+        L() and A() and B() and B.C() and D() and E() and E.F()
+    ), "schema is not populated"
+    cond = "cond_in_a"
+    rel = A() & cond
+    rest = dict(
+        A=len(A()) - len(rel),
+        B=len(B() - rel),
+        C=len(B.C() - rel),
+        D=len(D() - rel),
+        E=len(E() - rel),
+        F=len(E.F() - rel),
+    )
+    rel.delete()
+    assert not (
+        rel or B() & rel or B.C() & rel or D() & rel or E() & rel or (E.F() & rel)
+    ), "incomplete delete"
+    assert len(A()) == rest["A"], "invalid delete restriction"
+    assert len(B()) == rest["B"], "invalid delete restriction"
+    assert len(B.C()) == rest["C"], "invalid delete restriction"
+    assert len(D()) == rest["D"], "invalid delete restriction"
+    assert len(E()) == rest["E"], "invalid delete restriction"
+    assert len(E.F()) == rest["F"], "invalid delete restriction"
+
+
+def test_delete_lookup(schema_simp_pop):
+    assert not dj.config["safemode"], "safemode must be off for testing"
+    assert bool(
+        L() and A() and B() and B.C() and D() and E() and E.F()
+    ), "schema is not populated"
+    L().delete()
+    assert not bool(L() or D() or E() or E.F()), "incomplete delete"
+    A().delete()  # delete all is necessary because delete L deletes from subtables.
+
+
+def test_delete_lookup_restricted(schema_simp_pop):
+    assert not dj.config["safemode"], "safemode must be off for testing"
+    assert (
+        L() and A() and B() and B.C() and D() and E() and E.F()
+    ), "schema is not populated"
+    rel = L() & "cond_in_l"
+    original_count = len(L())
+    deleted_count = len(rel)
+    rel.delete()
+    assert len(L()) == original_count - deleted_count
+
+
+def test_delete_complex_keys(schema_any):
+    """
+    https://github.com/datajoint/datajoint-python/issues/883
+    https://github.com/datajoint/datajoint-python/issues/886
+    """
+    assert not dj.config["safemode"], "safemode must be off for testing"
+    parent_key_count = 8
+    child_key_count = 1
+    restriction = dict(
+        {"parent_id_{}".format(i + 1): i for i in range(parent_key_count)},
+        **{
+            "child_id_{}".format(i + 1): (i + parent_key_count)
+            for i in range(child_key_count)
+        }
+    )
+    assert len(ComplexParent & restriction) == 1, "Parent record missing"
+    assert len(ComplexChild & restriction) == 1, "Child record missing"
+    (ComplexParent & restriction).delete()
+    assert len(ComplexParent & restriction) == 0, "Parent record was not deleted"
+    assert len(ComplexChild & restriction) == 0, "Child record was not deleted"
+
+
+def test_delete_master(schema_simp_pop):
+    Profile().populate_random()
+    Profile().delete()
+
+
+def test_delete_parts(schema_simp_pop):
+    """test issue #151"""
+    with pytest.raises(dj.DataJointError):
         Profile().populate_random()
-        Profile().delete()
-
-    def test_delete_parts(self, schema_simp_pop):
-        """test issue #151"""
-        with pytest.raises(dj.DataJointError):
-            Profile().populate_random()
-            Website().delete()
-
-    def test_drop_part(self, schema_simp_pop):
-        """test issue #374"""
-        with pytest.raises(dj.DataJointError):
-            Website().drop()
+        Website().delete()
+
+
+def test_drop_part(schema_simp_pop):
+    """test issue #374"""
+    with pytest.raises(dj.DataJointError):
+        Website().drop()
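The rewritten delete tests above lean on pytest's bare assert statements, and one pitfall is worth spelling out: wrapping the expression and its failure message in a single pair of parentheses builds a two-element tuple, which is always truthy, so such an assert can never fail. The message belongs after the closing parenthesis. A minimal runnable sketch with hypothetical names:

import pytest


def check(populated):
    # correct form: the message sits outside the parentheses
    assert (
        populated
    ), "schema is not populated"


def test_tuple_assert_pitfall():
    # assert (populated, "msg") would always pass, even for populated=False,
    # because a non-empty tuple is truthy; the explicit form raises as expected
    with pytest.raises(AssertionError):
        check(False)
    check(True)

diff --git a/tests/test_connection.py b/tests/test_connection.py
index 8cdbbbff5..98b930660 100644
--- a/tests/test_connection.py
+++ b/tests/test_connection.py
@@ -5,41 +5,35 @@ import datajoint as dj
 from datajoint import DataJointError
 import numpy as np
-from . import CONN_INFO_ROOT
 from . import PREFIX
 import pytest
 
 
+class Subjects(dj.Manual):
+    definition = """
+    #Basic subject
+    subject_id : int # unique subject id
+    ---
+    real_id : varchar(40) # real-world name
+    species = "mouse" : enum('mouse', 'monkey', 'human') # species
+    """
+
+
 @pytest.fixture
-def schema(connection_test):
+def schema_tx(connection_test):
     schema = dj.Schema(
-        PREFIX + "_transactions", context=dict(), connection=connection_test
+        PREFIX + "_transactions", context=dict(Subjects=Subjects), connection=connection_test
     )
+    schema(Subjects)
     yield schema
     schema.drop()
 
 
-@pytest.fixture
-def Subjects(schema):
-    @schema
-    class Subjects(dj.Manual):
-        definition = """
-        #Basic subject
-        subject_id : int # unique subject id
-        ---
-        real_id : varchar(40) # real-world name
-        species = "mouse" : enum('mouse', 'monkey', 'human') # species
-        """
-
-    yield Subjects
-    Subjects.drop()
-
-
-def test_dj_conn():
+def test_dj_conn(db_creds_root):
     """
     Should be able to establish a connection as root user
     """
-    c = dj.conn(**CONN_INFO_ROOT)
+    c = dj.conn(**db_creds_root)
     assert c.is_connected
 
 
@@ -50,24 +44,24 @@ def test_dj_connection_class(connection_test):
     assert connection_test.is_connected
 
 
-def test_persistent_dj_conn():
+def test_persistent_dj_conn(db_creds_root):
     """
     conn() method should provide persistent connection across calls.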
Setting reset=True should create a new persistent connection. """ - c1 = dj.conn(**CONN_INFO_ROOT) + c1 = dj.conn(**db_creds_root) c2 = dj.conn() - c3 = dj.conn(**CONN_INFO_ROOT) - c4 = dj.conn(reset=True, **CONN_INFO_ROOT) - c5 = dj.conn(**CONN_INFO_ROOT) + c3 = dj.conn(**db_creds_root) + c4 = dj.conn(reset=True, **db_creds_root) + c5 = dj.conn(**db_creds_root) assert c1 is c2 assert c1 is c3 assert c1 is not c4 assert c4 is c5 -def test_repr(): - c1 = dj.conn(**CONN_INFO_ROOT) +def test_repr(db_creds_root): + c1 = dj.conn(**db_creds_root) assert "disconnected" not in repr(c1) and "connected" in repr(c1) @@ -76,7 +70,7 @@ def test_active(connection_test): assert conn.in_transaction, "Transaction is not active" -def test_transaction_rollback(connection_test, Subjects): +def test_transaction_rollback(schema_tx, connection_test): """Test transaction cancellation using a with statement""" tmp = np.array( [(1, "Peter", "mouse"), (2, "Klara", "monkey")], @@ -101,13 +95,13 @@ def test_transaction_rollback(connection_test, Subjects): ), "Length is not 0. Expected because rollback should have happened." -def test_cancel(connection_test, Subjects): +def test_cancel(schema_tx, connection_test): """Tests cancelling a transaction explicitly""" tmp = np.array( [(1, "Peter", "mouse"), (2, "Klara", "monkey")], - Subjects.heading.as_dtype, + Subjects().heading.as_dtype, ) - Subjects.delete_quick() + Subjects().delete_quick() Subjects.insert1(tmp[0]) connection_test.start_transaction() Subjects.insert1(tmp[1]) diff --git a/tests/test_declare.py b/tests/test_declare.py index a88d396e7..13d91c8ff 100644 --- a/tests/test_declare.py +++ b/tests/test_declare.py @@ -5,320 +5,319 @@ from datajoint.declare import declare -class TestDeclare: - def test_schema_decorator(self, schema_any): - assert issubclass(Subject, dj.Lookup) - assert not issubclass(Subject, dj.Part) - - def test_class_help(self, schema_any): - help(TTest) - help(TTest2) - assert TTest.definition in TTest.__doc__ - assert TTest.definition in TTest2.__doc__ - - def test_instance_help(self, schema_any): - help(TTest()) - help(TTest2()) - assert TTest().definition in TTest().__doc__ - assert TTest2().definition in TTest2().__doc__ - - def test_describe(self, schema_any): - """real_definition should match original definition""" - rel = Experiment() - context = inspect.currentframe().f_globals - s1 = declare(rel.full_table_name, rel.definition, context) - s2 = declare(rel.full_table_name, rel.describe(), context) - assert s1 == s2 - - def test_describe_indexes(self, schema_any): - """real_definition should match original definition""" - rel = IndexRich() - context = inspect.currentframe().f_globals - s1 = declare(rel.full_table_name, rel.definition, context) - s2 = declare(rel.full_table_name, rel.describe(), context) - assert s1 == s2 - - def test_describe_dependencies(self, schema_any): - """real_definition should match original definition""" - rel = ThingC() - context = inspect.currentframe().f_globals - s1 = declare(rel.full_table_name, rel.definition, context) - s2 = declare(rel.full_table_name, rel.describe(), context) - assert s1 == s2 - - def test_part(self, schema_any): +def test_schema_decorator(schema_any): + assert issubclass(Subject, dj.Lookup) + assert not issubclass(Subject, dj.Part) + +def test_class_help(schema_any): + help(TTest) + help(TTest2) + assert TTest.definition in TTest.__doc__ + assert TTest.definition in TTest2.__doc__ + +def test_instance_help(schema_any): + help(TTest()) + help(TTest2()) + assert TTest().definition in 
TTest().__doc__ + assert TTest2().definition in TTest2().__doc__ + +def test_describe(schema_any): + """real_definition should match original definition""" + rel = Experiment() + context = inspect.currentframe().f_globals + s1 = declare(rel.full_table_name, rel.definition, context) + s2 = declare(rel.full_table_name, rel.describe(), context) + assert s1 == s2 + +def test_describe_indexes(schema_any): + """real_definition should match original definition""" + rel = IndexRich() + context = inspect.currentframe().f_globals + s1 = declare(rel.full_table_name, rel.definition, context) + s2 = declare(rel.full_table_name, rel.describe(), context) + assert s1 == s2 + +def test_describe_dependencies(schema_any): + """real_definition should match original definition""" + rel = ThingC() + context = inspect.currentframe().f_globals + s1 = declare(rel.full_table_name, rel.definition, context) + s2 = declare(rel.full_table_name, rel.describe(), context) + assert s1 == s2 + +def test_part(schema_any): + """ + Lookup and part with the same name. See issue #365 + """ + local_schema = dj.Schema(schema_any.database) + + @local_schema + class Type(dj.Lookup): + definition = """ + type : varchar(255) """ - Lookup and part with the same name. See issue #365 - """ - local_schema = dj.Schema(schema_any.database) + contents = zip(("Type1", "Type2", "Type3")) - @local_schema - class Type(dj.Lookup): - definition = """ - type : varchar(255) - """ - contents = zip(("Type1", "Type2", "Type3")) + @local_schema + class TypeMaster(dj.Manual): + definition = """ + master_id : int + """ - @local_schema - class TypeMaster(dj.Manual): + class Type(dj.Part): definition = """ - master_id : int + -> TypeMaster + -> Type """ - class Type(dj.Part): - definition = """ - -> TypeMaster - -> Type - """ +def test_attributes(schema_any): + """ + Test autoincrement declaration + """ + auto = Auto() + auto.fill() + subject = Subject() + experiment = Experiment() + trial = Trial() + ephys = Ephys() + channel = Ephys.Channel() + + assert auto.heading.names == ["id", "name"] + assert auto.heading.attributes["id"].autoincrement + + # test attribute declarations + assert subject.heading.names == [ + "subject_id", + "real_id", + "species", + "date_of_birth", + "subject_notes", + ] + assert subject.primary_key == ["subject_id"] + assert subject.heading.attributes["subject_id"].numeric + assert not subject.heading.attributes["real_id"].numeric + + assert experiment.heading.names == [ + "subject_id", + "experiment_id", + "experiment_date", + "username", + "data_path", + "notes", + "entry_time", + ] + assert experiment.primary_key == ["subject_id", "experiment_id"] + + assert trial.heading.names == [ # tests issue #516 + "animal", + "experiment_id", + "trial_id", + "start_time", + ] + assert trial.primary_key == ["animal", "experiment_id", "trial_id"] + + assert ephys.heading.names == [ + "animal", + "experiment_id", + "trial_id", + "sampling_frequency", + "duration", + ] + assert ephys.primary_key == ["animal", "experiment_id", "trial_id"] + + assert channel.heading.names == [ + "animal", + "experiment_id", + "trial_id", + "channel", + "voltage", + "current", + ] + assert channel.primary_key == ["animal", "experiment_id", "trial_id", "channel"] + assert channel.heading.attributes["voltage"].is_blob + +def test_dependencies(schema_any): + user = User() + subject = Subject() + experiment = Experiment() + trial = Trial() + ephys = Ephys() + channel = Ephys.Channel() + + assert experiment.full_table_name in user.children(primary=False) + assert 
set(experiment.parents(primary=False)) == {user.full_table_name} + assert experiment.full_table_name in user.children(primary=False) + assert set(experiment.parents(primary=False)) == {user.full_table_name} + assert set( + s.full_table_name + for s in experiment.parents(primary=False, as_objects=True) + ) == {user.full_table_name} + + assert experiment.full_table_name in subject.descendants() + assert experiment.full_table_name in { + s.full_table_name for s in subject.descendants(as_objects=True) + } + assert subject.full_table_name in experiment.ancestors() + assert subject.full_table_name in { + s.full_table_name for s in experiment.ancestors(as_objects=True) + } + + assert trial.full_table_name in experiment.descendants() + assert trial.full_table_name in { + s.full_table_name for s in experiment.descendants(as_objects=True) + } + assert experiment.full_table_name in trial.ancestors() + assert experiment.full_table_name in { + s.full_table_name for s in trial.ancestors(as_objects=True) + } + + assert set(trial.children(primary=True)) == { + ephys.full_table_name, + trial.Condition.full_table_name, + } + assert set(trial.parts()) == {trial.Condition.full_table_name} + assert set(s.full_table_name for s in trial.parts(as_objects=True)) == { + trial.Condition.full_table_name + } + assert set(ephys.parents(primary=True)) == {trial.full_table_name} + assert set( + s.full_table_name for s in ephys.parents(primary=True, as_objects=True) + ) == {trial.full_table_name} + assert set(ephys.children(primary=True)) == {channel.full_table_name} + assert set( + s.full_table_name for s in ephys.children(primary=True, as_objects=True) + ) == {channel.full_table_name} + assert set(channel.parents(primary=True)) == {ephys.full_table_name} + assert set( + s.full_table_name for s in channel.parents(primary=True, as_objects=True) + ) == {ephys.full_table_name} + +def test_descendants_only_contain_part_table(schema_any): + """issue #927""" + + class A(dj.Manual): + definition = """ + a: int + """ - def test_attributes(self, schema_any): + class B(dj.Manual): + definition = """ + -> A + b: int """ - Test autoincrement declaration + + class Master(dj.Manual): + definition = """ + table_master: int """ - auto = Auto() - auto.fill() - subject = Subject() - experiment = Experiment() - trial = Trial() - ephys = Ephys() - channel = Ephys.Channel() - - assert auto.heading.names == ["id", "name"] - assert auto.heading.attributes["id"].autoincrement - - # test attribute declarations - assert subject.heading.names == [ - "subject_id", - "real_id", - "species", - "date_of_birth", - "subject_notes", - ] - assert subject.primary_key == ["subject_id"] - assert subject.heading.attributes["subject_id"].numeric - assert not subject.heading.attributes["real_id"].numeric - - assert experiment.heading.names == [ - "subject_id", - "experiment_id", - "experiment_date", - "username", - "data_path", - "notes", - "entry_time", - ] - assert experiment.primary_key == ["subject_id", "experiment_id"] - - assert trial.heading.names == [ # tests issue #516 - "animal", - "experiment_id", - "trial_id", - "start_time", - ] - assert trial.primary_key == ["animal", "experiment_id", "trial_id"] - - assert ephys.heading.names == [ - "animal", - "experiment_id", - "trial_id", - "sampling_frequency", - "duration", - ] - assert ephys.primary_key == ["animal", "experiment_id", "trial_id"] - - assert channel.heading.names == [ - "animal", - "experiment_id", - "trial_id", - "channel", - "voltage", - "current", - ] - assert channel.primary_key == 
["animal", "experiment_id", "trial_id", "channel"] - assert channel.heading.attributes["voltage"].is_blob - - def test_dependencies(self, schema_any): - user = User() - subject = Subject() - experiment = Experiment() - trial = Trial() - ephys = Ephys() - channel = Ephys.Channel() - - assert experiment.full_table_name in user.children(primary=False) - assert set(experiment.parents(primary=False)) == {user.full_table_name} - assert experiment.full_table_name in user.children(primary=False) - assert set(experiment.parents(primary=False)) == {user.full_table_name} - assert set( - s.full_table_name - for s in experiment.parents(primary=False, as_objects=True) - ) == {user.full_table_name} - - assert experiment.full_table_name in subject.descendants() - assert experiment.full_table_name in { - s.full_table_name for s in subject.descendants(as_objects=True) - } - assert subject.full_table_name in experiment.ancestors() - assert subject.full_table_name in { - s.full_table_name for s in experiment.ancestors(as_objects=True) - } - - assert trial.full_table_name in experiment.descendants() - assert trial.full_table_name in { - s.full_table_name for s in experiment.descendants(as_objects=True) - } - assert experiment.full_table_name in trial.ancestors() - assert experiment.full_table_name in { - s.full_table_name for s in trial.ancestors(as_objects=True) - } - - assert set(trial.children(primary=True)) == { - ephys.full_table_name, - trial.Condition.full_table_name, - } - assert set(trial.parts()) == {trial.Condition.full_table_name} - assert set(s.full_table_name for s in trial.parts(as_objects=True)) == { - trial.Condition.full_table_name - } - assert set(ephys.parents(primary=True)) == {trial.full_table_name} - assert set( - s.full_table_name for s in ephys.parents(primary=True, as_objects=True) - ) == {trial.full_table_name} - assert set(ephys.children(primary=True)) == {channel.full_table_name} - assert set( - s.full_table_name for s in ephys.children(primary=True, as_objects=True) - ) == {channel.full_table_name} - assert set(channel.parents(primary=True)) == {ephys.full_table_name} - assert set( - s.full_table_name for s in channel.parents(primary=True, as_objects=True) - ) == {ephys.full_table_name} - - def test_descendants_only_contain_part_table(self, schema_any): - """issue #927""" - - class A(dj.Manual): - definition = """ - a: int - """ - class B(dj.Manual): + class Part(dj.Part): definition = """ - -> A - b: int + -> master + -> B """ - class Master(dj.Manual): - definition = """ - table_master: int - """ + context = dict(A=A, B=B, Master=Master) + schema_any(A, context=context) + schema_any(B, context=context) + schema_any(Master, context=context) + assert A.descendants() == [ + "`djtest_test1`.`a`", + "`djtest_test1`.`b`", + "`djtest_test1`.`master__part`", + ] + +def test_bad_attribute_name(schema_any): + class BadName(dj.Manual): + definition = """ + Bad_name : int + """ - class Part(dj.Part): - definition = """ - -> master - -> B - """ - - context = dict(A=A, B=B, Master=Master) - schema_any(A, context=context) - schema_any(B, context=context) - schema_any(Master, context=context) - assert A.descendants() == [ - "`djtest_test1`.`a`", - "`djtest_test1`.`b`", - "`djtest_test1`.`master__part`", - ] - - def test_bad_attribute_name(self, schema_any): - class BadName(dj.Manual): - definition = """ - Bad_name : int - """ + with pytest.raises(dj.DataJointError): + schema_any(BadName) - with pytest.raises(dj.DataJointError): - schema_any(BadName) +def test_bad_fk_rename(schema_any): + 
"""issue #381""" - def test_bad_fk_rename(self, schema_any): - """issue #381""" + class A(dj.Manual): + definition = """ + a : int + """ - class A(dj.Manual): - definition = """ - a : int - """ + class B(dj.Manual): + definition = """ + b -> A # invalid, the new syntax is (b) -> A + """ - class B(dj.Manual): - definition = """ - b -> A # invalid, the new syntax is (b) -> A - """ + schema_any(A) + with pytest.raises(dj.DataJointError): + schema_any(B) - schema_any(A) - with pytest.raises(dj.DataJointError): - schema_any(B) +def test_primary_nullable_foreign_key(schema_any): + class Q(dj.Manual): + definition = """ + -> [nullable] Experiment + """ - def test_primary_nullable_foreign_key(self, schema_any): - class Q(dj.Manual): - definition = """ - -> [nullable] Experiment - """ + with pytest.raises(dj.DataJointError): + schema_any(Q) - with pytest.raises(dj.DataJointError): - schema_any(Q) +def test_invalid_foreign_key_option(schema_any): + class R(dj.Manual): + definition = """ + -> Experiment + ---- + -> [optional] User + """ - def test_invalid_foreign_key_option(self, schema_any): - class R(dj.Manual): - definition = """ - -> Experiment - ---- - -> [optional] User - """ + with pytest.raises(dj.DataJointError): + schema_any(R) - with pytest.raises(dj.DataJointError): - schema_any(R) +def test_unsupported_datatype(schema_any): + class Q(dj.Manual): + definition = """ + experiment : int + --- + description : text + """ - def test_unsupported_datatype(self, schema_any): - class Q(dj.Manual): - definition = """ - experiment : int - --- - description : text - """ + with pytest.raises(dj.DataJointError): + schema_any(Q) - with pytest.raises(dj.DataJointError): - schema_any(Q) +def test_int_datatype(schema_any): + @schema_any + class Owner(dj.Manual): + definition = """ + ownerid : int + --- + car_count : integer + """ - def test_int_datatype(self, schema_any): - @schema_any - class Owner(dj.Manual): - definition = """ - ownerid : int - --- - car_count : integer - """ +def test_unsupported_int_datatype(schema_any): + class Driver(dj.Manual): + definition = """ + driverid : tinyint + --- + car_count : tinyinteger + """ - def test_unsupported_int_datatype(self, schema_any): - class Driver(dj.Manual): - definition = """ - driverid : tinyint - --- - car_count : tinyinteger - """ + with pytest.raises(dj.DataJointError): + schema_any(Driver) - with pytest.raises(dj.DataJointError): - schema_any(Driver) +def test_long_table_name(schema_any): + """ + test issue #205 -- reject table names over 64 characters in length + """ - def test_long_table_name(self, schema_any): - """ - test issue #205 -- reject table names over 64 characters in length + class WhyWouldAnyoneCreateATableNameThisLong(dj.Manual): + definition = """ + master : int """ - class WhyWouldAnyoneCreateATableNameThisLong(dj.Manual): + class WithSuchALongPartNameThatItCrashesMySQL(dj.Part): definition = """ - master : int + -> (master) """ - class WithSuchALongPartNameThatItCrashesMySQL(dj.Part): - definition = """ - -> (master) - """ - - with pytest.raises(dj.DataJointError): - schema_any(WhyWouldAnyoneCreateATableNameThisLong) + with pytest.raises(dj.DataJointError): + schema_any(WhyWouldAnyoneCreateATableNameThisLong) diff --git a/tests/test_fetch.py b/tests/test_fetch.py index b1480fa7d..eef14a140 100644 --- a/tests/test_fetch.py +++ b/tests/test_fetch.py @@ -13,35 +13,274 @@ import io -@pytest.fixture -def lang(): - yield schema.Language() - - -@pytest.fixture -def languages(lang) -> List: - og_contents = lang.contents - languages = 
og_contents.copy() - yield languages - lang.contents = og_contents - - -@pytest.fixture -def subject(): - yield schema.Subject() - +def test_getattribute(subject): + """Testing Fetch.__call__ with attributes""" + list1 = sorted(subject.proj().fetch(as_dict=True), key=itemgetter("subject_id")) + list2 = sorted(subject.fetch(dj.key), key=itemgetter("subject_id")) + for l1, l2 in zip(list1, list2): + assert l1 == l2, "Primary key is not returned correctly" + + tmp = subject.fetch(order_by="subject_id") + + subject_notes, key, real_id = subject.fetch("subject_notes", dj.key, "real_id") + + np.testing.assert_array_equal( + sorted(subject_notes), sorted(tmp["subject_notes"]) + ) + np.testing.assert_array_equal(sorted(real_id), sorted(tmp["real_id"])) + list1 = sorted(key, key=itemgetter("subject_id")) + for l1, l2 in zip(list1, list2): + assert l1 == l2, "Primary key is not returned correctly" + +def test_getattribute_for_fetch1(subject): + """Testing Fetch1.__call__ with attributes""" + assert (subject & "subject_id=10").fetch1("subject_id") == 10 + assert (subject & "subject_id=10").fetch1("subject_id", "species") == ( + 10, + "monkey", + ) + +def test_order_by(lang, languages): + """Tests order_by sorting order""" + for ord_name, ord_lang in itertools.product(*2 * [["ASC", "DESC"]]): + cur = lang.fetch(order_by=("name " + ord_name, "language " + ord_lang)) + languages.sort(key=itemgetter(1), reverse=ord_lang == "DESC") + languages.sort(key=itemgetter(0), reverse=ord_name == "DESC") + for c, l in zip(cur, languages): + assert np.all( + cc == ll for cc, ll in zip(c, l) + ), "Sorting order is different" -class TestFetch: - def test_getattribute(self, schema_any, subject): - """Testing Fetch.__call__ with attributes""" - list1 = sorted(subject.proj().fetch(as_dict=True), key=itemgetter("subject_id")) +def test_order_by_default(lang, languages): + """Tests order_by sorting order with defaults""" + cur = lang.fetch(order_by=("language", "name DESC")) + languages.sort(key=itemgetter(0), reverse=True) + languages.sort(key=itemgetter(1), reverse=False) + for c, l in zip(cur, languages): + assert np.all( + [cc == ll for cc, ll in zip(c, l)] + ), "Sorting order is different" + +def test_limit(lang): + """Test the limit kwarg""" + limit = 4 + cur = lang.fetch(limit=limit) + assert len(cur) == limit, "Length is not correct" + +def test_order_by_limit(lang, languages): + """Test the combination of order by and limit kwargs""" + cur = lang.fetch(limit=4, order_by=["language", "name DESC"]) + languages.sort(key=itemgetter(0), reverse=True) + languages.sort(key=itemgetter(1), reverse=False) + assert len(cur) == 4, "Length is not correct" + for c, l in list(zip(cur, languages))[:4]: + assert np.all( + [cc == ll for cc, ll in zip(c, l)] + ), "Sorting order is different" + +def test_head_tail(schema_any): + query = schema.User * schema.Language + n = 5 + frame = query.head(n, format="frame") + assert isinstance(frame, pandas.DataFrame) + array = query.head(n, format="array") + assert array.size == n + assert len(frame) == n + assert query.primary_key == frame.index.names + + n = 4 + frame = query.tail(n, format="frame") + array = query.tail(n, format="array") + assert array.size == n + assert len(frame) == n + assert query.primary_key == frame.index.names + +def test_limit_offset(lang, languages): + """Test the limit and offset kwargs together""" + cur = lang.fetch(offset=2, limit=4, order_by=["language", "name DESC"]) + languages.sort(key=itemgetter(0), reverse=True) + languages.sort(key=itemgetter(1), 
reverse=False) + assert len(cur) == 4, "Length is not correct" + for c, l in list(zip(cur, languages[2:6])): + assert np.all( + [cc == ll for cc, ll in zip(c, l)] + ), "Sorting order is different" + +def test_iter(lang, languages): + """Test iterator""" + cur = lang.fetch(order_by=["language", "name DESC"]) + languages.sort(key=itemgetter(0), reverse=True) + languages.sort(key=itemgetter(1), reverse=False) + for (name, lang_val), (tname, tlang) in list(zip(cur, languages)): + assert name == tname and lang_val == tlang, "Values are not the same" + # now as dict + cur = lang.fetch(as_dict=True, order_by=("language", "name DESC")) + for row, (tname, tlang) in list(zip(cur, languages)): + assert ( + row["name"] == tname and row["language"] == tlang + ), "Values are not the same" + +def test_keys(lang, languages): + """test key fetch""" + languages.sort(key=itemgetter(0), reverse=True) + languages.sort(key=itemgetter(1), reverse=False) + + lang = schema.Language() + cur = lang.fetch("name", "language", order_by=("language", "name DESC")) + cur2 = list(lang.fetch("KEY", order_by=["language", "name DESC"])) + + for c, c2 in zip(zip(*cur), cur2): + assert c == tuple(c2.values()), "Values are not the same" + +def test_attributes_as_dict(subject): + """ + Issue #595 + """ + attrs = ("species", "date_of_birth") + result = subject.fetch(*attrs, as_dict=True) + assert bool(result) and len(result) == len(subject) + assert set(result[0]) == set(attrs) + +def test_fetch1_step1(lang, languages): + assert ( + lang.contents + == languages + == [ + ("Fabian", "English"), + ("Edgar", "English"), + ("Dimitri", "English"), + ("Dimitri", "Ukrainian"), + ("Fabian", "German"), + ("Edgar", "Japanese"), + ] + ), "Unexpected contents in Language table" + key = {"name": "Edgar", "language": "Japanese"} + true = languages[-1] + dat = (lang & key).fetch1() + for k, (ke, c) in zip(true, dat.items()): + assert k == c == (lang & key).fetch1(ke), "Values are not the same" + +def test_misspelled_attribute(schema_any): + with pytest.raises(dj.DataJointError): + f = (schema.Language & 'lang = "ENGLISH"').fetch() + +def test_repr(subject): + """Test string representation of fetch, returning table preview""" + repr = subject.fetch.__repr__() + n = len(repr.strip().split("\n")) + limit = dj.config["display.limit"] + # 3 lines are used for headers (2) and summary statement (1) + assert n - 3 <= limit + +def test_fetch_none(lang): + """Test preparing attributes for getitem""" + with pytest.raises(dj.DataJointError): + lang.fetch(None) + +def test_asdict(lang): + """Test returns as dictionaries""" + d = lang.fetch(as_dict=True) + for dd in d: + assert isinstance(dd, dict) + +def test_offset(lang, languages): + """Tests offset""" + cur = lang.fetch(limit=4, offset=1, order_by=["language", "name DESC"]) + + languages.sort(key=itemgetter(0), reverse=True) + languages.sort(key=itemgetter(1), reverse=False) + assert len(cur) == 4, "Length is not correct" + for c, l in list(zip(cur, languages[1:]))[:4]: + assert np.all( + [cc == ll for cc, ll in zip(c, l)] + ), "Sorting order is different" + +def test_limit_warning(lang): + """Tests whether warning is raised if offset is used without limit.""" + logger = logging.getLogger("datajoint") + log_capture = io.StringIO() + stream_handler = logging.StreamHandler(log_capture) + log_format = logging.Formatter( + "[%(asctime)s][%(funcName)s][%(levelname)s]: %(message)s" + ) + stream_handler.setFormatter(log_format) + stream_handler.set_name("test_limit_warning") + logger.addHandler(stream_handler) + 
lang.fetch(offset=1) + + log_contents = log_capture.getvalue() + log_capture.close() + + for handler in logger.handlers: # Clean up handler + if handler.name == "test_limit_warning": + logger.removeHandler(handler) + assert "[WARNING]: Offset set, but no limit." in log_contents + +def test_len(lang): + """Tests __len__""" + assert len(lang.fetch()) == len(lang), "__len__ is not behaving properly" + +def test_fetch1_step2(lang): + """Tests whether fetch1 raises error""" + with pytest.raises(dj.DataJointError): + lang.fetch1() + +def test_fetch1_step3(lang): + """Tests whether fetch1 raises error""" + with pytest.raises(dj.DataJointError): + lang.fetch1("name") + +def test_decimal(schema_any): + """Tests that decimal fields are correctly fetched and used in restrictions, see issue #334""" + rel = schema.DecimalPrimaryKey() + assert len(rel.fetch()), "Table DecimalPrimaryKey contents are empty" + rel.insert1([decimal.Decimal("3.1415926")]) + keys = rel.fetch() + assert len(keys) > 0 + assert len(rel & keys[0]) == 1 + keys = rel.fetch(dj.key) + assert len(keys) >= 2 + assert len(rel & keys[1]) == 1 + +def test_nullable_numbers(schema_any): + """test mixture of values and nulls in numeric attributes""" + table = schema.NullableNumbers() + table.insert( + ( + ( + k, + np.random.randn(), + np.random.randint(-1000, 1000), + np.random.randn(), + ) + for k in range(10) + ) + ) + table.insert1((100, None, None, None)) + f, d, i = table.fetch("fvalue", "dvalue", "ivalue") + assert None in i + assert any(np.isnan(d)) + assert any(np.isnan(f)) + +def test_fetch_format(subject): + """test fetch_format='frame'""" + with dj.config(fetch_format="frame"): + # test if lists are both dicts + list1 = sorted( + subject.proj().fetch(as_dict=True), key=itemgetter("subject_id") + ) list2 = sorted(subject.fetch(dj.key), key=itemgetter("subject_id")) for l1, l2 in zip(list1, list2): assert l1 == l2, "Primary key is not returned correctly" + # tests if pandas dataframe tmp = subject.fetch(order_by="subject_id") + assert isinstance(tmp, pandas.DataFrame) + tmp = tmp.to_records() - subject_notes, key, real_id = subject.fetch("subject_notes", dj.key, "real_id") + subject_notes, key, real_id = subject.fetch( + "subject_notes", dj.key, "real_id" + ) np.testing.assert_array_equal( sorted(subject_notes), sorted(tmp["subject_notes"]) @@ -51,349 +290,91 @@ def test_getattribute(self, schema_any, subject): for l1, l2 in zip(list1, list2): assert l1 == l2, "Primary key is not returned correctly" - def test_getattribute_for_fetch1(self, schema_any, subject): - """Testing Fetch1.__call__ with attributes""" - assert (subject & "subject_id=10").fetch1("subject_id") == 10 - assert (subject & "subject_id=10").fetch1("subject_id", "species") == ( - 10, - "monkey", - ) - - def test_order_by(self, schema_any, lang, languages): - """Tests order_by sorting order""" - for ord_name, ord_lang in itertools.product(*2 * [["ASC", "DESC"]]): - cur = lang.fetch(order_by=("name " + ord_name, "language " + ord_lang)) - languages.sort(key=itemgetter(1), reverse=ord_lang == "DESC") - languages.sort(key=itemgetter(0), reverse=ord_name == "DESC") - for c, l in zip(cur, languages): - assert np.all( - cc == ll for cc, ll in zip(c, l) - ), "Sorting order is different" - - def test_order_by_default(self, schema_any, lang, languages): - """Tests order_by sorting order with defaults""" - cur = lang.fetch(order_by=("language", "name DESC")) - languages.sort(key=itemgetter(0), reverse=True) - languages.sort(key=itemgetter(1), reverse=False) - for c, l in 
zip(cur, languages): - assert np.all( - [cc == ll for cc, ll in zip(c, l)] - ), "Sorting order is different" - - def test_limit(self, schema_any, lang): - """Test the limit kwarg""" - limit = 4 - cur = lang.fetch(limit=limit) - assert len(cur) == limit, "Length is not correct" - - def test_order_by_limit(self, schema_any, lang, languages): - """Test the combination of order by and limit kwargs""" - cur = lang.fetch(limit=4, order_by=["language", "name DESC"]) - languages.sort(key=itemgetter(0), reverse=True) - languages.sort(key=itemgetter(1), reverse=False) - assert len(cur) == 4, "Length is not correct" - for c, l in list(zip(cur, languages))[:4]: - assert np.all( - [cc == ll for cc, ll in zip(c, l)] - ), "Sorting order is different" - - def test_head_tail(self, schema_any): - query = schema.User * schema.Language - n = 5 - frame = query.head(n, format="frame") - assert isinstance(frame, pandas.DataFrame) - array = query.head(n, format="array") - assert array.size == n - assert len(frame) == n - assert query.primary_key == frame.index.names - - n = 4 - frame = query.tail(n, format="frame") - array = query.tail(n, format="array") - assert array.size == n - assert len(frame) == n - assert query.primary_key == frame.index.names - - def test_limit_offset(self, schema_any, lang, languages): - """Test the limit and offset kwargs together""" - cur = lang.fetch(offset=2, limit=4, order_by=["language", "name DESC"]) - languages.sort(key=itemgetter(0), reverse=True) - languages.sort(key=itemgetter(1), reverse=False) - assert len(cur) == 4, "Length is not correct" - for c, l in list(zip(cur, languages[2:6])): - assert np.all( - [cc == ll for cc, ll in zip(c, l)] - ), "Sorting order is different" - - def test_iter(self, schema_any, lang, languages): - """Test iterator""" - cur = lang.fetch(order_by=["language", "name DESC"]) - languages.sort(key=itemgetter(0), reverse=True) - languages.sort(key=itemgetter(1), reverse=False) - for (name, lang_val), (tname, tlang) in list(zip(cur, languages)): - assert name == tname and lang_val == tlang, "Values are not the same" - # now as dict - cur = lang.fetch(as_dict=True, order_by=("language", "name DESC")) - for row, (tname, tlang) in list(zip(cur, languages)): - assert ( - row["name"] == tname and row["language"] == tlang - ), "Values are not the same" - - def test_keys(self, schema_any, lang, languages): - """test key fetch""" - languages.sort(key=itemgetter(0), reverse=True) - languages.sort(key=itemgetter(1), reverse=False) - - lang = schema.Language() - cur = lang.fetch("name", "language", order_by=("language", "name DESC")) - cur2 = list(lang.fetch("KEY", order_by=["language", "name DESC"])) - - for c, c2 in zip(zip(*cur), cur2): - assert c == tuple(c2.values()), "Values are not the same" - - def test_attributes_as_dict(self, schema_any, subject): - """ - Issue #595 - """ - attrs = ("species", "date_of_birth") - result = subject.fetch(*attrs, as_dict=True) - assert bool(result) and len(result) == len(subject) - assert set(result[0]) == set(attrs) - - def test_fetch1_step1(self, schema_any, lang, languages): - assert ( - lang.contents - == languages - == [ - ("Fabian", "English"), - ("Edgar", "English"), - ("Dimitri", "English"), - ("Dimitri", "Ukrainian"), - ("Fabian", "German"), - ("Edgar", "Japanese"), - ] - ), "Unexpected contents in Language table" - key = {"name": "Edgar", "language": "Japanese"} - true = languages[-1] - dat = (lang & key).fetch1() - for k, (ke, c) in zip(true, dat.items()): - assert k == c == (lang & key).fetch1(ke), "Values are 
not the same" - - def test_misspelled_attribute(self, schema_any): - with pytest.raises(dj.DataJointError): - f = (schema.Language & 'lang = "ENGLISH"').fetch() - - def test_repr(self, schema_any, subject): - """Test string representation of fetch, returning table preview""" - repr = subject.fetch.__repr__() - n = len(repr.strip().split("\n")) - limit = dj.config["display.limit"] - # 3 lines are used for headers (2) and summary statement (1) - assert n - 3 <= limit - - def test_fetch_none(self, schema_any, lang): - """Test preparing attributes for getitem""" - with pytest.raises(dj.DataJointError): - lang.fetch(None) - - def test_asdict(self, schema_any, lang): - """Test returns as dictionaries""" - d = lang.fetch(as_dict=True) - for dd in d: - assert isinstance(dd, dict) - - def test_offset(self, schema_any, lang, languages): - """Tests offset""" - cur = lang.fetch(limit=4, offset=1, order_by=["language", "name DESC"]) - - languages.sort(key=itemgetter(0), reverse=True) - languages.sort(key=itemgetter(1), reverse=False) - assert len(cur) == 4, "Length is not correct" - for c, l in list(zip(cur, languages[1:]))[:4]: - assert np.all( - [cc == ll for cc, ll in zip(c, l)] - ), "Sorting order is different" - - def test_limit_warning(self, schema_any, lang): - """Tests whether warning is raised if offset is used without limit.""" - logger = logging.getLogger("datajoint") - log_capture = io.StringIO() - stream_handler = logging.StreamHandler(log_capture) - log_format = logging.Formatter( - "[%(asctime)s][%(funcName)s][%(levelname)s]: %(message)s" - ) - stream_handler.setFormatter(log_format) - stream_handler.set_name("test_limit_warning") - logger.addHandler(stream_handler) - lang.fetch(offset=1) - - log_contents = log_capture.getvalue() - log_capture.close() - - for handler in logger.handlers: # Clean up handler - if handler.name == "test_limit_warning": - logger.removeHandler(handler) - assert "[WARNING]: Offset set, but no limit." 
in log_contents - - def test_len(self, schema_any, lang): - """Tests __len__""" - assert len(lang.fetch()) == len(lang), "__len__ is not behaving properly" - - def test_fetch1_step2(self, schema_any, lang): - """Tests whether fetch1 raises error""" - with pytest.raises(dj.DataJointError): - lang.fetch1() - - def test_fetch1_step3(self, schema_any, lang): - """Tests whether fetch1 raises error""" - with pytest.raises(dj.DataJointError): - lang.fetch1("name") - - def test_decimal(self, schema_any): - """Tests that decimal fields are correctly fetched and used in restrictions, see issue #334""" - rel = schema.DecimalPrimaryKey() - assert len(rel.fetch()), "Table DecimalPrimaryKey contents are empty" - rel.insert1([decimal.Decimal("3.1415926")]) - keys = rel.fetch() - assert len(keys) > 0 - assert len(rel & keys[0]) == 1 - keys = rel.fetch(dj.key) - assert len(keys) >= 2 - assert len(rel & keys[1]) == 1 - - def test_nullable_numbers(self, schema_any): - """test mixture of values and nulls in numeric attributes""" - table = schema.NullableNumbers() - table.insert( - ( - ( - k, - np.random.randn(), - np.random.randint(-1000, 1000), - np.random.randn(), - ) - for k in range(10) - ) - ) - table.insert1((100, None, None, None)) - f, d, i = table.fetch("fvalue", "dvalue", "ivalue") - assert None in i - assert any(np.isnan(d)) - assert any(np.isnan(f)) - - def test_fetch_format(self, schema_any, subject): - """test fetch_format='frame'""" - with dj.config(fetch_format="frame"): - # test if lists are both dicts - list1 = sorted( - subject.proj().fetch(as_dict=True), key=itemgetter("subject_id") - ) - list2 = sorted(subject.fetch(dj.key), key=itemgetter("subject_id")) - for l1, l2 in zip(list1, list2): - assert l1 == l2, "Primary key is not returned correctly" - - # tests if pandas dataframe - tmp = subject.fetch(order_by="subject_id") - assert isinstance(tmp, pandas.DataFrame) - tmp = tmp.to_records() - - subject_notes, key, real_id = subject.fetch( - "subject_notes", dj.key, "real_id" - ) - - np.testing.assert_array_equal( - sorted(subject_notes), sorted(tmp["subject_notes"]) +def test_key_fetch1(subject): + """test KEY fetch1 - issue #976""" + with dj.config(fetch_format="array"): + k1 = (subject & "subject_id=10").fetch1("KEY") + with dj.config(fetch_format="frame"): + k2 = (subject & "subject_id=10").fetch1("KEY") + assert k1 == k2 + +def test_same_secondary_attribute(schema_any): + children = (schema.Child * schema.Parent().proj()).fetch()["name"] + assert len(children) == 1 + assert children[0] == "Dan" + +def test_query_caching(schema_any): + # initialize cache directory + os.mkdir(os.path.expanduser("~/dj_query_cache")) + + with dj.config(query_cache=os.path.expanduser("~/dj_query_cache")): + conn = schema.TTest3.connection + # insert sample data and load cache + schema.TTest3.insert([dict(key=100 + i, value=200 + i) for i in range(2)]) + conn.set_query_cache(query_cache="main") + cached_res = schema.TTest3().fetch() + # attempt to insert while caching enabled + try: + schema.TTest3.insert( + [dict(key=200 + i, value=400 + i) for i in range(2)] ) - np.testing.assert_array_equal(sorted(real_id), sorted(tmp["real_id"])) - list1 = sorted(key, key=itemgetter("subject_id")) - for l1, l2 in zip(list1, list2): - assert l1 == l2, "Primary key is not returned correctly" - - def test_key_fetch1(self, schema_any, subject): - """test KEY fetch1 - issue #976""" - with dj.config(fetch_format="array"): - k1 = (subject & "subject_id=10").fetch1("KEY") - with dj.config(fetch_format="frame"): - k2 = (subject & 
"subject_id=10").fetch1("KEY") - assert k1 == k2 - - def test_same_secondary_attribute(self, schema_any): - children = (schema.Child * schema.Parent().proj()).fetch()["name"] - assert len(children) == 1 - assert children[0] == "Dan" - - def test_query_caching(self, schema_any): - # initialize cache directory - os.mkdir(os.path.expanduser("~/dj_query_cache")) - - with dj.config(query_cache=os.path.expanduser("~/dj_query_cache")): - conn = schema.TTest3.connection - # insert sample data and load cache - schema.TTest3.insert([dict(key=100 + i, value=200 + i) for i in range(2)]) - conn.set_query_cache(query_cache="main") - cached_res = schema.TTest3().fetch() - # attempt to insert while caching enabled - try: - schema.TTest3.insert( - [dict(key=200 + i, value=400 + i) for i in range(2)] - ) - assert False, "Insert allowed while query caching enabled" - except dj.DataJointError: - conn.set_query_cache() - # insert new data - schema.TTest3.insert([dict(key=600 + i, value=800 + i) for i in range(2)]) - # re-enable cache to access old results - conn.set_query_cache(query_cache="main") - previous_cache = schema.TTest3().fetch() - # verify properly cached and how to refresh results - assert all([c == p for c, p in zip(cached_res, previous_cache)]) + assert False, "Insert allowed while query caching enabled" + except dj.DataJointError: conn.set_query_cache() - uncached_res = schema.TTest3().fetch() - assert len(uncached_res) > len(cached_res) - # purge query cache - conn.purge_query_cache() - - # reset cache directory state (will fail if purge was unsuccessful) - os.rmdir(os.path.expanduser("~/dj_query_cache")) - - def test_fetch_group_by(self, schema_any): - """ - https://github.com/datajoint/datajoint-python/issues/914 - """ - - assert schema.Parent().fetch("KEY", order_by="name") == [{"parent_id": 1}] - - def test_dj_u_distinct(self, schema_any): - """ - Test developed to see if removing DISTINCT from the select statement - generation breaks the dj.U universal set implementation - """ - - # Contents to be inserted - contents = [(1, 2, 3), (2, 2, 3), (3, 3, 2), (4, 5, 5)] - schema.Stimulus.insert(contents) - - # Query the whole table - test_query = schema.Stimulus() - - # Use dj.U to create a list of unique contrast and brightness combinations - result = dj.U("contrast", "brightness") & test_query - expected_result = [ - {"contrast": 2, "brightness": 3}, - {"contrast": 3, "brightness": 2}, - {"contrast": 5, "brightness": 5}, - ] - - fetched_result = result.fetch(as_dict=True, order_by=("contrast", "brightness")) - schema.Stimulus.delete_quick() - assert fetched_result == expected_result - - def test_backslash(self, schema_any): - """ - https://github.com/datajoint/datajoint-python/issues/999 - """ - expected = "She\\Hulk" - schema.Parent.insert([(2, expected)]) - q = schema.Parent & dict(name=expected) - assert q.fetch1("name") == expected - q.delete() + # insert new data + schema.TTest3.insert([dict(key=600 + i, value=800 + i) for i in range(2)]) + # re-enable cache to access old results + conn.set_query_cache(query_cache="main") + previous_cache = schema.TTest3().fetch() + # verify properly cached and how to refresh results + assert all([c == p for c, p in zip(cached_res, previous_cache)]) + conn.set_query_cache() + uncached_res = schema.TTest3().fetch() + assert len(uncached_res) > len(cached_res) + # purge query cache + conn.purge_query_cache() + + # reset cache directory state (will fail if purge was unsuccessful) + os.rmdir(os.path.expanduser("~/dj_query_cache")) + +def 
test_fetch_group_by(schema_any): + """ + https://github.com/datajoint/datajoint-python/issues/914 + """ + + assert schema.Parent().fetch("KEY", order_by="name") == [{"parent_id": 1}] + +def test_dj_u_distinct(schema_any): + """ + Test developed to see if removing DISTINCT from the select statement + generation breaks the dj.U universal set implementation + """ + + # Contents to be inserted + contents = [(1, 2, 3), (2, 2, 3), (3, 3, 2), (4, 5, 5)] + schema.Stimulus.insert(contents) + + # Query the whole table + test_query = schema.Stimulus() + + # Use dj.U to create a list of unique contrast and brightness combinations + result = dj.U("contrast", "brightness") & test_query + expected_result = [ + {"contrast": 2, "brightness": 3}, + {"contrast": 3, "brightness": 2}, + {"contrast": 5, "brightness": 5}, + ] + + fetched_result = result.fetch(as_dict=True, order_by=("contrast", "brightness")) + schema.Stimulus.delete_quick() + assert fetched_result == expected_result + +def test_backslash(schema_any): + """ + https://github.com/datajoint/datajoint-python/issues/999 + """ + expected = "She\\Hulk" + schema.Parent.insert([(2, expected)]) + q = schema.Parent & dict(name=expected) + assert q.fetch1("name") == expected + q.delete() diff --git a/tests/test_fetch_same.py b/tests/test_fetch_same.py index 4935bb037..8f4cac5f2 100644 --- a/tests/test_fetch_same.py +++ b/tests/test_fetch_same.py @@ -1,5 +1,5 @@ import pytest -from . import PREFIX, CONN_INFO +from . import PREFIX import numpy as np import datajoint as dj @@ -16,11 +16,11 @@ class ProjData(dj.Manual): @pytest.fixture -def schema_fetch_same(connection_root): +def schema_fetch_same(connection_test): schema = dj.Schema( PREFIX + "_fetch_same", context=dict(ProjData=ProjData), - connection=connection_root, + connection=connection_test, ) schema(ProjData) ProjData().insert( @@ -46,27 +46,21 @@ def schema_fetch_same(connection_root): schema.drop() -@pytest.fixture -def projdata(): - yield ProjData() - - -class TestFetchSame: - def test_object_conversion_one(self, schema_fetch_same, projdata): - new = projdata.proj(sub="resp").fetch("sub") - assert new.dtype == np.float64 +def test_object_conversion_one(schema_fetch_same): + new = ProjData().proj(sub="resp").fetch("sub") + assert new.dtype == np.float64 - def test_object_conversion_two(self, schema_fetch_same, projdata): - [sub, add] = projdata.proj(sub="resp", add="sim").fetch("sub", "add") - assert sub.dtype == np.float64 - assert add.dtype == np.float64 +def test_object_conversion_two(schema_fetch_same): + [sub, add] = ProjData().proj(sub="resp", add="sim").fetch("sub", "add") + assert sub.dtype == np.float64 + assert add.dtype == np.float64 - def test_object_conversion_all(self, schema_fetch_same, projdata): - new = projdata.proj(sub="resp", add="sim").fetch() - assert new["sub"].dtype == np.float64 - assert new["add"].dtype == np.float64 +def test_object_conversion_all(schema_fetch_same): + new = ProjData().proj(sub="resp", add="sim").fetch() + assert new["sub"].dtype == np.float64 + assert new["add"].dtype == np.float64 - def test_object_no_convert(self, schema_fetch_same, projdata): - new = projdata.fetch() - assert new["big"].dtype == "object" - assert new["blah"].dtype == "object" +def test_object_no_convert(schema_fetch_same): + new = ProjData().fetch() + assert new["big"].dtype == "object" + assert new["blah"].dtype == "object" diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 37974ac86..ebe257f8b 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -6,40 +6,35 @@ import 
datajoint as dj -@pytest.fixture -def subjects(): - yield schema.Subject() - - -def test_reserve_job(schema_any, subjects): - assert subjects +def test_reserve_job(subject, schema_any): + assert subject table_name = "fake_table" # reserve jobs - for key in subjects.fetch("KEY"): + for key in subject.fetch("KEY"): assert schema_any.jobs.reserve(table_name, key), "failed to reserve a job" # refuse jobs - for key in subjects.fetch("KEY"): + for key in subject.fetch("KEY"): assert not schema_any.jobs.reserve( table_name, key ), "failed to respect reservation" # complete jobs - for key in subjects.fetch("KEY"): + for key in subject.fetch("KEY"): schema_any.jobs.complete(table_name, key) assert not schema_any.jobs, "failed to free jobs" # reserve jobs again - for key in subjects.fetch("KEY"): + for key in subject.fetch("KEY"): assert schema_any.jobs.reserve(table_name, key), "failed to reserve new jobs" # finish with error - for key in subjects.fetch("KEY"): + for key in subject.fetch("KEY"): schema_any.jobs.error(table_name, key, "error message") # refuse jobs with errors - for key in subjects.fetch("KEY"): + for key in subject.fetch("KEY"): assert not schema_any.jobs.reserve( table_name, key ), "failed to ignore error jobs" @@ -95,7 +90,7 @@ def test_suppress_dj_errors(schema_any): assert len(schema.DjExceptionName()) == len(schema_any.jobs) > 0 -def test_long_error_message(schema_any, subjects): +def test_long_error_message(subject, schema_any): # create long error message long_error_message = "".join( random.choice(string.ascii_letters) for _ in range(ERROR_MESSAGE_LENGTH + 100) @@ -103,10 +98,10 @@ def test_long_error_message(schema_any, subjects): short_error_message = "".join( random.choice(string.ascii_letters) for _ in range(ERROR_MESSAGE_LENGTH // 2) ) - assert subjects + assert subject table_name = "fake_table" - key = subjects.fetch("KEY")[0] + key = subject.fetch("KEY")[0] # test long error message schema_any.jobs.reserve(table_name, key) @@ -131,7 +126,7 @@ def test_long_error_message(schema_any, subjects): schema_any.jobs.delete() -def test_long_error_stack(schema_any, subjects): +def test_long_error_stack(subject, schema_any): # create long error stack STACK_SIZE = ( 89942 # Does not fit into small blob (should be 64k, but found to be higher) @@ -139,10 +134,10 @@ def test_long_error_stack(schema_any, subjects): long_error_stack = "".join( random.choice(string.ascii_letters) for _ in range(STACK_SIZE) ) - assert subjects + assert subject table_name = "fake_table" - key = subjects.fetch("KEY")[0] + key = subject.fetch("KEY")[0] # test long error stack schema_any.jobs.reserve(table_name, key) diff --git a/tests/test_json.py b/tests/test_json.py index c1caaeedd..a63baaca2 100644 --- a/tests/test_json.py +++ b/tests/test_json.py @@ -7,7 +7,7 @@ from . 
import PREFIX if Version(dj.conn().query("select @@version;").fetchone()[0]) < Version("8.0.0"): - pytest.skip("skipping windows-only tests", allow_module_level=True) + pytest.skip("These tests require MySQL >= v8.0.0", allow_module_level=True) class Team(dj.Lookup): @@ -65,14 +65,14 @@ class Team(dj.Lookup): @pytest.fixture -def schema(connection_test): - schema = dj.Schema(PREFIX + "_json", context=dict(), connection=connection_test) +def schema_json(connection_test): + schema = dj.Schema(PREFIX + "_json", context=dict(Team=Team), connection=connection_test) schema(Team) yield schema schema.drop() -def test_insert_update(schema): +def test_insert_update(schema_json): car = { "name": "Discovery", "length": 22.9, @@ -108,7 +108,7 @@ def test_insert_update(schema): assert not q -def test_describe(schema): +def test_describe(schema_json): rel = Team() context = inspect.currentframe().f_globals s1 = declare(rel.full_table_name, rel.definition, context) @@ -116,7 +116,7 @@ def test_describe(schema): assert s1 == s2 -def test_restrict(schema): +def test_restrict(schema_json): # dict assert (Team & {"car.name": "Chaching"}).fetch1("name") == "business" @@ -176,7 +176,7 @@ def test_restrict(schema): ).fetch1("name") == "business", "2nd `headlight` object did not match" -def test_proj(schema): +def test_proj(schema_json): # proj necessary since we need to rename indexed value into a proper attribute name assert Team.proj(car_length="car.length").fetch( as_dict=True, order_by="car_length" diff --git a/tests/test_nan.py b/tests/test_nan.py index 299c0d9f8..deaa097ec 100644 --- a/tests/test_nan.py +++ b/tests/test_nan.py @@ -12,36 +12,39 @@ class NanTest(dj.Manual): """ -@pytest.fixture(scope="module") -def schema(connection_test): - schema = dj.Schema(PREFIX + "_nantest", connection=connection_test) +@pytest.fixture +def schema_nan(connection_test): + schema = dj.Schema(PREFIX + "_nantest", context=dict(NanTest=NanTest), connection=connection_test) schema(NanTest) yield schema schema.drop() -@pytest.fixture(scope="class") -def setup_class(request, schema): +@pytest.fixture +def arr_a(): + return np.array([0, 1 / 3, np.nan, np.pi, np.nan]) + + +@pytest.fixture +def schema_nan_pop(schema_nan, arr_a): rel = NanTest() with dj.config(safemode=False): rel.delete() - a = np.array([0, 1 / 3, np.nan, np.pi, np.nan]) - rel.insert(((i, value) for i, value in enumerate(a))) - request.cls.rel = rel - request.cls.a = a - - -class TestNaNInsert: - def test_insert_nan(self, setup_class): - """Test fetching of null values""" - b = self.rel.fetch("value", order_by="id") - assert (np.isnan(self.a) == np.isnan(b)).all(), "incorrect handling of Nans" - assert np.allclose( - self.a[np.logical_not(np.isnan(self.a))], b[np.logical_not(np.isnan(b))] - ), "incorrect storage of floats" - - def test_nulls_do_not_affect_primary_keys(self, setup_class): - """Test against a case that previously caused a bug when skipping existing entries.""" - self.rel.insert( - ((i, value) for i, value in enumerate(self.a)), skip_duplicates=True - ) + rel.insert(((i, value) for i, value in enumerate(arr_a))) + return schema_nan + + +def test_insert_nan(schema_nan_pop, arr_a): + """Test fetching of null values""" + b = NanTest().fetch("value", order_by="id") + assert (np.isnan(arr_a) == np.isnan(b)).all(), "incorrect handling of Nans" + assert np.allclose( + arr_a[np.logical_not(np.isnan(arr_a))], b[np.logical_not(np.isnan(b))] + ), "incorrect storage of floats" + + +def test_nulls_do_not_affect_primary_keys(schema_nan_pop, arr_a): + """Test 
against a case that previously caused a bug when skipping existing entries.""" + NanTest().insert( + ((i, value) for i, value in enumerate(arr_a)), skip_duplicates=True + ) diff --git a/tests/test_plugin.py b/tests/test_plugin.py index ddb8b3bfc..95933d2ff 100644 --- a/tests/test_plugin.py +++ b/tests/test_plugin.py @@ -23,8 +23,7 @@ def test_normal_djerror(): assert e.__cause__ is None -@pytest.mark.parametrize("category", ("connection",)) -def test_verified_djerror(category): +def test_verified_djerror(category="connection"): try: curr_plugins = getattr(p, "{}_plugins".format(category)) setattr( @@ -42,8 +41,7 @@ def test_verified_djerror_type(): test_verified_djerror(category="type") -@pytest.mark.parametrize("category", ("connection",)) -def test_unverified_djerror(category): +def test_unverified_djerror(category="connection"): try: curr_plugins = getattr(p, "{}_plugins".format(category)) setattr( diff --git a/tests/test_privileges.py b/tests/test_privileges.py index 949dbc8aa..fc20b430e 100644 --- a/tests/test_privileges.py +++ b/tests/test_privileges.py @@ -1,25 +1,24 @@ import os import pytest import datajoint as dj -from . import schema, CONN_INFO_ROOT, PREFIX -from . import schema_privileges +from . import schema, PREFIX, schema_privileges namespace = locals() @pytest.fixture def schema_priv(connection_test): - schema_priv = dj.Schema( + schema = dj.Schema( context=schema_privileges.LOCALS_PRIV, connection=connection_test, ) - schema_priv(schema_privileges.Parent) - schema_priv(schema_privileges.Child) - schema_priv(schema_privileges.NoAccess) - schema_priv(schema_privileges.NoAccessAgain) - yield schema_priv - if schema_priv.is_activated(): - schema_priv.drop() + schema(schema_privileges.Parent) + schema(schema_privileges.Child) + schema(schema_privileges.NoAccess) + schema(schema_privileges.NoAccessAgain) + yield schema + if schema.is_activated(): + schema.drop() @pytest.fixture diff --git a/tests/test_reconnection.py b/tests/test_reconnection.py index 262531243..5eea4af11 100644 --- a/tests/test_reconnection.py +++ b/tests/test_reconnection.py @@ -5,32 +5,28 @@ import pytest import datajoint as dj from datajoint import DataJointError -from . 
import CONN_INFO @pytest.fixture -def conn(connection_root): - return dj.conn(reset=True, **CONN_INFO) +def conn(connection_root, db_creds_root): + return dj.conn(reset=True, **db_creds_root) -class TestReconnect: - """ - Test reconnection - """ +def test_close(conn): + assert conn.is_connected, "Connection should be alive" + conn.close() + assert not conn.is_connected, "Connection should now be closed" - def test_close(self, conn): - assert conn.is_connected, "Connection should be alive" - conn.close() - assert not conn.is_connected, "Connection should now be closed" - def test_reconnect(self, conn): - assert conn.is_connected, "Connection should be alive" +def test_reconnect(conn): + assert conn.is_connected, "Connection should be alive" + conn.close() + conn.query("SHOW DATABASES;", reconnect=True).fetchall() + assert conn.is_connected, "Connection should be alive" + + +def test_reconnect_throws_error_in_transaction(conn): + assert conn.is_connected, "Connection should be alive" + with conn.transaction, pytest.raises(DataJointError): conn.close() conn.query("SHOW DATABASES;", reconnect=True).fetchall() - assert conn.is_connected, "Connection should be alive" - - def test_reconnect_throws_error_in_transaction(self, conn): - assert conn.is_connected, "Connection should be alive" - with conn.transaction, pytest.raises(DataJointError): - conn.close() - conn.query("SHOW DATABASES;", reconnect=True).fetchall() diff --git a/tests/test_relation.py b/tests/test_relation.py index a40b17d4e..169ffc29a 100644 --- a/tests/test_relation.py +++ b/tests/test_relation.py @@ -6,7 +6,6 @@ import datajoint as dj from datajoint.table import Table from unittest.mock import patch - from . import schema diff --git a/tests/test_relation_u.py b/tests/test_relation_u.py index 50997662d..f9a0f3a8f 100644 --- a/tests/test_relation_u.py +++ b/tests/test_relation_u.py @@ -5,83 +5,77 @@ from .schema_simple import * -class TestU: - """ - Test tables: insert, delete - """ +# def setup_class(cls): +# cls.user = User() +# cls.language = Language() +# cls.subject = Subject() +# cls.experiment = Experiment() +# cls.trial = Trial() +# cls.ephys = Ephys() +# cls.channel = Ephys.Channel() +# cls.img = Image() +# cls.trash = UberTrash() - @classmethod - def setup_class(cls): - cls.user = User() - cls.language = Language() - cls.subject = Subject() - cls.experiment = Experiment() - cls.trial = Trial() - cls.ephys = Ephys() - cls.channel = Ephys.Channel() - cls.img = Image() - cls.trash = UberTrash() +def test_restriction(lang, languages, trial): + language_set = {s[1] for s in languages} + rel = dj.U("language") & lang + assert list(rel.heading.names) == ["language"] + assert len(rel) == len(language_set) + assert set(rel.fetch("language")) == language_set + # Test for issue #342 + rel = trial * dj.U("start_time") + assert list(rel.primary_key) == trial.primary_key + ["start_time"] + assert list(rel.primary_key) == list((rel & "trial_id>3").primary_key) + assert list((dj.U("start_time") & trial).primary_key) == ["start_time"] - def test_restriction(self, schema_any): - language_set = {s[1] for s in self.language.contents} - rel = dj.U("language") & self.language - assert list(rel.heading.names) == ["language"] - assert len(rel) == len(language_set) - assert set(rel.fetch("language")) == language_set - # Test for issue #342 - rel = self.trial * dj.U("start_time") - assert list(rel.primary_key) == self.trial.primary_key + ["start_time"] - assert list(rel.primary_key) == list((rel & "trial_id>3").primary_key) - assert 
list((dj.U("start_time") & self.trial).primary_key) == ["start_time"] +def test_invalid_restriction(schema_any): + with raises(dj.DataJointError): + result = dj.U("color") & dict(color="red") - def test_invalid_restriction(self, schema_any): - with raises(dj.DataJointError): - result = dj.U("color") & dict(color="red") +def test_ineffective_restriction(lang): + rel = lang & dj.U("language") + assert rel.make_sql() == lang.make_sql() - def test_ineffective_restriction(self, schema_any): - rel = self.language & dj.U("language") - assert rel.make_sql() == self.language.make_sql() +def test_join(experiment): + rel = experiment * dj.U("experiment_date") + assert experiment.primary_key == ["subject_id", "experiment_id"] + assert rel.primary_key == experiment.primary_key + ["experiment_date"] - def test_join(self, schema_any): - rel = self.experiment * dj.U("experiment_date") - assert self.experiment.primary_key == ["subject_id", "experiment_id"] - assert rel.primary_key == self.experiment.primary_key + ["experiment_date"] + rel = dj.U("experiment_date") * experiment + assert experiment.primary_key == ["subject_id", "experiment_id"] + assert rel.primary_key == experiment.primary_key + ["experiment_date"] - rel = dj.U("experiment_date") * self.experiment - assert self.experiment.primary_key == ["subject_id", "experiment_id"] - assert rel.primary_key == self.experiment.primary_key + ["experiment_date"] +def test_invalid_join(schema_any): + with raises(dj.DataJointError): + rel = dj.U("language") * dict(language="English") - def test_invalid_join(self, schema_any): - with raises(dj.DataJointError): - rel = dj.U("language") * dict(language="English") +def test_repr_without_attrs(schema_any): + """test dj.U() display""" + query = dj.U().aggr(Language, n="count(*)") + repr(query) - def test_repr_without_attrs(self, schema_any): - """test dj.U() display""" - query = dj.U().aggr(Language, n="count(*)") - repr(query) +def test_aggregations(schema_any): + lang = Language() + # test total aggregation on expression object + n1 = dj.U().aggr(lang, n="count(*)").fetch1("n") + assert n1 == len(lang.fetch()) + # test total aggregation on expression class + n2 = dj.U().aggr(Language, n="count(*)").fetch1("n") + assert n1 == n2 + rel = dj.U("language").aggr(Language, number_of_speakers="count(*)") + assert len(rel) == len(set(l[1] for l in Language.contents)) + assert (rel & 'language="English"').fetch1("number_of_speakers") == 3 - def test_aggregations(self, schema_any): - lang = Language() - # test total aggregation on expression object - n1 = dj.U().aggr(lang, n="count(*)").fetch1("n") - assert n1 == len(lang.fetch()) - # test total aggregation on expression class - n2 = dj.U().aggr(Language, n="count(*)").fetch1("n") - assert n1 == n2 - rel = dj.U("language").aggr(Language, number_of_speakers="count(*)") - assert len(rel) == len(set(l[1] for l in Language.contents)) - assert (rel & 'language="English"').fetch1("number_of_speakers") == 3 +def test_argmax(schema_any): + rel = TTest() + # get the tuples corresponding to the maximum value + mx = (rel * dj.U().aggr(rel, mx="max(value)")) & "mx=value" + assert mx.fetch("value")[0] == max(rel.fetch("value")) - def test_argmax(self, schema_any): - rel = TTest() - # get the tuples corresponding to the maximum value - mx = (rel * dj.U().aggr(rel, mx="max(value)")) & "mx=value" - assert mx.fetch("value")[0] == max(rel.fetch("value")) - - def test_aggr(self, schema_any, schema_simp): - rel = ArgmaxTest() - amax1 = (dj.U("val") * rel) & dj.U("secondary_key").aggr(rel, 
val="min(val)") - amax2 = (dj.U("val") * rel) * dj.U("secondary_key").aggr(rel, val="min(val)") - assert ( - len(amax1) == len(amax2) == rel.n - ), "Aggregated argmax with join and restriction does not yield the same length." +def test_aggr(schema_any, schema_simp): + rel = ArgmaxTest() + amax1 = (dj.U("val") * rel) & dj.U("secondary_key").aggr(rel, val="min(val)") + amax2 = (dj.U("val") * rel) * dj.U("secondary_key").aggr(rel, val="min(val)") + assert ( + len(amax1) == len(amax2) == rel.n + ), "Aggregated argmax with join and restriction does not yield the same length." diff --git a/tests/test_relational_operand.py b/tests/test_relational_operand.py index 06adee5c8..acd117509 100644 --- a/tests/test_relational_operand.py +++ b/tests/test_relational_operand.py @@ -5,34 +5,9 @@ import datetime import numpy as np import datajoint as dj -from .schema_simple import ( - A, - B, - D, - E, - F, - L, - DataA, - DataB, - TTestUpdate, - IJ, - JI, - ReservedWord, - OutfitLaunch, -) -from .schema import ( - Experiment, - TTest3, - Trial, - Ephys, - Child, - Parent, - SubjectA, - SessionA, - SessionStatusA, - SessionDateA, -) -from . import PREFIX, CONN_INFO +from .schema_simple import * +from .schema import * +from . import PREFIX @pytest.fixture diff --git a/tests/test_s3.py b/tests/test_s3.py index 090d6acf0..b5babdd8b 100644 --- a/tests/test_s3.py +++ b/tests/test_s3.py @@ -1,50 +1,48 @@ import pytest -import urllib3 -import certifi from .schema_external import SimpleRemote from datajoint.errors import DataJointError from datajoint.hash import uuid_from_buffer from datajoint.blob import pack -from . import S3_CONN_INFO from minio import Minio -class TestS3: - def test_connection(self, http_client, minio_client): - assert minio_client.bucket_exists(S3_CONN_INFO["bucket"]) +def test_connection(http_client, minio_client, s3_creds): + assert minio_client.bucket_exists(s3_creds["bucket"]) - def test_connection_secure(self, minio_client): - assert minio_client.bucket_exists(S3_CONN_INFO["bucket"]) - def test_remove_object_exception(self, schema_ext): - # https://github.com/datajoint/datajoint-python/issues/952 +def test_connection_secure(minio_client, s3_creds): + assert minio_client.bucket_exists(s3_creds["bucket"]) - # Insert some test data and remove it so that the external table is populated - test = [1, [1, 2, 3]] - SimpleRemote.insert1(test) - SimpleRemote.delete() - # Save the old external table minio client - old_client = schema_ext.external["share"].s3.client +def test_remove_object_exception(schema_ext, s3_creds): + # https://github.com/datajoint/datajoint-python/issues/952 - # Apply our new minio client which has a user that does not exist - schema_ext.external["share"].s3.client = Minio( - S3_CONN_INFO["endpoint"], - access_key="jeffjeff", - secret_key="jeffjeff", - secure=False, - ) + # Insert some test data and remove it so that the external table is populated + test = [1, [1, 2, 3]] + SimpleRemote.insert1(test) + SimpleRemote.delete() - # This method returns a list of errors - error_list = schema_ext.external["share"].delete( - delete_external_files=True, errors_as_string=False - ) + # Save the old external table minio client + old_client = schema_ext.external["share"].s3.client - # Teardown - schema_ext.external["share"].s3.client = old_client - schema_ext.external["share"].delete(delete_external_files=True) + # Apply our new minio client which has a user that does not exist + schema_ext.external["share"].s3.client = Minio( + s3_creds["endpoint"], + access_key="jeffjeff", + 
secret_key="jeffjeff", + secure=False, + ) - with pytest.raises(DataJointError): - # Raise the error we want if the error matches the expected uuid - if str(error_list[0][0]) == str(uuid_from_buffer(pack(test[1]))): - raise error_list[0][2] + # This method returns a list of errors + error_list = schema_ext.external["share"].delete( + delete_external_files=True, errors_as_string=False + ) + + # Teardown + schema_ext.external["share"].s3.client = old_client + schema_ext.external["share"].delete(delete_external_files=True) + + with pytest.raises(DataJointError): + # Raise the error we want if the error matches the expected uuid + if str(error_list[0][0]) == str(uuid_from_buffer(pack(test[1]))): + raise error_list[0][2] diff --git a/tests/test_schema_keywords.py b/tests/test_schema_keywords.py index 1cad98efd..23ef645db 100644 --- a/tests/test_schema_keywords.py +++ b/tests/test_schema_keywords.py @@ -1,6 +1,6 @@ -from . import PREFIX -import datajoint as dj import pytest +import datajoint as dj +from . import PREFIX class A(dj.Manual): @@ -34,7 +34,7 @@ class D(B): @pytest.fixture -def schema(connection_test): +def schema_kwd(connection_test): schema = dj.Schema(PREFIX + "_keywords", connection=connection_test) schema(A) schema(D) @@ -42,7 +42,7 @@ def schema(connection_test): schema.drop() -def test_inherited_part_table(schema): +def test_inherited_part_table(schema_kwd): assert "a_id" in D().heading.attributes assert "b_id" in D().heading.attributes assert "a_id" in D.C().heading.attributes From fc0cf35ca1362c01a3b93442302d88d4e551b8ea Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 19:34:29 -0700 Subject: [PATCH 183/212] Move all CONN_INFO to fixtures --- tests/__init__.py | 23 ----------------------- tests/schema_external.py | 2 +- tests/schema_uuid.py | 2 +- tests/test_adapted_attributes.py | 5 +++-- 4 files changed, 5 insertions(+), 27 deletions(-) diff --git a/tests/__init__.py b/tests/__init__.py index 219f7f5c0..e12feabe3 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,26 +1,3 @@ -import datajoint as dj -from packaging import version -import pytest import os PREFIX = os.environ.get("DJ_TEST_DB_PREFIX", "djtest") - -# Connection for testing -CONN_INFO = dict( - host=os.environ.get("DJ_TEST_HOST", "fakeservices.datajoint.io"), - user=os.environ.get("DJ_TEST_USER", "datajoint"), - password=os.environ.get("DJ_TEST_PASSWORD", "datajoint"), -) - -CONN_INFO_ROOT = dict( - host=os.environ.get("DJ_HOST", "fakeservices.datajoint.io"), - user=os.environ.get("DJ_USER", "root"), - password=os.environ.get("DJ_PASS", "simple"), -) - -S3_CONN_INFO = dict( - endpoint=os.environ.get("S3_ENDPOINT", "fakeservices.datajoint.io"), - access_key=os.environ.get("S3_ACCESS_KEY", "datajoint"), - secret_key=os.environ.get("S3_SECRET_KEY", "datajoint"), - bucket=os.environ.get("S3_BUCKET", "datajoint.test"), -) diff --git a/tests/schema_external.py b/tests/schema_external.py index 294ecb070..f29aeb8da 100644 --- a/tests/schema_external.py +++ b/tests/schema_external.py @@ -5,7 +5,7 @@ import tempfile import inspect import datajoint as dj -from . import PREFIX, CONN_INFO, S3_CONN_INFO +from . import PREFIX import numpy as np diff --git a/tests/schema_uuid.py b/tests/schema_uuid.py index 6bf994b5b..914fedfad 100644 --- a/tests/schema_uuid.py +++ b/tests/schema_uuid.py @@ -1,7 +1,7 @@ import uuid import inspect import datajoint as dj -from . import PREFIX, CONN_INFO +from . 
import PREFIX top_level_namespace_id = uuid.UUID("00000000-0000-0000-0000-000000000000") diff --git a/tests/test_adapted_attributes.py b/tests/test_adapted_attributes.py index c0fb6b0eb..ee159002d 100644 --- a/tests/test_adapted_attributes.py +++ b/tests/test_adapted_attributes.py @@ -6,7 +6,7 @@ from itertools import zip_longest from . import schema_adapted from .schema_adapted import Connectivity, Layout -from . import PREFIX, S3_CONN_INFO +from . import PREFIX SCHEMA_NAME = PREFIX + "_test_custom_datatype" @@ -22,11 +22,12 @@ def schema_ad( adapted_graph_instance, enable_adapted_types, enable_filepath_feature, + s3_creds, tmpdir ): dj.config["stores"] = { "repo-s3": dict( - S3_CONN_INFO, + s3_creds, protocol="s3", location="adapted/repo", stage=str(tmpdir) From b689ec29202a6a63fd45b757fbc14a6fa162ae30 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 19:34:49 -0700 Subject: [PATCH 184/212] Session scoped s3_creds --- tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 0409565dc..6fc540d4e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -182,7 +182,7 @@ def connection_test(connection_root): connection.close() -@pytest.fixture +@pytest.fixture(scope="session") def s3_creds() -> Dict: return dict( endpoint=os.environ.get("S3_ENDPOINT", "fakeservices.datajoint.io"), From c3e96e90360ab1350c2b42559d5975b2ccb8da8c Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 19:35:50 -0700 Subject: [PATCH 185/212] Format with black --- tests/conftest.py | 1 - tests/test_adapted_attributes.py | 7 ++-- tests/test_autopopulate.py | 17 +++------ tests/test_blob_matlab.py | 12 ++----- tests/test_connection.py | 4 ++- tests/test_declare.py | 20 +++++++++-- tests/test_fetch.py | 61 +++++++++++++++++++------------- tests/test_fetch_same.py | 3 ++ tests/test_json.py | 4 ++- tests/test_nan.py | 4 ++- tests/test_relation_u.py | 9 +++++ 11 files changed, 85 insertions(+), 57 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 6fc540d4e..dccc18ce9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -495,4 +495,3 @@ def channel(schema_any): @pytest.fixture def trash(schema_any): return schema.UberTrash() - diff --git a/tests/test_adapted_attributes.py b/tests/test_adapted_attributes.py index ee159002d..997da2131 100644 --- a/tests/test_adapted_attributes.py +++ b/tests/test_adapted_attributes.py @@ -23,14 +23,11 @@ def schema_ad( enable_adapted_types, enable_filepath_feature, s3_creds, - tmpdir + tmpdir, ): dj.config["stores"] = { "repo-s3": dict( - s3_creds, - protocol="s3", - location="adapted/repo", - stage=str(tmpdir) + s3_creds, protocol="s3", location="adapted/repo", stage=str(tmpdir) ) } context = { diff --git a/tests/test_autopopulate.py b/tests/test_autopopulate.py index d1225a140..4fc4b9f77 100644 --- a/tests/test_autopopulate.py +++ b/tests/test_autopopulate.py @@ -10,10 +10,7 @@ def test_populate(trial, subject, experiment, ephys, channel): assert subject, "root tables are empty" assert not experiment, "table already filled?" experiment.populate() - assert ( - len(experiment) - == len(subject) * experiment.fake_experiments_per_subject - ) + assert len(experiment) == len(subject) * experiment.fake_experiments_per_subject # test restricted populate assert not trial, "table already filled?" 
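
The hunk below touches the `processes` parametrization on `test_multi_processing`. A parametrized argument only does its job if it reaches the code under test: with a hard-coded `processes=None`, both parametrized runs execute the identical serial path, so the value has to be forwarded as `populate(processes=processes)`. A minimal sketch of the pattern, assuming DataJoint's `populate(processes=...)` keyword and this suite's `subject`/`experiment` fixtures (the test name here is illustrative):

    import pytest

    @pytest.mark.parametrize("processes", [None, 2])
    def test_populate_processes(subject, experiment, processes):
        # processes=None runs populate() serially; processes=2 takes the
        # multiprocessing path, so the parametrized value must be forwarded
        experiment.populate(processes=processes)
        assert len(experiment) == len(subject) * experiment.fake_experiments_per_subject
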
@@ -65,10 +62,7 @@ def test_populate_exclude_error_and_ignore_jobs(schema_any, subject, experiment)
         schema_any.jobs.error(experiment.table_name, key, "")
 
     experiment.populate(reserve_jobs=True)
-    assert (
-        len(experiment.key_source & experiment)
-        == len(experiment.key_source) - 2
-    )
+    assert len(experiment.key_source & experiment) == len(experiment.key_source) - 2
 
 
 def test_allow_direct_insert(subject, experiment):
@@ -79,15 +73,12 @@ def test_allow_direct_insert(subject, experiment):
         experiment.insert1(key, allow_direct_insert=True)
 
 
-@pytest.mark.parametrize('processes', [None, 2])
+@pytest.mark.parametrize("processes", [None, 2])
 def test_multi_processing(subject, experiment, processes):
     assert subject, "root tables are empty"
     assert not experiment, "table already filled?"
-    experiment.populate(processes=None)
-    assert (
-        len(experiment)
-        == len(subject) * experiment.fake_experiments_per_subject
-    )
+    experiment.populate(processes=processes)
+    assert len(experiment) == len(subject) * experiment.fake_experiments_per_subject
 
 
 def test_allow_insert(subject, experiment):
diff --git a/tests/test_blob_matlab.py b/tests/test_blob_matlab.py
index 6d99d4118..2ec23d3c2 100644
--- a/tests/test_blob_matlab.py
+++ b/tests/test_blob_matlab.py
@@ -121,9 +121,7 @@ def test_complex_matlab_squeeze(schema_blob_pop):
     )  # 'simple string'    'character string'
     assert blob == "character string"
 
-    blob = (Blob & "id=2").fetch1(
-        "blob", squeeze=True
-    )  # '1D vector'    1:15:180
+    blob = (Blob & "id=2").fetch1("blob", squeeze=True)  # '1D vector'    1:15:180
     assert_array_equal(blob, np.r_[1:180:15])
 
     blob = (Blob & "id=3").fetch1(
@@ -155,15 +153,11 @@ def test_complex_matlab_squeeze(schema_blob_pop):
     assert np.array_equal(blob, np.r_[1:25].reshape((2, 3, 4), order="F"))
     assert blob.dtype == "float64"
 
-    blob = (Blob & "id=6").fetch1(
-        "blob", squeeze=True
-    )  # reshape(uint8(1:24), [2,3,4])
+    blob = (Blob & "id=6").fetch1("blob", squeeze=True)  # reshape(uint8(1:24), [2,3,4])
     assert np.array_equal(blob, np.r_[1:25].reshape((2, 3, 4), order="F"))
     assert blob.dtype == "uint8"
 
-    blob = (Blob & "id=7").fetch1(
-        "blob", squeeze=True
-    )  # fftn(reshape(1:24, [2,3,4]))
+    blob = (Blob & "id=7").fetch1("blob", squeeze=True)  # fftn(reshape(1:24, [2,3,4]))
     assert tuple(blob.shape) == (2, 3, 4)
     assert blob.dtype == "complex128"
 
diff --git a/tests/test_connection.py b/tests/test_connection.py
index 98b930660..025992e8f 100644
--- a/tests/test_connection.py
+++ b/tests/test_connection.py
@@ -22,7 +22,9 @@ class Subjects(dj.Manual):
 @pytest.fixture
 def schema_tx(connection_test):
     schema = dj.Schema(
-        PREFIX + "_transactions", context=dict(Subjects=Subjects), connection=connection_test
+        PREFIX + "_transactions",
+        context=dict(Subjects=Subjects),
+        connection=connection_test,
     )
     schema(Subjects)
     yield schema
diff --git a/tests/test_declare.py b/tests/test_declare.py
index 13d91c8ff..dfca54c27 100644
--- a/tests/test_declare.py
+++ b/tests/test_declare.py
@@ -9,18 +9,21 @@ def test_schema_decorator(schema_any):
     assert issubclass(Subject, dj.Lookup)
     assert not issubclass(Subject, dj.Part)
 
+
 def test_class_help(schema_any):
     help(TTest)
     help(TTest2)
     assert TTest.definition in TTest.__doc__
     assert TTest.definition in TTest2.__doc__
 
+
 def test_instance_help(schema_any):
     help(TTest())
     help(TTest2())
     assert TTest().definition in TTest().__doc__
     assert TTest2().definition in TTest2().__doc__
 
+
 def test_describe(schema_any):
     """real_definition should match original definition"""
     rel = Experiment()
     context = inspect.currentframe().f_globals
     s1 = declare(rel.full_table_name, rel.definition, context)
     s2 = declare(rel.full_table_name, 
rel.describe(), context) assert s1 == s2 + def test_describe_indexes(schema_any): """real_definition should match original definition""" rel = IndexRich() @@ -37,6 +41,7 @@ def test_describe_indexes(schema_any): s2 = declare(rel.full_table_name, rel.describe(), context) assert s1 == s2 + def test_describe_dependencies(schema_any): """real_definition should match original definition""" rel = ThingC() @@ -45,6 +50,7 @@ def test_describe_dependencies(schema_any): s2 = declare(rel.full_table_name, rel.describe(), context) assert s1 == s2 + def test_part(schema_any): """ Lookup and part with the same name. See issue #365 @@ -70,6 +76,7 @@ class Type(dj.Part): -> Type """ + def test_attributes(schema_any): """ Test autoincrement declaration @@ -136,6 +143,7 @@ def test_attributes(schema_any): assert channel.primary_key == ["animal", "experiment_id", "trial_id", "channel"] assert channel.heading.attributes["voltage"].is_blob + def test_dependencies(schema_any): user = User() subject = Subject() @@ -149,8 +157,7 @@ def test_dependencies(schema_any): assert experiment.full_table_name in user.children(primary=False) assert set(experiment.parents(primary=False)) == {user.full_table_name} assert set( - s.full_table_name - for s in experiment.parents(primary=False, as_objects=True) + s.full_table_name for s in experiment.parents(primary=False, as_objects=True) ) == {user.full_table_name} assert experiment.full_table_name in subject.descendants() @@ -192,6 +199,7 @@ def test_dependencies(schema_any): s.full_table_name for s in channel.parents(primary=True, as_objects=True) ) == {ephys.full_table_name} + def test_descendants_only_contain_part_table(schema_any): """issue #927""" @@ -227,6 +235,7 @@ class Part(dj.Part): "`djtest_test1`.`master__part`", ] + def test_bad_attribute_name(schema_any): class BadName(dj.Manual): definition = """ @@ -236,6 +245,7 @@ class BadName(dj.Manual): with pytest.raises(dj.DataJointError): schema_any(BadName) + def test_bad_fk_rename(schema_any): """issue #381""" @@ -253,6 +263,7 @@ class B(dj.Manual): with pytest.raises(dj.DataJointError): schema_any(B) + def test_primary_nullable_foreign_key(schema_any): class Q(dj.Manual): definition = """ @@ -262,6 +273,7 @@ class Q(dj.Manual): with pytest.raises(dj.DataJointError): schema_any(Q) + def test_invalid_foreign_key_option(schema_any): class R(dj.Manual): definition = """ @@ -273,6 +285,7 @@ class R(dj.Manual): with pytest.raises(dj.DataJointError): schema_any(R) + def test_unsupported_datatype(schema_any): class Q(dj.Manual): definition = """ @@ -284,6 +297,7 @@ class Q(dj.Manual): with pytest.raises(dj.DataJointError): schema_any(Q) + def test_int_datatype(schema_any): @schema_any class Owner(dj.Manual): @@ -293,6 +307,7 @@ class Owner(dj.Manual): car_count : integer """ + def test_unsupported_int_datatype(schema_any): class Driver(dj.Manual): definition = """ @@ -304,6 +319,7 @@ class Driver(dj.Manual): with pytest.raises(dj.DataJointError): schema_any(Driver) + def test_long_table_name(schema_any): """ test issue #205 -- reject table names over 64 characters in length diff --git a/tests/test_fetch.py b/tests/test_fetch.py index eef14a140..4f45ae9e9 100644 --- a/tests/test_fetch.py +++ b/tests/test_fetch.py @@ -24,14 +24,13 @@ def test_getattribute(subject): subject_notes, key, real_id = subject.fetch("subject_notes", dj.key, "real_id") - np.testing.assert_array_equal( - sorted(subject_notes), sorted(tmp["subject_notes"]) - ) + np.testing.assert_array_equal(sorted(subject_notes), sorted(tmp["subject_notes"])) 
np.testing.assert_array_equal(sorted(real_id), sorted(tmp["real_id"])) list1 = sorted(key, key=itemgetter("subject_id")) for l1, l2 in zip(list1, list2): assert l1 == l2, "Primary key is not returned correctly" + def test_getattribute_for_fetch1(subject): """Testing Fetch1.__call__ with attributes""" assert (subject & "subject_id=10").fetch1("subject_id") == 10 @@ -40,6 +39,7 @@ def test_getattribute_for_fetch1(subject): "monkey", ) + def test_order_by(lang, languages): """Tests order_by sorting order""" for ord_name, ord_lang in itertools.product(*2 * [["ASC", "DESC"]]): @@ -51,15 +51,15 @@ def test_order_by(lang, languages): cc == ll for cc, ll in zip(c, l) ), "Sorting order is different" + def test_order_by_default(lang, languages): """Tests order_by sorting order with defaults""" cur = lang.fetch(order_by=("language", "name DESC")) languages.sort(key=itemgetter(0), reverse=True) languages.sort(key=itemgetter(1), reverse=False) for c, l in zip(cur, languages): - assert np.all( - [cc == ll for cc, ll in zip(c, l)] - ), "Sorting order is different" + assert np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" + def test_limit(lang): """Test the limit kwarg""" @@ -67,6 +67,7 @@ def test_limit(lang): cur = lang.fetch(limit=limit) assert len(cur) == limit, "Length is not correct" + def test_order_by_limit(lang, languages): """Test the combination of order by and limit kwargs""" cur = lang.fetch(limit=4, order_by=["language", "name DESC"]) @@ -74,9 +75,8 @@ def test_order_by_limit(lang, languages): languages.sort(key=itemgetter(1), reverse=False) assert len(cur) == 4, "Length is not correct" for c, l in list(zip(cur, languages))[:4]: - assert np.all( - [cc == ll for cc, ll in zip(c, l)] - ), "Sorting order is different" + assert np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" + def test_head_tail(schema_any): query = schema.User * schema.Language @@ -95,6 +95,7 @@ def test_head_tail(schema_any): assert len(frame) == n assert query.primary_key == frame.index.names + def test_limit_offset(lang, languages): """Test the limit and offset kwargs together""" cur = lang.fetch(offset=2, limit=4, order_by=["language", "name DESC"]) @@ -102,9 +103,8 @@ def test_limit_offset(lang, languages): languages.sort(key=itemgetter(1), reverse=False) assert len(cur) == 4, "Length is not correct" for c, l in list(zip(cur, languages[2:6])): - assert np.all( - [cc == ll for cc, ll in zip(c, l)] - ), "Sorting order is different" + assert np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" + def test_iter(lang, languages): """Test iterator""" @@ -120,6 +120,7 @@ def test_iter(lang, languages): row["name"] == tname and row["language"] == tlang ), "Values are not the same" + def test_keys(lang, languages): """test key fetch""" languages.sort(key=itemgetter(0), reverse=True) @@ -132,6 +133,7 @@ def test_keys(lang, languages): for c, c2 in zip(zip(*cur), cur2): assert c == tuple(c2.values()), "Values are not the same" + def test_attributes_as_dict(subject): """ Issue #595 @@ -141,6 +143,7 @@ def test_attributes_as_dict(subject): assert bool(result) and len(result) == len(subject) assert set(result[0]) == set(attrs) + def test_fetch1_step1(lang, languages): assert ( lang.contents @@ -160,10 +163,12 @@ def test_fetch1_step1(lang, languages): for k, (ke, c) in zip(true, dat.items()): assert k == c == (lang & key).fetch1(ke), "Values are not the same" + def test_misspelled_attribute(schema_any): with pytest.raises(dj.DataJointError): f = (schema.Language & 'lang 
= "ENGLISH"').fetch() + def test_repr(subject): """Test string representation of fetch, returning table preview""" repr = subject.fetch.__repr__() @@ -172,17 +177,20 @@ def test_repr(subject): # 3 lines are used for headers (2) and summary statement (1) assert n - 3 <= limit + def test_fetch_none(lang): """Test preparing attributes for getitem""" with pytest.raises(dj.DataJointError): lang.fetch(None) + def test_asdict(lang): """Test returns as dictionaries""" d = lang.fetch(as_dict=True) for dd in d: assert isinstance(dd, dict) + def test_offset(lang, languages): """Tests offset""" cur = lang.fetch(limit=4, offset=1, order_by=["language", "name DESC"]) @@ -191,9 +199,8 @@ def test_offset(lang, languages): languages.sort(key=itemgetter(1), reverse=False) assert len(cur) == 4, "Length is not correct" for c, l in list(zip(cur, languages[1:]))[:4]: - assert np.all( - [cc == ll for cc, ll in zip(c, l)] - ), "Sorting order is different" + assert np.all([cc == ll for cc, ll in zip(c, l)]), "Sorting order is different" + def test_limit_warning(lang): """Tests whether warning is raised if offset is used without limit.""" @@ -216,20 +223,24 @@ def test_limit_warning(lang): logger.removeHandler(handler) assert "[WARNING]: Offset set, but no limit." in log_contents + def test_len(lang): """Tests __len__""" assert len(lang.fetch()) == len(lang), "__len__ is not behaving properly" + def test_fetch1_step2(lang): """Tests whether fetch1 raises error""" with pytest.raises(dj.DataJointError): lang.fetch1() + def test_fetch1_step3(lang): """Tests whether fetch1 raises error""" with pytest.raises(dj.DataJointError): lang.fetch1("name") + def test_decimal(schema_any): """Tests that decimal fields are correctly fetched and used in restrictions, see issue #334""" rel = schema.DecimalPrimaryKey() @@ -242,6 +253,7 @@ def test_decimal(schema_any): assert len(keys) >= 2 assert len(rel & keys[1]) == 1 + def test_nullable_numbers(schema_any): """test mixture of values and nulls in numeric attributes""" table = schema.NullableNumbers() @@ -262,13 +274,12 @@ def test_nullable_numbers(schema_any): assert any(np.isnan(d)) assert any(np.isnan(f)) + def test_fetch_format(subject): """test fetch_format='frame'""" with dj.config(fetch_format="frame"): # test if lists are both dicts - list1 = sorted( - subject.proj().fetch(as_dict=True), key=itemgetter("subject_id") - ) + list1 = sorted(subject.proj().fetch(as_dict=True), key=itemgetter("subject_id")) list2 = sorted(subject.fetch(dj.key), key=itemgetter("subject_id")) for l1, l2 in zip(list1, list2): assert l1 == l2, "Primary key is not returned correctly" @@ -278,9 +289,7 @@ def test_fetch_format(subject): assert isinstance(tmp, pandas.DataFrame) tmp = tmp.to_records() - subject_notes, key, real_id = subject.fetch( - "subject_notes", dj.key, "real_id" - ) + subject_notes, key, real_id = subject.fetch("subject_notes", dj.key, "real_id") np.testing.assert_array_equal( sorted(subject_notes), sorted(tmp["subject_notes"]) @@ -290,6 +299,7 @@ def test_fetch_format(subject): for l1, l2 in zip(list1, list2): assert l1 == l2, "Primary key is not returned correctly" + def test_key_fetch1(subject): """test KEY fetch1 - issue #976""" with dj.config(fetch_format="array"): @@ -298,11 +308,13 @@ def test_key_fetch1(subject): k2 = (subject & "subject_id=10").fetch1("KEY") assert k1 == k2 + def test_same_secondary_attribute(schema_any): children = (schema.Child * schema.Parent().proj()).fetch()["name"] assert len(children) == 1 assert children[0] == "Dan" + def 
test_query_caching(schema_any): # initialize cache directory os.mkdir(os.path.expanduser("~/dj_query_cache")) @@ -315,9 +327,7 @@ def test_query_caching(schema_any): cached_res = schema.TTest3().fetch() # attempt to insert while caching enabled try: - schema.TTest3.insert( - [dict(key=200 + i, value=400 + i) for i in range(2)] - ) + schema.TTest3.insert([dict(key=200 + i, value=400 + i) for i in range(2)]) assert False, "Insert allowed while query caching enabled" except dj.DataJointError: conn.set_query_cache() @@ -337,6 +347,7 @@ def test_query_caching(schema_any): # reset cache directory state (will fail if purge was unsuccessful) os.rmdir(os.path.expanduser("~/dj_query_cache")) + def test_fetch_group_by(schema_any): """ https://github.com/datajoint/datajoint-python/issues/914 @@ -344,6 +355,7 @@ def test_fetch_group_by(schema_any): assert schema.Parent().fetch("KEY", order_by="name") == [{"parent_id": 1}] + def test_dj_u_distinct(schema_any): """ Test developed to see if removing DISTINCT from the select statement @@ -369,6 +381,7 @@ def test_dj_u_distinct(schema_any): schema.Stimulus.delete_quick() assert fetched_result == expected_result + def test_backslash(schema_any): """ https://github.com/datajoint/datajoint-python/issues/999 diff --git a/tests/test_fetch_same.py b/tests/test_fetch_same.py index 8f4cac5f2..a253ca092 100644 --- a/tests/test_fetch_same.py +++ b/tests/test_fetch_same.py @@ -50,16 +50,19 @@ def test_object_conversion_one(schema_fetch_same): new = ProjData().proj(sub="resp").fetch("sub") assert new.dtype == np.float64 + def test_object_conversion_two(schema_fetch_same): [sub, add] = ProjData().proj(sub="resp", add="sim").fetch("sub", "add") assert sub.dtype == np.float64 assert add.dtype == np.float64 + def test_object_conversion_all(schema_fetch_same): new = ProjData().proj(sub="resp", add="sim").fetch() assert new["sub"].dtype == np.float64 assert new["add"].dtype == np.float64 + def test_object_no_convert(schema_fetch_same): new = ProjData().fetch() assert new["big"].dtype == "object" diff --git a/tests/test_json.py b/tests/test_json.py index a63baaca2..26a209f55 100644 --- a/tests/test_json.py +++ b/tests/test_json.py @@ -66,7 +66,9 @@ class Team(dj.Lookup): @pytest.fixture def schema_json(connection_test): - schema = dj.Schema(PREFIX + "_json", context=dict(Team=Team), connection=connection_test) + schema = dj.Schema( + PREFIX + "_json", context=dict(Team=Team), connection=connection_test + ) schema(Team) yield schema schema.drop() diff --git a/tests/test_nan.py b/tests/test_nan.py index deaa097ec..48f2bd38b 100644 --- a/tests/test_nan.py +++ b/tests/test_nan.py @@ -14,7 +14,9 @@ class NanTest(dj.Manual): @pytest.fixture def schema_nan(connection_test): - schema = dj.Schema(PREFIX + "_nantest", context=dict(NanTest=NanTest), connection=connection_test) + schema = dj.Schema( + PREFIX + "_nantest", context=dict(NanTest=NanTest), connection=connection_test + ) schema(NanTest) yield schema schema.drop() diff --git a/tests/test_relation_u.py b/tests/test_relation_u.py index f9a0f3a8f..d5dd3a7fc 100644 --- a/tests/test_relation_u.py +++ b/tests/test_relation_u.py @@ -16,6 +16,7 @@ # cls.img = Image() # cls.trash = UberTrash() + def test_restriction(lang, languages, trial): language_set = {s[1] for s in languages} rel = dj.U("language") & lang @@ -28,14 +29,17 @@ def test_restriction(lang, languages, trial): assert list(rel.primary_key) == list((rel & "trial_id>3").primary_key) assert list((dj.U("start_time") & trial).primary_key) == ["start_time"] + def 
test_invalid_restriction(schema_any): with raises(dj.DataJointError): result = dj.U("color") & dict(color="red") + def test_ineffective_restriction(lang): rel = lang & dj.U("language") assert rel.make_sql() == lang.make_sql() + def test_join(experiment): rel = experiment * dj.U("experiment_date") assert experiment.primary_key == ["subject_id", "experiment_id"] @@ -45,15 +49,18 @@ def test_join(experiment): assert experiment.primary_key == ["subject_id", "experiment_id"] assert rel.primary_key == experiment.primary_key + ["experiment_date"] + def test_invalid_join(schema_any): with raises(dj.DataJointError): rel = dj.U("language") * dict(language="English") + def test_repr_without_attrs(schema_any): """test dj.U() display""" query = dj.U().aggr(Language, n="count(*)") repr(query) + def test_aggregations(schema_any): lang = Language() # test total aggregation on expression object @@ -66,12 +73,14 @@ def test_aggregations(schema_any): assert len(rel) == len(set(l[1] for l in Language.contents)) assert (rel & 'language="English"').fetch1("number_of_speakers") == 3 + def test_argmax(schema_any): rel = TTest() # get the tuples corresponding to the maximum value mx = (rel * dj.U().aggr(rel, mx="max(value)")) & "mx=value" assert mx.fetch("value")[0] == max(rel.fetch("value")) + def test_aggr(schema_any, schema_simp): rel = ArgmaxTest() amax1 = (dj.U("val") * rel) & dj.U("secondary_key").aggr(rel, val="min(val)") From a59325acead485fec0047f40331d48a20bbfb2f3 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 19:42:53 -0700 Subject: [PATCH 186/212] Replace PREFIX with fixture --- tests/__init__.py | 1 - tests/conftest.py | 34 +++++++++++++++++------------- tests/schema_external.py | 1 - tests/schema_uuid.py | 1 - tests/test_adapted_attributes.py | 17 +++++++++------ tests/test_aggr_regressions.py | 9 ++++---- tests/test_alter.py | 2 +- tests/test_autopopulate.py | 6 +++--- tests/test_blob_matlab.py | 5 ++--- tests/test_bypass_serialization.py | 7 +++--- tests/test_connection.py | 5 ++--- tests/test_fetch_same.py | 5 ++--- tests/test_json.py | 5 ++--- tests/test_nan.py | 5 ++--- tests/test_privileges.py | 14 ++++++------ tests/test_relational_operand.py | 3 +-- tests/test_schema_keywords.py | 5 ++--- 17 files changed, 60 insertions(+), 65 deletions(-) diff --git a/tests/__init__.py b/tests/__init__.py index e12feabe3..b48e5a074 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,3 +1,2 @@ import os -PREFIX = os.environ.get("DJ_TEST_DB_PREFIX", "djtest") diff --git a/tests/conftest.py b/tests/conftest.py index dccc18ce9..3baf05094 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -18,7 +18,6 @@ DataJointError, ) from . 
import ( - PREFIX, schema, schema_simple, schema_advanced, @@ -28,6 +27,11 @@ ) +@pytest.fixture(scope="session") +def prefix(): + return os.environ.get("DJ_TEST_DB_PREFIX", "djtest") + + @pytest.fixture(scope="session") def monkeysession(): with pytest.MonkeyPatch.context() as mp: @@ -70,7 +74,7 @@ def connection_root_bare(db_creds_root): @pytest.fixture(scope="session") -def connection_root(connection_root_bare): +def connection_root(connection_root_bare, prefix): """Root user database connection.""" dj.config["safemode"] = False conn_root = connection_root_bare @@ -125,7 +129,7 @@ def connection_root(connection_root_bare): # Teardown conn_root.query("SET FOREIGN_KEY_CHECKS=0") - cur = conn_root.query('SHOW DATABASES LIKE "{}\\_%%"'.format(PREFIX)) + cur = conn_root.query('SHOW DATABASES LIKE "{}\\_%%"'.format(prefix)) for db in cur.fetchall(): conn_root.query("DROP DATABASE `{}`".format(db[0])) conn_root.query("SET FOREIGN_KEY_CHECKS=1") @@ -140,9 +144,9 @@ def connection_root(connection_root_bare): @pytest.fixture(scope="session") -def connection_test(connection_root): +def connection_test(connection_root, prefix): """Test user database connection.""" - database = f"{PREFIX}%%" + database = f"{prefix}%%" credentials = dict( host=os.getenv("DJ_HOST"), user="datajoint", password="datajoint" ) @@ -240,9 +244,9 @@ def mock_cache(tmpdir_factory): @pytest.fixture -def schema_any(connection_test): +def schema_any(connection_test, prefix): schema_any = dj.Schema( - PREFIX + "_test1", schema.LOCALS_ANY, connection=connection_test + prefix + "_test1", schema.LOCALS_ANY, connection=connection_test ) assert schema.LOCALS_ANY, "LOCALS_ANY is empty" try: @@ -294,9 +298,9 @@ def schema_any(connection_test): @pytest.fixture -def schema_simp(connection_test): +def schema_simp(connection_test, prefix): schema = dj.Schema( - PREFIX + "_relational", schema_simple.LOCALS_SIMPLE, connection=connection_test + prefix + "_relational", schema_simple.LOCALS_SIMPLE, connection=connection_test ) schema(schema_simple.IJ) schema(schema_simple.JI) @@ -321,9 +325,9 @@ def schema_simp(connection_test): @pytest.fixture -def schema_adv(connection_test): +def schema_adv(connection_test, prefix): schema = dj.Schema( - PREFIX + "_advanced", + prefix + "_advanced", schema_advanced.LOCALS_ADVANCED, connection=connection_test, ) @@ -341,9 +345,9 @@ def schema_adv(connection_test): @pytest.fixture -def schema_ext(connection_test, enable_filepath_feature, mock_stores, mock_cache): +def schema_ext(connection_test, enable_filepath_feature, mock_stores, mock_cache, prefix): schema = dj.Schema( - PREFIX + "_extern", + prefix + "_extern", context=schema_external.LOCALS_EXTERNAL, connection=connection_test, ) @@ -360,9 +364,9 @@ def schema_ext(connection_test, enable_filepath_feature, mock_stores, mock_cache @pytest.fixture -def schema_uuid(connection_test): +def schema_uuid(connection_test, prefix): schema = dj.Schema( - PREFIX + "_test1", + prefix + "_test1", context=schema_uuid_module.LOCALS_UUID, connection=connection_test, ) diff --git a/tests/schema_external.py b/tests/schema_external.py index f29aeb8da..ce51af9c5 100644 --- a/tests/schema_external.py +++ b/tests/schema_external.py @@ -5,7 +5,6 @@ import tempfile import inspect import datajoint as dj -from . import PREFIX import numpy as np diff --git a/tests/schema_uuid.py b/tests/schema_uuid.py index 914fedfad..00b45ee78 100644 --- a/tests/schema_uuid.py +++ b/tests/schema_uuid.py @@ -1,7 +1,6 @@ import uuid import inspect import datajoint as dj -from . 
import PREFIX top_level_namespace_id = uuid.UUID("00000000-0000-0000-0000-000000000000") diff --git a/tests/test_adapted_attributes.py b/tests/test_adapted_attributes.py index 997da2131..3ed034f23 100644 --- a/tests/test_adapted_attributes.py +++ b/tests/test_adapted_attributes.py @@ -6,9 +6,11 @@ from itertools import zip_longest from . import schema_adapted from .schema_adapted import Connectivity, Layout -from . import PREFIX -SCHEMA_NAME = PREFIX + "_test_custom_datatype" + +@pytest.fixture +def schema_name(prefix): + return prefix + "_test_custom_datatype" @pytest.fixture @@ -24,6 +26,7 @@ def schema_ad( enable_filepath_feature, s3_creds, tmpdir, + schema_name ): dj.config["stores"] = { "repo-s3": dict( @@ -35,7 +38,7 @@ def schema_ad( "graph": adapted_graph_instance, "layout_to_filepath": schema_adapted.LayoutToFilepath(), } - schema = dj.schema(SCHEMA_NAME, context=context, connection=connection_test) + schema = dj.schema(schema_name, context=context, connection=connection_test) schema(schema_adapted.Connectivity) schema(schema_adapted.Layout) yield schema @@ -43,19 +46,19 @@ def schema_ad( @pytest.fixture -def local_schema(schema_ad): +def local_schema(schema_ad, schema_name): """Fixture for testing spawned classes""" - local_schema = dj.Schema(SCHEMA_NAME) + local_schema = dj.Schema(schema_name) local_schema.spawn_missing_classes() yield local_schema local_schema.drop() @pytest.fixture -def schema_virtual_module(schema_ad, adapted_graph_instance): +def schema_virtual_module(schema_ad, adapted_graph_instance, schema_name): """Fixture for testing virtual modules""" schema_virtual_module = dj.VirtualModule( - "virtual_module", SCHEMA_NAME, add_objects={"graph": adapted_graph_instance} + "virtual_module", schema_name, add_objects={"graph": adapted_graph_instance} ) return schema_virtual_module diff --git a/tests/test_aggr_regressions.py b/tests/test_aggr_regressions.py index 31ec81faa..7cc5119ea 100644 --- a/tests/test_aggr_regressions.py +++ b/tests/test_aggr_regressions.py @@ -4,16 +4,15 @@ import pytest import datajoint as dj -from . import PREFIX import uuid from .schema_uuid import Topic, Item, top_level_namespace_id from .schema_aggr_regress import R, Q, S, A, B, X, LOCALS_AGGR_REGRESS @pytest.fixture(scope="function") -def schema_aggr_reg(connection_test): +def schema_aggr_reg(connection_test, prefix): schema = dj.Schema( - PREFIX + "_aggr_regress", + prefix + "_aggr_regress", context=LOCALS_AGGR_REGRESS, connection=connection_test, ) @@ -25,9 +24,9 @@ def schema_aggr_reg(connection_test): @pytest.fixture(scope="function") -def schema_aggr_reg_with_abx(connection_test): +def schema_aggr_reg_with_abx(connection_test, prefix): schema = dj.Schema( - PREFIX + "_aggr_regress_with_abx", + prefix + "_aggr_regress_with_abx", context=LOCALS_AGGR_REGRESS, connection=connection_test, ) diff --git a/tests/test_alter.py b/tests/test_alter.py index f2acafb36..5146d6266 100644 --- a/tests/test_alter.py +++ b/tests/test_alter.py @@ -1,7 +1,7 @@ import pytest import re import datajoint as dj -from . import schema as schema_any_module, PREFIX +from . import schema as schema_any_module from .schema_alter import Experiment, Parent, LOCALS_ALTER COMBINED_CONTEXT = { diff --git a/tests/test_autopopulate.py b/tests/test_autopopulate.py index 4fc4b9f77..d1f0726e1 100644 --- a/tests/test_autopopulate.py +++ b/tests/test_autopopulate.py @@ -1,8 +1,8 @@ import pytest -from . import schema, PREFIX from datajoint import DataJointError import datajoint as dj import pymysql +from . 
import schema def test_populate(trial, subject, experiment, ephys, channel): @@ -90,8 +90,8 @@ def test_allow_insert(subject, experiment): experiment.insert1(key) -def test_load_dependencies(): - schema = dj.Schema(f"{PREFIX}_load_dependencies_populate") +def test_load_dependencies(prefix): + schema = dj.Schema(f"{prefix}_load_dependencies_populate") @schema class ImageSource(dj.Lookup): diff --git a/tests/test_blob_matlab.py b/tests/test_blob_matlab.py index 2ec23d3c2..4578bb834 100644 --- a/tests/test_blob_matlab.py +++ b/tests/test_blob_matlab.py @@ -3,7 +3,6 @@ import datajoint as dj from datajoint.blob import pack, unpack from numpy.testing import assert_array_equal -from . import PREFIX class Blob(dj.Manual): @@ -16,8 +15,8 @@ class Blob(dj.Manual): @pytest.fixture -def schema_blob(connection_test): - schema = dj.Schema(PREFIX + "_test1", dict(Blob=Blob), connection=connection_test) +def schema_blob(connection_test, prefix): + schema = dj.Schema(prefix + "_test1", dict(Blob=Blob), connection=connection_test) schema(Blob) yield schema schema.drop() diff --git a/tests/test_bypass_serialization.py b/tests/test_bypass_serialization.py index 5f73e1d2e..a173108d0 100644 --- a/tests/test_bypass_serialization.py +++ b/tests/test_bypass_serialization.py @@ -1,7 +1,6 @@ import pytest import datajoint as dj import numpy as np -from . import PREFIX from numpy.testing import assert_array_equal test_blob = np.array([1, 2, 3]) @@ -25,9 +24,9 @@ class Output(dj.Manual): @pytest.fixture -def schema_in(connection_test): +def schema_in(connection_test, prefix): schema = dj.Schema( - PREFIX + "_test_bypass_serialization_in", + prefix + "_test_bypass_serialization_in", context=dict(Input=Input), connection=connection_test, ) @@ -39,7 +38,7 @@ def schema_in(connection_test): @pytest.fixture def schema_out(connection_test): schema = dj.Schema( - PREFIX + "_test_blob_bypass_serialization_out", + prefix + "_test_blob_bypass_serialization_out", context=dict(Output=Output), connection=connection_test, ) diff --git a/tests/test_connection.py b/tests/test_connection.py index 025992e8f..497255753 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -5,7 +5,6 @@ import datajoint as dj from datajoint import DataJointError import numpy as np -from . import PREFIX import pytest @@ -20,9 +19,9 @@ class Subjects(dj.Manual): @pytest.fixture -def schema_tx(connection_test): +def schema_tx(connection_test, prefix): schema = dj.Schema( - PREFIX + "_transactions", + prefix + "_transactions", context=dict(Subjects=Subjects), connection=connection_test, ) diff --git a/tests/test_fetch_same.py b/tests/test_fetch_same.py index a253ca092..32d041347 100644 --- a/tests/test_fetch_same.py +++ b/tests/test_fetch_same.py @@ -1,5 +1,4 @@ import pytest -from . import PREFIX import numpy as np import datajoint as dj @@ -16,9 +15,9 @@ class ProjData(dj.Manual): @pytest.fixture -def schema_fetch_same(connection_test): +def schema_fetch_same(connection_test, prefix): schema = dj.Schema( - PREFIX + "_fetch_same", + prefix + "_fetch_same", context=dict(ProjData=ProjData), connection=connection_test, ) diff --git a/tests/test_json.py b/tests/test_json.py index 26a209f55..53016505c 100644 --- a/tests/test_json.py +++ b/tests/test_json.py @@ -4,7 +4,6 @@ import datajoint as dj import numpy as np from packaging.version import Version -from . 
import PREFIX if Version(dj.conn().query("select @@version;").fetchone()[0]) < Version("8.0.0"): pytest.skip("These tests require MySQL >= v8.0.0", allow_module_level=True) @@ -65,9 +64,9 @@ class Team(dj.Lookup): @pytest.fixture -def schema_json(connection_test): +def schema_json(connection_test, prefix): schema = dj.Schema( - PREFIX + "_json", context=dict(Team=Team), connection=connection_test + prefix + "_json", context=dict(Team=Team), connection=connection_test ) schema(Team) yield schema diff --git a/tests/test_nan.py b/tests/test_nan.py index 48f2bd38b..68a28079c 100644 --- a/tests/test_nan.py +++ b/tests/test_nan.py @@ -1,6 +1,5 @@ import numpy as np import datajoint as dj -from . import PREFIX import pytest @@ -13,9 +12,9 @@ class NanTest(dj.Manual): @pytest.fixture -def schema_nan(connection_test): +def schema_nan(connection_test, prefix): schema = dj.Schema( - PREFIX + "_nantest", context=dict(NanTest=NanTest), connection=connection_test + prefix + "_nantest", context=dict(NanTest=NanTest), connection=connection_test ) schema(NanTest) yield schema diff --git a/tests/test_privileges.py b/tests/test_privileges.py index fc20b430e..27cbfacfd 100644 --- a/tests/test_privileges.py +++ b/tests/test_privileges.py @@ -1,7 +1,7 @@ import os import pytest import datajoint as dj -from . import schema, PREFIX, schema_privileges +from . import schema, schema_privileges namespace = locals() @@ -22,10 +22,10 @@ def schema_priv(connection_test): @pytest.fixture -def connection_djsubset(connection_root, db_creds_root, schema_priv): +def connection_djsubset(connection_root, db_creds_root, schema_priv, prefix): user = "djsubset" conn = dj.conn(**db_creds_root, reset=True) - schema_priv.activate(f"{PREFIX}_schema_privileges") + schema_priv.activate(f"{prefix}_schema_privileges") conn.query( f""" CREATE USER IF NOT EXISTS '{user}'@'%%' @@ -35,14 +35,14 @@ def connection_djsubset(connection_root, db_creds_root, schema_priv): conn.query( f""" GRANT SELECT, INSERT, UPDATE, DELETE - ON `{PREFIX}_schema_privileges`.`#parent` + ON `{prefix}_schema_privileges`.`#parent` TO '{user}'@'%%' """ ) conn.query( f""" GRANT SELECT, INSERT, UPDATE, DELETE - ON `{PREFIX}_schema_privileges`.`__child` + ON `{prefix}_schema_privileges`.`__child` TO '{user}'@'%%' """ ) @@ -54,7 +54,7 @@ def connection_djsubset(connection_root, db_creds_root, schema_priv): ) yield conn_djsubset conn.query(f"DROP USER {user}") - conn.query(f"DROP DATABASE {PREFIX}_schema_privileges") + conn.query(f"DROP DATABASE {prefix}_schema_privileges") @pytest.fixture @@ -111,7 +111,7 @@ class Try(dj.Manual): class TestSubset: def test_populate_activate(self, connection_djsubset, schema_priv): schema_priv.activate( - f"{PREFIX}_schema_privileges", create_schema=True, create_tables=False + f"{prefix}_schema_privileges", create_schema=True, create_tables=False ) schema_privileges.Child.populate() assert schema_privileges.Child.progress(display=False)[0] == 0 diff --git a/tests/test_relational_operand.py b/tests/test_relational_operand.py index acd117509..afa665c8a 100644 --- a/tests/test_relational_operand.py +++ b/tests/test_relational_operand.py @@ -7,7 +7,6 @@ import datajoint as dj from .schema_simple import * from .schema import * -from . 
import PREFIX @pytest.fixture @@ -190,7 +189,7 @@ def test_project(schema_simp_pop): def test_rename_non_dj_attribute(connection_test, schema_simp_pop, schema_any_pop): - schema = PREFIX + "_test1" + schema = prefix + "_test1" connection_test.query( f"CREATE TABLE {schema}.test_table (oldID int PRIMARY KEY)" ).fetchall() diff --git a/tests/test_schema_keywords.py b/tests/test_schema_keywords.py index 23ef645db..22ed1c2a0 100644 --- a/tests/test_schema_keywords.py +++ b/tests/test_schema_keywords.py @@ -1,6 +1,5 @@ import pytest import datajoint as dj -from . import PREFIX class A(dj.Manual): @@ -34,8 +33,8 @@ class D(B): @pytest.fixture -def schema_kwd(connection_test): - schema = dj.Schema(PREFIX + "_keywords", connection=connection_test) +def schema_kwd(connection_test, prefix): + schema = dj.Schema(prefix + "_keywords", connection=connection_test) schema(A) schema(D) yield schema From a3f63382d0b165a7ef3d0ea84b908a496c7399cb Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 19:43:08 -0700 Subject: [PATCH 187/212] Format with black --- tests/__init__.py | 1 - tests/conftest.py | 4 +++- tests/test_adapted_attributes.py | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/__init__.py b/tests/__init__.py index b48e5a074..21b405d8c 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,2 +1 @@ import os - diff --git a/tests/conftest.py b/tests/conftest.py index 3baf05094..3598f0611 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -345,7 +345,9 @@ def schema_adv(connection_test, prefix): @pytest.fixture -def schema_ext(connection_test, enable_filepath_feature, mock_stores, mock_cache, prefix): +def schema_ext( + connection_test, enable_filepath_feature, mock_stores, mock_cache, prefix +): schema = dj.Schema( prefix + "_extern", context=schema_external.LOCALS_EXTERNAL, diff --git a/tests/test_adapted_attributes.py b/tests/test_adapted_attributes.py index 3ed034f23..714da8a69 100644 --- a/tests/test_adapted_attributes.py +++ b/tests/test_adapted_attributes.py @@ -26,7 +26,7 @@ def schema_ad( enable_filepath_feature, s3_creds, tmpdir, - schema_name + schema_name, ): dj.config["stores"] = { "repo-s3": dict( From 462f1117874901e710d6ef906af8f0c1b1b4fa6f Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 19:43:43 -0700 Subject: [PATCH 188/212] Empty tests init --- tests/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/__init__.py b/tests/__init__.py index 21b405d8c..e69de29bb 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1 +0,0 @@ -import os From 3f48bf98fb2b290cf423d5bd0e8eb786219cb2fc Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 19:51:54 -0700 Subject: [PATCH 189/212] Add missing prefix injection --- tests/test_bypass_serialization.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_bypass_serialization.py b/tests/test_bypass_serialization.py index a173108d0..90fc35090 100644 --- a/tests/test_bypass_serialization.py +++ b/tests/test_bypass_serialization.py @@ -36,7 +36,7 @@ def schema_in(connection_test, prefix): @pytest.fixture -def schema_out(connection_test): +def schema_out(connection_test, prefix): schema = dj.Schema( prefix + "_test_blob_bypass_serialization_out", context=dict(Output=Output), From 70318e21144d53fa6b8b5bade72d1b021b04b237 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 19:58:02 -0700 Subject: [PATCH 190/212] Add missing prefix injection --- tests/test_privileges.py | 2 +- tests/test_relational_operand.py | 2 +- 2 files 
changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_privileges.py b/tests/test_privileges.py index 27cbfacfd..57880081c 100644 --- a/tests/test_privileges.py +++ b/tests/test_privileges.py @@ -109,7 +109,7 @@ class Try(dj.Manual): class TestSubset: - def test_populate_activate(self, connection_djsubset, schema_priv): + def test_populate_activate(self, connection_djsubset, schema_priv, prefix): schema_priv.activate( f"{prefix}_schema_privileges", create_schema=True, create_tables=False ) diff --git a/tests/test_relational_operand.py b/tests/test_relational_operand.py index afa665c8a..da3b40d3c 100644 --- a/tests/test_relational_operand.py +++ b/tests/test_relational_operand.py @@ -188,7 +188,7 @@ def test_project(schema_simp_pop): ) -def test_rename_non_dj_attribute(connection_test, schema_simp_pop, schema_any_pop): +def test_rename_non_dj_attribute(connection_test, schema_simp_pop, schema_any_pop, prefix): schema = prefix + "_test1" connection_test.query( f"CREATE TABLE {schema}.test_table (oldID int PRIMARY KEY)" From 23529b4fb7f2b05861f1249f67d15320dd9f3e81 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 20:03:51 -0700 Subject: [PATCH 191/212] Format with black --- tests/test_relational_operand.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/test_relational_operand.py b/tests/test_relational_operand.py index da3b40d3c..65c6a5d74 100644 --- a/tests/test_relational_operand.py +++ b/tests/test_relational_operand.py @@ -188,7 +188,9 @@ def test_project(schema_simp_pop): ) -def test_rename_non_dj_attribute(connection_test, schema_simp_pop, schema_any_pop, prefix): +def test_rename_non_dj_attribute( + connection_test, schema_simp_pop, schema_any_pop, prefix +): schema = prefix + "_test1" connection_test.query( f"CREATE TABLE {schema}.test_table (oldID int PRIMARY KEY)" From e81d5ce192b670b3fe527ed8544e9df64d9bd247 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Thu, 14 Dec 2023 20:12:26 -0700 Subject: [PATCH 192/212] Fix warnings in test_cascading_delete --- tests/test_cascading_delete.py | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/tests/test_cascading_delete.py b/tests/test_cascading_delete.py index dcaaa86d2..70fedf687 100644 --- a/tests/test_cascading_delete.py +++ b/tests/test_cascading_delete.py @@ -17,9 +17,8 @@ def schema_simp_pop(schema_simp): def test_delete_tree(schema_simp_pop): assert not dj.config["safemode"], "safemode must be off for testing" assert ( - L() and A() and B() and B.C() and D() and E() and E.F(), - "schema is not populated", - ) + L() and A() and B() and B.C() and D() and E() and E.F() + ), "schema is not populated" A().delete() assert not A() or B() or B.C() or D() or E() or E.F(), "incomplete delete" @@ -64,10 +63,9 @@ def test_delete_tree_restricted(schema_simp_pop): def test_delete_lookup(schema_simp_pop): assert not dj.config["safemode"], "safemode must be off for testing" - assert ( - bool(L() and A() and B() and B.C() and D() and E() and E.F()), - "schema is not populated", - ) + assert bool( + L() and A() and B() and B.C() and D() and E() and E.F() + ), "schema is not populated" L().delete() assert not bool(L() or D() or E() or E.F()), "incomplete delete" A().delete() # delete all is necessary because delete L deletes from subtables. 
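The hunks above and below repair the same anti-pattern: wrapping an assert's expression and its message in one pair of parentheses turns the whole thing into a two-element tuple, which is always truthy, so the assertion can never fail and pytest warns that it is always true. A minimal sketch of the before and after, using the tables from this test module:

    # Broken: a non-empty tuple is always truthy, so this never fails.
    assert (L() and A() and B(), "schema is not populated")

    # Fixed: the message is the second operand of the assert statement.
    assert L() and A() and B(), "schema is not populated"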
@@ -76,9 +74,8 @@ def test_delete_lookup(schema_simp_pop): def test_delete_lookup_restricted(schema_simp_pop): assert not dj.config["safemode"], "safemode must be off for testing" assert ( - L() and A() and B() and B.C() and D() and E() and E.F(), - "schema is not populated", - ) + L() and A() and B() and B.C() and D() and E() and E.F() + ), "schema is not populated" rel = L() & "cond_in_l" original_count = len(L()) deleted_count = len(rel) From 573df44cc2a4caa55e361473482bf388f6853fb4 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Fri, 15 Dec 2023 14:19:45 -0700 Subject: [PATCH 193/212] Clean up from merge conflict --- tests/conftest.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 23b76d8fb..cc2c8062e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -156,9 +156,6 @@ def connection_root(connection_root_bare, prefix): def connection_test(connection_root, prefix, db_creds_test): """Test user database connection.""" database = f"{prefix}%%" - credentials = dict( - host=os.getenv("DJ_HOST"), user="datajoint", password="datajoint" - ) permission = "ALL PRIVILEGES" # Create MySQL users From b149868808f0449989be3d4f0c559adbeaade53b Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Fri, 15 Dec 2023 14:29:10 -0700 Subject: [PATCH 194/212] Add @dimitri-yatsenko suggested changes on #1131 See comment https://github.com/datajoint/datajoint-python/pull/1131#discussion_r1428175734 --- tests/test_jobs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_jobs.py b/tests/test_jobs.py index ebe257f8b..9d1d4636b 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -101,7 +101,7 @@ def test_long_error_message(subject, schema_any): assert subject table_name = "fake_table" - key = subject.fetch("KEY")[0] + key = subject.fetch("KEY", limit=1)[0] # test long error message schema_any.jobs.reserve(table_name, key) @@ -137,7 +137,7 @@ def test_long_error_stack(subject, schema_any): assert subject table_name = "fake_table" - key = subject.fetch("KEY")[0] + key = subject.fetch("KEY", limit=1)[0] # test long error stack schema_any.jobs.reserve(table_name, key) From abda8f1e5c22a6a20e5d361baf12688405c79acd Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Fri, 15 Dec 2023 14:32:38 -0700 Subject: [PATCH 195/212] Clean up last modules Clean up modules that were migrated in PRs: #1136, 1137, 1138, 1139, 1140 --- tests/test_schema.py | 6 ++---- tests/test_university.py | 6 +++--- tests/test_update1.py | 5 ++--- 3 files changed, 7 insertions(+), 10 deletions(-) diff --git a/tests/test_schema.py b/tests/test_schema.py index 7b262204f..1c49c58e8 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -2,10 +2,8 @@ import pytest import inspect import datajoint as dj -from unittest.mock import patch from inspect import getmembers from . import schema -from . 
import PREFIX class Ephys(dj.Imported): @@ -49,10 +47,10 @@ def schema_empty_module(schema_any, schema_empty): @pytest.fixture -def schema_empty(connection_test, schema_any): +def schema_empty(connection_test, schema_any, prefix): context = {**schema.LOCALS_ANY, "Ephys": Ephys} schema_empty = dj.Schema( - PREFIX + "_test1", context=context, connection=connection_test + prefix + "_test1", context=context, connection=connection_test ) schema_empty(Ephys) # load the rest of the classes diff --git a/tests/test_university.py b/tests/test_university.py index 956cc506f..800ee7cdf 100644 --- a/tests/test_university.py +++ b/tests/test_university.py @@ -4,7 +4,7 @@ from datajoint import DataJointError import datajoint as dj from .schema_university import * -from . import PREFIX, schema_university +from . import schema_university def _hash4(table): @@ -32,10 +32,10 @@ def schema_uni_inactive(): @pytest.fixture -def schema_uni(db_creds_test, schema_uni_inactive, connection_test): +def schema_uni(db_creds_test, schema_uni_inactive, connection_test, prefix): # Deferred activation schema_uni_inactive.activate( - PREFIX + "_university", connection=dj.conn(**db_creds_test) + prefix + "_university", connection=dj.conn(**db_creds_test) ) # --------------- Fill University ------------------- test_data_dir = Path(__file__).parent / "data" diff --git a/tests/test_update1.py b/tests/test_update1.py index 07e0e5b80..f29d2ab0e 100644 --- a/tests/test_update1.py +++ b/tests/test_update1.py @@ -4,7 +4,6 @@ from pathlib import Path import tempfile import datajoint as dj -from . import PREFIX from datajoint import DataJointError @@ -42,9 +41,9 @@ def mock_stores_update(tmpdir_factory): @pytest.fixture -def schema_update1(connection_test): +def schema_update1(connection_test, prefix): schema = dj.Schema( - PREFIX + "_update1", context=dict(Thing=Thing), connection=connection_test + prefix + "_update1", context=dict(Thing=Thing), connection=connection_test ) schema(Thing) yield schema From fd539310153e6e0274c5c5a0986ddbffca445439 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Fri, 15 Dec 2023 14:44:07 -0700 Subject: [PATCH 196/212] Replace PREFIX in test_schema --- tests/test_schema.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/test_schema.py b/tests/test_schema.py index 1c49c58e8..d9e220892 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -143,9 +143,9 @@ def test_unauthorized_database(db_creds_test): ) -def test_drop_database(db_creds_test): +def test_drop_database(db_creds_test, prefix): schema = dj.Schema( - PREFIX + "_drop_test", connection=dj.conn(reset=True, **db_creds_test) + prefix + "_drop_test", connection=dj.conn(reset=True, **db_creds_test) ) assert schema.exists schema.drop() @@ -153,8 +153,8 @@ def test_drop_database(db_creds_test): schema.drop() # should do nothing -def test_overlapping_name(connection_test): - test_schema = dj.Schema(PREFIX + "_overlapping_schema", connection=connection_test) +def test_overlapping_name(connection_test, prefix): + test_schema = dj.Schema(prefix + "_overlapping_schema", connection=connection_test) @test_schema class Unit(dj.Manual): From 725cae21d29aff2ea77f56b97519a251cf63f588 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 18 Dec 2023 14:34:47 -0700 Subject: [PATCH 197/212] Resolve DeprecationWarning in tests/test_blob.py::test_insert_longblob --- tests/test_blob.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/test_blob.py b/tests/test_blob.py index e55488987..62e6312ad 100644 --- 
a/tests/test_blob.py +++ b/tests/test_blob.py @@ -191,7 +191,8 @@ def test_insert_longblob(schema_any): ) dj.conn().query(query_32_blob).fetchall() dj.blob.use_32bit_dims = True - assert (Longblob & "id=1").fetch1() == { + fetched = (Longblob & "id=1").fetch1() + expected = { "id": 1, "data": np.rec.array( [ @@ -207,6 +208,8 @@ def test_insert_longblob(schema_any): dtype=[("hits", "O"), ("sides", "O"), ("tasks", "O"), ("stage", "O")], ), } + assert fetched['id'] == expected['id'] + assert np.array_equal(fetched['data'], expected['data']) (Longblob & "id=1").delete() dj.blob.use_32bit_dims = False From 34161a8cd7b31c31dee92f89ffe4d50fa66e9d98 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 18 Dec 2023 14:41:17 -0700 Subject: [PATCH 198/212] Move insert_blobs to module scope No reason for this to be defined in the fixture. Move to the module level to stay consistent. --- tests/test_blob_matlab.py | 69 ++++++++++++++++++++------------------- 1 file changed, 35 insertions(+), 34 deletions(-) diff --git a/tests/test_blob_matlab.py b/tests/test_blob_matlab.py index 4578bb834..8e467cf06 100644 --- a/tests/test_blob_matlab.py +++ b/tests/test_blob_matlab.py @@ -14,6 +14,41 @@ class Blob(dj.Manual): """ +def insert_blobs(schema): + """ + This function inserts blobs resulting from the following datajoint-matlab code: + + self.insert({ + 1 'simple string' 'character string' + 2 '1D vector' 1:15:180 + 3 'string array' {'string1' 'string2'} + 4 'struct array' struct('a', {1,2}, 'b', {struct('c', magic(3)), struct('C', magic(5))}) + 5 '3D double array' reshape(1:24, [2,3,4]) + 6 '3D uint8 array' reshape(uint8(1:24), [2,3,4]) + 7 '3D complex array' fftn(reshape(1:24, [2,3,4])) + }) + + and then dumped using the command + mysqldump -u username -p --hex-blob test_schema blob_table > blob.sql + """ + + schema.connection.query( + """ + INSERT INTO {table_name} VALUES + (1,'simple string',0x6D596D00410200000000000000010000000000000010000000000000000400000000000000630068006100720061006300740065007200200073007400720069006E006700), + (2,'1D vector',0x6D596D0041020000000000000001000000000000000C000000000000000600000000000000000000000000F03F00000000000030400000000000003F4000000000000047400000000000804E4000000000000053400000000000C056400000000000805A400000000000405E4000000000000061400000000000E062400000000000C06440), + (3,'string array',0x6D596D00430200000000000000010000000000000002000000000000002F0000000000000041020000000000000001000000000000000700000000000000040000000000000073007400720069006E00670031002F0000000000000041020000000000000001000000000000000700000000000000040000000000000073007400720069006E0067003200), + (4,'struct 
array',0x6D596D005302000000000000000100000000000000020000000000000002000000610062002900000000000000410200000000000000010000000000000001000000000000000600000000000000000000000000F03F9000000000000000530200000000000000010000000000000001000000000000000100000063006900000000000000410200000000000000030000000000000003000000000000000600000000000000000000000000204000000000000008400000000000001040000000000000F03F0000000000001440000000000000224000000000000018400000000000001C40000000000000004029000000000000004102000000000000000100000000000000010000000000000006000000000000000000000000000040100100000000000053020000000000000001000000000000000100000000000000010000004300E9000000000000004102000000000000000500000000000000050000000000000006000000000000000000000000003140000000000000374000000000000010400000000000002440000000000000264000000000000038400000000000001440000000000000184000000000000028400000000000003240000000000000F03F0000000000001C400000000000002A400000000000003340000000000000394000000000000020400000000000002C400000000000003440000000000000354000000000000000400000000000002E400000000000003040000000000000364000000000000008400000000000002240), + (5,'3D double array',0x6D596D004103000000000000000200000000000000030000000000000004000000000000000600000000000000000000000000F03F000000000000004000000000000008400000000000001040000000000000144000000000000018400000000000001C40000000000000204000000000000022400000000000002440000000000000264000000000000028400000000000002A400000000000002C400000000000002E40000000000000304000000000000031400000000000003240000000000000334000000000000034400000000000003540000000000000364000000000000037400000000000003840), + (6,'3D uint8 array',0x6D596D0041030000000000000002000000000000000300000000000000040000000000000009000000000000000102030405060708090A0B0C0D0E0F101112131415161718), + (7,'3D complex array',0x6D596D0041030000000000000002000000000000000300000000000000040000000000000006000000010000000000000000C0724000000000000028C000000000000038C0000000000000000000000000000038C0000000000000000000000000000052C00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000052C00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000052C00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000AA4C58E87AB62B400000000000000000AA4C58E87AB62BC0000000000000008000000000000052400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000080000000000000008000000000000052C000000000000000800000000000000080000000000000008000000000000000800000000000000080 + ); + """.format( + table_name=Blob.full_table_name + ) + ) + + @pytest.fixture def schema_blob(connection_test, prefix): schema = dj.Schema(prefix + "_test1", dict(Blob=Blob), connection=connection_test) @@ -24,40 +59,6 @@ def schema_blob(connection_test, prefix): @pytest.fixture def schema_blob_pop(schema_blob): - def insert_blobs(schema): - """ - This function inserts blobs resulting from the following datajoint-matlab code: - - self.insert({ - 1 'simple string' 'character string' - 2 '1D vector' 1:15:180 - 3 'string array' {'string1' 'string2'} - 4 'struct array' struct('a', {1,2}, 'b', {struct('c', magic(3)), struct('C', magic(5))}) - 5 '3D double array' reshape(1:24, [2,3,4]) - 6 '3D uint8 array' reshape(uint8(1:24), [2,3,4]) - 7 '3D complex array' fftn(reshape(1:24, [2,3,4])) - }) - - and then dumped using the command - mysqldump -u 
username -p --hex-blob test_schema blob_table > blob.sql - """ - - schema.connection.query( - """ - INSERT INTO {table_name} VALUES - (1,'simple string',0x6D596D00410200000000000000010000000000000010000000000000000400000000000000630068006100720061006300740065007200200073007400720069006E006700), - (2,'1D vector',0x6D596D0041020000000000000001000000000000000C000000000000000600000000000000000000000000F03F00000000000030400000000000003F4000000000000047400000000000804E4000000000000053400000000000C056400000000000805A400000000000405E4000000000000061400000000000E062400000000000C06440), - (3,'string array',0x6D596D00430200000000000000010000000000000002000000000000002F0000000000000041020000000000000001000000000000000700000000000000040000000000000073007400720069006E00670031002F0000000000000041020000000000000001000000000000000700000000000000040000000000000073007400720069006E0067003200), - (4,'struct array',0x6D596D005302000000000000000100000000000000020000000000000002000000610062002900000000000000410200000000000000010000000000000001000000000000000600000000000000000000000000F03F9000000000000000530200000000000000010000000000000001000000000000000100000063006900000000000000410200000000000000030000000000000003000000000000000600000000000000000000000000204000000000000008400000000000001040000000000000F03F0000000000001440000000000000224000000000000018400000000000001C40000000000000004029000000000000004102000000000000000100000000000000010000000000000006000000000000000000000000000040100100000000000053020000000000000001000000000000000100000000000000010000004300E9000000000000004102000000000000000500000000000000050000000000000006000000000000000000000000003140000000000000374000000000000010400000000000002440000000000000264000000000000038400000000000001440000000000000184000000000000028400000000000003240000000000000F03F0000000000001C400000000000002A400000000000003340000000000000394000000000000020400000000000002C400000000000003440000000000000354000000000000000400000000000002E400000000000003040000000000000364000000000000008400000000000002240), - (5,'3D double array',0x6D596D004103000000000000000200000000000000030000000000000004000000000000000600000000000000000000000000F03F000000000000004000000000000008400000000000001040000000000000144000000000000018400000000000001C40000000000000204000000000000022400000000000002440000000000000264000000000000028400000000000002A400000000000002C400000000000002E40000000000000304000000000000031400000000000003240000000000000334000000000000034400000000000003540000000000000364000000000000037400000000000003840), - (6,'3D uint8 array',0x6D596D0041030000000000000002000000000000000300000000000000040000000000000009000000000000000102030405060708090A0B0C0D0E0F101112131415161718), - (7,'3D complex 
array',0x6D596D0041030000000000000002000000000000000300000000000000040000000000000006000000010000000000000000C0724000000000000028C000000000000038C0000000000000000000000000000038C0000000000000000000000000000052C00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000052C00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000052C00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000AA4C58E87AB62B400000000000000000AA4C58E87AB62BC0000000000000008000000000000052400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000080000000000000008000000000000052C000000000000000800000000000000080000000000000008000000000000000800000000000000080 - ); - """.format( - table_name=Blob.full_table_name - ) - ) - assert not dj.config["safemode"], "safemode must be disabled" Blob().delete() insert_blobs(schema_blob) From a7ad2139452e203912db1556954d138121d07104 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 18 Dec 2023 14:51:23 -0700 Subject: [PATCH 199/212] Delete commented code --- tests/test_relation_u.py | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/tests/test_relation_u.py b/tests/test_relation_u.py index d5dd3a7fc..dbb3b6737 100644 --- a/tests/test_relation_u.py +++ b/tests/test_relation_u.py @@ -5,18 +5,6 @@ from .schema_simple import * -# def setup_class(cls): -# cls.user = User() -# cls.language = Language() -# cls.subject = Subject() -# cls.experiment = Experiment() -# cls.trial = Trial() -# cls.ephys = Ephys() -# cls.channel = Ephys.Channel() -# cls.img = Image() -# cls.trash = UberTrash() - - def test_restriction(lang, languages, trial): language_set = {s[1] for s in languages} rel = dj.U("language") & lang From bf17e75ffcc3716fd3ad14a1e44baa9085bb2315 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 18 Dec 2023 14:55:56 -0700 Subject: [PATCH 200/212] Format with black --- tests/test_blob.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_blob.py b/tests/test_blob.py index 62e6312ad..a838c4620 100644 --- a/tests/test_blob.py +++ b/tests/test_blob.py @@ -208,8 +208,8 @@ def test_insert_longblob(schema_any): dtype=[("hits", "O"), ("sides", "O"), ("tasks", "O"), ("stage", "O")], ), } - assert fetched['id'] == expected['id'] - assert np.array_equal(fetched['data'], expected['data']) + assert fetched["id"] == expected["id"] + assert np.array_equal(fetched["data"], expected["data"]) (Longblob & "id=1").delete() dj.blob.use_32bit_dims = False From e755b9763d950137718fb8447967ceef5172f57d Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 18 Dec 2023 15:08:14 -0700 Subject: [PATCH 201/212] Remove nosetest commands from dev docs --- docs/src/develop.md | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/docs/src/develop.md b/docs/src/develop.md index 842c04d96..99f291652 100644 --- a/docs/src/develop.md +++ b/docs/src/develop.md @@ -39,24 +39,16 @@ The following will verify there are no regression errors by running our test sui - Entire test suite: ``` - nosetests -vw tests_old + pytest -sv --cov-report term-missing --cov=datajoint tests ``` - > Note: We are in the process of upgrading to `pytest` tests. 
To run those, use: - > ``` - > pytest -sv --cov-report term-missing --cov=datajoint tests - > ``` - A single functional test: ``` - nosetests -vs --tests=tests_old.test_external_class:test_insert_and_fetch + pytest -sv tests/test_connection.py::test_dj_conn ``` - > Note: We are in the process of upgrading to `pytest` tests. To run those, use: - > ``` - > pytest -sv tests/test_connection.py::test_dj_conn - > ``` - A single class test: ``` - nosetests -vs --tests=tests_old.test_fetch:TestFetch.test_getattribute_for_fetch1 + pytest -sv tests/test_aggr_regressions.py::TestIssue558 ``` ### Style Tests From 207ed12464474c3595ce6d619aa7b09364787c6a Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 18 Dec 2023 16:15:55 -0700 Subject: [PATCH 202/212] Add Python 3.10 and 3.11 to CI --- .github/workflows/development.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/development.yaml b/.github/workflows/development.yaml index 5f3b1f075..9eb21cd97 100644 --- a/.github/workflows/development.yaml +++ b/.github/workflows/development.yaml @@ -55,7 +55,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - py_ver: ["3.9"] + py_ver: ["3.9", "3.10", "3.11"] mysql_ver: ["8.0", "5.7"] include: - py_ver: "3.8" From d96a04bb0e044369f48904551847e25a174ca244 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 18 Dec 2023 16:32:30 -0700 Subject: [PATCH 203/212] Test 3.10 and 3.11 with MySQL 8.0 only --- .github/workflows/development.yaml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/development.yaml b/.github/workflows/development.yaml index 9eb21cd97..e9a72f485 100644 --- a/.github/workflows/development.yaml +++ b/.github/workflows/development.yaml @@ -55,9 +55,13 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - py_ver: ["3.9", "3.10", "3.11"] + py_ver: ["3.9"] mysql_ver: ["8.0", "5.7"] include: + - py_ver: "3.11" + mysql_ver: "8.0" + - py_ver: "3.10" + mysql_ver: "8.0" - py_ver: "3.8" mysql_ver: "5.7" - py_ver: "3.7" From b58e2386d5b56276fc3613a187c4c779b87adf4b Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Mon, 18 Dec 2023 16:44:14 -0700 Subject: [PATCH 204/212] Remove nosetest commands from compose stack --- LNX-docker-compose.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/LNX-docker-compose.yml b/LNX-docker-compose.yml index eaf3a48cd..cba91dc8c 100644 --- a/LNX-docker-compose.yml +++ b/LNX-docker-compose.yml @@ -87,11 +87,9 @@ services: - -c - | set -e - pip install --user nose nose-cov pip install -e . pip list --format=freeze | grep datajoint pytest -sv --cov-report term-missing --cov=datajoint tests - nosetests -vsw tests_old --with-coverage --cover-package=datajoint # ports: # - "8888:8888" user: ${HOST_UID:-1000}:anaconda From 27c6be10a99cdd9cd71d262e70adb46973f62d24 Mon Sep 17 00:00:00 2001 From: Ethan Ho Date: Tue, 19 Dec 2023 16:35:21 -0700 Subject: [PATCH 205/212] Add fixture enable_feature_32bit_dims Ensures that dj.blob.use_32bit_dims is turned off even if test_insert_longblob fails. 
---
 tests/test_blob.py | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/tests/test_blob.py b/tests/test_blob.py
index a838c4620..6d90cf544 100644
--- a/tests/test_blob.py
+++ b/tests/test_blob.py
@@ -1,3 +1,4 @@
+import pytest
 import datajoint as dj
 import timeit
 import numpy as np
@@ -10,6 +11,13 @@
 from .schema import Longblob
 
 
+@pytest.fixture
+def enable_feature_32bit_dims():
+    dj.blob.use_32bit_dims = True
+    yield
+    dj.blob.use_32bit_dims = False
+
+
 def test_pack():
     for x in (
         32,
@@ -180,6 +188,8 @@ def test_insert_longblob(schema_any):
     assert (Longblob & "id=1").fetch1()["data"].all() == query_mym_blob["data"].all()
     (Longblob & "id=1").delete()
 
+
+def test_insert_longblob_32bit(schema_any, enable_feature_32bit_dims):
     query_32_blob = (
         "INSERT INTO djtest_test1.longblob (id, data) VALUES (1, "
         "X'6D596D00530200000001000000010000000400000068697473007369646573007461736B73007374"
@@ -190,7 +200,6 @@ def test_insert_longblob(schema_any):
         "00000041020000000100000008000000040000000000000053007400610067006500200031003000')"
     )
     dj.conn().query(query_32_blob).fetchall()
-    dj.blob.use_32bit_dims = True
     fetched = (Longblob & "id=1").fetch1()
     expected = {
         "id": 1,
@@ -211,7 +220,6 @@ def test_insert_longblob(schema_any):
     assert fetched["id"] == expected["id"]
     assert np.array_equal(fetched["data"], expected["data"])
     (Longblob & "id=1").delete()
-    dj.blob.use_32bit_dims = False

From 7422f9442256dcb7847dd3bde474b03c443206d8 Mon Sep 17 00:00:00 2001
From: Ethan Ho
Date: Tue, 19 Dec 2023 16:36:31 -0700
Subject: [PATCH 206/212] Explicitly define timeit context

A possible fix for #1145.
---
 tests/test_blob.py | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

diff --git a/tests/test_blob.py b/tests/test_blob.py
index 6d90cf544..7c081c54b 100644
--- a/tests/test_blob.py
+++ b/tests/test_blob.py
@@ -225,19 +225,26 @@ def test_insert_longblob_32bit(schema_any, enable_feature_32bit_dims):
 def test_datetime_serialization_speed():
     # If this fails that means for some reason deserializing/serializing
     # np arrays of np.datetime64 types is now slower than regular arrays of datetime
+    assert not dj.blob.use_32bit_dims, "32 bit dims should be off for this test"
+    context = dict(
+        np=np,
+        datetime=datetime,
+        pack=pack,
+        unpack=unpack,
+    )
     optimized_exe_time = timeit.timeit(
         setup="myarr=pack(np.array([np.datetime64('2022-10-13 03:03:13') for _ in range(0, 10000)]))",
         stmt="unpack(myarr)",
         number=10,
-        globals=globals(),
+        globals=context
     )
     print(f"np time {optimized_exe_time}")
 
     baseline_exe_time = timeit.timeit(
         setup="myarr2=pack(np.array([datetime(2022,10,13,3,3,13) for _ in range (0, 10000)]))",
         stmt="unpack(myarr2)",
         number=10,
-        globals=globals(),
+        globals=context
     )
     print(f"python time {baseline_exe_time}")

From 674a669beffd4f66a7104759360ee4a7225483e5 Mon Sep 17 00:00:00 2001
From: Ethan Ho
Date: Tue, 19 Dec 2023 16:37:00 -0700
Subject: [PATCH 207/212] Format with black

---
 tests/test_blob.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/tests/test_blob.py b/tests/test_blob.py
index 7c081c54b..12039f7fb 100644
--- a/tests/test_blob.py
+++ b/tests/test_blob.py
@@ -237,14 +237,14 @@ def test_datetime_serialization_speed():
         setup="myarr=pack(np.array([np.datetime64('2022-10-13 03:03:13') for _ in range(0, 10000)]))",
         stmt="unpack(myarr)",
         number=10,
-        globals=context
+        globals=context,
     )
     print(f"np time {optimized_exe_time}")
 
     baseline_exe_time = timeit.timeit(
         setup="myarr2=pack(np.array([datetime(2022,10,13,3,3,13) for _ in range (0, 10000)]))",
         stmt="unpack(myarr2)",
         number=10,
-        globals=context
+        globals=context,
     )
     print(f"python time {baseline_exe_time}")

From a0a4a96e1c15bf7ddfcf1a04e9e7b07548e53a35 Mon Sep 17 00:00:00 2001
From: Ethan Ho
Date: Tue, 2 Jan 2024 11:48:56 -0700
Subject: [PATCH 208/212] Use latest datajoint/nginx image

New image pushed as part of
https://datajoint.atlassian.net/browse/DEV-397?atlOrigin=eyJpIjoiMTRhMDU4YjkyMjljNDg0NjkyMzBlMjQyNGViOWRjMzEiLCJwIjoiamlyYS1zbGFjay1pbnQifQ&focusedCommentId=10885&page=com.atlassian.jira.plugin.system.issuetabpanels%3Acomment-tabpanel#comment-10885
---
 LNX-docker-compose.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/LNX-docker-compose.yml b/LNX-docker-compose.yml
index eaf3a48cd..3b2e15e1f 100644
--- a/LNX-docker-compose.yml
+++ b/LNX-docker-compose.yml
@@ -45,7 +45,7 @@ services:
       interval: 15s
   fakeservices.datajoint.io:
     <<: *net
-    image: datajoint/nginx:v0.2.7
+    image: datajoint/nginx:v0.2.8
     environment:
       - ADD_db_TYPE=DATABASE
      - ADD_db_ENDPOINT=db:3306

From 960e3e4922fccce40e2cfa6385c7ac91a8e8c162 Mon Sep 17 00:00:00 2001
From: Ethan Ho
Date: Wed, 3 Jan 2024 11:37:37 -0700
Subject: [PATCH 209/212] Partially merge changes from dev-tests-plat-143-debug-ssl

---
 LNX-docker-compose.yml | 4 ++--
 tests/conftest.py      | 7 ++++++-
 2 files changed, 8 insertions(+), 3 deletions(-)

diff --git a/LNX-docker-compose.yml b/LNX-docker-compose.yml
index 7e131a3d6..4214c72f7 100644
--- a/LNX-docker-compose.yml
+++ b/LNX-docker-compose.yml
@@ -7,7 +7,7 @@ x-net:
 services:
   db:
     <<: *net
-    image: datajoint/mysql:${MYSQL_VER:-8.0}
+    image: datajoint/mysql:${MYSQL_VER:-5.7}
     environment:
       - MYSQL_ROOT_PASSWORD=${DJ_PASS:-password}
     command: mysqld --default-authentication-plugin=mysql_native_password
@@ -59,7 +59,7 @@ services:
 #      - "3306:3306"
   app:
     <<: *net
-    image: datajoint/djtest:py${PY_VER:-3.8}-${DISTRO:-alpine}
+    image: datajoint/djtest:py${PY_VER:-3.11}-${DISTRO:-alpine}
     depends_on:
       db:
         condition: service_healthy
diff --git a/tests/conftest.py b/tests/conftest.py
index cc2c8062e..deae0aab1 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -76,6 +76,11 @@ def db_creds_root() -> Dict:
     )
 
 
+@pytest.fixture(scope="session")
+def setup_ssl_config():
+    dj.config["database.use_tls"] = False
+    yield
+
 @pytest.fixture(scope="session")
 def connection_root_bare(db_creds_root):
     connection = dj.Connection(**db_creds_root)
     yield connection
@@ -83,7 +88,7 @@ def connection_root_bare(db_creds_root):
 
 
 @pytest.fixture(scope="session")
-def connection_root(connection_root_bare, prefix):
+def connection_root(connection_root_bare, prefix, setup_ssl_config):
     """Root user database connection."""
     dj.config["safemode"] = False
     conn_root = connection_root_bare

From 6ffe78a485b6a0c5ed619bf789faaa11d941823d Mon Sep 17 00:00:00 2001
From: Ethan Ho
Date: Wed, 3 Jan 2024 12:07:26 -0700
Subject: [PATCH 210/212] Use TLS config for Connection constructor

---
 tests/conftest.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/tests/conftest.py b/tests/conftest.py
index deae0aab1..91db43e30 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -81,14 +81,15 @@ def setup_ssl_config():
     dj.config["database.use_tls"] = False
     yield
 
+
 @pytest.fixture(scope="session")
-def connection_root_bare(db_creds_root):
+def connection_root_bare(db_creds_root, setup_ssl_config):
     connection = dj.Connection(**db_creds_root)
     yield connection
 
 
 @pytest.fixture(scope="session")
-def connection_root(connection_root_bare, prefix, setup_ssl_config):
+def connection_root(connection_root_bare, prefix):
     """Root user database connection."""
     dj.config["safemode"] = False
     conn_root = connection_root_bare

From 3ca55ddca97be32dd1c76bbdd381f85256694b93 Mon Sep 17 00:00:00 2001
From: Ethan Ho
Date: Wed, 3 Jan 2024 12:07:54 -0700
Subject: [PATCH 211/212] Use fixture for test_json skip

---
 tests/test_json.py | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/tests/test_json.py b/tests/test_json.py
index 53016505c..803737165 100644
--- a/tests/test_json.py
+++ b/tests/test_json.py
@@ -5,10 +5,18 @@
 import numpy as np
 from packaging.version import Version
 
-if Version(dj.conn().query("select @@version;").fetchone()[0]) < Version("8.0.0"):
-    pytest.skip("These tests require MySQL >= v8.0.0", allow_module_level=True)
+
+@pytest.fixture
+def skip_for_mysql_lt_8(connection_test):
+    """
+    Skip test if MySQL version is less than 8.0.0
+    """
+    mysql_version = connection_test.query("select @@version;").fetchone()[0]
+    if Version(mysql_version) < Version("8.0.0"):
+        pytest.skip("These tests require MySQL >= v8.0.0", allow_module_level=True)
 
 
+@pytest.mark.usefixtures("skip_for_mysql_lt_8")
 class Team(dj.Lookup):
     definition = """
     name: varchar(40)

From 85a0189477381008fa7a87672dcfa2c7afc3c960 Mon Sep 17 00:00:00 2001
From: Ethan Ho
Date: Wed, 3 Jan 2024 12:11:14 -0700
Subject: [PATCH 212/212] For use_tls, use config value if not provided as kwarg

---
 datajoint/connection.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/datajoint/connection.py b/datajoint/connection.py
index 65b096ab0..45c75e702 100644
--- a/datajoint/connection.py
+++ b/datajoint/connection.py
@@ -182,6 +182,8 @@ def __init__(self, host, user, password, port=None, init_fun=None, use_tls=None)
         elif port is None:
             port = config["database.port"]
         self.conn_info = dict(host=host, port=port, user=user, passwd=password)
+        if use_tls is None:
+            use_tls = config["database.use_tls"]
         if use_tls is not False:
             self.conn_info["ssl"] = (
                 use_tls if isinstance(use_tls, dict) else {"ssl": {}}