Skip to content

Commit

Permalink
Merge pull request #785 from MolSSI/cleanup
Browse files Browse the repository at this point in the history
Some small pre-3.12 cleanups
  • Loading branch information
bennybp authored Oct 27, 2023
2 parents de33af2 + f48db30 commit e53ffd9
Show file tree
Hide file tree
Showing 11 changed files with 27 additions and 18 deletions.
1 change: 1 addition & 0 deletions conda-build/qcportal/meta.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ requirements:
- tqdm
- pandas
- pyjwt
- packaging


test:
Expand Down
3 changes: 2 additions & 1 deletion qcarchivetesting/conda-envs/fulltest_qcportal.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -15,4 +15,5 @@ dependencies:
- tabulate
- tqdm
- pandas
- pyjwt
- pyjwt
- packaging
1 change: 1 addition & 0 deletions qcarchivetesting/conda-envs/fulltest_server.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ dependencies:
- tqdm
- pandas
- pyjwt
- packaging

# QCFractal dependencies
- flask
Expand Down
2 changes: 2 additions & 0 deletions qcarchivetesting/conda-envs/fulltest_snowflake.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ dependencies:
- postgresql

# QCPortal dependencies
# NOTE: msgpack-python in conda is msgpack in pypi (due to a rename around v0.5)
- numpy
- msgpack-python
- requests
Expand All @@ -20,6 +21,7 @@ dependencies:
- tqdm
- pandas
- pyjwt
- packaging

# QCFractalCompute dependencies
- parsl
Expand Down
1 change: 1 addition & 0 deletions qcarchivetesting/conda-envs/fulltest_worker.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ dependencies:
- tqdm
- pandas
- pyjwt
- packaging

# QCFractalCompute dependencies
- parsl
Expand Down
3 changes: 2 additions & 1 deletion qcarchivetesting/qcarchivetesting/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
from qcelemental.models import Molecule
from qcelemental.models.results import WavefunctionProperties

from qcfractal.components.serverinfo.socket import geoip2_found
from qcportal.serialization import _json_decode

# Valid client encodings
Expand All @@ -26,7 +27,7 @@
geoip_filename = "GeoLite2-City-Test.mmdb"
ip_testdata_path = os.path.join(_my_path, "MaxMind-DB", "source-data", "GeoIP2-City-Test.json")

ip_tests_enabled = os.path.exists(geoip_path) and os.path.exists(ip_testdata_path)
ip_tests_enabled = os.path.exists(geoip_path) and os.path.exists(ip_testdata_path) and geoip2_found

testconfig_path = os.path.join(_my_path, "config_files")
migrationdata_path = os.path.join(_my_path, "migration_data")
Expand Down
7 changes: 4 additions & 3 deletions qcfractal/qcfractal/test_periodics.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,15 +27,16 @@ def test_periodics_server_stats(snowflake: QCATestingSnowflake):
sleep_time = snowflake._qcf_config.statistics_frequency

snowflake.start_job_runner()
time.sleep(0.25)
time.sleep(sleep_time / 2)

for i in range(5):
time_0 = datetime.utcnow()
time.sleep(sleep_time)
time_1 = datetime.utcnow()

stats = storage_socket.serverinfo.query_server_stats(ServerStatsQueryFilters())
assert len(stats) == i + 1
filters = ServerStatsQueryFilters(before=time_1, after=time_0)
stats = storage_socket.serverinfo.query_server_stats(filters)
assert len(stats) == 1
assert time_0 < stats[0]["timestamp"] < time_1


Expand Down
2 changes: 1 addition & 1 deletion qcfractalcompute/qcfractalcompute/compute_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,11 @@

import parsl.executors.high_throughput.interchange
import tabulate
from packaging.version import parse as parse_version
from parsl.config import Config as ParslConfig
from parsl.dataflow.dflow import DataFlowKernel
from parsl.dataflow.futures import Future as ParslFuture
from parsl.executors import HighThroughputExecutor, ThreadPoolExecutor
from pkg_resources import parse_version
from pydantic import BaseModel, Extra, Field
from requests.exceptions import Timeout

Expand Down
1 change: 1 addition & 0 deletions qcportal/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ dependencies = [
"tqdm",
"pandas",
"pyjwt",
"packaging",
]


Expand Down
2 changes: 1 addition & 1 deletion qcportal/qcportal/client_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
import pydantic
import requests
import yaml
from pkg_resources import parse_version
from packaging.version import parse as parse_version

from . import __version__
from .exceptions import AuthenticationFailure
Expand Down
22 changes: 11 additions & 11 deletions qcportal/qcportal/dataset_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -1190,20 +1190,20 @@ def compile_values(
Parameters
-----------
value_call
Function to call on each record to extract the desired value. Must return a scalar value or
value_call
Function to call on each record to extract the desired value. Must return a scalar value or
a sequence of values if 'unpack' is set to True.
value_names
Column name(s) for the extracted value(s). If a string is provided and multiple values are
returned by 'value_call', columns are named by appending an index to this string. If a list
Column name(s) for the extracted value(s). If a string is provided and multiple values are
returned by 'value_call', columns are named by appending an index to this string. If a list
of strings is provided, it must match the length of the sequence returned by 'value_call'.
Default is "value".
entry_names
entry_names
Entry names to filter records. If not provided, considers all entries.
specification_names
specification_names
Specification names to filter records. If not provided, considers all specifications.
unpack
Expand All @@ -1226,11 +1226,11 @@ def compile_values(
Notes
------
1. The DataFrame is structured such that the rows are entries and columns are specifications.
2. If 'unpack' is True, the function assumes 'value_call' returns a sequence of values that need
2. If 'unpack' is True, the function assumes 'value_call' returns a sequence of values that need
to be distributed across columns in the resulting DataFrame. 'value_call' should always return the
same number of values for each record if unpack is True.
"""

def _data_generator(unpack=False):
for entry_name, spec_name, record in self.iterate_records(
entry_names=entry_names,
Expand Down Expand Up @@ -1279,13 +1279,13 @@ def get_properties_df(self, properties_list: Sequence[str]) -> pd.DataFrame:
This function uses the provided list of property names to extract corresponding
values from each record's properties. It returns a DataFrame where rows represent
each record. Each column has a top-level index as a specification,
and a second level index as the appropriate value name. Columns with all
each record. Each column has a top-level index as a specification,
and a second level index as the appropriate value name. Columns with all
NaN values are dropped.
Parameters:
-----------
properties_list
properties_list
List of property names to retrieve from the records.
Returns:
Expand Down

0 comments on commit e53ffd9

Please sign in to comment.