Merge branch 'main' into repack
pauladkisson authored Jan 25, 2025
2 parents b07c002 + ce63b7c commit a8c57e8
Showing 258 changed files with 9,189 additions and 4,602 deletions.
97 changes: 97 additions & 0 deletions .github/actions/load-data/action.yml
@@ -0,0 +1,97 @@
name: 'Prepare Datasets'
description: 'Restores data from caches or downloads it from S3.'
inputs:
  aws-access-key-id:
    description: 'AWS Access Key ID'
    required: true
  aws-secret-access-key:
    description: 'AWS Secret Access Key'
    required: true
  s3-gin-bucket:
    description: 'S3 GIN Bucket URL'
    required: true
  os:
    description: 'Operating system'
    required: true
runs:
  using: 'composite'
  steps:
    - name: Get ephy_testing_data current head hash
      id: ephys
      shell: bash
      run: |
        HASH=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git HEAD | cut -f1)
        echo "HASH_EPHY_DATASET=$HASH" >> $GITHUB_OUTPUT
    - name: Cache ephys dataset
      uses: actions/cache@v4
      id: cache-ephys-datasets
      with:
        path: ./ephy_testing_data
        key: ephys-datasets-${{ inputs.os }}-${{ steps.ephys.outputs.HASH_EPHY_DATASET }}

    - name: Get ophys_testing_data current head hash
      id: ophys
      shell: bash
      run: |
        HASH=$(git ls-remote https://gin.g-node.org/CatalystNeuro/ophys_testing_data.git HEAD | cut -f1)
        echo "HASH_OPHYS_DATASET=$HASH" >> $GITHUB_OUTPUT
    - name: Cache ophys dataset
      uses: actions/cache@v4
      id: cache-ophys-datasets
      with:
        path: ./ophys_testing_data
        key: ophys-datasets-${{ inputs.os }}-${{ steps.ophys.outputs.HASH_OPHYS_DATASET }}

    - name: Get behavior_testing_data current head hash
      id: behavior
      shell: bash
      run: |
        HASH=$(git ls-remote https://gin.g-node.org/CatalystNeuro/behavior_testing_data.git HEAD | cut -f1)
        echo "HASH_BEHAVIOR_DATASET=$HASH" >> $GITHUB_OUTPUT
    - name: Cache behavior dataset
      uses: actions/cache@v4
      id: cache-behavior-datasets
      with:
        path: ./behavior_testing_data
        key: behavior-datasets-${{ inputs.os }}-${{ steps.behavior.outputs.HASH_BEHAVIOR_DATASET }}

    - name: Determine if downloads are required
      id: download-check
      shell: bash # Added shell property
      run: |
        if [[ "${{ steps.cache-ephys-datasets.outputs.cache-hit }}" != 'true' || \
              "${{ steps.cache-ophys-datasets.outputs.cache-hit }}" != 'true' || \
              "${{ steps.cache-behavior-datasets.outputs.cache-hit }}" != 'true' ]]; then
          echo "DOWNLOAD_REQUIRED=true" >> $GITHUB_OUTPUT
        else
          echo "DOWNLOAD_REQUIRED=false" >> $GITHUB_OUTPUT
        fi
    - if: ${{ steps.download-check.outputs.DOWNLOAD_REQUIRED == 'true' }}
      name: Install and configure AWS CLI
      shell: bash
      run: |
        pip install awscli
        aws configure set aws_access_key_id "${{ inputs.aws-access-key-id }}"
        aws configure set aws_secret_access_key "${{ inputs.aws-secret-access-key }}"
    - if: ${{ steps.cache-ephys-datasets.outputs.cache-hit != 'true' }}
      name: Download ephys dataset from S3
      shell: bash
      run: |
        aws s3 cp --recursive "${{ inputs.s3-gin-bucket }}/ephy_testing_data" ./ephy_testing_data
    - if: ${{ steps.cache-ophys-datasets.outputs.cache-hit != 'true' }}
      name: Download ophys dataset from S3
      shell: bash
      run: |
        aws s3 cp --recursive "${{ inputs.s3-gin-bucket }}/ophys_testing_data" ./ophys_testing_data
    - if: ${{ steps.cache-behavior-datasets.outputs.cache-hit != 'true' }}
      name: Download behavior dataset from S3
      shell: bash
      run: |
        aws s3 cp --recursive "${{ inputs.s3-gin-bucket }}/behavior_testing_data" ./behavior_testing_data
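Note: a workflow consumes this new composite action by checking out the repository and passing the four declared inputs. The following is a hypothetical caller sketch, not a file from this commit; the job name and matrix are assumptions, while the secret names match those forwarded in dailies.yml later in this diff:

jobs:
  example-tests:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: ["ubuntu-latest", "windows-latest"]
    steps:
      # The repository must be checked out so the local action path resolves.
      - uses: actions/checkout@v4
      - name: Prepare datasets (restore caches or download from S3)
        uses: ./.github/actions/load-data
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          s3-gin-bucket: ${{ secrets.S3_GIN_BUCKET }}
          os: ${{ matrix.os }}  # assumes the calling job runs in an OS matrix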
1 change: 1 addition & 0 deletions .github/workflows/all_os_versions.txt
@@ -0,0 +1 @@
["ubuntu-latest", "macos-latest", "windows-latest", "macos-13"]
1 change: 1 addition & 0 deletions .github/workflows/all_python_versions.txt
@@ -0,0 +1 @@
["3.9", "3.10", "3.11", "3.12"]
2 changes: 1 addition & 1 deletion .github/workflows/assess-file-changes.yml
@@ -41,7 +41,7 @@ jobs:
echo "CHANGELOG_UPDATED=false" >> $GITHUB_OUTPUT
for file in ${{ steps.changed-files.outputs.all_changed_files }}; do
echo $file
if [[ $file == "src/"* || $file == "tests/"* || $file == "requirements-minimal.txt" || $file == "requirements-testing.txt" || $file == "setup.py" || $file == ".github/"* ]]
if [[ $file == "src/"* || $file == "tests/"* || $file == "pyproject.toml" || $file == "setup.py" || $file == ".github/"* ]]
then
echo "Source changed"
echo "SOURCE_CHANGED=true" >> $GITHUB_OUTPUT
8 changes: 4 additions & 4 deletions .github/workflows/auto-publish.yml
@@ -15,14 +15,14 @@ jobs:
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.10"
      - name: Install dependencies
          python-version: "3.11"
      - name: Install Building Dependencies
        run: |
          python -m pip install --upgrade pip
          pip install wheel
          python -m pip install --upgrade build
      - name: Build package
        run: |
          python setup.py sdist bdist_wheel
          python -m build
      - name: pypi-publish
        uses: pypa/[email protected]
        with:
8 changes: 6 additions & 2 deletions .github/workflows/build_and_upload_docker_image_dev.yml
@@ -1,9 +1,13 @@
name: Build and Upload Docker Image of Current Dev Branch to GHCR

on:
  schedule:
    - cron: "0 13 * * *" # Daily at 9 EST
  workflow_dispatch:
  workflow_call:
    secrets:
      DOCKER_UPLOADER_USERNAME:
        required: true
      DOCKER_UPLOADER_PASSWORD:
        required: true

concurrency: # Cancel previous workflows on the same pull request
  group: ${{ github.workflow }}-${{ github.ref }}
@@ -1,9 +1,13 @@
name: Build and Upload Docker Image of Rclone With Config to GHCR

on:
  schedule:
    - cron: "0 16 * * 1" # Weekly at noon EST on Monday
  workflow_dispatch:
  workflow_call:
    secrets:
      DOCKER_UPLOADER_USERNAME:
        required: true
      DOCKER_UPLOADER_PASSWORD:
        required: true

concurrency: # Cancel previous workflows on the same pull request
  group: ${{ github.workflow }}-${{ github.ref }}
@@ -31,7 +31,7 @@ jobs:
        uses: docker/build-push-action@v5
        with:
          push: true # Push is a shorthand for --output=type=registry
          tags: ghcr.io/catalystneuro/neuroconv:yaml_variable
          tags: ghcr.io/catalystneuro/neuroconv_yaml_variable:latest
          context: .
          file: dockerfiles/neuroconv_latest_yaml_variable
          provenance: false
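The image reference appears to change here from a yaml_variable tag on the shared neuroconv package to a dedicated neuroconv_yaml_variable package tagged latest, so anything pulling this image needs the new name. A hypothetical consumer step using the updated reference (the step name and placement are assumptions):

      # Hypothetical step: pull the image under its new GHCR package name.
      - name: Pull YAML-variable image
        run: docker pull ghcr.io/catalystneuro/neuroconv_yaml_variable:latest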
138 changes: 137 additions & 1 deletion .github/workflows/dailies.yml
@@ -3,20 +3,92 @@ name: Daily workflows
on:
  workflow_dispatch:
  schedule:
    - cron: "0 16 * * *" # Daily at noon EST
    - cron: "0 4 * * *" # Daily at 8PM PST, 11PM EST, 5AM CET to avoid working hours

jobs:
  load_python_and_os_versions:
    runs-on: ubuntu-latest
    outputs:
      ALL_PYTHON_VERSIONS: ${{ steps.load_python_versions.outputs.python_versions }}
      ALL_OS_VERSIONS: ${{ steps.load_os_versions.outputs.os_versions }}
    steps:
      - uses: actions/checkout@v4
      - id: load_python_versions
        run: echo "python_versions=$(cat ./.github/workflows/all_python_versions.txt)" >> "$GITHUB_OUTPUT"
      - id: load_os_versions
        run: echo "os_versions=$(cat ./.github/workflows/all_os_versions.txt)" >> "$GITHUB_OUTPUT"

  build-and-upload-docker-image-dev:
    uses: ./.github/workflows/build_and_upload_docker_image_dev.yml
    secrets:
      DOCKER_UPLOADER_USERNAME: ${{ secrets.DOCKER_UPLOADER_USERNAME }}
      DOCKER_UPLOADER_PASSWORD: ${{ secrets.DOCKER_UPLOADER_PASSWORD }}

  run-daily-tests:
    needs: load_python_and_os_versions
    uses: ./.github/workflows/testing.yml
    secrets:
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      S3_GIN_BUCKET: ${{ secrets.S3_GIN_BUCKET }}
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
    with:
      python-versions: ${{ needs.load_python_and_os_versions.outputs.ALL_PYTHON_VERSIONS }}
      os-versions: ${{ needs.load_python_and_os_versions.outputs.ALL_OS_VERSIONS }}

  run-daily-dev-tests:
    needs: load_python_and_os_versions
    uses: ./.github/workflows/dev-testing.yml
    secrets:
      DANDI_API_KEY: ${{ secrets.DANDI_API_KEY }}
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      S3_GIN_BUCKET: ${{ secrets.S3_GIN_BUCKET }}
    with:
      python-versions: ${{ needs.load_python_and_os_versions.outputs.ALL_PYTHON_VERSIONS }}

  run-daily-live-service-testing:
    needs: load_python_and_os_versions
    uses: ./.github/workflows/live-service-testing.yml
    secrets:
      DANDI_API_KEY: ${{ secrets.DANDI_API_KEY }}
    with:
      python-versions: ${{ needs.load_python_and_os_versions.outputs.ALL_PYTHON_VERSIONS }}
      os-versions: ${{ needs.load_python_and_os_versions.outputs.ALL_OS_VERSIONS }}

  run-daily-neuroconv-docker-testing:
    uses: ./.github/workflows/neuroconv_docker_testing.yml
    secrets:
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      S3_GIN_BUCKET: ${{ secrets.S3_GIN_BUCKET }}

  run-daily-rclone-docker-testing:
    uses: ./.github/workflows/rclone_docker_testing.yml
    secrets:
      RCLONE_DRIVE_ACCESS_TOKEN: ${{ secrets.RCLONE_DRIVE_ACCESS_TOKEN }}
      RCLONE_DRIVE_REFRESH_TOKEN: ${{ secrets.RCLONE_DRIVE_REFRESH_TOKEN }}
      RCLONE_EXPIRY_TOKEN: ${{ secrets.RCLONE_EXPIRY_TOKEN }}

  run-daily-doc-link-checks:
    uses: ./.github/workflows/test-external-links.yml

  notify-build-and-upload-docker-image-dev:
    runs-on: ubuntu-latest
    needs: [build-and-upload-docker-image-dev]
    if: ${{ always() && needs.build-and-upload-docker-image-dev.result == 'failure' }}
    steps:
      - uses: dawidd6/action-send-mail@v3
        with:
          server_address: smtp.gmail.com
          server_port: 465
          username: ${{ secrets.MAIL_USERNAME }}
          password: ${{ secrets.MAIL_PASSWORD }}
          subject: NeuroConv Daily Docker Image Build and Upload Failure
          to: ${{ secrets.DAILY_FAILURE_EMAIL_LIST }}
          from: NeuroConv
          body: "The daily build and upload of the Docker image failed, please check status at https://github.com/catalystneuro/neuroconv/actions/workflows/dailies.yml"

  notify-test-failure:
    runs-on: ubuntu-latest
    needs: [run-daily-tests]
@@ -33,6 +105,70 @@ jobs:
          from: NeuroConv
          body: "The daily test workflow failed, please check status at https://github.com/catalystneuro/neuroconv/actions/workflows/dailies.yml"

  notify-dev-test-failure:
    runs-on: ubuntu-latest
    needs: [run-daily-dev-tests]
    if: ${{ always() && needs.run-daily-dev-tests.result == 'failure' }}
    steps:
      - uses: dawidd6/action-send-mail@v3
        with:
          server_address: smtp.gmail.com
          server_port: 465
          username: ${{ secrets.MAIL_USERNAME }}
          password: ${{ secrets.MAIL_PASSWORD }}
          subject: NeuroConv Daily Dev Test Failure
          to: ${{ secrets.DAILY_FAILURE_EMAIL_LIST }}
          from: NeuroConv
          body: "The daily dev test workflow failed, please check status at https://github.com/catalystneuro/neuroconv/actions/workflows/dailies.yml"

  notify-live-service-test-failure:
    runs-on: ubuntu-latest
    needs: [run-daily-live-service-testing]
    if: ${{ always() && needs.run-daily-live-service-testing.result == 'failure' }}
    steps:
      - uses: dawidd6/action-send-mail@v3
        with:
          server_address: smtp.gmail.com
          server_port: 465
          username: ${{ secrets.MAIL_USERNAME }}
          password: ${{ secrets.MAIL_PASSWORD }}
          subject: NeuroConv Daily Live Service Test Failure
          to: ${{ secrets.DAILY_FAILURE_EMAIL_LIST }}
          from: NeuroConv
          body: "The daily live service test workflow failed, please check status at https://github.com/catalystneuro/neuroconv/actions/workflows/dailies.yml"

  notify-neuroconv-docker-test-failure:
    runs-on: ubuntu-latest
    needs: [run-daily-neuroconv-docker-testing]
    if: ${{ always() && needs.run-daily-neuroconv-docker-testing.result == 'failure' }}
    steps:
      - uses: dawidd6/action-send-mail@v3
        with:
          server_address: smtp.gmail.com
          server_port: 465
          username: ${{ secrets.MAIL_USERNAME }}
          password: ${{ secrets.MAIL_PASSWORD }}
          subject: NeuroConv Daily NeuroConv Docker Test Failure
          to: ${{ secrets.DAILY_FAILURE_EMAIL_LIST }}
          from: NeuroConv
          body: "The daily neuroconv docker test workflow failed, please check status at https://github.com/catalystneuro/neuroconv/actions/workflows/dailies.yml"

  notify-rclone-docker-test-failure:
    runs-on: ubuntu-latest
    needs: [run-daily-rclone-docker-testing]
    if: ${{ always() && needs.run-daily-rclone-docker-testing.result == 'failure' }}
    steps:
      - uses: dawidd6/action-send-mail@v3
        with:
          server_address: smtp.gmail.com
          server_port: 465
          username: ${{ secrets.MAIL_USERNAME }}
          password: ${{ secrets.MAIL_PASSWORD }}
          subject: NeuroConv Daily Rclone Docker Test Failure
          to: ${{ secrets.DAILY_FAILURE_EMAIL_LIST }}
          from: NeuroConv
          body: "The daily rclone docker test workflow failed, please check status at https://github.com/catalystneuro/neuroconv/actions/workflows/dailies.yml"

  notify-link-check-failure:
    runs-on: ubuntu-latest
    needs: [run-daily-doc-link-checks]