add llama more test #18

Workflow file for this run

name: CPU tests
on:
  push:
    branches:
      - main
  pull_request:
  schedule:
    - cron: "0 8 * * *"
jobs:
  pytest:
    runs-on: ubuntu-22.04
    strategy:
      matrix:
        python-version: ["3.10", "3.11"]
    container:
      image: us-central1-docker.pkg.dev/tpu-pytorch-releases/docker/xla:nightly_${{ matrix.python-version }}_tpuvm
    steps:
      - uses: actions/checkout@v4
      - name: Install torchax
        run: |
          pip install 'torch_xla2 @ git+https://github.com/pytorch/xla.git#subdirectory=experimental/torch_xla2'
      - name: Install dev dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -e '.[dev]'
      - name: Run PyTest
        run: |
          # TODO(https://github.com/AI-Hypercomputer/torchprime/issues/14): Remove and burn the token.
          export HF_TOKEN=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
          pytest
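
The TODO above tracks removing the hardcoded token. As a sketch only, not part of this run, the same step could read the token from a repository secret instead of committing it; the secret name HF_TOKEN below is an assumption, not something defined by this workflow:

      # Sketch: assumes a repository secret named HF_TOKEN has been created in the repo settings.
      - name: Run PyTest
        env:
          HF_TOKEN: ${{ secrets.HF_TOKEN }}  # injected by Actions at runtime rather than hardcoded
        run: |
          pytest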