Skip to content

Commit

Permalink
Copybara import of the project:
Browse files Browse the repository at this point in the history
--
52882e5 by Mohit Khatwani <[email protected]>:

install torch[cpu] in test script

COPYBARA_INTEGRATE_REVIEW=AI-Hypercomputer#1165 from AI-Hypercomputer:mohit/fix_orbax_hf 52882e5
PiperOrigin-RevId: 715183387
  • Loading branch information
khatwanimohit authored and maxtext authors committed Jan 14, 2025
1 parent bfc4264 commit b010030
Show file tree
Hide file tree
Showing 12 changed files with 42 additions and 0 deletions.
3 changes: 3 additions & 0 deletions end_to_end/tpu/gemma/2b/test_gemma.sh
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,9 @@ set -ex
idx=$(date +%Y-%m-%d-%H-%M)
export MODEL_VARIATION='2b'

# Installing torch for deps in forward_pass_logit_checker.py
pip install torch --index-url https://download.pytorch.org/whl/cpu

# After downloading checkpoints, copy them to GCS bucket at $CHKPT_BUCKET \
# Non-Googlers please remember to use separate GCS paths for uploading model weights from kaggle ($CHKPT_BUCKET) and MaxText compatible weights ($MODEL_BUCKET).
# Non-Googlers please remember to point these variables to GCS buckets that you own, this script uses internal buckets for testing.
Expand Down
2 changes: 2 additions & 0 deletions end_to_end/tpu/gemma2/27b/2_test_gemma.sh
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@ if [ -z "${BASE_OUTPUT_PATH}" ]; then
echo "BASE_OUTPUT_PATH is not set, using BASE_OUTPUT_PATH = ${BASE_OUTPUT_PATH}"
fi

# Installing torch for deps in forward_pass_logit_checker.py
pip install torch --index-url https://download.pytorch.org/whl/cpu

# Non-Googlers please remember to point `DATASET_PATH` to the GCS bucket where you have your training data
export DATASET_PATH=gs://maxtext-dataset
Expand Down
2 changes: 2 additions & 0 deletions end_to_end/tpu/gemma2/2b/test_gemma2.sh
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,8 @@ set -ex
idx=$(date +%Y-%m-%d-%H-%M)
export MODEL_VARIATION='2b'

# Installing torch for deps in forward_pass_logit_checker.py
pip install torch --index-url https://download.pytorch.org/whl/cpu

# After downloading checkpoints, copy them to GCS bucket at $CHKPT_BUCKET \
# Non-Googlers please remember to use separate GCS paths for uploading model weights from kaggle ($CHKPT_BUCKET) and MaxText compatible weights ($MODEL_BUCKET).
Expand Down
3 changes: 3 additions & 0 deletions end_to_end/tpu/gemma2/9b/2_test_gemma.sh
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,9 @@
set -ex
export MODEL_VARIATION='9b'

# Installing torch for deps in forward_pass_logit_checker.py
pip install torch --index-url https://download.pytorch.org/whl/cpu

if [ -z "${BASE_OUTPUT_PATH}" ]; then
# Non-Googlers please remember to point `BASE_OUTPUT_PATH` to a GCS bucket that you own, this bucket will store all the files generated by MaxText during a run
# Use the same BASE_OUTPUT_PATH as end_to_end/tpu/gemma2/9b/1_test_gemma.sh
Expand Down
4 changes: 4 additions & 0 deletions end_to_end/tpu/llama2/70b/2_test_llama2_70b.sh
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,10 @@
# the subfolders names aka RUN_NAMEs are static. Please remember to change BASE_OUTPUT_PATH across different runs.

set -ex

# Installing torch for deps in forward_pass_logit_checker.py
pip install torch --index-url https://download.pytorch.org/whl/cpu

export MODEL_VARIATION='llama2-70b'

if [ -z "${BASE_OUTPUT_PATH}" ]; then
Expand Down
4 changes: 4 additions & 0 deletions end_to_end/tpu/llama3.1/405b/2_test_llama3.1_405b.sh
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,10 @@
# the subfolders names aka RUN_NAMEs are static. Please remember to change BASE_OUTPUT_PATH across different runs.

set -ex

# Installing torch for deps in forward_pass_logit_checker.py
pip install torch --index-url https://download.pytorch.org/whl/cpu

export MODEL_VARIATION='llama3.1-405b'

if [ -z "${BASE_OUTPUT_PATH}" ]; then
Expand Down
4 changes: 4 additions & 0 deletions end_to_end/tpu/llama3.1/70b/2_test_llama3.1_70b.sh
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,10 @@
# the subfolders names aka RUN_NAMEs are static. Please remember to change BASE_OUTPUT_PATH across different runs.

set -ex

# Installing torch for deps in forward_pass_logit_checker.py
pip install torch --index-url https://download.pytorch.org/whl/cpu

export MODEL_VARIATION='llama3.1-70b'

if [ -z "${BASE_OUTPUT_PATH}" ]; then
Expand Down
4 changes: 4 additions & 0 deletions end_to_end/tpu/llama3.1/8b/2_test_llama3.1_8b.sh
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,10 @@
# the subfolders names aka RUN_NAMEs are static. Please remember to change BASE_OUTPUT_PATH across different runs.

set -ex

# Installing torch for deps in forward_pass_logit_checker.py
pip install torch --index-url https://download.pytorch.org/whl/cpu

export MODEL_VARIATION='llama3.1-8b'

if [ -z "${BASE_OUTPUT_PATH}" ]; then
Expand Down
4 changes: 4 additions & 0 deletions end_to_end/tpu/llama3/70b/2_test_llama3_70b.sh
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,10 @@
# the subfolders names aka RUN_NAMEs are static. Please remember to change BASE_OUTPUT_PATH across different runs.

set -ex

# Installing torch for deps in forward_pass_logit_checker.py
pip install torch --index-url https://download.pytorch.org/whl/cpu

export MODEL_VARIATION='llama3-70b'

if [ -z "${BASE_OUTPUT_PATH}" ]; then
Expand Down
4 changes: 4 additions & 0 deletions end_to_end/tpu/llama3/8b/2_test_llama3_8b.sh
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,10 @@
# the subfolders names aka RUN_NAMEs are static. Please remember to change BASE_OUTPUT_PATH across different runs.

set -ex

# Installing torch for deps in forward_pass_logit_checker.py
pip install torch --index-url https://download.pytorch.org/whl/cpu

export MODEL_VARIATION='llama3-8b'

if [ -z "${BASE_OUTPUT_PATH}" ]; then
Expand Down
4 changes: 4 additions & 0 deletions end_to_end/tpu/mistral/7b/test_mistral-7b.sh
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,10 @@
# Example Usage: export BASE_OUTPUT_PATH=/path/to/GCS/bucket; bash end_to_end/tpu/mistral/7b/test_mistral-7b.sh

set -ex

# Installing torch for deps in forward_pass_logit_checker.py
pip install torch --index-url https://download.pytorch.org/whl/cpu

MODEL_VARIATION='7b'

if [ -z "${BASE_OUTPUT_PATH}" ]; then
Expand Down
4 changes: 4 additions & 0 deletions end_to_end/tpu/mixtral/8x7b/2_test_mixtral.sh
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,10 @@
# Use the same BASE_OUTPUT_PATH for both 1_test_mixtral.sh & 2_test_mixtral.sh.

set -ex

# Installing torch for deps in forward_pass_logit_checker.py
pip install torch --index-url https://download.pytorch.org/whl/cpu

MODEL_VARIATION='8x7b'

if [ -z "${BASE_OUTPUT_PATH}" ]; then
Expand Down

0 comments on commit b010030

Please sign in to comment.