2 parents 1e1e9d4 + ac1d9b0, commit 08454d3
jetstream/tools/maxtext/model_ckpt_conversion.sh
@@ -65,11 +65,11 @@ else
 pip install torch --index-url https://download.pytorch.org/whl/cpu
 # llama_or_mistral_ckpt.py requires a local path, so we need to copy the checkpoint from CHKPT_BUCKET to local.
 tmp_ckpt_path="/tmp/"
-#gcloud storage cp -r ${CHKPT_BUCKET} ${tmp_ckpt_path}
+gcloud storage cp -r ${CHKPT_BUCKET} ${tmp_ckpt_path}

 path_parts=(${CHKPT_BUCKET//\// })
 directory_substring=${path_parts[-1]}
-CONVERT_CKPT_SCRIPT="llama_or_mistral_ckpt.py"
+CONVERT_CKPT_SCRIPT="llama_or_mistral_ckpt"

 if [[ ! -z "${LORA_INPUT_ADAPTERS_PATH}" ]]; then
 lora_local_path="/tmp/"
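For context, the two helper lines in the hunk rely on a bash parameter-expansion idiom: every "/" in the bucket path is replaced with a space, word splitting turns the result into an array, and the last element is the checkpoint directory name. A minimal sketch with a hypothetical bucket path (the value is illustrative, not taken from the script; ${path_parts[-1]} needs bash 4.3+):

# Hypothetical bucket path, for illustration only.
CHKPT_BUCKET="gs://example-bucket/llama2-7b/ckpt"
tmp_ckpt_path="/tmp/"
# Replace "/" with spaces and let word splitting build an array:
# (gs: example-bucket llama2-7b ckpt)
path_parts=(${CHKPT_BUCKET//\// })
# The last array element is the final path component.
directory_substring=${path_parts[-1]}   # -> "ckpt"
# After `gcloud storage cp -r` into /tmp/, the checkpoint directory
# would sit at ${tmp_ckpt_path}${directory_substring}, e.g. /tmp/ckpt.
echo "${tmp_ckpt_path}${directory_substring}"

Extracting the last path component this way is presumably how the script locates the copied checkpoint locally once the gcloud copy is no longer commented out.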