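#!/bin/bash
# Convert the epoch_14 BMT checkpoint to HuggingFace format (if needed) and run
# inference.py on the Mind2Web test data.
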
PROJECT_PATH="your-project-path"
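# Local snapshot of the all-MiniLM-L6-v2 sentence-transformer used as the embedding model.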
EMBEDDING_MODEL_PATH="${PROJECT_PATH}/sentence-transformer/models--sentence-transformers--all-MiniLM-L6-v2/snapshots/e4ce9877abf3edfe10b0d82785e83bdcb973e22e"
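
# Collect the command-line options for inference.py.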
OPTS=""
OPTS+=" --embedding_model_path ${EMBEDDING_MODEL_PATH}"
OPTS+=" --test_data_dir ${PROJECT_PATH}/data/Mind2Web/test"
OPTS+=" --train_data_dir ${PROJECT_PATH}/data/Mind2Web/train/train_with_steps_insert_mistral.json"
OPTS+=" --prompt_file ${PROJECT_PATH}/prompts/summarisation/summarisation_prompt.txt"
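
# Checkpoints: the BMT checkpoint (epoch_14), its HuggingFace-converted copy,
# and the original Mistral-7B-v0.1 snapshot passed to the conversion script.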
MODEL_NAME_OR_PATH_BMT="${PROJECT_PATH}/ckpts/experiment/epoch_14"
MODEL_NAME_OR_PATH_HF="${MODEL_NAME_OR_PATH_BMT}-hf"
MODEL_NAME_OR_PATH_ORIGINAL_MISTRAL="${PROJECT_PATH}/Mistral-7B-v0.1/snapshots/26bca36bde8333b5d7f72e9ed20ccda6a618af24"

# Convert the BMT checkpoint to HF format (skipped if a converted copy already exists)
if [ ! -f "${MODEL_NAME_OR_PATH_HF}/config.json" ]; then
    CMD="python3 ${PROJECT_PATH}/hf_bmt/bmt_hf.py --in_path ${MODEL_NAME_OR_PATH_BMT} --output_path ${MODEL_NAME_OR_PATH_HF} --original_mistral_path ${MODEL_NAME_OR_PATH_ORIGINAL_MISTRAL}"
    echo "-------BMT -> HF CMD is------"
    echo "CMD: ${CMD}"
    echo "-------BMT -> HF CMD end------"
    eval ${CMD}
fi
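
# Point inference at the converted HF checkpoint.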
OPTS+=" --model_name_or_path ${MODEL_NAME_OR_PATH_HF}"
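
# Assemble the final inference command, print it, and run it.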
CMD="python3 inference.py ${OPTS}"
echo "-------final CMD is------"
echo "${CMD}"
echo "-------final CMD end------"
eval ${CMD}