name: Chatbot inference on llama-2-7b-chat-hf

on:
  workflow_call:
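# Reusable workflow: no standalone trigger is defined, so this file runs only
# when invoked from a caller workflow via `workflow_call` (see the hedged
# caller sketch at the end of this file).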

jobs:
  inference:
    name: inference test
    runs-on: lms-lab
    steps:
      - name: Checkout
        uses: actions/checkout@v2

      - name: Load environment variables
        uses: actions/dotenv-action
        with:
          path: ~/itrex-actions-runner/.env
          export-variables: true
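      # The .env file loaded above is expected to define HF_ACCESS_TOKEN and the
      # HTTP(S)_PROXY_* variables that the build, run, and inference steps below
      # reference as shell environment variables.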
      - name: Build Docker Image
        run: |
          echo "token is $HF_ACCESS_TOKEN"
          docker build ./ --target cpu --build-arg http_proxy="$HTTP_PROXY_IMAGE_BUILD" --build-arg https_proxy="$HTTPS_PROXY_IMAGE_BUILD" -f workflows/chatbot/inference/docker/Dockerfile -t chatbotinfer:latest
          yes | docker container prune
          yes | docker image prune
      - name: Start Docker Container
        run: |
          cid=$(docker ps -q --filter "name=chatbotinfer")
          if [[ ! -z "$cid" ]]; then docker stop $cid && docker rm $cid; fi
          docker run -tid -v /mnt/DP_disk1/huggingface/cache/:/root/.cache/huggingface/hub -v "$PWD":/root/chatbot -e http_proxy="$HTTP_PROXY_CONTAINER_RUN" -e https_proxy="$HTTPS_PROXY_CONTAINER_RUN" --name="chatbotinfer" --hostname="chatbotinfer-container" chatbotinfer:latest
      - name: Run Inference Test
        run: |
          docker exec "chatbotinfer" bash -c "cd /root/chatbot && source activate && conda activate chatbot-demo; python workflows/chatbot/inference/generate.py --base_model_path \"meta-llama/Llama-2-7b-chat-hf\" --hf_access_token \"$HF_ACCESS_TOKEN\" --instructions \"Transform the following sentence into one that shows contrast. The tree is rotten.\""
      - name: Stop Container
        if: success() || failure()
        run: |
          cid=$(docker ps -q --filter "name=chatbotinfer")
          if [[ ! -z "$cid" ]]; then docker stop $cid && docker rm $cid; fi
      - name: Test Summary
        run: echo "Inference completed successfully"
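
# Illustrative only: a minimal sketch of how a caller workflow might invoke this
# reusable workflow through its `workflow_call` trigger. The caller file name
# and the workflow path `.github/workflows/chatbot-inference-llama-2-7b-chat-hf.yml`
# are hypothetical placeholders, not part of this repository's actual CI wiring.
#
#   name: chatbot-ci
#   on:
#     pull_request:
#   jobs:
#     llama2-inference:
#       uses: ./.github/workflows/chatbot-inference-llama-2-7b-chat-hf.yml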