Migrate from CircleCI to GitHub Actions #6
name: Build and Test

on:
  push:
    branches: [ '*' ]
    tags: [ '*' ]
  pull_request:
    branches: [ '*' ]

env:
  PYTHON_VERSION: "3.12"

jobs:
  check_line_count:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install tabulate
      - name: Run line count check
        run: |
          if [[ -n "${{ github.event.pull_request }}" ]]; then
            git fetch origin ${{ github.base_ref }}
            git clone -b ${{ github.base_ref }} --single-branch \
              https://github.com/${{ github.repository }}.git base_branch
            python extra/line_counter.py base_branch .
          else
            python extra/line_counter.py .
          fi
      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: line-count-results
          path: |
            line-count-snapshot.json
            line-count-diff.json

  unit_test:
    runs-on: macos-15
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      - name: Install dependencies
        run: |
          python -m venv env
          source env/bin/activate
          pip install --upgrade pip
          pip install .
      - name: Run tests
        run: |
          source env/bin/activate
          # set TEMPERATURE to 0 for deterministic sampling
          echo "Running inference engine tests..."
          METAL_DEVICE_WRAPPER_TYPE=1 METAL_DEBUG_ERROR_MODE=0 METAL_XCODE=1 TEMPERATURE=0 python3 -m exo.inference.test_inference_engine
          echo "Running tokenizer tests..."
          python3 ./test/test_tokenizers.py
          python3 ./test/test_model_helpers.py

  discovery_integration_test:
    runs-on: macos-15
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      - name: Install dependencies
        run: |
          python -m venv env
          source env/bin/activate
          pip install --upgrade pip
          pip install .
      - name: Run discovery integration test
        run: |
          source env/bin/activate
          DEBUG_DISCOVERY=7 DEBUG=7 exo --node-id "node1" --listen-port 5678 --broadcast-port 5679 --chatgpt-api-port 8000 --disable-tui > output1.log 2>&1 &
          PID1=$!
          DEBUG_DISCOVERY=7 DEBUG=7 exo --node-id "node2" --listen-port 5679 --broadcast-port 5678 --chatgpt-api-port 8001 --disable-tui > output2.log 2>&1 &
          PID2=$!
          sleep 10
          kill $PID1 $PID2
          if grep -q "Peer statuses: {\\'node2\\': \\'is_connected=True, health_check=True" output1.log && ! grep -q "Failed to connect peers:" output1.log && grep -q "Peer statuses: {\\'node1\\': \\'is_connected=True, health_check=True" output2.log && ! grep -q "Failed to connect peers:" output2.log; then
            echo "Test passed: Both instances discovered each other"
            exit 0
          else
            echo "Test failed: Devices did not discover each other"
            echo "Output of first instance:"
            cat output1.log
            echo "Output of second instance:"
            cat output2.log
            exit 1
          fi

  chatgpt_api_tests:
    runs-on: macos-15
    strategy:
      matrix:
        inference_engine: [mlx, tinygrad, dummy]
        include:
          - inference_engine: mlx
            model_id: llama-3.2-1b
            prompt: "Keep responses concise. Who was the king of pop?"
            expected_output: "Michael Jackson"
          - inference_engine: tinygrad
            model_id: llama-3.2-1b
            prompt: "Keep responses concise. Who was the king of pop?"
            expected_output: "Michael Jackson"
          - inference_engine: dummy
            model_id: dummy
            prompt: "Dummy prompt."
            expected_output: "dummy"
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      - name: Install dependencies
        run: |
          python -m venv env
          source env/bin/activate
          pip install --upgrade pip
          pip install .
          if [ "${{ matrix.inference_engine }}" = "tinygrad" ]; then
            pip install llvmlite
          fi
      - name: Run ChatGPT API test
        env:
          TOKENIZERS_PARALLELISM: ${{ matrix.inference_engine == 'tinygrad' && 'true' || 'false' }}
          SUPPORT_BF16: ${{ matrix.inference_engine == 'tinygrad' && '0' || '' }}
          CLANG: ${{ matrix.inference_engine == 'tinygrad' && '1' || '' }}
        run: |
          source env/bin/activate
          # Start first instance
          HF_HOME="$(pwd)/.hf_cache_node1" DEBUG_DISCOVERY=7 DEBUG=7 exo --inference-engine ${{ matrix.inference_engine }} \
            --node-id "node1" --listen-port 5678 --broadcast-port 5679 --chatgpt-api-port 8000 \
            --chatgpt-api-response-timeout 900 --disable-tui > output1.log &
          PID1=$!
          tail -f output1.log &
          TAIL1=$!
          # Start second instance
          HF_HOME="$(pwd)/.hf_cache_node2" DEBUG_DISCOVERY=7 DEBUG=7 exo --inference-engine ${{ matrix.inference_engine }} \
            --node-id "node2" --listen-port 5679 --broadcast-port 5678 --chatgpt-api-port 8001 \
            --chatgpt-api-response-timeout 900 --disable-tui > output2.log &
          PID2=$!
          tail -f output2.log &
          TAIL2=$!
          # Kill the tail processes when the step exits
          trap 'kill $TAIL1 $TAIL2' EXIT
          # The rest of the test script is the same as in the CircleCI config; copy the
          # remaining test logic from there. An approximate sketch of it follows.
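          # NOTE: the sketch below is an assumption, not the original CircleCI script. It presumes
          # exo serves an OpenAI-style endpoint at /v1/chat/completions on the --chatgpt-api-port
          # set above, and that matrix.model_id, matrix.prompt and matrix.expected_output are used
          # as their names suggest. Adjust endpoints, timeouts and assertions to match the real config.
          # Wait for the first node's API to accept connections (model download/load can be slow).
          for _ in $(seq 1 90); do
            curl -s -o /dev/null http://localhost:8000/v1/chat/completions && break
            sleep 10
          done
          # Send the matrix prompt to node1 and capture the response body.
          response=$(curl -s http://localhost:8000/v1/chat/completions \
            -H "Content-Type: application/json" \
            -d '{"model": "${{ matrix.model_id }}", "messages": [{"role": "user", "content": "${{ matrix.prompt }}"}], "temperature": 0.7}')
          echo "Response: $response"
          # Stop both nodes before asserting so their logs are complete on failure.
          kill $PID1 $PID2 || true
          if echo "$response" | grep -qi "${{ matrix.expected_output }}"; then
            echo "Test passed: response contains '${{ matrix.expected_output }}'"
          else
            echo "Test failed: expected '${{ matrix.expected_output }}' in response"
            cat output1.log output2.log
            exit 1
          fi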

  measure_pip_sizes:
    runs-on: macos-15
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      - name: Install dependencies and measure sizes
        run: |
          python -m venv env
          source env/bin/activate
          pip install --upgrade pip
          pip install .
          python ./extra/pipsize.py --json ./pipsize.json
      - name: Upload pip sizes artifact
        uses: actions/upload-artifact@v4
        with:
          name: pip-sizes
          path: ./pipsize.json