Feat/bazel8 #348

Workflow file for this run

name: CI
on:
  pull_request:
    branches:
      - "*"
  workflow_dispatch:
jobs:
  test:
    runs-on: ${{ matrix.os }}
    timeout-minutes: 40
    strategy:
      matrix:
        include: # TODO: windows-latest
          - os: ubuntu-latest
            cache: ~/.cache/bazel
          # - os: macos-latest
          #   cache: /private/var/tmp/_bazel_runner
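          # The cache paths above are Bazel's default output user roots:
          # ~/.cache/bazel on Linux and /private/var/tmp/_bazel_<user> on macOS
          # (GitHub-hosted runners use the "runner" user, hence _bazel_runner).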
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.8"
      - name: Setup bazelisk
        uses: bazelbuild/setup-bazelisk@v2
      - name: Show info
        id: info
        run: |
          uname -a
          bazel info
        shell: bash
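      # The disabled step below would persist ${{ matrix.cache }} across runs via
      # actions/cache, keyed per OS; once warm, that cache should noticeably cut
      # Bazel's fetch and compile time.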
      # - name: Mount bazel cache
      #   uses: actions/[email protected]
      #   with:
      #     path: ${{ matrix.cache }}
      #     key: bazel-${{ matrix.os }}
      #     restore-keys: bazel-${{ matrix.os }}
      # - name: GCC test
      #   id: gcc
      #   run: |
# echo ==========
# cat /usr/include/c++/13/stdatomic.h
# echo ==========
# /usr/bin/x86_64-linux-gnu-gcc-13 -DGGML_BACKEND_BUILD -DGGML_BACKEND_SHARED -DGGML_SCHED_MAX_COPIES=4 -DGGML_SHARED -DGGML_USE_CPU_AARCH64 -DGGML_USE_LLAMAFILE -DGGML_USE_OPENMP -D_GNU_SOURCE -D_XOPEN_SOURCE=600 -Dggml_cpu_EXPORTS -Ixxx -I/tmp/pip-install-pm_ulnbb/llama-cpp-python_3ed1a23dbf3b43d6b7c0ecaa44743b4a/vendor/llama.cpp/ggml/src/.. -I/tmp/pip-install-pm_ulnbb/llama-cpp-python_3ed1a23dbf3b43d6b7c0ecaa44743b4a/vendor/llama.cpp/ggml/src/. -I$(pwd)/xxx -I/tmp/pip-install-pm_ulnbb/llama-cpp-python_3ed1a23dbf3b43d6b7c0ecaa44743b4a/vendor/llama.cpp/ggml/src/../include -U_FORTIFY_SOURCE -fstack-protector -Wall -Wunused-but-set-parameter -Wno-free-nonheap-object -fno-omit-frame-pointer -fPIC -I/usr/lib/gcc/x86_64-linux-gnu/13/include -I/usr/local/include -I/usr/include/x86_64-linux-gnu -I/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk -fno-canonical-system-headers -Wno-builtin-macro-redefined -D__DATE__="redacted" -D__TIMESTAMP__="redacted" -D__TIME__="redacted" -O3 -DNDEBUG -std=gnu11 -fPIC -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmissing-prototypes -Werror=implicit-int -Werror=implicit-function-declaration -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function -Wdouble-promotion -march=native -fopenmp -MD -MT xxx/ggml-cpu.c.o -MF xxx/ggml-cpu.c.o.d -o xxx/ggml-cpu.c.o -c xxx/ggml-cpu.c
# #/usr/bin/x86_64-linux-gnu-gcc-13 -DGGML_BACKEND_BUILD -DGGML_BACKEND_SHARED -DGGML_SCHED_MAX_COPIES=4 -DGGML_SHARED -DGGML_USE_CPU_AARCH64 -DGGML_USE_LLAMAFILE -DGGML_USE_OPENMP -D_GNU_SOURCE -D_XOPEN_SOURCE=600 -Dggml_cpu_EXPORTS -Ixxx -I/tmp/pip-install-pm_ulnbb/llama-cpp-python_3ed1a23dbf3b43d6b7c0ecaa44743b4a/vendor/llama.cpp/ggml/src/.. -I/tmp/pip-install-pm_ulnbb/llama-cpp-python_3ed1a23dbf3b43d6b7c0ecaa44743b4a/vendor/llama.cpp/ggml/src/. -I$(pwd)/xxx -I/tmp/pip-install-pm_ulnbb/llama-cpp-python_3ed1a23dbf3b43d6b7c0ecaa44743b4a/vendor/llama.cpp/ggml/src/../include -U_FORTIFY_SOURCE -fstack-protector -Wall -Wunused-but-set-parameter -Wno-free-nonheap-object -fno-omit-frame-pointer -fPIC -I/usr/lib/gcc/x86_64-linux-gnu/13/include -I/usr/local/include -I/usr/include/x86_64-linux-gnu -I/usr/include -I/usr/include/c++/13 -I/usr/include/x86_64-linux-gnu/c++/13 -I/usr/include/c++/13/backward -I/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk -fno-canonical-system-headers -Wno-builtin-macro-redefined -D__DATE__="redacted" -D__TIMESTAMP__="redacted" -D__TIME__="redacted" -O3 -DNDEBUG -std=gnu11 -fPIC -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmissing-prototypes -Werror=implicit-int -Werror=implicit-function-declaration -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function -Wdouble-promotion -march=native -fopenmp -MD -MT xxx/ggml-cpu.c.o -MF xxx/ggml-cpu.c.o.d -o xxx/ggml-cpu.c.E -c xxx/ggml-cpu.c -E
# #cat xxx/ggml-cpu.c.E
# #/usr/bin/x86_64-linux-gnu-gcc-13 -DGGML_BACKEND_BUILD -DGGML_BACKEND_SHARED -DGGML_SCHED_MAX_COPIES=4 -DGGML_SHARED -DGGML_USE_CPU_AARCH64 -DGGML_USE_LLAMAFILE -DGGML_USE_OPENMP -D_GNU_SOURCE -D_XOPEN_SOURCE=600 -Dggml_cpu_EXPORTS -Ixxx -I/tmp/pip-install-pm_ulnbb/llama-cpp-python_3ed1a23dbf3b43d6b7c0ecaa44743b4a/vendor/llama.cpp/ggml/src/.. -I/tmp/pip-install-pm_ulnbb/llama-cpp-python_3ed1a23dbf3b43d6b7c0ecaa44743b4a/vendor/llama.cpp/ggml/src/. -I$(pwd)/xxx -I/tmp/pip-install-pm_ulnbb/llama-cpp-python_3ed1a23dbf3b43d6b7c0ecaa44743b4a/vendor/llama.cpp/ggml/src/../include -U_FORTIFY_SOURCE -fstack-protector -Wall -Wunused-but-set-parameter -Wno-free-nonheap-object -fno-omit-frame-pointer -fPIC -I/usr/lib/gcc/x86_64-linux-gnu/13/include -I/usr/local/include -I/usr/include/x86_64-linux-gnu -I/usr/include -I/usr/include/c++/13 -I/usr/include/x86_64-linux-gnu/c++/13 -I/usr/include/c++/13/backward -I/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk -fno-canonical-system-headers -Wno-builtin-macro-redefined -D__DATE__="redacted" -D__TIMESTAMP__="redacted" -D__TIME__="redacted" -O3 -DNDEBUG -std=gnu11 -fPIC -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmissing-prototypes -Werror=implicit-int -Werror=implicit-function-declaration -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function -Wdouble-promotion -march=native -fopenmp -MD -MT xxx/ggml-cpu.c.o -MF xxx/ggml-cpu.c.o.d -o xxx/ggml-cpu.c.o -c xxx/ggml-cpu.c
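      # "bazel test ..." runs every test target under examples/cc_toolchain;
      # --spawn_strategy=standalone executes actions directly rather than in
      # Bazel's sandbox, and --verbose_failures prints the full command line of
      # any action that fails, which makes CI logs easier to debug.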
      - name: Run tests
        id: tests
        # run: bazel test ... --spawn_strategy=standalone --verbose_failures
        run: cd examples/cc_toolchain && bazel test ... --spawn_strategy=standalone --verbose_failures
        shell: bash
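      # To reproduce this step locally (assuming bazelisk or a matching Bazel
      # version is installed):
      #   cd examples/cc_toolchain && bazel test ... --spawn_strategy=standalone --verbose_failures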