Integration v0.11.0 #1226
name: CI
on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master, integration ]
# Cancel any in-flight jobs for the same PR/branch so there's only one active
# at a time
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
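  # Build the reference hash_scene binary and generate the baseline scene hashes
  # that the downstream jobs download via the 'reference-hashes' artifact.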
  ci_pre_hashes:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 2
      - name: Compile hash_scene
        run: |
          mkdir build
          clang test/hash_scene.c misc/fdlibm.c -O2 -o build/hash_scene
      - name: Generate reference hashes
        run: |
          python3 misc/generate_hashes.py --exe build/hash_scene -o build/hashes.txt --verbose
      - name: Upload reference hashes
        uses: actions/upload-artifact@v3
        with:
          name: reference-hashes
          path: build/hashes.txt
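  # Main Linux test pass: runs the test, stack, readme, threadcheck, and hash
  # suites against the reference hashes and uploads its hashdumps.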
  ci_ubuntu:
    runs-on: ubuntu-latest
    needs: ci_pre_hashes
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 2
      - name: Update apt
        run: sudo apt-get update
      - name: Install apt dependencies
        run: sudo apt-get install -y gcc-multilib g++-multilib lcov llvm-11
      - name: Download reference hashes
        uses: actions/download-artifact@v3
        with:
          name: reference-hashes
          path: build
      - name: Run tests
        run: python3 misc/run_tests.py tests stack readme threadcheck hashes --runner ci_linux
      - name: Verbose test info
        run: build/runner_clang_release_x64/runner -d data -v
      - name: Compress hashdumps
        if: ${{ always() && !cancelled() }}
        run: python3 misc/hash_diff.py compress build/hashdumps -o build/hashdump_ci_linux.txt.gz
      - name: Upload hashdumps
        uses: actions/upload-artifact@v3
        if: ${{ always() && !cancelled() }}
        with:
          name: hash-dumps
          path: build/hashdump_ci_linux.txt.gz
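  # Windows (MSVC) test pass: runs the same suites plus C++ and feature checks
  # with the vs_cl64 compiler.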
  ci_windows:
    runs-on: windows-2022
    needs: ci_pre_hashes
    steps:
      - uses: actions/checkout@v3
      - name: Download reference hashes
        uses: actions/download-artifact@v3
        with:
          name: reference-hashes
          path: build
      - run: type build\hashes.txt
      - name: Run tests
        run: python misc/run_tests.py tests stack readme threadcheck hashes --runner ci_windows
      - name: Verbose test info
        run: build\runner_vs_cl64_release_x64\runner.exe -d data -v
      - name: Check C++ tests
        run: python misc/run_tests.py cpp --compiler vs_cl64
      - name: Check features
        run: python misc/run_tests.py features --compiler vs_cl64
      - name: Compress hashdumps
        if: ${{ always() && !cancelled() }}
        run: python3 misc/hash_diff.py compress build/hashdumps -o build/hashdump_ci_windows.txt.gz
      - name: Upload hashdumps
        uses: actions/upload-artifact@v3
        if: ${{ always() && !cancelled() }}
        with:
          name: hash-dumps
          path: build/hashdump_ci_windows.txt.gz
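  # macOS test pass: swaps the default gcc for gcc-11, drops the x86 arch, and
  # additionally checks that ufbx.c compiles as Objective-C.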
  ci_macos:
    runs-on: macos-11
    needs: ci_pre_hashes
    steps:
      - uses: actions/checkout@v3
      - name: Download reference hashes
        uses: actions/download-artifact@v3
        with:
          name: reference-hashes
          path: build
      - name: Run tests
        run: python3 misc/run_tests.py tests cpp stack readme threadcheck hashes --remove-compiler gcc --additional-compiler gcc-11 --remove-arch x86 --runner ci_macos
      - name: Compile ufbx as Objective C
        run: clang -Wall -Wextra -Werror -ObjC -c ufbx.c -o build/ufbx-objc.o
      - name: Compress hashdumps
        if: ${{ always() && !cancelled() }}
        run: python3 misc/hash_diff.py compress build/hashdumps -o build/hashdump_ci_macos.txt.gz
      - name: Upload hashdumps
        uses: actions/upload-artifact@v3
        if: ${{ always() && !cancelled() }}
        with:
          name: hash-dumps
          path: build/hashdump_ci_macos.txt.gz
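  # Generates LLVM coverage for the test suite and uploads it to Codecov.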
  ci_coverage:
    runs-on: ubuntu-20.04
    needs: ci_pre_hashes
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 2
      - name: Update apt
        run: sudo apt-get update
      - name: Install apt dependencies
        run: sudo apt-get install -y gcc-multilib g++-multilib lcov llvm-11
      - name: Generate coverage
        run: LLVM_COV=llvm-cov-11 bash misc/generate_coverage.sh
      - name: Upload coverage
        run: bash <(curl -s https://codecov.io/bash) -F tests -f coverage.lcov -y .github/codecov.yml
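  # Runs the C++ test permutations with the --heavy flag.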
  ci_cpp:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Update apt
        run: sudo apt-get update
      - name: Install apt dependencies
        run: sudo apt-get install -y gcc-multilib g++-multilib llvm-11
      - name: Run tests
        run: python3 misc/run_tests.py cpp --heavy
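  # Renders the picort regression images and uploads them to BunnyCDN; the
  # upload needs the BUNNY_API_TOKEN secret and is allowed to fail
  # (continue-on-error).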
  ci_picort:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Run picort
        run: python misc/run_tests.py picort
      - name: Upload result images
        env:
          BUNNY_API_TOKEN: ${{ secrets.BUNNY_API_TOKEN }}
        if: ${{ always() && !cancelled() }}
        continue-on-error: true
        run: |
          HASH="$(git rev-parse --short=16 HEAD)"
          URL="https://storage.bunnycdn.com/ufbx-images/$HASH"
          DST_URL="https://ufbx-images.b-cdn.net/$HASH"
          curl -sS -w " -> $DST_URL/barbarian.png \n" --request PUT --header "AccessKey: $BUNNY_API_TOKEN" --header "Content-Type: image/png" --url $URL/barbarian.png --data-binary @build/images/barbarian.png
          curl -sS -w " -> $DST_URL/barbarian-scalar.png \n" --request PUT --header "AccessKey: $BUNNY_API_TOKEN" --header "Content-Type: image/png" --url $URL/barbarian-scalar.png --data-binary @build/images/barbarian-scalar.png
          curl -sS -w " -> $DST_URL/barbarian-big.png \n" --request PUT --header "AccessKey: $BUNNY_API_TOKEN" --header "Content-Type: image/png" --url $URL/barbarian-big.png --data-binary @build/images/barbarian-big.png
          curl -sS -w " -> $DST_URL/slime-binary.png \n" --request PUT --header "AccessKey: $BUNNY_API_TOKEN" --header "Content-Type: image/png" --url $URL/slime-binary.png --data-binary @build/images/slime-binary.png
          curl -sS -w " -> $DST_URL/slime-ascii.png \n" --request PUT --header "AccessKey: $BUNNY_API_TOKEN" --header "Content-Type: image/png" --url $URL/slime-ascii.png --data-binary @build/images/slime-ascii.png
          curl -sS -w " -> $DST_URL/slime-big.png \n" --request PUT --header "AccessKey: $BUNNY_API_TOKEN" --header "Content-Type: image/png" --url $URL/slime-big.png --data-binary @build/images/slime-big.png
  ci_domfuzz:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Run domfuzz/objfuzz
        run: python misc/run_tests.py domfuzz objfuzz
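  # Builds and runs the viewer test target.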
  ci_viewer:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Run viewer
        run: python misc/run_tests.py viewer
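  # Compiles feature-flag permutations: a limited set with Clang/GCC and an
  # exhaustive (--heavy) set with TCC.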
  ci_features:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Update apt
        run: sudo apt-get update
      - name: Install apt dependencies
        run: sudo apt-get install -y tcc
      - name: Compile feature permutations (limited, Clang+GCC)
        run: python misc/run_tests.py features
      - name: Compile feature permutations (exhaustive, TCC)
        run: python misc/run_tests.py features --heavy --additional-compiler tcc --compiler tcc
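  # Static analysis (cppcheck, Clang Analyzer) plus stack-usage analysis of
  # Clang/GCC debug and release builds via misc/analyze_stack.py.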
  ci_analysis:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Update apt
        run: sudo apt-get update
      - name: Install apt dependencies
        run: sudo apt-get install -y cppcheck
      - name: Install PIP dependencies
        run: sudo pip3 install pcpp pycparser
      - name: Prepare directories
        run: mkdir build
      - name: cppcheck
        run: cppcheck --error-exitcode=1 --inline-suppr --std=c99 ufbx.c -DUFBX_STATIC_ANALYSIS=1
      - name: Clang Analyzer
        if: ${{ always() && !cancelled() }}
        run: clang --analyze -Xanalyzer -analyzer-werror -Xanalyzer -analyzer-disable-checker=unix.Malloc ufbx.c
      - name: Get Clang debug stack usage
        if: ${{ always() && !cancelled() }}
        run: clang -fstack-usage -DUFBX_DEV -DUFBX_STANDARD_C -c ufbx.c -o build/ufbx-clang-debug.o
      - name: Get Clang release stack usage
        if: ${{ always() && !cancelled() }}
        run: clang -fstack-usage -DUFBX_DEV -O2 -c ufbx.c -o build/ufbx-clang-release.o
      - name: Get GCC debug stack usage
        if: ${{ always() && !cancelled() }}
        run: gcc -fstack-usage -DUFBX_DEV -DUFBX_STANDARD_C -c ufbx.c -o build/ufbx-gcc-debug.o
      - name: Get GCC release stack usage
        if: ${{ always() && !cancelled() }}
        run: gcc -fstack-usage -DUFBX_DEV -O2 -c ufbx.c -o build/ufbx-gcc-release.o
      - name: Parse ufbx.c to AST for analysis
        if: ${{ always() && !cancelled() }}
        run: python3 misc/analyze_stack.py --no-su --cache build/ufbx-cache.pickle
      - name: Analyze Clang debug stack
        if: ${{ always() && !cancelled() }}
        run: python3 misc/analyze_stack.py build/ufbx-clang-debug.su --limit 0x20000 --cache build/ufbx-cache.pickle
      - name: Analyze Clang release stack
        if: ${{ always() && !cancelled() }}
        run: python3 misc/analyze_stack.py build/ufbx-clang-release.su --limit 0x20000 --cache build/ufbx-cache.pickle
      - name: Analyze GCC debug stack
        if: ${{ always() && !cancelled() }}
        run: python3 misc/analyze_stack.py build/ufbx-gcc-debug.su --limit 0x20000 --cache build/ufbx-cache.pickle
      - name: Analyze GCC release stack
        if: ${{ always() && !cancelled() }}
        run: python3 misc/analyze_stack.py build/ufbx-gcc-release.su --limit 0x20000 --cache build/ufbx-cache.pickle
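  # Header compatibility checks: downgrades ufbx.h to the oldest compatible
  # version and runs the tests against it for both source and ABI compatibility.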
  ci_compat:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: Update apt
        run: sudo apt-get update
      - name: Install apt dependencies
        run: sudo apt-get install -y gcc-multilib g++-multilib
      - name: Copy the current ufbx.h to ufbx_new.h
        run: cp ufbx.h ufbx_new.h
      - name: Downgrade to oldest compatible header
        run: bash misc/downgrade_header.sh
      - name: Run tests with old header (source compatibility)
        run: python3 misc/run_tests.py tests --no-sanitize --runner ci_compat
      - name: Run tests with new header in ufbx.c, old header in runner.c (ABI compatibility)
        run: python3 misc/run_tests.py tests --no-sanitize-arch x86 --runner ci_compat --define UFBX_HEADER_PATH=\"ufbx_new.h\"
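  # Formatting and typo checks for ufbx.h and ufbx.c.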
  ci_lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Check formatting
        if: ${{ always() && !cancelled() }}
        run: python3 misc/check_formatting.py ufbx.h ufbx.c
      - name: Check for typos
        if: ${{ always() && !cancelled() }}
        uses: crate-ci/typos@master
        with:
          files: ufbx.h ufbx.c
          config: ./misc/typos.toml
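  # Bindgen smoke test: parses ufbx.h and generates the typed IR.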
  ci_bindgen:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Create bindgen/build directory
        run: mkdir bindgen/build
      - name: Parse ufbx.h
        run: python3 bindgen/ufbx_parser.py
      - name: Generate typed IR
        run: python3 bindgen/ufbx_ir.py
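  # Exotic toolchains and configurations: TCC, PowerPC cross-builds run under
  # qemu-ppc, fdlibm math, threaded hashing, and strict C99/C11/C++98/C++11
  # builds, followed by an LTO link check.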
  ci_exotic:
    runs-on: ubuntu-latest
    needs: ci_pre_hashes
    steps:
      - uses: actions/checkout@v3
      - name: Download reference hashes
        uses: actions/download-artifact@v3
        with:
          name: reference-hashes
          path: build
      - name: Update apt
        run: sudo apt-get update
      - name: Install apt dependencies
        run: sudo apt-get install -y tcc qemu qemu-user gcc-multilib
      - name: Install zig cc via PIP
        run: sudo pip3 install ziglang
      - name: Run tests with TCC
        run: python3 misc/run_tests.py tests hashes --additional-compiler tcc --compiler tcc --remove-arch x86 --runner ci_exotic
      - name: Compile ufbx.c with x86 TCC
        run: i386-tcc -Wall -Werror -c ufbx.c -o build/tcc-ufbx-x86.o
      - name: Compile PowerPC tests
        run: python3 -m ziglang cc -fno-sanitize=undefined -target powerpc-linux -DUFBX_DEV ufbx.c test/runner.c -g -o build/ppc-runner
      - name: Run PowerPC tests
        run: qemu-ppc build/ppc-runner -d data
      - name: Compile PowerPC hash_scene
        run: |
          python3 -m ziglang cc -fno-sanitize=undefined -ffp-contract=off -target powerpc-linux misc/fdlibm.c test/hash_scene.c -g -o build/ppc_hasher_debug
          python3 -m ziglang cc -fno-sanitize=undefined -ffp-contract=off -O2 -target powerpc-linux misc/fdlibm.c test/hash_scene.c -g -o build/ppc_hasher_release
      - name: Check PowerPC hashes
        run: |
          qemu-ppc build/ppc_hasher_debug --check build/hashes.txt --dump build/hashdumps/ci_exotic_powerpc_debug.txt --max-dump-errors 3
          qemu-ppc build/ppc_hasher_release --check build/hashes.txt --dump build/hashdumps/ci_exotic_powerpc_release.txt --max-dump-errors 3
      - name: Compile runner with fdlibm
        run: clang -DUFBX_NO_MATH_H -DUFBX_MATH_PREFIX=fdlibm_ ufbx.c test/runner.c misc/fdlibm.c -lm -o build/fdlibm_runner
      - name: Run tests using fdlibm
        run: build/fdlibm_runner -d data
      - name: Compile runner.c
        run: gcc -c test/runner.c -o build/runner.o
      - name: Hash scenes with threads
        run: python3 misc/run_tests.py hashes --hash-threads --runner ci_hash_threads
      - name: Build and run standard C99
        run: |
          gcc -Wall -Wextra -Werror -Wno-unused-function -std=c99 -DUFBX_STANDARD_C build/runner.o ufbx.c -lm -o build/runner-c99
          build/runner-c99 -d data --allow-non-thread-safe
      - name: Build and run standard C11
        run: |
          gcc -Wall -Wextra -Werror -Wno-unused-function -std=c11 -DUFBX_STANDARD_C build/runner.o ufbx.c -lm -o build/runner-c11
          build/runner-c11 -d data
      - name: Build and run standard C++98
        run: |
          g++ -Wall -Wextra -Werror -Wno-unused-function -std=c++98 -DUFBX_STANDARD_C build/runner.o -x c++ ufbx.c -lm -o build/runner-cpp98
          build/runner-cpp98 -d data --allow-non-thread-safe
      - name: Build and run standard C++11
        run: |
          g++ -Wall -Wextra -Werror -Wno-unused-function -std=c++11 -DUFBX_STANDARD_C build/runner.o -x c++ ufbx.c -lm -o build/runner-cpp11
          build/runner-cpp11 -d data
      - name: Build and run threadcheck (standard C11)
        run: |
          gcc -Wall -Wextra -Werror -Wno-unused-function -std=c11 -DUFBX_STANDARD_C -c ufbx.c -o build/ufbx-c11.o
          g++ -Wall -Wextra -Werror -Wno-unused-function -pthread -std=c++11 build/ufbx-c11.o test/threadcheck.cpp -lm -o build/threadcheck-c11
          build/threadcheck-c11 data/maya_cube_7500_binary.fbx 2
      - name: Build and run threadcheck (standard C++11)
        run: |
          g++ -Wall -Wextra -Werror -Wno-unused-function -pthread -std=c++11 -DUFBX_STANDARD_C -x c++ ufbx.c test/threadcheck.cpp -lm -o build/threadcheck-cpp11
          build/threadcheck-cpp11 data/maya_cube_7500_binary.fbx 2
      - name: Link C/C++ LTO
        run: bash misc/lto_compatability.sh
      - name: Compress hashdumps
        if: ${{ always() && !cancelled() }}
        run: python3 misc/hash_diff.py compress build/hashdumps -o build/hashdump_ci_exotic.txt.gz
      - name: Upload hashdumps
        uses: actions/upload-artifact@v3
        if: ${{ always() && !cancelled() }}
        with:
          name: hash-dumps
          path: build/hashdump_ci_exotic.txt.gz
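  # Compiles and runs the tests with a range of older GCC (5-10) and Clang
  # (6.0-13) versions pulled from older Ubuntu package repositories.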
  ci_compilers:
    runs-on: ubuntu-22.04
    needs: ci_pre_hashes
    steps:
      - uses: actions/checkout@v3
      - name: Add Ubuntu sources
        run: |
          echo "deb http://azure.archive.ubuntu.com/ubuntu/ xenial main universe" | sudo tee -a /etc/apt/sources.list
          echo "deb http://azure.archive.ubuntu.com/ubuntu/ bionic main universe" | sudo tee -a /etc/apt/sources.list
          echo "deb http://azure.archive.ubuntu.com/ubuntu/ focal main universe" | sudo tee -a /etc/apt/sources.list
          sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 40976EAF437D05B5 3B4FE6ACC0B21F32
          sudo apt update
      - name: Install compilers
        run: sudo apt-get install -y gcc-5 g++-5 gcc-6 g++-6 gcc-7 g++-7 gcc-8 g++-8 gcc-9 gcc-10 clang-6.0 clang-7 clang-8 clang-9 clang-10 clang-11 clang-12 clang-13
      - name: Make build directory
        run: mkdir build
      - name: Run tests
        run: python3 misc/run_tests.py tests --no-sanitize --force-opt O0 --additional-compiler gcc-5 --additional-compiler gcc-6 --additional-compiler gcc-7 --additional-compiler gcc-8 --additional-compiler gcc-9 --additional-compiler gcc-10 --additional-compiler clang-6.0 --additional-compiler clang-7 --additional-compiler clang-8 --additional-compiler clang-9 --additional-compiler clang-10 --additional-compiler clang-11 --additional-compiler clang-12 --additional-compiler clang-13
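  # Re-runs hash_scene under varied build configurations (regression mode,
  # custom allocators, short enums, signed/unsigned char, standard C) and
  # checks the results against the reference hashes.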
  ci_hasher:
    runs-on: ubuntu-latest
    needs: ci_pre_hashes
    steps:
      - uses: actions/checkout@v3
      - name: Prepare directories
        run: |
          mkdir build
          mkdir build/hashdumps
      - name: Download reference hashes
        uses: actions/download-artifact@v3
        with:
          name: reference-hashes
          path: build
      - name: Compile and run hash_scene with various configurations
        run: |
          clang test/hash_scene.c misc/fdlibm.c -O2 -o build/hash_scene
          build/hash_scene --check build/hashes.txt --dump build/hashdumps/ci_hasher_x64_release.txt --max-dump-errors 3
          build/hash_scene --check build/hashes.txt --dump build/hashdumps/ci_hasher_unbuffered_x64_release.txt --max-dump-errors 3 --no-read-buffer
          build/hash_scene --check build/hashes.txt --dump build/hashdumps/ci_hasher_dedicated_x64_release.txt --max-dump-errors 3 --dedicated-allocs
          build/hash_scene --check build/hashes.txt --dump build/hashdumps/ci_hasher_linear_x64_release.txt --max-dump-errors 3 --dedicated-allocs --allocator linear
          build/hash_scene --check build/hashes.txt --dump build/hashdumps/ci_hasher_linear_reverse_x64_release.txt --max-dump-errors 3 --dedicated-allocs --allocator linear-reverse
          clang -DUFBX_DEV -DUFBX_REGRESSION test/hash_scene.c misc/fdlibm.c -O2 -o build/hash_scene
          build/hash_scene --check build/hashes.txt --dump build/hashdumps/ci_hasher_regression_x64_release.txt --max-dump-errors 3
          build/hash_scene --check build/hashes.txt --dump build/hashdumps/ci_hasher_regression_unbuffered_x64_release.txt --max-dump-errors 3 --no-read-buffer
          build/hash_scene --check build/hashes.txt --dump build/hashdumps/ci_hasher_regression_dedicated_x64_release.txt --max-dump-errors 3 --dedicated-allocs
          build/hash_scene --check build/hashes.txt --dump build/hashdumps/ci_hasher_regression_linear_x64_release.txt --max-dump-errors 3 --dedicated-allocs --allocator linear
          build/hash_scene --check build/hashes.txt --dump build/hashdumps/ci_hasher_regression_linear_reverse_x64_release.txt --max-dump-errors 3 --dedicated-allocs --allocator linear-reverse
          clang -fshort-enums -fshort-wchar test/hash_scene.c misc/fdlibm.c -O2 -o build/hash_scene
          build/hash_scene --check build/hashes.txt --dump build/hashdumps/ci_hasher_short_x64_release.txt --max-dump-errors 3
          clang -fsigned-char test/hash_scene.c misc/fdlibm.c -O2 -o build/hash_scene
          build/hash_scene --check build/hashes.txt --dump build/hashdumps/ci_hasher_schar_x64_release.txt --max-dump-errors 3
          clang -funsigned-char test/hash_scene.c misc/fdlibm.c -O2 -o build/hash_scene
          build/hash_scene --check build/hashes.txt --dump build/hashdumps/ci_hasher_uchar_x64_release.txt --max-dump-errors 3
          clang -DUFBX_STANDARD_C -DUFBX_DEV -DUFBX_REGRESSION test/hash_scene.c misc/fdlibm.c -O2 -o build/hash_scene
          build/hash_scene --check build/hashes.txt --dump build/hashdumps/ci_hasher_stdc_x64_release.txt --max-dump-errors 3
      - name: Compress hashdumps
        if: ${{ always() && !cancelled() }}
        run: python3 misc/hash_diff.py compress build/hashdumps -o build/hashdump_ci_hasher.txt.gz
      - name: Upload hashdumps
        uses: actions/upload-artifact@v3
        if: ${{ always() && !cancelled() }}
        with:
          name: hash-dumps
          path: build/hashdump_ci_hasher.txt.gz
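  # WebAssembly pass: builds with the wasi-sdk Clang and runs the tests and
  # hash checks under wasmtime.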
  ci_wasm:
    runs-on: ubuntu-latest
    needs: ci_pre_hashes
    steps:
      - uses: actions/checkout@v3
      - name: Install wasi-sdk
        run: |
          mkdir build
          export WASI_VERSION=17
          export WASI_VERSION_FULL=${WASI_VERSION}.0
          curl -L https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-${WASI_VERSION}/wasi-sdk-${WASI_VERSION_FULL}-linux.tar.gz -o build/wasi-sdk.tar.gz
          tar xzf build/wasi-sdk.tar.gz -C build
          mv build/wasi-sdk-${WASI_VERSION_FULL} build/wasi-sdk
      - name: Install wasmtime
        run: |
          curl https://wasmtime.dev/install.sh -sSf > wasmtime-install.sh
          bash wasmtime-install.sh --version v4.0.0
          echo "$HOME/.wasmtime/bin" >> $GITHUB_PATH
      - name: Test wasmtime
        run: wasmtime --version
      - name: Download reference hashes
        uses: actions/download-artifact@v3
        with:
          name: reference-hashes
          path: build
      - name: Run tests
        run: python3 misc/run_tests.py tests hashes --wasi-sdk build/wasi-sdk --compiler wasi_clang --runner ci_wasm
      - name: Compress hashdumps
        if: ${{ always() && !cancelled() }}
        run: python3 misc/hash_diff.py compress build/hashdumps -o build/hashdump_ci_wasm.txt.gz
      - name: Upload hashdumps
        uses: actions/upload-artifact@v3
        if: ${{ always() && !cancelled() }}
        with:
          name: hash-dumps
          path: build/hashdump_ci_wasm.txt.gz
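  # 32-bit ARM pass on a self-hosted runner; sanitizers are disabled and the
  # run is limited to two threads.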
  ci_arm32:
    runs-on: [self-hosted, ARM]
    needs: ci_pre_hashes
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 2
      - name: Download reference hashes
        uses: actions/download-artifact@v3
        with:
          name: reference-hashes
          path: build
      - name: Run tests
        run: python3 misc/run_tests.py tests stack hashes --no-sanitize --threads 2 --runner ci_arm32
      - name: Compress hashdumps
        if: ${{ always() && !cancelled() }}
        run: python3 misc/hash_diff.py compress build/hashdumps -o build/hashdump_ci_arm32.txt.gz
      - name: Upload hashdumps
        uses: actions/upload-artifact@v3
        if: ${{ always() && !cancelled() }}
        with:
          name: hash-dumps
          path: build/hashdump_ci_arm32.txt.gz
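  # 64-bit ARM pass on a self-hosted runner, mirroring ci_arm32.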
  ci_arm64:
    runs-on: [self-hosted, ARM64]
    needs: ci_pre_hashes
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 2
      - name: Download reference hashes
        uses: actions/download-artifact@v3
        with:
          name: reference-hashes
          path: build
      - name: Run tests
        run: python3 misc/run_tests.py tests stack hashes --no-sanitize --threads 2 --runner ci_arm64
      - name: Compress hashdumps
        if: ${{ always() && !cancelled() }}
        run: python3 misc/hash_diff.py compress build/hashdumps -o build/hashdump_ci_arm64.txt.gz
      - name: Upload hashdumps
        uses: actions/upload-artifact@v3
        if: ${{ always() && !cancelled() }}
        with:
          name: hash-dumps
          path: build/hashdump_ci_arm64.txt.gz
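  # Runs check_fbx over the external FBX dataset stored on the self-hosted
  # runner (/mnt/fbx-files) and uploads coverage to Codecov.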
  ci_dataset:
    runs-on: [self-hosted, ARM64]
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 2
      - name: Create build directory
        run: mkdir build
      - name: Compile check_fbx.c with coverage
        run: clang -coverage -DUFBX_DEV=1 -DEXTERNAL_UFBX -O2 ufbx.c test/check_fbx.c -lm -o build/check_fbx
      - name: Run dataset tests
        run: python3 misc/check_dataset.py --exe build/check_fbx --root /mnt/fbx-files --host-url https://ufbx-dataset.b-cdn.net
      - name: Generate coverage
        run: LLVM_COV=llvm-cov bash misc/generate_lcov.sh
      - name: Upload coverage
        run: bash /codecov.sh -F dataset -f coverage.lcov -y .github/codecov.yml
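  # Runs check_fbx over the stored fuzz corpora on the self-hosted runner and
  # uploads coverage to Codecov.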
  ci_fuzz:
    runs-on: [self-hosted, ARM64]
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 2
      - name: Create build directory
        run: mkdir build
      - name: Compile check_fbx.c with coverage
        run: clang -coverage -DUFBX_DEV=1 -DEXTERNAL_UFBX -O2 ufbx.c test/check_fbx.c -lm -o build/check_fbx
      - name: Check fuzz files
        run: |
          build/check_fbx -d /mnt/fbx-files/fuzz/fuzz-fa
          build/check_fbx -d /mnt/fbx-files/fuzz/fuzz-fb
      - name: Generate coverage
        run: LLVM_COV=llvm-cov bash misc/generate_lcov.sh
      - name: Upload coverage
        run: bash /codecov.sh -F fuzz -f coverage.lcov -y .github/codecov.yml
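  # Runs after all hash-producing jobs (even if some failed): collects their
  # 'hash-dumps' artifacts, recomputes reference hashes for the dumped files,
  # and diffs the two sets.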
  ci_post_hashes:
    runs-on: ubuntu-latest
    if: ${{ always() }}
    needs: [ci_ubuntu, ci_windows, ci_macos, ci_wasm, ci_arm32, ci_arm64, ci_exotic, ci_hasher]
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 2
      - name: Compile hash_scene
        run: |
          mkdir build
          clang test/hash_scene.c misc/fdlibm.c -O2 -o build/hash_scene
      - name: Download combined hashdumps
        uses: actions/download-artifact@v3
        with:
          name: hash-dumps
          path: build/ci-hashdumps
      - name: List files to dump
        run: python3 misc/hash_diff.py list build/ci-hashdumps -o build/dumped-files.txt
      - name: Dump reference hashes
        run: |
          mkdir build/ref-hashdumps
          build/hash_scene --check build/dumped-files.txt --dump build/ref-hashdumps/reference_linux_x64_release.txt --dump-all
      - name: Compress reference hashdumps
        run: python3 misc/hash_diff.py compress build/ref-hashdumps -o build/hashdump_ci_reference.txt.gz
      - name: Diff CI and reference dumps
        run: python3 misc/hash_diff.py diff build/ci-hashdumps --ref build/hashdump_ci_reference.txt.gz
      - name: Upload reference hashdumps
        uses: actions/upload-artifact@v3
        if: ${{ always() && !cancelled() }}
        with:
          name: hash-dumps
          path: build/hashdump_ci_reference.txt.gz