chore: modify Rust toolchain version #1979
Workflow file for this run
| name: Linux (Ubuntu 22.04) | |
| on: | |
| release: | |
| types: [created] | |
| pull_request: | |
| types: [opened, synchronize, reopened] | |
| paths-ignore: | |
| - "tools/**" | |
| - ".vscode/**" | |
| - ".devcontainer/**" | |
| - ".github/**" | |
| - "!.github/workflows/linux_ubuntu2204.yml" | |
| - "core/src/ten_manager/designer_frontend/**" | |
| - "**.md" | |
| - "ai_agents/**" | |
| permissions: | |
| contents: write | |
| discussions: write | |
| security-events: write | |
| concurrency: | |
| group: linux-intel-ubuntu2204-${{ github.head_ref }} | |
| cancel-in-progress: true | |
| jobs: | |
| call-check-pr-status: | |
| uses: ./.github/workflows/_check_pr_status.yml | |
| build: | |
| needs: call-check-pr-status | |
| if: ${{ needs.call-check-pr-status.outputs.should_continue == 'true' }} | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| compiler: [gcc, clang] | |
| build_type: [debug, release] | |
| arch: [x64, x86] | |
| include: | |
| - compiler: gcc | |
| build_type: debug | |
| arch: x86 | |
| exclude: | |
| - compiler: clang | |
| arch: x86 | |
| container: | |
| image: ghcr.io/ten-framework/ten_building_ubuntu2204 | |
| steps: | |
| - name: Debug context | |
| run: | | |
| echo "event_name=${{ github.event_name }}" | |
| echo "event_action=${{ github.event.action }}" | |
| echo "ref=${{ github.ref }}" | |
| echo "ref_name=${{ github.ref_name }}" | |
| echo "ref_type=${{ github.ref_type }}" | |
| - name: Check PR status before matrix execution | |
| uses: actions/github-script@v7 | |
| with: | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| script: | | |
| console.log('Event name:', context.eventName); | |
| console.log('Matrix:', ${{ toJson(matrix) }}); | |
| // Only check for PR events | |
| if (context.eventName !== 'pull_request') { | |
| console.log('Not a PR event, continuing...'); | |
| return; | |
| } | |
| // Ensure we have PR data | |
| if (!context.payload.pull_request) { | |
| console.log('No pull_request data in payload, continuing...'); | |
| return; | |
| } | |
| const { owner, repo } = context.repo; | |
| const prNumber = context.payload.pull_request.number; | |
| console.log(`Checking PR #${prNumber} status for matrix: compiler=${{ matrix.compiler }}, build_type=${{ matrix.build_type }}, arch=${{ matrix.arch }}...`); | |
| try { | |
| const pr = await github.rest.pulls.get({ | |
| owner, | |
| repo, | |
| pull_number: prNumber, | |
| }); | |
| console.log(`PR #${prNumber} state: ${pr.data.state}, merged: ${pr.data.merged}`); | |
| if (pr.data.state === 'closed') { | |
| if (pr.data.merged) { | |
| console.log(`PR #${prNumber} has been merged. Stopping matrix execution.`); | |
| core.setFailed('PR has been merged, stopping execution to save resources.'); | |
| } else { | |
| console.log(`PR #${prNumber} has been closed. Stopping matrix execution.`); | |
| core.setFailed('PR has been closed, stopping execution to save resources.'); | |
| } | |
| } else { | |
| console.log(`PR #${prNumber} is still open. Continuing matrix execution.`); | |
| } | |
| } catch (error) { | |
| console.error(`Error checking PR status: ${error.message}`); | |
| console.log('Error details:', error); | |
| console.log('Continuing matrix execution due to error...'); | |
| } | |
| - uses: actions/checkout@v4 | |
| with: | |
| fetch-depth: 0 | |
| submodules: false | |
| - name: Trust working directory | |
| run: git config --global --add safe.directory "${GITHUB_WORKSPACE}" | |
| - name: Initialize and update submodules except portal/ | |
| run: | | |
| # Retrieve all submodule paths, excluding `portal/`. | |
| submodules=$(git config --file .gitmodules --get-regexp path | awk '$2 != "portal" { print $2 }') | |
| git submodule init | |
| for submodule in $submodules; do | |
| echo "Initializing submodule: $submodule" | |
| git submodule update --init --recursive --depth 1 "$submodule" | |
| done | |
| - uses: actions/setup-node@v4 | |
| with: | |
| node-version: 20 | |
| - name: Update version | |
| run: | | |
| python3 tools/version/update_version_in_ten_framework.py | |
| python3 tools/version/check_version_in_ten_framework.py | |
| - name: Install gcc-multilib and g++-multilib | |
| if: matrix.arch == 'x86' | |
| run: | | |
| apt-get update | |
| apt-get install -y gcc-multilib g++-multilib | |
| - name: Build | |
| run: | | |
| export PATH=$(pwd)/core/ten_gn:/usr/local/go/bin:/root/go/bin:/root/.cargo/bin:$PATH | |
| echo $PATH | |
| # $HOME could be different in different commands, causing path conflicts. | |
| export CARGO_HOME=/root/.cargo | |
| export RUSTUP_HOME=/root/.rustup | |
| if [ "${{ matrix.arch }}" = "x64" ]; then | |
| go env -w GOFLAGS="-buildvcs=false" | |
| go install golang.org/dl/go1.24.3@latest | |
| export PATH=$PATH:$(go env GOPATH)/bin | |
| go1.24.3 download | |
| go1.24.3 version | |
| # Set Rust toolchain based on build type | |
| # Linux debug uses nightly (for ASAN support) | |
| # Linux release, Mac, and Windows use stable | |
| if [ "${{ matrix.build_type }}" = "debug" ]; then | |
| echo "Setting Rust toolchain to latest nightly for Linux debug build (ASAN support)" | |
| rustup update nightly | |
| rustup default nightly | |
| else | |
| echo "Setting Rust toolchain to stable for Linux release build" | |
| rustup default stable | |
| fi | |
| rustc --version | |
| cargo --version | |
| fi | |
| df -h . | |
| if [ "${{ matrix.arch }}" = "x86" ]; then | |
| EXTRA_ARGS="is_clang=false ten_enable_ten_rust=false ten_enable_ten_manager=false ten_manager_enable_tests=false ten_enable_go_binding=false ten_enable_python_binding=false ten_enable_nodejs_binding=false ten_manager_enable_frontend=false" | |
| else | |
| EXTRA_ARGS="is_clang=${{ matrix.compiler == 'gcc' && 'false' || 'true' }} ten_rust_force_release=true log_level=1 enable_serialized_actions=true ten_rust_enable_gen_cargo_config=false ten_enable_cargo_clean=true ten_enable_go_lint=true ten_enable_rust_incremental_build=false ten_manager_enable_frontend=false ten_enable_ffmpeg_extensions=true" | |
| fi | |
| echo $EXTRA_ARGS | |
| tgn gen linux ${{ matrix.arch }} ${{ matrix.build_type }} -- $EXTRA_ARGS | |
| tgn build linux ${{ matrix.arch }} ${{ matrix.build_type }} | |
| df -h . | |
| tree -I 'gen|obj' out | |
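The toolchain switch above is the substance of this change: Linux debug builds move to the latest nightly so the ASAN-instrumented Rust components keep building, while release builds stay on stable. A minimal sketch for reproducing the same selection locally, assuming rustup is already on PATH; the BUILD_TYPE variable is illustrative and not something this workflow defines:

    #!/bin/bash
    set -euo pipefail
    # Mirror the CI selection: nightly for debug (ASAN), stable for everything else.
    BUILD_TYPE="${BUILD_TYPE:-debug}"   # illustrative variable, not set by the workflow
    if [ "$BUILD_TYPE" = "debug" ]; then
      rustup update nightly
      rustup default nightly
    else
      rustup default stable
    fi
    # Confirm which toolchain is now active.
    rustup show active-toolchain
    rustc --version
    cargo --version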
| - name: Update supports before upload or publish | |
| if: matrix.arch == 'x64' | |
| continue-on-error: true | |
| run: | | |
| UPDATE_SUPPORTS_SCRIPT=$(pwd)/tools/supports/update_supports_in_manifest_json.py | |
| cd out/linux/${{ matrix.arch }}/ten_packages | |
| ARRAY=( | |
| "system/ten_runtime" | |
| "system/ten_runtime_go" | |
| "system/ten_runtime_python" | |
| "system/ten_runtime_nodejs" | |
| "addon_loader/python_addon_loader" | |
| "addon_loader/nodejs_addon_loader" | |
| ) | |
| for item in "${ARRAY[@]}"; do | |
| echo "Processing item: $item" | |
| if python3 ${UPDATE_SUPPORTS_SCRIPT} --os-arch-pairs linux:x64 --input-file ${item}/manifest.json --output-file ${item}/manifest.json --log-level 1; then | |
| echo "✓ Successfully updated supports for $item" | |
| cat ${item}/manifest.json | |
| else | |
| echo "✗ Failed to update supports for $item, continuing with next item..." | |
| fi | |
| done | |
| df -h . | |
| shell: bash | |
| # Package the test artifacts into a tar file while preserving file | |
| # permissions. | |
| - name: Package tests relevant artifacts preserving permissions | |
| run: | | |
| files="" | |
| for item in tests ten_manager tgn_args.txt; do | |
| if [ -e "out/linux/${{ matrix.arch }}/$item" ]; then | |
| files="$files out/linux/${{ matrix.arch }}/$item" | |
| fi | |
| done | |
| if [ -n "$files" ]; then | |
| tar -czvf tests-artifacts.tar.gz $files | |
| fi | |
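Tar is used here rather than relying on upload-artifact alone so that executable bits survive the round trip. To check that locally, listing the archive in verbose mode shows each entry's mode bits; a short sketch, assuming the tests-artifacts.tar.gz produced by the step above is in the current directory:

    # List archived entries with their permissions; test binaries should show executable (x) bits.
    tar -tzvf tests-artifacts.tar.gz | head -n 20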
| - name: Upload tests relevant artifacts | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: tests-artifacts-linux-ubuntu2204-${{ matrix.compiler }}-${{ matrix.build_type }}-${{ matrix.arch }} | |
| path: tests-artifacts.tar.gz | |
| if-no-files-found: ignore | |
| - name: Package assets | |
| if: matrix.arch == 'x64' && (github.event_name == 'release' || (github.ref != '' && startsWith(github.ref, 'refs/tags/'))) | |
| continue-on-error: true | |
| run: | | |
| cd out/linux/${{ matrix.arch }} | |
| # Define array of files/directories to package | |
| FILES_TO_PACKAGE=( | |
| "app/default_app_cpp" | |
| "app/default_app_go" | |
| "app/default_app_python" | |
| "app/default_app_nodejs" | |
| "app/transcriber_demo" | |
| "ten_packages/system/ten_runtime" | |
| "ten_packages/system/ten_runtime_go" | |
| "ten_packages/system/ten_runtime_python" | |
| "ten_packages/system/ten_runtime_nodejs" | |
| "ten_packages/system/pytest_ten" | |
| "ten_packages/extension/default_extension_cpp" | |
| "ten_packages/extension/default_extension_go" | |
| "ten_packages/extension/default_extension_python" | |
| "ten_packages/extension/default_async_extension_python" | |
| "ten_packages/extension/default_asr_extension_python" | |
| "ten_packages/extension/default_llm_extension_python" | |
| "ten_packages/extension/default_tts_extension_python" | |
| "ten_packages/extension/default_mllm_extension_python" | |
| "ten_packages/extension/default_extension_nodejs" | |
| "ten_packages/extension/webrtc_vad_cpp" | |
| "ten_packages/addon_loader/python_addon_loader" | |
| "ten_packages/addon_loader/nodejs_addon_loader" | |
| ) | |
| # Create zip archive with existing files only | |
| EXISTING_FILES=() | |
| for file in "${FILES_TO_PACKAGE[@]}"; do | |
| if [ -e "$file" ]; then | |
| EXISTING_FILES+=("$file") | |
| echo "✓ Found: $file" | |
| else | |
| echo "✗ Missing: $file (will be skipped)" | |
| fi | |
| done | |
| if [ ${#EXISTING_FILES[@]} -gt 0 ]; then | |
| echo "Creating zip archive with ${#EXISTING_FILES[@]} items..." | |
| zip -vr ten_packages-linux-${{ matrix.compiler }}-${{ matrix.build_type }}-${{ matrix.arch }}.zip "${EXISTING_FILES[@]}" || { | |
| echo "Warning: zip command failed, but continuing..." | |
| } | |
| else | |
| echo "Warning: No files found to package" | |
| fi | |
| df -h . | |
| shell: bash | |
| - name: Publish to release assets | |
| uses: softprops/action-gh-release@v2 | |
| if: matrix.arch == 'x64' && (github.event_name == 'release' || (github.ref != '' && startsWith(github.ref, 'refs/tags/'))) | |
| with: | |
| tag_name: ${{ github.event_name == 'release' && github.event.release.tag_name || github.ref_name }} | |
| files: | | |
| out/linux/${{ matrix.arch }}/ten_packages-linux-${{ matrix.compiler }}-${{ matrix.build_type }}-${{ matrix.arch }}.zip | |
| - name: Clean up | |
| if: matrix.arch == 'x64' && (github.event_name == 'release' || (github.ref != '' && startsWith(github.ref, 'refs/tags/'))) | |
| continue-on-error: true | |
| run: | | |
| rm -rf out/linux/${{ matrix.arch }}/ten_packages-linux-${{ matrix.compiler }}-${{ matrix.build_type }}-${{ matrix.arch }}.zip | |
| df -h . | |
| - name: Publish release to TEN cloud store | |
| if: ${{ matrix.arch == 'x64' && (github.event_name == 'release' || (github.ref != '' && startsWith(github.ref, 'refs/tags/'))) && matrix.compiler == 'gcc' && matrix.build_type == 'release' }} | |
| continue-on-error: true | |
| run: | | |
| TMAN_BIN=$(pwd)/out/linux/${{ matrix.arch }}/ten_manager/bin/tman | |
| cd out/linux/${{ matrix.arch }} | |
| ARRAY=( | |
| "app/default_app_cpp" | |
| "app/default_app_go" | |
| "app/default_app_python" | |
| "app/default_app_nodejs" | |
| "app/transcriber_demo" | |
| "ten_packages/system/ten_runtime" | |
| "ten_packages/system/ten_runtime_go" | |
| "ten_packages/system/ten_runtime_python" | |
| "ten_packages/system/ten_runtime_nodejs" | |
| "ten_packages/system/pytest_ten" | |
| "ten_packages/extension/default_extension_cpp" | |
| "ten_packages/extension/default_extension_go" | |
| "ten_packages/extension/default_extension_python" | |
| "ten_packages/extension/default_async_extension_python" | |
| "ten_packages/extension/default_asr_extension_python" | |
| "ten_packages/extension/default_llm_extension_python" | |
| "ten_packages/extension/default_tts_extension_python" | |
| "ten_packages/extension/default_mllm_extension_python" | |
| "ten_packages/extension/default_extension_nodejs" | |
| "ten_packages/extension/webrtc_vad_cpp" | |
| "ten_packages/addon_loader/python_addon_loader" | |
| "ten_packages/addon_loader/nodejs_addon_loader" | |
| ) | |
| SUCCESSFUL_PUBLISHES=0 | |
| FAILED_PUBLISHES=0 | |
| for item in "${ARRAY[@]}"; do | |
| echo "Processing: $item" | |
| if [ ! -d "$item" ]; then | |
| echo "✗ Directory not found: $item, skipping..." | |
| FAILED_PUBLISHES=$((FAILED_PUBLISHES + 1)) | |
| continue | |
| fi | |
| cd "$item" || { | |
| echo "✗ Failed to enter directory: $item, skipping..." | |
| FAILED_PUBLISHES=$((FAILED_PUBLISHES + 1)) | |
| continue | |
| } | |
| # Try to get identity | |
| if identity=$(${TMAN_BIN} package --get-identity 2>/dev/null); then | |
| echo "Identity: $identity" | |
| # Try to delete the existing package (this can fail; that's okay) | |
| echo "Attempting to delete existing package..." | |
| # Parse identity string into separate arguments: type name version hash | |
| read -r pkg_type pkg_name pkg_version pkg_hash <<< "$identity" | |
| ${TMAN_BIN} --verbose --admin-token ${{ secrets.TEN_CLOUD_STORE_ADMIN_TOKEN }} delete "$pkg_type" "$pkg_name" "$pkg_version" "$pkg_hash" || { | |
| echo "Warning: Failed to delete existing package (this is normal if package doesn't exist)" | |
| } | |
| # Try to publish | |
| echo "Attempting to publish package..." | |
| if ${TMAN_BIN} --verbose --user-token ${{ secrets.TEN_CLOUD_STORE }} publish; then | |
| echo "✓ Successfully published: $item" | |
| SUCCESSFUL_PUBLISHES=$((SUCCESSFUL_PUBLISHES + 1)) | |
| else | |
| echo "✗ Failed to publish: $item" | |
| FAILED_PUBLISHES=$((FAILED_PUBLISHES + 1)) | |
| fi | |
| else | |
| echo "✗ Failed to get identity for: $item" | |
| FAILED_PUBLISHES=$((FAILED_PUBLISHES + 1)) | |
| fi | |
| cd - >/dev/null | |
| done | |
| echo "Publication summary: $SUCCESSFUL_PUBLISHES successful, $FAILED_PUBLISHES failed" | |
| df -h . | |
| shell: bash | |
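The identity string returned by tman is split into its four fields with a here-string and read, relying on ordinary word splitting. An isolated, illustrative check of that parsing; the sample values are hypothetical, only the field order (type, name, version, hash) comes from the comment in the step above:

    identity="system ten_runtime 0.1.0 abc123"   # hypothetical sample values
    read -r pkg_type pkg_name pkg_version pkg_hash <<< "$identity"
    echo "type=$pkg_type name=$pkg_name version=$pkg_version hash=$pkg_hash"
    # Prints: type=system name=ten_runtime version=0.1.0 hash=abc123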
| - name: Clean up | |
| if: ${{ matrix.arch == 'x64' && (github.event_name == 'release' || (github.ref != '' && startsWith(github.ref, 'refs/tags/'))) && matrix.compiler == 'gcc' && matrix.build_type == 'release' }} | |
| continue-on-error: true | |
| run: | | |
| cd out/linux/${{ matrix.arch }} | |
| ARRAY=( | |
| "app/default_app_cpp" | |
| "app/default_app_go" | |
| "app/default_app_python" | |
| "app/default_app_nodejs" | |
| "app/transcriber_demo" | |
| "ten_packages/system/ten_runtime" | |
| "ten_packages/system/ten_runtime_go" | |
| "ten_packages/system/ten_runtime_python" | |
| "ten_packages/system/ten_runtime_nodejs" | |
| "ten_packages/system/pytest_ten" | |
| "ten_packages/extension/default_extension_cpp" | |
| "ten_packages/extension/default_extension_go" | |
| "ten_packages/extension/default_extension_python" | |
| "ten_packages/extension/default_async_extension_python" | |
| "ten_packages/extension/default_asr_extension_python" | |
| "ten_packages/extension/default_llm_extension_python" | |
| "ten_packages/extension/default_tts_extension_python" | |
| "ten_packages/extension/default_mllm_extension_python" | |
| "ten_packages/extension/default_extension_nodejs" | |
| "ten_packages/extension/webrtc_vad_cpp" | |
| "ten_packages/addon_loader/python_addon_loader" | |
| "ten_packages/addon_loader/nodejs_addon_loader" | |
| ) | |
| SUCCESSFUL_CLEANUPS=0 | |
| FAILED_CLEANUPS=0 | |
| for item in "${ARRAY[@]}"; do | |
| echo "Cleaning up: $item" | |
| if [ -e "$item" ]; then | |
| if rm -rf "$item"; then | |
| echo "✓ Successfully removed: $item" | |
| SUCCESSFUL_CLEANUPS=$((SUCCESSFUL_CLEANUPS + 1)) | |
| else | |
| echo "✗ Failed to remove: $item" | |
| FAILED_CLEANUPS=$((FAILED_CLEANUPS + 1)) | |
| fi | |
| else | |
| echo "• Already absent: $item" | |
| SUCCESSFUL_CLEANUPS=$((SUCCESSFUL_CLEANUPS + 1)) | |
| fi | |
| done | |
| echo "Cleanup summary: $SUCCESSFUL_CLEANUPS successful, $FAILED_CLEANUPS failed" | |
| df -h . | |
| shell: bash | |
| test-standalone: | |
| needs: build | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| compiler: [gcc, clang] | |
| build_type: [debug, release] | |
| arch: [x64, x86] | |
| include: | |
| - compiler: gcc | |
| build_type: debug | |
| arch: x86 | |
| exclude: | |
| - compiler: clang | |
| arch: x86 | |
| container: | |
| image: ghcr.io/ten-framework/ten_building_ubuntu2204 | |
| steps: | |
| - name: Check PR status before matrix execution | |
| uses: actions/github-script@v7 | |
| with: | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| script: | | |
| console.log('Event name:', context.eventName); | |
| console.log('Matrix:', ${{ toJson(matrix) }}); | |
| // Only check for PR events | |
| if (context.eventName !== 'pull_request') { | |
| console.log('Not a PR event, continuing...'); | |
| return; | |
| } | |
| // Ensure we have PR data | |
| if (!context.payload.pull_request) { | |
| console.log('No pull_request data in payload, continuing...'); | |
| return; | |
| } | |
| const { owner, repo } = context.repo; | |
| const prNumber = context.payload.pull_request.number; | |
| console.log(`Checking PR #${prNumber} status for test-standalone matrix: compiler=${{ matrix.compiler }}, build_type=${{ matrix.build_type }}, arch=${{ matrix.arch }}...`); | |
| try { | |
| const pr = await github.rest.pulls.get({ | |
| owner, | |
| repo, | |
| pull_number: prNumber, | |
| }); | |
| console.log(`PR #${prNumber} state: ${pr.data.state}, merged: ${pr.data.merged}`); | |
| if (pr.data.state === 'closed') { | |
| if (pr.data.merged) { | |
| console.log(`PR #${prNumber} has been merged. Stopping matrix execution.`); | |
| core.setFailed('PR has been merged, stopping execution to save resources.'); | |
| } else { | |
| console.log(`PR #${prNumber} has been closed. Stopping matrix execution.`); | |
| core.setFailed('PR has been closed, stopping execution to save resources.'); | |
| } | |
| } else { | |
| console.log(`PR #${prNumber} is still open. Continuing matrix execution.`); | |
| } | |
| } catch (error) { | |
| console.error(`Error checking PR status: ${error.message}`); | |
| console.log('Error details:', error); | |
| console.log('Continuing matrix execution due to error...'); | |
| } | |
| - name: Download build artifacts (tar archive) | |
| uses: actions/download-artifact@v4 | |
| with: | |
| name: tests-artifacts-linux-ubuntu2204-${{ matrix.compiler }}-${{ matrix.build_type }}-${{ matrix.arch }} | |
| path: out/linux/${{ matrix.arch }} | |
| - name: Extract tests artifacts preserving permissions | |
| run: | | |
| tar -xzf out/linux/${{ matrix.arch }}/tests-artifacts.tar.gz | |
| - name: View folder structure content | |
| run: | | |
| df -h . | |
| tree -I ".*|*.h|*.hpp|*.py" out/linux/${{ matrix.arch }} | |
| - name: Set ulimit and sysctl | |
| run: | | |
| # Increase max number of open file descriptors as much as allowed. | |
| TARGET=102400 | |
| HARD=$(ulimit -Hn) | |
| echo "Current hard limit for open files: $HARD" | |
| if [ "$HARD" != "unlimited" ] && [ "$HARD" -lt "$TARGET" ]; then | |
| echo "Target ($TARGET) is greater than hard limit ($HARD), using hard limit instead." | |
| TARGET="$HARD" | |
| fi | |
| # Try to set the soft limit; if it fails, just warn and continue. | |
| if ! ulimit -n "$TARGET"; then | |
| echo "WARNING: failed to increase ulimit -n to $TARGET, continuing with existing limit." | |
| fi | |
| # Adjust somaxconn; ignore failure if not permitted. | |
| if ! sysctl -w net.core.somaxconn=8192; then | |
| echo "WARNING: failed to set net.core.somaxconn, continuing." | |
| fi | |
| shell: bash | |
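To see what actually took effect after this step (the soft-limit bump or the sysctl write can be refused silently inside a container), the current values can be inspected with standard commands; a quick check you could append while debugging:

    # Soft and hard open-file limits, and the connection backlog ceiling.
    ulimit -Sn
    ulimit -Hn
    sysctl net.core.somaxconn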
| - uses: actions/setup-node@v4 | |
| with: | |
| node-version: 20 | |
| - name: Update tman config.json | |
| if: matrix.arch == 'x64' | |
| run: | | |
| CONFIG_FILE="out/linux/${{ matrix.arch }}/tests/local_registry/config.json" | |
| echo "Before update:" | |
| cat $CONFIG_FILE | |
| sed -i "s|\(file://\)[^\"]*\(out\/linux\/${{ matrix.arch }}\/tests\/local_registry\)|\1${GITHUB_WORKSPACE}/\2|" $CONFIG_FILE | |
| echo "After update:" | |
| cat $CONFIG_FILE | |
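The sed expression rewrites the file:// registry URL so it points at the local registry inside the current workspace while keeping the trailing out/linux/<arch>/tests/local_registry path recorded at build time. An illustrative dry run on a single hypothetical config value; the old prefix and the GITHUB_WORKSPACE value below are made up for the example:

    GITHUB_WORKSPACE=/workspace/ten-framework   # hypothetical workspace path
    echo '"file:///old/build/dir/out/linux/x64/tests/local_registry"' \
      | sed "s|\(file://\)[^\"]*\(out/linux/x64/tests/local_registry\)|\1${GITHUB_WORKSPACE}/\2|"
    # Prints: "file:///workspace/ten-framework/out/linux/x64/tests/local_registry"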
| - name: Run Tests (ten_utils_unit_test) | |
| env: | |
| ASAN_OPTIONS: detect_leaks=1:detect_stack_use_after_return=1:color=always:unmap_shadow_on_exit=1:abort_on_error=1 | |
| MALLOC_CHECK_: 3 | |
| TEN_ENABLE_MEMORY_TRACKING: "true" | |
| TEN_ENABLE_BACKTRACE_DUMP: "true" | |
| run: | | |
| chmod +x out/linux/${{ matrix.arch }}/tests/standalone/ten_utils_unit_test | |
| out/linux/${{ matrix.arch }}/tests/standalone/ten_utils_unit_test || { echo "test failed"; exit 1; } | |
| df -h . | |
| - name: Clean up | |
| continue-on-error: true | |
| run: | | |
| rm -rf out/linux/${{ matrix.arch }}/tests/standalone/ten_utils_unit_test | |
| df -h . | |
| - name: Run Tests (ten_runtime_unit_test) | |
| env: | |
| ASAN_OPTIONS: detect_leaks=1:detect_stack_use_after_return=1:color=always:unmap_shadow_on_exit=1:abort_on_error=1 | |
| MALLOC_CHECK_: 3 | |
| TEN_ENABLE_MEMORY_TRACKING: "true" | |
| TEN_ENABLE_BACKTRACE_DUMP: "true" | |
| run: | | |
| chmod +x out/linux/${{ matrix.arch }}/tests/standalone/ten_runtime_unit_test | |
| out/linux/${{ matrix.arch }}/tests/standalone/ten_runtime_unit_test || { echo "test failed"; exit 1; } | |
| df -h . | |
| - name: Clean up | |
| continue-on-error: true | |
| run: | | |
| rm -rf out/linux/${{ matrix.arch }}/tests/standalone/ten_runtime_unit_test | |
| df -h . | |
| - name: Run Tests (ten_rust standalone tests) | |
| if: matrix.arch == 'x64' | |
| env: | |
| ASAN_OPTIONS: detect_leaks=1:detect_stack_use_after_return=1:color=always:unmap_shadow_on_exit=1:abort_on_error=1 | |
| MALLOC_CHECK_: 3 | |
| TEN_ENABLE_MEMORY_TRACKING: "true" | |
| TEN_ENABLE_BACKTRACE_DUMP: "true" | |
| RUST_BACKTRACE: "full" | |
| run: | | |
| cd out/linux/${{ matrix.arch }}/tests/standalone/ten_rust | |
| chmod +x unit_test | |
| chmod +x integration_test | |
| ./unit_test --nocapture --test-threads=1 || { echo "ten_rust unit test failed"; exit 1; } | |
| ./integration_test --nocapture --test-threads=1 || { echo "ten_rust integration test failed"; exit 1; } | |
| df -h . | |
| - name: Clean up | |
| if: matrix.arch == 'x64' | |
| continue-on-error: true | |
| run: | | |
| rm -rf out/linux/${{ matrix.arch }}/tests/standalone/ten_rust | |
| df -h . | |
| - name: Run Tests (ten_manager standalone tests) | |
| if: matrix.arch == 'x64' | |
| env: | |
| ASAN_OPTIONS: detect_leaks=1:detect_stack_use_after_return=1:color=always:unmap_shadow_on_exit=1:abort_on_error=1 | |
| MALLOC_CHECK_: 3 | |
| TEN_ENABLE_MEMORY_TRACKING: "true" | |
| TEN_ENABLE_BACKTRACE_DUMP: "true" | |
| RUST_BACKTRACE: "full" | |
| run: | | |
| cd out/linux/${{ matrix.arch }}/tests/standalone/ten_manager | |
| chmod +x unit_test | |
| chmod +x integration_test | |
| ./unit_test --nocapture --test-threads=1 || { echo "ten_manager unit test failed"; exit 1; } | |
| ./integration_test --nocapture --test-threads=1 || { echo "ten_manager integration test failed"; exit 1; } | |
| df -h . | |
| - name: Clean up | |
| if: matrix.arch == 'x64' | |
| continue-on-error: true | |
| run: | | |
| rm -rf out/linux/${{ matrix.arch }}/tests/standalone/ten_manager | |
| df -h . | |
| - name: Run Tests (ten_runtime_smoke_test) | |
| env: | |
| ASAN_OPTIONS: detect_leaks=1:detect_stack_use_after_return=1:color=always:unmap_shadow_on_exit=1:abort_on_error=1 | |
| MALLOC_CHECK_: 3 | |
| TEN_ENABLE_MEMORY_TRACKING: "true" | |
| TEN_ENABLE_BACKTRACE_DUMP: "true" | |
| run: | | |
| chmod +x out/linux/${{ matrix.arch }}/tests/standalone/ten_runtime_smoke_test | |
| out/linux/${{ matrix.arch }}/tests/standalone/ten_runtime_smoke_test || { echo "test failed"; exit 1; } | |
| df -h . | |
| - name: Clean up | |
| continue-on-error: true | |
| run: | | |
| rm -rf out/linux/${{ matrix.arch }}/tests/standalone/ten_runtime_smoke_test | |
| df -h . | |
| test-integration-ten-manager: | |
| needs: build | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| compiler: [gcc, clang] | |
| build_type: [debug, release] | |
| arch: [x64, x86] | |
| include: | |
| - compiler: gcc | |
| build_type: debug | |
| arch: x86 | |
| exclude: | |
| - compiler: clang | |
| arch: x86 | |
| container: | |
| image: ghcr.io/ten-framework/ten_building_ubuntu2204 | |
| steps: | |
| - name: Check PR status before matrix execution | |
| uses: actions/github-script@v7 | |
| with: | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| script: | | |
| console.log('Event name:', context.eventName); | |
| console.log('Matrix:', ${{ toJson(matrix) }}); | |
| // Only check for PR events | |
| if (context.eventName !== 'pull_request') { | |
| console.log('Not a PR event, continuing...'); | |
| return; | |
| } | |
| // Ensure we have PR data | |
| if (!context.payload.pull_request) { | |
| console.log('No pull_request data in payload, continuing...'); | |
| return; | |
| } | |
| const { owner, repo } = context.repo; | |
| const prNumber = context.payload.pull_request.number; | |
| console.log(`Checking PR #${prNumber} status for test-integration-ten-manager matrix: compiler=${{ matrix.compiler }}, build_type=${{ matrix.build_type }}, arch=${{ matrix.arch }}...`); | |
| try { | |
| const pr = await github.rest.pulls.get({ | |
| owner, | |
| repo, | |
| pull_number: prNumber, | |
| }); | |
| console.log(`PR #${prNumber} state: ${pr.data.state}, merged: ${pr.data.merged}`); | |
| if (pr.data.state === 'closed') { | |
| if (pr.data.merged) { | |
| console.log(`PR #${prNumber} has been merged. Stopping matrix execution.`); | |
| core.setFailed('PR has been merged, stopping execution to save resources.'); | |
| } else { | |
| console.log(`PR #${prNumber} has been closed. Stopping matrix execution.`); | |
| core.setFailed('PR has been closed, stopping execution to save resources.'); | |
| } | |
| } else { | |
| console.log(`PR #${prNumber} is still open. Continuing matrix execution.`); | |
| } | |
| } catch (error) { | |
| console.error(`Error checking PR status: ${error.message}`); | |
| console.log('Error details:', error); | |
| console.log('Continuing matrix execution due to error...'); | |
| } | |
| - uses: actions/checkout@v4 | |
| with: | |
| fetch-depth: 0 | |
| submodules: false | |
| - name: Trust working directory | |
| run: git config --global --add safe.directory "${GITHUB_WORKSPACE}" | |
| - name: Initialize and update submodules except portal/ | |
| run: | | |
| # Retrieve all submodule paths, excluding `portal/`. | |
| submodules=$(git config --file .gitmodules --get-regexp path | awk '$2 != "portal" { print $2 }') | |
| git submodule init | |
| for submodule in $submodules; do | |
| echo "Initializing submodule: $submodule" | |
| git submodule update --init --recursive --depth 1 "$submodule" | |
| done | |
| - name: Download build artifacts (tar archive) | |
| uses: actions/download-artifact@v4 | |
| with: | |
| name: tests-artifacts-linux-ubuntu2204-${{ matrix.compiler }}-${{ matrix.build_type }}-${{ matrix.arch }} | |
| path: out/linux/${{ matrix.arch }} | |
| - name: Extract tests artifacts preserving permissions | |
| run: | | |
| tar -xzf out/linux/${{ matrix.arch }}/tests-artifacts.tar.gz | |
| - name: View folder structure content | |
| run: | | |
| df -h . | |
| tree -I ".*|*.h|*.hpp|*.py" out/linux/${{ matrix.arch }} | |
| - name: Set ulimit and sysctl | |
| run: | | |
| # Increase max number of open file descriptors as much as allowed. | |
| TARGET=102400 | |
| HARD=$(ulimit -Hn) | |
| echo "Current hard limit for open files: $HARD" | |
| if [ "$HARD" != "unlimited" ] && [ "$HARD" -lt "$TARGET" ]; then | |
| echo "Target ($TARGET) is greater than hard limit ($HARD), using hard limit instead." | |
| TARGET="$HARD" | |
| fi | |
| # Try to set the soft limit; if it fails, just warn and continue. | |
| if ! ulimit -n "$TARGET"; then | |
| echo "WARNING: failed to increase ulimit -n to $TARGET, continuing with existing limit." | |
| fi | |
| # Adjust somaxconn; ignore failure if not permitted. | |
| if ! sysctl -w net.core.somaxconn=8192; then | |
| echo "WARNING: failed to set net.core.somaxconn, continuing." | |
| fi | |
| shell: bash | |
| - uses: actions/setup-node@v4 | |
| with: | |
| node-version: 20 | |
| - name: Update tman config.json | |
| if: matrix.arch == 'x64' | |
| run: | | |
| CONFIG_FILE="out/linux/${{ matrix.arch }}/tests/local_registry/config.json" | |
| echo "Before update:" | |
| cat $CONFIG_FILE | |
| sed -i "s|\(file://\)[^\"]*\(out\/linux\/${{ matrix.arch }}\/tests\/local_registry\)|\1${GITHUB_WORKSPACE}/\2|" $CONFIG_FILE | |
| echo "After update:" | |
| cat $CONFIG_FILE | |
| - name: Install Python dependencies via script | |
| run: | | |
| python .github/tools/setup_pytest_dependencies.py | |
| df -h . | |
| - name: Run Tests (ten_manager pytest tests) | |
| if: matrix.arch == 'x64' | |
| env: | |
| ASAN_OPTIONS: detect_leaks=1:detect_stack_use_after_return=1:color=always:unmap_shadow_on_exit=1:abort_on_error=1 | |
| MALLOC_CHECK_: 3 | |
| TEN_ENABLE_MEMORY_TRACKING: "true" | |
| TEN_ENABLE_BACKTRACE_DUMP: "true" | |
| run: | | |
| cd out/linux/${{ matrix.arch }}/ | |
| pytest -s tests/ten_manager/ | |
| df -h . | |
| - name: Clean up | |
| if: matrix.arch == 'x64' | |
| continue-on-error: true | |
| run: | | |
| rm -rf out/linux/${{ matrix.arch }}/tests/ten_manager | |
| df -h . | |
| test-integration-cpp: | |
| needs: build | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| compiler: [gcc, clang] | |
| build_type: [debug, release] | |
| arch: [x64, x86] | |
| include: | |
| - compiler: gcc | |
| build_type: debug | |
| arch: x86 | |
| exclude: | |
| - compiler: clang | |
| arch: x86 | |
| container: | |
| image: ghcr.io/ten-framework/ten_building_ubuntu2204 | |
| steps: | |
| - name: Check PR status before matrix execution | |
| uses: actions/github-script@v7 | |
| with: | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| script: | | |
| console.log('Event name:', context.eventName); | |
| console.log('Matrix:', ${{ toJson(matrix) }}); | |
| // Only check for PR events | |
| if (context.eventName !== 'pull_request') { | |
| console.log('Not a PR event, continuing...'); | |
| return; | |
| } | |
| // Ensure we have PR data | |
| if (!context.payload.pull_request) { | |
| console.log('No pull_request data in payload, continuing...'); | |
| return; | |
| } | |
| const { owner, repo } = context.repo; | |
| const prNumber = context.payload.pull_request.number; | |
| console.log(`Checking PR #${prNumber} status for test-integration-cpp matrix: compiler=${{ matrix.compiler }}, build_type=${{ matrix.build_type }}, arch=${{ matrix.arch }}...`); | |
| try { | |
| const pr = await github.rest.pulls.get({ | |
| owner, | |
| repo, | |
| pull_number: prNumber, | |
| }); | |
| console.log(`PR #${prNumber} state: ${pr.data.state}, merged: ${pr.data.merged}`); | |
| if (pr.data.state === 'closed') { | |
| if (pr.data.merged) { | |
| console.log(`PR #${prNumber} has been merged. Stopping matrix execution.`); | |
| core.setFailed('PR has been merged, stopping execution to save resources.'); | |
| } else { | |
| console.log(`PR #${prNumber} has been closed. Stopping matrix execution.`); | |
| core.setFailed('PR has been closed, stopping execution to save resources.'); | |
| } | |
| } else { | |
| console.log(`PR #${prNumber} is still open. Continuing matrix execution.`); | |
| } | |
| } catch (error) { | |
| console.error(`Error checking PR status: ${error.message}`); | |
| console.log('Error details:', error); | |
| console.log('Continuing matrix execution due to error...'); | |
| } | |
| - uses: actions/checkout@v4 | |
| with: | |
| fetch-depth: 0 | |
| submodules: false | |
| - name: Trust working directory | |
| run: git config --global --add safe.directory "${GITHUB_WORKSPACE}" | |
| - name: Initialize and update submodules except portal/ | |
| run: | | |
| # Retrieve all submodule paths, excluding `portal/`. | |
| submodules=$(git config --file .gitmodules --get-regexp path | awk '$2 != "portal" { print $2 }') | |
| git submodule init | |
| for submodule in $submodules; do | |
| echo "Initializing submodule: $submodule" | |
| git submodule update --init --recursive --depth 1 "$submodule" | |
| done | |
| - name: Download build artifacts (tar archive) | |
| uses: actions/download-artifact@v4 | |
| with: | |
| name: tests-artifacts-linux-ubuntu2204-${{ matrix.compiler }}-${{ matrix.build_type }}-${{ matrix.arch }} | |
| path: out/linux/${{ matrix.arch }} | |
| - name: Extract tests artifacts preserving permissions | |
| run: | | |
| tar -xzf out/linux/${{ matrix.arch }}/tests-artifacts.tar.gz | |
| - name: View folder structure content | |
| run: | | |
| df -h . | |
| tree -I ".*|*.h|*.hpp|*.py" out/linux/${{ matrix.arch }} | |
| - name: Set ulimit and sysctl | |
| run: | | |
| # Increase max number of open file descriptors as much as allowed. | |
| TARGET=102400 | |
| HARD=$(ulimit -Hn) | |
| echo "Current hard limit for open files: $HARD" | |
| if [ "$HARD" != "unlimited" ] && [ "$HARD" -lt "$TARGET" ]; then | |
| echo "Target ($TARGET) is greater than hard limit ($HARD), using hard limit instead." | |
| TARGET="$HARD" | |
| fi | |
| # Try to set the soft limit; if it fails, just warn and continue. | |
| if ! ulimit -n "$TARGET"; then | |
| echo "WARNING: failed to increase ulimit -n to $TARGET, continuing with existing limit." | |
| fi | |
| # Adjust somaxconn; ignore failure if not permitted. | |
| if ! sysctl -w net.core.somaxconn=8192; then | |
| echo "WARNING: failed to set net.core.somaxconn, continuing." | |
| fi | |
| shell: bash | |
| - uses: actions/setup-node@v4 | |
| with: | |
| node-version: 20 | |
| - name: Update tman config.json | |
| if: matrix.arch == 'x64' | |
| run: | | |
| CONFIG_FILE="out/linux/${{ matrix.arch }}/tests/local_registry/config.json" | |
| echo "Before update:" | |
| cat $CONFIG_FILE | |
| sed -i "s|\(file://\)[^\"]*\(out\/linux\/${{ matrix.arch }}\/tests\/local_registry\)|\1${GITHUB_WORKSPACE}/\2|" $CONFIG_FILE | |
| echo "After update:" | |
| cat $CONFIG_FILE | |
| - name: Install Python dependencies via script | |
| run: | | |
| python .github/tools/setup_pytest_dependencies.py | |
| df -h . | |
| - name: Run tests (ten_runtime C++ integration tests) | |
| if: matrix.arch == 'x64' | |
| env: | |
| ASAN_OPTIONS: detect_leaks=1:detect_stack_use_after_return=1:color=always:unmap_shadow_on_exit=1:abort_on_error=1 | |
| MALLOC_CHECK_: 3 | |
| TEN_ENABLE_MEMORY_TRACKING: "true" | |
| TEN_ENABLE_BACKTRACE_DUMP: "true" | |
| GOTRACEBACK: crash | |
| run: | | |
| df -h . | |
| export PATH=$(pwd)/core/ten_gn:$PATH | |
| cd out/linux/${{ matrix.arch }}/ | |
| pytest -s tests/ten_runtime/integration/cpp/ | |
| df -h . | |
| test-integration-go: | |
| needs: build | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| compiler: [gcc, clang] | |
| build_type: [debug, release] | |
| arch: [x64] | |
| container: | |
| image: ghcr.io/ten-framework/ten_building_ubuntu2204 | |
| steps: | |
| - name: Check PR status before matrix execution | |
| uses: actions/github-script@v7 | |
| with: | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| script: | | |
| console.log('Event name:', context.eventName); | |
| console.log('Matrix:', ${{ toJson(matrix) }}); | |
| // Only check for PR events | |
| if (context.eventName !== 'pull_request') { | |
| console.log('Not a PR event, continuing...'); | |
| return; | |
| } | |
| // Ensure we have PR data | |
| if (!context.payload.pull_request) { | |
| console.log('No pull_request data in payload, continuing...'); | |
| return; | |
| } | |
| const { owner, repo } = context.repo; | |
| const prNumber = context.payload.pull_request.number; | |
| console.log(`Checking PR #${prNumber} status for test-integration-go matrix: compiler=${{ matrix.compiler }}, build_type=${{ matrix.build_type }}, arch=${{ matrix.arch }}...`); | |
| try { | |
| const pr = await github.rest.pulls.get({ | |
| owner, | |
| repo, | |
| pull_number: prNumber, | |
| }); | |
| console.log(`PR #${prNumber} state: ${pr.data.state}, merged: ${pr.data.merged}`); | |
| if (pr.data.state === 'closed') { | |
| if (pr.data.merged) { | |
| console.log(`PR #${prNumber} has been merged. Stopping matrix execution.`); | |
| core.setFailed('PR has been merged, stopping execution to save resources.'); | |
| } else { | |
| console.log(`PR #${prNumber} has been closed. Stopping matrix execution.`); | |
| core.setFailed('PR has been closed, stopping execution to save resources.'); | |
| } | |
| } else { | |
| console.log(`PR #${prNumber} is still open. Continuing matrix execution.`); | |
| } | |
| } catch (error) { | |
| console.error(`Error checking PR status: ${error.message}`); | |
| console.log('Error details:', error); | |
| console.log('Continuing matrix execution due to error...'); | |
| } | |
| - uses: actions/checkout@v4 | |
| with: | |
| fetch-depth: 0 | |
| submodules: false | |
| - name: Trust working directory | |
| run: git config --global --add safe.directory "${GITHUB_WORKSPACE}" | |
| - name: Initialize and update submodules except portal/ | |
| run: | | |
| # Retrieve all submodule paths, excluding `portal/`. | |
| submodules=$(git config --file .gitmodules --get-regexp path | awk '$2 != "portal" { print $2 }') | |
| git submodule init | |
| for submodule in $submodules; do | |
| echo "Initializing submodule: $submodule" | |
| git submodule update --init --recursive --depth 1 "$submodule" | |
| done | |
| - name: Download build artifacts (tar archive) | |
| uses: actions/download-artifact@v4 | |
| with: | |
| name: tests-artifacts-linux-ubuntu2204-${{ matrix.compiler }}-${{ matrix.build_type }}-${{ matrix.arch }} | |
| path: out/linux/${{ matrix.arch }} | |
| - name: Extract tests artifacts preserving permissions | |
| run: | | |
| tar -xzf out/linux/${{ matrix.arch }}/tests-artifacts.tar.gz | |
| - name: View folder structure content | |
| run: | | |
| df -h . | |
| tree -I ".*|*.h|*.hpp|*.py" out/linux/${{ matrix.arch }} | |
| - name: Set ulimit and sysctl | |
| run: | | |
| # Increase max number of open file descriptors as much as allowed. | |
| TARGET=102400 | |
| HARD=$(ulimit -Hn) | |
| echo "Current hard limit for open files: $HARD" | |
| if [ "$HARD" != "unlimited" ] && [ "$HARD" -lt "$TARGET" ]; then | |
| echo "Target ($TARGET) is greater than hard limit ($HARD), using hard limit instead." | |
| TARGET="$HARD" | |
| fi | |
| # Try to set the soft limit; if it fails, just warn and continue. | |
| if ! ulimit -n "$TARGET"; then | |
| echo "WARNING: failed to increase ulimit -n to $TARGET, continuing with existing limit." | |
| fi | |
| # Adjust somaxconn; ignore failure if not permitted. | |
| if ! sysctl -w net.core.somaxconn=8192; then | |
| echo "WARNING: failed to set net.core.somaxconn, continuing." | |
| fi | |
| shell: bash | |
| - uses: actions/setup-node@v4 | |
| with: | |
| node-version: 20 | |
| - name: Update tman config.json | |
| if: matrix.arch == 'x64' | |
| run: | | |
| CONFIG_FILE="out/linux/${{ matrix.arch }}/tests/local_registry/config.json" | |
| echo "Before update:" | |
| cat $CONFIG_FILE | |
| sed -i "s|\(file://\)[^\"]*\(out\/linux\/${{ matrix.arch }}\/tests\/local_registry\)|\1${GITHUB_WORKSPACE}/\2|" $CONFIG_FILE | |
| echo "After update:" | |
| cat $CONFIG_FILE | |
| - name: Install Python dependencies via script | |
| run: | | |
| python .github/tools/setup_pytest_dependencies.py | |
| df -h . | |
| - name: Run tests (ten_runtime Go integration tests) | |
| if: matrix.arch == 'x64' | |
| env: | |
| ASAN_OPTIONS: detect_leaks=1:detect_stack_use_after_return=1:color=always:unmap_shadow_on_exit=1:abort_on_error=1 | |
| MALLOC_CHECK_: 3 | |
| TEN_ENABLE_MEMORY_TRACKING: "true" | |
| TEN_ENABLE_BACKTRACE_DUMP: "true" | |
| GOTRACEBACK: crash | |
| run: | | |
| df -h . | |
| export PATH=$(pwd)/core/ten_gn:$PATH | |
| cd out/linux/${{ matrix.arch }}/ | |
| pytest -s tests/ten_runtime/integration/go/ | |
| df -h . | |
| test-integration-python: | |
| needs: build | |
| runs-on: ubuntu-22.04 | |
| strategy: | |
| matrix: | |
| compiler: [gcc, clang] | |
| build_type: [debug, release] | |
| arch: [x64] | |
| defaults: | |
| run: | |
| shell: bash | |
| steps: | |
| - name: Free up disk space on runner | |
| run: | | |
| echo "=== Initial disk space ===" | |
| df -h | |
| # Remove unnecessary pre-installed software on GitHub runner | |
| sudo rm -rf /usr/share/dotnet | |
| sudo rm -rf /usr/local/lib/android | |
| sudo rm -rf /opt/ghc | |
| sudo rm -rf /opt/hostedtoolcache/CodeQL | |
| sudo docker image prune --all --force | |
| echo "=== Disk space after cleanup ===" | |
| df -h | |
| - name: Install system dependencies | |
| run: | | |
| sudo apt-get update | |
| sudo apt-get install -y --no-install-recommends \ | |
| build-essential \ | |
| wget \ | |
| curl \ | |
| git \ | |
| libssl-dev \ | |
| pkg-config \ | |
| cmake \ | |
| autoconf \ | |
| libtool \ | |
| tree \ | |
| zip \ | |
| unzip \ | |
| jq \ | |
| python3 \ | |
| python3-dev \ | |
| python3-pip \ | |
| python3-venv \ | |
| ca-certificates \ | |
| software-properties-common \ | |
| gnupg \ | |
| lsb-release \ | |
| libunwind-dev \ | |
| libasound2 \ | |
| libavformat-dev \ | |
| libavfilter-dev \ | |
| libx264-dev \ | |
| nasm \ | |
| yasm \ | |
| uuid-dev \ | |
| libexpat1-dev \ | |
| libcurl4-openssl-dev \ | |
| zlib1g-dev \ | |
| libncurses5-dev \ | |
| libffi-dev \ | |
| libreadline-dev \ | |
| libmsgpack-dev \ | |
| libcrypto++-dev | |
| # Clean up apt cache | |
| sudo apt-get clean | |
| sudo rm -rf /var/lib/apt/lists/* | |
| echo "=== Disk space after system dependencies ===" | |
| df -h | |
| - name: Check PR status before matrix execution | |
| uses: actions/github-script@v7 | |
| with: | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| script: | | |
| console.log('Event name:', context.eventName); | |
| console.log('Matrix:', ${{ toJson(matrix) }}); | |
| // Only check for PR events | |
| if (context.eventName !== 'pull_request') { | |
| console.log('Not a PR event, continuing...'); | |
| return; | |
| } | |
| // Ensure we have PR data | |
| if (!context.payload.pull_request) { | |
| console.log('No pull_request data in payload, continuing...'); | |
| return; | |
| } | |
| const { owner, repo } = context.repo; | |
| const prNumber = context.payload.pull_request.number; | |
| console.log(`Checking PR #${prNumber} status for test-integration-python matrix: compiler=${{ matrix.compiler }}, build_type=${{ matrix.build_type }}, arch=${{ matrix.arch }}...`); | |
| try { | |
| const pr = await github.rest.pulls.get({ | |
| owner, | |
| repo, | |
| pull_number: prNumber, | |
| }); | |
| console.log(`PR #${prNumber} state: ${pr.data.state}, merged: ${pr.data.merged}`); | |
| if (pr.data.state === 'closed') { | |
| if (pr.data.merged) { | |
| console.log(`PR #${prNumber} has been merged. Stopping matrix execution.`); | |
| core.setFailed('PR has been merged, stopping execution to save resources.'); | |
| } else { | |
| console.log(`PR #${prNumber} has been closed. Stopping matrix execution.`); | |
| core.setFailed('PR has been closed, stopping execution to save resources.'); | |
| } | |
| } else { | |
| console.log(`PR #${prNumber} is still open. Continuing matrix execution.`); | |
| } | |
| } catch (error) { | |
| console.error(`Error checking PR status: ${error.message}`); | |
| console.log('Error details:', error); | |
| console.log('Continuing matrix execution due to error...'); | |
| } | |
| - name: Install LLVM 21 tools | |
| run: | | |
| # Add LLVM official repository for LLVM 21 | |
| wget -qO- https://apt.llvm.org/llvm-snapshot.gpg.key | sudo tee /etc/apt/trusted.gpg.d/apt.llvm.org.asc | |
| sudo add-apt-repository -y "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-21 main" | |
| sudo apt-get update | |
| # Purge preinstalled clang-13/14/15 BEFORE installing clang-21 | |
| sudo apt-get purge -y clang-13 clang-14 clang-15 \ | |
| llvm-13-dev llvm-13-runtime llvm-13 \ | |
| llvm-14-dev llvm-14-runtime llvm-14 \ | |
| llvm-15-dev llvm-15-runtime llvm-15 || true | |
| sudo apt-get autopurge -y || true | |
| # Now install LLVM 21 | |
| sudo apt-get install -y \ | |
| llvm-21 \ | |
| clang-21 \ | |
| libc++-21-dev \ | |
| libc++1-21 \ | |
| libc++abi-21-dev \ | |
| libc++abi1-21 | |
| # Clean up apt cache | |
| sudo apt-get clean | |
| sudo rm -rf /var/lib/apt/lists/* | |
| # Create symlinks to make clang-21 available without version suffix | |
| sudo ln -sf /usr/bin/clang-21 /usr/local/bin/clang | |
| sudo ln -sf /usr/bin/clang++-21 /usr/local/bin/clang++ | |
| sudo ln -sf /usr/bin/llvm-ar-21 /usr/local/bin/llvm-ar | |
| sudo ln -sf /usr/bin/llvm-nm-21 /usr/local/bin/llvm-nm | |
| sudo ln -sf /usr/bin/llvm-ranlib-21 /usr/local/bin/llvm-ranlib | |
| # Verify installation | |
| echo "=== Verifying LLVM 21 installation ===" | |
| which clang | |
| clang --version | head -1 | |
| - uses: actions/checkout@v4 | |
| with: | |
| fetch-depth: 0 | |
| submodules: false | |
| - name: Trust working directory | |
| run: git config --global --add safe.directory "${GITHUB_WORKSPACE}" | |
| - name: Initialize and update submodules except portal/ | |
| run: | | |
| # Retrieve all submodule paths, excluding `portal/`. | |
| submodules=$(git config --file .gitmodules --get-regexp path | awk '$2 != "portal" { print $2 }') | |
| git submodule init | |
| for submodule in $submodules; do | |
| echo "Initializing submodule: $submodule" | |
| git submodule update --init --recursive --depth 1 "$submodule" | |
| done | |
| - name: Download build artifacts (tar archive) | |
| uses: actions/download-artifact@v4 | |
| with: | |
| name: tests-artifacts-linux-ubuntu2204-${{ matrix.compiler }}-${{ matrix.build_type }}-${{ matrix.arch }} | |
| path: out/linux/${{ matrix.arch }} | |
| - name: Extract tests artifacts preserving permissions | |
| run: | | |
| tar -xzf out/linux/${{ matrix.arch }}/tests-artifacts.tar.gz | |
| - name: View folder structure content | |
| run: | | |
| df -h . | |
| tree -I ".*|*.h|*.hpp|*.py" out/linux/${{ matrix.arch }} | |
| - name: Set ulimit and sysctl | |
| run: | | |
| # Increase max number of open file descriptors as much as allowed. | |
| TARGET=102400 | |
| HARD=$(ulimit -Hn) | |
| echo "Current hard limit for open files: $HARD" | |
| if [ "$HARD" != "unlimited" ] && [ "$HARD" -lt "$TARGET" ]; then | |
| echo "Target ($TARGET) is greater than hard limit ($HARD), using hard limit instead." | |
| TARGET="$HARD" | |
| fi | |
| # Try to set the soft limit; if it fails, just warn and continue. | |
| if ! ulimit -n "$TARGET"; then | |
| echo "WARNING: failed to increase ulimit -n to $TARGET, continuing with existing limit." | |
| fi | |
| # Adjust somaxconn; ignore failure if not permitted. | |
| if ! sudo sysctl -w net.core.somaxconn=8192; then | |
| echo "WARNING: failed to set net.core.somaxconn, continuing." | |
| fi | |
| shell: bash | |
| - uses: actions/setup-node@v4 | |
| with: | |
| node-version: 20 | |
| - name: Update tman config.json | |
| if: matrix.arch == 'x64' | |
| run: | | |
| CONFIG_FILE="out/linux/${{ matrix.arch }}/tests/local_registry/config.json" | |
| echo "Before update:" | |
| cat $CONFIG_FILE | |
| sed -i "s|\(file://\)[^\"]*\(out\/linux\/${{ matrix.arch }}\/tests\/local_registry\)|\1${GITHUB_WORKSPACE}/\2|" $CONFIG_FILE | |
| echo "After update:" | |
| cat $CONFIG_FILE | |
| - name: Install Python dependencies via script | |
| run: | | |
| python .github/tools/setup_pytest_dependencies.py | |
| df -h . | |
| - name: Run tests (ten_runtime Python integration tests) | |
| if: matrix.arch == 'x64' | |
| env: | |
| ASAN_OPTIONS: detect_leaks=1:detect_stack_use_after_return=1:color=always:unmap_shadow_on_exit=1:abort_on_error=1 | |
| MALLOC_CHECK_: 3 | |
| TEN_ENABLE_MEMORY_TRACKING: "true" | |
| TEN_ENABLE_BACKTRACE_DUMP: "true" | |
| GOTRACEBACK: crash | |
| run: | | |
| curl -fsSL https://ollama.com/install.sh | sh | |
| # Start Ollama service. | |
| ollama serve & | |
| # Wait for Ollama to be fully operational. | |
| for i in {1..30}; do | |
| if curl -s http://localhost:11434 >/dev/null; then | |
| echo "✓ Ollama is running" | |
| break | |
| fi | |
| echo "Waiting for Ollama to start..." | |
| sleep 2 | |
| done | |
| ollama pull smollm:135m | |
| # Verify model is pulled correctly. | |
| ollama list | |
| df -h . | |
| export PATH=$(pwd)/core/ten_gn:$PATH | |
| cd out/linux/${{ matrix.arch }}/ | |
| pytest -s tests/ten_runtime/integration/python/ | |
| df -h . | |
| test-integration-nodejs: | |
| needs: build | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| compiler: [gcc, clang] | |
| build_type: [debug, release] | |
| arch: [x64] | |
| container: | |
| image: ghcr.io/ten-framework/ten_building_ubuntu2204 | |
| steps: | |
| - name: Check PR status before matrix execution | |
| uses: actions/github-script@v7 | |
| with: | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| script: | | |
| console.log('Event name:', context.eventName); | |
| console.log('Matrix:', ${{ toJson(matrix) }}); | |
| // Only check for PR events | |
| if (context.eventName !== 'pull_request') { | |
| console.log('Not a PR event, continuing...'); | |
| return; | |
| } | |
| // Ensure we have PR data | |
| if (!context.payload.pull_request) { | |
| console.log('No pull_request data in payload, continuing...'); | |
| return; | |
| } | |
| const { owner, repo } = context.repo; | |
| const prNumber = context.payload.pull_request.number; | |
| console.log(`Checking PR #${prNumber} status for test-integration-nodejs matrix: compiler=${{ matrix.compiler }}, build_type=${{ matrix.build_type }}, arch=${{ matrix.arch }}...`); | |
| try { | |
| const pr = await github.rest.pulls.get({ | |
| owner, | |
| repo, | |
| pull_number: prNumber, | |
| }); | |
| console.log(`PR #${prNumber} state: ${pr.data.state}, merged: ${pr.data.merged}`); | |
| if (pr.data.state === 'closed') { | |
| if (pr.data.merged) { | |
| console.log(`PR #${prNumber} has been merged. Stopping matrix execution.`); | |
| core.setFailed('PR has been merged, stopping execution to save resources.'); | |
| } else { | |
| console.log(`PR #${prNumber} has been closed. Stopping matrix execution.`); | |
| core.setFailed('PR has been closed, stopping execution to save resources.'); | |
| } | |
| } else { | |
| console.log(`PR #${prNumber} is still open. Continuing matrix execution.`); | |
| } | |
| } catch (error) { | |
| console.error(`Error checking PR status: ${error.message}`); | |
| console.log('Error details:', error); | |
| console.log('Continuing matrix execution due to error...'); | |
| } | |
| - uses: actions/checkout@v4 | |
| with: | |
| fetch-depth: 0 | |
| submodules: false | |
| - name: Trust working directory | |
| run: git config --global --add safe.directory "${GITHUB_WORKSPACE}" | |
| - name: Initialize and update submodules except portal/ | |
| run: | | |
| # Retrieve all submodule paths, excluding `portal/`. | |
| submodules=$(git config --file .gitmodules --get-regexp path | awk '$2 != "portal" { print $2 }') | |
| git submodule init | |
| for submodule in $submodules; do | |
| echo "Initializing submodule: $submodule" | |
| git submodule update --init --recursive --depth 1 "$submodule" | |
| done | |
| - name: Download build artifacts (tar archive) | |
| uses: actions/download-artifact@v4 | |
| with: | |
| name: tests-artifacts-linux-ubuntu2204-${{ matrix.compiler }}-${{ matrix.build_type }}-${{ matrix.arch }} | |
| path: out/linux/${{ matrix.arch }} | |
| - name: Extract tests artifacts preserving permissions | |
| run: | | |
| tar -xzf out/linux/${{ matrix.arch }}/tests-artifacts.tar.gz | |
| - name: View folder structure content | |
| run: | | |
| df -h . | |
| tree -I ".*|*.h|*.hpp|*.py" out/linux/${{ matrix.arch }} | |
| - name: Set ulimit and sysctl | |
| run: | | |
| # Increase max number of open file descriptors as much as allowed. | |
| TARGET=102400 | |
| HARD=$(ulimit -Hn) | |
| echo "Current hard limit for open files: $HARD" | |
| if [ "$HARD" != "unlimited" ] && [ "$HARD" -lt "$TARGET" ]; then | |
| echo "Target ($TARGET) is greater than hard limit ($HARD), using hard limit instead." | |
| TARGET="$HARD" | |
| fi | |
| # Try to set the soft limit; if it fails, just warn and continue. | |
| if ! ulimit -n "$TARGET"; then | |
| echo "WARNING: failed to increase ulimit -n to $TARGET, continuing with existing limit." | |
| fi | |
| # Adjust somaxconn; ignore failure if not permitted. | |
| if ! sysctl -w net.core.somaxconn=8192; then | |
| echo "WARNING: failed to set net.core.somaxconn, continuing." | |
| fi | |
| shell: bash | |
| - uses: actions/setup-node@v4 | |
| with: | |
| node-version: 20 | |
| - name: Update tman config.json | |
| if: matrix.arch == 'x64' | |
| run: | | |
| CONFIG_FILE="out/linux/${{ matrix.arch }}/tests/local_registry/config.json" | |
| echo "Before update:" | |
| cat $CONFIG_FILE | |
| sed -i "s|\(file://\)[^\"]*\(out\/linux\/${{ matrix.arch }}\/tests\/local_registry\)|\1${GITHUB_WORKSPACE}/\2|" $CONFIG_FILE | |
| echo "After update:" | |
| cat $CONFIG_FILE | |
| - name: Install Python dependencies via script | |
| run: | | |
| python .github/tools/setup_pytest_dependencies.py | |
| df -h . | |
| - name: Run tests (ten_runtime Nodejs integration tests) | |
| if: matrix.arch == 'x64' | |
| env: | |
| ASAN_OPTIONS: detect_leaks=1:detect_stack_use_after_return=1:color=always:unmap_shadow_on_exit=1:abort_on_error=1 | |
| MALLOC_CHECK_: 3 | |
| TEN_ENABLE_MEMORY_TRACKING: "true" | |
| TEN_ENABLE_BACKTRACE_DUMP: "true" | |
| GOTRACEBACK: crash | |
| run: | | |
| df -h . | |
| export PATH=$(pwd)/core/ten_gn:$PATH | |
| cd out/linux/${{ matrix.arch }}/ | |
| pytest -s tests/ten_runtime/integration/nodejs/ | |
| df -h . |