# badge: https://github.com/borgbackup/borg/workflows/CI/badge.svg?branch=master

name: CI

on:
  push:
    branches: [ master ]
    tags:
      - '2.*'
  pull_request:
    branches: [ master ]
    paths:
      - '**.py'
      - '**.pyx'
      - '**.c'
      - '**.h'
      - '**.yml'
      - '**.toml'
      - '**.cfg'
      - '**.ini'
      - 'requirements.d/*'
      - '!docs/**'
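
# Group runs per workflow and branch/PR: a newer push to a pull request cancels its in-progress
# run, while runs for pushes to master and for release tags are never cancelled (they queue instead).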
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
  cancel-in-progress: ${{ github.event_name == 'pull_request' }}

permissions:
  contents: read

jobs:
  lint:

    runs-on: ubuntu-22.04
    timeout-minutes: 5

    steps:
      - uses: actions/checkout@v6
      - uses: astral-sh/ruff-action@v3
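
  # Static security scan of the Python sources with Bandit.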
  security:

    runs-on: ubuntu-24.04
    timeout-minutes: 5

    steps:
      - uses: actions/checkout@v6
      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.10'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install bandit[toml]
      - name: Run Bandit
        run: |
          bandit -r src/borg -c pyproject.toml
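
  # Build the C/Cython extensions with AddressSanitizer/UndefinedBehaviorSanitizer and run the
  # non-remote test suite against that build, so memory errors and undefined behavior fail the job.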
  asan_ubsan:

    runs-on: ubuntu-24.04
    timeout-minutes: 25
    needs: [lint]

    steps:
      - uses: actions/checkout@v6
        with:
          # Just fetching one commit is not enough for setuptools-scm, so we fetch all.
          fetch-depth: 0
          fetch-tags: true

      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.12'

      - name: Install system packages
        run: |
          sudo apt-get update
          sudo apt-get install -y pkg-config build-essential
          sudo apt-get install -y libssl-dev libacl1-dev libxxhash-dev liblz4-dev libzstd-dev

      - name: Install Python dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.d/development.txt

      - name: Build Borg with ASan/UBSan
        # Build the C/Cython extensions with AddressSanitizer and UndefinedBehaviorSanitizer enabled.
        # How this works:
        # - The -fsanitize=address,undefined flags inject runtime checks into our native code. If a bug is hit
        #   (e.g., buffer overflow, use-after-free, out-of-bounds, or undefined behavior), the sanitizer prints
        #   a detailed error report to stderr, including a stack trace, and forces the process to exit with
        #   non-zero status. In CI, this will fail the step/job so you will notice.
        # - ASAN_OPTIONS/UBSAN_OPTIONS configure the sanitizers' runtime behavior (see below for meanings).
        env:
          CFLAGS: "-O1 -g -fno-omit-frame-pointer -fsanitize=address,undefined"
          CXXFLAGS: "-O1 -g -fno-omit-frame-pointer -fsanitize=address,undefined"
          LDFLAGS: "-fsanitize=address,undefined"
          # ASAN_OPTIONS controls AddressSanitizer runtime tweaks:
          # - detect_leaks=0: Disable LeakSanitizer to avoid false positives with CPython/pymalloc in short-lived tests.
          # - strict_string_checks=1: Make invalid string operations (e.g., over-reads) more likely to be detected.
          # - check_initialization_order=1: Catch uses that depend on static initialization order (C++).
          # - detect_stack_use_after_return=1: Detect stack-use-after-return via stack poisoning (may increase overhead).
          ASAN_OPTIONS: "detect_leaks=0:strict_string_checks=1:check_initialization_order=1:detect_stack_use_after_return=1"
          # UBSAN_OPTIONS controls UndefinedBehaviorSanitizer runtime:
          # - print_stacktrace=1: Include a stack trace for UB reports to ease debugging.
          # Note: UBSan is recoverable by default (process may continue after reporting). If you want CI to
          #   abort immediately and fail on the first UB, add `halt_on_error=1` (e.g., UBSAN_OPTIONS="print_stacktrace=1:halt_on_error=1").
          UBSAN_OPTIONS: "print_stacktrace=1"
          # PYTHONDEVMODE enables additional Python runtime checks and warnings.
          PYTHONDEVMODE: "1"
        run: pip install -e .
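
      # Rough sketch for reproducing the sanitizer run locally (not part of CI; assumes gcc and the
      # development requirements are installed):
      #   CFLAGS="-O1 -g -fno-omit-frame-pointer -fsanitize=address,undefined" \
      #     LDFLAGS="-fsanitize=address,undefined" pip install -e .
      #   LD_PRELOAD="$(gcc -print-file-name=libasan.so):$(gcc -print-file-name=libubsan.so)" \
      #     pytest -v --benchmark-skip -k "not remote"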
      - name: Run tests under sanitizers
        env:
          ASAN_OPTIONS: "detect_leaks=0:strict_string_checks=1:check_initialization_order=1:detect_stack_use_after_return=1"
          UBSAN_OPTIONS: "print_stacktrace=1"
          PYTHONDEVMODE: "1"
        # Ensure the ASan runtime is loaded first to avoid "ASan runtime does not come first" warnings.
        # We discover libasan/libubsan paths via gcc and preload them for the Python test process.
        # The remote tests are slow and unlikely to find anything useful here, so we skip them.
        run: |
          set -euo pipefail
          export LD_PRELOAD="$(gcc -print-file-name=libasan.so):$(gcc -print-file-name=libubsan.so)"
          echo "Using LD_PRELOAD=$LD_PRELOAD"
          pytest -v --benchmark-skip -k "not remote"
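
  # Main test matrix: run tox environments on Linux and macOS; on tag builds, matrix entries with a
  # "binary" key additionally build, attest and upload standalone PyInstaller binaries.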
  native_tests:

    needs: [lint]
    permissions:
      contents: read
      id-token: write
      attestations: write
    strategy:
      fail-fast: true
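      # The matrix is a JSON string chosen at runtime via fromJSON: pull requests run a reduced set
      # of environments, while pushes to master and tag builds run the full set; entries that define
      # a "binary" key also produce standalone binaries on tag builds (see the build steps below).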
      # noinspection YAMLSchemaValidation
      matrix: >-
        ${{ fromJSON(
          github.event_name == 'pull_request' && '{
            "include": [
              {"os": "ubuntu-22.04", "python-version": "3.10", "toxenv": "mypy"},
              {"os": "ubuntu-22.04", "python-version": "3.11", "toxenv": "docs"},
              {"os": "ubuntu-22.04", "python-version": "3.10", "toxenv": "py310-llfuse"},
              {"os": "ubuntu-24.04", "python-version": "3.14", "toxenv": "py314-mfusepy"}
            ]
          }' || '{
            "include": [
              {"os": "ubuntu-22.04", "python-version": "3.11", "toxenv": "py311-pyfuse3", "binary": "borg-linux-glibc235-x86_64-gh"},
              {"os": "ubuntu-22.04-arm", "python-version": "3.11", "toxenv": "py311-pyfuse3", "binary": "borg-linux-glibc235-arm64-gh"},
              {"os": "ubuntu-24.04", "python-version": "3.12", "toxenv": "py312-llfuse"},
              {"os": "ubuntu-24.04", "python-version": "3.13", "toxenv": "py313-pyfuse3"},
              {"os": "ubuntu-24.04", "python-version": "3.14", "toxenv": "py314-mfusepy"},
              {"os": "macos-15-intel", "python-version": "3.11", "toxenv": "py311-none", "binary": "borg-macos-15-x86_64-gh"},
              {"os": "macos-15", "python-version": "3.11", "toxenv": "py311-none", "binary": "borg-macos-15-arm64-gh"}
            ]
          }'
        ) }}
    env:
      TOXENV: ${{ matrix.toxenv }}

    runs-on: ${{ matrix.os }}
    # macOS runners can be slow if overloaded.
    timeout-minutes: 360

    steps:
      - uses: actions/checkout@v6
        with:
          # Just fetching one commit is not enough for setuptools-scm, so we fetch all.
          fetch-depth: 0
          fetch-tags: true

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}

      - name: Cache pip
        uses: actions/cache@v5
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-${{ runner.arch }}-pip-${{ hashFiles('requirements.d/development.txt') }}
          restore-keys: |
            ${{ runner.os }}-${{ runner.arch }}-pip-
            ${{ runner.os }}-${{ runner.arch }}-

      - name: Cache tox environments
        uses: actions/cache@v5
        with:
          path: .tox
          key: ${{ runner.os }}-${{ runner.arch }}-tox-${{ matrix.toxenv }}-${{ hashFiles('requirements.d/development.txt', 'pyproject.toml') }}
          restore-keys: |
            ${{ runner.os }}-${{ runner.arch }}-tox-${{ matrix.toxenv }}-
            ${{ runner.os }}-${{ runner.arch }}-tox-

      - name: Install Linux packages
        if: ${{ runner.os == 'Linux' }}
        run: |
          sudo apt-get update
          sudo apt-get install -y pkg-config build-essential
          sudo apt-get install -y libssl-dev libacl1-dev libxxhash-dev liblz4-dev libzstd-dev
          sudo apt-get install -y bash zsh fish # for shell completion tests
          sudo apt-get install -y rclone openssh-server curl
          if [[ "$TOXENV" == *"llfuse"* ]]; then
            sudo apt-get install -y libfuse-dev fuse # Required for Python llfuse module
          elif [[ "$TOXENV" == *"pyfuse3"* || "$TOXENV" == *"mfusepy"* ]]; then
            sudo apt-get install -y libfuse3-dev fuse3 # Required for Python pyfuse3 / mfusepy modules
          fi

      - name: Install macOS packages
        if: ${{ runner.os == 'macOS' }}
        run: |
          brew unlink pkg-config@0.29.2 || true
          brew bundle install

      - name: Configure OpenSSH SFTP server (test only)
        if: ${{ runner.os == 'Linux' && !contains(matrix.toxenv, 'mypy') && !contains(matrix.toxenv, 'docs') }}
        run: |
          sudo mkdir -p /run/sshd
          sudo useradd -m -s /bin/bash sftpuser || true
          # Create SSH key for the CI user and authorize it for sftpuser
          mkdir -p ~/.ssh
          chmod 700 ~/.ssh
          test -f ~/.ssh/id_ed25519 || ssh-keygen -t ed25519 -N '' -f ~/.ssh/id_ed25519
          sudo mkdir -p /home/sftpuser/.ssh
          sudo chmod 700 /home/sftpuser/.ssh
          sudo cp ~/.ssh/id_ed25519.pub /home/sftpuser/.ssh/authorized_keys
          sudo chown -R sftpuser:sftpuser /home/sftpuser/.ssh
          sudo chmod 600 /home/sftpuser/.ssh/authorized_keys
          # Allow publickey auth and enable Subsystem sftp
          sudo sed -i 's/^#\?PasswordAuthentication .*/PasswordAuthentication no/' /etc/ssh/sshd_config
          sudo sed -i 's/^#\?PubkeyAuthentication .*/PubkeyAuthentication yes/' /etc/ssh/sshd_config
          if ! grep -q '^Subsystem sftp' /etc/ssh/sshd_config; then echo 'Subsystem sftp /usr/lib/openssh/sftp-server' | sudo tee -a /etc/ssh/sshd_config; fi
          # Ensure host keys exist to avoid slow generation on first sshd start
          sudo ssh-keygen -A
          # Start sshd (listen on default 22 inside runner)
          sudo /usr/sbin/sshd -D &
          # Add host key to known_hosts so paramiko trusts it
          ssh-keyscan -H localhost 127.0.0.1 | tee -a ~/.ssh/known_hosts
          # Start ssh-agent and add our key so paramiko can use the agent
          eval "$(ssh-agent -s)"
          ssh-add ~/.ssh/id_ed25519
          # Export SFTP test URL for tox via GITHUB_ENV
          echo "BORG_TEST_SFTP_REPO=sftp://sftpuser@localhost:22/borg/sftp-repo" >> $GITHUB_ENV

      - name: Install and configure MinIO S3 server (test only)
        if: ${{ runner.os == 'Linux' && !contains(matrix.toxenv, 'mypy') && !contains(matrix.toxenv, 'docs') }}
        run: |
          set -e
          arch=$(uname -m)
          case "$arch" in
            x86_64|amd64) srv_url=https://dl.min.io/server/minio/release/linux-amd64/minio; cli_url=https://dl.min.io/client/mc/release/linux-amd64/mc ;;
            aarch64|arm64) srv_url=https://dl.min.io/server/minio/release/linux-arm64/minio; cli_url=https://dl.min.io/client/mc/release/linux-arm64/mc ;;
            *) echo "Unsupported arch: $arch"; exit 1 ;;
          esac
          curl -fsSL -o /usr/local/bin/minio "$srv_url"
          curl -fsSL -o /usr/local/bin/mc "$cli_url"
          sudo chmod +x /usr/local/bin/minio /usr/local/bin/mc
          export PATH=/usr/local/bin:$PATH
          # Start MinIO on :9000 with default credentials (minioadmin/minioadmin)
          MINIO_DIR="$GITHUB_WORKSPACE/.minio-data"
          MINIO_LOG="$GITHUB_WORKSPACE/.minio.log"
          mkdir -p "$MINIO_DIR"
          nohup minio server "$MINIO_DIR" --address ":9000" >"$MINIO_LOG" 2>&1 &
          # Wait for MinIO port to be ready
          for i in $(seq 1 60); do (echo > /dev/tcp/127.0.0.1/9000) >/dev/null 2>&1 && break; sleep 1; done
          # Configure client and create bucket
          mc alias set local http://127.0.0.1:9000 minioadmin minioadmin
          mc mb --ignore-existing local/borg
          # Export S3 test URL for tox via GITHUB_ENV
          echo "BORG_TEST_S3_REPO=s3:minioadmin:minioadmin@http://127.0.0.1:9000/borg/s3-repo" >> $GITHUB_ENV

      - name: Install Python requirements
        run: |
          python -m pip install --upgrade pip setuptools wheel
          pip install -r requirements.d/development.txt

      - name: Install borgbackup
        run: |
          if [[ "$TOXENV" == *"llfuse"* ]]; then
            pip install -ve ".[llfuse,cockpit,s3,sftp]"
          elif [[ "$TOXENV" == *"pyfuse3"* ]]; then
            pip install -ve ".[pyfuse3,cockpit,s3,sftp]"
          elif [[ "$TOXENV" == *"mfusepy"* ]]; then
            pip install -ve ".[mfusepy,cockpit,s3,sftp]"
          else
            pip install -ve ".[cockpit,s3,sftp]"
          fi
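
      # The next five steps only run for release tags and for matrix entries that define "binary":
      # they build the PyInstaller binaries (scripts/borg.exe.spec yields both a single-file borg.exe
      # and a borg-dir/ one-dir build), smoke-test them, attest their provenance and upload them.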
      - name: Build Borg fat binaries (${{ matrix.binary }})
        if: ${{ matrix.binary && startsWith(github.ref, 'refs/tags/') }}
        run: |
          pip install -r requirements.d/pyinstaller.txt
          mkdir -p dist/binary
          pyinstaller --clean --distpath=dist/binary scripts/borg.exe.spec

      - name: Smoke-test the built binary (${{ matrix.binary }})
        if: ${{ matrix.binary && startsWith(github.ref, 'refs/tags/') }}
        run: |
          pushd dist/binary
          echo "single-file binary"
          chmod +x borg.exe
          ./borg.exe -V
          echo "single-directory binary"
          chmod +x borg-dir/borg.exe
          ./borg-dir/borg.exe -V
          tar czf borg.tgz borg-dir
          popd
          # Ensure locally built binary in ./dist/binary/borg-dir is found during tests
          export PATH="$GITHUB_WORKSPACE/dist/binary/borg-dir:$PATH"
          echo "borg.exe binary in PATH"
          borg.exe -V

      - name: Prepare binaries (${{ matrix.binary }})
        if: ${{ matrix.binary && startsWith(github.ref, 'refs/tags/') }}
        run: |
          mkdir -p artifacts
          if [ -f dist/binary/borg.exe ]; then
            cp dist/binary/borg.exe artifacts/${{ matrix.binary }}
          fi
          if [ -f dist/binary/borg.tgz ]; then
            cp dist/binary/borg.tgz artifacts/${{ matrix.binary }}.tgz
          fi
          echo "binary files"
          ls -l artifacts/

      - name: Attest binaries provenance (${{ matrix.binary }})
        if: ${{ matrix.binary && startsWith(github.ref, 'refs/tags/') }}
        uses: actions/attest-build-provenance@v3
        with:
          subject-path: 'artifacts/*'

      - name: Upload binaries (${{ matrix.binary }})
        if: ${{ matrix.binary && startsWith(github.ref, 'refs/tags/') }}
        uses: actions/upload-artifact@v6
        with:
          name: ${{ matrix.binary }}
          path: artifacts/*
          if-no-files-found: error

      - name: run tox env
        run: |
          # Do not use fakeroot, but run as root: this avoids the dreaded sporadic EISDIR failures, see #2482.
          #sudo -E bash -c "tox -e py"
          # Ensure locally built binary in ./dist/binary/borg-dir is found during tests
          export PATH="$GITHUB_WORKSPACE/dist/binary/borg-dir:$PATH"
          tox --skip-missing-interpreters -- --junitxml=test-results.xml

      - name: Upload test results to Codecov
        if: ${{ !cancelled() && !contains(matrix.toxenv, 'mypy') && !contains(matrix.toxenv, 'docs') }}
        uses: codecov/codecov-action@v5
        env:
          OS: ${{ runner.os }}
          python: ${{ matrix.python-version }}
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          report_type: test_results
          env_vars: OS,python
          files: test-results.xml

      - name: Upload coverage to Codecov
        if: ${{ !cancelled() && !contains(matrix.toxenv, 'mypy') && !contains(matrix.toxenv, 'docs') }}
        uses: codecov/codecov-action@v5
        env:
          OS: ${{ runner.os }}
          python: ${{ matrix.python-version }}
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          report_type: coverage
          env_vars: OS,python
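
  # Run the test suite inside full VMs (FreeBSD, NetBSD, OpenBSD, OmniOS, Haiku) via
  # cross-platform-actions; continue-on-error keeps these platforms from blocking the workflow.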
  vm_tests:
    permissions:
      contents: read
      id-token: write
      attestations: write
    runs-on: ubuntu-24.04
    timeout-minutes: 90
    needs: [lint]
    continue-on-error: true

    strategy:
      fail-fast: false
      matrix:
        include:
          - os: freebsd
            version: '14.3'
            display_name: FreeBSD
            # Controls binary build and provenance attestation on tags
            do_binaries: true
            artifact_prefix: borg-freebsd-14-x86_64-gh

          - os: netbsd
            version: '10.1'
            display_name: NetBSD
            do_binaries: false

          - os: openbsd
            version: '7.7'
            display_name: OpenBSD
            do_binaries: false

          - os: omnios
            version: 'r151056'
            display_name: OmniOS
            do_binaries: false

          - os: haiku
            version: 'r1beta5'
            display_name: Haiku
            do_binaries: false

    steps:
      - name: Check out repository
        uses: actions/checkout@v6
        with:
          fetch-depth: 0
          fetch-tags: true

      - name: Test on ${{ matrix.display_name }}
        id: cross_os
        uses: cross-platform-actions/action@v0.32.0
        env:
          DO_BINARIES: ${{ matrix.do_binaries }}
        with:
          operating_system: ${{ matrix.os }}
          version: ${{ matrix.version }}
          shell: bash
          run: |
            set -euxo pipefail

            case "${{ matrix.os }}" in
              freebsd)
                export IGNORE_OSVERSION=yes
                sudo -E pkg update -f
                sudo -E pkg install -y xxhash liblz4 zstd pkgconf
                sudo -E pkg install -y fusefs-libs
                sudo -E kldload fusefs
                sudo -E sysctl vfs.usermount=1
                sudo -E chmod 666 /dev/fuse
                sudo -E pkg install -y rust
                sudo -E pkg install -y gmake
                sudo -E pkg install -y git
                sudo -E pkg install -y python310 py310-sqlite3
                sudo -E pkg install -y python311 py311-sqlite3 py311-pip py311-virtualenv
                sudo ln -sf /usr/local/bin/python3.11 /usr/local/bin/python3
                sudo ln -sf /usr/local/bin/python3.11 /usr/local/bin/python
                sudo ln -sf /usr/local/bin/pip3.11 /usr/local/bin/pip3
                sudo ln -sf /usr/local/bin/pip3.11 /usr/local/bin/pip

                # required for libsodium/pynacl build
                export MAKE=gmake

                python -m venv .venv
                . .venv/bin/activate
                python -V
                pip -V
                python -m pip install --upgrade pip wheel
                pip install -r requirements.d/development.txt
                pip install -e ".[mfusepy,cockpit,s3,sftp]"
                tox -e py311-mfusepy

                if [[ "${{ matrix.do_binaries }}" == "true" && "${{ startsWith(github.ref, 'refs/tags/') }}" == "true" ]]; then
                  python -m pip install -r requirements.d/pyinstaller.txt
                  mkdir -p dist/binary
                  pyinstaller --clean --distpath=dist/binary scripts/borg.exe.spec
                  pushd dist/binary
                  echo "single-file binary"
                  chmod +x borg.exe
                  ./borg.exe -V
                  echo "single-directory binary"
                  chmod +x borg-dir/borg.exe
                  ./borg-dir/borg.exe -V
                  tar czf borg.tgz borg-dir
                  popd
                  mkdir -p artifacts
                  if [ -f dist/binary/borg.exe ]; then
                    cp -v dist/binary/borg.exe artifacts/${{ matrix.artifact_prefix }}
                  fi
                  if [ -f dist/binary/borg.tgz ]; then
                    cp -v dist/binary/borg.tgz artifacts/${{ matrix.artifact_prefix }}.tgz
                  fi
                fi
                ;;

              netbsd)
                arch="$(uname -m)"
                sudo -E mkdir -p /usr/pkg/etc/pkgin
                echo "https://ftp.NetBSD.org/pub/pkgsrc/packages/NetBSD/${arch}/10.1/All" | sudo tee /usr/pkg/etc/pkgin/repositories.conf > /dev/null
                sudo -E pkgin update
                sudo -E pkgin -y upgrade
                sudo -E pkgin -y install zstd lz4 xxhash git
                sudo -E pkgin -y install rust
                sudo -E pkgin -y install pkg-config
                sudo -E pkgin -y install py311-pip py311-virtualenv py311-tox
                sudo -E ln -sf /usr/pkg/bin/python3.11 /usr/pkg/bin/python3
                sudo -E ln -sf /usr/pkg/bin/pip3.11 /usr/pkg/bin/pip3
                sudo -E ln -sf /usr/pkg/bin/virtualenv-3.11 /usr/pkg/bin/virtualenv3
                sudo -E ln -sf /usr/pkg/bin/tox-3.11 /usr/pkg/bin/tox3

                # Ensure base system admin tools are on PATH for the non-root shell
                export PATH="/sbin:/usr/sbin:$PATH"

                echo "--- Preparing an extattr-enabled filesystem ---"
                # On many NetBSD setups /tmp is tmpfs without extended attributes.
                # Create a FFS image with extended attributes enabled and use it for TMPDIR.
                VNDDEV="vnd0"
                IMGFILE="/tmp/fs.img"
                sudo -E dd if=/dev/zero of=${IMGFILE} bs=1m count=1024
                sudo -E vndconfig -c "${VNDDEV}" "${IMGFILE}"
                sudo -E newfs -O 2ea /dev/r${VNDDEV}a
                MNT="/mnt/eafs"
                sudo -E mkdir -p ${MNT}
                sudo -E mount -t ffs -o extattr /dev/${VNDDEV}a $MNT
                export TMPDIR="${MNT}/tmp"
                sudo -E mkdir -p ${TMPDIR}
                sudo -E chmod 1777 ${TMPDIR}

                touch ${TMPDIR}/testfile
                lsextattr user ${TMPDIR}/testfile && echo "[xattr] *** xattrs SUPPORTED on ${TMPDIR}! ***"

                tox3 -e py311-none
                ;;

              openbsd)
                sudo -E pkg_add xxhash lz4 zstd git
                sudo -E pkg_add rust
                sudo -E pkg_add openssl%3.4
                sudo -E pkg_add py3-pip py3-virtualenv py3-tox
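
                # pkg_add installs OpenSSL 3.4 under the "eopenssl34" name on OpenBSD; the export
                # below points the borg build at that flavor.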
                export BORG_OPENSSL_NAME=eopenssl34
                tox -e py312-none
                ;;

              omnios)
                sudo pkg install gcc14 git pkg-config python-313 gnu-make gnu-coreutils
                sudo ln -sf /usr/bin/python3.13 /usr/bin/python3
                sudo ln -sf /usr/bin/python3.13-config /usr/bin/python3-config
                sudo python3 -m ensurepip
                sudo python3 -m pip install virtualenv

                # install libxxhash from source
                git clone --depth 1 https://github.com/Cyan4973/xxHash.git
                cd xxHash
                sudo gmake install INSTALL=/usr/gnu/bin/install PREFIX=/usr/local
                cd ..
                export PKG_CONFIG_PATH="/usr/local/lib/pkgconfig:${PKG_CONFIG_PATH:-}"
                export LD_LIBRARY_PATH="/usr/local/lib:${LD_LIBRARY_PATH:-}"

                python3 -m venv .venv
                . .venv/bin/activate
                python -V
                pip -V
                python -m pip install --upgrade pip wheel
                pip install -r requirements.d/development.txt
                # usually no FUSE support on OmniOS in our tests, so install without fuse extras
                pip install -e .

                tox -e py313-none
                ;;

              haiku)
                pkgman refresh
                pkgman install -y git pkgconfig zstd lz4 xxhash
                pkgman install -y openssl3
                pkgman install -y rust_bin
                pkgman install -y python3.10
                pkgman install -y cffi
                pkgman install -y lz4_devel zstd_devel xxhash_devel openssl3_devel libffi_devel

                # there is no pkgman package for tox, so we install it into a venv
                python3 -m ensurepip --upgrade
                python3 -m pip install --upgrade pip wheel
                python3 -m venv .venv
                . .venv/bin/activate
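
                # Haiku keeps development headers and libraries under /system/develop, so point
                # pkg-config and the BORG_*_PREFIX variables there for the build.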
                export PKG_CONFIG_PATH="/system/develop/lib/pkgconfig:/system/lib/pkgconfig:${PKG_CONFIG_PATH:-}"
                export BORG_LIBLZ4_PREFIX=/system/develop
                export BORG_LIBZSTD_PREFIX=/system/develop
                export BORG_LIBXXHASH_PREFIX=/system/develop
                export BORG_OPENSSL_PREFIX=/system/develop
                pip install -r requirements.d/development.txt
                pip install -e .

                # troubles with either tox or pytest xdist, so we run pytest manually:
                pytest -v -rs --benchmark-skip -k "not remote and not socket"
                ;;
            esac

      - name: Upload artifacts
        if: startsWith(github.ref, 'refs/tags/') && matrix.do_binaries
        uses: actions/upload-artifact@v6
        with:
          name: ${{ matrix.artifact_prefix }}
          path: artifacts/*
          if-no-files-found: ignore

      - name: Attest provenance
        if: startsWith(github.ref, 'refs/tags/') && matrix.do_binaries
        uses: actions/attest-build-provenance@v3
        with:
          subject-path: 'artifacts/*'
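
  # Build borg.exe with PyInstaller under MSYS2 (UCRT64) and run the non-remote tests on Windows.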
  windows_tests:

    if: true # can be used to temporarily disable the build
    runs-on: windows-latest
    timeout-minutes: 90
    needs: [lint]

    env:
      PY_COLORS: 1

    defaults:
      run:
        shell: msys2 {0}

    steps:
      - uses: actions/checkout@v6
        with:
          fetch-depth: 0

      - uses: msys2/setup-msys2@v2
        with:
          msystem: UCRT64
          update: true

      - name: Install system packages
        run: ./scripts/msys2-install-deps development

      - name: Build python venv
        run: |
          # building cffi / argon2-cffi in the venv fails, so we try to use the system packages
          python -m venv --system-site-packages env
          . env/bin/activate
          # python -m pip install --upgrade pip
          # pip install --upgrade setuptools build wheel
          pip install -r requirements.d/pyinstaller.txt

      - name: Build
        run: |
          # build borg.exe
          . env/bin/activate
          pip install -e ".[cockpit,s3,sftp]"
          mkdir -p dist/binary
          pyinstaller -y --clean --distpath=dist/binary scripts/borg.exe.spec
          # build sdist and wheel in dist/...
          python -m build

      - uses: actions/upload-artifact@v6
        with:
          name: borg-windows
          path: dist/binary/borg.exe

      - name: Run tests
        run: |
          # Ensure locally built binary in ./dist/binary/borg-dir is found during tests
          export PATH="$GITHUB_WORKSPACE/dist/binary/borg-dir:$PATH"
          borg.exe -V
          . env/bin/activate
          python -m pytest -n4 --benchmark-skip -vv -rs -k "not remote" --junitxml=test-results.xml

      - name: Upload test results to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/codecov-action@v5
        env:
          OS: ${{ runner.os }}
          python: '3.11'
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          report_type: test_results
          env_vars: OS,python
          files: test-results.xml

      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v5
        env:
          OS: ${{ runner.os }}
          python: '3.11'
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          report_type: coverage
          env_vars: OS,python