Compare commits

..

5 Commits

Author SHA1 Message Date
fanquake
c255fcb825 Merge bitcoin/bitcoin#29580: [27.x] Bump version to v27.0rc1
b6ff9c530d doc: point release-notes.md to the dev wiki (fanquake)
8f1b7e5cf9 doc: generate example bitcoin.conf for v27.0rc1 (fanquake)
b4fd211d2c doc: generate manual pages for v27.0rc1 (fanquake)
7589ce3997 build: bump version to v27.0rc1 (fanquake)

Pull request description:

  Bump the version number.
  Generate the man pages.
  Generate example bitcoin.conf.
  Point release-notes.md to the wiki: https://github.com/bitcoin-core/bitcoin-devwiki/wiki/27.0-Release-Notes-Draft.
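
  As a rough sketch of how such artifacts are typically regenerated locally (the helper paths and flags below are assumptions, not taken from this PR):

  ```sh
  # Hedged sketch only; the exact steps used for this release are not shown here.
  # 1) Bump the CLIENT_VERSION_* / RC fields in configure.ac, then rebuild:
  ./autogen.sh && ./configure && make -j"$(nproc)"
  # 2) Regenerate the manual pages from the built binaries (assumed helper path):
  ./contrib/devtools/gen-manpages.py
  # 3) Regenerate the example bitcoin.conf (assumed helper path):
  ./contrib/devtools/gen-bitcoin-conf.sh
  ```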

ACKs for top commit:
  achow101:
    ACK b6ff9c530d
  willcl-ark:
    ACK b6ff9c530d

Tree-SHA512: 914d95f18cb6393db17ed590f62e47f06d5baad0672f8674d6e71b38b9e54c8f9d9aec4631c66c9050e9da9c2a98d3120b6d7fb155ade7d4e5eb4a17fa7ef847
2024-03-06 17:16:36 +00:00
fanquake
b6ff9c530d doc: point release-notes.md to the dev wiki
See
https://github.com/bitcoin-core/bitcoin-devwiki/wiki/27.0-Release-Notes-Draft.
2024-03-06 14:55:17 +00:00
fanquake
8f1b7e5cf9 doc: generate example bitcoin.conf for v27.0rc1 2024-03-06 14:53:06 +00:00
fanquake
b4fd211d2c doc: generate manual pages for v27.0rc1 2024-03-06 14:52:41 +00:00
fanquake
7589ce3997 build: bump version to v27.0rc1 2024-03-06 14:50:10 +00:00
1323 changed files with 52160 additions and 66777 deletions

209
.cirrus.yml Normal file
View File

@@ -0,0 +1,209 @@
env: # Global defaults
CIRRUS_CLONE_DEPTH: 1
PACKAGE_MANAGER_INSTALL: "apt-get update && apt-get install -y"
MAKEJOBS: "-j10"
TEST_RUNNER_PORT_MIN: "14000" # Must be larger than 12321, which is used for the http cache. See https://cirrus-ci.org/guide/writing-tasks/#http-cache
CI_FAILFAST_TEST_LEAVE_DANGLING: "1" # Cirrus CI does not care about dangling processes and setting this variable avoids killing the CI script itself on error
CCACHE_MAXSIZE: "200M"
CCACHE_DIR: "/tmp/ccache_dir"
CCACHE_NOHASHDIR: "1" # Debug info might contain a stale path if the build dir changes, but this is fine
# https://cirrus-ci.org/guide/persistent-workers/
#
# It is possible to select a specific persistent worker by label. Refer to the
# Cirrus CI docs for more details.
#
# Generally, a persistent worker must run Ubuntu 23.04+ or Debian 12+.
# Specifically,
# - apt-get is required due to PACKAGE_MANAGER_INSTALL
# - podman-docker-4.1+ is required due to the use of `podman` when
# RESTART_CI_DOCKER_BEFORE_RUN is set and 4.1+ due to the bugfix in 4.1
# (https://github.com/bitcoin/bitcoin/pull/21652#issuecomment-1657098200)
# - The ./ci/ dependencies (with cirrus-cli) should be installed:
#
# ```
# apt update && apt install git screen python3 bash podman-docker curl -y && curl -L -o cirrus "https://github.com/cirruslabs/cirrus-cli/releases/latest/download/cirrus-linux-$(dpkg --print-architecture)" && mv cirrus /usr/local/bin/cirrus && chmod +x /usr/local/bin/cirrus
# ```
#
# - There are no strict requirements on the hardware, because having fewer CPUs
# runs the same CI script (just more slowly). To avoid rare and intermittent OOM
# due to short memory usage spikes, it is recommended to add (and persist)
# swap:
#
# ```
# fallocate -l 16G /swapfile_ci && chmod 600 /swapfile_ci && mkswap /swapfile_ci && swapon /swapfile_ci && ( echo '/swapfile_ci none swap sw 0 0' | tee -a /etc/fstab )
# ```
#
# - To register the persistent worker, open a `screen` session and run:
#
# ```
# RESTART_CI_DOCKER_BEFORE_RUN=1 screen cirrus worker run --labels type=todo_fill_in_type --token todo_fill_in_token
# ```
#
# The following specific types should exist, with the following requirements:
# - small: For an x86_64 machine, recommended to have 2 CPUs and 8 GB of memory.
# - medium: For an x86_64 machine, recommended to have 4 CPUs and 16 GB of memory.
# - noble: For a machine running exactly the Linux kernel shipped with Ubuntu Noble 24.04. The machine is recommended to have 4 CPUs and 16 GB of memory.
# - arm64: For an aarch64 machine, recommended to have 2 CPUs and 8 GB of memory.
# https://cirrus-ci.org/guide/tips-and-tricks/#sharing-configuration-between-tasks
filter_template: &FILTER_TEMPLATE
skip: $CIRRUS_REPO_FULL_NAME == "bitcoin-core/gui" && $CIRRUS_PR == "" # No need to run on the read-only mirror, unless it is a PR. https://cirrus-ci.org/guide/writing-tasks/#conditional-task-execution
stateful: false # https://cirrus-ci.org/guide/writing-tasks/#stateful-tasks
base_template: &BASE_TEMPLATE
<< : *FILTER_TEMPLATE
merge_base_script:
# Unconditionally install git (used in fingerprint_script).
- git --version || bash -c "$PACKAGE_MANAGER_INSTALL git"
- if [ "$CIRRUS_PR" = "" ]; then exit 0; fi
- git fetch --depth=1 $CIRRUS_REPO_CLONE_URL "pull/${CIRRUS_PR}/merge"
- git checkout FETCH_HEAD # Use merged changes to detect silent merge conflicts
# Also, the merge commit is used to lint COMMIT_RANGE="HEAD~..HEAD"
main_template: &MAIN_TEMPLATE
timeout_in: 120m # https://cirrus-ci.org/faq/#instance-timed-out
ci_script:
- ./ci/test_run_all.sh
global_task_template: &GLOBAL_TASK_TEMPLATE
<< : *BASE_TEMPLATE
<< : *MAIN_TEMPLATE
compute_credits_template: &CREDITS_TEMPLATE
# https://cirrus-ci.org/pricing/#compute-credits
# Only use credits for pull requests to the main repo
use_compute_credits: $CIRRUS_REPO_FULL_NAME == 'bitcoin/bitcoin' && $CIRRUS_PR != ""
task:
name: 'lint'
<< : *BASE_TEMPLATE
container:
image: debian:bookworm
cpu: 1
memory: 1G
# For faster CI feedback, immediately schedule the linters
<< : *CREDITS_TEMPLATE
test_runner_cache:
folder: "/lint_test_runner"
fingerprint_script: echo $CIRRUS_TASK_NAME $(git rev-parse HEAD:test/lint/test_runner)
python_cache:
folder: "/python_build"
fingerprint_script: cat .python-version /etc/os-release
unshallow_script:
- git fetch --unshallow --no-tags
lint_script:
- ./ci/lint_run_all.sh
task:
name: 'tidy'
<< : *GLOBAL_TASK_TEMPLATE
persistent_worker:
labels:
type: medium
env:
FILE_ENV: "./ci/test/00_setup_env_native_tidy.sh"
task:
name: 'ARM, unit tests, no functional tests'
<< : *GLOBAL_TASK_TEMPLATE
persistent_worker:
labels:
type: arm64 # Use arm64 worker to sidestep qemu and avoid a slow CI: https://github.com/bitcoin/bitcoin/pull/28087#issuecomment-1649399453
env:
FILE_ENV: "./ci/test/00_setup_env_arm.sh"
task:
name: 'Win64, unit tests, no gui tests, no functional tests'
<< : *GLOBAL_TASK_TEMPLATE
persistent_worker:
labels:
type: small
env:
FILE_ENV: "./ci/test/00_setup_env_win64.sh"
task:
name: '32-bit CentOS, dash, gui'
<< : *GLOBAL_TASK_TEMPLATE
persistent_worker:
labels:
type: small
env:
FILE_ENV: "./ci/test/00_setup_env_i686_centos.sh"
task:
name: 'previous releases, depends DEBUG'
<< : *GLOBAL_TASK_TEMPLATE
persistent_worker:
labels:
type: small
env:
FILE_ENV: "./ci/test/00_setup_env_native_previous_releases.sh"
task:
name: 'TSan, depends, gui'
<< : *GLOBAL_TASK_TEMPLATE
persistent_worker:
labels:
type: medium
env:
FILE_ENV: "./ci/test/00_setup_env_native_tsan.sh"
task:
name: 'MSan, depends'
<< : *GLOBAL_TASK_TEMPLATE
persistent_worker:
labels:
type: small
timeout_in: 300m # Use longer timeout for the *rare* case where a full build (llvm + msan + depends + ...) needs to be done.
env:
FILE_ENV: "./ci/test/00_setup_env_native_msan.sh"
task:
name: 'ASan + LSan + UBSan + integer, no depends, USDT'
enable_bpfcc_script:
# In the image build step, no external environment variables are available,
# so any settings will need to be written to the settings env file:
- sed -i "s|\${CIRRUS_CI}|true|g" ./ci/test/00_setup_env_native_asan.sh
<< : *GLOBAL_TASK_TEMPLATE
persistent_worker:
labels:
type: noble # Must use this specific worker (needed for USDT functional tests)
env:
FILE_ENV: "./ci/test/00_setup_env_native_asan.sh"
task:
name: 'fuzzer,address,undefined,integer, no depends'
<< : *GLOBAL_TASK_TEMPLATE
persistent_worker:
labels:
type: medium
env:
FILE_ENV: "./ci/test/00_setup_env_native_fuzz.sh"
task:
name: 'multiprocess, i686, DEBUG'
<< : *GLOBAL_TASK_TEMPLATE
persistent_worker:
labels:
type: medium
env:
FILE_ENV: "./ci/test/00_setup_env_i686_multiprocess.sh"
task:
name: 'no wallet, libbitcoinkernel'
<< : *GLOBAL_TASK_TEMPLATE
persistent_worker:
labels:
type: small
env:
FILE_ENV: "./ci/test/00_setup_env_native_nowallet_libbitcoinkernel.sh"
task:
name: 'macOS-cross, gui, no tests'
<< : *GLOBAL_TASK_TEMPLATE
persistent_worker:
labels:
type: small
env:
FILE_ENV: "./ci/test/00_setup_env_mac_cross.sh"

View File

@@ -1,52 +0,0 @@
name: 'Configure Docker'
description: 'Set up Docker build driver and configure build cache args'
inputs:
use-cirrus:
description: 'Use cirrus cache'
required: true
runs:
using: 'composite'
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
# Use host network to allow access to cirrus gha cache running on the host
driver-opts: |
network=host
# This is required to allow buildkit to access the actions cache
- name: Expose actions cache variables
uses: actions/github-script@v6
with:
script: |
core.exportVariable('ACTIONS_CACHE_URL', process.env['ACTIONS_CACHE_URL'])
core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env['ACTIONS_RUNTIME_TOKEN'])
- name: Construct docker build cache args
shell: bash
run: |
# Configure docker build cache backend
#
# On forks the gha cache will work but will use Github's cache backend.
# Docker will check for variables $ACTIONS_CACHE_URL, $ACTIONS_RESULTS_URL and $ACTIONS_RUNTIME_TOKEN
# which are set automatically when running on GitHub infra: https://docs.docker.com/build/cache/backends/gha/#synopsis
# Use cirrus cache host
if [[ ${{ inputs.use-cirrus }} == 'true' ]]; then
url_args="url=${CIRRUS_CACHE_HOST},url_v2=${CIRRUS_CACHE_HOST}"
else
url_args=""
fi
# Always optimistically pass --cache-from in case a cache blob exists
args=(--cache-from "type=gha${url_args:+,${url_args}},scope=${CONTAINER_NAME}")
# If this is a push to the default branch, also add --cache-to to save the cache
if [[ ${{ github.event_name }} == "push" && ${{ github.ref_name }} == ${{ github.event.repository.default_branch }} ]]; then
args+=(--cache-to "type=gha${url_args:+,${url_args}},mode=max,ignore-error=true,scope=${CONTAINER_NAME}")
fi
# Always `--load` into docker images (needed when using the `docker-container` build driver).
args+=(--load)
echo "DOCKER_BUILD_CACHE_ARG=${args[*]}" >> $GITHUB_ENV

View File

@@ -1,27 +0,0 @@
name: 'Configure environment'
description: 'Configure CI, cache and container name environment variables'
runs:
using: 'composite'
steps:
- name: Set CI and cache directories
shell: bash
run: |
echo "BASE_ROOT_DIR=${{ runner.temp }}" >> "$GITHUB_ENV"
echo "BASE_BUILD_DIR=${{ runner.temp }}/build" >> "$GITHUB_ENV"
echo "CCACHE_DIR=${{ runner.temp }}/ccache_dir" >> $GITHUB_ENV
echo "DEPENDS_DIR=${{ runner.temp }}/depends" >> "$GITHUB_ENV"
echo "BASE_CACHE=${{ runner.temp }}/depends/built" >> $GITHUB_ENV
echo "SOURCES_PATH=${{ runner.temp }}/depends/sources" >> $GITHUB_ENV
echo "PREVIOUS_RELEASES_DIR=${{ runner.temp }}/previous_releases" >> $GITHUB_ENV
- name: Set cache hashes
shell: bash
run: |
echo "DEPENDS_HASH=$(git ls-tree HEAD depends "$FILE_ENV" | sha256sum | cut -d' ' -f1)" >> $GITHUB_ENV
echo "PREVIOUS_RELEASES_HASH=$(git ls-tree HEAD test/get_previous_releases.py | sha256sum | cut -d' ' -f1)" >> $GITHUB_ENV
- name: Get container name
shell: bash
run: |
source $FILE_ENV
echo "CONTAINER_NAME=$CONTAINER_NAME" >> "$GITHUB_ENV"

View File

@@ -1,47 +0,0 @@
name: 'Restore Caches'
description: 'Restore ccache, depends sources, and built depends caches'
runs:
using: 'composite'
steps:
- name: Restore Ccache cache
id: ccache-cache
uses: cirruslabs/cache/restore@v4
with:
path: ${{ env.CCACHE_DIR }}
key: ccache-${{ env.CONTAINER_NAME }}-${{ github.run_id }}
restore-keys: |
ccache-${{ env.CONTAINER_NAME }}-
- name: Restore depends sources cache
id: depends-sources
uses: cirruslabs/cache/restore@v4
with:
path: ${{ env.SOURCES_PATH }}
key: depends-sources-${{ env.CONTAINER_NAME }}-${{ env.DEPENDS_HASH }}
restore-keys: |
depends-sources-${{ env.CONTAINER_NAME }}-
- name: Restore built depends cache
id: depends-built
uses: cirruslabs/cache/restore@v4
with:
path: ${{ env.BASE_CACHE }}
key: depends-built-${{ env.CONTAINER_NAME }}-${{ env.DEPENDS_HASH }}
restore-keys: |
depends-built-${{ env.CONTAINER_NAME }}-
- name: Restore previous releases cache
id: previous-releases
uses: cirruslabs/cache/restore@v4
with:
path: ${{ env.PREVIOUS_RELEASES_DIR }}
key: previous-releases-${{ env.CONTAINER_NAME }}-${{ env.PREVIOUS_RELEASES_HASH }}
restore-keys: |
previous-releases-${{ env.CONTAINER_NAME }}-
- name: export cache hits
shell: bash
run: |
echo "depends-sources-cache-hit=${{ steps.depends-sources.outputs.cache-hit }}" >> $GITHUB_ENV
echo "depends-built-cache-hit=${{ steps.depends-built.outputs.cache-hit }}" >> $GITHUB_ENV
echo "previous-releases-cache-hit=${{ steps.previous-releases.outputs.cache-hit }}" >> $GITHUB_ENV

View File

@@ -1,39 +0,0 @@
name: 'Save Caches'
description: 'Save ccache, depends sources, and built depends caches'
runs:
using: 'composite'
steps:
- name: debug cache hit inputs
shell: bash
run: |
echo "depends sources direct cache hit to primary key: ${{ env.depends-sources-cache-hit }}"
echo "depends built direct cache hit to primary key: ${{ env.depends-built-cache-hit }}"
echo "previous releases direct cache hit to primary key: ${{ env.previous-releases-cache-hit }}"
- name: Save Ccache cache
uses: cirruslabs/cache/save@v4
if: ${{ (github.event_name == 'push') && (github.ref_name == github.event.repository.default_branch) }}
with:
path: ${{ env.CCACHE_DIR }}
key: ccache-${{ env.CONTAINER_NAME }}-${{ github.run_id }}
- name: Save depends sources cache
uses: cirruslabs/cache/save@v4
if: ${{ (github.event_name == 'push') && (github.ref_name == github.event.repository.default_branch) && (env.depends-sources-cache-hit != 'true') }}
with:
path: ${{ env.SOURCES_PATH }}
key: depends-sources-${{ env.CONTAINER_NAME }}-${{ env.DEPENDS_HASH }}
- name: Save built depends cache
uses: cirruslabs/cache/save@v4
if: ${{ (github.event_name == 'push') && (github.ref_name == github.event.repository.default_branch) && (env.depends-built-cache-hit != 'true' )}}
with:
path: ${{ env.BASE_CACHE }}
key: depends-built-${{ env.CONTAINER_NAME }}-${{ env.DEPENDS_HASH }}
- name: Save previous releases cache
uses: cirruslabs/cache/save@v4
if: ${{ (github.event_name == 'push') && (github.ref_name == github.event.repository.default_branch) && (env.previous-releases-cache-hit != 'true' )}}
with:
path: ${{ env.PREVIOUS_RELEASES_DIR }}
key: previous-releases-${{ env.CONTAINER_NAME }}-${{ env.PREVIOUS_RELEASES_HASH }}

View File

@@ -18,30 +18,14 @@ concurrency:
cancel-in-progress: true
env:
DANGER_RUN_CI_ON_HOST: 1
CI_FAILFAST_TEST_LEAVE_DANGLING: 1 # GHA does not care about dangling processes and setting this variable avoids killing the CI script itself on error
CIRRUS_CACHE_HOST: http://127.0.0.1:12321/ # When using Cirrus Runners this host can be used by the docker `gha` build cache type.
REPO_USE_CIRRUS_RUNNERS: 'bitcoin/bitcoin' # Use cirrus runners and cache for this repo, instead of falling back to the slow GHA runners
MAKEJOBS: '-j10'
jobs:
runners:
name: 'determine runners'
runs-on: ubuntu-latest
outputs:
use-cirrus-runners: ${{ steps.runners.outputs.use-cirrus-runners }}
steps:
- id: runners
run: |
if [[ "${REPO_USE_CIRRUS_RUNNERS}" == "${{ github.repository }}" ]]; then
echo "use-cirrus-runners=true" >> "$GITHUB_OUTPUT"
echo "::notice title=Runner Selection::Using Cirrus Runners"
else
echo "use-cirrus-runners=false" >> "$GITHUB_OUTPUT"
echo "::notice title=Runner Selection::Using GitHub-hosted runners"
fi
test-each-commit:
name: 'test each commit'
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
if: github.event_name == 'pull_request' && github.event.pull_request.commits != 1
timeout-minutes: 360 # Use maximum time, see https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idtimeout-minutes. Assuming a worst case time of 1 hour per commit, this leads to a --max-count=6 below.
env:
@@ -75,21 +59,15 @@ jobs:
# and the ^ prefix is used to exclude these parents and all their
# ancestors from the rev-list output as described in:
# https://git-scm.com/docs/git-rev-list
MERGE_BASE=$(git rev-list -n1 --merges HEAD)
EXCLUDE_MERGE_BASE_ANCESTORS=
# MERGE_BASE can be empty due to limited fetch-depth
if test -n "$MERGE_BASE"; then
EXCLUDE_MERGE_BASE_ANCESTORS=^${MERGE_BASE}^@
fi
echo "TEST_BASE=$(git rev-list -n$((${{ env.MAX_COUNT }} + 1)) --reverse HEAD $EXCLUDE_MERGE_BASE_ANCESTORS | head -1)" >> "$GITHUB_ENV"
echo "TEST_BASE=$(git rev-list -n$((${{ env.MAX_COUNT }} + 1)) --reverse HEAD ^$(git rev-list -n1 --merges HEAD)^@ | head -1)" >> "$GITHUB_ENV"
- run: |
sudo apt-get update
sudo apt-get install clang ccache build-essential libtool autotools-dev automake pkg-config bsdmainutils python3-zmq libevent-dev libboost-dev libsqlite3-dev libdb++-dev systemtap-sdt-dev libminiupnpc-dev libnatpmp-dev qtbase5-dev qttools5-dev qttools5-dev-tools qtwayland5 libqrencode-dev -y
sudo apt-get install clang-15 ccache build-essential libtool autotools-dev automake pkg-config bsdmainutils python3-zmq libevent-dev libboost-dev libsqlite3-dev libdb++-dev systemtap-sdt-dev libminiupnpc-dev libnatpmp-dev libqt5gui5 libqt5core5a libqt5dbus5 qttools5-dev qttools5-dev-tools qtwayland5 libqrencode-dev -y
- name: Compile and run tests
run: |
# Run tests on commits after the last merge commit and before the PR head commit
# Use clang++, because it is a bit faster and uses less memory than g++
git rebase --exec "echo Running test-one-commit on \$( git log -1 ) && ./autogen.sh && CC=clang CXX=clang++ ./configure --with-incompatible-bdb && make clean && make -j $(nproc) check && ./test/functional/test_runner.py -j $(( $(nproc) * 2 ))" ${{ env.TEST_BASE }}
git rebase --exec "echo Running test-one-commit on \$( git log -1 ) && ./autogen.sh && CC=clang-15 CXX=clang++-15 ./configure && make clean && make -j $(nproc) check && ./test/functional/test_runner.py -j $(( $(nproc) * 2 ))" ${{ env.TEST_BASE }}
macos-native-x86_64:
name: 'macOS 13 native, x86_64, no depends, sqlite only, gui'
@@ -103,17 +81,12 @@ jobs:
timeout-minutes: 120
env:
DANGER_RUN_CI_ON_HOST: 1
FILE_ENV: './ci/test/00_setup_env_mac_native.sh'
BASE_ROOT_DIR: ${{ github.workspace }}
steps:
- &CHECKOUT
name: Checkout
uses: actions/checkout@v5
with:
# Ensure the latest merged pull request state is used, even on re-runs.
ref: &CHECKOUT_REF_TMPL ${{ github.event_name == 'pull_request' && github.ref || '' }}
- name: Checkout
uses: actions/checkout@v4
- name: Clang version
run: |
@@ -123,10 +96,7 @@ jobs:
- name: Install Homebrew packages
env:
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
run: |
# A workaround for "The `brew link` step did not complete successfully" error.
brew install --quiet python@3 || brew link --overwrite python@3
brew install --quiet automake libtool pkg-config gnu-getopt ccache boost libevent miniupnpc libnatpmp zeromq qt@5 qrencode
run: brew install automake libtool pkg-config gnu-getopt ccache boost libevent miniupnpc libnatpmp zeromq qt@5 qrencode
- name: Set Ccache directory
run: echo "CCACHE_DIR=${RUNNER_TEMP}/ccache_dir" >> "$GITHUB_ENV"
@@ -164,12 +134,13 @@ jobs:
CI_CCACHE_VERSION: '4.7.5'
CI_QT_CONF: '-release -silent -opensource -confirm-license -opengl desktop -static -static-runtime -mp -qt-zlib -qt-pcre -qt-libpng -nomake examples -nomake tests -nomake tools -no-angle -no-dbus -no-gif -no-gtk -no-ico -no-icu -no-libjpeg -no-libudev -no-sql-sqlite -no-sql-odbc -no-sqlite -no-vulkan -skip qt3d -skip qtactiveqt -skip qtandroidextras -skip qtcharts -skip qtconnectivity -skip qtdatavis3d -skip qtdeclarative -skip doc -skip qtdoc -skip qtgamepad -skip qtgraphicaleffects -skip qtimageformats -skip qtlocation -skip qtlottie -skip qtmacextras -skip qtmultimedia -skip qtnetworkauth -skip qtpurchasing -skip qtquick3d -skip qtquickcontrols -skip qtquickcontrols2 -skip qtquicktimeline -skip qtremoteobjects -skip qtscript -skip qtscxml -skip qtsensors -skip qtserialbus -skip qtserialport -skip qtspeech -skip qtsvg -skip qtvirtualkeyboard -skip qtwayland -skip qtwebchannel -skip qtwebengine -skip qtwebglplugin -skip qtwebsockets -skip qtwebview -skip qtx11extras -skip qtxmlpatterns -no-openssl -no-feature-bearermanagement -no-feature-printdialog -no-feature-printer -no-feature-printpreviewdialog -no-feature-printpreviewwidget -no-feature-sql -no-feature-sqlmodel -no-feature-textbrowser -no-feature-textmarkdownwriter -no-feature-textodfwriter -no-feature-xml'
CI_QT_DIR: 'qt-everywhere-src-5.15.11'
CI_QT_URL: 'https://download.qt.io/archive/qt/5.15/5.15.11/single/qt-everywhere-opensource-src-5.15.11.zip'
CI_QT_URL: 'https://download.qt.io/official_releases/qt/5.15/5.15.11/single/qt-everywhere-opensource-src-5.15.11.zip'
PYTHONUTF8: 1
TEST_RUNNER_TIMEOUT_FACTOR: 40
steps:
- *CHECKOUT
- name: Checkout
uses: actions/checkout@v4
- name: Configure Developer Command Prompt for Microsoft Visual C++
# Using microsoft/setup-msbuild is not enough.
@@ -179,12 +150,13 @@ jobs:
- name: Get tool information
run: |
msbuild -version | Tee-Object -FilePath "msbuild_version"
$env:VCToolsVersion | Tee-Object -FilePath "toolset_version"
$env:CI_QT_URL | Out-File -FilePath "qt_url"
$env:CI_QT_CONF | Out-File -FilePath "qt_conf"
msbuild -version | Out-File -FilePath "$env:GITHUB_WORKSPACE\msbuild_version"
Get-Content -Path "$env:GITHUB_WORKSPACE\msbuild_version"
$env:VCToolsVersion | Out-File -FilePath "$env:GITHUB_WORKSPACE\toolset_version"
Write-Host "VCToolsVersion $(Get-Content -Path "$env:GITHUB_WORKSPACE\toolset_version")"
$env:CI_QT_URL | Out-File -FilePath "$env:GITHUB_WORKSPACE\qt_url"
$env:CI_QT_CONF | Out-File -FilePath "$env:GITHUB_WORKSPACE\qt_conf"
py -3 --version
Write-Host "PowerShell version $($PSVersionTable.PSVersion.ToString())"
- name: Restore static Qt cache
id: static-qt-cache
@@ -265,8 +237,10 @@ jobs:
run: |
Set-Location "$env:VCPKG_INSTALLATION_ROOT"
Add-Content -Path "triplets\x64-windows-static.cmake" -Value "set(VCPKG_BUILD_TYPE release)"
Add-Content -Path "triplets\x64-windows-static.cmake" -Value "set(VCPKG_PLATFORM_TOOLSET_VERSION $env:VCToolsVersion)"
.\vcpkg.exe --vcpkg-root "$env:VCPKG_INSTALLATION_ROOT" integrate install
git rev-parse HEAD | Tee-Object -FilePath "$env:GITHUB_WORKSPACE\vcpkg_commit"
git rev-parse HEAD | Out-File -FilePath "$env:GITHUB_WORKSPACE\vcpkg_commit"
Get-Content -Path "$env:GITHUB_WORKSPACE\vcpkg_commit"
- name: vcpkg tools cache
uses: actions/cache@v4
@@ -317,168 +291,3 @@ jobs:
TEST_RUNNER_EXTRA: ${{ github.event_name != 'pull_request' && '--extended' || '' }}
shell: cmd
run: py -3 test\functional\test_runner.py --jobs %NUMBER_OF_PROCESSORS% --ci --quiet --tmpdirprefix=%RUNNER_TEMP% --combinedlogslen=99999999 --timeout-factor=%TEST_RUNNER_TIMEOUT_FACTOR% %TEST_RUNNER_EXTRA%
- name: Clone fuzz corpus
run: |
git clone --depth=1 https://github.com/bitcoin-core/qa-assets "$env:RUNNER_TEMP\qa-assets"
Set-Location "$env:RUNNER_TEMP\qa-assets"
Write-Host "Using qa-assets repo from commit ..."
git log -1
- name: Run fuzz binaries
env:
BITCOINFUZZ: "${{ github.workspace}}\\src\\fuzz.exe"
shell: cmd
run: py -3 test\fuzz\test_runner.py --par %NUMBER_OF_PROCESSORS% --loglevel DEBUG %RUNNER_TEMP%\qa-assets\fuzz_seed_corpus
- name: CI script
run: ./ci/test_run_all.sh
ci-matrix:
name: ${{ matrix.name }}
needs: runners
runs-on: ${{ needs.runners.outputs.use-cirrus-runners == 'true' && matrix.cirrus-runner || matrix.fallback-runner }}
if: ${{ vars.SKIP_BRANCH_PUSH != 'true' || github.event_name == 'pull_request' }}
timeout-minutes: ${{ matrix.timeout-minutes }}
env:
DANGER_CI_ON_HOST_FOLDERS: 1
FILE_ENV: ${{ matrix.file-env }}
strategy:
fail-fast: false
matrix:
include:
- name: 'ARM, unit tests, no functional tests'
cirrus-runner: 'ubuntu-24.04-arm' # Cirrus' Arm runners are Apple (with virtual Linux aarch64), which doesn't support 32-bit mode
fallback-runner: 'ubuntu-24.04-arm'
timeout-minutes: 120
file-env: './ci/test/00_setup_env_arm.sh'
- name: 'Win64, unit tests, no gui tests, no functional tests'
cirrus-runner: 'ghcr.io/cirruslabs/ubuntu-runner-amd64:24.04-md'
fallback-runner: 'ubuntu-24.04'
timeout-minutes: 120
file-env: './ci/test/00_setup_env_win64.sh'
- name: 'ASan + LSan + UBSan + integer, no depends, USDT'
cirrus-runner: 'ghcr.io/cirruslabs/ubuntu-runner-amd64:24.04-md' # has to match container in ci/test/00_setup_env_native_asan.sh for tracing tools
fallback-runner: 'ubuntu-24.04'
timeout-minutes: 120
file-env: './ci/test/00_setup_env_native_asan.sh'
- name: 'macOS-cross, gui, no tests'
cirrus-runner: 'ghcr.io/cirruslabs/ubuntu-runner-amd64:24.04-sm'
fallback-runner: 'ubuntu-24.04'
timeout-minutes: 120
file-env: './ci/test/00_setup_env_mac_cross.sh'
- name: 'No wallet, libbitcoinkernel'
cirrus-runner: 'ghcr.io/cirruslabs/ubuntu-runner-amd64:24.04-sm'
fallback-runner: 'ubuntu-24.04'
timeout-minutes: 120
file-env: './ci/test/00_setup_env_native_nowallet_libbitcoinkernel.sh'
- name: 'i686, multiprocess, DEBUG'
cirrus-runner: 'ghcr.io/cirruslabs/ubuntu-runner-amd64:24.04-md'
fallback-runner: 'ubuntu-24.04'
timeout-minutes: 120
file-env: './ci/test/00_setup_env_i686_multiprocess.sh'
- name: 'fuzzer,address,undefined,integer, no depends'
cirrus-runner: 'ghcr.io/cirruslabs/ubuntu-runner-amd64:24.04-lg'
fallback-runner: 'ubuntu-24.04'
timeout-minutes: 240
file-env: './ci/test/00_setup_env_native_fuzz.sh'
- name: 'previous releases, depends DEBUG'
cirrus-runner: 'ghcr.io/cirruslabs/ubuntu-runner-amd64:24.04-md'
fallback-runner: 'ubuntu-24.04'
timeout-minutes: 120
file-env: './ci/test/00_setup_env_native_previous_releases.sh'
- name: '32bit CentOS, dash, gui'
cirrus-runner: 'ghcr.io/cirruslabs/ubuntu-runner-amd64:24.04-lg'
fallback-runner: 'ubuntu-24.04'
timeout-minutes: 120
file-env: './ci/test/00_setup_env_i686_centos.sh'
- name: 'tidy'
cirrus-runner: 'ghcr.io/cirruslabs/ubuntu-runner-amd64:24.04-md'
fallback-runner: 'ubuntu-24.04'
timeout-minutes: 120
file-env: './ci/test/00_setup_env_native_tidy.sh'
- name: 'TSan, depends, gui'
cirrus-runner: 'ghcr.io/cirruslabs/ubuntu-runner-amd64:24.04-md'
fallback-runner: 'ubuntu-24.04'
timeout-minutes: 120
file-env: './ci/test/00_setup_env_native_tsan.sh'
- name: 'MSan, depends'
cirrus-runner: 'ghcr.io/cirruslabs/ubuntu-runner-amd64:24.04-lg'
fallback-runner: 'ubuntu-24.04'
timeout-minutes: 120
file-env: './ci/test/00_setup_env_native_msan.sh'
steps:
- *CHECKOUT
- name: Configure environment
uses: ./.github/actions/configure-environment
- name: Restore caches
id: restore-cache
uses: ./.github/actions/restore-caches
- name: Configure Docker
uses: ./.github/actions/configure-docker
with:
use-cirrus: ${{ needs.runners.outputs.use-cirrus-runners }}
- name: Enable bpfcc script
if: ${{ env.CONTAINER_NAME == 'ci_native_asan' }}
# In the image build step, no external environment variables are available,
# so any settings will need to be written to the settings env file:
run: sed -i "s|\${INSTALL_BCC_TRACING_TOOLS}|true|g" ./ci/test/00_setup_env_native_asan.sh
- name: Set mmap_rnd_bits
if: ${{ env.CONTAINER_NAME == 'ci_native_tsan' || env.CONTAINER_NAME == 'ci_native_msan' }}
# Prevents crashes due to high ASLR entropy
run: sudo sysctl -w vm.mmap_rnd_bits=28
- name: CI script
run: ./ci/test_run_all.sh
- name: Save caches
uses: ./.github/actions/save-caches
lint:
name: 'lint'
needs: runners
runs-on: ${{ needs.runners.outputs.use-cirrus-runners == 'true' && 'ghcr.io/cirruslabs/ubuntu-runner-amd64:24.04-xs' || 'ubuntu-24.04' }}
if: ${{ vars.SKIP_BRANCH_PUSH != 'true' || github.event_name == 'pull_request' }}
timeout-minutes: 20
env:
CONTAINER_NAME: "bitcoin-linter"
steps:
- name: Checkout
uses: actions/checkout@v5
with:
ref: *CHECKOUT_REF_TMPL
fetch-depth: 0
- name: Configure Docker
uses: ./.github/actions/configure-docker
with:
use-cirrus: ${{ needs.runners.outputs.use-cirrus-runners }}
- name: CI script
run: |
set -o xtrace
docker buildx build -t "$CONTAINER_NAME" $DOCKER_BUILD_CACHE_ARG --file "./ci/lint_imagefile" .
CIRRUS_PR_FLAG=""
if [ "${{ github.event_name }}" = "pull_request" ]; then
CIRRUS_PR_FLAG="-e CIRRUS_PR=1"
fi
docker run --rm $CIRRUS_PR_FLAG -v "$(pwd)":/bitcoin "$CONTAINER_NAME"

3
.gitignore vendored
View File

@@ -130,12 +130,12 @@ win32-build
test/config.ini
test/cache/*
test/.mypy_cache/
test/lint/test_runner/target/
!src/leveldb*/Makefile
/doc/doxygen/
libbitcoinconsensus.pc
contrib/devtools/split-debug.sh
# Output from running db4 installation
@@ -144,6 +144,7 @@ db4/
# clang-check
*.plist
osx_volname
dist/
/guix-build-*

View File

@@ -1,7 +1,7 @@
[main]
host = https://www.transifex.com
[o:bitcoin:p:bitcoin:r:qt-translation-028x]
[o:bitcoin:p:bitcoin:r:qt-translation-027x]
file_filter = src/qt/locale/bitcoin_<lang>.xlf
source_file = src/qt/locale/bitcoin_en.xlf
source_lang = en

View File

@@ -66,10 +66,9 @@ Discussion about codebase improvements happens in GitHub issues and pull
requests.
The developer
[mailing list](https://groups.google.com/g/bitcoindev)
[mailing list](https://lists.linuxfoundation.org/mailman/listinfo/bitcoin-dev)
should be used to discuss complicated or controversial consensus or P2P protocol changes before working on
a patch set.
Archives can be found on [https://gnusha.org/pi/bitcoindev/](https://gnusha.org/pi/bitcoindev/).
Contributor Workflow
@@ -149,7 +148,7 @@ the pull request affects. Valid areas as:
- `net` or `p2p` for changes to the peer-to-peer network code
- `refactor` for structural changes that do not change behavior
- `rpc`, `rest` or `zmq` for changes to the RPC, REST or ZMQ APIs
- `contrib` or `cli` for changes to the scripts and tools
- `script` for changes to the scripts and tools
- `test`, `qa` or `ci` for changes to the unit tests, QA tests or CI code
- `util` or `lib` for changes to the utils or libraries
- `wallet` for changes to the wallet code
@@ -418,8 +417,11 @@ Backporting
Security and bug fixes can be backported from `master` to release
branches.
Maintainers will do backports in batches and
use the proper `Needs backport (...)` labels
If the backport is non-trivial, it may be appropriate to open an
additional PR to backport the change, but only after the original PR
has been merged.
Otherwise, backports will be done in batches and
the maintainers will use the proper `Needs backport (...)` labels
when needed (the original author does not need to worry about it).
A backport should contain the following metadata in the commit body:
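
For illustration only (the exact list sits in the unchanged part of the file, outside this hunk), backport commit bodies conventionally carry trailers along these lines; the PR number and hash are placeholders:

```
Github-Pull: #NNNNN
Rebased-From: <commit hash of the original commit>
```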

View File

@@ -1,7 +1,7 @@
The MIT License (MIT)
Copyright (c) 2009-2025 The Bitcoin Core developers
Copyright (c) 2009-2025 Bitcoin Developers
Copyright (c) 2009-2024 The Bitcoin Core developers
Copyright (c) 2009-2024 Bitcoin Developers
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

View File

@@ -14,6 +14,11 @@ endif
.PHONY: deploy FORCE
.INTERMEDIATE: $(COVERAGE_INFO)
if BUILD_BITCOIN_LIBS
pkgconfigdir = $(libdir)/pkgconfig
pkgconfig_DATA = libbitcoinconsensus.pc
endif
BITCOIND_BIN=$(top_builddir)/src/$(BITCOIN_DAEMON_NAME)$(EXEEXT)
BITCOIN_QT_BIN=$(top_builddir)/src/qt/$(BITCOIN_GUI_NAME)$(EXEEXT)
BITCOIN_TEST_BIN=$(top_builddir)/src/test/$(BITCOIN_TEST_NAME)$(EXEEXT)
@@ -113,6 +118,9 @@ OSX_APP_BUILT=$(OSX_APP)/Contents/PkgInfo $(OSX_APP)/Contents/Resources/empty.lp
$(OSX_APP)/Contents/Resources/bitcoin.icns $(OSX_APP)/Contents/Info.plist \
$(OSX_APP)/Contents/MacOS/Bitcoin-Qt $(OSX_APP)/Contents/Resources/Base.lproj/InfoPlist.strings
osx_volname:
echo $(OSX_VOLNAME) >$@
if BUILD_DARWIN
$(OSX_ZIP): $(OSX_APP_BUILT) $(OSX_PACKAGING)
$(PYTHON) $(OSX_DEPLOY_SCRIPT) $(OSX_APP) $(OSX_VOLNAME) -translations-dir=$(QT_TRANSLATION_DIR) -zip
@@ -126,7 +134,7 @@ $(OSX_ZIP): deploydir
cd $(APP_DIST_DIR) && find . | sort | $(ZIP) -X@ $@
$(APP_DIST_DIR)/$(OSX_APP)/Contents/MacOS/Bitcoin-Qt: $(OSX_APP_BUILT) $(OSX_PACKAGING)
OBJDUMP=$(OBJDUMP) $(PYTHON) $(OSX_DEPLOY_SCRIPT) $(OSX_APP) $(OSX_VOLNAME) -translations-dir=$(QT_TRANSLATION_DIR)
INSTALL_NAME_TOOL=$(INSTALL_NAME_TOOL) OTOOL=$(OTOOL) STRIP=$(STRIP) $(PYTHON) $(OSX_DEPLOY_SCRIPT) $(OSX_APP) $(OSX_VOLNAME) -translations-dir=$(QT_TRANSLATION_DIR)
deploydir: $(APP_DIST_DIR)/$(OSX_APP)/Contents/MacOS/Bitcoin-Qt
endif !BUILD_DARWIN
@@ -186,7 +194,7 @@ baseline_filtered.info: baseline.info
$(LCOV) -a $@ $(LCOV_OPTS) -o $@
fuzz.info: baseline_filtered.info
@test/fuzz/test_runner.py $(DIR_FUZZ_SEED_CORPUS) -l DEBUG
@TIMEOUT=15 test/fuzz/test_runner.py $(DIR_FUZZ_SEED_CORPUS) -l DEBUG
$(LCOV) -c $(LCOV_OPTS) -d $(abs_builddir)/src --t fuzz-tests -o $@
$(LCOV) -z $(LCOV_OPTS) -d $(abs_builddir)/src
@@ -204,7 +212,7 @@ test_bitcoin_filtered.info: test_bitcoin.info
$(LCOV) -a $@ $(LCOV_OPTS) -o $@
functional_test.info: test_bitcoin_filtered.info
@test/functional/test_runner.py $(EXTENDED_FUNCTIONAL_TESTS)
@TIMEOUT=15 test/functional/test_runner.py $(EXTENDED_FUNCTIONAL_TESTS)
$(LCOV) -c $(LCOV_OPTS) -d $(abs_builddir)/src --t functional-tests -o $@
$(LCOV) -z $(LCOV_OPTS) -d $(abs_builddir)/src
@@ -330,18 +338,18 @@ clean-docs:
clean-local: clean-docs
rm -rf coverage_percent.txt test_bitcoin.coverage/ total.coverage/ fuzz.coverage/ test/tmp/ cache/ $(OSX_APP)
rm -rf test/functional/__pycache__ test/functional/test_framework/__pycache__ test/cache share/rpcauth/__pycache__
rm -rf dist/ test/lint/test_runner/target/ test/lint/__pycache__
rm -rf osx_volname dist/
test-security-check:
if TARGET_DARWIN
$(AM_V_at) CXX='$(CXX)' CXXFLAGS='$(CXXFLAGS)' CPPFLAGS='$(CPPFLAGS)' LDFLAGS='$(LDFLAGS)' $(PYTHON) $(top_srcdir)/contrib/devtools/test-security-check.py TestSecurityChecks.test_MACHO
$(AM_V_at) CXX='$(CXX)' CXXFLAGS='$(CXXFLAGS)' CPPFLAGS='$(CPPFLAGS)' LDFLAGS='$(LDFLAGS)' $(PYTHON) $(top_srcdir)/contrib/devtools/test-symbol-check.py TestSymbolChecks.test_MACHO
$(AM_V_at) CC='$(CC)' CFLAGS='$(CFLAGS)' CPPFLAGS='$(CPPFLAGS)' LDFLAGS='$(LDFLAGS)' $(PYTHON) $(top_srcdir)/contrib/devtools/test-security-check.py TestSecurityChecks.test_MACHO
$(AM_V_at) CC='$(CC)' CFLAGS='$(CFLAGS)' CPPFLAGS='$(CPPFLAGS)' LDFLAGS='$(LDFLAGS)' $(PYTHON) $(top_srcdir)/contrib/devtools/test-symbol-check.py TestSymbolChecks.test_MACHO
endif
if TARGET_WINDOWS
$(AM_V_at) CXX='$(CXX)' CXXFLAGS='$(CXXFLAGS)' CPPFLAGS='$(CPPFLAGS)' LDFLAGS='$(LDFLAGS)' $(PYTHON) $(top_srcdir)/contrib/devtools/test-security-check.py TestSecurityChecks.test_PE
$(AM_V_at) CXX='$(CXX)' CXXFLAGS='$(CXXFLAGS)' CPPFLAGS='$(CPPFLAGS)' LDFLAGS='$(LDFLAGS)' $(PYTHON) $(top_srcdir)/contrib/devtools/test-symbol-check.py TestSymbolChecks.test_PE
$(AM_V_at) CC='$(CC)' CFLAGS='$(CFLAGS)' CPPFLAGS='$(CPPFLAGS)' LDFLAGS='$(LDFLAGS)' $(PYTHON) $(top_srcdir)/contrib/devtools/test-security-check.py TestSecurityChecks.test_PE
$(AM_V_at) CC='$(CC)' CFLAGS='$(CFLAGS)' CPPFLAGS='$(CPPFLAGS)' LDFLAGS='$(LDFLAGS)' $(PYTHON) $(top_srcdir)/contrib/devtools/test-symbol-check.py TestSymbolChecks.test_PE
endif
if TARGET_LINUX
$(AM_V_at) CXX='$(CXX)' CXXFLAGS='$(CXXFLAGS)' CPPFLAGS='$(CPPFLAGS)' LDFLAGS='$(LDFLAGS)' $(PYTHON) $(top_srcdir)/contrib/devtools/test-security-check.py TestSecurityChecks.test_ELF
$(AM_V_at) CXX='$(CXX)' CXXFLAGS='$(CXXFLAGS)' CPPFLAGS='$(CPPFLAGS)' LDFLAGS='$(LDFLAGS)' $(PYTHON) $(top_srcdir)/contrib/devtools/test-symbol-check.py TestSymbolChecks.test_ELF
$(AM_V_at) CC='$(CC)' CFLAGS='$(CFLAGS)' CPPFLAGS='$(CPPFLAGS)' LDFLAGS='$(LDFLAGS)' $(PYTHON) $(top_srcdir)/contrib/devtools/test-security-check.py TestSecurityChecks.test_ELF
$(AM_V_at) CC='$(CC)' CFLAGS='$(CFLAGS)' CPPFLAGS='$(CPPFLAGS)' LDFLAGS='$(LDFLAGS)' $(PYTHON) $(top_srcdir)/contrib/devtools/test-symbol-check.py TestSymbolChecks.test_ELF
endif

View File

@@ -69,7 +69,7 @@ Translations
------------
Changes to translations as well as new translations can be submitted to
[Bitcoin Core's Transifex page](https://explore.transifex.com/bitcoin/bitcoin/).
[Bitcoin Core's Transifex page](https://www.transifex.com/bitcoin/bitcoin/).
Translations are periodically pulled from Transifex and merged into the git repository. See the
[translation process](doc/translation_process.md) for details on how this works.

View File

@@ -983,7 +983,7 @@ m4_define([_AX_CXX_COMPILE_STDCXX_testbody_new_in_20], [[
#error "This is not a C++ compiler"
#elif __cplusplus < 202002L
#elif __cplusplus < 201709L // Temporary patch on top of upstream to allow g++-10
#error "This is not a C++20 compiler"

View File

@@ -70,6 +70,8 @@ AC_DEFUN([BITCOIN_QT_INIT],[
[qt_lib_suffix= ]); bitcoin_qt_want_version=qt5],
[qt_lib_suffix= ])
AS_CASE([$host], [*android*], [qt_lib_suffix=_$ANDROID_ARCH])
AC_ARG_WITH([qt-incdir],[AS_HELP_STRING([--with-qt-incdir=INC_DIR],[specify qt include path (overridden by pkgconfig)])], [qt_include_path=$withval], [])
AC_ARG_WITH([qt-libdir],[AS_HELP_STRING([--with-qt-libdir=LIB_DIR],[specify qt lib path (overridden by pkgconfig)])], [qt_lib_path=$withval], [])
AC_ARG_WITH([qt-plugindir],[AS_HELP_STRING([--with-qt-plugindir=PLUGIN_DIR],[specify qt plugin path (overridden by pkgconfig)])], [qt_plugin_path=$withval], [])
@@ -78,10 +80,19 @@ AC_DEFUN([BITCOIN_QT_INIT],[
AC_ARG_WITH([qtdbus],
[AS_HELP_STRING([--with-qtdbus],
[enable DBus support (default is yes if qt is enabled and QtDBus is found)])],
[enable DBus support (default is yes if qt is enabled and QtDBus is found, except on Android)])],
[use_dbus=$withval],
[use_dbus=auto])
dnl Android doesn't support D-Bus and certainly doesn't use it for notifications
case $host in
*android*)
if test "$use_dbus" != "yes"; then
use_dbus=no
fi
;;
esac
AC_SUBST(QT_TRANSLATION_DIR,$qt_translation_path)
])
@@ -121,10 +132,16 @@ AC_DEFUN([BITCOIN_QT_CONFIGURE],[
if test -d "$qt_plugin_path/accessible"; then
QT_LIBS="$QT_LIBS -L$qt_plugin_path/accessible"
fi
if test -d "$qt_plugin_path/platforms/android"; then
QT_LIBS="$QT_LIBS -L$qt_plugin_path/platforms/android -lqtfreetype -lEGL"
fi
fi
_BITCOIN_QT_CHECK_STATIC_PLUGIN([QMinimalIntegrationPlugin], [-lqminimal])
AC_DEFINE([QT_QPA_PLATFORM_MINIMAL], [1], [Define this symbol if the minimal qt platform exists])
AC_DEFINE([QT_STATICPLUGIN], [1], [Define this symbol if qt plugins are static])
if test "$TARGET_OS" != "android"; then
_BITCOIN_QT_CHECK_STATIC_PLUGIN([QMinimalIntegrationPlugin], [-lqminimal])
AC_DEFINE([QT_QPA_PLATFORM_MINIMAL], [1], [Define this symbol if the minimal qt platform exists])
fi
if test "$TARGET_OS" = "windows"; then
dnl Linking against wtsapi32 is required. See #17749 and
dnl https://bugreports.qt.io/browse/QTBUG-27097.
@@ -143,6 +160,9 @@ AC_DEFUN([BITCOIN_QT_CONFIGURE],[
_BITCOIN_QT_CHECK_STATIC_PLUGIN([QCocoaIntegrationPlugin], [-lqcocoa])
_BITCOIN_QT_CHECK_STATIC_PLUGIN([QMacStylePlugin], [-lqmacstyle])
AC_DEFINE([QT_QPA_PLATFORM_COCOA], [1], [Define this symbol if the qt platform is cocoa])
elif test "$TARGET_OS" = "android"; then
QT_LIBS="-Wl,--export-dynamic,--undefined=JNI_OnLoad -lplugins_platforms_qtforandroid${qt_lib_suffix} -ljnigraphics -landroid -lqtfreetype${qt_lib_suffix} $QT_LIBS"
AC_DEFINE([QT_QPA_PLATFORM_ANDROID], [1], [Define this symbol if the qt platform is android])
fi
fi
CPPFLAGS=$TEMP_CPPFLAGS
@@ -207,7 +227,7 @@ AC_DEFUN([BITCOIN_QT_CONFIGURE],[
BITCOIN_QT_PATH_PROGS([LUPDATE], [lupdate-qt5 lupdate5 lupdate],$qt_bin_path, yes)
BITCOIN_QT_PATH_PROGS([LCONVERT], [lconvert-qt5 lconvert5 lconvert], $qt_bin_path, yes)
MOC_DEFS='-I$(srcdir)'
MOC_DEFS='-DHAVE_CONFIG_H -I$(srcdir)'
case $host in
*darwin*)
BITCOIN_QT_CHECK([
@@ -337,6 +357,9 @@ AC_DEFUN([_BITCOIN_QT_CHECK_STATIC_LIBS], [
PKG_CHECK_MODULES([QT_SERVICE], [${qt_lib_prefix}ServiceSupport${qt_lib_suffix}], [QT_LIBS="$QT_SERVICE_LIBS $QT_LIBS"])
elif test "$TARGET_OS" = "windows"; then
PKG_CHECK_MODULES([QT_WINDOWSUIAUTOMATION], [${qt_lib_prefix}WindowsUIAutomationSupport${qt_lib_suffix}], [QT_LIBS="$QT_WINDOWSUIAUTOMATION_LIBS $QT_LIBS"])
elif test "$TARGET_OS" = "android"; then
PKG_CHECK_MODULES([QT_EGL], [${qt_lib_prefix}EglSupport${qt_lib_suffix}], [QT_LIBS="$QT_EGL_LIBS $QT_LIBS"])
PKG_CHECK_MODULES([QT_SERVICE], [${qt_lib_prefix}ServiceSupport${qt_lib_suffix}], [QT_LIBS="$QT_SERVICE_LIBS $QT_LIBS"])
fi
])

View File

@@ -7,7 +7,7 @@ dnl warranty.
# Clang, when building for 32-bit,
# and linking against libstdc++, requires linking with
# -latomic if using the C++ atomic library.
# Can be tested with: clang++ -std=c++20 test.cpp -m32
# Can be tested with: clang++ test.cpp -m32
#
# Sourced from http://bugs.debian.org/797228
@@ -27,11 +27,8 @@ m4_define([_CHECK_ATOMIC_testbody], [[
auto t1 = t.load();
t.compare_exchange_strong(t1, 3s);
std::atomic<double> d{};
d.store(3.14);
auto d1 = d.load();
std::atomic<int64_t> a{};
int64_t v = 5;
int64_t r = a.fetch_add(v);
return static_cast<int>(r);
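
As a rough illustration of what this check guards against (the commands are an assumption based on the comment above, not part of the build system):

```sh
# On a 32-bit Clang build against libstdc++, some atomic operations may fail to link:
clang++ -std=c++20 test.cpp -m32             # may fail with undefined __atomic_* references
clang++ -std=c++20 test.cpp -m32 -latomic    # the configure check adds -latomic when this is needed
```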

View File

@@ -32,7 +32,7 @@ Qt
---------------------
To build Bitcoin Core with the GUI, a static build of Qt is required.
1. Download a single ZIP archive of Qt source code from https://download.qt.io/archive/qt/ (e.g., [`qt-everywhere-opensource-src-5.15.11.zip`](https://download.qt.io/archive/qt/5.15/5.15.11/single/qt-everywhere-opensource-src-5.15.11.zip)), and expand it into a dedicated folder. The following instructions assume that this folder is `C:\dev\qt-source`.
1. Download a single ZIP archive of Qt source code from https://download.qt.io/official_releases/qt/ (e.g., [`qt-everywhere-opensource-src-5.15.11.zip`](https://download.qt.io/official_releases/qt/5.15/5.15.11/single/qt-everywhere-opensource-src-5.15.11.zip)), and expand it into a dedicated folder. The following instructions assume that this folder is `C:\dev\qt-source`.
> 💡 **Tip:** If you use the default path with "Extract All" for the Qt source code zip file, and end up with something like `C:\dev\qt-everywhere-opensource-src-5.15.11\qt-everywhere-src-5.15.11`, you are likely to encounter a "path too long" error when building. To fix the problem move the source files to a shorter path such as the recommended `C:\dev\qt-source`.

View File

@@ -10,12 +10,6 @@
</PropertyGroup>
<ItemGroup>
@SOURCE_FILES@
<ClCompile Include="..\..\src\bench\coin_selection.cpp" />
<ClCompile Include="..\..\src\bench\wallet_balance.cpp" />
<ClCompile Include="..\..\src\bench\wallet_create.cpp" />
<ClCompile Include="..\..\src\bench\wallet_create_tx.cpp" />
<ClCompile Include="..\..\src\bench\wallet_ismine.cpp" />
<ClCompile Include="..\..\src\bench\wallet_loading.cpp" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\libbitcoin_consensus\libbitcoin_consensus.vcxproj">

View File

@@ -62,7 +62,7 @@
</Link>
<ResourceCompile>
<AdditionalIncludeDirectories>..\..\src;</AdditionalIncludeDirectories>
<PreprocessorDefinitions>_UNICODE;UNICODE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<PreprocessorDefinitions>HAVE_CONFIG_H;_UNICODE;UNICODE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
</ResourceCompile>
</ItemDefinitionGroup>
@@ -75,7 +75,7 @@
</Link>
<ResourceCompile>
<AdditionalIncludeDirectories>..\..\src;</AdditionalIncludeDirectories>
<PreprocessorDefinitions>_UNICODE;UNICODE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<PreprocessorDefinitions>HAVE_CONFIG_H;_UNICODE;UNICODE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
</ResourceCompile>
</ItemDefinitionGroup>

View File

@@ -48,9 +48,7 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "libtest_util", "libtest_uti
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_bitcoin-qt", "test_bitcoin-qt\test_bitcoin-qt.vcxproj", "{51201D5E-D939-4854-AE9D-008F03FF518E}"
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "libminisketch", "libminisketch\libminisketch.vcxproj", "{542007E3-BE0D-4B0D-A6B0-AA8813E2558D}"
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "fuzz", "fuzz\fuzz.vcxproj", "{AFCEE6C1-89FB-49AB-A694-BA580A59E2D8}"
Project("{542007E3-BE0D-4B0D-A6B0-AA8813E2558D}") = "libminisketch", "libminisketch\libminisketch.vcxproj", "{542007E3-BE0D-4B0D-A6B0-AA8813E2558D}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
@@ -154,10 +152,6 @@ Global
{542007E3-BE0D-4B0D-A6B0-AA8813E2558D}.Debug|x64.Build.0 = Debug|x64
{542007E3-BE0D-4B0D-A6B0-AA8813E2558D}.Release|x64.ActiveCfg = Release|x64
{542007E3-BE0D-4B0D-A6B0-AA8813E2558D}.Release|x64.Build.0 = Release|x64
{AFCEE6C1-89FB-49AB-A694-BA580A59E2D8}.Debug|x64.ActiveCfg = Debug|x64
{AFCEE6C1-89FB-49AB-A694-BA580A59E2D8}.Debug|x64.Build.0 = Debug|x64
{AFCEE6C1-89FB-49AB-A694-BA580A59E2D8}.Release|x64.ActiveCfg = Release|x64
{AFCEE6C1-89FB-49AB-A694-BA580A59E2D8}.Release|x64.Build.0 = Release|x64
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE

View File

@@ -41,14 +41,77 @@
/* Define this symbol to enable ZMQ functions */
#define ENABLE_ZMQ 1
/* Define to 1 if you have the declaration of `be16toh', and to 0 if you
don't. */
#define HAVE_DECL_BE16TOH 0
/* Define to 1 if you have the declaration of `be32toh', and to 0 if you
don't. */
#define HAVE_DECL_BE32TOH 0
/* Define to 1 if you have the declaration of `be64toh', and to 0 if you
don't. */
#define HAVE_DECL_BE64TOH 0
/* Define to 1 if you have the declaration of `bswap_16', and to 0 if you
don't. */
#define HAVE_DECL_BSWAP_16 0
/* Define to 1 if you have the declaration of `bswap_32', and to 0 if you
don't. */
#define HAVE_DECL_BSWAP_32 0
/* Define to 1 if you have the declaration of `bswap_64', and to 0 if you
don't. */
#define HAVE_DECL_BSWAP_64 0
/* Define to 1 if you have the declaration of `fork', and to 0 if you don't.
*/
#define HAVE_DECL_FORK 0
/* Define to 1 if you have the declaration of `htobe16', and to 0 if you
don't. */
#define HAVE_DECL_HTOBE16 0
/* Define to 1 if you have the declaration of `htobe32', and to 0 if you
don't. */
#define HAVE_DECL_HTOBE32 0
/* Define to 1 if you have the declaration of `htobe64', and to 0 if you
don't. */
#define HAVE_DECL_HTOBE64 0
/* Define to 1 if you have the declaration of `htole16', and to 0 if you
don't. */
#define HAVE_DECL_HTOLE16 0
/* Define to 1 if you have the declaration of `htole32', and to 0 if you
don't. */
#define HAVE_DECL_HTOLE32 0
/* Define to 1 if you have the declaration of `htole64', and to 0 if you
don't. */
#define HAVE_DECL_HTOLE64 0
/* Define to 1 if you have the declaration of `le16toh', and to 0 if you
don't. */
#define HAVE_DECL_LE16TOH 0
/* Define to 1 if you have the declaration of `le32toh', and to 0 if you
don't. */
#define HAVE_DECL_LE32TOH 0
/* Define to 1 if you have the declaration of `le64toh', and to 0 if you
don't. */
#define HAVE_DECL_LE64TOH 0
/* Define to 1 if you have the declaration of `setsid', and to 0 if you don't.
*/
#define HAVE_DECL_SETSID 0
/* Define if the dllexport attribute is supported. */
#define HAVE_DLLEXPORT_ATTRIBUTE 1
/* Define to the address where bug reports for this package should be sent. */
#define PACKAGE_BUGREPORT "https://github.com/bitcoin/bitcoin/issues"
@@ -70,6 +133,9 @@
/* Define this symbol if the qt platform is windows */
#define QT_QPA_PLATFORM_WINDOWS 1
/* Define this symbol if qt plugins are static */
#define QT_STATICPLUGIN 1
/* Windows Universal Platform constraints */
#if !defined(WINAPI_FAMILY) || (WINAPI_FAMILY == WINAPI_FAMILY_DESKTOP_APP)
/* Either a desktop application without API restrictions, or and older system

View File

@@ -80,7 +80,7 @@
<ReplaceInFile FilePath="$(ConfigIniOut)"
Replace="@BUILD_BITCOIND_TRUE@" By=""></ReplaceInFile>
<ReplaceInFile FilePath="$(ConfigIniOut)"
Replace="@ENABLE_FUZZ_BINARY_TRUE@" By=""></ReplaceInFile>
Replace="@ENABLE_FUZZ_BINARY_TRUE@" By="#"></ReplaceInFile>
<ReplaceInFile FilePath="$(ConfigIniOut)"
Replace="@ENABLE_ZMQ_TRUE@" By=""></ReplaceInFile>
<ReplaceInFile FilePath="$(ConfigIniOut)"

View File

@@ -87,10 +87,10 @@
<ClCompile>
<WarningLevel>Level3</WarningLevel>
<PrecompiledHeader>NotUsing</PrecompiledHeader>
<AdditionalOptions>/utf-8 /Zc:preprocessor /Zc:__cplusplus /std:c++20 %(AdditionalOptions)</AdditionalOptions>
<AdditionalOptions>/utf-8 /Zc:__cplusplus /std:c++20 %(AdditionalOptions)</AdditionalOptions>
<DisableSpecificWarnings>4018;4244;4267;4715;4805</DisableSpecificWarnings>
<TreatWarningAsError>true</TreatWarningAsError>
<PreprocessorDefinitions>_SILENCE_CXX17_CODECVT_HEADER_DEPRECATION_WARNING;SECP256K1_STATIC;ZMQ_STATIC;NOMINMAX;WIN32;_CRT_SECURE_NO_WARNINGS;_CONSOLE;_WIN32_WINNT=0x0601;_WIN32_IE=0x0501;WIN32_LEAN_AND_MEAN;PROVIDE_FUZZ_MAIN_FUNCTION;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<PreprocessorDefinitions>_SILENCE_CXX17_CODECVT_HEADER_DEPRECATION_WARNING;SECP256K1_STATIC;ZMQ_STATIC;NOMINMAX;WIN32;HAVE_CONFIG_H;_CRT_SECURE_NO_WARNINGS;_CONSOLE;_WIN32_WINNT=0x0601;_WIN32_IE=0x0501;WIN32_LEAN_AND_MEAN;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalIncludeDirectories>..\..\src;..\..\src\minisketch\include;..\..\src\univalue\include;..\..\src\secp256k1\include;..\..\src\leveldb\include;..\..\src\leveldb\helpers\memenv;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
</ClCompile>
<Link>

View File

@@ -1,85 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<Import Project="..\common.init.vcxproj" />
<PropertyGroup Label="Globals">
<ProjectGuid>{AFCEE6C1-89FB-49AB-A694-BA580A59E2D8}</ProjectGuid>
</PropertyGroup>
<PropertyGroup Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<OutDir>$(SolutionDir)$(Platform)\$(Configuration)\</OutDir>
</PropertyGroup>
<ItemGroup>
<ClCompile Include="..\..\src\test\fuzz\*.cpp" Exclude="..\..\src\test\fuzz\utxo_snapshot.cpp" />
<ClCompile Include="..\..\src\test\fuzz\util\descriptor.cpp">
<ObjectFileName>$(IntDir)test_fuzz_util_descriptor.obj</ObjectFileName>
</ClCompile>
<ClCompile Include="..\..\src\test\fuzz\util\mempool.cpp">
<ObjectFileName>$(IntDir)test_fuzz_util_mempool.obj</ObjectFileName>
</ClCompile>
<ClCompile Include="..\..\src\test\fuzz\util\net.cpp">
<ObjectFileName>$(IntDir)test_fuzz_util_net.obj</ObjectFileName>
</ClCompile>
<ClCompile Include="..\..\src\wallet\test\fuzz\coincontrol.cpp">
<ObjectFileName>$(IntDir)wallet_test_fuzz_coincontrol.obj</ObjectFileName>
</ClCompile>
<ClCompile Include="..\..\src\wallet\test\fuzz\coinselection.cpp">
<ObjectFileName>$(IntDir)wallet_test_fuzz_coinselection.obj</ObjectFileName>
</ClCompile>
<ClCompile Include="..\..\src\wallet\test\fuzz\fees.cpp">
<ObjectFileName>$(IntDir)wallet_test_fuzz_fees.obj</ObjectFileName>
</ClCompile>
<ClCompile Include="..\..\src\wallet\test\fuzz\notifications.cpp">
<ObjectFileName>$(IntDir)wallet_test_fuzz_notifications.obj</ObjectFileName>
</ClCompile>
<ClCompile Include="..\..\src\wallet\test\fuzz\parse_iso8601.cpp">
<ObjectFileName>$(IntDir)wallet_test_fuzz_parse_iso8601.obj</ObjectFileName>
</ClCompile>
<ClCompile Include="..\..\src\wallet\test\fuzz\scriptpubkeyman.cpp">
<ObjectFileName>$(IntDir)wallet_test_fuzz_scriptpubkeyman.obj</ObjectFileName>
</ClCompile>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\libminisketch\libminisketch.vcxproj">
<Project>{542007e3-be0d-4b0d-a6b0-aa8813e2558d}</Project>
</ProjectReference>
<ProjectReference Include="..\libbitcoin_consensus\libbitcoin_consensus.vcxproj">
<Project>{2b384fa8-9ee1-4544-93cb-0d733c25e8ce}</Project>
</ProjectReference>
<ProjectReference Include="..\libbitcoin_cli\libbitcoin_cli.vcxproj">
<Project>{0667528c-d734-4009-adf9-c0d6c4a5a5a6}</Project>
</ProjectReference>
<ProjectReference Include="..\libbitcoin_common\libbitcoin_common.vcxproj">
<Project>{7c87e378-df58-482e-aa2f-1bc129bc19ce}</Project>
</ProjectReference>
<ProjectReference Include="..\libbitcoin_crypto\libbitcoin_crypto.vcxproj">
<Project>{6190199c-6cf4-4dad-bfbd-93fa72a760c1}</Project>
</ProjectReference>
<ProjectReference Include="..\libbitcoin_node\libbitcoin_node.vcxproj">
<Project>{460fee33-1fe1-483f-b3bf-931ff8e969a5}</Project>
</ProjectReference>
<ProjectReference Include="..\libbitcoin_util\libbitcoin_util.vcxproj">
<Project>{b53a5535-ee9d-4c6f-9a26-f79ee3bc3754}</Project>
</ProjectReference>
<ProjectReference Include="..\libbitcoin_wallet\libbitcoin_wallet.vcxproj">
<Project>{93b86837-b543-48a5-a89b-7c87abb77df2}</Project>
</ProjectReference>
<ProjectReference Include="..\libbitcoin_zmq\libbitcoin_zmq.vcxproj">
<Project>{792d487f-f14c-49fc-a9de-3fc150f31c3f}</Project>
</ProjectReference>
<ProjectReference Include="..\libtest_util\libtest_util.vcxproj">
<Project>{1e065f03-3566-47d0-8fa9-daa72b084e7d}</Project>
</ProjectReference>
<ProjectReference Include="..\libunivalue\libunivalue.vcxproj">
<Project>{5724ba7d-a09a-4ba8-800b-c4c1561b3d69}</Project>
</ProjectReference>
<ProjectReference Include="..\libsecp256k1\libsecp256k1.vcxproj">
<Project>{bb493552-3b8c-4a8c-bf69-a6e7a51d2ea6}</Project>
</ProjectReference>
<ProjectReference Include="..\libleveldb\libleveldb.vcxproj">
<Project>{18430fef-6b61-4c53-b396-718e02850f1b}</Project>
</ProjectReference>
</ItemGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<Import Project="..\common.vcxproj" />
</Project>

View File

@@ -8,6 +8,7 @@
<ConfigurationType>StaticLibrary</ConfigurationType>
</PropertyGroup>
<ItemGroup>
<ClCompile Include="..\..\src\common\url.cpp" />
@SOURCE_FILES@
</ItemGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />

View File

@@ -15,6 +15,7 @@
<ClCompile Include="..\..\src\primitives\block.cpp" />
<ClCompile Include="..\..\src\primitives\transaction.cpp" />
<ClCompile Include="..\..\src\pubkey.cpp" />
<ClCompile Include="..\..\src\script\bitcoinconsensus.cpp" />
<ClCompile Include="..\..\src\script\interpreter.cpp" />
<ClCompile Include="..\..\src\script\script.cpp" />
<ClCompile Include="..\..\src\script\script_error.cpp" />

View File

@@ -59,6 +59,11 @@
<Project>{18430fef-6b61-4c53-b396-718e02850f1b}</Project>
</ProjectReference>
</ItemGroup>
<ItemDefinitionGroup>
<ClCompile>
<DisableSpecificWarnings>4018;4244;4267;4703;4715;4805</DisableSpecificWarnings>
</ClCompile>
</ItemDefinitionGroup>
<Target Name="RawBenchHeaderGen" BeforeTargets="PrepareForBuild">
<PropertyGroup>
<ErrorText>There was an error executing the JSON test header generation task.</ErrorText>

View File

@@ -1,8 +1,8 @@
# CI Scripts
## CI Scripts
This directory contains scripts for each build step in each build stage.
## Running a Stage Locally
### Running a Stage Locally
Be aware that the tests will be built and run in-place, so please run at your own risk.
If the repository is not a fresh git clone, you might have to clean files from previous builds or test runs first.
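
One blunt way to do that (destructive, and an assumption rather than a documented step; it removes all untracked and ignored files):

```sh
git clean -dxf   # wipe leftovers from previous builds and test runs
```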
@@ -27,7 +27,7 @@ with a specific configuration,
env -i HOME="$HOME" PATH="$PATH" USER="$USER" bash -c 'FILE_ENV="./ci/test/00_setup_env_arm.sh" ./ci/test_run_all.sh'
```
## Configurations
### Configurations
The test files (`FILE_ENV`) are constructed to test a wide range of
configurations, rather than a single pass/fail. This helps to catch build
@@ -49,32 +49,8 @@ env -i HOME="$HOME" PATH="$PATH" USER="$USER" bash -c 'MAKEJOBS="-j1" FILE_ENV="
The files starting with `0n` (`n` greater than 0) are the scripts that are run
in order.
## Cache
### Cache
In order to avoid rebuilding all dependencies for each build, the binaries are
cached and reused when possible. Changes in the dependency-generator will
trigger cache-invalidation and rebuilds as necessary.
## Configuring a repository for CI
### Primary repository
To configure the primary repository, follow these steps:
1. Register with [Cirrus Runners](https://cirrus-runners.app/) and purchase runners.
2. Install the Cirrus Runners GitHub app against the GitHub organization.
3. Enable organisation-level runners to be used in public repositories:
1. `Org settings -> Actions -> Runner Groups -> Default -> Allow public repos`
4. Permit the following actions to run:
1. cirruslabs/cache/restore@\*
1. cirruslabs/cache/save@\*
1. docker/setup-buildx-action@\*
1. actions/github-script@\*
### Forked repositories
When used in a fork, the CI will run on GitHub's free hosted runners by default.
In this case, due to GitHub's 10GB-per-repo cache size limit, caches will be frequently evicted and missed, but the workflows will still run (slowly).
It is also possible to use your own Cirrus Runners in your own fork with an appropriate patch to the `REPO_USE_CIRRUS_RUNNERS` variable in ../.github/workflows/ci.yml.
Note that Cirrus Runners only work at the organisation level; therefore, to use your own Cirrus Runners, *the fork must be within your own organisation*.
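
A hedged sketch of that patch (the fork path `your-org/bitcoin` is a placeholder):

```sh
# Point REPO_USE_CIRRUS_RUNNERS at your own organisation's fork of the repository.
sed -i "s|REPO_USE_CIRRUS_RUNNERS: 'bitcoin/bitcoin'|REPO_USE_CIRRUS_RUNNERS: 'your-org/bitcoin'|" \
  .github/workflows/ci.yml
```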

View File

@@ -1,14 +1,12 @@
#!/usr/bin/env bash
#
# Copyright (c) 2018-present The Bitcoin Core developers
# Copyright (c) 2018-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
export LC_ALL=C
export CI_RETRY_EXE="/ci_retry --"
pushd "/"
export PATH=$PWD/ci/retry:$PATH
${CI_RETRY_EXE} apt-get update
# Lint dependencies:
@@ -21,7 +19,7 @@ ${CI_RETRY_EXE} apt-get install -y automake pkg-config libtool curl xz-utils git
PYTHON_PATH="/python_build"
if [ ! -d "${PYTHON_PATH}/bin" ]; then
(
${CI_RETRY_EXE} git clone --depth=1 https://github.com/pyenv/pyenv.git
${CI_RETRY_EXE} git clone https://github.com/pyenv/pyenv.git
cd pyenv/plugins/python-build || exit 1
./install.sh
)
@@ -30,7 +28,7 @@ if [ ! -d "${PYTHON_PATH}/bin" ]; then
libbz2-dev libreadline-dev libsqlite3-dev curl llvm \
libncursesw5-dev xz-utils tk-dev libxml2-dev libxmlsec1-dev libffi-dev liblzma-dev \
clang
env CC=clang python-build "$(cat "/.python-version")" "${PYTHON_PATH}"
env CC=clang python-build "$(cat "./.python-version")" "${PYTHON_PATH}"
fi
export PATH="${PYTHON_PATH}/bin:${PATH}"
command -v python3
@@ -40,7 +38,7 @@ export LINT_RUNNER_PATH="/lint_test_runner"
if [ ! -d "${LINT_RUNNER_PATH}" ]; then
${CI_RETRY_EXE} apt-get install -y cargo
(
cd "/test/lint/test_runner" || exit 1
cd ./test/lint/test_runner || exit 1
cargo build
mkdir -p "${LINT_RUNNER_PATH}"
mv target/debug/test_runner "${LINT_RUNNER_PATH}"
@@ -48,22 +46,14 @@ if [ ! -d "${LINT_RUNNER_PATH}" ]; then
fi
${CI_RETRY_EXE} pip3 install \
codespell==2.2.6 \
codespell==2.2.5 \
flake8==6.1.0 \
lief==0.13.2 \
mypy==1.4.1 \
pyzmq==25.1.0 \
ruff==0.5.5 \
vulture==2.6
SHELLCHECK_VERSION=v0.8.0
curl -sL "https://github.com/koalaman/shellcheck/releases/download/${SHELLCHECK_VERSION}/shellcheck-${SHELLCHECK_VERSION}.linux.x86_64.tar.xz" | \
tar --xz -xf - --directory /tmp/
mv "/tmp/shellcheck-${SHELLCHECK_VERSION}/shellcheck" /usr/bin/
MLC_VERSION=v0.18.0
MLC_BIN=mlc-x86_64-linux
curl -sL "https://github.com/becheran/mlc/releases/download/${MLC_VERSION}/${MLC_BIN}" -o "/usr/bin/mlc"
chmod +x /usr/bin/mlc
popd || exit

View File

@@ -8,24 +8,21 @@ export LC_ALL=C
set -ex
if [ -n "$CIRRUS_PR" ]; then
if [ -n "$LOCAL_BRANCH" ]; then
# To faithfully recreate CI linting locally, specify all commits on the current
# branch.
COMMIT_RANGE="$(git merge-base HEAD master)..HEAD"
elif [ -n "$CIRRUS_PR" ]; then
COMMIT_RANGE="HEAD~..HEAD"
if [ "$(git rev-list -1 HEAD)" != "$(git rev-list -1 --merges HEAD)" ]; then
echo "Error: The top commit must be a merge commit, usually the remote 'pull/${PR_NUMBER}/merge' branch."
false
fi
echo
git log --no-merges --oneline "$COMMIT_RANGE"
echo
test/lint/commit-script-check.sh "$COMMIT_RANGE"
else
# Otherwise, assume that a merge commit exists. This merge commit is assumed
# to be the base, after which linting will be done. If the merge commit is
# HEAD, the range will be empty.
COMMIT_RANGE="$( git rev-list --max-count=1 --merges HEAD )..HEAD"
COMMIT_RANGE="SKIP_EMPTY_NOT_A_PR"
fi
export COMMIT_RANGE
echo
git log --no-merges --oneline "$COMMIT_RANGE"
echo
test/lint/commit-script-check.sh "$COMMIT_RANGE"
RUST_BACKTRACE=1 "${LINT_RUNNER_PATH}/test_runner"
if [ "$CIRRUS_REPO_FULL_NAME" = "bitcoin/bitcoin" ] && [ "$CIRRUS_PR" = "" ] ; then

View File

@@ -14,7 +14,7 @@ export PATH="/python_build/bin:${PATH}"
export LINT_RUNNER_PATH="/lint_test_runner"
if [ -z "$1" ]; then
bash -ic "./ci/lint/06_script.sh"
LOCAL_BRANCH=1 bash -ic "./ci/lint/06_script.sh"
else
exec "$@"
fi

View File

@@ -4,12 +4,11 @@
# See test/lint/README.md for usage.
FROM docker.io/debian:bookworm
FROM debian:bookworm
ENV DEBIAN_FRONTEND=noninteractive
ENV LC_ALL=C.UTF-8
COPY ./ci/retry/retry /ci_retry
COPY ./.python-version /.python-version
COPY ./ci/lint/container-entrypoint.sh /entrypoint.sh
COPY ./ci/lint/04_install.sh /install.sh

ci/lint_run_all.sh Executable file
View File

@@ -0,0 +1,12 @@
#!/usr/bin/env bash
#
# Copyright (c) 2019-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
export LC_ALL=C.UTF-8
set -o errexit; source ./ci/test/00_setup_env.sh
set -o errexit; source ./ci/lint/04_install.sh
set -o errexit
./ci/lint/06_script.sh

View File

@@ -35,7 +35,7 @@ fi
echo "Fallback to default values in env (if not yet set)"
# The number of parallel jobs to pass down to make and test_runner.py
export MAKEJOBS=${MAKEJOBS:--j$(if command -v nproc > /dev/null 2>&1; then nproc; else sysctl -n hw.logicalcpu; fi)}
export MAKEJOBS=${MAKEJOBS:--j4}
# Whether to prefer BusyBox over GNU utilities
export USE_BUSY_BOX=${USE_BUSY_BOX:-false}

ci/test/00_setup_env_android.sh Executable file
View File

@@ -0,0 +1,25 @@
#!/usr/bin/env bash
#
# Copyright (c) 2019-present The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
export LC_ALL=C.UTF-8
export HOST=aarch64-linux-android
export PACKAGES="unzip openjdk-8-jdk gradle"
export CONTAINER_NAME=ci_android
export CI_IMAGE_NAME_TAG="docker.io/amd64/ubuntu:22.04"
export RUN_UNIT_TESTS=false
export RUN_FUNCTIONAL_TESTS=false
export ANDROID_API_LEVEL=28
export ANDROID_BUILD_TOOLS_VERSION=28.0.3
export ANDROID_NDK_VERSION=23.2.8568313
export ANDROID_TOOLS_URL=https://dl.google.com/android/repository/commandlinetools-linux-8512546_latest.zip
export ANDROID_HOME="${DEPENDS_DIR}/SDKs/android"
export ANDROID_NDK_HOME="${ANDROID_HOME}/ndk/${ANDROID_NDK_VERSION}"
export DEP_OPTS="ANDROID_SDK=${ANDROID_HOME} ANDROID_NDK=${ANDROID_NDK_HOME} ANDROID_API_LEVEL=${ANDROID_API_LEVEL} ANDROID_TOOLCHAIN_BIN=${ANDROID_NDK_HOME}/toolchains/llvm/prebuilt/linux-x86_64/bin/"
export BITCOIN_CONFIG="--disable-tests --enable-gui-tests --disable-bench --disable-fuzz-binary --without-utils --without-libs --without-daemon"

View File

@@ -1,6 +1,6 @@
#!/usr/bin/env bash
#
# Copyright (c) 2019-present The Bitcoin Core developers
# Copyright (c) 2019-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
@@ -10,11 +10,11 @@ export HOST=arm-linux-gnueabihf
export DPKG_ADD_ARCH="armhf"
export PACKAGES="python3-zmq g++-arm-linux-gnueabihf busybox libc6:armhf libstdc++6:armhf libfontconfig1:armhf libxcb1:armhf"
export CONTAINER_NAME=ci_arm_linux
export CI_IMAGE_NAME_TAG="docker.io/arm64v8/debian:bookworm" # Check that https://packages.debian.org/bookworm/g++-arm-linux-gnueabihf (version 12.2, similar to guix) can cross-compile
export CI_IMAGE_NAME_TAG="docker.io/arm64v8/debian:bookworm"
export USE_BUSY_BOX=true
export RUN_UNIT_TESTS=true
export RUN_FUNCTIONAL_TESTS=false
export GOAL="install"
# -Wno-psabi is to disable ABI warnings: "note: parameter passing for argument of type ... changed in GCC 7.1"
# This could be removed once the ABI change warning does not show up by default
export BITCOIN_CONFIG="--enable-reduce-exports CXXFLAGS='-Wno-psabi -Wno-error=maybe-uninitialized'"
export BITCOIN_CONFIG="--enable-reduce-exports CXXFLAGS=-Wno-psabi"

View File

@@ -8,11 +8,10 @@ export LC_ALL=C.UTF-8
export HOST=i686-pc-linux-gnu
export CONTAINER_NAME=ci_i686_multiprocess
export CI_IMAGE_NAME_TAG="docker.io/amd64/ubuntu:24.04"
export CI_IMAGE_NAME_TAG="docker.io/amd64/ubuntu:22.04"
export PACKAGES="llvm clang g++-multilib"
export DEP_OPTS="DEBUG=1 MULTIPROCESS=1"
export GOAL="install"
export TEST_RUNNER_EXTRA="--v2transport"
export BITCOIN_CONFIG="--enable-debug CC='clang -m32' CXX='clang++ -m32' \
CPPFLAGS='-DBOOST_MULTI_INDEX_ENABLE_SAFE_MODE' CXXFLAGS='-Wno-error=documentation'"
CPPFLAGS='-DBOOST_MULTI_INDEX_ENABLE_SAFE_MODE'"
export BITCOIND=bitcoin-node # Used in functional tests

View File

@@ -9,9 +9,9 @@ export LC_ALL=C.UTF-8
export SDK_URL=${SDK_URL:-https://bitcoincore.org/depends-sources/sdks}
export CONTAINER_NAME=ci_macos_cross
export CI_IMAGE_NAME_TAG="docker.io/ubuntu:24.04"
export CI_IMAGE_NAME_TAG="docker.io/ubuntu:22.04"
export HOST=x86_64-apple-darwin
export PACKAGES="clang lld llvm zip"
export PACKAGES="zip"
export XCODE_VERSION=15.0
export XCODE_BUILD_ID=15A240d
export RUN_UNIT_TESTS=false

View File

@@ -7,10 +7,7 @@
export LC_ALL=C.UTF-8
export HOST=x86_64-apple-darwin
# Homebrew's python@3.12 is marked as externally managed (PEP 668).
# Therefore, `--break-system-packages` is needed.
export CONTAINER_NAME="ci_mac_native" # macos does not use a container, but the env var is needed for logging
export PIP_PACKAGES="--break-system-packages zmq"
export PIP_PACKAGES="zmq"
export GOAL="install"
export BITCOIN_CONFIG="--with-gui --with-miniupnpc --with-natpmp --enable-reduce-exports"
export CI_OS_NAME="macos"

View File

@@ -7,10 +7,8 @@
export LC_ALL=C.UTF-8
export CI_IMAGE_NAME_TAG="docker.io/ubuntu:24.04"
# Only install BCC tracing packages in CI. Container has to match the host for BCC to work.
if [[ "${INSTALL_BCC_TRACING_TOOLS}" == "true" ]]; then
# Required for USDT functional tests to run
# Only install BCC tracing packages in Cirrus CI.
if [[ "${CIRRUS_CI}" == "true" ]]; then
BPFCC_PACKAGE="bpfcc-tools linux-headers-$(uname --kernel-release)"
export CI_CONTAINER_CAP="--privileged -v /sys/kernel:/sys/kernel:rw"
else
@@ -19,11 +17,10 @@ else
fi
export CONTAINER_NAME=ci_native_asan
export PACKAGES="systemtap-sdt-dev clang-18 llvm-18 libclang-rt-18-dev python3-zmq qtbase5-dev qttools5-dev qttools5-dev-tools libevent-dev libboost-dev libdb5.3++-dev libminiupnpc-dev libnatpmp-dev libzmq3-dev libqrencode-dev libsqlite3-dev ${BPFCC_PACKAGE}"
export PACKAGES="systemtap-sdt-dev clang-17 llvm-17 libclang-rt-17-dev python3-zmq qtbase5-dev qttools5-dev-tools libevent-dev libboost-dev libdb5.3++-dev libminiupnpc-dev libnatpmp-dev libzmq3-dev libqrencode-dev libsqlite3-dev ${BPFCC_PACKAGE}"
export NO_DEPENDS=1
export GOAL="install"
export BITCOIN_CONFIG="--enable-usdt --enable-zmq --with-incompatible-bdb --with-gui=qt5 \
export BITCOIN_CONFIG="--enable-c++20 --enable-usdt --enable-zmq --with-incompatible-bdb --with-gui=qt5 \
CPPFLAGS='-DARENA_DEBUG -DDEBUG_LOCKORDER' \
--with-sanitizers=address,float-divide-by-zero,integer,undefined \
CC='clang-18 -ftrivial-auto-var-init=pattern' CXX='clang++-18 -ftrivial-auto-var-init=pattern'"
export CCACHE_MAXSIZE=300M
CC='clang-17 -ftrivial-auto-var-init=pattern' CXX='clang++-17 -ftrivial-auto-var-init=pattern'"

View File

@@ -8,7 +8,7 @@ export LC_ALL=C.UTF-8
export CI_IMAGE_NAME_TAG="docker.io/ubuntu:24.04"
export CONTAINER_NAME=ci_native_fuzz
export PACKAGES="clang-18 llvm-18 libclang-rt-18-dev libevent-dev libboost-dev libsqlite3-dev"
export PACKAGES="clang-17 llvm-17 libclang-rt-17-dev libevent-dev libboost-dev libsqlite3-dev"
export NO_DEPENDS=1
export RUN_UNIT_TESTS=false
export RUN_FUNCTIONAL_TESTS=false
@@ -16,6 +16,6 @@ export RUN_FUZZ_TESTS=true
export GOAL="install"
export CI_CONTAINER_CAP="--cap-add SYS_PTRACE" # If run with (ASan + LSan), the container needs access to ptrace (https://github.com/google/sanitizers/issues/764)
export BITCOIN_CONFIG="--enable-fuzz --with-sanitizers=fuzzer,address,undefined,float-divide-by-zero,integer \
CC='clang-18 -ftrivial-auto-var-init=pattern' CXX='clang++-18 -ftrivial-auto-var-init=pattern'"
CC='clang-17 -ftrivial-auto-var-init=pattern' CXX='clang++-17 -ftrivial-auto-var-init=pattern'"
export CCACHE_MAXSIZE=200M
export LLVM_SYMBOLIZER_PATH="/usr/bin/llvm-symbolizer-18"
export LLVM_SYMBOLIZER_PATH="/usr/bin/llvm-symbolizer-17"

View File

@@ -6,7 +6,7 @@
export LC_ALL=C.UTF-8
export CI_IMAGE_NAME_TAG="docker.io/ubuntu:24.04"
export CI_IMAGE_NAME_TAG="docker.io/ubuntu:22.04"
LIBCXX_DIR="/msan/cxx_build/"
export MSAN_FLAGS="-fsanitize=memory -fsanitize-memory-track-origins=2 -fno-omit-frame-pointer -g -O1 -fno-optimize-sibling-calls"
LIBCXX_FLAGS="-nostdinc++ -nostdlib++ -isystem ${LIBCXX_DIR}include/c++/v1 -L${LIBCXX_DIR}lib -Wl,-rpath,${LIBCXX_DIR}lib -lc++ -lc++abi -lpthread -Wno-unused-command-line-argument"
@@ -17,8 +17,7 @@ export PACKAGES="ninja-build"
# BDB generates false-positives and will be removed in future
export DEP_OPTS="DEBUG=1 NO_BDB=1 NO_QT=1 CC=clang CXX=clang++ CFLAGS='${MSAN_FLAGS}' CXXFLAGS='${MSAN_AND_LIBCXX_FLAGS}'"
export GOAL="install"
# _FORTIFY_SOURCE is not compatible with MSAN.
export BITCOIN_CONFIG="--enable-fuzz --with-sanitizers=fuzzer,memory CPPFLAGS='-DBOOST_MULTI_INDEX_ENABLE_SAFE_MODE -U_FORTIFY_SOURCE'"
export BITCOIN_CONFIG="--enable-fuzz --with-sanitizers=fuzzer,memory --disable-hardening --with-asm=no CFLAGS='${MSAN_FLAGS}' CPPFLAGS='-DBOOST_MULTI_INDEX_ENABLE_SAFE_MODE' CXXFLAGS='${MSAN_AND_LIBCXX_FLAGS}'"
export USE_MEMORY_SANITIZER="true"
export RUN_UNIT_TESTS="false"
export RUN_FUNCTIONAL_TESTS="false"

View File

@@ -6,15 +6,15 @@
export LC_ALL=C.UTF-8
export CI_IMAGE_NAME_TAG="docker.io/ubuntu:24.04"
export CI_IMAGE_NAME_TAG="docker.io/debian:bookworm"
export CONTAINER_NAME=ci_native_fuzz_valgrind
export PACKAGES="clang-16 llvm-16 libclang-rt-16-dev libevent-dev libboost-dev libsqlite3-dev valgrind"
export PACKAGES="clang llvm libclang-rt-dev libevent-dev libboost-dev libsqlite3-dev valgrind"
export NO_DEPENDS=1
export RUN_UNIT_TESTS=false
export RUN_FUNCTIONAL_TESTS=false
export RUN_FUZZ_TESTS=true
export FUZZ_TESTS_CONFIG="--valgrind"
export GOAL="install"
export BITCOIN_CONFIG="--enable-fuzz --with-sanitizers=fuzzer CC=clang-16 CXX=clang++-16"
# Temporarily pin dwarf 4, until using Valgrind 3.20 or later
export BITCOIN_CONFIG="--enable-fuzz --with-sanitizers=fuzzer CC=clang CXX=clang++ CFLAGS=-gdwarf-4 CXXFLAGS=-gdwarf-4"
export CCACHE_MAXSIZE=200M
export LLVM_SYMBOLIZER_PATH="/usr/bin/llvm-symbolizer-16"

View File

@@ -6,7 +6,7 @@
export LC_ALL=C.UTF-8
export CI_IMAGE_NAME_TAG="docker.io/ubuntu:24.04"
export CI_IMAGE_NAME_TAG="docker.io/ubuntu:22.04"
LIBCXX_DIR="/msan/cxx_build/"
export MSAN_FLAGS="-fsanitize=memory -fsanitize-memory-track-origins=2 -fno-omit-frame-pointer -g -O1 -fno-optimize-sibling-calls"
LIBCXX_FLAGS="-nostdinc++ -nostdlib++ -isystem ${LIBCXX_DIR}include/c++/v1 -L${LIBCXX_DIR}lib -Wl,-rpath,${LIBCXX_DIR}lib -lc++ -lc++abi -lpthread -Wno-unused-command-line-argument"
@@ -17,8 +17,7 @@ export PACKAGES="ninja-build"
# BDB generates false-positives and will be removed in future
export DEP_OPTS="DEBUG=1 NO_BDB=1 NO_QT=1 CC=clang CXX=clang++ CFLAGS='${MSAN_FLAGS}' CXXFLAGS='${MSAN_AND_LIBCXX_FLAGS}'"
export GOAL="install"
# _FORTIFY_SOURCE is not compatible with MSAN.
export BITCOIN_CONFIG="--with-sanitizers=memory CPPFLAGS='-U_FORTIFY_SOURCE'"
export BITCOIN_CONFIG="--with-sanitizers=memory --disable-hardening --with-asm=no CFLAGS='${MSAN_FLAGS}' CXXFLAGS='${MSAN_AND_LIBCXX_FLAGS}'"
export USE_MEMORY_SANITIZER="true"
export RUN_FUNCTIONAL_TESTS="false"
export CCACHE_MAXSIZE=250M

View File

@@ -7,9 +7,9 @@
export LC_ALL=C.UTF-8
export CONTAINER_NAME=ci_native_nowallet_libbitcoinkernel
export CI_IMAGE_NAME_TAG="docker.io/debian:bullseye"
# Use minimum supported python3.9 and clang-16, see doc/dependencies.md
export PACKAGES="python3-zmq clang-16 llvm-16 libc++abi-16-dev libc++-16-dev"
export DEP_OPTS="NO_WALLET=1 CC=clang-16 CXX='clang++-16 -stdlib=libc++'"
export CI_IMAGE_NAME_TAG="docker.io/ubuntu:22.04"
# Use minimum supported python3.9 (or best-effort 3.10) and clang-14, see doc/dependencies.md
export PACKAGES="python3-zmq clang-14 llvm-14 libc++abi-14-dev libc++-14-dev"
export DEP_OPTS="NO_WALLET=1 CC=clang-14 CXX='clang++-14 -stdlib=libc++'"
export GOAL="install"
export BITCOIN_CONFIG="--enable-reduce-exports --enable-experimental-util-chainstate --with-experimental-kernel-lib --enable-shared"

View File

@@ -7,14 +7,14 @@
export LC_ALL=C.UTF-8
export CONTAINER_NAME=ci_native_previous_releases
export CI_IMAGE_NAME_TAG="docker.io/ubuntu:22.04"
# Use minimum supported python3.9 (or best effort 3.10) and gcc-11, see doc/dependencies.md
export PACKAGES="gcc-11 g++-11 python3-zmq"
export DEP_OPTS="NO_UPNP=1 NO_NATPMP=1 DEBUG=1 CC=gcc-11 CXX=g++-11"
export CI_IMAGE_NAME_TAG="docker.io/debian:bullseye"
# Use minimum supported python3.9 and gcc-10, see doc/dependencies.md
export PACKAGES="gcc-10 g++-10 python3-zmq"
export DEP_OPTS="NO_UPNP=1 NO_NATPMP=1 DEBUG=1 CC=gcc-10 CXX=g++-10"
export TEST_RUNNER_EXTRA="--previous-releases --coverage --extended --exclude feature_dbcrash" # Run extended tests so that coverage does not fail, but exclude the very slow dbcrash
export RUN_UNIT_TESTS_SEQUENTIAL="true"
export RUN_UNIT_TESTS="false"
export GOAL="install"
export DOWNLOAD_PREVIOUS_RELEASES="true"
export BITCOIN_CONFIG="--enable-zmq --with-gui=qt5 --enable-reduce-exports --enable-debug \
export BITCOIN_CONFIG="--enable-zmq --with-libs=no --with-gui=qt5 --enable-reduce-exports --enable-debug \
CFLAGS=\"-g0 -O2 -funsigned-char\" CPPFLAGS='-DBOOST_MULTI_INDEX_ENABLE_SAFE_MODE' CXXFLAGS=\"-g0 -O2 -funsigned-char\""

View File

@@ -8,13 +8,13 @@ export LC_ALL=C.UTF-8
export CI_IMAGE_NAME_TAG="docker.io/ubuntu:24.04"
export CONTAINER_NAME=ci_native_tidy
export TIDY_LLVM_V="18"
export PACKAGES="clang-${TIDY_LLVM_V} libclang-${TIDY_LLVM_V}-dev llvm-${TIDY_LLVM_V}-dev libomp-${TIDY_LLVM_V}-dev clang-tidy-${TIDY_LLVM_V} jq bear libevent-dev libboost-dev libminiupnpc-dev libnatpmp-dev libzmq3-dev systemtap-sdt-dev qtbase5-dev qttools5-dev qttools5-dev-tools libqrencode-dev libsqlite3-dev libdb++-dev"
export TIDY_LLVM_V="17"
export PACKAGES="clang-${TIDY_LLVM_V} libclang-${TIDY_LLVM_V}-dev llvm-${TIDY_LLVM_V}-dev libomp-${TIDY_LLVM_V}-dev clang-tidy-${TIDY_LLVM_V} jq bear libevent-dev libboost-dev libminiupnpc-dev libnatpmp-dev libzmq3-dev systemtap-sdt-dev libqt5gui5 libqt5core5a libqt5dbus5 qttools5-dev qttools5-dev-tools libqrencode-dev libsqlite3-dev libdb++-dev"
export NO_DEPENDS=1
export RUN_UNIT_TESTS=false
export RUN_FUNCTIONAL_TESTS=false
export RUN_FUZZ_TESTS=false
export RUN_TIDY=true
export GOAL="install"
export BITCOIN_CONFIG="CC=clang-${TIDY_LLVM_V} CXX=clang++-${TIDY_LLVM_V} --with-incompatible-bdb --disable-hardening CFLAGS='-O0 -g0' CXXFLAGS='-O0 -g0'"
export BITCOIN_CONFIG="CC=clang-${TIDY_LLVM_V} CXX=clang++-${TIDY_LLVM_V} --with-incompatible-bdb --disable-hardening CFLAGS='-O0 -g0' CXXFLAGS='-O0 -g0 -I/usr/lib/llvm-${TIDY_LLVM_V}/lib/clang/${TIDY_LLVM_V}/include'"
export CCACHE_MAXSIZE=200M

View File

@@ -8,7 +8,7 @@ export LC_ALL=C.UTF-8
export CONTAINER_NAME=ci_native_tsan
export CI_IMAGE_NAME_TAG="docker.io/ubuntu:24.04"
export PACKAGES="clang-18 llvm-18 libclang-rt-18-dev libc++abi-18-dev libc++-18-dev python3-zmq"
export DEP_OPTS="CC=clang-18 CXX='clang++-18 -stdlib=libc++'"
export PACKAGES="clang-17 llvm-17 libclang-rt-17-dev libc++abi-17-dev libc++-17-dev python3-zmq"
export DEP_OPTS="CC=clang-17 CXX='clang++-17 -stdlib=libc++'"
export GOAL="install"
export BITCOIN_CONFIG="--enable-zmq CPPFLAGS='-DARENA_DEBUG -DDEBUG_LOCKORDER -DDEBUG_LOCKCONTENTION -D_LIBCPP_REMOVE_TRANSITIVE_INCLUDES' --with-sanitizers=thread"
export BITCOIN_CONFIG="--enable-zmq CPPFLAGS='-DARENA_DEBUG -DDEBUG_LOCKORDER -DDEBUG_LOCKCONTENTION' CXXFLAGS='-g' --with-sanitizers=thread"

View File

@@ -6,11 +6,12 @@
export LC_ALL=C.UTF-8
export CI_IMAGE_NAME_TAG="docker.io/ubuntu:24.04"
export CI_IMAGE_NAME_TAG="docker.io/debian:bookworm"
export CONTAINER_NAME=ci_native_valgrind
export PACKAGES="valgrind clang-16 llvm-16 libclang-rt-16-dev python3-zmq libevent-dev libboost-dev libdb5.3++-dev libminiupnpc-dev libnatpmp-dev libzmq3-dev libsqlite3-dev"
export PACKAGES="valgrind clang llvm libclang-rt-dev python3-zmq libevent-dev libboost-dev libdb5.3++-dev libminiupnpc-dev libnatpmp-dev libzmq3-dev libsqlite3-dev"
export USE_VALGRIND=1
export NO_DEPENDS=1
export TEST_RUNNER_EXTRA="--exclude feature_init,rpc_bind,feature_bind_extra" # feature_init excluded for now, see https://github.com/bitcoin/bitcoin/issues/30011 ; bind tests excluded for now, see https://github.com/bitcoin/bitcoin/issues/17765#issuecomment-602068547
export TEST_RUNNER_EXTRA="--exclude feature_init,rpc_bind,feature_bind_extra" # Excluded for now, see https://github.com/bitcoin/bitcoin/issues/17765#issuecomment-602068547
export GOAL="install"
export BITCOIN_CONFIG="--enable-zmq --with-incompatible-bdb --with-gui=no CC=clang-16 CXX=clang++-16" # TODO enable GUI
# Temporarily pin dwarf 4, until using Valgrind 3.20 or later
export BITCOIN_CONFIG="--enable-zmq --with-incompatible-bdb --with-gui=no CC=clang CXX=clang++ CFLAGS=-gdwarf-4 CXXFLAGS=-gdwarf-4" # TODO enable GUI

View File

@@ -9,8 +9,8 @@ export LC_ALL=C.UTF-8
export HOST=s390x-linux-gnu
export PACKAGES="python3-zmq"
export CONTAINER_NAME=ci_s390x
export CI_IMAGE_NAME_TAG="docker.io/s390x/ubuntu:24.04"
export TEST_RUNNER_EXTRA="--exclude rpc_bind,feature_bind_extra" # Excluded for now, see https://github.com/bitcoin/bitcoin/issues/17765#issuecomment-602068547
export CI_IMAGE_NAME_TAG="docker.io/s390x/debian:bookworm"
export TEST_RUNNER_EXTRA="--exclude feature_init,rpc_bind,feature_bind_extra" # Excluded for now, see https://github.com/bitcoin/bitcoin/issues/17765#issuecomment-602068547
export RUN_FUNCTIONAL_TESTS=true
export GOAL="install"
export BITCOIN_CONFIG="--enable-reduce-exports"

View File

@@ -7,7 +7,7 @@
export LC_ALL=C.UTF-8
export CONTAINER_NAME=ci_win64
export CI_IMAGE_NAME_TAG="docker.io/amd64/debian:bookworm" # Check that https://packages.debian.org/bookworm/g++-mingw-w64-x86-64-posix (version 12.2, similar to guix) can cross-compile
export CI_IMAGE_NAME_TAG="docker.io/amd64/ubuntu:22.04" # Check that Jammy can cross-compile to win64
export HOST=x86_64-w64-mingw32
export DPKG_ADD_ARCH="i386"
export PACKAGES="nsis g++-mingw-w64-x86-64-posix wine-binfmt wine64 wine32 file"
@@ -16,4 +16,4 @@ export GOAL="deploy"
# Prior to 11.0.0, the mingw-w64 headers were missing noreturn attributes, causing warnings when
# cross-compiling for Windows. https://sourceforge.net/p/mingw-w64/bugs/306/
# https://github.com/mingw-w64/mingw-w64/commit/1690994f515910a31b9fb7c7bd3a52d4ba987abe
export BITCOIN_CONFIG="--enable-reduce-exports --disable-gui-tests CXXFLAGS='-Wno-return-type -Wno-error=maybe-uninitialized -Wno-error=array-bounds'"
export BITCOIN_CONFIG="--enable-reduce-exports --disable-gui-tests CXXFLAGS=-Wno-return-type"

View File

@@ -1,6 +1,6 @@
#!/usr/bin/env bash
#
# Copyright (c) 2018-present The Bitcoin Core developers
# Copyright (c) 2018-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
@@ -36,7 +36,7 @@ if [ -n "$PIP_PACKAGES" ]; then
fi
if [[ ${USE_MEMORY_SANITIZER} == "true" ]]; then
${CI_RETRY_EXE} git clone --depth=1 https://github.com/llvm/llvm-project -b "llvmorg-18.1.3" /msan/llvm-project
${CI_RETRY_EXE} git clone --depth=1 https://github.com/llvm/llvm-project -b llvmorg-17.0.6 /msan/llvm-project
cmake -G Ninja -B /msan/clang_build/ \
-DLLVM_ENABLE_PROJECTS="clang" \
@@ -53,22 +53,17 @@ if [[ ${USE_MEMORY_SANITIZER} == "true" ]]; then
update-alternatives --install /usr/bin/llvm-symbolizer llvm-symbolizer /msan/clang_build/bin/llvm-symbolizer 100
cmake -G Ninja -B /msan/cxx_build/ \
-DLLVM_ENABLE_RUNTIMES="libcxx;libcxxabi;libunwind" \
-DLLVM_ENABLE_RUNTIMES='libcxx;libcxxabi' \
-DCMAKE_BUILD_TYPE=Release \
-DLLVM_USE_SANITIZER=MemoryWithOrigins \
-DCMAKE_C_COMPILER=clang \
-DCMAKE_CXX_COMPILER=clang++ \
-DLLVM_TARGETS_TO_BUILD=Native \
-DLLVM_ENABLE_PER_TARGET_RUNTIME_DIR=OFF \
-DLIBCXXABI_USE_LLVM_UNWINDER=OFF \
-DLIBCXX_HARDENING_MODE=debug \
-S /msan/llvm-project/runtimes
ninja -C /msan/cxx_build/ "-j$( nproc )" # Use nproc, because MAKEJOBS is the default in docker image builds
# Clear no longer needed source folder
du -sh /msan/llvm-project
rm -rf /msan/llvm-project
fi
if [[ "${RUN_TIDY}" == "true" ]]; then
@@ -90,4 +85,14 @@ if [ -n "$XCODE_VERSION" ] && [ ! -d "${DEPENDS_DIR}/SDKs/${OSX_SDK_BASENAME}" ]
tar -C "${DEPENDS_DIR}/SDKs" -xf "$OSX_SDK_PATH"
fi
if [ -n "$ANDROID_HOME" ] && [ ! -d "$ANDROID_HOME" ]; then
ANDROID_TOOLS_PATH=${DEPENDS_DIR}/sdk-sources/android-tools.zip
if [ ! -f "$ANDROID_TOOLS_PATH" ]; then
${CI_RETRY_EXE} curl --location --fail "${ANDROID_TOOLS_URL}" -o "$ANDROID_TOOLS_PATH"
fi
mkdir -p "$ANDROID_HOME"
unzip -o "$ANDROID_TOOLS_PATH" -d "$ANDROID_HOME"
yes | "${ANDROID_HOME}"/cmdline-tools/bin/sdkmanager --sdk_root="${ANDROID_HOME}" --install "build-tools;${ANDROID_BUILD_TOOLS_VERSION}" "platform-tools" "platforms;android-31" "platforms;android-${ANDROID_API_LEVEL}" "ndk;${ANDROID_NDK_VERSION}"
fi
git config --global ${CFG_DONE} "true"

View File

@@ -16,45 +16,19 @@ if [ -z "$DANGER_RUN_CI_ON_HOST" ]; then
# System-dependent env vars must be kept as is. So read them from the container.
docker run --rm "${CI_IMAGE_NAME_TAG}" bash -c "env | grep --extended-regexp '^(HOME|PATH|USER)='" | tee --append "/tmp/env-$USER-$CONTAINER_NAME"
echo "Creating $CI_IMAGE_NAME_TAG container to run in"
# Use buildx unconditionally
# Using buildx is required to properly load the correct driver, for use with registry caching. Neither build, nor BUILDKIT=1 currently do this properly
# shellcheck disable=SC2086
docker buildx build \
DOCKER_BUILDKIT=1 docker build \
--file "${BASE_READ_ONLY_DIR}/ci/test_imagefile" \
--build-arg "CI_IMAGE_NAME_TAG=${CI_IMAGE_NAME_TAG}" \
--build-arg "FILE_ENV=${FILE_ENV}" \
--build-arg "BASE_ROOT_DIR=${BASE_ROOT_DIR}" \
--label="${CI_IMAGE_LABEL}" \
$DOCKER_BUILD_CACHE_ARG \
--tag="${CONTAINER_NAME}" \
"${BASE_READ_ONLY_DIR}"
docker volume create "${CONTAINER_NAME}_ccache" || true
docker volume create "${CONTAINER_NAME}_depends" || true
docker volume create "${CONTAINER_NAME}_depends_sources" || true
docker volume create "${CONTAINER_NAME}_depends_SDKs_android" || true
docker volume create "${CONTAINER_NAME}_previous_releases" || true
CI_CCACHE_MOUNT="type=volume,src=${CONTAINER_NAME}_ccache,dst=$CCACHE_DIR"
CI_DEPENDS_MOUNT="type=volume,src=${CONTAINER_NAME}_depends,dst=$DEPENDS_DIR/built"
CI_DEPENDS_SOURCES_MOUNT="type=volume,src=${CONTAINER_NAME}_depends_sources,dst=$DEPENDS_DIR/sources"
CI_PREVIOUS_RELEASES_MOUNT="type=volume,src=${CONTAINER_NAME}_previous_releases,dst=$PREVIOUS_RELEASES_DIR"
if [ "$DANGER_CI_ON_HOST_CACHE_FOLDERS" ]; then
# ensure the directories exist
mkdir -p "${CCACHE_DIR}"
mkdir -p "${DEPENDS_DIR}/built"
mkdir -p "${DEPENDS_DIR}/sources"
mkdir -p "${PREVIOUS_RELEASES_DIR}"
CI_CCACHE_MOUNT="type=bind,src=${CCACHE_DIR},dst=$CCACHE_DIR"
CI_DEPENDS_MOUNT="type=bind,src=${DEPENDS_DIR}/built,dst=$DEPENDS_DIR/built"
CI_DEPENDS_SOURCES_MOUNT="type=bind,src=${DEPENDS_DIR}/sources,dst=$DEPENDS_DIR/sources"
CI_PREVIOUS_RELEASES_MOUNT="type=bind,src=${PREVIOUS_RELEASES_DIR},dst=$PREVIOUS_RELEASES_DIR"
fi
docker network create --ipv6 --subnet 1111:1111::/112 ci-ip6net || true
if [ -n "${RESTART_CI_DOCKER_BEFORE_RUN}" ] ; then
echo "Restart docker before run to stop and clear all containers started with --rm"
podman container rm --force --all # Similar to "systemctl restart docker"
@@ -75,13 +49,13 @@ if [ -z "$DANGER_RUN_CI_ON_HOST" ]; then
# shellcheck disable=SC2086
CI_CONTAINER_ID=$(docker run --cap-add LINUX_IMMUTABLE $CI_CONTAINER_CAP --rm --interactive --detach --tty \
--mount "type=bind,src=$BASE_READ_ONLY_DIR,dst=$BASE_READ_ONLY_DIR,readonly" \
--mount "${CI_CCACHE_MOUNT}" \
--mount "${CI_DEPENDS_MOUNT}" \
--mount "${CI_DEPENDS_SOURCES_MOUNT}" \
--mount "${CI_PREVIOUS_RELEASES_MOUNT}" \
--mount "type=volume,src=${CONTAINER_NAME}_ccache,dst=$CCACHE_DIR" \
--mount "type=volume,src=${CONTAINER_NAME}_depends,dst=$DEPENDS_DIR/built" \
--mount "type=volume,src=${CONTAINER_NAME}_depends_sources,dst=$DEPENDS_DIR/sources" \
--mount "type=volume,src=${CONTAINER_NAME}_depends_SDKs_android,dst=$DEPENDS_DIR/SDKs/android" \
--mount "type=volume,src=${CONTAINER_NAME}_previous_releases,dst=$PREVIOUS_RELEASES_DIR" \
--env-file /tmp/env-$USER-$CONTAINER_NAME \
--name "$CONTAINER_NAME" \
--network ci-ip6net \
"$CONTAINER_NAME")
export CI_CONTAINER_ID
export CI_EXEC_CMD_PREFIX="docker exec ${CI_CONTAINER_ID}"

View File

@@ -8,9 +8,9 @@ export LC_ALL=C.UTF-8
set -ex
export ASAN_OPTIONS="detect_leaks=1:detect_stack_use_after_return=1:check_initialization_order=1:strict_init_order=1"
export ASAN_OPTIONS="detect_stack_use_after_return=1:check_initialization_order=1:strict_init_order=1"
export LSAN_OPTIONS="suppressions=${BASE_ROOT_DIR}/test/sanitizer_suppressions/lsan"
export TSAN_OPTIONS="suppressions=${BASE_ROOT_DIR}/test/sanitizer_suppressions/tsan:halt_on_error=1"
export TSAN_OPTIONS="suppressions=${BASE_ROOT_DIR}/test/sanitizer_suppressions/tsan:halt_on_error=1:log_path=${BASE_SCRATCH_DIR}/sanitizer-output/tsan"
export UBSAN_OPTIONS="suppressions=${BASE_ROOT_DIR}/test/sanitizer_suppressions/ubsan:print_stacktrace=1:halt_on_error=1:report_error_type=1"
if [ "$CI_OS_NAME" == "macos" ]; then
@@ -25,14 +25,6 @@ fi
echo "Free disk space:"
df -h
# We force an install of linux-headers again here via $PACKAGES to fix any
# kernel mismatch between a cached docker image and the underlying host.
# This can happen occasionally on hosted runners if the runner image is updated.
if [[ "$CONTAINER_NAME" == "ci_native_asan" ]]; then
$CI_RETRY_EXE apt-get update
${CI_RETRY_EXE} bash -c "apt-get install --no-install-recommends --no-upgrade -y $PACKAGES"
fi
# What host to compile for. See also ./depends/README.md
# Tests that need cross-compilation export the appropriate HOST.
# Tests that run natively guess the host
@@ -79,6 +71,8 @@ elif [ "$RUN_UNIT_TESTS" = "true" ] || [ "$RUN_UNIT_TESTS_SEQUENTIAL" = "true" ]
fi
fi
mkdir -p "${BASE_SCRATCH_DIR}/sanitizer-output/"
if [ "$USE_BUSY_BOX" = "true" ]; then
echo "Setup to use BusyBox utils"
# tar excluded for now because it requires passing in the exact archive type in ./depends (fixed in later BusyBox version)
@@ -118,6 +112,15 @@ fi
ccache --zero-stats
PRINT_CCACHE_STATISTICS="ccache --version | head -n 1 && ccache --show-stats"
if [ -n "$ANDROID_TOOLS_URL" ]; then
make distclean || true
./autogen.sh
bash -c "./configure $BITCOIN_CONFIG_ALL $BITCOIN_CONFIG" || ( (cat config.log) && false)
make "${MAKEJOBS}" && cd src/qt && ANDROID_HOME=${ANDROID_HOME} ANDROID_NDK_HOME=${ANDROID_NDK_HOME} make apk
bash -c "${PRINT_CCACHE_STATISTICS}"
exit 0
fi
BITCOIN_CONFIG_ALL="${BITCOIN_CONFIG_ALL} --enable-external-signer --prefix=$BASE_OUTDIR"
if [ -n "$CONFIG_SHELL" ]; then
@@ -145,12 +148,6 @@ fi
bash -c "${MAYBE_BEAR} ${MAYBE_TOKEN} make $MAKEJOBS $GOAL" || ( echo "Build failure. Verbose build follows." && make "$GOAL" V=1 ; false )
bash -c "${PRINT_CCACHE_STATISTICS}"
if [ "$CI" = "true" ]; then
hit_rate=$(ccache -s | grep "Hits:" | head -1 | sed 's/.*(\(.*\)%).*/\1/')
if [ "${hit_rate%.*}" -lt 75 ]; then
echo "::notice title=low ccache hitrate::Ccache hit-rate in $CONTAINER_NAME was $hit_rate%"
fi
fi
du -sh "${DEPENDS_DIR}"/*/
du -sh "${PREVIOUS_RELEASES_DIR}"
@@ -185,11 +182,7 @@ if [ "${RUN_TIDY}" = "true" ]; then
set -eo pipefail
cd "${BASE_BUILD_DIR}/bitcoin-$HOST/src/"
if ! ( run-clang-tidy-"${TIDY_LLVM_V}" -quiet -load="/tidy-build/libbitcoin-tidy.so" "${MAKEJOBS}" | tee tmp.tidy-out.txt ); then
grep -C5 "error: " tmp.tidy-out.txt
echo "^^^ ⚠️ Failure generated from clang-tidy"
false
fi
( run-clang-tidy-"${TIDY_LLVM_V}" -quiet -load="/tidy-build/libbitcoin-tidy.so" "${MAKEJOBS}" ) | grep -C5 "error"
# Filter out files by regex here, because regex may not be
# accepted in src/.bear-tidy-config
# Filter out:

View File

@@ -4,16 +4,12 @@
# See ci/README.md for usage.
# We never want scratch, but default arg silences a Warning
ARG CI_IMAGE_NAME_TAG=scratch
ARG CI_IMAGE_NAME_TAG
FROM ${CI_IMAGE_NAME_TAG}
ARG FILE_ENV
ENV FILE_ENV=${FILE_ENV}
ARG BASE_ROOT_DIR
ENV BASE_ROOT_DIR=${BASE_ROOT_DIR}
COPY ./ci/retry/retry /usr/bin/retry
COPY ./ci/test/00_setup_env.sh ./${FILE_ENV} ./ci/test/01_base_install.sh /ci_container_base/ci/test/

View File

@@ -1,10 +1,10 @@
AC_PREREQ([2.69])
define(_CLIENT_VERSION_MAJOR, 28)
define(_CLIENT_VERSION_MINOR, 3)
define(_CLIENT_VERSION_MAJOR, 27)
define(_CLIENT_VERSION_MINOR, 0)
define(_CLIENT_VERSION_BUILD, 0)
define(_CLIENT_VERSION_RC, 0)
define(_CLIENT_VERSION_RC, 1)
define(_CLIENT_VERSION_IS_RELEASE, true)
define(_COPYRIGHT_YEAR, 2025)
define(_COPYRIGHT_YEAR, 2024)
define(_COPYRIGHT_HOLDERS,[The %s developers])
define(_COPYRIGHT_HOLDERS_SUBSTITUTION,[[Bitcoin Core]])
AC_INIT([Bitcoin Core],m4_join([.], _CLIENT_VERSION_MAJOR, _CLIENT_VERSION_MINOR, _CLIENT_VERSION_BUILD)m4_if(_CLIENT_VERSION_RC, [0], [], [rc]_CLIENT_VERSION_RC),[https://github.com/bitcoin/bitcoin/issues],[bitcoin],[https://bitcoincore.org/])
@@ -125,7 +125,6 @@ AC_PATH_PROG([GIT], [git])
AC_PATH_PROG([CCACHE], [ccache])
AC_PATH_PROG([XGETTEXT], [xgettext])
AC_PATH_PROG([HEXDUMP], [hexdump])
AC_PATH_TOOL([OBJDUMP], [objdump])
AC_PATH_TOOL([OBJCOPY], [objcopy])
AC_PATH_PROG([DOXYGEN], [doxygen])
AM_CONDITIONAL([HAVE_DOXYGEN], [test -n "$DOXYGEN"])
@@ -206,9 +205,9 @@ AC_ARG_WITH([qrencode],
AC_ARG_ENABLE([hardening],
[AS_HELP_STRING([--disable-hardening],
[do not attempt to harden the resulting executables (default is to harden)])],
[do not attempt to harden the resulting executables (default is to harden when possible)])],
[use_hardening=$enableval],
[use_hardening=yes])
[use_hardening=auto])
AC_ARG_ENABLE([reduce-exports],
[AS_HELP_STRING([--enable-reduce-exports],
@@ -238,6 +237,18 @@ AC_ARG_ENABLE([lcov],
[use_lcov=$enableval],
[use_lcov=no])
AC_ARG_ENABLE([lcov-branch-coverage],
[AS_HELP_STRING([--enable-lcov-branch-coverage],
[enable lcov testing branch coverage (default is no)])],
[use_lcov_branch=yes],
[use_lcov_branch=no])
AC_ARG_ENABLE([threadlocal],
[AS_HELP_STRING([--enable-threadlocal],
[enable features that depend on the c++ thread_local keyword (currently just thread names in debug logs). (default is to enable if there is platform support)])],
[use_thread_local=$enableval],
[use_thread_local=auto])
AC_ARG_ENABLE([zmq],
[AS_HELP_STRING([--disable-zmq],
[disable ZMQ notifications])],
@@ -281,6 +292,13 @@ AC_ARG_WITH([sanitizers],
[comma separated list of extra sanitizers to build with (default is none enabled)])],
[use_sanitizers=$withval])
dnl Enable gprof profiling
AC_ARG_ENABLE([gprof],
[AS_HELP_STRING([--enable-gprof],
[use gprof profiling compiler flags (default is no)])],
[enable_gprof=$enableval],
[enable_gprof=no])
dnl Turn warnings into errors
AC_ARG_ENABLE([werror],
[AS_HELP_STRING([--enable-werror],
@@ -289,9 +307,9 @@ AC_ARG_ENABLE([werror],
[enable_werror=no])
AC_ARG_ENABLE([external-signer],
[AS_HELP_STRING([--enable-external-signer],[compile external signer support (default is yes)])],
[AS_HELP_STRING([--enable-external-signer],[compile external signer support (default is auto, requires Boost::Process)])],
[use_external_signer=$enableval],
[use_external_signer=yes])
[use_external_signer=auto])
AC_LANG_PUSH([C++])
@@ -383,34 +401,38 @@ if test "$enable_werror" = "yes"; then
ERROR_CXXFLAGS=$CXXFLAG_WERROR
fi
AX_CHECK_COMPILE_FLAG([-Wall], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wall"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wextra], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wextra"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wgnu], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wgnu"], [], [$CXXFLAG_WERROR])
dnl some compilers will ignore -Wformat-security without -Wformat, so just combine the two here.
AX_CHECK_COMPILE_FLAG([-Wformat -Wformat-security], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wformat -Wformat-security"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wvla], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wvla"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wshadow-field], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wshadow-field"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wthread-safety], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wthread-safety"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wloop-analysis], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wloop-analysis"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wredundant-decls], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wredundant-decls"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wunused-member-function], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wunused-member-function"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wdate-time], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wdate-time"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wconditional-uninitialized], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wconditional-uninitialized"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wduplicated-branches], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wduplicated-branches"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wduplicated-cond], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wduplicated-cond"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wlogical-op], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wlogical-op"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Woverloaded-virtual], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Woverloaded-virtual"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wsuggest-override], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wsuggest-override"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wimplicit-fallthrough], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wimplicit-fallthrough"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wunreachable-code], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wunreachable-code"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wdocumentation], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wdocumentation"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wself-assign], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wself-assign"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wundef], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wundef"], [], [$CXXFLAG_WERROR])
if test "$CXXFLAGS_overridden" = "no"; then
AX_CHECK_COMPILE_FLAG([-Wall], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wall"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wextra], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wextra"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wgnu], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wgnu"], [], [$CXXFLAG_WERROR])
dnl some compilers will ignore -Wformat-security without -Wformat, so just combine the two here.
AX_CHECK_COMPILE_FLAG([-Wformat -Wformat-security], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wformat -Wformat-security"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wvla], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wvla"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wshadow-field], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wshadow-field"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wthread-safety], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wthread-safety"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wloop-analysis], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wloop-analysis"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wredundant-decls], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wredundant-decls"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wunused-member-function], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wunused-member-function"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wdate-time], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wdate-time"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wconditional-uninitialized], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wconditional-uninitialized"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wduplicated-branches], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wduplicated-branches"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wduplicated-cond], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wduplicated-cond"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wlogical-op], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wlogical-op"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Woverloaded-virtual], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Woverloaded-virtual"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wsuggest-override], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wsuggest-override"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wimplicit-fallthrough], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wimplicit-fallthrough"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wunreachable-code], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wunreachable-code"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wdocumentation], [WARN_CXXFLAGS="$WARN_CXXFLAGS -Wdocumentation"], [], [$CXXFLAG_WERROR])
dnl Some compilers (gcc) ignore unknown -Wno-* options, but warn about all
dnl unknown options if any other warning is produced. Test the -Wfoo case, and
dnl set the -Wno-foo case if it works.
AX_CHECK_COMPILE_FLAG([-Wunused-parameter], [NOWARN_CXXFLAGS="$NOWARN_CXXFLAGS -Wno-unused-parameter"], [], [$CXXFLAG_WERROR])
dnl Some compilers (gcc) ignore unknown -Wno-* options, but warn about all
dnl unknown options if any other warning is produced. Test the -Wfoo case, and
dnl set the -Wno-foo case if it works.
AX_CHECK_COMPILE_FLAG([-Wunused-parameter], [NOWARN_CXXFLAGS="$NOWARN_CXXFLAGS -Wno-unused-parameter"], [], [$CXXFLAG_WERROR])
AX_CHECK_COMPILE_FLAG([-Wself-assign], [NOWARN_CXXFLAGS="$NOWARN_CXXFLAGS -Wno-self-assign"], [], [$CXXFLAG_WERROR])
if test "$suppress_external_warnings" != "yes" ; then
AX_CHECK_COMPILE_FLAG([-Wdeprecated-copy], [NOWARN_CXXFLAGS="$NOWARN_CXXFLAGS -Wno-deprecated-copy"], [], [$CXXFLAG_WERROR])
fi
fi
dnl Don't allow extended (non-ASCII) symbols in identifiers. This is easier for code review.
AX_CHECK_COMPILE_FLAG([-fno-extended-identifiers], [CORE_CXXFLAGS="$CORE_CXXFLAGS -fno-extended-identifiers"], [], [$CXXFLAG_WERROR])
@@ -482,12 +504,11 @@ TEMP_CXXFLAGS="$CXXFLAGS"
CXXFLAGS="$SSE41_CXXFLAGS $CXXFLAGS"
AC_MSG_CHECKING([for SSE4.1 intrinsics])
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
#include <stdint.h>
#include <immintrin.h>
]],[[
__m128i a = _mm_set1_epi32(0);
__m128i b = _mm_set1_epi32(1);
__m128i r = _mm_blend_epi16(a, b, 0xFF);
return _mm_extract_epi32(r, 3);
__m128i l = _mm_set1_epi32(0);
return _mm_extract_epi32(l, 3);
]])],
[ AC_MSG_RESULT([yes]); enable_sse41=yes; AC_DEFINE([ENABLE_SSE41], [1], [Define this symbol to build code that uses SSE4.1 intrinsics]) ],
[ AC_MSG_RESULT([no])]
@@ -605,9 +626,15 @@ AC_ARG_ENABLE([experimental-util-chainstate],
[build_bitcoin_chainstate=$enableval],
[build_bitcoin_chainstate=no])
AC_ARG_WITH([libs],
[AS_HELP_STRING([--with-libs],
[build libraries (default=yes)])],
[build_bitcoin_libs=$withval],
[build_bitcoin_libs=yes])
AC_ARG_WITH([experimental-kernel-lib],
[AS_HELP_STRING([--with-experimental-kernel-lib],
[build experimental bitcoinkernel library (default is to build if we're building the experimental build-chainstate executable)])],
[build experimental bitcoinkernel library (default is to build if we're building libraries and the experimental build-chainstate executable)])],
[build_experimental_kernel_lib=$withval],
[build_experimental_kernel_lib=auto])
@@ -675,9 +702,6 @@ case $host in
TARGET_OS=darwin
if test $cross_compiling != "yes"; then
BUILD_OS=darwin
AX_CHECK_LINK_FLAG([-Wl,-headerpad_max_install_names], [CORE_LDFLAGS="$CORE_LDFLAGS -Wl,-headerpad_max_install_names"], [], [$LDFLAG_WERROR])
AC_CHECK_PROG([BREW], [brew], [brew])
if test "$BREW" = "brew"; then
dnl These Homebrew packages may be keg-only, meaning that they won't be found
@@ -741,6 +765,8 @@ case $host in
;;
*)
AC_PATH_TOOL([DSYMUTIL], [dsymutil], [dsymutil])
AC_PATH_TOOL([INSTALL_NAME_TOOL], [install_name_tool], [install_name_tool])
AC_PATH_TOOL([OTOOL], [otool], [otool])
AC_PATH_PROG([ZIP], [zip], [zip])
dnl libtool will try to strip the static lib, which is a problem for
@@ -752,11 +778,25 @@ case $host in
esac
fi
AX_CHECK_LINK_FLAG([-Wl,-headerpad_max_install_names], [CORE_LDFLAGS="$CORE_LDFLAGS -Wl,-headerpad_max_install_names"], [], [$LDFLAG_WERROR])
CORE_CPPFLAGS="$CORE_CPPFLAGS -DMAC_OSX -DOBJC_OLD_DISPATCH_PROTOTYPES=0"
dnl ignore deprecated-declarations warnings coming from objcxx code
dnl "'NSUserNotificationCenter' is deprecated: first deprecated in macOS 11.0".
OBJCXXFLAGS="$CXXFLAGS -Wno-deprecated-declarations"
OBJCXXFLAGS="$CXXFLAGS"
;;
*android*)
dnl make sure android stays above linux for hosts like *linux-android*
TARGET_OS=android
case $host in
*x86_64*)
ANDROID_ARCH=x86_64
;;
*aarch64*)
ANDROID_ARCH=arm64-v8a
;;
*armv7a*)
ANDROID_ARCH=armeabi-v7a
;;
*) AC_MSG_ERROR([Could not determine Android arch, or it is unsupported]) ;;
esac
;;
*linux*)
TARGET_OS=linux
@@ -807,8 +847,10 @@ if test "$use_lcov" = "yes"; then
AX_CHECK_COMPILE_FLAG([--coverage],[CORE_CXXFLAGS="$CORE_CXXFLAGS --coverage"],
[AC_MSG_ERROR([lcov testing requested but --coverage flag does not work])])
CORE_CXXFLAGS="$CORE_CXXFLAGS -Og"
fi
AC_SUBST(LCOV_OPTS)
if test "$use_lcov_branch" != "no"; then
AC_SUBST(LCOV_OPTS, "$LCOV_OPTS --rc lcov_branch_coverage=1")
fi
dnl Check for endianness
@@ -839,12 +881,30 @@ if test "$ac_cv_sys_large_files" != "" &&
CORE_CPPFLAGS="$CORE_CPPFLAGS -D_LARGE_FILES=$ac_cv_sys_large_files"
fi
if test "$enable_gprof" = "yes"; then
dnl -pg is incompatible with -pie. Since hardening and profiling together doesn't make sense,
dnl we simply make them mutually exclusive here. Additionally, hardened toolchains may force
dnl -pie by default, in which case it needs to be turned off with -no-pie.
if test "$use_hardening" = "yes"; then
AC_MSG_ERROR([gprof profiling is not compatible with hardening. Reconfigure with --disable-hardening or --disable-gprof])
fi
use_hardening=no
AX_CHECK_COMPILE_FLAG([-pg],[GPROF_CXXFLAGS="-pg"],
[AC_MSG_ERROR([gprof profiling requested but not available])], [$CXXFLAG_WERROR])
AX_CHECK_LINK_FLAG([-no-pie], [GPROF_LDFLAGS="-no-pie"])
AX_CHECK_LINK_FLAG([-pg], [GPROF_LDFLAGS="$GPROF_LDFLAGS -pg"],
[AC_MSG_ERROR([gprof profiling requested but not available])], [$GPROF_LDFLAGS])
fi
if test "$TARGET_OS" != "windows"; then
dnl All windows code is PIC, forcing it on just adds useless compile warnings
AX_CHECK_COMPILE_FLAG([-fPIC], [PIC_FLAGS="-fPIC"])
fi
if test "$use_hardening" != "no"; then
use_hardening=yes
AX_CHECK_COMPILE_FLAG([-Wstack-protector], [HARDENED_CXXFLAGS="$HARDENED_CXXFLAGS -Wstack-protector"])
AX_CHECK_COMPILE_FLAG([-fstack-protector-all], [HARDENED_CXXFLAGS="$HARDENED_CXXFLAGS -fstack-protector-all"])
@@ -898,7 +958,7 @@ if test "$TARGET_OS" = "darwin"; then
AX_CHECK_LINK_FLAG([-Wl,-fixup_chains], [HARDENED_LDFLAGS="$HARDENED_LDFLAGS -Wl,-fixup_chains"], [], [$LDFLAG_WERROR])
fi
AC_CHECK_HEADERS([sys/select.h sys/prctl.h vm/vm_param.h sys/vmmeter.h sys/resources.h])
AC_CHECK_HEADERS([sys/select.h sys/prctl.h sys/sysctl.h vm/vm_param.h sys/vmmeter.h sys/resources.h])
AC_CHECK_DECLS([getifaddrs, freeifaddrs],[CHECK_SOCKET],,
[#include <sys/types.h>
@@ -911,6 +971,24 @@ AC_CHECK_DECLS([setsid])
AC_CHECK_DECLS([pipe2])
AC_CHECK_FUNCS([timingsafe_bcmp])
AC_MSG_CHECKING([for __builtin_clzl])
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[ ]], [[
(void) __builtin_clzl(0);
]])],
[ AC_MSG_RESULT([yes]); have_clzl=yes; AC_DEFINE([HAVE_BUILTIN_CLZL], [1], [Define this symbol if you have __builtin_clzl])],
[ AC_MSG_RESULT([no]); have_clzl=no;]
)
AC_MSG_CHECKING([for __builtin_clzll])
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[ ]], [[
(void) __builtin_clzll(0);
]])],
[ AC_MSG_RESULT([yes]); have_clzll=yes; AC_DEFINE([HAVE_BUILTIN_CLZLL], [1], [Define this symbol if you have __builtin_clzll])],
[ AC_MSG_RESULT([no]); have_clzll=no;]
)
dnl Check for malloc_info (for memory statistics information in getmemoryinfo)
AC_MSG_CHECKING([for getmemoryinfo])
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <malloc.h>]],
@@ -943,6 +1021,90 @@ AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
[ AC_MSG_RESULT([no])]
)
AC_MSG_CHECKING([for default visibility attribute])
AC_COMPILE_IFELSE([AC_LANG_SOURCE([
int foo(void) __attribute__((visibility("default")));
int main(){}
])],
[
AC_DEFINE([HAVE_DEFAULT_VISIBILITY_ATTRIBUTE], [1], [Define if the visibility attribute is supported.])
AC_MSG_RESULT([yes])
],
[
AC_MSG_RESULT([no])
if test "$use_reduce_exports" = "yes"; then
AC_MSG_ERROR([Cannot find a working visibility attribute. Use --disable-reduce-exports.])
fi
]
)
AC_MSG_CHECKING([for dllexport attribute])
AC_COMPILE_IFELSE([AC_LANG_SOURCE([
__declspec(dllexport) int foo(void);
int main(){}
])],
[
AC_DEFINE([HAVE_DLLEXPORT_ATTRIBUTE], [1], [Define if the dllexport attribute is supported.])
AC_MSG_RESULT([yes])
],
[AC_MSG_RESULT([no])]
)
if test "$use_thread_local" = "yes" || test "$use_thread_local" = "auto"; then
TEMP_LDFLAGS="$LDFLAGS"
LDFLAGS="$TEMP_LDFLAGS $PTHREAD_CFLAGS"
AC_MSG_CHECKING([for thread_local support])
AC_LINK_IFELSE([AC_LANG_SOURCE([
#include <thread>
static thread_local int foo = 0;
static void run_thread() { foo++;}
int main(){
for(int i = 0; i < 10; i++) { std::thread(run_thread).detach();}
return foo;
}
])],
[
case $host in
*mingw*)
dnl mingw32's implementation of thread_local has also been shown to behave
dnl erroneously under concurrent usage; see:
dnl https://gist.github.com/jamesob/fe9a872051a88b2025b1aa37bfa98605
AC_MSG_RESULT([no])
;;
*freebsd*)
dnl FreeBSD's implementation of thread_local is also buggy (per
dnl https://groups.google.com/d/msg/bsdmailinglist/22ncTZAbDp4/Dii_pII5AwAJ)
AC_MSG_RESULT([no])
;;
*)
AC_DEFINE([HAVE_THREAD_LOCAL], [1], [Define if thread_local is supported.])
AC_MSG_RESULT([yes])
;;
esac
],
[
AC_MSG_RESULT([no])
]
)
LDFLAGS="$TEMP_LDFLAGS"
fi
dnl check for gmtime_r(), fallback to gmtime_s() if that is unavailable
dnl fail if neither are available.
AC_MSG_CHECKING([for gmtime_r])
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <ctime>]],
[[ gmtime_r((const time_t *) nullptr, (struct tm *) nullptr); ]])],
[ AC_MSG_RESULT([yes]); AC_DEFINE([HAVE_GMTIME_R], [1], [Define this symbol if gmtime_r is available]) ],
[ AC_MSG_RESULT([no]);
AC_MSG_CHECKING([for gmtime_s]);
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <ctime>]],
[[ gmtime_s((struct tm *) nullptr, (const time_t *) nullptr); ]])],
[ AC_MSG_RESULT([yes])],
[ AC_MSG_RESULT([no]); AC_MSG_ERROR([Both gmtime_r and gmtime_s are unavailable]) ]
)
]
)
dnl Check for different ways of gathering OS randomness
AC_MSG_CHECKING([for Linux getrandom function])
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
@@ -1033,23 +1195,10 @@ AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
]], [[
getauxval(AT_HWCAP);
]])],
[ AC_MSG_RESULT([yes]); HAVE_STRONG_GETAUXVAL=1; AC_DEFINE([HAVE_STRONG_GETAUXVAL], [1], [Define this symbol to build code that uses getauxval]) ],
[ AC_MSG_RESULT([yes]); HAVE_STRONG_GETAUXVAL=1; AC_DEFINE([HAVE_STRONG_GETAUXVAL], [1], [Define this symbol to build code that uses getauxval)]) ],
[ AC_MSG_RESULT([no]); HAVE_STRONG_GETAUXVAL=0 ]
)
# Check for UNIX sockets
AC_MSG_CHECKING(for sockaddr_un)
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
#include <sys/socket.h>
#include <sys/un.h>
]], [[
struct sockaddr_un addr;
addr.sun_family = AF_UNIX;
]])],
[ AC_MSG_RESULT([yes]); AC_DEFINE([HAVE_SOCKADDR_UN], [1], [Define this symbol if platform supports unix domain sockets]) ],
[ AC_MSG_RESULT([no]); ]
)
have_any_system=no
AC_MSG_CHECKING([for std::system])
AC_LINK_IFELSE(
@@ -1097,6 +1246,7 @@ if test "$enable_fuzz" = "yes"; then
build_bitcoin_chainstate=no
build_bitcoin_wallet=no
build_bitcoind=no
build_bitcoin_libs=no
bitcoin_enable_qt=no
bitcoin_enable_qt_test=no
bitcoin_enable_qt_dbus=no
@@ -1255,7 +1405,7 @@ if test "$use_boost" = "yes"; then
dnl Check for Boost headers
AX_BOOST_BASE([1.73.0],[],[AC_MSG_ERROR([Boost is not available!])])
if test "$want_boost" = "no"; then
AC_MSG_ERROR([Boost is required])
AC_MSG_ERROR([only libbitcoinconsensus can be built without Boost])
fi
dnl we don't use multi_index serialization
@@ -1273,14 +1423,56 @@ if test "$use_boost" = "yes"; then
fi
fi
case $host in
dnl Re-enable it after enabling Windows support in cpp-subprocess.
*mingw*)
use_external_signer="no"
;;
esac
if test "$use_external_signer" = "yes"; then
AC_DEFINE([ENABLE_EXTERNAL_SIGNER], [1], [Define if external signer support is enabled])
if test "$use_external_signer" != "no"; then
AC_MSG_CHECKING([whether Boost.Process can be used])
TEMP_CXXFLAGS="$CXXFLAGS"
dnl Boost 1.78 requires the following workaround.
dnl See: https://github.com/boostorg/process/issues/235
CXXFLAGS="$CXXFLAGS -Wno-error=narrowing"
TEMP_CPPFLAGS="$CPPFLAGS"
CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS"
TEMP_LDFLAGS="$LDFLAGS"
dnl Boost 1.73 and older require the following workaround.
LDFLAGS="$LDFLAGS $PTHREAD_CFLAGS"
AC_LINK_IFELSE([AC_LANG_PROGRAM([[
#define BOOST_PROCESS_USE_STD_FS
#include <boost/process.hpp>
]],[[
namespace bp = boost::process;
bp::opstream stdin_stream;
bp::ipstream stdout_stream;
bp::child c("dummy", bp::std_out > stdout_stream, bp::std_err > stdout_stream, bp::std_in < stdin_stream);
stdin_stream << std::string{"test"} << std::endl;
if (c.running()) c.terminate();
c.wait();
c.exit_code();
]])],
[have_boost_process="yes"],
[have_boost_process="no"])
LDFLAGS="$TEMP_LDFLAGS"
CPPFLAGS="$TEMP_CPPFLAGS"
CXXFLAGS="$TEMP_CXXFLAGS"
AC_MSG_RESULT([$have_boost_process])
if test "$have_boost_process" = "yes"; then
case $host in
dnl Boost Process for Windows uses Boost ASIO. Boost ASIO performs
dnl pre-main init of Windows networking libraries, which we do not
dnl want.
*mingw*)
use_external_signer="no"
;;
*)
use_external_signer="yes"
AC_DEFINE([ENABLE_EXTERNAL_SIGNER], [1], [Define if external signer support is enabled])
AC_DEFINE([BOOST_PROCESS_USE_STD_FS], [1], [Defined to avoid Boost::Process trying to use Boost Filesystem])
;;
esac
else
if test "$use_external_signer" = "yes"; then
AC_MSG_ERROR([External signing is not supported for this Boost version])
fi
use_external_signer="no";
fi
fi
AM_CONDITIONAL([ENABLE_EXTERNAL_SIGNER], [test "$use_external_signer" = "yes"])
@@ -1289,7 +1481,6 @@ if test "$use_reduce_exports" = "yes"; then
AX_CHECK_COMPILE_FLAG([-fvisibility=hidden], [CORE_CXXFLAGS="$CORE_CXXFLAGS -fvisibility=hidden"],
[AC_MSG_ERROR([Cannot set hidden symbol visibility. Use --disable-reduce-exports.])], [$CXXFLAG_WERROR])
AX_CHECK_LINK_FLAG([-Wl,--exclude-libs,ALL], [RELDFLAGS="-Wl,--exclude-libs,ALL"], [], [$LDFLAG_WERROR])
AX_CHECK_LINK_FLAG([-Wl,-no_exported_symbols], [LIBTOOL_APP_LDFLAGS="$LIBTOOL_APP_LDFLAGS -Wl,-no_exported_symbols"], [], [$LDFLAG_WERROR])
fi
if test "$use_tests" = "yes"; then
@@ -1366,9 +1557,6 @@ if test "$with_libmultiprocess" = "yes" || test "$with_libmultiprocess" = "auto"
PKG_CHECK_MODULES([LIBMULTIPROCESS], [libmultiprocess], [
libmultiprocess_found=yes;
libmultiprocess_prefix=`$PKG_CONFIG --variable=prefix libmultiprocess`;
if test "$suppress_external_warnings" != "no" ; then
LIBMULTIPROCESS_CFLAGS=SUPPRESS_WARNINGS($LIBMULTIPROCESS_CFLAGS)
fi
], [true])
elif test "$with_libmultiprocess" != "no"; then
AC_MSG_ERROR([--with-libmultiprocess=$with_libmultiprocess value is not yes, auto, or no])
@@ -1431,8 +1619,18 @@ fi
AM_CONDITIONAL([BUILD_BITCOIN_CHAINSTATE], [test $build_bitcoin_chainstate = "yes"])
AC_MSG_RESULT($build_bitcoin_chainstate)
AC_MSG_CHECKING([whether to build libraries])
AM_CONDITIONAL([BUILD_BITCOIN_LIBS], [test $build_bitcoin_libs = "yes"])
if test "$build_bitcoin_libs" = "yes"; then
AC_DEFINE([HAVE_CONSENSUS_LIB], [1], [Define this symbol if the consensus lib has been built])
AC_CONFIG_FILES([libbitcoinconsensus.pc:libbitcoinconsensus.pc.in])
fi
AM_CONDITIONAL([BUILD_BITCOIN_KERNEL_LIB], [test "$build_experimental_kernel_lib" != "no" && ( test "$build_experimental_kernel_lib" = "yes" || test "$build_bitcoin_chainstate" = "yes" )])
AC_MSG_RESULT($build_bitcoin_libs)
AC_LANG_POP
if test "$use_ccache" != "no"; then
@@ -1566,8 +1764,8 @@ else
AC_MSG_RESULT([no])
fi
if test "$build_bitcoin_wallet$build_bitcoin_cli$build_bitcoin_tx$build_bitcoin_util$build_bitcoind$bitcoin_enable_qt$enable_fuzz_binary$use_bench$use_tests" = "nonononononononono"; then
AC_MSG_ERROR([No targets! Please specify at least one of: --with-utils --with-daemon --with-gui --enable-fuzz(-binary) --enable-bench or --enable-tests])
if test "$build_bitcoin_wallet$build_bitcoin_cli$build_bitcoin_tx$build_bitcoin_util$build_bitcoin_libs$build_bitcoind$bitcoin_enable_qt$enable_fuzz_binary$use_bench$use_tests" = "nononononononononono"; then
AC_MSG_ERROR([No targets! Please specify at least one of: --with-utils --with-libs --with-daemon --with-gui --enable-fuzz(-binary) --enable-bench or --enable-tests])
fi
AM_CONDITIONAL([TARGET_DARWIN], [test "$TARGET_OS" = "darwin"])
@@ -1585,6 +1783,7 @@ AM_CONDITIONAL([ENABLE_QT_TESTS], [test "$BUILD_TEST_QT" = "yes"])
AM_CONDITIONAL([ENABLE_BENCH], [test "$use_bench" = "yes"])
AM_CONDITIONAL([USE_QRCODE], [test "$use_qr" = "yes"])
AM_CONDITIONAL([USE_LCOV], [test "$use_lcov" = "yes"])
AM_CONDITIONAL([USE_LIBEVENT], [test "$use_libevent" = "yes"])
AM_CONDITIONAL([HARDEN], [test "$use_hardening" = "yes"])
AM_CONDITIONAL([ENABLE_SSE42], [test "$enable_sse42" = "yes"])
AM_CONDITIONAL([ENABLE_SSE41], [test "$enable_sse41" = "yes"])
@@ -1598,6 +1797,7 @@ AM_CONDITIONAL([USE_UPNP], [test "$use_upnp" = "yes"])
dnl for minisketch
AM_CONDITIONAL([ENABLE_CLMUL], [test "$enable_clmul" = "yes"])
AM_CONDITIONAL([HAVE_CLZ], [test "$have_clzl$have_clzll" = "yesyes"])
AC_DEFINE([CLIENT_VERSION_MAJOR], [_CLIENT_VERSION_MAJOR], [Major version])
AC_DEFINE([CLIENT_VERSION_MINOR], [_CLIENT_VERSION_MINOR], [Minor version])
@@ -1636,6 +1836,8 @@ AC_SUBST(WARN_CXXFLAGS)
AC_SUBST(NOWARN_CXXFLAGS)
AC_SUBST(DEBUG_CXXFLAGS)
AC_SUBST(ERROR_CXXFLAGS)
AC_SUBST(GPROF_CXXFLAGS)
AC_SUBST(GPROF_LDFLAGS)
AC_SUBST(HARDENED_CXXFLAGS)
AC_SUBST(HARDENED_CPPFLAGS)
AC_SUBST(HARDENED_LDFLAGS)
@@ -1661,12 +1863,14 @@ AC_SUBST(MINIUPNPC_CPPFLAGS)
AC_SUBST(MINIUPNPC_LIBS)
AC_SUBST(NATPMP_CPPFLAGS)
AC_SUBST(NATPMP_LIBS)
AC_SUBST(HAVE_GMTIME_R)
AC_SUBST(HAVE_FDATASYNC)
AC_SUBST(HAVE_FULLFSYNC)
AC_SUBST(HAVE_O_CLOEXEC)
AC_SUBST(HAVE_BUILTIN_PREFETCH)
AC_SUBST(HAVE_MM_PREFETCH)
AC_SUBST(HAVE_STRONG_GETAUXVAL)
AC_SUBST(ANDROID_ARCH)
AC_SUBST(HAVE_EVHTTP_CONNECTION_GET_PEER_CONST_CHAR)
AC_CONFIG_FILES([Makefile src/Makefile doc/man/Makefile share/setup.nsi share/qt/Info.plist test/config.ini])
AC_CONFIG_FILES([contrib/devtools/split-debug.sh],[chmod +x contrib/devtools/split-debug.sh])
@@ -1721,6 +1925,7 @@ echo
echo "Options used to compile and link:"
echo " external signer = $use_external_signer"
echo " multiprocess = $build_multiprocess"
echo " with libs = $build_bitcoin_libs"
echo " with wallet = $enable_wallet"
if test "$enable_wallet" != "no"; then
echo " with sqlite = $use_sqlite"
@@ -1743,6 +1948,7 @@ echo " with natpmp = $use_natpmp"
echo " USDT tracing = $use_usdt"
echo " sanitizers = $use_sanitizers"
echo " debug enabled = $enable_debug"
echo " gprof enabled = $enable_gprof"
echo " werror = $enable_werror"
echo
echo " target os = $host_os"
@@ -1752,8 +1958,8 @@ echo " CC = $CC"
echo " CFLAGS = $PTHREAD_CFLAGS $SANITIZER_CFLAGS $CFLAGS"
echo " CPPFLAGS = $DEBUG_CPPFLAGS $HARDENED_CPPFLAGS $CORE_CPPFLAGS $CPPFLAGS"
echo " CXX = $CXX"
echo " CXXFLAGS = $CORE_CXXFLAGS $DEBUG_CXXFLAGS $HARDENED_CXXFLAGS $WARN_CXXFLAGS $NOWARN_CXXFLAGS $ERROR_CXXFLAGS $SANITIZER_CXXFLAGS $CXXFLAGS"
echo " LDFLAGS = $PTHREAD_LIBS $HARDENED_LDFLAGS $SANITIZER_LDFLAGS $CORE_LDFLAGS $LDFLAGS"
echo " CXXFLAGS = $CORE_CXXFLAGS $DEBUG_CXXFLAGS $HARDENED_CXXFLAGS $WARN_CXXFLAGS $NOWARN_CXXFLAGS $ERROR_CXXFLAGS $GPROF_CXXFLAGS $SANITIZER_CXXFLAGS $CXXFLAGS"
echo " LDFLAGS = $PTHREAD_LIBS $HARDENED_LDFLAGS $GPROF_LDFLAGS $SANITIZER_LDFLAGS $CORE_LDFLAGS $LDFLAGS"
echo " AR = $AR"
echo " ARFLAGS = $ARFLAGS"
echo


@@ -1,12 +0,0 @@
# ASMap Tool
Tool for performing various operations on textual and binary asmap files,
particularly encoding/compressing the raw data to the binary format that can
be used in Bitcoin Core with the `-asmap` option.
Example usage:
```
python3 asmap-tool.py encode /path/to/input.file /path/to/output.file
python3 asmap-tool.py decode /path/to/input.file /path/to/output.file
python3 asmap-tool.py diff /path/to/first.file /path/to/second.file
```
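The script also has a `diff_addrs` subcommand (defined in the argparse setup below) for comparing two asmap files against a set of node addresses; a hedged usage sketch, following the help text's suggestion to feed it a `getnodeaddresses` dump (the `addrs.json` path is illustrative):
```
# Dump all known peer addresses (count=0 means "all"), then compare two asmap files against them
bitcoin-cli getnodeaddresses 0 > addrs.json
python3 asmap-tool.py diff_addrs /path/to/first.file /path/to/second.file addrs.json
```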


@@ -1,197 +0,0 @@
#!/usr/bin/env python3
# Copyright (c) 2022 Pieter Wuille
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import argparse
import sys
import ipaddress
import json
import math
from collections import defaultdict
import asmap
def load_file(input_file):
try:
contents = input_file.read()
except OSError as err:
sys.exit(f"Input file '{input_file.name}' cannot be read: {err.strerror}.")
try:
bin_asmap = asmap.ASMap.from_binary(contents)
except ValueError:
bin_asmap = None
txt_error = None
entries = None
try:
txt_contents = str(contents, encoding="utf-8")
except UnicodeError:
txt_error = "invalid UTF-8"
txt_contents = None
if txt_contents is not None:
entries = []
for line in txt_contents.split("\n"):
idx = line.find('#')
if idx >= 0:
line = line[:idx]
line = line.lstrip(' ').rstrip(' \t\r\n')
if len(line) == 0:
continue
fields = line.split(' ')
if len(fields) != 2:
txt_error = f"unparseable line '{line}'"
entries = None
break
prefix, asn = fields
if len(asn) <= 2 or asn[:2] != "AS" or any(c < '0' or c > '9' for c in asn[2:]):
txt_error = f"invalid ASN '{asn}'"
entries = None
break
try:
net = ipaddress.ip_network(prefix)
except ValueError:
txt_error = f"invalid network '{prefix}'"
entries = None
break
entries.append((asmap.net_to_prefix(net), int(asn[2:])))
if entries is not None and bin_asmap is not None and len(contents) > 0:
sys.exit(f"Input file '{input_file.name}' is ambiguous.")
if entries is not None:
state = asmap.ASMap()
state.update_multi(entries)
return state
if bin_asmap is not None:
return bin_asmap
sys.exit(f"Input file '{input_file.name}' is neither a valid binary asmap file nor valid text input ({txt_error}).")
def save_binary(output_file, state, fill):
contents = state.to_binary(fill=fill)
try:
output_file.write(contents)
output_file.close()
except OSError as err:
sys.exit(f"Output file '{output_file.name}' cannot be written to: {err.strerror}.")
def save_text(output_file, state, fill, overlapping):
for prefix, asn in state.to_entries(fill=fill, overlapping=overlapping):
net = asmap.prefix_to_net(prefix)
try:
print(f"{net} AS{asn}", file=output_file)
except OSError as err:
sys.exit(f"Output file '{output_file.name}' cannot be written to: {err.strerror}.")
try:
output_file.close()
except OSError as err:
sys.exit(f"Output file '{output_file.name}' cannot be written to: {err.strerror}.")
def main():
parser = argparse.ArgumentParser(description="Tool for performing various operations on textual and binary asmap files.")
subparsers = parser.add_subparsers(title="valid subcommands", dest="subcommand")
parser_encode = subparsers.add_parser("encode", help="convert asmap data to binary format")
parser_encode.add_argument('-f', '--fill', dest="fill", default=False, action="store_true",
help="permit reassigning undefined network ranges arbitrarily to reduce size")
parser_encode.add_argument('infile', nargs='?', type=argparse.FileType('rb'), default=sys.stdin.buffer,
help="input asmap file (text or binary); default is stdin")
parser_encode.add_argument('outfile', nargs='?', type=argparse.FileType('wb'), default=sys.stdout.buffer,
help="output binary asmap file; default is stdout")
parser_decode = subparsers.add_parser("decode", help="convert asmap data to text format")
parser_decode.add_argument('-f', '--fill', dest="fill", default=False, action="store_true",
help="permit reassigning undefined network ranges arbitrarily to reduce length")
parser_decode.add_argument('-n', '--nonoverlapping', dest="overlapping", default=True, action="store_false",
help="output strictly non-overall ping network ranges (increases output size)")
parser_decode.add_argument('infile', nargs='?', type=argparse.FileType('rb'), default=sys.stdin.buffer,
help="input asmap file (text or binary); default is stdin")
parser_decode.add_argument('outfile', nargs='?', type=argparse.FileType('w'), default=sys.stdout,
help="output text file; default is stdout")
parser_diff = subparsers.add_parser("diff", help="compute the difference between two asmap files")
parser_diff.add_argument('-i', '--ignore-unassigned', dest="ignore_unassigned", default=False, action="store_true",
help="ignore unassigned ranges in the first input (useful when second input is filled)")
parser_diff.add_argument('infile1', type=argparse.FileType('rb'),
help="first file to compare (text or binary)")
parser_diff.add_argument('infile2', type=argparse.FileType('rb'),
help="second file to compare (text or binary)")
parser_diff_addrs = subparsers.add_parser("diff_addrs",
help="compute difference between two asmap files for a set of addresses")
parser_diff_addrs.add_argument('-s', '--show-addresses', dest="show_addresses", default=False, action="store_true",
help="include reassigned addresses in the output")
parser_diff_addrs.add_argument("infile1", type=argparse.FileType("rb"),
help="first file to compare (text or binary)")
parser_diff_addrs.add_argument("infile2", type=argparse.FileType("rb"),
help="second file to compare (text or binary)")
parser_diff_addrs.add_argument("addrs_file", type=argparse.FileType("r"),
help="address file containing getnodeaddresses output to use in the comparison "
"(make sure to set the count parameter to zero to get all node addresses, "
"e.g. 'bitcoin-cli getnodeaddresses 0 > addrs.json')")
args = parser.parse_args()
if args.subcommand is None:
parser.print_help()
elif args.subcommand == "encode":
state = load_file(args.infile)
save_binary(args.outfile, state, fill=args.fill)
elif args.subcommand == "decode":
state = load_file(args.infile)
save_text(args.outfile, state, fill=args.fill, overlapping=args.overlapping)
elif args.subcommand == "diff":
state1 = load_file(args.infile1)
state2 = load_file(args.infile2)
ipv4_changed = 0
ipv6_changed = 0
for prefix, old_asn, new_asn in state1.diff(state2):
if args.ignore_unassigned and old_asn == 0:
continue
net = asmap.prefix_to_net(prefix)
if isinstance(net, ipaddress.IPv4Network):
ipv4_changed += 1 << (32 - net.prefixlen)
elif isinstance(net, ipaddress.IPv6Network):
ipv6_changed += 1 << (128 - net.prefixlen)
if new_asn == 0:
print(f"# {net} was AS{old_asn}")
elif old_asn == 0:
print(f"{net} AS{new_asn} # was unassigned")
else:
print(f"{net} AS{new_asn} # was AS{old_asn}")
ipv4_change_str = "" if ipv4_changed == 0 else f" (2^{math.log2(ipv4_changed):.2f})"
ipv6_change_str = "" if ipv6_changed == 0 else f" (2^{math.log2(ipv6_changed):.2f})"
print(
f"# {ipv4_changed}{ipv4_change_str} IPv4 addresses changed; "
f"{ipv6_changed}{ipv6_change_str} IPv6 addresses changed"
)
elif args.subcommand == "diff_addrs":
state1 = load_file(args.infile1)
state2 = load_file(args.infile2)
address_info = json.load(args.addrs_file)
addrs = {a["address"] for a in address_info if a["network"] in ["ipv4", "ipv6"]}
reassignments = defaultdict(list)
for addr in addrs:
net = ipaddress.ip_network(addr)
prefix = asmap.net_to_prefix(net)
old_asn = state1.lookup(prefix)
new_asn = state2.lookup(prefix)
if new_asn != old_asn:
reassignments[(old_asn, new_asn)].append(addr)
reassignments = sorted(reassignments.items(), key=lambda item: len(item[1]), reverse=True)
num_reassignment_type = defaultdict(int)
for (old_asn, new_asn), reassigned_addrs in reassignments:
num_reassigned = len(reassigned_addrs)
num_reassignment_type[(bool(old_asn), bool(new_asn))] += num_reassigned
old_asn_str = f"AS{old_asn}" if old_asn else "unassigned"
new_asn_str = f"AS{new_asn}" if new_asn else "unassigned"
opt = ": " + ", ".join(reassigned_addrs) if args.show_addresses else ""
print(f"{num_reassigned} address(es) reassigned from {old_asn_str} to {new_asn_str}{opt}")
num_reassignments = sum(len(addrs) for _, addrs in reassignments)
share = num_reassignments / len(addrs) if len(addrs) > 0 else 0
print(f"Summary: {num_reassignments:,} ({share:.2%}) of {len(addrs):,} addresses were reassigned "
f"(migrations={num_reassignment_type[True, True]}, assignments={num_reassignment_type[False, True]}, "
f"unassignments={num_reassignment_type[True, False]})")
else:
parser.print_help()
sys.exit("No command provided.")
if __name__ == '__main__':
main()


@@ -9,7 +9,7 @@ _bitcoin_rpc() {
local rpcargs=()
for i in ${COMP_LINE}; do
case "$i" in
-conf=*|-datadir=*|-regtest|-rpc*|-testnet|-testnet4)
-conf=*|-datadir=*|-regtest|-rpc*|-testnet)
rpcargs=( "${rpcargs[@]}" "$i" )
;;
esac


@@ -5,7 +5,7 @@ Upstream-Contact: Satoshi Nakamoto <satoshin@gmx.com>
Source: https://github.com/bitcoin/bitcoin
Files: *
Copyright: 2009-2025, Bitcoin Core Developers
Copyright: 2009-2024, Bitcoin Core Developers
License: Expat
Comment: The Bitcoin Core Developers encompasses all contributors to the
project, listed in the release notes or the git log.


@@ -1,31 +1,20 @@
cmake_minimum_required(VERSION 3.22)
cmake_minimum_required(VERSION 3.9)
project(bitcoin-tidy
VERSION
1.0.0
DESCRIPTION "clang-tidy checks for Bitcoin Core"
LANGUAGES CXX)
project(bitcoin-tidy VERSION 1.0.0 DESCRIPTION "clang-tidy checks for Bitcoin Core")
include(GNUInstallDirs)
set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED True)
set(CMAKE_CXX_EXTENSIONS False)
set(CMAKE_DISABLE_FIND_PACKAGE_CURL ON)
set(CMAKE_DISABLE_FIND_PACKAGE_FFI ON)
set(CMAKE_DISABLE_FIND_PACKAGE_LibEdit ON)
set(CMAKE_DISABLE_FIND_PACKAGE_LibXml2 ON)
set(CMAKE_DISABLE_FIND_PACKAGE_Terminfo ON)
set(CMAKE_DISABLE_FIND_PACKAGE_ZLIB ON)
set(CMAKE_DISABLE_FIND_PACKAGE_zstd ON)
# TODO: Figure out how to avoid the terminfo check
find_package(LLVM REQUIRED CONFIG)
find_program(CLANG_TIDY_EXE NAMES "clang-tidy-${LLVM_VERSION_MAJOR}" "clang-tidy" HINTS ${LLVM_TOOLS_BINARY_DIR})
message(STATUS "Found LLVM ${LLVM_PACKAGE_VERSION}")
message(STATUS "Found clang-tidy: ${CLANG_TIDY_EXE}")
add_library(bitcoin-tidy MODULE bitcoin-tidy.cpp logprintf.cpp nontrivial-threadlocal.cpp)
add_library(bitcoin-tidy MODULE bitcoin-tidy.cpp logprintf.cpp)
target_include_directories(bitcoin-tidy SYSTEM PRIVATE ${LLVM_INCLUDE_DIRS})
# Disable RTTI and exceptions as necessary
@@ -58,7 +47,7 @@ else()
endif()
# Create a dummy library that runs clang-tidy tests as a side-effect of building
add_library(bitcoin-tidy-tests OBJECT EXCLUDE_FROM_ALL example_logprintf.cpp example_nontrivial-threadlocal.cpp)
add_library(bitcoin-tidy-tests OBJECT EXCLUDE_FROM_ALL example_logprintf.cpp)
add_dependencies(bitcoin-tidy-tests bitcoin-tidy)
set_target_properties(bitcoin-tidy-tests PROPERTIES CXX_CLANG_TIDY "${CLANG_TIDY_COMMAND}")


@@ -3,7 +3,6 @@
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "logprintf.h"
#include "nontrivial-threadlocal.h"
#include <clang-tidy/ClangTidyModule.h>
#include <clang-tidy/ClangTidyModuleRegistry.h>
@@ -14,7 +13,6 @@ public:
void addCheckFactories(clang::tidy::ClangTidyCheckFactories& CheckFactories) override
{
CheckFactories.registerCheck<bitcoin::LogPrintfCheck>("bitcoin-unterminated-logprintf");
CheckFactories.registerCheck<bitcoin::NonTrivialThreadLocal>("bitcoin-nontrivial-threadlocal");
}
};


@@ -1,2 +0,0 @@
#include <string>
thread_local std::string foo;


@@ -1,44 +0,0 @@
// Copyright (c) 2023 Bitcoin Developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "nontrivial-threadlocal.h"
#include <clang/AST/ASTContext.h>
#include <clang/ASTMatchers/ASTMatchFinder.h>
// Copied from clang-tidy's UnusedRaiiCheck
namespace {
AST_MATCHER(clang::CXXRecordDecl, hasNonTrivialDestructor) {
// TODO: If the dtor is there but empty we don't want to warn either.
return Node.hasDefinition() && Node.hasNonTrivialDestructor();
}
} // namespace
namespace bitcoin {
void NonTrivialThreadLocal::registerMatchers(clang::ast_matchers::MatchFinder* finder)
{
using namespace clang::ast_matchers;
/*
thread_local std::string foo;
*/
finder->addMatcher(
varDecl(
hasThreadStorageDuration(),
hasType(hasCanonicalType(recordType(hasDeclaration(cxxRecordDecl(hasNonTrivialDestructor())))))
).bind("nontrivial_threadlocal"),
this);
}
void NonTrivialThreadLocal::check(const clang::ast_matchers::MatchFinder::MatchResult& Result)
{
if (const clang::VarDecl* var = Result.Nodes.getNodeAs<clang::VarDecl>("nontrivial_threadlocal")) {
const auto user_diag = diag(var->getBeginLoc(), "Variable with non-trivial destructor cannot be thread_local.");
}
}
} // namespace bitcoin


@@ -1,29 +0,0 @@
// Copyright (c) 2023 Bitcoin Developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#ifndef NONTRIVIAL_THREADLOCAL_CHECK_H
#define NONTRIVIAL_THREADLOCAL_CHECK_H
#include <clang-tidy/ClangTidyCheck.h>
namespace bitcoin {
// Warn about any thread_local variable with a non-trivial destructor.
class NonTrivialThreadLocal final : public clang::tidy::ClangTidyCheck
{
public:
NonTrivialThreadLocal(clang::StringRef Name, clang::tidy::ClangTidyContext* Context)
: clang::tidy::ClangTidyCheck(Name, Context) {}
bool isLanguageVersionSupported(const clang::LangOptions& LangOpts) const override
{
return LangOpts.CPlusPlus;
}
void registerMatchers(clang::ast_matchers::MatchFinder* Finder) override;
void check(const clang::ast_matchers::MatchFinder::MatchResult& Result) override;
};
} // namespace bitcoin
#endif // NONTRIVIAL_THREADLOCAL_CHECK_H


@@ -1,203 +0,0 @@
#!/usr/bin/env bash
export LC_ALL=C
set -Eeuo pipefail
# Declare paths to libraries
declare -A LIBS
LIBS[cli]="libbitcoin_cli.a"
LIBS[common]="libbitcoin_common.a"
LIBS[consensus]="libbitcoin_consensus.a"
LIBS[crypto]="crypto/.libs/libbitcoin_crypto_base.a crypto/.libs/libbitcoin_crypto_x86_shani.a crypto/.libs/libbitcoin_crypto_sse41.a crypto/.libs/libbitcoin_crypto_avx2.a"
LIBS[node]="libbitcoin_node.a"
LIBS[util]="libbitcoin_util.a"
LIBS[wallet]="libbitcoin_wallet.a"
LIBS[wallet_tool]="libbitcoin_wallet_tool.a"
# Declare allowed dependencies "X Y" where X is allowed to depend on Y. This
# list is taken from doc/design/libraries.md.
ALLOWED_DEPENDENCIES=(
"cli common"
"cli util"
"common consensus"
"common crypto"
"common util"
"consensus crypto"
"node common"
"node consensus"
"node crypto"
"node kernel"
"node util"
"util crypto"
"wallet common"
"wallet crypto"
"wallet util"
"wallet_tool util"
"wallet_tool wallet"
)
# Add minor dependencies omitted from doc/design/libraries.md to keep the
# dependency diagram simple.
ALLOWED_DEPENDENCIES+=(
"wallet consensus"
"wallet_tool common"
"wallet_tool crypto"
)
# Declare list of known errors that should be suppressed.
declare -A SUPPRESS
# init.cpp file currently calls Berkeley DB sanity check function on startup, so
# there is an undocumented dependency of the node library on the wallet library.
SUPPRESS["libbitcoin_node_a-init.o libbitcoin_wallet_a-bdb.o _ZN6wallet27BerkeleyDatabaseSanityCheckEv"]=1
# init/common.cpp file calls InitError and InitWarning from interface_ui which
# is currently part of the node library. interface_ui should just be part of the
# common library instead, and is moved in
# https://github.com/bitcoin/bitcoin/issues/10102
SUPPRESS["libbitcoin_common_a-common.o libbitcoin_node_a-interface_ui.o _Z11InitWarningRK13bilingual_str"]=1
SUPPRESS["libbitcoin_common_a-common.o libbitcoin_node_a-interface_ui.o _Z9InitErrorRK13bilingual_str"]=1
# rpc/external_signer.cpp defines node RPC methods but is built as part of the
# common library. It should be moved to the node library instead.
SUPPRESS["libbitcoin_common_a-external_signer.o libbitcoin_node_a-server.o _ZN9CRPCTable13appendCommandERKNSt7__cxx1112basic_stringIcSt11char_traitsIcESaIcEEEPK11CRPCCommand"]=1
usage() {
echo "Usage: $(basename "${BASH_SOURCE[0]}") [BUILD_DIR]"
}
# Output makefile targets, converting library .a paths to libtool .la targets
lib_targets() {
for lib in "${!LIBS[@]}"; do
for lib_path in ${LIBS[$lib]}; do
# shellcheck disable=SC2001
sed 's:/.libs/\(.*\)\.a$:/\1.la:g' <<<"$lib_path"
done
done
}
# Extract symbol names and object names and write to text files
extract_symbols() {
local temp_dir="$1"
for lib in "${!LIBS[@]}"; do
for lib_path in ${LIBS[$lib]}; do
nm -o "$lib_path" | grep ' T ' | awk '{print $3, $1}' >> "${temp_dir}/${lib}_exports.txt"
nm -o "$lib_path" | grep ' U ' | awk '{print $3, $1}' >> "${temp_dir}/${lib}_imports.txt"
awk '{print $1}' "${temp_dir}/${lib}_exports.txt" | sort -u > "${temp_dir}/${lib}_exported_symbols.txt"
awk '{print $1}' "${temp_dir}/${lib}_imports.txt" | sort -u > "${temp_dir}/${lib}_imported_symbols.txt"
done
done
}
# Lookup object name(s) corresponding to symbol name in text file
obj_names() {
local symbol="$1"
local txt_file="$2"
sed -n "s/^$symbol [^:]\\+:\\([^:]\\+\\):[^:]*\$/\\1/p" "$txt_file" | sort -u
}
# Iterate through libraries and find disallowed dependencies
check_libraries() {
local temp_dir="$1"
local result=0
for src in "${!LIBS[@]}"; do
for dst in "${!LIBS[@]}"; do
if [ "$src" != "$dst" ] && ! is_allowed "$src" "$dst"; then
if ! check_disallowed "$src" "$dst" "$temp_dir"; then
result=1
fi
fi
done
done
check_not_suppressed
return $result
}
# Return whether src library is allowed to depend on dst.
is_allowed() {
local src="$1"
local dst="$2"
for allowed in "${ALLOWED_DEPENDENCIES[@]}"; do
if [ "$src $dst" = "$allowed" ]; then
return 0
fi
done
return 1
}
# Return whether src library imports any symbols from dst, assuming src is not
# allowed to depend on dst.
check_disallowed() {
local src="$1"
local dst="$2"
local temp_dir="$3"
local result=0
# Loop over symbol names exported by dst and imported by src
while read symbol; do
local dst_obj
dst_obj=$(obj_names "$symbol" "${temp_dir}/${dst}_exports.txt")
while read src_obj; do
if ! check_suppress "$src_obj" "$dst_obj" "$symbol"; then
echo "Error: $src_obj depends on $dst_obj symbol '$(c++filt "$symbol")', can suppess with:"
echo " SUPPRESS[\"$src_obj $dst_obj $symbol\"]=1"
result=1
fi
done < <(obj_names "$symbol" "${temp_dir}/${src}_imports.txt")
done < <(comm -12 "${temp_dir}/${dst}_exported_symbols.txt" "${temp_dir}/${src}_imported_symbols.txt")
return $result
}
# Declare array to track errors which were suppressed.
declare -A SUPPRESSED
# Return whether an error should be suppressed, and record the suppression in the
# SUPPRESSED array.
check_suppress() {
local src_obj="$1"
local dst_obj="$2"
local symbol="$3"
for suppress in "${!SUPPRESS[@]}"; do
read suppress_src suppress_dst suppress_pattern <<<"$suppress"
if [[ "$src_obj" == "$suppress_src" && "$dst_obj" == "$suppress_dst" && "$symbol" =~ $suppress_pattern ]]; then
SUPPRESSED["$suppress"]=1
return 0
fi
done
return 1
}
# Warn about errors which were supposed to be suppressed but were not encountered.
check_not_suppressed() {
for suppress in "${!SUPPRESS[@]}"; do
if [[ ! -v SUPPRESSED[$suppress] ]]; then
echo >&2 "Warning: suppression '$suppress' was ignored, consider deleting."
fi
done
}
# Check arguments.
if [ "$#" = 0 ]; then
BUILD_DIR="$(dirname "${BASH_SOURCE[0]}")/../../src"
elif [ "$#" = 1 ]; then
BUILD_DIR="$1"
else
echo >&2 "Error: wrong number of arguments."
usage >&2
exit 1
fi
if [ ! -f "$BUILD_DIR/Makefile" ]; then
echo >&2 "Error: directory '$BUILD_DIR' does not contain a makefile, please specify path to build directory for library targets."
usage >&2
exit 1
fi
# Build libraries and run checks.
cd "$BUILD_DIR"
# shellcheck disable=SC2046
make -j"$(nproc)" $(lib_targets)
TEMP_DIR="$(mktemp -d)"
extract_symbols "$TEMP_DIR"
if check_libraries "$TEMP_DIR"; then
echo "Success! No unexpected dependencies were detected."
else
echo >&2 "Error: Unexpected dependencies were detected. Check previous output."
fi
rm -r "$TEMP_DIR"


@@ -19,6 +19,7 @@ EXCLUDE = [
'src/qt/bitcoinstrings.cpp',
'src/chainparamsseeds.h',
# other external copyrights:
'src/reverse_iterator.h',
'src/test/fuzz/FuzzedDataProvider.h',
'src/tinyformat.h',
'src/bench/nanobench.h',


@@ -72,12 +72,9 @@ cat >> "${EXAMPLE_CONF_FILE}" << 'EOF'
# Options for mainnet
[main]
# Options for testnet3
# Options for testnet
[test]
# Options for testnet4
[testnet4]
# Options for signet
[signet]


@@ -62,10 +62,6 @@ with tempfile.NamedTemporaryFile('w', suffix='.h2m') as footer:
# Copyright is the same for all binaries, so just use the first.
footer.write('[COPYRIGHT]\n')
footer.write('\n'.join(versions[0][2]).strip())
# Create SEE ALSO section
footer.write('\n[SEE ALSO]\n')
footer.write(', '.join(s.rpartition('/')[2] + '(1)' for s in BINARIES))
footer.write('\n')
footer.flush()
# Call the binaries through help2man to produce a manual page for each of them.

contrib/devtools/headerssync-params.py Executable file → Normal file

@@ -12,13 +12,13 @@ import random
# Parameters:
# Aim for still working fine at some point in the future. [datetime]
TIME = datetime(2027, 4, 1)
TIME = datetime(2026, 10, 5)
# Expected block interval. [timedelta]
BLOCK_INTERVAL = timedelta(seconds=600)
# The number of headers corresponding to the minchainwork parameter. [headers]
MINCHAINWORK_HEADERS = 856760
MINCHAINWORK_HEADERS = 804000
# Combined processing bandwidth from all attackers to one victim. [bit/s]
# 6 Gbit/s is approximately the speed at which a single thread of a Ryzen 5950X CPU thread can hash


@@ -38,13 +38,13 @@ def check_ELF_RELRO(binary) -> bool:
return have_gnu_relro and have_bindnow
def check_ELF_CANARY(binary) -> bool:
def check_ELF_Canary(binary) -> bool:
'''
Check for use of stack canary
'''
return binary.has_symbol('__stack_chk_fail')
def check_ELF_SEPARATE_CODE(binary):
def check_ELF_separate_code(binary):
'''
Check that sections are appropriately separated in virtual memory,
based on their permissions. This checks for missing -Wl,-z,separate-code
@@ -105,7 +105,7 @@ def check_ELF_SEPARATE_CODE(binary):
return False
return True
def check_ELF_CONTROL_FLOW(binary) -> bool:
def check_ELF_control_flow(binary) -> bool:
'''
Check for control flow instrumentation
'''
@@ -130,7 +130,7 @@ def check_PE_RELOC_SECTION(binary) -> bool:
'''Check for a reloc section. This is required for functional ASLR.'''
return binary.has_relocations
def check_PE_CONTROL_FLOW(binary) -> bool:
def check_PE_control_flow(binary) -> bool:
'''
Check for control flow instrumentation
'''
@@ -145,7 +145,7 @@ def check_PE_CONTROL_FLOW(binary) -> bool:
return True
return False
def check_PE_CANARY(binary) -> bool:
def check_PE_Canary(binary) -> bool:
'''
Check for use of stack canary
'''
@@ -163,7 +163,7 @@ def check_MACHO_FIXUP_CHAINS(binary) -> bool:
'''
return binary.has_dyld_chained_fixups
def check_MACHO_CANARY(binary) -> bool:
def check_MACHO_Canary(binary) -> bool:
'''
Check for use of stack canary
'''
@@ -182,7 +182,7 @@ def check_NX(binary) -> bool:
'''
return binary.has_nx
def check_MACHO_CONTROL_FLOW(binary) -> bool:
def check_MACHO_control_flow(binary) -> bool:
'''
Check for control flow instrumentation
'''
@@ -192,7 +192,7 @@ def check_MACHO_CONTROL_FLOW(binary) -> bool:
return True
return False
def check_MACHO_BRANCH_PROTECTION(binary) -> bool:
def check_MACHO_branch_protection(binary) -> bool:
'''
Check for branch protection instrumentation
'''
@@ -206,8 +206,8 @@ BASE_ELF = [
('PIE', check_PIE),
('NX', check_NX),
('RELRO', check_ELF_RELRO),
('CANARY', check_ELF_CANARY),
('SEPARATE_CODE', check_ELF_SEPARATE_CODE),
('Canary', check_ELF_Canary),
('separate_code', check_ELF_separate_code),
]
BASE_PE = [
@@ -216,19 +216,19 @@ BASE_PE = [
('HIGH_ENTROPY_VA', check_PE_HIGH_ENTROPY_VA),
('NX', check_NX),
('RELOC_SECTION', check_PE_RELOC_SECTION),
('CONTROL_FLOW', check_PE_CONTROL_FLOW),
('CANARY', check_PE_CANARY),
('CONTROL_FLOW', check_PE_control_flow),
('Canary', check_PE_Canary),
]
BASE_MACHO = [
('NOUNDEFS', check_MACHO_NOUNDEFS),
('CANARY', check_MACHO_CANARY),
('Canary', check_MACHO_Canary),
('FIXUP_CHAINS', check_MACHO_FIXUP_CHAINS),
]
CHECKS = {
lief.EXE_FORMATS.ELF: {
lief.ARCHITECTURES.X86: BASE_ELF + [('CONTROL_FLOW', check_ELF_CONTROL_FLOW)],
lief.ARCHITECTURES.X86: BASE_ELF + [('CONTROL_FLOW', check_ELF_control_flow)],
lief.ARCHITECTURES.ARM: BASE_ELF,
lief.ARCHITECTURES.ARM64: BASE_ELF,
lief.ARCHITECTURES.PPC: BASE_ELF,
@@ -240,24 +240,39 @@ CHECKS = {
lief.EXE_FORMATS.MACHO: {
lief.ARCHITECTURES.X86: BASE_MACHO + [('PIE', check_PIE),
('NX', check_NX),
('CONTROL_FLOW', check_MACHO_CONTROL_FLOW)],
lief.ARCHITECTURES.ARM64: BASE_MACHO + [('BRANCH_PROTECTION', check_MACHO_BRANCH_PROTECTION)],
('CONTROL_FLOW', check_MACHO_control_flow)],
lief.ARCHITECTURES.ARM64: BASE_MACHO + [('BRANCH_PROTECTION', check_MACHO_branch_protection)],
}
}
if __name__ == '__main__':
retval: int = 0
for filename in sys.argv[1:]:
binary = lief.parse(filename)
etype = binary.format
arch = binary.abstract.header.architecture
binary.concrete
try:
binary = lief.parse(filename)
etype = binary.format
arch = binary.abstract.header.architecture
binary.concrete
failed: list[str] = []
for (name, func) in CHECKS[etype][arch]:
if not func(binary):
failed.append(name)
if failed:
print(f'{filename}: failed {" ".join(failed)}')
if etype == lief.EXE_FORMATS.UNKNOWN:
print(f'{filename}: unknown executable format')
retval = 1
continue
if arch == lief.ARCHITECTURES.NONE:
print(f'{filename}: unknown architecture')
retval = 1
continue
failed: list[str] = []
for (name, func) in CHECKS[etype][arch]:
if not func(binary):
failed.append(name)
if failed:
print(f'{filename}: failed {" ".join(failed)}')
retval = 1
except IOError:
print(f'{filename}: cannot open')
retval = 1
sys.exit(retval)


@@ -14,31 +14,31 @@ import sys
import lief
# Debian 11 (Bullseye) EOL: 2026. https://wiki.debian.org/LTS
# Debian 10 (Buster) EOL: 2024. https://wiki.debian.org/LTS
#
# - libgcc version 10.2.1 (https://packages.debian.org/bullseye/libgcc-s1)
# - libc version 2.31 (https://packages.debian.org/source/bullseye/glibc)
# - libgcc version 8.3.0 (https://packages.debian.org/search?suite=buster&arch=any&searchon=names&keywords=libgcc1)
# - libc version 2.28 (https://packages.debian.org/search?suite=buster&arch=any&searchon=names&keywords=libc6)
#
# Ubuntu 20.04 (Focal) EOL: 2030. https://wiki.ubuntu.com/ReleaseTeam
# Ubuntu 18.04 (Bionic) EOL: 2028. https://wiki.ubuntu.com/ReleaseTeam
#
# - libgcc version 10.5.0 (https://packages.ubuntu.com/focal/libgcc1)
# - libc version 2.31 (https://packages.ubuntu.com/focal/libc6)
# - libgcc version 8.4.0 (https://packages.ubuntu.com/bionic/libgcc1)
# - libc version 2.27 (https://packages.ubuntu.com/bionic/libc6)
#
# CentOS Stream 9 EOL: 2027. https://www.centos.org/cl-vs-cs/#end-of-life
# CentOS Stream 8 EOL: 2024. https://wiki.centos.org/About/Product
#
# - libgcc version 12.2.1 (https://mirror.stream.centos.org/9-stream/AppStream/x86_64/os/Packages/)
# - libc version 2.34 (https://mirror.stream.centos.org/9-stream/AppStream/x86_64/os/Packages/)
# - libgcc version 8.5.0 (http://mirror.centos.org/centos/8-stream/AppStream/x86_64/os/Packages/)
# - libc version 2.28 (http://mirror.centos.org/centos/8-stream/AppStream/x86_64/os/Packages/)
#
# See https://gcc.gnu.org/onlinedocs/libstdc++/manual/abi.html for more info.
MAX_VERSIONS = {
'GCC': (4,3,0),
'GLIBC': {
lief.ELF.ARCH.x86_64: (2,31),
lief.ELF.ARCH.ARM: (2,31),
lief.ELF.ARCH.AARCH64:(2,31),
lief.ELF.ARCH.PPC64: (2,31),
lief.ELF.ARCH.RISCV: (2,31),
lief.ELF.ARCH.x86_64: (2,27),
lief.ELF.ARCH.ARM: (2,27),
lief.ELF.ARCH.AARCH64:(2,27),
lief.ELF.ARCH.PPC64: (2,27),
lief.ELF.ARCH.RISCV: (2,27),
},
'LIBATOMIC': (1,0),
'V': (0,5,0), # xkb (bitcoin-qt only)
@@ -212,11 +212,6 @@ def check_exported_symbols(binary) -> bool:
ok = False
return ok
def check_RUNPATH(binary) -> bool:
assert binary.get(lief.ELF.DYNAMIC_TAGS.RUNPATH) is None
assert binary.get(lief.ELF.DYNAMIC_TAGS.RPATH) is None
return True
def check_ELF_libraries(binary) -> bool:
ok: bool = True
for library in binary.libraries:
@@ -244,8 +239,8 @@ def check_MACHO_sdk(binary) -> bool:
return True
return False
def check_MACHO_lld(binary) -> bool:
if binary.build_version.tools[0].version == [18, 1, 8]:
def check_MACHO_ld64(binary) -> bool:
if binary.build_version.tools[0].version == [711, 0, 0]:
return True
return False
@@ -282,13 +277,12 @@ lief.EXE_FORMATS.ELF: [
('LIBRARY_DEPENDENCIES', check_ELF_libraries),
('INTERPRETER_NAME', check_ELF_interpreter),
('ABI', check_ELF_ABI),
('RUNPATH', check_RUNPATH),
],
lief.EXE_FORMATS.MACHO: [
('DYNAMIC_LIBRARIES', check_MACHO_libraries),
('MIN_OS', check_MACHO_min_os),
('SDK', check_MACHO_sdk),
('LLD', check_MACHO_lld),
('LD64', check_MACHO_ld64),
],
lief.EXE_FORMATS.PE: [
('DYNAMIC_LIBRARIES', check_PE_libraries),
@@ -299,14 +293,22 @@ lief.EXE_FORMATS.PE: [
if __name__ == '__main__':
retval: int = 0
for filename in sys.argv[1:]:
binary = lief.parse(filename)
etype = binary.format
try:
binary = lief.parse(filename)
etype = binary.format
if etype == lief.EXE_FORMATS.UNKNOWN:
print(f'{filename}: unknown executable format')
retval = 1
continue
failed: list[str] = []
for (name, func) in CHECKS[etype]:
if not func(binary):
failed.append(name)
if failed:
print(f'{filename}: failed {" ".join(failed)}')
failed: list[str] = []
for (name, func) in CHECKS[etype]:
if not func(binary):
failed.append(name)
if failed:
print(f'{filename}: failed {" ".join(failed)}')
retval = 1
except IOError:
print(f'{filename}: cannot open')
retval = 1
sys.exit(retval)


@@ -15,10 +15,10 @@ from utils import determine_wellknown_cmd
def write_testcode(filename):
with open(filename, 'w', encoding="utf8") as f:
f.write('''
#include <cstdio>
#include <stdio.h>
int main()
{
std::printf("the quick brown fox jumps over the lazy god\\n");
printf("the quick brown fox jumps over the lazy god\\n");
return 0;
}
''')
@@ -27,24 +27,22 @@ def clean_files(source, executable):
os.remove(source)
os.remove(executable)
def env_flags() -> list[str]:
def call_security_check(cc: str, source: str, executable: str, options) -> tuple:
# This should behave the same as AC_TRY_LINK, so arrange well-known flags
# in the same order as autoconf would.
#
# See the definitions for ac_link in autoconf's lib/autoconf/c.m4 file for
# reference.
flags: list[str] = []
for var in ['CXXFLAGS', 'CPPFLAGS', 'LDFLAGS']:
flags += filter(None, os.environ.get(var, '').split(' '))
return flags
env_flags: list[str] = []
for var in ['CFLAGS', 'CPPFLAGS', 'LDFLAGS']:
env_flags += filter(None, os.environ.get(var, '').split(' '))
def call_security_check(cxx: str, source: str, executable: str, options) -> tuple:
subprocess.run([*cxx,source,'-o',executable] + env_flags() + options, check=True)
subprocess.run([*cc,source,'-o',executable] + env_flags + options, check=True)
p = subprocess.run([os.path.join(os.path.dirname(__file__), 'security-check.py'), executable], stdout=subprocess.PIPE, text=True)
return (p.returncode, p.stdout.rstrip())
def get_arch(cxx, source, executable):
subprocess.run([*cxx, source, '-o', executable] + env_flags(), check=True)
def get_arch(cc, source, executable):
subprocess.run([*cc, source, '-o', executable], check=True)
binary = lief.parse(executable)
arch = binary.abstract.header.architecture
os.remove(executable)
@@ -52,72 +50,101 @@ def get_arch(cxx, source, executable):
class TestSecurityChecks(unittest.TestCase):
def test_ELF(self):
source = 'test1.cpp'
source = 'test1.c'
executable = 'test1'
cxx = determine_wellknown_cmd('CXX', 'g++')
cc = determine_wellknown_cmd('CC', 'gcc')
write_testcode(source)
arch = get_arch(cxx, source, executable)
arch = get_arch(cc, source, executable)
if arch == lief.ARCHITECTURES.X86:
pass_flags = ['-Wl,-znoexecstack', '-Wl,-zrelro', '-Wl,-z,now', '-pie', '-fPIE', '-Wl,-z,separate-code', '-fcf-protection=full']
self.assertEqual(call_security_check(cxx, source, executable, pass_flags + ['-Wl,-zexecstack']), (1, executable + ': failed NX'))
self.assertEqual(call_security_check(cxx, source, executable, pass_flags + ['-no-pie','-fno-PIE']), (1, executable + ': failed PIE'))
self.assertEqual(call_security_check(cxx, source, executable, pass_flags + ['-Wl,-znorelro']), (1, executable + ': failed RELRO'))
self.assertEqual(call_security_check(cxx, source, executable, pass_flags + ['-Wl,-z,noseparate-code']), (1, executable + ': failed SEPARATE_CODE'))
self.assertEqual(call_security_check(cxx, source, executable, pass_flags + ['-fcf-protection=none']), (1, executable + ': failed CONTROL_FLOW'))
self.assertEqual(call_security_check(cxx, source, executable, pass_flags), (0, ''))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-zexecstack','-fno-stack-protector','-Wl,-znorelro','-no-pie','-fno-PIE', '-Wl,-z,separate-code']),
(1, executable+': failed PIE NX RELRO Canary CONTROL_FLOW'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-znoexecstack','-fno-stack-protector','-Wl,-znorelro','-no-pie','-fno-PIE', '-Wl,-z,separate-code']),
(1, executable+': failed PIE RELRO Canary CONTROL_FLOW'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-znoexecstack','-fstack-protector-all','-Wl,-znorelro','-no-pie','-fno-PIE', '-Wl,-z,separate-code']),
(1, executable+': failed PIE RELRO CONTROL_FLOW'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-znoexecstack','-fstack-protector-all','-Wl,-znorelro','-pie','-fPIE', '-Wl,-z,separate-code']),
(1, executable+': failed RELRO CONTROL_FLOW'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-znoexecstack','-fstack-protector-all','-Wl,-zrelro','-Wl,-z,now','-pie','-fPIE', '-Wl,-z,noseparate-code']),
(1, executable+': failed separate_code CONTROL_FLOW'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-znoexecstack','-fstack-protector-all','-Wl,-zrelro','-Wl,-z,now','-pie','-fPIE', '-Wl,-z,separate-code']),
(1, executable+': failed CONTROL_FLOW'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-znoexecstack','-fstack-protector-all','-Wl,-zrelro','-Wl,-z,now','-pie','-fPIE', '-Wl,-z,separate-code', '-fcf-protection=full']),
(0, ''))
else:
pass_flags = ['-Wl,-znoexecstack', '-Wl,-zrelro', '-Wl,-z,now', '-pie', '-fPIE', '-Wl,-z,separate-code']
self.assertEqual(call_security_check(cxx, source, executable, pass_flags + ['-Wl,-zexecstack']), (1, executable + ': failed NX'))
self.assertEqual(call_security_check(cxx, source, executable, pass_flags + ['-no-pie','-fno-PIE']), (1, executable + ': failed PIE'))
self.assertEqual(call_security_check(cxx, source, executable, pass_flags + ['-Wl,-znorelro']), (1, executable + ': failed RELRO'))
self.assertEqual(call_security_check(cxx, source, executable, pass_flags + ['-Wl,-z,noseparate-code']), (1, executable + ': failed SEPARATE_CODE'))
self.assertEqual(call_security_check(cxx, source, executable, pass_flags), (0, ''))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-zexecstack','-fno-stack-protector','-Wl,-znorelro','-no-pie','-fno-PIE', '-Wl,-z,separate-code']),
(1, executable+': failed PIE NX RELRO Canary'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-znoexecstack','-fno-stack-protector','-Wl,-znorelro','-no-pie','-fno-PIE', '-Wl,-z,separate-code']),
(1, executable+': failed PIE RELRO Canary'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-znoexecstack','-fstack-protector-all','-Wl,-znorelro','-no-pie','-fno-PIE', '-Wl,-z,separate-code']),
(1, executable+': failed PIE RELRO'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-znoexecstack','-fstack-protector-all','-Wl,-znorelro','-pie','-fPIE', '-Wl,-z,separate-code']),
(1, executable+': failed RELRO'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-znoexecstack','-fstack-protector-all','-Wl,-zrelro','-Wl,-z,now','-pie','-fPIE', '-Wl,-z,noseparate-code']),
(1, executable+': failed separate_code'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-znoexecstack','-fstack-protector-all','-Wl,-zrelro','-Wl,-z,now','-pie','-fPIE', '-Wl,-z,separate-code']),
(0, ''))
clean_files(source, executable)
def test_PE(self):
source = 'test1.cpp'
source = 'test1.c'
executable = 'test1.exe'
cxx = determine_wellknown_cmd('CXX', 'x86_64-w64-mingw32-g++')
cc = determine_wellknown_cmd('CC', 'x86_64-w64-mingw32-gcc')
write_testcode(source)
pass_flags = ['-Wl,--nxcompat', '-Wl,--enable-reloc-section', '-Wl,--dynamicbase', '-Wl,--high-entropy-va', '-pie', '-fPIE', '-fcf-protection=full', '-fstack-protector-all', '-lssp']
self.assertEqual(call_security_check(cxx, source, executable, pass_flags + ['-fno-stack-protector']), (1, executable + ': failed CANARY'))
# https://github.com/lief-project/LIEF/issues/1076 - in future, we could test this individually.
# self.assertEqual(call_security_check(cxx, source, executable, pass_flags + ['-Wl,--disable-reloc-section']), (1, executable + ': failed RELOC_SECTION'))
self.assertEqual(call_security_check(cxx, source, executable, pass_flags + ['-Wl,--disable-nxcompat']), (1, executable + ': failed NX'))
self.assertEqual(call_security_check(cxx, source, executable, pass_flags + ['-Wl,--disable-dynamicbase']), (1, executable + ': failed PIE DYNAMIC_BASE HIGH_ENTROPY_VA')) # -pie -fPIE does nothing without --dynamicbase
self.assertEqual(call_security_check(cxx, source, executable, pass_flags + ['-Wl,--disable-high-entropy-va']), (1, executable + ': failed HIGH_ENTROPY_VA'))
self.assertEqual(call_security_check(cxx, source, executable, pass_flags + ['-fcf-protection=none']), (1, executable + ': failed CONTROL_FLOW'))
self.assertEqual(call_security_check(cxx, source, executable, pass_flags), (0, ''))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,--disable-nxcompat','-Wl,--disable-reloc-section','-Wl,--disable-dynamicbase','-Wl,--disable-high-entropy-va','-no-pie','-fno-PIE','-fno-stack-protector']),
(1, executable+': failed PIE DYNAMIC_BASE HIGH_ENTROPY_VA NX RELOC_SECTION CONTROL_FLOW Canary'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,--nxcompat','-Wl,--disable-reloc-section','-Wl,--disable-dynamicbase','-Wl,--disable-high-entropy-va','-no-pie','-fno-PIE','-fstack-protector-all', '-lssp']),
(1, executable+': failed PIE DYNAMIC_BASE HIGH_ENTROPY_VA RELOC_SECTION CONTROL_FLOW'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,--nxcompat','-Wl,--enable-reloc-section','-Wl,--disable-dynamicbase','-Wl,--disable-high-entropy-va','-no-pie','-fno-PIE','-fstack-protector-all', '-lssp']),
(1, executable+': failed PIE DYNAMIC_BASE HIGH_ENTROPY_VA CONTROL_FLOW'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,--nxcompat','-Wl,--enable-reloc-section','-Wl,--disable-dynamicbase','-Wl,--disable-high-entropy-va','-pie','-fPIE','-fstack-protector-all', '-lssp']),
(1, executable+': failed PIE DYNAMIC_BASE HIGH_ENTROPY_VA CONTROL_FLOW')) # -pie -fPIE does nothing unless --dynamicbase is also supplied
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,--nxcompat','-Wl,--enable-reloc-section','-Wl,--dynamicbase','-Wl,--disable-high-entropy-va','-pie','-fPIE','-fstack-protector-all', '-lssp']),
(1, executable+': failed HIGH_ENTROPY_VA CONTROL_FLOW'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,--nxcompat','-Wl,--enable-reloc-section','-Wl,--dynamicbase','-Wl,--high-entropy-va','-pie','-fPIE','-fstack-protector-all', '-lssp']),
(1, executable+': failed CONTROL_FLOW'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,--nxcompat','-Wl,--enable-reloc-section','-Wl,--dynamicbase','-Wl,--high-entropy-va','-pie','-fPIE', '-fcf-protection=full','-fstack-protector-all', '-lssp']),
(0, ''))
clean_files(source, executable)
def test_MACHO(self):
source = 'test1.cpp'
source = 'test1.c'
executable = 'test1'
cxx = determine_wellknown_cmd('CXX', 'clang++')
cc = determine_wellknown_cmd('CC', 'clang')
write_testcode(source)
arch = get_arch(cxx, source, executable)
arch = get_arch(cc, source, executable)
if arch == lief.ARCHITECTURES.X86:
pass_flags = ['-Wl,-pie', '-fstack-protector-all', '-fcf-protection=full', '-Wl,-fixup_chains']
self.assertEqual(call_security_check(cxx, source, executable, pass_flags + ['-Wl,-no_pie', '-Wl,-no_fixup_chains']), (1, executable+': failed FIXUP_CHAINS PIE')) # -fixup_chains is incompatible with -no_pie
self.assertEqual(call_security_check(cxx, source, executable, pass_flags + ['-Wl,-no_fixup_chains']), (1, executable + ': failed FIXUP_CHAINS'))
self.assertEqual(call_security_check(cxx, source, executable, pass_flags + ['-fno-stack-protector']), (1, executable + ': failed CANARY'))
self.assertEqual(call_security_check(cxx, source, executable, pass_flags + ['-Wl,-flat_namespace']), (1, executable + ': failed NOUNDEFS'))
self.assertEqual(call_security_check(cxx, source, executable, pass_flags + ['-fcf-protection=none']), (1, executable + ': failed CONTROL_FLOW'))
self.assertEqual(call_security_check(cxx, source, executable, pass_flags), (0, ''))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-no_pie','-Wl,-flat_namespace','-Wl,-allow_stack_execute','-fno-stack-protector', '-Wl,-no_fixup_chains']),
(1, executable+': failed NOUNDEFS Canary FIXUP_CHAINS PIE NX CONTROL_FLOW'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-no_pie','-Wl,-flat_namespace','-Wl,-allow_stack_execute','-fno-stack-protector', '-Wl,-fixup_chains']),
(1, executable+': failed NOUNDEFS Canary PIE NX CONTROL_FLOW'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-no_pie','-Wl,-flat_namespace','-Wl,-allow_stack_execute','-fstack-protector-all', '-Wl,-fixup_chains']),
(1, executable+': failed NOUNDEFS PIE NX CONTROL_FLOW'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-no_pie','-Wl,-flat_namespace','-fstack-protector-all', '-Wl,-fixup_chains']),
(1, executable+': failed NOUNDEFS PIE CONTROL_FLOW'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-no_pie','-fstack-protector-all', '-Wl,-fixup_chains']),
(1, executable+': failed PIE CONTROL_FLOW'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-no_pie','-fstack-protector-all', '-Wl,-fixup_chains']),
(1, executable+': failed PIE CONTROL_FLOW'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-no_pie','-fstack-protector-all', '-fcf-protection=full', '-Wl,-fixup_chains']),
(1, executable+': failed PIE'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-pie','-fstack-protector-all', '-fcf-protection=full', '-Wl,-fixup_chains']),
(0, ''))
else:
# arm64 darwin doesn't support non-PIE binaries or executable stacks
pass_flags = ['-fstack-protector-all', '-Wl,-fixup_chains', '-mbranch-protection=bti']
self.assertEqual(call_security_check(cxx, source, executable, pass_flags + ['-mbranch-protection=none']), (1, executable + ': failed BRANCH_PROTECTION'))
self.assertEqual(call_security_check(cxx, source, executable, pass_flags + ['-Wl,-no_fixup_chains']), (1, executable + ': failed FIXUP_CHAINS'))
self.assertEqual(call_security_check(cxx, source, executable, pass_flags + ['-fno-stack-protector']), (1, executable + ': failed CANARY'))
self.assertEqual(call_security_check(cxx, source, executable, pass_flags + ['-Wl,-flat_namespace']), (1, executable + ': failed NOUNDEFS'))
self.assertEqual(call_security_check(cxx, source, executable, pass_flags), (0, ''))
# arm64 darwin doesn't support non-PIE binaries, control flow or executable stacks
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-flat_namespace','-fno-stack-protector', '-Wl,-no_fixup_chains']),
(1, executable+': failed NOUNDEFS Canary FIXUP_CHAINS BRANCH_PROTECTION'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-flat_namespace','-fno-stack-protector', '-Wl,-fixup_chains', '-mbranch-protection=bti']),
(1, executable+': failed NOUNDEFS Canary'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-flat_namespace','-fstack-protector-all', '-Wl,-fixup_chains', '-mbranch-protection=bti']),
(1, executable+': failed NOUNDEFS'))
self.assertEqual(call_security_check(cc, source, executable, ['-fstack-protector-all', '-Wl,-fixup_chains', '-mbranch-protection=bti']),
(0, ''))
clean_files(source, executable)


@@ -11,31 +11,35 @@ import unittest
from utils import determine_wellknown_cmd
def call_symbol_check(cxx: list[str], source, executable, options):
def call_symbol_check(cc: list[str], source, executable, options):
# This should behave the same as AC_TRY_LINK, so arrange well-known flags
# in the same order as autoconf would.
#
# See the definitions for ac_link in autoconf's lib/autoconf/c.m4 file for
# reference.
env_flags: list[str] = []
for var in ['CXXFLAGS', 'CPPFLAGS', 'LDFLAGS']:
for var in ['CFLAGS', 'CPPFLAGS', 'LDFLAGS']:
env_flags += filter(None, os.environ.get(var, '').split(' '))
subprocess.run([*cxx,source,'-o',executable] + env_flags + options, check=True)
subprocess.run([*cc,source,'-o',executable] + env_flags + options, check=True)
p = subprocess.run([os.path.join(os.path.dirname(__file__), 'symbol-check.py'), executable], stdout=subprocess.PIPE, text=True)
os.remove(source)
os.remove(executable)
return (p.returncode, p.stdout.rstrip())
def get_machine(cc: list[str]):
p = subprocess.run([*cc,'-dumpmachine'], stdout=subprocess.PIPE, text=True)
return p.stdout.rstrip()
class TestSymbolChecks(unittest.TestCase):
def test_ELF(self):
source = 'test1.cpp'
source = 'test1.c'
executable = 'test1'
cxx = determine_wellknown_cmd('CXX', 'g++')
cc = determine_wellknown_cmd('CC', 'gcc')
# -lutil is part of the libc6 package so a safe bet that it's installed
# it's also out of context enough that it's unlikely to ever become a real dependency
source = 'test2.cpp'
source = 'test2.c'
executable = 'test2'
with open(source, 'w', encoding="utf8") as f:
f.write('''
@@ -48,31 +52,31 @@ class TestSymbolChecks(unittest.TestCase):
}
''')
self.assertEqual(call_symbol_check(cxx, source, executable, ['-lutil']),
self.assertEqual(call_symbol_check(cc, source, executable, ['-lutil']),
(1, executable + ': libutil.so.1 is not in ALLOWED_LIBRARIES!\n' +
executable + ': failed LIBRARY_DEPENDENCIES'))
# finally, check a simple conforming binary
source = 'test3.cpp'
source = 'test3.c'
executable = 'test3'
with open(source, 'w', encoding="utf8") as f:
f.write('''
#include <cstdio>
#include <stdio.h>
int main()
{
std::printf("42");
printf("42");
return 0;
}
''')
self.assertEqual(call_symbol_check(cxx, source, executable, []),
self.assertEqual(call_symbol_check(cc, source, executable, []),
(0, ''))
def test_MACHO(self):
source = 'test1.cpp'
source = 'test1.c'
executable = 'test1'
cxx = determine_wellknown_cmd('CXX', 'clang++')
cc = determine_wellknown_cmd('CC', 'clang')
with open(source, 'w', encoding="utf8") as f:
f.write('''
@@ -86,11 +90,11 @@ class TestSymbolChecks(unittest.TestCase):
''')
self.assertEqual(call_symbol_check(cxx, source, executable, ['-lexpat', '-Wl,-platform_version','-Wl,macos', '-Wl,11.4', '-Wl,11.4']),
self.assertEqual(call_symbol_check(cc, source, executable, ['-lexpat', '-Wl,-platform_version','-Wl,macos', '-Wl,11.4', '-Wl,11.4']),
(1, 'libexpat.1.dylib is not in ALLOWED_LIBRARIES!\n' +
f'{executable}: failed DYNAMIC_LIBRARIES MIN_OS SDK'))
source = 'test2.cpp'
source = 'test2.c'
executable = 'test2'
with open(source, 'w', encoding="utf8") as f:
f.write('''
@@ -103,10 +107,10 @@ class TestSymbolChecks(unittest.TestCase):
}
''')
self.assertEqual(call_symbol_check(cxx, source, executable, ['-framework', 'CoreGraphics', '-Wl,-platform_version','-Wl,macos', '-Wl,11.4', '-Wl,11.4']),
self.assertEqual(call_symbol_check(cc, source, executable, ['-framework', 'CoreGraphics', '-Wl,-platform_version','-Wl,macos', '-Wl,11.4', '-Wl,11.4']),
(1, f'{executable}: failed MIN_OS SDK'))
source = 'test3.cpp'
source = 'test3.c'
executable = 'test3'
with open(source, 'w', encoding="utf8") as f:
f.write('''
@@ -116,13 +120,13 @@ class TestSymbolChecks(unittest.TestCase):
}
''')
self.assertEqual(call_symbol_check(cxx, source, executable, ['-Wl,-platform_version','-Wl,macos', '-Wl,11.0', '-Wl,11.4']),
self.assertEqual(call_symbol_check(cc, source, executable, ['-Wl,-platform_version','-Wl,macos', '-Wl,11.0', '-Wl,11.4']),
(1, f'{executable}: failed SDK'))
def test_PE(self):
source = 'test1.cpp'
source = 'test1.c'
executable = 'test1.exe'
cxx = determine_wellknown_cmd('CXX', 'x86_64-w64-mingw32-g++')
cc = determine_wellknown_cmd('CC', 'x86_64-w64-mingw32-gcc')
with open(source, 'w', encoding="utf8") as f:
f.write('''
@@ -135,11 +139,11 @@ class TestSymbolChecks(unittest.TestCase):
}
''')
self.assertEqual(call_symbol_check(cxx, source, executable, ['-lpdh', '-Wl,--major-subsystem-version', '-Wl,6', '-Wl,--minor-subsystem-version', '-Wl,1']),
self.assertEqual(call_symbol_check(cc, source, executable, ['-lpdh', '-Wl,--major-subsystem-version', '-Wl,6', '-Wl,--minor-subsystem-version', '-Wl,1']),
(1, 'pdh.dll is not in ALLOWED_LIBRARIES!\n' +
executable + ': failed DYNAMIC_LIBRARIES'))
source = 'test2.cpp'
source = 'test2.c'
executable = 'test2.exe'
with open(source, 'w', encoding="utf8") as f:
@@ -150,10 +154,10 @@ class TestSymbolChecks(unittest.TestCase):
}
''')
self.assertEqual(call_symbol_check(cxx, source, executable, ['-Wl,--major-subsystem-version', '-Wl,9', '-Wl,--minor-subsystem-version', '-Wl,9']),
self.assertEqual(call_symbol_check(cc, source, executable, ['-Wl,--major-subsystem-version', '-Wl,9', '-Wl,--minor-subsystem-version', '-Wl,9']),
(1, executable + ': failed SUBSYSTEM_VERSION'))
source = 'test3.cpp'
source = 'test3.c'
executable = 'test3.exe'
with open(source, 'w', encoding="utf8") as f:
f.write('''
@@ -166,7 +170,7 @@ class TestSymbolChecks(unittest.TestCase):
}
''')
self.assertEqual(call_symbol_check(cxx, source, executable, ['-lole32', '-Wl,--major-subsystem-version', '-Wl,6', '-Wl,--minor-subsystem-version', '-Wl,1']),
self.assertEqual(call_symbol_check(cc, source, executable, ['-lole32', '-Wl,--major-subsystem-version', '-Wl,6', '-Wl,--minor-subsystem-version', '-Wl,1']),
(0, ''))


@@ -17,21 +17,21 @@ NON_DETERMINISTIC_TESTS=(
"blockfilter_index_tests/blockfilter_index_initial_sync" # src/checkqueue.h: In CCheckQueue::Loop(): while (queue.empty()) { ... }
"coinselector_tests/knapsack_solver_test" # coinselector_tests.cpp: if (equal_sets(setCoinsRet, setCoinsRet2))
"fs_tests/fsbridge_fstream" # deterministic test failure?
"miner_tests/CreateNewBlock_validity" # validation.cpp: if (signals.CallbacksPending() > 10)
"miner_tests/CreateNewBlock_validity" # validation.cpp: if (GetMainSignals().CallbacksPending() > 10)
"scheduler_tests/manythreads" # scheduler.cpp: CScheduler::serviceQueue()
"scheduler_tests/singlethreadedscheduler_ordered" # scheduler.cpp: CScheduler::serviceQueue()
"txvalidationcache_tests/checkinputs_test" # validation.cpp: if (signals.CallbacksPending() > 10)
"txvalidationcache_tests/tx_mempool_block_doublespend" # validation.cpp: if (signals.CallbacksPending() > 10)
"txindex_tests/txindex_initial_sync" # validation.cpp: if (signals.CallbacksPending() > 10)
"txvalidation_tests/tx_mempool_reject_coinbase" # validation.cpp: if (signals.CallbacksPending() > 10)
"validation_block_tests/processnewblock_signals_ordering" # validation.cpp: if (signals.CallbacksPending() > 10)
"wallet_tests/coin_mark_dirty_immature_credit" # validation.cpp: if (signals.CallbacksPending() > 10)
"wallet_tests/dummy_input_size_test" # validation.cpp: if (signals.CallbacksPending() > 10)
"wallet_tests/importmulti_rescan" # validation.cpp: if (signals.CallbacksPending() > 10)
"wallet_tests/importwallet_rescan" # validation.cpp: if (signals.CallbacksPending() > 10)
"wallet_tests/ListCoins" # validation.cpp: if (signals.CallbacksPending() > 10)
"wallet_tests/scan_for_wallet_transactions" # validation.cpp: if (signals.CallbacksPending() > 10)
"wallet_tests/wallet_disableprivkeys" # validation.cpp: if (signals.CallbacksPending() > 10)
"txvalidationcache_tests/checkinputs_test" # validation.cpp: if (GetMainSignals().CallbacksPending() > 10)
"txvalidationcache_tests/tx_mempool_block_doublespend" # validation.cpp: if (GetMainSignals().CallbacksPending() > 10)
"txindex_tests/txindex_initial_sync" # validation.cpp: if (GetMainSignals().CallbacksPending() > 10)
"txvalidation_tests/tx_mempool_reject_coinbase" # validation.cpp: if (GetMainSignals().CallbacksPending() > 10)
"validation_block_tests/processnewblock_signals_ordering" # validation.cpp: if (GetMainSignals().CallbacksPending() > 10)
"wallet_tests/coin_mark_dirty_immature_credit" # validation.cpp: if (GetMainSignals().CallbacksPending() > 10)
"wallet_tests/dummy_input_size_test" # validation.cpp: if (GetMainSignals().CallbacksPending() > 10)
"wallet_tests/importmulti_rescan" # validation.cpp: if (GetMainSignals().CallbacksPending() > 10)
"wallet_tests/importwallet_rescan" # validation.cpp: if (GetMainSignals().CallbacksPending() > 10)
"wallet_tests/ListCoins" # validation.cpp: if (GetMainSignals().CallbacksPending() > 10)
"wallet_tests/scan_for_wallet_transactions" # validation.cpp: if (GetMainSignals().CallbacksPending() > 10)
"wallet_tests/wallet_disableprivkeys" # validation.cpp: if (GetMainSignals().CallbacksPending() > 10)
)
TEST_BITCOIN_BINARY="src/test/test_bitcoin"


@@ -138,7 +138,7 @@ echo
echo "-- Now: add the following to CMainParams::m_assumeutxo_data"
echo " in src/kernel/chainparams.cpp, and recompile:"
echo
echo " {.height = ${RPC_BASE_HEIGHT}, .hash_serialized = AssumeutxoHash{uint256{\"${RPC_AU}\"}}, .m_chain_tx_count = ${RPC_NCHAINTX}, .blockhash = consteval_ctor(uint256{\"${RPC_BLOCKHASH}\"})},"
echo " {${RPC_BASE_HEIGHT}, AssumeutxoHash{uint256S(\"0x${RPC_AU}\")}, ${RPC_NCHAINTX}, uint256S(\"0x${RPC_BLOCKHASH}\")},"
echo
echo
echo "-- IBDing more blocks to the server node (height=$FINAL_HEIGHT) so there is a diff between snapshot and tip..."


@@ -36,16 +36,6 @@ if (( GENERATE_AT_HEIGHT < PRUNED )); then
exit 1
fi
# Check current block height to ensure the node has synchronized past the required block
CURRENT_BLOCK_HEIGHT=$(${BITCOIN_CLI_CALL} getblockcount)
PIVOT_BLOCK_HEIGHT=$(( GENERATE_AT_HEIGHT + 1 ))
if (( PIVOT_BLOCK_HEIGHT > CURRENT_BLOCK_HEIGHT )); then
(>&2 echo "Error: The node has not yet synchronized to block height ${PIVOT_BLOCK_HEIGHT}.")
(>&2 echo "Please wait until the node has synchronized past this block height and try again.")
exit 1
fi
# Early exit if file at OUTPUT_PATH already exists
if [[ -e "$OUTPUT_PATH" ]]; then
(>&2 echo "Error: $OUTPUT_PATH already exists or is not a valid path.")


@@ -319,7 +319,7 @@ Source: https://logs.guix.gnu.org/guix/2020-11-12.log#232527
Start by cloning Guix:
```
git clone https://codeberg.org/guix/guix.git
git clone https://git.savannah.gnu.org/git/guix.git
cd guix
```
@@ -607,7 +607,7 @@ checklist.
```
Generation 38 Feb 22 2021 16:39:31 (current)
guix f350df4
repository URL: https://codeberg.org/guix/guix.git
repository URL: https://git.savannah.gnu.org/git/guix.git
branch: version-1.2.0
commit: f350df405fbcd5b9e27e6b6aa500da7f101f41e7
```
@@ -671,8 +671,6 @@ More information: https://github.com/python/cpython/issues/81765
OpenSSL includes tests that will fail once some certificate has expired.
The workarounds from the GnuTLS section immediately below can be used.
For openssl-1.1.1l use 2022-05-01 as the date.
### GnuTLS: test-suite FAIL: status-request-revoked
*The derivation is likely identified by: `/gnu/store/vhphki5sg9xkdhh2pbc8gi6vhpfzryf0-gnutls-3.6.12.drv`*
@@ -707,12 +705,11 @@ authorized.
This workaround was described [here](https://issues.guix.gnu.org/44559#5).
Basically:
1. Turn off NTP
2. Set system time to 2020-10-01
3. guix build --no-substitutes /gnu/store/vhphki5sg9xkdhh2pbc8gi6vhpfzryf0-gnutls-3.6.12.drv
4. Set system time back to accurate current time
5. Turn NTP back on
2. Turn off NTP
3. Set system time to 2020-10-01
4. guix build --no-substitutes /gnu/store/vhphki5sg9xkdhh2pbc8gi6vhpfzryf0-gnutls-3.6.12.drv
5. Set system time back to accurate current time
6. Turn NTP back on
For example,
@@ -760,8 +757,8 @@ Please see the following links for more details:
- An upstream coreutils bug has been filed: [debbugs#47940](https://debbugs.gnu.org/cgi/bugreport.cgi?bug=47940)
- A Guix bug detailing the underlying problem has been filed: [guix-issues#47935](https://issues.guix.gnu.org/47935), [guix-issues#49985](https://issues.guix.gnu.org/49985#5)
- A commit to skip this test is included since Guix 1.4.0:
[codeberg/guix@6ba1058](https://codeberg.org/guix/guix/commit/6ba1058df0c4ce5611c2367531ae5c3cdc729ab4)
- A commit to skip this test in Guix has been merged into the core-updates branch:
[savannah/guix@6ba1058](https://git.savannah.gnu.org/cgit/guix.git/commit/?id=6ba1058df0c4ce5611c2367531ae5c3cdc729ab4)
[install-script]: #options-1-and-2-using-the-official-shell-installer-script-or-binary-tarball

View File

@@ -69,19 +69,12 @@ fi
mkdir -p "$VERSION_BASE"
################
# SOURCE_DATE_EPOCH should not unintentionally be set
################
check_source_date_epoch
################
# Build directories should not exist
################
# Default to building for all supported HOSTs (overridable by environment)
# powerpc64le-linux-gnu currently disabled due to non-determinism issues across build arches.
export HOSTS="${HOSTS:-x86_64-linux-gnu arm-linux-gnueabihf aarch64-linux-gnu riscv64-linux-gnu powerpc64-linux-gnu
export HOSTS="${HOSTS:-x86_64-linux-gnu arm-linux-gnueabihf aarch64-linux-gnu riscv64-linux-gnu powerpc64-linux-gnu powerpc64le-linux-gnu
x86_64-w64-mingw32
x86_64-apple-darwin arm64-apple-darwin}"
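Because the assignment uses the `${HOSTS:-…}` default form, the platform list can be narrowed by exporting `HOSTS` before invoking the build (the entry-point path follows this repository's contrib/guix layout):

```
# Build only two targets instead of the full matrix.
HOSTS="x86_64-linux-gnu x86_64-w64-mingw32" ./contrib/guix/guix-build
```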
@@ -367,10 +360,6 @@ INFO: Building ${VERSION:?not set} for platform triple ${HOST:?not set}:
...bind-mounted in container to: '$(DISTSRC_BASE=/distsrc-base && distsrc_for_host "$HOST")'
...outputting in: '$(outdir_for_host "$HOST")'
...bind-mounted in container to: '$(OUTDIR_BASE=/outdir-base && outdir_for_host "$HOST")'
ADDITIONAL FLAGS (if set)
ADDITIONAL_GUIX_COMMON_FLAGS: ${ADDITIONAL_GUIX_COMMON_FLAGS}
ADDITIONAL_GUIX_ENVIRONMENT_FLAGS: ${ADDITIONAL_GUIX_ENVIRONMENT_FLAGS}
ADDITIONAL_GUIX_TIMEMACHINE_FLAGS: ${ADDITIONAL_GUIX_TIMEMACHINE_FLAGS}
EOF
# Run the build script 'contrib/guix/libexec/build.sh' in the build

View File

@@ -67,12 +67,6 @@ EOF
exit 1
fi
################
# SOURCE_DATE_EPOCH should not unintentionally be set
################
check_source_date_epoch
################
# The codesignature git worktree should not be dirty
################
@@ -143,7 +137,7 @@ fi
################
# Codesigning tarballs SHOULD exist
# Unsigned tarballs SHOULD exist
################
# Usage: outdir_for_host HOST SUFFIX
@@ -155,13 +149,13 @@ outdir_for_host() {
}
codesigning_tarball_for_host() {
unsigned_tarball_for_host() {
case "$1" in
*mingw*)
echo "$(outdir_for_host "$1")/${DISTNAME}-win64-codesigning.tar.gz"
echo "$(outdir_for_host "$1")/${DISTNAME}-win64-unsigned.tar.gz"
;;
*darwin*)
echo "$(outdir_for_host "$1")/${DISTNAME}-${1}-codesigning.tar.gz"
echo "$(outdir_for_host "$1")/${DISTNAME}-${1}-unsigned.tar.gz"
;;
*)
exit 1
@@ -170,22 +164,22 @@ codesigning_tarball_for_host() {
}
# Accumulate a list of hosts whose tarballs are missing...
hosts_codesigning_tarball_missing=""
hosts_unsigned_tarball_missing=""
for host in $HOSTS; do
if [ ! -e "$(codesigning_tarball_for_host "$host")" ]; then
hosts_codesigning_tarball_missing+=" ${host}"
if [ ! -e "$(unsigned_tarball_for_host "$host")" ]; then
hosts_unsigned_tarball_missing+=" ${host}"
fi
done
if [ -n "$hosts_codesigning_tarball_missing" ]; then
if [ -n "$hosts_unsigned_tarball_missing" ]; then
# ...so that we can print them out nicely in an error message
cat << EOF
ERR: Codesigning tarballs do not exist
ERR: Unsigned tarballs do not exist
...
EOF
for host in $hosts_codesigning_tarball_missing; do
echo " ${host} '$(codesigning_tarball_for_host "$host")'"
for host in $hosts_unsigned_tarball_missing; do
echo " ${host} '$(unsigned_tarball_for_host "$host")'"
done
exit 1
fi
@@ -377,7 +371,7 @@ EOF
OUTDIR="$(OUTDIR_BASE=/outdir-base && outdir_for_host "$HOST" codesigned)" \
DIST_ARCHIVE_BASE=/outdir-base/dist-archive \
DETACHED_SIGS_REPO=/detached-sigs \
CODESIGNING_TARBALL="$(OUTDIR_BASE=/outdir-base && codesigning_tarball_for_host "$HOST")" \
UNSIGNED_TARBALL="$(OUTDIR_BASE=/outdir-base && unsigned_tarball_for_host "$HOST")" \
bash -c "cd /bitcoin && bash contrib/guix/libexec/codesign.sh"
)

View File

@@ -61,6 +61,7 @@ store_path() {
# Set environment variables to point the NATIVE toolchain to the right
# includes/libs
NATIVE_GCC="$(store_path gcc-toolchain)"
NATIVE_GCC_STATIC="$(store_path gcc-toolchain static)"
unset LIBRARY_PATH
unset CPATH
@@ -69,17 +70,11 @@ unset CPLUS_INCLUDE_PATH
unset OBJC_INCLUDE_PATH
unset OBJCPLUS_INCLUDE_PATH
export LIBRARY_PATH="${NATIVE_GCC}/lib:${NATIVE_GCC_STATIC}/lib"
export C_INCLUDE_PATH="${NATIVE_GCC}/include"
export CPLUS_INCLUDE_PATH="${NATIVE_GCC}/include/c++:${NATIVE_GCC}/include"
case "$HOST" in
*darwin*) export LIBRARY_PATH="${NATIVE_GCC}/lib" ;; # Required for qt/qmake
*mingw*) export LIBRARY_PATH="${NATIVE_GCC}/lib" ;;
*)
NATIVE_GCC_STATIC="$(store_path gcc-toolchain static)"
export LIBRARY_PATH="${NATIVE_GCC}/lib:${NATIVE_GCC_STATIC}/lib"
;;
esac
export OBJC_INCLUDE_PATH="${NATIVE_GCC}/include"
export OBJCPLUS_INCLUDE_PATH="${NATIVE_GCC}/include/c++:${NATIVE_GCC}/include"
# Set environment variables to point the CROSS toolchain to the right
# includes/libs for $HOST
@@ -131,7 +126,18 @@ for p in "${PATHS[@]}"; do
done
# Disable Guix ld auto-rpath behavior
export GUIX_LD_WRAPPER_DISABLE_RPATH=yes
case "$HOST" in
*darwin*)
# The auto-rpath behavior is necessary for darwin builds as some native
# tools built by depends refer to and depend on Guix-built native
# libraries
#
# After the native packages in depends are built, the ld wrapper should
# no longer affect our build, as clang would instead reach for
# x86_64-apple-darwin-ld from cctools
;;
*) export GUIX_LD_WRAPPER_DISABLE_RPATH=yes ;;
esac
# Make /usr/bin if it doesn't exist
[ -e /usr/bin ] || mkdir -p /usr/bin
@@ -160,6 +166,16 @@ esac
# Environment variables for determinism
export TAR_OPTIONS="--owner=0 --group=0 --numeric-owner --mtime='@${SOURCE_DATE_EPOCH}' --sort=name"
export TZ="UTC"
case "$HOST" in
*darwin*)
# cctools AR, unlike GNU binutils AR, does not have a deterministic mode
# or a configure flag to enable determinism by default, it only
# understands if this env-var is set or not. See:
#
# https://github.com/tpoechtrager/cctools-port/blob/55562e4073dea0fbfd0b20e0bf69ffe6390c7f97/cctools/ar/archive.c#L334
export ZERO_AR_DATE=yes
;;
esac
####################
# Depends Building #
@@ -176,16 +192,9 @@ make -C depends --jobs="$JOBS" HOST="$HOST" \
x86_64_linux_AR=x86_64-linux-gnu-gcc-ar \
x86_64_linux_RANLIB=x86_64-linux-gnu-gcc-ranlib \
x86_64_linux_NM=x86_64-linux-gnu-gcc-nm \
x86_64_linux_STRIP=x86_64-linux-gnu-strip
x86_64_linux_STRIP=x86_64-linux-gnu-strip \
FORCE_USE_SYSTEM_CLANG=1
case "$HOST" in
*darwin*)
# Unset now that Qt is built
unset C_INCLUDE_PATH
unset CPLUS_INCLUDE_PATH
unset LIBRARY_PATH
;;
esac
###########################
# Source Tarball Building #
@@ -289,19 +298,49 @@ mkdir -p "$DISTSRC"
;;
esac
case "$HOST" in
*darwin*)
make osx_volname ${V:+V=1}
make deploydir ${V:+V=1}
mkdir -p "unsigned-app-${HOST}"
cp --target-directory="unsigned-app-${HOST}" \
osx_volname \
contrib/macdeploy/detached-sig-create.sh
mv --target-directory="unsigned-app-${HOST}" dist
(
cd "unsigned-app-${HOST}"
find . -print0 \
| sort --zero-terminated \
| tar --create --no-recursion --mode='u+rw,go+r-w,a+X' --null --files-from=- \
| gzip -9n > "${OUTDIR}/${DISTNAME}-${HOST}-unsigned.tar.gz" \
|| ( rm -f "${OUTDIR}/${DISTNAME}-${HOST}-unsigned.tar.gz" && exit 1 )
)
make deploy ${V:+V=1} OSX_ZIP="${OUTDIR}/${DISTNAME}-${HOST}-unsigned.zip"
;;
esac
(
cd installed
case "$HOST" in
*mingw*)
mv --target-directory="$DISTNAME"/lib/ "$DISTNAME"/bin/*.dll
;;
esac
# Prune libtool and object archives
find . -name "lib*.la" -delete
find . -name "lib*.a" -delete
# Prune pkg-config files
rm -rf "${DISTNAME}/lib/pkgconfig"
case "$HOST" in
*darwin*) ;;
*)
# Split binaries from their debug symbols
# Split binaries and libraries from their debug symbols
{
find "${DISTNAME}/bin" -type f -executable -print0
find "${DISTNAME}/lib" -type f -print0
} | xargs -0 -P"$JOBS" -I{} "${DISTSRC}/contrib/devtools/split-debug.sh" {} {} {}.dbg
;;
esac
@@ -321,7 +360,7 @@ mkdir -p "$DISTSRC"
cp -r "${DISTSRC}/share/rpcauth" "${DISTNAME}/share/"
# Deterministically produce {non-,}debug binary tarballs ready
# Finally, deterministically produce {non-,}debug binary tarballs ready
# for release
case "$HOST" in
*mingw*)
@@ -329,8 +368,8 @@ mkdir -p "$DISTSRC"
| xargs -0r touch --no-dereference --date="@${SOURCE_DATE_EPOCH}"
find "${DISTNAME}" -not -name "*.dbg" \
| sort \
| zip -X@ "${OUTDIR}/${DISTNAME}-${HOST//x86_64-w64-mingw32/win64}-unsigned.zip" \
|| ( rm -f "${OUTDIR}/${DISTNAME}-${HOST//x86_64-w64-mingw32/win64}-unsigned.zip" && exit 1 )
| zip -X@ "${OUTDIR}/${DISTNAME}-${HOST//x86_64-w64-mingw32/win64}.zip" \
|| ( rm -f "${OUTDIR}/${DISTNAME}-${HOST//x86_64-w64-mingw32/win64}.zip" && exit 1 )
find "${DISTNAME}" -name "*.dbg" -print0 \
| xargs -0r touch --no-dereference --date="@${SOURCE_DATE_EPOCH}"
find "${DISTNAME}" -name "*.dbg" \
@@ -354,13 +393,12 @@ mkdir -p "$DISTSRC"
find "${DISTNAME}" -print0 \
| sort --zero-terminated \
| tar --create --no-recursion --mode='u+rw,go+r-w,a+X' --null --files-from=- \
| gzip -9n > "${OUTDIR}/${DISTNAME}-${HOST}-unsigned.tar.gz" \
|| ( rm -f "${OUTDIR}/${DISTNAME}-${HOST}-unsigned.tar.gz" && exit 1 )
| gzip -9n > "${OUTDIR}/${DISTNAME}-${HOST}.tar.gz" \
|| ( rm -f "${OUTDIR}/${DISTNAME}-${HOST}.tar.gz" && exit 1 )
;;
esac
) # $DISTSRC/installed
# Finally make tarballs for codesigning
case "$HOST" in
*mingw*)
cp -rf --target-directory=. contrib/windeploy
@@ -368,33 +406,13 @@ mkdir -p "$DISTSRC"
cd ./windeploy
mkdir -p unsigned
cp --target-directory=unsigned/ "${OUTDIR}/${DISTNAME}-win64-setup-unsigned.exe"
cp -r --target-directory=unsigned/ "${INSTALLPATH}"
find unsigned/ -name "*.dbg" -print0 \
| xargs -0r rm
find . -print0 \
| sort --zero-terminated \
| tar --create --no-recursion --mode='u+rw,go+r-w,a+X' --null --files-from=- \
| gzip -9n > "${OUTDIR}/${DISTNAME}-win64-codesigning.tar.gz" \
|| ( rm -f "${OUTDIR}/${DISTNAME}-win64-codesigning.tar.gz" && exit 1 )
| gzip -9n > "${OUTDIR}/${DISTNAME}-win64-unsigned.tar.gz" \
|| ( rm -f "${OUTDIR}/${DISTNAME}-win64-unsigned.tar.gz" && exit 1 )
)
;;
*darwin*)
make deploydir ${V:+V=1}
mkdir -p "unsigned-app-${HOST}"
cp --target-directory="unsigned-app-${HOST}" \
contrib/macdeploy/detached-sig-create.sh
mv --target-directory="unsigned-app-${HOST}" dist
cp -r --target-directory="unsigned-app-${HOST}" "${INSTALLPATH}"
(
cd "unsigned-app-${HOST}"
find . -print0 \
| sort --zero-terminated \
| tar --create --no-recursion --mode='u+rw,go+r-w,a+X' --null --files-from=- \
| gzip -9n > "${OUTDIR}/${DISTNAME}-${HOST}-codesigning.tar.gz" \
|| ( rm -f "${OUTDIR}/${DISTNAME}-${HOST}-codesigning.tar.gz" && exit 1 )
)
make deploy ${V:+V=1} OSX_ZIP="${OUTDIR}/${DISTNAME}-${HOST}-unsigned.zip"
;;
esac
) # $DISTSRC

View File

@@ -4,9 +4,6 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
export LC_ALL=C
set -e -o pipefail
# Environment variables for determinism
export TAR_OPTIONS="--owner=0 --group=0 --numeric-owner --mtime='@${SOURCE_DATE_EPOCH}' --sort=name"
export TZ=UTC
# Although Guix _does_ set umask when building its own packages (in our case,
@@ -30,7 +27,7 @@ fi
# Check that required environment variables are set
cat << EOF
Required environment variables as seen inside the container:
CODESIGNING_TARBALL: ${CODESIGNING_TARBALL:?not set}
UNSIGNED_TARBALL: ${UNSIGNED_TARBALL:?not set}
DETACHED_SIGS_REPO: ${DETACHED_SIGS_REPO:?not set}
DIST_ARCHIVE_BASE: ${DIST_ARCHIVE_BASE:?not set}
DISTNAME: ${DISTNAME:?not set}
@@ -66,54 +63,27 @@ mkdir -p "$DISTSRC"
(
cd "$DISTSRC"
tar -xf "$CODESIGNING_TARBALL"
tar -xf "$UNSIGNED_TARBALL"
mkdir -p codesignatures
tar -C codesignatures -xf "$CODESIGNATURE_GIT_ARCHIVE"
case "$HOST" in
*mingw*)
# Apply detached codesignatures
WORKDIR=".tmp"
mkdir -p ${WORKDIR}
cp -r --target-directory="${WORKDIR}" "unsigned/${DISTNAME}"
find "${WORKDIR}/${DISTNAME}" -name "*.exe" -type f -exec rm {} \;
find unsigned/ -name "*.exe" -type f | while read -r bin
do
bin_base="$(realpath --relative-to=unsigned/ "${bin}")"
mkdir -p "${WORKDIR}/$(dirname "${bin_base}")"
find "$PWD" -name "*-unsigned.exe" | while read -r infile; do
infile_base="$(basename "$infile")"
# Codesigned *-unsigned.exe and output to OUTDIR
osslsigncode attach-signature \
-in "${bin}" \
-out "${WORKDIR}/${bin_base/-unsigned}" \
-in "$infile" \
-out "${OUTDIR}/${infile_base/-unsigned}" \
-CAfile "$GUIX_ENVIRONMENT/etc/ssl/certs/ca-certificates.crt" \
-sigin codesignatures/win/"${bin_base}".pem
-sigin codesignatures/win/"$infile_base".pem
done
# Move installer to outdir
cd "${WORKDIR}"
find . -name "*setup.exe" -print0 \
| xargs -0r mv --target-directory="${OUTDIR}"
# Make .zip from binaries
find "${DISTNAME}" -print0 \
| xargs -0r touch --no-dereference --date="@${SOURCE_DATE_EPOCH}"
find "${DISTNAME}" \
| sort \
| zip -X@ "${OUTDIR}/${DISTNAME}-${HOST//x86_64-w64-mingw32/win64}.zip" \
|| ( rm -f "${OUTDIR}/${DISTNAME}-${HOST//x86_64-w64-mingw32/win64}.zip" && exit 1 )
;;
*darwin*)
case "$HOST" in
arm64*) ARCH="arm64" ;;
x86_64*) ARCH="x86_64" ;;
esac
# Apply detached codesignatures (in-place)
signapple apply dist/Bitcoin-Qt.app codesignatures/osx/"${HOST}"/dist/Bitcoin-Qt.app
find "${DISTNAME}" -wholename "*/bin/*" -type f | while read -r bin
do
signapple apply "${bin}" "codesignatures/osx/${HOST}/${bin}.${ARCH}sign"
done
# Apply detached codesignatures to dist/ (in-place)
signapple apply dist/Bitcoin-Qt.app codesignatures/osx/dist
# Make a .zip from dist/
cd dist/
@@ -121,14 +91,6 @@ mkdir -p "$DISTSRC"
| xargs -0r touch --no-dereference --date="@${SOURCE_DATE_EPOCH}"
find . | sort \
| zip -X@ "${OUTDIR}/${DISTNAME}-${HOST}.zip"
cd ..
# Make a .tar.gz from bins
find "${DISTNAME}" -print0 \
| sort --zero-terminated \
| tar --create --no-recursion --mode='u+rw,go+r-w,a+X' --null --files-from=- \
| gzip -9n > "${OUTDIR}/${DISTNAME}-${HOST}.tar.gz" \
|| ( rm -f "${OUTDIR}/${DISTNAME}-${HOST}.tar.gz" && exit 1 )
;;
*)
exit 1
@@ -143,7 +105,7 @@ mv --no-target-directory "$OUTDIR" "$ACTUAL_OUTDIR" \
(
cd /outdir-base
{
echo "$CODESIGNING_TARBALL"
echo "$UNSIGNED_TARBALL"
echo "$CODESIGNATURE_GIT_ARCHIVE"
find "$ACTUAL_OUTDIR" -type f
} | xargs realpath --relative-base="$PWD" \

View File

@@ -21,26 +21,6 @@ check_tools() {
done
}
################
# SOURCE_DATE_EPOCH should not unintentionally be set
################
check_source_date_epoch() {
if [ -n "$SOURCE_DATE_EPOCH" ] && [ -z "$FORCE_SOURCE_DATE_EPOCH" ]; then
cat << EOF
ERR: Environment variable SOURCE_DATE_EPOCH is set which may break reproducibility.
Aborting...
Hint: You may want to:
1. Unset this variable: \`unset SOURCE_DATE_EPOCH\` before rebuilding
2. Set the 'FORCE_SOURCE_DATE_EPOCH' environment variable if you insist on
using your own epoch
EOF
exit 1
fi
}
check_tools cat env readlink dirname basename git
################
@@ -70,8 +50,8 @@ fi
# across time.
time-machine() {
# shellcheck disable=SC2086
guix time-machine --url=https://codeberg.org/guix/guix.git \
--commit=7bf1d7aeaffba15c4f680f93ae88fbef25427252 \
guix time-machine --url=https://git.savannah.gnu.org/git/guix.git \
--commit=d5ca4d4fd713a9f7e17e074a1e37dda99bbb09fc \
--cores="$JOBS" \
--keep-failed \
--fallback \
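Together, the helpers above gate reproducibility: `check_source_date_epoch` aborts on a stray `SOURCE_DATE_EPOCH`, while `time-machine` pins the exact Guix commit the build runs under. A hedged sketch of deliberately opting in to a custom epoch, as the error hint describes (the epoch value is arbitrary):

```
export SOURCE_DATE_EPOCH=1700000000
export FORCE_SOURCE_DATE_EPOCH=1   # explicit opt-in, so the check passes
./contrib/guix/guix-build
```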

View File

@@ -3,7 +3,6 @@
((gnu packages bash) #:select (bash-minimal))
(gnu packages bison)
((gnu packages certs) #:select (nss-certs))
((gnu packages check) #:select (libfaketime))
((gnu packages cmake) #:select (cmake-minimal))
(gnu packages commencement)
(gnu packages compression)
@@ -18,16 +17,14 @@
(gnu packages moreutils)
(gnu packages pkg-config)
((gnu packages python) #:select (python-minimal))
((gnu packages python-build) #:select (python-tomli python-poetry-core))
((gnu packages python-build) #:select (python-tomli))
((gnu packages python-crypto) #:select (python-asn1crypto))
((gnu packages tls) #:select (openssl))
((gnu packages version-control) #:select (git-minimal))
(guix build-system cmake)
(guix build-system gnu)
(guix build-system python)
(guix build-system pyproject)
(guix build-system trivial)
(guix download)
(guix gexp)
(guix git-download)
((guix licenses) #:prefix license:)
@@ -94,25 +91,14 @@ chain for " target " development."))
(home-page (package-home-page xgcc))
(license (package-license xgcc)))))
(define base-gcc
(package
(inherit gcc-12) ;; 12.3.0
(version "12.4.0")
(source (origin
(method url-fetch)
(uri (string-append "mirror://gnu/gcc/gcc-"
version "/gcc-" version ".tar.xz"))
(sha256
(base32
"0xcida8l2wykvvzvpcrcn649gj0ijn64gwxbplacpg6c0hk6akvh"))))))
(define base-gcc gcc-10)
(define base-linux-kernel-headers linux-libre-headers-6.1)
(define* (make-bitcoin-cross-toolchain target
#:key
(base-gcc-for-libc linux-base-gcc)
(base-kernel-headers base-linux-kernel-headers)
(base-libc glibc-2.31)
(base-libc glibc-2.27)
(base-gcc linux-base-gcc))
"Convenience wrapper around MAKE-CROSS-TOOLCHAIN with default values
desirable for building Bitcoin Core release binaries."
@@ -124,23 +110,13 @@ desirable for building Bitcoin Core release binaries."
(define (gcc-mingw-patches gcc)
(package-with-extra-patches gcc
(search-our-patches "gcc-remap-guix-store.patch")))
(define (binutils-mingw-patches binutils)
(package-with-extra-patches binutils
(search-our-patches "binutils-unaligned-default.patch")))
(define (winpthreads-patches mingw-w64-x86_64-winpthreads)
(package-with-extra-patches mingw-w64-x86_64-winpthreads
(search-our-patches "winpthreads-remap-guix-store.patch")))
(search-our-patches "gcc-remap-guix-store.patch"
"vmov-alignment.patch")))
(define (make-mingw-pthreads-cross-toolchain target)
"Create a cross-compilation toolchain package for TARGET"
(let* ((xbinutils (binutils-mingw-patches (cross-binutils target)))
(machine (substring target 0 (string-index target #\-)))
(pthreads-xlibc (winpthreads-patches (make-mingw-w64 machine
#:xgcc (cross-gcc target #:xgcc (gcc-mingw-patches base-gcc))
#:with-winpthreads? #t)))
(let* ((xbinutils (cross-binutils target))
(pthreads-xlibc mingw-w64-x86_64-winpthreads)
(pthreads-xgcc (cross-gcc target
#:xgcc (gcc-mingw-patches mingw-w64-base-gcc)
#:xbinutils xbinutils
@@ -222,17 +198,7 @@ and abstract ELF, PE and MachO formats.")
(base32
"1j47vwq4caxfv0xw68kw5yh00qcpbd56d7rq6c483ma3y7s96yyz"))))
(build-system cmake-build-system)
(arguments
(list
#:phases
#~(modify-phases %standard-phases
(replace 'check
(lambda* (#:key tests? #:allow-other-keys)
(if tests?
(invoke "faketime" "-f" "@2025-01-01 00:00:00" ;; Tests fail after 2025.
"ctest" "--output-on-failure" "--no-tests=error")
(format #t "test suite not run~%")))))))
(inputs (list libfaketime openssl))
(inputs (list openssl))
(home-page "https://github.com/mtrojnar/osslsigncode")
(synopsis "Authenticode signing and timestamping tool")
(description "osslsigncode is a small tool that implements part of the
@@ -405,10 +371,10 @@ specific moment in time, whitelisting and revocation checks.")
(license license:expat))))
(define-public python-signapple
(let ((commit "85bfcecc33d2773bc09bc318cec0614af2c8e287"))
(let ((commit "62155712e7417aba07565c9780a80e452823ae6a"))
(package
(name "python-signapple")
(version (git-version "0.2.0" "1" commit))
(version (git-version "0.1" "1" commit))
(source
(origin
(method git-fetch)
@@ -418,14 +384,13 @@ specific moment in time, whitelisting and revocation checks.")
(file-name (git-file-name name commit))
(sha256
(base32
"17yqjll8nw83q6dhgqhkl7w502z5vy9sln8m6mlx0f1c10isg8yg"))))
(build-system pyproject-build-system)
"1nm6rm4h4m7kbq729si4cm8rzild62mk4ni8xr5zja7l33fhv3gb"))))
(build-system python-build-system)
(propagated-inputs
(list python-asn1crypto
python-oscrypto
python-certvalidator
python-elfesteem))
(native-inputs (list python-poetry-core))
;; There are no tests, but attempting to run python setup.py test leads to
;; problems, just disable the test
(arguments '(#:tests? #f))
@@ -458,7 +423,6 @@ inspecting signatures in Mach-O binaries.")
(list "--enable-initfini-array=yes",
"--enable-default-ssp=yes",
"--enable-default-pie=yes",
"--enable-standard-branch-protection=yes",
building-on)))
((#:phases phases)
`(modify-phases ,phases
@@ -472,21 +436,24 @@ inspecting signatures in Mach-O binaries.")
(("-rpath=") "-rpath-link="))
#t))))))))
(define-public glibc-2.31
(let ((commit "8e30f03744837a85e33d84ccd34ed3abe30d37c3"))
(define-public glibc-2.27
(package
(inherit glibc) ;; 2.35
(version "2.31")
(inherit glibc-2.31)
(version "2.27")
(source (origin
(method git-fetch)
(uri (git-reference
(url "https://sourceware.org/git/glibc.git")
(commit commit)))
(file-name (git-file-name "glibc" commit))
(commit "73886db6218e613bd6d4edf529f11e008a6c2fa6")))
(file-name (git-file-name "glibc" "73886db6218e613bd6d4edf529f11e008a6c2fa6"))
(sha256
(base32
"1zi0s9yy5zkisw823vivn7zlj8w6g9p3mm7lmlqiixcxdkz4dbn6"))
(patches (search-our-patches "glibc-guix-prefix.patch"))))
"0azpb9cvnbv25zg8019rqz48h8i2257ngyjg566dlnp74ivrs9vq"))
(patches (search-our-patches "glibc-2.27-riscv64-Use-__has_include-to-include-asm-syscalls.h.patch"
"glibc-2.27-fcommon.patch"
"glibc-2.27-guix-prefix.patch"
"glibc-2.27-no-librt.patch"
"glibc-2.27-powerpc-ldbrx.patch"))))
(arguments
(substitute-keyword-arguments (package-arguments glibc)
((#:configure-flags flags)
@@ -502,13 +469,12 @@ inspecting signatures in Mach-O binaries.")
(lambda* (#:key outputs #:allow-other-keys)
;; Install the rpc data base file under `$out/etc/rpc'.
;; Otherwise build will fail with "Permission denied."
;; Can be removed when we are building 2.32 or later.
(let ((out (assoc-ref outputs "out")))
(substitute* "sunrpc/Makefile"
(("^\\$\\(inst_sysconfdir\\)/rpc(.*)$" _ suffix)
(string-append out "/etc/rpc" suffix "\n"))
(("^install-others =.*$")
(string-append "install-others = " out "/etc/rpc\n")))))))))))))
(string-append "install-others = " out "/etc/rpc\n"))))))))))))
(packages->manifest
(append
@@ -529,16 +495,19 @@ inspecting signatures in Mach-O binaries.")
moreutils
;; Compression and archiving
tar
bzip2
gzip
xz
;; Build tools
gcc-toolchain-12
cmake-minimal
gnu-make
libtool
autoconf-2.71
automake
pkg-config
bison
;; Native GCC 10 toolchain
gcc-toolchain-10
(list gcc-toolchain-10 "static")
;; Scripting
python-minimal ;; (3.10)
;; Git
@@ -547,19 +516,14 @@ inspecting signatures in Mach-O binaries.")
python-lief)
(let ((target (getenv "HOST")))
(cond ((string-suffix? "-mingw32" target)
;; Windows
(list zip
(make-mingw-pthreads-cross-toolchain "x86_64-w64-mingw32")
nsis-x86_64
nss-certs
osslsigncode))
((string-contains target "-linux-")
(list bison
(list gcc-toolchain-12 "static")
(make-bitcoin-cross-toolchain target)))
(list (make-bitcoin-cross-toolchain target)))
((string-contains target "darwin")
(list clang-toolchain-18
lld-18
(make-lld-wrapper lld-18 #:lld-as-ld? #t)
python-signapple
zip))
(list clang-toolchain-17 binutils cmake-minimal python-signapple zip))
(else '())))))

View File

@@ -1,22 +0,0 @@
commit 6537181f59ed186a341db621812a6bc35e22eaf6
Author: fanquake <fanquake@gmail.com>
Date: Wed Apr 10 12:15:52 2024 +0200
build: turn on -muse-unaligned-vector-move by default
This allows us to avoid (more invasively) patching GCC, to avoid
unaligned instruction use.
diff --git a/gas/config/tc-i386.c b/gas/config/tc-i386.c
index e0632681477..14a9653abdf 100644
--- a/gas/config/tc-i386.c
+++ b/gas/config/tc-i386.c
@@ -801,7 +801,7 @@ static unsigned int no_cond_jump_promotion = 0;
static unsigned int sse2avx;
/* Encode aligned vector move as unaligned vector move. */
-static unsigned int use_unaligned_vector_move;
+static unsigned int use_unaligned_vector_move = 1;
/* Encode scalar AVX instructions with specific vector length. */
static enum

View File

@@ -1,9 +1,14 @@
Without ffile-prefix-map, the debug symbols will contain paths for the
guix store which will include the hashes of each package. However, the
hash for the same package will differ when on different architectures.
In order to be reproducible regardless of the architecture used to build
the package, map all guix store prefixes to something fixed, e.g. /usr.
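The remapping relies on GCC's `-ffile-prefix-map=OLD=NEW` option, which rewrites the given prefix in debug info and `__FILE__` expansions alike. A minimal illustration with a made-up store path:

```
# Record sources under /usr rather than the per-architecture /gnu/store path.
gcc -g -ffile-prefix-map=/gnu/store/abcdef-gcc-12.4.0=/usr -c foo.c -o foo.o
```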
From aad25427e74f387412e8bc9a9d7bbc6c496c792f Mon Sep 17 00:00:00 2001
From: Andrew Chow <achow101-github@achow101.com>
Date: Wed, 6 Jul 2022 16:49:41 -0400
Subject: [PATCH] guix: remap guix store paths to /usr
---
libgcc/Makefile.in | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/libgcc/Makefile.in b/libgcc/Makefile.in
index 851e7657d07..476c2becd1c 100644
--- a/libgcc/Makefile.in
+++ b/libgcc/Makefile.in
@@ -854,7 +854,7 @@ endif

View File

@@ -0,0 +1,34 @@
commit 264a4a0dbe1f4369db315080034b500bed66016c
Author: fanquake <fanquake@gmail.com>
Date: Fri May 6 11:03:04 2022 +0100
build: use -fcommon to retain legacy behaviour with GCC 10
GCC 10 started using -fno-common by default, which causes issues with
the powerpc builds using glibc 2.27. A patch was committed to glibc to fix
the issue, 18363b4f010da9ba459b13310b113ac0647c2fcc, but it is non-trivial
to backport, and was broken in at least one way, see the followup in
commit 7650321ce037302bfc2f026aa19e0213b8d02fe6.
For now, retain the legacy GCC behaviour by passing -fcommon when
building glibc.
https://gcc.gnu.org/onlinedocs/gcc/Code-Gen-Options.html.
https://sourceware.org/git/?p=glibc.git;a=commit;h=18363b4f010da9ba459b13310b113ac0647c2fcc
https://sourceware.org/git/?p=glibc.git;a=commit;h=7650321ce037302bfc2f026aa19e0213b8d02fe6
This patch can be dropped when we are building with glibc 2.31+.
diff --git a/Makeconfig b/Makeconfig
index 86a71e5802..aa2166be60 100644
--- a/Makeconfig
+++ b/Makeconfig
@@ -896,7 +896,7 @@ ifeq "$(strip $(+cflags))" ""
endif # $(+cflags) == ""
+cflags += $(cflags-cpu) $(+gccwarn) $(+merge-constants) $(+math-flags) \
- $(+stack-protector)
+ $(+stack-protector) -fcommon
+gcc-nowarn := -w
# Don't duplicate options if we inherited variables from the parent.

View File

@@ -4,13 +4,19 @@ hash for the same package will differ when on different architectures.
In order to be reproducible regardless of the architecture used to build
the package, map all guix store prefixes to something fixed, e.g. /usr.
We might be able to drop this in favour of using --with-nonshared-cflags
when we begin using newer versions of glibc.
--- a/Makeconfig
+++ b/Makeconfig
@@ -1007,6 +1007,7 @@ object-suffixes :=
@@ -992,6 +992,10 @@ object-suffixes :=
CPPFLAGS-.o = $(pic-default)
# libc.a must be compiled with -fPIE/-fpie for static PIE.
CFLAGS-.o = $(filter %frame-pointer,$(+cflags)) $(pie-default)
+
+# Map Guix store paths to /usr
+CFLAGS-.o += `find /gnu/store -maxdepth 1 -mindepth 1 -type d -exec echo -n " -ffile-prefix-map={}=/usr" \;`
+
libtype.o := lib%.a
object-suffixes += .o
ifeq (yes,$(build-shared))

View File

@@ -0,0 +1,53 @@
This patch can be dropped when we are building with glibc 2.30+.
commit 6e41ef56c9baab719a02f1377b1e7ce7bff61e73
Author: Florian Weimer <fweimer@redhat.com>
Date: Fri Feb 8 10:21:56 2019 +0100
rt: Turn forwards from librt to libc into compat symbols [BZ #24194]
As the result of commit 6e6249d0b461b952d0f544792372663feb6d792a
("BZ#14743: Move clock_* symbols from librt to libc."), in glibc 2.17,
clock_gettime, clock_getres, clock_settime, clock_getcpuclockid,
clock_nanosleep were added to libc, and the file rt/clock-compat.c
was added with forwarders to the actual implementations in libc.
These forwarders were wrapped in
#if SHLIB_COMPAT (librt, GLIBC_2_2, GLIBC_2_17)
so that they are not present for newer architectures (such as
powerpc64le) with a 2.17 or later ABI baseline. But the forwarders
were not marked as compatibility symbols. As a result, on older
architectures, historic configure checks such as
AC_CHECK_LIB(rt, clock_gettime)
still cause linking against librt, even though this is completely
unnecessary. It also creates a needless porting hazard because
architectures behave differently when it comes to symbol availability.
Reviewed-by: Carlos O'Donell <carlos@redhat.com>
diff --git a/rt/clock-compat.c b/rt/clock-compat.c
index f816973c05..11e71aa890 100644
--- a/rt/clock-compat.c
+++ b/rt/clock-compat.c
@@ -30,14 +30,16 @@
#if HAVE_IFUNC
# undef INIT_ARCH
# define INIT_ARCH()
-# define COMPAT_REDIRECT(name, proto, arglist) libc_ifunc (name, &__##name)
+# define COMPAT_REDIRECT(name, proto, arglist) libc_ifunc (name, &__##name) \
+ compat_symbol (librt, name, name, GLIBC_2_2);
#else
# define COMPAT_REDIRECT(name, proto, arglist) \
int \
name proto \
{ \
return __##name arglist; \
- }
+ } \
+ compat_symbol (librt, name, name, GLIBC_2_2);
#endif
COMPAT_REDIRECT (clock_getres,

View File

@@ -0,0 +1,245 @@
From 50b0b3c9ff71ffd7ebbd74ae46844c3566478123 Mon Sep 17 00:00:00 2001
From: "Gabriel F. T. Gomes" <gabrielftg@linux.ibm.com>
Date: Mon, 27 May 2019 15:21:22 -0300
Subject: [PATCH] powerpc: Fix build failures with current GCC
Since GCC commit 271500 (svn), also known as the following commit on the
git mirror:
commit e154242724b084380e3221df7c08fcdbd8460674
Author: amodra <amodra@138bc75d-0d04-0410-961f-82ee72b054a4>
Date: Wed May 22 04:34:26 2019 +0000
[RS6000] Don't pass -many to the assembler
glibc builds are failing when an assembly implementation does not
declare the correct '.machine' directive, or when no such directive is
declared at all. For example, when a POWER6 instruction is used, but
'.machine power6' is not declared, the assembler will fail with an error
similar to the following:
../sysdeps/powerpc/powerpc64/power8/strcmp.S: Assembler messages:
24 ../sysdeps/powerpc/powerpc64/power8/strcmp.S:55: Error: unrecognized opcode: `cmpb'
This patch adds '.machine powerN' directives where none existed, as well
as it updates '.machine power7' directives on POWER8 files, because the
minimum binutils version required to build glibc (binutils 2.25) now
provides this machine version. It also adds '-many' to the assembler
command used to build tst-set_ppr.c.
Tested for powerpc, powerpc64, and powerpc64le, as well as with
build-many-glibcs.py for powerpc targets.
Reviewed-by: Tulio Magno Quites Machado Filho <tuliom@linux.ibm.com>
---
sysdeps/powerpc/Makefile | 5 +++
sysdeps/powerpc/powerpc64/power4/memcmp.S | 7 ++++
sysdeps/powerpc/powerpc64/power7/strncmp.S | 1 +
.../powerpc/powerpc64/power8/fpu/s_llround.S | 1 +
sysdeps/powerpc/powerpc64/power8/strcasecmp.S | 36 ++++++-------------
sysdeps/powerpc/powerpc64/power8/strcasestr.S | 14 ++------
sysdeps/powerpc/powerpc64/power8/strcmp.S | 1 +
7 files changed, 28 insertions(+), 37 deletions(-)
diff --git a/sysdeps/powerpc/Makefile b/sysdeps/powerpc/Makefile
index 6aa683b03f..23126147df 100644
--- a/sysdeps/powerpc/Makefile
+++ b/sysdeps/powerpc/Makefile
@@ -45,6 +45,11 @@ ifeq ($(subdir),misc)
sysdep_headers += sys/platform/ppc.h
tests += test-gettimebase
tests += tst-set_ppr
+
+# This test is expected to run and exit with EXIT_UNSUPPORTED on
+# processors that do not implement the Power ISA 2.06 or greater.
+# But the test makes use of instructions from Power ISA 2.06 and 2.07.
+CFLAGS-tst-set_ppr.c += -Wa,-many
endif
ifneq (,$(filter %le,$(config-machine)))
diff --git a/sysdeps/powerpc/powerpc64/power4/memcmp.S b/sysdeps/powerpc/powerpc64/power4/memcmp.S
index e5319f101f..38dcf4c9a1 100644
--- a/sysdeps/powerpc/powerpc64/power4/memcmp.S
+++ b/sysdeps/powerpc/powerpc64/power4/memcmp.S
@@ -26,7 +26,14 @@
# define MEMCMP memcmp
#endif
+#ifndef __LITTLE_ENDIAN__
.machine power4
+#else
+/* Little endian is only available since POWER8, so it's safe to
+ specify .machine as power8 (or older), even though this is a POWER4
+ file. Since the little-endian code uses 'ldbrx', power7 is enough. */
+ .machine power7
+#endif
ENTRY_TOCLESS (MEMCMP, 4)
CALL_MCOUNT 3
diff --git a/sysdeps/powerpc/powerpc64/power7/strncmp.S b/sysdeps/powerpc/powerpc64/power7/strncmp.S
index 0c7429d19f..10f898c5a3 100644
--- a/sysdeps/powerpc/powerpc64/power7/strncmp.S
+++ b/sysdeps/powerpc/powerpc64/power7/strncmp.S
@@ -28,6 +28,7 @@
const char *s2 [r4],
size_t size [r5]) */
+ .machine power7
ENTRY_TOCLESS (STRNCMP, 5)
CALL_MCOUNT 3
diff --git a/sysdeps/powerpc/powerpc64/power8/fpu/s_llround.S b/sysdeps/powerpc/powerpc64/power8/fpu/s_llround.S
index a22fc63bb3..84c76ba0f9 100644
--- a/sysdeps/powerpc/powerpc64/power8/fpu/s_llround.S
+++ b/sysdeps/powerpc/powerpc64/power8/fpu/s_llround.S
@@ -26,6 +26,7 @@
/* long long [r3] llround (float x [fp1]) */
+ .machine power8
ENTRY_TOCLESS (__llround)
CALL_MCOUNT 0
frin fp1,fp1 /* Round to nearest +-0.5. */
diff --git a/sysdeps/powerpc/powerpc64/power8/strcasecmp.S b/sysdeps/powerpc/powerpc64/power8/strcasecmp.S
index 3a2efe2a64..eeacd40c7f 100644
--- a/sysdeps/powerpc/powerpc64/power8/strcasecmp.S
+++ b/sysdeps/powerpc/powerpc64/power8/strcasecmp.S
@@ -91,21 +91,7 @@
3: \
TOLOWER()
-#ifdef _ARCH_PWR8
-# define VCLZD_V8_v7 vclzd v8, v7;
-# define MFVRD_R3_V1 mfvrd r3, v1;
-# define VSUBUDM_V9_V8 vsubudm v9, v9, v8;
-# define VPOPCNTD_V8_V8 vpopcntd v8, v8;
-# define VADDUQM_V7_V8 vadduqm v9, v7, v8;
-#else
-# define VCLZD_V8_v7 .long 0x11003fc2
-# define MFVRD_R3_V1 .long 0x7c230067
-# define VSUBUDM_V9_V8 .long 0x112944c0
-# define VPOPCNTD_V8_V8 .long 0x110047c3
-# define VADDUQM_V7_V8 .long 0x11274100
-#endif
-
- .machine power7
+ .machine power8
ENTRY (__STRCASECMP)
#ifdef USE_AS_STRNCASECMP
@@ -265,15 +251,15 @@ L(different):
#ifdef __LITTLE_ENDIAN__
/* Count trailing zero. */
vspltisb v8, -1
- VADDUQM_V7_V8
+ vadduqm v9, v7, v8
vandc v8, v9, v7
- VPOPCNTD_V8_V8
+ vpopcntd v8, v8
vspltb v6, v8, 15
vcmpequb. v6, v6, v1
blt cr6, L(shift8)
#else
/* Count leading zero. */
- VCLZD_V8_v7
+ vclzd v8, v7
vspltb v6, v8, 7
vcmpequb. v6, v6, v1
blt cr6, L(shift8)
@@ -291,7 +277,7 @@ L(skipsum):
/* Merge and move to GPR. */
vmrglb v6, v6, v7
vslo v1, v6, v1
- MFVRD_R3_V1
+ mfvrd r3, v1
/* Place the characters that are different in first position. */
sldi rSTR2, rRTN, 56
srdi rSTR2, rSTR2, 56
@@ -301,7 +287,7 @@ L(skipsum):
vslo v6, v5, v8
vslo v7, v4, v8
vmrghb v1, v6, v7
- MFVRD_R3_V1
+ mfvrd r3, v1
srdi rSTR2, rRTN, 48
sldi rSTR2, rSTR2, 56
srdi rSTR2, rSTR2, 56
@@ -320,15 +306,15 @@ L(null_found):
#ifdef __LITTLE_ENDIAN__
/* Count trailing zero. */
vspltisb v8, -1
- VADDUQM_V7_V8
+ vadduqm v9, v7, v8
vandc v8, v9, v7
- VPOPCNTD_V8_V8
+ vpopcntd v8, v8
vspltb v6, v8, 15
vcmpequb. v6, v6, v10
blt cr6, L(shift_8)
#else
/* Count leading zero. */
- VCLZD_V8_v7
+ vclzd v8, v7
vspltb v6, v8, 7
vcmpequb. v6, v6, v10
blt cr6, L(shift_8)
@@ -343,10 +329,10 @@ L(skipsum1):
vspltisb v10, 7
vslb v10, v10, v10
vsldoi v9, v0, v10, 1
- VSUBUDM_V9_V8
+ vsubudm v9, v9, v8
vspltisb v8, 8
vsldoi v8, v0, v8, 1
- VSUBUDM_V9_V8
+ vsubudm v9, v9, v8
/* Shift and remove junk after null character. */
#ifdef __LITTLE_ENDIAN__
vslo v5, v5, v9
diff --git a/sysdeps/powerpc/powerpc64/power8/strcasestr.S b/sysdeps/powerpc/powerpc64/power8/strcasestr.S
index 9fc24c29f9..e10f06fd86 100644
--- a/sysdeps/powerpc/powerpc64/power8/strcasestr.S
+++ b/sysdeps/powerpc/powerpc64/power8/strcasestr.S
@@ -73,18 +73,8 @@
vor reg, v8, reg; \
vcmpequb. v6, reg, v4;
-/* TODO: change these to the actual instructions when the minimum required
- binutils allows it. */
-#ifdef _ARCH_PWR8
-#define VCLZD_V8_v7 vclzd v8, v7;
-#else
-#define VCLZD_V8_v7 .long 0x11003fc2
-#endif
-
#define FRAMESIZE (FRAME_MIN_SIZE+48)
-/* TODO: change this to .machine power8 when the minimum required binutils
- allows it. */
- .machine power7
+ .machine power8
ENTRY (STRCASESTR, 4)
CALL_MCOUNT 2
mflr r0 /* Load link register LR to r0. */
@@ -291,7 +281,7 @@ L(nullchk1):
vcmpequb. v6, v0, v7
/* Shift r3 by 16 bytes and proceed. */
blt cr6, L(shift16)
- VCLZD_V8_v7
+ vclzd v8, v7
#ifdef __LITTLE_ENDIAN__
vspltb v6, v8, 15
#else
diff --git a/sysdeps/powerpc/powerpc64/power8/strcmp.S b/sysdeps/powerpc/powerpc64/power8/strcmp.S
index 15e7351d1b..d592266d1d 100644
--- a/sysdeps/powerpc/powerpc64/power8/strcmp.S
+++ b/sysdeps/powerpc/powerpc64/power8/strcmp.S
@@ -31,6 +31,7 @@
64K as default, the page cross handling assumes minimum page size of
4k. */
+ .machine power8
ENTRY_TOCLESS (STRCMP, 4)
li r0,0
--
2.41.0

View File

@@ -0,0 +1,78 @@
Note that this has been modified from the original commit, to use __has_include
instead of __has_include__, as the latter was causing build failures with GCC 10.
See also: http://lists.busybox.net/pipermail/buildroot/2020-July/590376.html.
https://sourceware.org/git/?p=glibc.git;a=commit;h=0b9c84906f653978fb8768c7ebd0ee14a47e662e
This patch can be dropped when we are building with glibc 2.28+.
From 562c52cc81a4e456a62e6455feb32732049e9070 Mon Sep 17 00:00:00 2001
From: "H.J. Lu" <hjl.tools@gmail.com>
Date: Mon, 31 Dec 2018 09:26:42 -0800
Subject: [PATCH] riscv: Use __has_include__ to include <asm/syscalls.h> [BZ
#24022]
<asm/syscalls.h> has been removed by
commit 27f8899d6002e11a6e2d995e29b8deab5aa9cc25
Author: David Abdurachmanov <david.abdurachmanov@gmail.com>
Date: Thu Nov 8 20:02:39 2018 +0100
riscv: add asm/unistd.h UAPI header
Marcin Juszkiewicz reported issues while generating syscall table for riscv
using 4.20-rc1. The patch refactors our unistd.h files to match some other
architectures.
- Add asm/unistd.h UAPI header, which has __ARCH_WANT_NEW_STAT only for 64-bit
- Remove asm/syscalls.h UAPI header and merge to asm/unistd.h
- Adjust kernel asm/unistd.h
So now asm/unistd.h UAPI header should show all syscalls for riscv.
<asm/syscalls.h> may be restored by
Subject: [PATCH] riscv: restore asm/syscalls.h UAPI header
Date: Tue, 11 Dec 2018 09:09:35 +0100
UAPI header asm/syscalls.h was merged into UAPI asm/unistd.h header,
which did resolve issue with missing syscalls macros resulting in
glibc (2.28) build failure. It also broke glibc in a different way:
asm/syscalls.h is being used by glibc. I noticed this while doing
Fedora 30/Rawhide mass rebuild.
The patch returns asm/syscalls.h header and incl. it into asm/unistd.h.
I plan to send a patch to glibc to use asm/unistd.h instead of
asm/syscalls.h
In the meantime, we use __has_include__, which was added to GCC 5, to
check if <asm/syscalls.h> exists before including it. Tested with
build-many-glibcs.py for riscv against kernel 4.19.12 and 4.20-rc7.
[BZ #24022]
* sysdeps/unix/sysv/linux/riscv/flush-icache.c: Check if
<asm/syscalls.h> exists with __has_include__ before including it.
---
sysdeps/unix/sysv/linux/riscv/flush-icache.c | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/sysdeps/unix/sysv/linux/riscv/flush-icache.c b/sysdeps/unix/sysv/linux/riscv/flush-icache.c
index d612ef4c6c..0b2042620b 100644
--- a/sysdeps/unix/sysv/linux/riscv/flush-icache.c
+++ b/sysdeps/unix/sysv/linux/riscv/flush-icache.c
@@ -21,7 +21,11 @@
#include <stdlib.h>
#include <atomic.h>
#include <sys/cachectl.h>
-#include <asm/syscalls.h>
+#if __has_include (<asm/syscalls.h>)
+# include <asm/syscalls.h>
+#else
+# include <asm/unistd.h>
+#endif
typedef int (*func_type) (void *, void *, unsigned long int);
--
2.31.1

View File

@@ -0,0 +1,268 @@
Description: Use unaligned VMOV instructions
Author: Stephen Kitt <skitt@debian.org>
Bug-Debian: https://bugs.debian.org/939559
See also: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=54412
Based on a patch originally by Claude Heiland-Allen <claude@mathr.co.uk>
--- a/gcc/config/i386/sse.md
+++ b/gcc/config/i386/sse.md
@@ -1058,17 +1058,11 @@
{
if (FLOAT_MODE_P (GET_MODE_INNER (<MODE>mode)))
{
- if (misaligned_operand (operands[1], <MODE>mode))
- return "vmovu<ssemodesuffix>\t{%1, %0%{%3%}%N2|%0%{%3%}%N2, %1}";
- else
- return "vmova<ssemodesuffix>\t{%1, %0%{%3%}%N2|%0%{%3%}%N2, %1}";
+ return "vmovu<ssemodesuffix>\t{%1, %0%{%3%}%N2|%0%{%3%}%N2, %1}";
}
else
{
- if (misaligned_operand (operands[1], <MODE>mode))
- return "vmovdqu<ssescalarsize>\t{%1, %0%{%3%}%N2|%0%{%3%}%N2, %1}";
- else
- return "vmovdqa<ssescalarsize>\t{%1, %0%{%3%}%N2|%0%{%3%}%N2, %1}";
+ return "vmovdqu<ssescalarsize>\t{%1, %0%{%3%}%N2|%0%{%3%}%N2, %1}";
}
}
[(set_attr "type" "ssemov")
@@ -1184,17 +1178,11 @@
{
if (FLOAT_MODE_P (GET_MODE_INNER (<MODE>mode)))
{
- if (misaligned_operand (operands[0], <MODE>mode))
- return "vmovu<ssemodesuffix>\t{%1, %0%{%2%}|%0%{%2%}, %1}";
- else
- return "vmova<ssemodesuffix>\t{%1, %0%{%2%}|%0%{%2%}, %1}";
+ return "vmovu<ssemodesuffix>\t{%1, %0%{%2%}|%0%{%2%}, %1}";
}
else
{
- if (misaligned_operand (operands[0], <MODE>mode))
- return "vmovdqu<ssescalarsize>\t{%1, %0%{%2%}|%0%{%2%}, %1}";
- else
- return "vmovdqa<ssescalarsize>\t{%1, %0%{%2%}|%0%{%2%}, %1}";
+ return "vmovdqu<ssescalarsize>\t{%1, %0%{%2%}|%0%{%2%}, %1}";
}
}
[(set_attr "type" "ssemov")
@@ -7806,7 +7794,7 @@
"TARGET_SSE && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"@
%vmovlps\t{%1, %0|%q0, %1}
- %vmovaps\t{%1, %0|%0, %1}
+ %vmovups\t{%1, %0|%0, %1}
%vmovlps\t{%1, %d0|%d0, %q1}"
[(set_attr "type" "ssemov")
(set_attr "prefix" "maybe_vex")
@@ -13997,29 +13985,15 @@
switch (<MODE>mode)
{
case E_V8DFmode:
- if (misaligned_operand (operands[2], <ssequartermode>mode))
- return "vmovupd\t{%2, %x0|%x0, %2}";
- else
- return "vmovapd\t{%2, %x0|%x0, %2}";
+ return "vmovupd\t{%2, %x0|%x0, %2}";
case E_V16SFmode:
- if (misaligned_operand (operands[2], <ssequartermode>mode))
- return "vmovups\t{%2, %x0|%x0, %2}";
- else
- return "vmovaps\t{%2, %x0|%x0, %2}";
+ return "vmovups\t{%2, %x0|%x0, %2}";
case E_V8DImode:
- if (misaligned_operand (operands[2], <ssequartermode>mode))
- return which_alternative == 2 ? "vmovdqu64\t{%2, %x0|%x0, %2}"
+ return which_alternative == 2 ? "vmovdqu64\t{%2, %x0|%x0, %2}"
: "vmovdqu\t{%2, %x0|%x0, %2}";
- else
- return which_alternative == 2 ? "vmovdqa64\t{%2, %x0|%x0, %2}"
- : "vmovdqa\t{%2, %x0|%x0, %2}";
case E_V16SImode:
- if (misaligned_operand (operands[2], <ssequartermode>mode))
- return which_alternative == 2 ? "vmovdqu32\t{%2, %x0|%x0, %2}"
+ return which_alternative == 2 ? "vmovdqu32\t{%2, %x0|%x0, %2}"
: "vmovdqu\t{%2, %x0|%x0, %2}";
- else
- return which_alternative == 2 ? "vmovdqa32\t{%2, %x0|%x0, %2}"
- : "vmovdqa\t{%2, %x0|%x0, %2}";
default:
gcc_unreachable ();
}
@@ -21225,63 +21199,27 @@
switch (get_attr_mode (insn))
{
case MODE_V16SF:
- if (misaligned_operand (operands[1], <ssehalfvecmode>mode))
- return "vmovups\t{%1, %t0|%t0, %1}";
- else
- return "vmovaps\t{%1, %t0|%t0, %1}";
+ return "vmovups\t{%1, %t0|%t0, %1}";
case MODE_V8DF:
- if (misaligned_operand (operands[1], <ssehalfvecmode>mode))
- return "vmovupd\t{%1, %t0|%t0, %1}";
- else
- return "vmovapd\t{%1, %t0|%t0, %1}";
+ return "vmovupd\t{%1, %t0|%t0, %1}";
case MODE_V8SF:
- if (misaligned_operand (operands[1], <ssehalfvecmode>mode))
- return "vmovups\t{%1, %x0|%x0, %1}";
- else
- return "vmovaps\t{%1, %x0|%x0, %1}";
+ return "vmovups\t{%1, %x0|%x0, %1}";
case MODE_V4DF:
- if (misaligned_operand (operands[1], <ssehalfvecmode>mode))
- return "vmovupd\t{%1, %x0|%x0, %1}";
- else
- return "vmovapd\t{%1, %x0|%x0, %1}";
+ return "vmovupd\t{%1, %x0|%x0, %1}";
case MODE_XI:
- if (misaligned_operand (operands[1], <ssehalfvecmode>mode))
- {
- if (which_alternative == 2)
- return "vmovdqu\t{%1, %t0|%t0, %1}";
- else if (GET_MODE_SIZE (<ssescalarmode>mode) == 8)
- return "vmovdqu64\t{%1, %t0|%t0, %1}";
- else
- return "vmovdqu32\t{%1, %t0|%t0, %1}";
- }
+ if (which_alternative == 2)
+ return "vmovdqu\t{%1, %t0|%t0, %1}";
+ else if (GET_MODE_SIZE (<ssescalarmode>mode) == 8)
+ return "vmovdqu64\t{%1, %t0|%t0, %1}";
else
- {
- if (which_alternative == 2)
- return "vmovdqa\t{%1, %t0|%t0, %1}";
- else if (GET_MODE_SIZE (<ssescalarmode>mode) == 8)
- return "vmovdqa64\t{%1, %t0|%t0, %1}";
- else
- return "vmovdqa32\t{%1, %t0|%t0, %1}";
- }
+ return "vmovdqu32\t{%1, %t0|%t0, %1}";
case MODE_OI:
- if (misaligned_operand (operands[1], <ssehalfvecmode>mode))
- {
- if (which_alternative == 2)
- return "vmovdqu\t{%1, %x0|%x0, %1}";
- else if (GET_MODE_SIZE (<ssescalarmode>mode) == 8)
- return "vmovdqu64\t{%1, %x0|%x0, %1}";
- else
- return "vmovdqu32\t{%1, %x0|%x0, %1}";
- }
+ if (which_alternative == 2)
+ return "vmovdqu\t{%1, %x0|%x0, %1}";
+ else if (GET_MODE_SIZE (<ssescalarmode>mode) == 8)
+ return "vmovdqu64\t{%1, %x0|%x0, %1}";
else
- {
- if (which_alternative == 2)
- return "vmovdqa\t{%1, %x0|%x0, %1}";
- else if (GET_MODE_SIZE (<ssescalarmode>mode) == 8)
- return "vmovdqa64\t{%1, %x0|%x0, %1}";
- else
- return "vmovdqa32\t{%1, %x0|%x0, %1}";
- }
+ return "vmovdqu32\t{%1, %x0|%x0, %1}";
default:
gcc_unreachable ();
}
--- a/gcc/config/i386/i386.c
+++ b/gcc/config/i386/i386.c
@@ -4981,13 +4981,13 @@
switch (type)
{
case opcode_int:
- opcode = misaligned_p ? "vmovdqu32" : "vmovdqa32";
+ opcode = "vmovdqu32";
break;
case opcode_float:
- opcode = misaligned_p ? "vmovups" : "vmovaps";
+ opcode = "vmovups";
break;
case opcode_double:
- opcode = misaligned_p ? "vmovupd" : "vmovapd";
+ opcode = "vmovupd";
break;
}
}
@@ -4996,16 +4996,16 @@
switch (scalar_mode)
{
case E_SFmode:
- opcode = misaligned_p ? "%vmovups" : "%vmovaps";
+ opcode = "%vmovups";
break;
case E_DFmode:
- opcode = misaligned_p ? "%vmovupd" : "%vmovapd";
+ opcode = "%vmovupd";
break;
case E_TFmode:
if (evex_reg_p)
- opcode = misaligned_p ? "vmovdqu64" : "vmovdqa64";
+ opcode = "vmovdqu64";
else
- opcode = misaligned_p ? "%vmovdqu" : "%vmovdqa";
+ opcode = "%vmovdqu";
break;
default:
gcc_unreachable ();
@@ -5017,48 +5017,32 @@
{
case E_QImode:
if (evex_reg_p)
- opcode = (misaligned_p
- ? (TARGET_AVX512BW
- ? "vmovdqu8"
- : "vmovdqu64")
- : "vmovdqa64");
+ opcode = TARGET_AVX512BW ? "vmovdqu8" : "vmovdqu64";
else
- opcode = (misaligned_p
- ? (TARGET_AVX512BW
- ? "vmovdqu8"
- : "%vmovdqu")
- : "%vmovdqa");
+ opcode = TARGET_AVX512BW ? "vmovdqu8" : "%vmovdqu";
break;
case E_HImode:
if (evex_reg_p)
- opcode = (misaligned_p
- ? (TARGET_AVX512BW
- ? "vmovdqu16"
- : "vmovdqu64")
- : "vmovdqa64");
+ opcode = TARGET_AVX512BW ? "vmovdqu16" : "vmovdqu64";
else
- opcode = (misaligned_p
- ? (TARGET_AVX512BW
- ? "vmovdqu16"
- : "%vmovdqu")
- : "%vmovdqa");
+ opcode = TARGET_AVX512BW ? "vmovdqu16" : "%vmovdqu";
break;
case E_SImode:
if (evex_reg_p)
- opcode = misaligned_p ? "vmovdqu32" : "vmovdqa32";
+ opcode = "vmovdqu32";
else
- opcode = misaligned_p ? "%vmovdqu" : "%vmovdqa";
+ opcode = "%vmovdqu";
break;
case E_DImode:
case E_TImode:
case E_OImode:
if (evex_reg_p)
- opcode = misaligned_p ? "vmovdqu64" : "vmovdqa64";
+ opcode = "vmovdqu64";
else
- opcode = misaligned_p ? "%vmovdqu" : "%vmovdqa";
+ opcode = "%vmovdqu";
break;
case E_XImode:
- opcode = misaligned_p ? "vmovdqu64" : "vmovdqa64";
+ opcode = "vmovdqu64";
break;
default:
gcc_unreachable ();

View File

@@ -1,17 +0,0 @@
Without ffile-prefix-map, the debug symbols will contain paths for the
guix store which will include the hashes of each package. However, the
hash for the same package will differ when on different architectures.
In order to be reproducible regardless of the architecture used to build
the package, map all guix store prefixes to something fixed, e.g. /usr.
--- a/mingw-w64-libraries/winpthreads/Makefile.in
+++ b/mingw-w64-libraries/winpthreads/Makefile.in
@@ -478,7 +478,7 @@ top_build_prefix = @top_build_prefix@
top_builddir = @top_builddir@
top_srcdir = @top_srcdir@
SUBDIRS = . tests
-AM_CFLAGS = -Wall -DWIN32_LEAN_AND_MEAN $(am__append_1)
+AM_CFLAGS = -Wall -DWIN32_LEAN_AND_MEAN $(am__append_1) $(shell find /gnu/store -maxdepth 1 -mindepth 1 -type d -exec echo -n " -ffile-prefix-map={}=/usr" \;)
ACLOCAL_AMFLAGS = -I m4
lib_LTLIBRARIES = libwinpthread.la
include_HEADERS = include/pthread.h include/sched.h include/semaphore.h include/pthread_unistd.h include/pthread_time.h include/pthread_compat.h include/pthread_signal.h

View File

@@ -81,8 +81,5 @@ PrivateDevices=true
# Deny the creation of writable and executable memory mappings.
MemoryDenyWriteExecute=true
# Restrict ABIs to help ensure MemoryDenyWriteExecute is enforced
SystemCallArchitectures=native
[Install]
WantedBy=multi-user.target

View File

@@ -76,16 +76,6 @@ def getFirstBlockFileId(block_dir_path):
blkId = int(firstBlkFn[3:8])
return blkId
def read_xor_key(blocks_path):
NUM_XOR_BYTES = 8 # From InitBlocksdirXorKey::xor_key.size()
try:
xor_filename = os.path.join(blocks_path, "xor.dat")
with open(xor_filename, "rb") as xor_file:
return xor_file.read(NUM_XOR_BYTES)
# also support blockdirs created with pre-v28 versions, where no xor key exists yet
except FileNotFoundError:
return bytes([0] * NUM_XOR_BYTES)
# Block header and extent on disk
BlockExtent = namedtuple('BlockExtent', ['fn', 'offset', 'inhdr', 'blkhdr', 'size'])
@@ -105,7 +95,6 @@ class BlockDataCopier:
self.outFname = None
self.blkCountIn = 0
self.blkCountOut = 0
self.xor_key = read_xor_key(self.settings['input'])
self.lastDate = datetime.datetime(2000, 1, 1)
self.highTS = 1408893517 - 315360000
@@ -124,13 +113,6 @@ class BlockDataCopier:
self.outOfOrderData = {}
self.outOfOrderSize = 0 # running total size for items in outOfOrderData
def read_xored(self, f, size):
offset = f.tell()
data = bytearray(f.read(size))
for i in range(len(data)):
data[i] ^= self.xor_key[(i + offset) % len(self.xor_key)]
return bytes(data)
def writeBlock(self, inhdr, blk_hdr, rawblock):
blockSizeOnDisk = len(inhdr) + len(blk_hdr) + len(rawblock)
if not self.fileOutput and ((self.outsz + blockSizeOnDisk) > self.maxOutSz):
@@ -183,7 +165,7 @@ class BlockDataCopier:
'''Fetch block contents from disk given extents'''
with open(self.inFileName(extent.fn), "rb") as f:
f.seek(extent.offset)
return self.read_xored(f, extent.size)
return f.read(extent.size)
def copyOneBlock(self):
'''Find the next block to be written in the input, and copy it to the output.'''
@@ -208,7 +190,7 @@ class BlockDataCopier:
print("Premature end of block data")
return
inhdr = self.read_xored(self.inF, 8)
inhdr = self.inF.read(8)
if (not inhdr or (inhdr[0] == "\0")):
self.inF.close()
self.inF = None
@@ -225,7 +207,7 @@ class BlockDataCopier:
inLenLE = inhdr[4:]
su = struct.unpack("<I", inLenLE)
inLen = su[0] - 80 # length without header
blk_hdr = self.read_xored(self.inF, 80)
blk_hdr = self.inF.read(80)
inExtent = BlockExtent(self.inFn, self.inF.tell(), inhdr, blk_hdr, inLen)
self.hash_str = calc_hash_str(blk_hdr)
@@ -242,7 +224,7 @@ class BlockDataCopier:
if self.blkCountOut == blkHeight:
# If in-order block, just copy
rawblock = self.read_xored(self.inF, inLen)
rawblock = self.inF.read(inLen)
self.writeBlock(inhdr, blk_hdr, rawblock)
# See if we can catch up to prior out-of-order blocks
@@ -255,7 +237,7 @@ class BlockDataCopier:
# If there is space in the cache, read the data
# Reading the data in file sequence instead of seeking and fetching it later is preferred,
# but we don't want to fill up memory
self.outOfOrderData[blkHeight] = self.read_xored(self.inF, inLen)
self.outOfOrderData[blkHeight] = self.inF.read(inLen)
self.outOfOrderSize += inLen
else: # If no space in cache, seek forward
self.inF.seek(inLen, os.SEEK_CUR)

View File

@@ -56,22 +56,46 @@ The `sha256sum` should be `c0c2e7bb92c1fee0c4e9f3a485e4530786732d6c6dd9e9f418c28
## Deterministic macOS App Notes
macOS Applications are created on Linux using a recent LLVM.
macOS Applications are created in Linux by combining a recent `clang` and the Apple
`binutils` (`ld`, `ar`, etc).
All builds must target an Apple SDK. These SDKs are free to download, but not redistributable.
See the SDK Extraction notes above for how to obtain it.
Apple uses `clang` extensively for development and has upstreamed the necessary
functionality so that a vanilla clang can take advantage. It supports the use of `-F`,
`-target`, `-mmacosx-version-min`, and `-isysroot`, which are all necessary when
building for macOS.
The Guix build process has been designed to avoid including the SDK's files in Guix's outputs.
All interim tarballs are fully deterministic and may be freely redistributed.
Apple's version of `binutils` (called `cctools`) contains lots of functionality missing in the
FSF's `binutils`. In addition to extra linker options for frameworks and sysroots, several
other tools are needed as well such as `install_name_tool`, `lipo`, and `nmedit`. These
do not build under Linux, so they have been patched to do so. The work here was used as
a starting point: [mingwandroid/toolchain4](https://github.com/mingwandroid/toolchain4).
Using an Apple-blessed key to sign binaries is a requirement to produce (distributable) macOS
binaries. Because this private key cannot be shared, we'll have to be a bit creative in order
for the build process to remain somewhat deterministic. Here's how it works:
In order to build a working toolchain, the following source packages are needed from
Apple: `cctools`, `dyld`, and `ld64`.
These tools inject timestamps by default, which produce non-deterministic binaries. The
`ZERO_AR_DATE` environment variable is used to disable that.
This version of `cctools` has been patched to use the current version of `clang`'s headers
and its `libLTO.so` rather than those from `llvmgcc`, as it was originally done in `toolchain4`.
To complicate things further, all builds must target an Apple SDK. These SDKs are free to
download, but not redistributable. See the SDK Extraction notes above for how to obtain it.
The Guix process builds 2 sets of files: Linux tools, then Apple binaries which are
created using these tools. The build process has been designed to avoid including the
SDK's files in Guix's outputs. All interim tarballs are fully deterministic and may be freely
redistributed.
As of OS X 10.9 Mavericks, using an Apple-blessed key to sign binaries is a requirement in
order to satisfy the new Gatekeeper requirements. Because this private key cannot be
shared, we'll have to be a bit creative in order for the build process to remain somewhat
deterministic. Here's how it works:
- Builders use Guix to create an unsigned release. This outputs an unsigned ZIP which
users may choose to bless, self-codesign, and run. It also outputs an unsigned app structure
in the form of a tarball.
users may choose to bless and run. It also outputs an unsigned app structure in the form
of a tarball.
- The Apple keyholder uses this unsigned app to create a detached signature, using the
included script. Detached signatures are available from this [repository](https://github.com/bitcoin-core/bitcoin-detached-sigs).
- Builders feed the unsigned app + detached signature back into Guix, which combines the
pieces into a deterministic ZIP.
script that is also included there. Detached signatures are available from this [repository](https://github.com/bitcoin-core/bitcoin-detached-sigs).
- Builders feed the unsigned app + detached signature back into Guix. It uses the
pre-built tools to recombine the pieces into a deterministic ZIP.

View File

@@ -6,57 +6,26 @@
export LC_ALL=C
set -e
ROOTDIR=dist
BUNDLE="${ROOTDIR}/Bitcoin-Qt.app"
BINARY="${BUNDLE}/Contents/MacOS/Bitcoin-Qt"
SIGNAPPLE=signapple
TEMPDIR=sign.temp
BUNDLE_ROOT=dist
BUNDLE_NAME="Bitcoin-Qt.app"
UNSIGNED_BUNDLE="${BUNDLE_ROOT}/${BUNDLE_NAME}"
UNSIGNED_BINARY="${UNSIGNED_BUNDLE}/Contents/MacOS/Bitcoin-Qt"
ARCH=$(${SIGNAPPLE} info ${UNSIGNED_BINARY} | head -n 1 | cut -d " " -f 1)
OUTDIR="osx/${ARCH}-apple-darwin"
OUTROOT="${TEMPDIR}/${OUTDIR}"
ARCH=$(${SIGNAPPLE} info ${BINARY} | head -n 1 | cut -d " " -f 1)
OUT="signature-osx-${ARCH}.tar.gz"
OUTROOT=osx/dist
if [ "$#" -ne 3 ]; then
echo "usage: $0 <path to key> <path to app store connect key> <apple developer team uuid>"
if [ -z "$1" ]; then
echo "usage: $0 <signapple args>"
echo "example: $0 <path to key>"
exit 1
fi
rm -rf ${TEMPDIR}
mkdir -p ${TEMPDIR}
stty -echo
printf "Enter the passphrase for %s: " "$1"
read cs_key_pass
printf "\n"
printf "Enter the passphrase for %s: " "$2"
read api_key_pass
printf "\n"
stty echo
${SIGNAPPLE} sign -f --detach "${TEMPDIR}/${OUTROOT}" "$@" "${BUNDLE}" --hardened-runtime
# Sign and notarize app bundle
${SIGNAPPLE} sign -f --hardened-runtime --detach "${OUTROOT}/${BUNDLE_ROOT}" --passphrase "${cs_key_pass}" "$1" "${UNSIGNED_BUNDLE}"
${SIGNAPPLE} apply "${UNSIGNED_BUNDLE}" "${OUTROOT}/${BUNDLE_ROOT}/${BUNDLE_NAME}"
${SIGNAPPLE} notarize --detach "${OUTROOT}/${BUNDLE_ROOT}" --passphrase "${api_key_pass}" "$2" "$3" "${UNSIGNED_BUNDLE}"
# Sign each binary
find . -maxdepth 3 -wholename "*/bin/*" -type f -exec realpath --relative-to=. {} \; | while read -r bin
do
bin_dir=$(dirname "${bin}")
bin_name=$(basename "${bin}")
${SIGNAPPLE} sign -f --hardened-runtime --detach "${OUTROOT}/${bin_dir}" --passphrase "${cs_key_pass}" "$1" "${bin}"
${SIGNAPPLE} apply "${bin}" "${OUTROOT}/${bin_dir}/${bin_name}.${ARCH}sign"
done
# Notarize the binaries
# Binaries cannot have stapled notarizations so this does not actually generate any output
binaries_dir=$(dirname "$(find . -maxdepth 2 -wholename '*/bin' -type d -exec realpath --relative-to=. {} \;)")
${SIGNAPPLE} notarize --passphrase "${api_key_pass}" "$2" "$3" "${binaries_dir}"
tar -C "${TEMPDIR}" -czf "${OUT}" "${OUTDIR}"
tar -C "${TEMPDIR}" -czf "${OUT}" .
rm -rf "${TEMPDIR}"
echo "Created ${OUT}"

View File

@@ -8,6 +8,21 @@ import gzip
import os
import contextlib
# monkey-patch Python 3.8 and older to fix wrong TAR header handling
# see https://github.com/bitcoin/bitcoin/pull/24534
# and https://github.com/python/cpython/pull/18080 for more info
if sys.version_info < (3, 9):
_old_create_header = tarfile.TarInfo._create_header
def _create_header(info, format, encoding, errors):
buf = _old_create_header(info, format, encoding, errors)
# replace devmajor/devminor with binary zeroes
buf = buf[:329] + bytes(16) + buf[345:]
# recompute checksum
chksum = tarfile.calc_chksums(buf)[0]
buf = buf[:-364] + bytes("%06o\0" % chksum, "ascii") + buf[-357:]
return buf
tarfile.TarInfo._create_header = staticmethod(_create_header)
@contextlib.contextmanager
def cd(path):
"""Context manager that restores PWD even if an exception was raised."""

View File

@@ -77,7 +77,7 @@ class FrameworkInfo(object):
bundleBinaryDirectory = "Contents/MacOS"
@classmethod
def fromLibraryLine(cls, line: str) -> Optional['FrameworkInfo']:
def fromOtoolLibraryLine(cls, line: str) -> Optional['FrameworkInfo']:
# Note: line must be trimmed
if line == "":
return None
@@ -88,7 +88,7 @@ class FrameworkInfo(object):
m = cls.reOLine.match(line)
if m is None:
raise RuntimeError(f"Line could not be parsed: {line}")
raise RuntimeError(f"otool line could not be parsed: {line}")
path = m.group(1)
@@ -120,7 +120,7 @@ class FrameworkInfo(object):
break
i += 1
if i == len(parts):
raise RuntimeError(f"Could not find .framework or .dylib in line: {line}")
raise RuntimeError(f"Could not find .framework or .dylib in otool line: {line}")
info.frameworkName = parts[i]
info.frameworkDirectory = "/".join(parts[:i])
@@ -182,24 +182,24 @@ class DeploymentInfo(object):
return False
def getFrameworks(binaryPath: str, verbose: int) -> list[FrameworkInfo]:
objdump = os.getenv("OBJDUMP", "objdump")
if verbose:
print(f"Inspecting with {objdump}: {binaryPath}")
output = run([objdump, "--macho", "--dylibs-used", binaryPath], stdout=PIPE, stderr=PIPE, text=True)
if output.returncode != 0:
sys.stderr.write(output.stderr)
print(f"Inspecting with otool: {binaryPath}")
otoolbin=os.getenv("OTOOL", "otool")
otool = run([otoolbin, "-L", binaryPath], stdout=PIPE, stderr=PIPE, text=True)
if otool.returncode != 0:
sys.stderr.write(otool.stderr)
sys.stderr.flush()
raise RuntimeError(f"{objdump} failed with return code {output.returncode}")
raise RuntimeError(f"otool failed with return code {otool.returncode}")
lines = output.stdout.split("\n")
lines.pop(0) # First line is the inspected binary
otoolLines = otool.stdout.split("\n")
otoolLines.pop(0) # First line is the inspected binary
if ".framework" in binaryPath or binaryPath.endswith(".dylib"):
lines.pop(0) # Frameworks and dylibs list themselves as a dependency.
otoolLines.pop(0) # Frameworks and dylibs list themselves as a dependency.
libraries = []
for line in lines:
for line in otoolLines:
line = line.replace("@loader_path", os.path.dirname(binaryPath))
info = FrameworkInfo.fromLibraryLine(line.strip())
info = FrameworkInfo.fromOtoolLibraryLine(line.strip())
if info is not None:
if verbose:
print("Found framework:")
@@ -465,18 +465,18 @@ if config.translations_dir:
sys.stderr.write(f"Error: Could not find translation dir \"{config.translations_dir[0]}\"\n")
sys.exit(1)
print("+ Adding Qt translations +")
print("+ Adding Qt translations +")
translations = Path(config.translations_dir[0])
translations = Path(config.translations_dir[0])
regex = re.compile('qt_[a-z]*(.qm|_[A-Z]*.qm)')
regex = re.compile('qt_[a-z]*(.qm|_[A-Z]*.qm)')
lang_files = [x for x in translations.iterdir() if regex.match(x.name)]
lang_files = [x for x in translations.iterdir() if regex.match(x.name)]
for file in lang_files:
if verbose:
print(file.as_posix(), "->", os.path.join(applicationBundle.resourcesPath, file.name))
shutil.copy2(file.as_posix(), os.path.join(applicationBundle.resourcesPath, file.name))
for file in lang_files:
if verbose:
print(file.as_posix(), "->", os.path.join(applicationBundle.resourcesPath, file.name))
shutil.copy2(file.as_posix(), os.path.join(applicationBundle.resourcesPath, file.name))
# ------------------------------------------------

View File

@@ -1,3 +1,2 @@
seeds_main.txt
seeds_test.txt
asmap-filled.dat

Some files were not shown because too many files have changed in this diff.