mirror of https://github.com/ton-blockchain/ton synced 2025-03-09 15:40:10 +00:00

Merge pull request #863 from ton-blockchain/testnet

Merge developer branch
EmelyanenkoK 2024-01-17 10:00:03 +03:00 committed by GitHub
commit 2748477b14
116 changed files with 2827 additions and 1092 deletions

.editorconfig
View file

@ -0,0 +1,8 @@
root = true
[*]
end_of_line = lf
insert_final_newline = true
charset = utf-8
indent_style = space
indent_size = 2

View file

@ -0,0 +1,32 @@
name: Tonlib Android
on: [push,workflow_dispatch,workflow_call]
jobs:
build:
runs-on: ubuntu-22.04
steps:
- name: Check out repository
uses: actions/checkout@v3
with:
submodules: 'recursive'
- name: Install system libraries
run: |
sudo apt-get update
sudo apt-get install -y build-essential git cmake ninja-build automake libtool texinfo autoconf libgflags-dev \
zlib1g-dev libssl-dev libreadline-dev libmicrohttpd-dev pkg-config libgsl-dev python3 python3-dev \
libtool autoconf libsodium-dev libsecp256k1-dev
- name: Build TON
run: |
cp assembly/android/build-android-tonlib.sh .
chmod +x build-android-tonlib.sh
./build-android-tonlib.sh -a
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
name: tonlib-android
path: artifacts

View file

@ -0,0 +1,40 @@
name: Ubuntu TON build (shared, x86-64)
on: [push,workflow_dispatch,workflow_call]
jobs:
build:
strategy:
fail-fast: false
matrix:
os: [ubuntu-20.04, ubuntu-22.04]
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
with:
submodules: 'recursive'
- name: Install system libraries
run: |
sudo apt-get update
sudo apt-get install -y build-essential git cmake ninja-build zlib1g-dev libsecp256k1-dev libmicrohttpd-dev libsodium-dev
- name: Install clang-16
run: |
wget https://apt.llvm.org/llvm.sh
chmod +x llvm.sh
sudo ./llvm.sh 16 all
- name: Build TON
run: |
cp assembly/native/build-ubuntu-shared.sh .
chmod +x build-ubuntu-shared.sh
./build-ubuntu-shared.sh -t -a
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
name: ton-binaries-${{ matrix.os }}
path: artifacts

View file

@ -0,0 +1,25 @@
name: MacOS TON build (shared, x86-64)
on: [push,workflow_dispatch,workflow_call]
jobs:
build:
runs-on: macos-12
steps:
- name: Check out repository
uses: actions/checkout@v3
with:
submodules: 'recursive'
- name: Build TON
run: |
cp assembly/native/build-macos-shared.sh .
chmod +x build-macos-shared.sh
./build-macos-shared.sh -t -a
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
name: ton-binaries-macos-12
path: artifacts

View file

@ -0,0 +1,30 @@
name: Emscripten TON build (wasm)
on: [push,workflow_dispatch,workflow_call]
jobs:
build:
runs-on: ubuntu-22.04
steps:
- name: Check out repository
uses: actions/checkout@v3
with:
submodules: 'recursive'
- name: Install system libraries
run: |
sudo apt-get update
sudo apt-get install -y build-essential git openssl cmake ninja-build zlib1g-dev libssl-dev libsecp256k1-dev libmicrohttpd-dev libsodium-dev
- name: Build TON WASM artifacts
run: |
cd assembly/wasm
chmod +x fift-func-wasm-build-ubuntu.sh
./fift-func-wasm-build-ubuntu.sh -a
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
name: ton-wasm-binaries
path: artifacts

View file

@ -46,7 +46,7 @@ jobs:
- name: Download Windows artifacts
uses: dawidd6/action-download-artifact@v2
with:
workflow: win-2019-compile.yml
workflow: ton-x86-64-windows.yml
path: artifacts
workflow_conclusion: success
skip_unpack: true
@ -54,7 +54,7 @@ jobs:
- name: Download and unzip Windows artifacts
uses: dawidd6/action-download-artifact@v2
with:
workflow: win-2019-compile.yml
workflow: ton-x86-64-windows.yml
path: artifacts
workflow_conclusion: success
skip_unpack: false
@ -62,7 +62,7 @@ jobs:
- name: Download WASM artifacts
uses: dawidd6/action-download-artifact@v2
with:
workflow: ton-wasm-emscripten.yml
workflow: build-ton-wasm-emscripten.yml
path: artifacts
workflow_conclusion: success
skip_unpack: true

View file

@ -1,4 +1,4 @@
name: Docker Ubuntu 20.04 image
name: Docker Ubuntu 22.04 image
on:
workflow_dispatch:
@ -12,10 +12,12 @@ env:
jobs:
build-and-push:
runs-on: ubuntu-20.04
runs-on: ubuntu-22.04
steps:
- name: Checkout
- name: Check out repository
uses: actions/checkout@v3
with:
submodules: 'recursive'
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
@ -35,5 +37,5 @@ jobs:
uses: docker/build-push-action@v2
with:
push: true
context: ./docker
context: ./
tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest

View file

@ -1,106 +0,0 @@
name: MacOS 11.7 Big Sur x86-64 Compile
on: [push,workflow_dispatch,workflow_call]
jobs:
build:
runs-on: macos-11
steps:
- name: Check out repository
uses: actions/checkout@v3
with:
submodules: 'recursive'
- name: Compile Secp256k1
run: |
export NONINTERACTIVE=1
brew install ninja secp256k1 libsodium libmicrohttpd pkg-config automake libtool
git clone https://github.com/libbitcoin/secp256k1.git
cd secp256k1
./autogen.sh
./configure --enable-module-recovery
make
make install
- name: Build all
run: |
brew unlink openssl@1.1
brew install openssl@3
brew unlink openssl@3 && brew link --overwrite openssl@3
rootPath=`pwd`
mkdir build
cd build
cmake -GNinja -DCMAKE_OSX_DEPLOYMENT_TARGET:STRING=11.7 -DCMAKE_CXX_FLAGS="-stdlib=libc++" -DCMAKE_BUILD_TYPE=Release ..
ninja storage-daemon storage-daemon-cli blockchain-explorer fift func tonlib tonlibjson tonlib-cli validator-engine \
lite-client pow-miner validator-engine-console generate-random-id json2tlo dht-server \
http-proxy rldp-http-proxy adnl-proxy create-state create-hardfork tlbc emulator \
test-ed25519 test-ed25519-crypto test-bigint test-vm test-fift test-cells test-smartcont test-net test-tdactor \
test-tdutils test-tonlib-offline test-adnl test-dht test-rldp test-rldp2 test-catchain test-fec test-tddb test-db test-validator-session-state
- name: Strip binaries
run: |
strip build/storage/storage-daemon/storage-daemon
strip build/storage/storage-daemon/storage-daemon-cli
strip build/blockchain-explorer/blockchain-explorer
strip build/crypto/fift
strip build/crypto/func
strip build/crypto/create-state
strip build/crypto/tlbc
strip build/validator-engine-console/validator-engine-console
strip build/tonlib/tonlib-cli
strip build/http/http-proxy
strip build/rldp-http-proxy/rldp-http-proxy
strip build/dht-server/dht-server
strip build/lite-client/lite-client
strip build/validator-engine/validator-engine
strip build/utils/generate-random-id
strip build/utils/json2tlo
strip build/adnl/adnl-proxy
- name: Run tests
run: |
cd build
ctest --output-on-failure -E "test-catchain|test-actors"
- name: Find & copy binaries
run: |
mkdir artifacts
cp build/storage/storage-daemon/storage-daemon artifacts/
cp build/storage/storage-daemon/storage-daemon-cli artifacts/
cp build/blockchain-explorer/blockchain-explorer artifacts/
cp build/crypto/fift artifacts/
cp build/crypto/func artifacts/
cp build/crypto/create-state artifacts/
cp build/crypto/tlbc artifacts/
cp build/validator-engine-console/validator-engine-console artifacts/
cp build/tonlib/tonlib-cli artifacts/
cp build/tonlib/libtonlibjson.0.5.dylib artifacts/libtonlibjson.dylib
cp build/http/http-proxy artifacts/
cp build/rldp-http-proxy/rldp-http-proxy artifacts/
cp build/dht-server/dht-server artifacts/
cp build/lite-client/lite-client artifacts/
cp build/validator-engine/validator-engine artifacts/
cp build/utils/generate-random-id artifacts/
cp build/utils/json2tlo artifacts/
cp build/adnl/adnl-proxy artifacts/
cp build/emulator/*emulator.* artifacts/
chmod +x artifacts/*
rsync -r crypto/smartcont artifacts/
rsync -r crypto/fift/lib artifacts/
ls -laRt artifacts
- name: Simple binaries test
run: |
artifacts/validator-engine -V
artifacts/lite-client -V
artifacts/fift -V
artifacts/func -V
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
name: ton-macos-11.7
path: artifacts

View file

@ -1,106 +0,0 @@
name: MacOS 12.6 Monterey x86-64 Compile
on: [push,workflow_dispatch,workflow_call]
jobs:
build:
runs-on: macos-12
steps:
- name: Check out repository
uses: actions/checkout@v3
with:
submodules: 'recursive'
- name: Compile Secp256k1
run: |
export NONINTERACTIVE=1
brew install ninja secp256k1 libsodium libmicrohttpd pkg-config automake libtool
git clone https://github.com/libbitcoin/secp256k1.git
cd secp256k1
./autogen.sh
./configure --enable-module-recovery
make
make install
- name: Build all
run: |
brew unlink openssl@1.1
brew install openssl@3
brew unlink openssl@3 && brew link --overwrite openssl@3
rootPath=`pwd`
mkdir build
cd build
cmake -GNinja -DCMAKE_OSX_DEPLOYMENT_TARGET:STRING=12.6 -DCMAKE_CXX_FLAGS="-stdlib=libc++" -DCMAKE_BUILD_TYPE=Release ..
ninja storage-daemon storage-daemon-cli blockchain-explorer fift func tonlib tonlibjson tonlib-cli \
validator-engine lite-client pow-miner validator-engine-console generate-random-id json2tlo dht-server \
http-proxy rldp-http-proxy adnl-proxy create-state create-hardfork tlbc emulator \
test-ed25519 test-ed25519-crypto test-bigint test-vm test-fift test-cells test-smartcont test-net test-tdactor \
test-tdutils test-tonlib-offline test-adnl test-dht test-rldp test-rldp2 test-catchain test-fec test-tddb test-db test-validator-session-state
- name: Strip binaries
run: |
strip build/storage/storage-daemon/storage-daemon
strip build/storage/storage-daemon/storage-daemon-cli
strip build/blockchain-explorer/blockchain-explorer
strip build/crypto/fift
strip build/crypto/func
strip build/crypto/create-state
strip build/crypto/tlbc
strip build/validator-engine-console/validator-engine-console
strip build/tonlib/tonlib-cli
strip build/http/http-proxy
strip build/rldp-http-proxy/rldp-http-proxy
strip build/dht-server/dht-server
strip build/lite-client/lite-client
strip build/validator-engine/validator-engine
strip build/utils/generate-random-id
strip build/utils/json2tlo
strip build/adnl/adnl-proxy
- name: Run tests
run: |
cd build
ctest --output-on-failure -E "test-catchain|test-actors"
- name: Find & copy binaries
run: |
mkdir artifacts
cp build/storage/storage-daemon/storage-daemon artifacts/
cp build/storage/storage-daemon/storage-daemon-cli artifacts/
cp build/blockchain-explorer/blockchain-explorer artifacts/
cp build/crypto/fift artifacts/
cp build/crypto/func artifacts/
cp build/crypto/create-state artifacts/
cp build/crypto/tlbc artifacts/
cp build/validator-engine-console/validator-engine-console artifacts/
cp build/tonlib/tonlib-cli artifacts/
cp build/tonlib/libtonlibjson.0.5.dylib artifacts/libtonlibjson.dylib
cp build/http/http-proxy artifacts/
cp build/rldp-http-proxy/rldp-http-proxy artifacts/
cp build/dht-server/dht-server artifacts/
cp build/lite-client/lite-client artifacts/
cp build/validator-engine/validator-engine artifacts/
cp build/utils/generate-random-id artifacts/
cp build/utils/json2tlo artifacts/
cp build/adnl/adnl-proxy artifacts/
cp build/emulator/*emulator.* artifacts/
chmod +x artifacts/*
rsync -r crypto/smartcont artifacts/
rsync -r crypto/fift/lib artifacts/
ls -laRt artifacts
- name: Simple binaries test
run: |
artifacts/validator-engine -V
artifacts/lite-client -V
artifacts/fift -V
artifacts/func -V
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
name: ton-macos-12.6
path: artifacts

View file

@ -1,50 +0,0 @@
name: "TON aarch64 Linux binaries"
on: [workflow_dispatch,workflow_call]
jobs:
build:
runs-on: ubuntu-22.04
steps:
- run: |
sudo apt update
sudo apt install -y apt-utils
sudo apt install -q -y qemu-system-aarch64 qemu-efi binfmt-support qemu-user-static
- uses: actions/checkout@v3
with:
submodules: 'recursive'
- uses: cachix/install-nix-action@v18
with:
extra_nix_config: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
- name: Compile
run: nix build .?submodules=1#packages.aarch64-linux.ton-oldglibc_staticbinaries --print-build-logs --system aarch64-linux -o result-aarch64
- name: Copy binaries
run: |
ls -lart
mkdir artifacts
cp $PWD/result-aarch64-linux/bin/* artifacts/
chmod +x artifacts/*
cp $PWD/result-aarch64-linux/lib/libtonlibjson.so.0.5 artifacts/libtonlibjson.so
cp $PWD/result-aarch64-linux/lib/libemulator.so artifacts/
cp -R crypto/smartcont artifacts/
cp -R crypto/fift/lib artifacts/
- name: Simple binaries test
run: |
sudo mv /nix/store /nix/store2
artifacts/validator-engine -V
artifacts/lite-client -V
artifacts/fift -V
artifacts/func -V
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
name: ton-aarch64-linux-binaries
path: artifacts

View file

@ -1,47 +0,0 @@
name: "TON aarch64 macOS binaries"
on: [workflow_dispatch,workflow_call]
jobs:
build:
runs-on: macos-12
steps:
- run: brew install qemu
- uses: actions/checkout@v3
with:
submodules: 'recursive'
- uses: cachix/install-nix-action@v18
with:
extra_nix_config: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
- name: Compile
run: nix build .?submodules=1#packages.aarch64-darwin.ton-staticbin-dylib --print-build-logs -o result-aarch64-darwin
- name: Copy binaries
run: |
ls -lart
mkdir artifacts
cp $PWD/result-aarch64-darwin/bin/* artifacts/
chmod +x artifacts/*
cp $PWD/result-aarch64-darwin/lib/libtonlibjson* artifacts/
cp $PWD/result-aarch64-darwin/lib/libemulator* artifacts/
cp -R crypto/smartcont artifacts/
cp -R crypto/fift/lib artifacts/
- name: Simple binaries test
run: |
sudo mv /nix/store /nix/store2
artifacts/validator-engine -V
artifacts/lite-client -V
artifacts/fift -V
artifacts/func -V
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
name: ton-aarch64-macos-binaries
path: artifacts

View file

@ -1,10 +1,9 @@
name: TON Ccpcheck
name: TON Static Code Analysis
on: [push,workflow_dispatch,workflow_call]
jobs:
build:
runs-on: ubuntu-22.04
steps:

View file

@ -1,46 +0,0 @@
name: TON WASM Compile
on: [push,workflow_dispatch,workflow_call]
jobs:
build:
runs-on: ubuntu-22.04
steps:
- name: Check out repository
uses: actions/checkout@v3
with:
submodules: 'recursive'
- name: Install libraries
run: |
sudo apt update
sudo apt install -y build-essential git make cmake ninja-build clang libgflags-dev zlib1g-dev libssl-dev libreadline-dev libmicrohttpd-dev pkg-config libgsl-dev python3 python3-dev python3-pip nodejs libsecp256k1-dev libsodium-dev automake libtool
- name: Setup compiler
run: |
wget https://apt.llvm.org/llvm.sh
chmod +x llvm.sh
sudo ./llvm.sh 16 all
- name: Configure & Build
run: |
cd .github/script
./fift-func-wasm-build-ubuntu.sh
- name: Find & copy binaries
run: |
mkdir artifacts
ls build/crypto
cp build/crypto/fift* artifacts
cp build/crypto/func* artifacts
cp build/crypto/tlbc* artifacts
cp build/emulator/emulator-emscripten* artifacts
cp -R crypto/smartcont artifacts
cp -R crypto/fift/lib artifacts
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
name: ton-wasm-binaries
path: artifacts

View file

@ -1,4 +1,4 @@
name: "TON x86_64 Linux binaries"
name: Ubuntu TON build (portable, x86-64)
on: [push,workflow_dispatch,workflow_call]
@ -20,19 +20,11 @@ jobs:
extra_nix_config: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
- name: Compile
run: nix build .?submodules=1#packages.x86_64-linux.ton-oldglibc_staticbinaries --print-build-logs --system x86_64-linux -o result-x86_64
- name: Copy binaries
- name: Build TON
run: |
ls -lart
mkdir artifacts
cp $PWD/result-x86_64/bin/* artifacts/
chmod +x artifacts/*
cp $PWD/result-x86_64/lib/libtonlibjson.so.0.5 artifacts/libtonlibjson.so
cp $PWD/result-x86_64/lib/libemulator.so artifacts/
cp -R crypto/smartcont artifacts/
cp -R crypto/fift/lib artifacts/
cp assembly/nix/build-linux-x86-64-nix.sh .
chmod +x build-linux-x86-64-nix.sh
./build-linux-x86-64-nix.sh
- name: Simple binaries test
run: |

View file

@ -1,4 +1,4 @@
name: "TON x86_64 macOS binaries"
name: MacOS TON build (portable, x86-64)
on: [push,workflow_dispatch,workflow_call]
@ -16,19 +16,11 @@ jobs:
extra_nix_config: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
- name: Compile
run: nix build .?submodules=1#packages.x86_64-darwin.ton-staticbin-dylib --print-build-logs -o result-x86_64-darwin
- name: Copy binaries
- name: Build TON
run: |
ls -lart
mkdir artifacts
cp $PWD/result-x86_64-darwin/bin/* artifacts/
chmod +x artifacts/*
cp $PWD/result-x86_64-darwin/lib/libtonlibjson.dylib artifacts/
cp $PWD/result-x86_64-darwin/lib/libemulator.dylib artifacts/
cp -R crypto/smartcont artifacts/
cp -R crypto/fift/lib artifacts/
cp assembly/nix/build-macos-nix.sh .
chmod +x build-macos-nix.sh
./build-macos-nix.sh
- name: Simple binaries test
run: |

View file

@ -0,0 +1,34 @@
name: Windows TON build (portable, x86-64)
on: [push,workflow_dispatch,workflow_call]
defaults:
run:
shell: cmd
jobs:
build:
runs-on: windows-2022
steps:
- name: Get Current OS version
run: |
systeminfo | findstr /B /C:"OS Name" /C:"OS Version"
- name: Check out current repository
uses: actions/checkout@v3
with:
submodules: 'recursive'
- name: Build TON
run: |
copy assembly\native\build-windows-github.bat .
copy assembly\native\build-windows.bat .
build-windows-github.bat Enterprise
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
name: ton-win-binaries
path: artifacts

View file

@ -1,61 +0,0 @@
name: Tonlib Android JNI
on: [push,workflow_dispatch,workflow_call]
jobs:
build:
runs-on: ubuntu-22.04
steps:
- name: Check out repository
uses: actions/checkout@v3
with:
submodules: 'recursive'
- name: Install libraries
run: |
sudo apt update
sudo apt install -y build-essential git make cmake clang libgflags-dev zlib1g-dev libssl-dev libreadline-dev libmicrohttpd-dev pkg-config libgsl-dev python3 python3-dev ninja-build
- name: Configure & Build
run: |
wget -q https://dl.google.com/android/repository/android-ndk-r25b-linux.zip
unzip -q android-ndk-r25b-linux.zip
export JAVA_AWT_LIBRARY=NotNeeded
export JAVA_JVM_LIBRARY=NotNeeded
export JAVA_INCLUDE_PATH=${JAVA_HOME}/include
export JAVA_AWT_INCLUDE_PATH=${JAVA_HOME}/include
export JAVA_INCLUDE_PATH2=${JAVA_HOME}/include/linux
export ANDROID_NDK_ROOT=$(pwd)/android-ndk-r25b
export NDK_PLATFORM="android-21"
export ANDROID_PLATFORM="android-21"
export OPENSSL_DIR=$(pwd)/example/android/third_party/crypto
rm -rf example/android/src/drinkless/org/ton/TonApi.java
cd example/android/
sudo apt install -y libtool autoconf libsodium-dev libsecp256k1-dev
cmake -GNinja -DTON_ONLY_TONLIB=ON .
ninja prepare_cross_compiling
sudo apt remove -y libsodium-dev libsecp256k1-dev
rm CMakeCache.txt
./build-all.sh
find . -name "*.debug" -type f -delete
- name: Find & copy binaries
run: |
mkdir -p artifacts/tonlib-android-jni
cp example/android/src/drinkless/org/ton/TonApi.java artifacts/tonlib-android-jni/
cp -R example/android/libs/* artifacts/tonlib-android-jni/
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
name: tonlib-android-jni
path: artifacts

View file

@ -1,78 +0,0 @@
name: Ubuntu 22.04 Compile
on: [push,workflow_dispatch,workflow_call]
jobs:
build:
runs-on: ubuntu-22.04
steps:
- name: Check out repository
uses: actions/checkout@v3
with:
submodules: 'recursive'
- name: Install libraries
run: |
sudo apt update
sudo apt install -y build-essential git make cmake clang libgflags-dev zlib1g-dev libssl-dev libreadline-dev libmicrohttpd-dev pkg-config libgsl-dev python3 python3-dev ninja-build libsecp256k1-dev libsodium-dev
- name: Show CPU flags
run: |
cat /proc/cpuinfo
- name: Configure & Build
run: |
export CC=$(which clang)
export CXX=$(which clang++)
export CCACHE_DISABLE=1
git clone https://github.com/openssl/openssl openssl_3
cd openssl_3
git checkout openssl-3.1.4
./config
make build_libs -j4
cd ..
rootPath=`pwd`
mkdir build
cd build
cmake -GNinja -DOPENSSL_FOUND=1 -DOPENSSL_INCLUDE_DIR=$rootPath/openssl_3/include -DOPENSSL_CRYPTO_LIBRARY=$rootPath/openssl_3/libcrypto.a -DCMAKE_BUILD_TYPE=Release -DPORTABLE=1 -DTON_ARCH= ..
ninja storage-daemon storage-daemon-cli fift func tonlib tonlibjson tonlib-cli validator-engine lite-client \
pow-miner validator-engine-console generate-random-id json2tlo dht-server http-proxy rldp-http-proxy \
adnl-proxy create-state emulator \
test-ed25519 test-ed25519-crypto test-bigint test-vm test-fift test-cells test-smartcont test-net test-tdactor \
test-tdutils test-tonlib-offline test-adnl test-dht test-rldp test-rldp2 test-catchain test-fec test-tddb test-db test-validator-session-state
- name: Strip binaries
run: |
strip -g build/storage/storage-daemon/storage-daemon build/storage/storage-daemon/storage-daemon-cli build/crypto/fift build/crypto/tlbc build/crypto/func build/crypto/create-state build/validator-engine-console/validator-engine-console build/tonlib/tonlib-cli build/tonlib/libtonlibjson.so.0.5 build/http/http-proxy build/rldp-http-proxy/rldp-http-proxy build/dht-server/dht-server build/lite-client/lite-client build/validator-engine/validator-engine build/utils/generate-random-id build/utils/json2tlo build/adnl/adnl-proxy build/emulator/libemulator.*
- name: Run tests
run: |
cd build
ctest --output-on-failure -E "test-catchain|test-actors"
- name: Find & copy binaries
run: |
mkdir artifacts
cp build/storage/storage-daemon/storage-daemon build/storage/storage-daemon/storage-daemon-cli build/crypto/fift build/crypto/tlbc build/crypto/func build/crypto/create-state build/validator-engine-console/validator-engine-console build/tonlib/tonlib-cli build/tonlib/libtonlibjson.so.0.5 build/http/http-proxy build/rldp-http-proxy/rldp-http-proxy build/dht-server/dht-server build/lite-client/lite-client build/validator-engine/validator-engine build/utils/generate-random-id build/utils/json2tlo build/adnl/adnl-proxy build/emulator/libemulator.* artifacts
chmod +x artifacts/*
cp -R crypto/smartcont artifacts/
cp -R crypto/fift/lib artifacts/
- name: Simple binaries test
run: |
artifacts/validator-engine -V
artifacts/lite-client -V
artifacts/fift -V
artifacts/func -V
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
name: ton-ubuntu-binaries
path: artifacts

View file

@ -1,81 +0,0 @@
name: Ubuntu Compile x86-64
on: [push,workflow_dispatch,workflow_call]
jobs:
build:
strategy:
fail-fast: false
matrix:
os: [ubuntu-20.04, ubuntu-22.04]
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
with:
submodules: 'recursive'
- name: Install libraries
run: |
sudo apt update
sudo apt install -y build-essential git make cmake clang libgflags-dev zlib1g-dev libssl-dev libreadline-dev libmicrohttpd-dev pkg-config libgsl-dev python3 python3-dev ninja-build libsecp256k1-dev libsodium-dev
- name: Show CPU flags
run: |
cat /proc/cpuinfo
- name: Configure & Build
run: |
export CC=$(which clang)
export CXX=$(which clang++)
export CCACHE_DISABLE=1
mkdir build-${{ matrix.os }}
cd build-${{ matrix.os }}
git clone https://github.com/openssl/openssl openssl_3
cd openssl_3
git checkout openssl-3.1.4
./config
make build_libs -j4
cd ..
rootPath=`pwd`
cmake -GNinja -DOPENSSL_FOUND=1 -DOPENSSL_INCLUDE_DIR=$rootPath/openssl_3/include -DOPENSSL_CRYPTO_LIBRARY=$rootPath/openssl_3/libcrypto.a -DCMAKE_BUILD_TYPE=Release -DPORTABLE=1 -DTON_ARCH= ..
ninja storage-daemon storage-daemon-cli fift func tonlib tonlibjson tonlib-cli validator-engine lite-client \
pow-miner validator-engine-console generate-random-id json2tlo dht-server http-proxy rldp-http-proxy adnl-proxy \
create-state create-hardfork emulator \
test-ed25519 test-ed25519-crypto test-bigint test-vm test-fift test-cells test-smartcont test-net test-tdactor \
test-tdutils test-tonlib-offline test-adnl test-dht test-rldp test-rldp2 test-catchain test-fec test-tddb test-db test-validator-session-state
- name: Strip binaries
run: |
strip -g build-${{ matrix.os }}/storage/storage-daemon/storage-daemon build-${{ matrix.os }}/storage/storage-daemon/storage-daemon-cli build-${{ matrix.os }}/crypto/fift build-${{ matrix.os }}/crypto/tlbc build-${{ matrix.os }}/crypto/func build-${{ matrix.os }}/crypto/create-state build-${{ matrix.os }}/validator-engine-console/validator-engine-console build-${{ matrix.os }}/tonlib/tonlib-cli build-${{ matrix.os }}/tonlib/libtonlibjson.so.0.5 build-${{ matrix.os }}/http/http-proxy build-${{ matrix.os }}/rldp-http-proxy/rldp-http-proxy build-${{ matrix.os }}/dht-server/dht-server build-${{ matrix.os }}/lite-client/lite-client build-${{ matrix.os }}/validator-engine/validator-engine build-${{ matrix.os }}/utils/generate-random-id build-${{ matrix.os }}/utils/json2tlo build-${{ matrix.os }}/adnl/adnl-proxy build-${{ matrix.os }}/emulator/libemulator.*
- name: Run tests
run: |
cd build-${{ matrix.os }}
ctest --output-on-failure -E "test-catchain|test-actors"
- name: Find & copy binaries
run: |
mkdir artifacts-${{ matrix.os }}
cp build-${{ matrix.os }}/storage/storage-daemon/storage-daemon build-${{ matrix.os }}/storage/storage-daemon/storage-daemon-cli build-${{ matrix.os }}/crypto/fift build-${{ matrix.os }}/crypto/tlbc build-${{ matrix.os }}/crypto/func build-${{ matrix.os }}/crypto/create-state build-${{ matrix.os }}/validator-engine-console/validator-engine-console build-${{ matrix.os }}/tonlib/tonlib-cli build-${{ matrix.os }}/tonlib/libtonlibjson.so.0.5 build-${{ matrix.os }}/http/http-proxy build-${{ matrix.os }}/rldp-http-proxy/rldp-http-proxy build-${{ matrix.os }}/dht-server/dht-server build-${{ matrix.os }}/lite-client/lite-client build-${{ matrix.os }}/validator-engine/validator-engine build-${{ matrix.os }}/utils/generate-random-id build-${{ matrix.os }}/utils/json2tlo build-${{ matrix.os }}/adnl/adnl-proxy build-${{ matrix.os }}/emulator/libemulator.* artifacts-${{ matrix.os }}
chmod +x artifacts-${{ matrix.os }}/*
cp -R crypto/smartcont artifacts-${{ matrix.os }}
cp -R crypto/fift/lib artifacts-${{ matrix.os }}
- name: Simple binaries test
run: |
artifacts-${{ matrix.os }}/validator-engine -V
artifacts-${{ matrix.os }}/lite-client -V
artifacts-${{ matrix.os }}/fift -V
artifacts-${{ matrix.os }}/func -V
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
name: ton-binaries-${{ matrix.os }}
path: artifacts-${{ matrix.os }}

View file

@ -1,108 +0,0 @@
name: Windows Server 2019 x64 Compile
on: [push,workflow_dispatch,workflow_call]
defaults:
run:
shell: cmd
jobs:
build:
runs-on: windows-2019
steps:
- name: Get Current OS version
run: |
systeminfo | findstr /B /C:"OS Name" /C:"OS Version"
- name: Check out current repository
uses: actions/checkout@v3
with:
submodules: 'recursive'
- name: Check out zlib repository
uses: actions/checkout@v3
with:
repository: desktop-app/zlib
path: zlib
- name: Setup msbuild.exe
uses: microsoft/setup-msbuild@v1.1
- name: Install Pkg-config Lite
run: choco install pkgconfiglite
- name: Compile zlib Win64
run: |
cd zlib\contrib\vstudio\vc14
msbuild zlibstat.vcxproj /p:Configuration=ReleaseWithoutAsm /p:platform=x64 -p:PlatformToolset=v142
- name: Compile secp256k1 Win64
run: |
git clone https://github.com/libbitcoin/secp256k1.git
cd secp256k1\builds\msvc\vs2017
msbuild /p:Configuration=StaticRelease -p:PlatformToolset=v142 -p:Platform=x64
- name: Install pre-compiled libsodium Win64
run: |
curl -Lo libsodium-1.0.18-stable-msvc.zip https://download.libsodium.org/libsodium/releases/libsodium-1.0.18-stable-msvc.zip
unzip libsodium-1.0.18-stable-msvc.zip
- name: Install pre-compiled OpenSSL 3 Win64
run: |
curl -Lo openssl-3.1.4.zip https://github.com/neodiX42/precompiled-openssl-win64/raw/main/openssl-3.1.4.zip
unzip openssl-3.1.4.zip
- name: Install pre-compiled libmicrohttpd Win64
run: |
curl -Lo libmicrohttpd-0.9.77-w32-bin.zip https://github.com/neodiX42/precompiled-openssl-win64/raw/main/libmicrohttpd-0.9.77-w32-bin.zip
unzip libmicrohttpd-0.9.77-w32-bin.zip
- name: Install pre-compiled Readline Win64
run: |
curl -Lo readline-5.0-1-lib.zip https://github.com/neodiX42/precompiled-openssl-win64/raw/main/readline-5.0-1-lib.zip
unzip readline-5.0-1-lib.zip
- name: Compile
run: |
set root=%cd%
set SODIUM_DIR=%root%\libsodium
echo %root%
echo %SODIUM_DIR%
mkdir build
cd build
cmake -DSODIUM_USE_STATIC_LIBS=1 -DSECP256K1_INCLUDE_DIR=%root%\secp256k1\include -DSECP256K1_LIBRARY=%root%\secp256k1\bin\x64\Release\v142\static\secp256k1.lib -DREADLINE_INCLUDE_DIR=%root%\readline-5.0-1-lib\include\readline -DREADLINE_LIBRARY=%root%\readline-5.0-1-lib\lib\readline.lib -DPORTABLE=1 -DZLIB_FOUND=1 -DMHD_FOUND=1 -DMHD_LIBRARY=%root%\libmicrohttpd-0.9.77-w32-bin\x86_64\VS2019\Release-static\libmicrohttpd.lib -DMHD_INCLUDE_DIR=%root%\libmicrohttpd-0.9.77-w32-bin\x86_64\VS2019\Release-static -DZLIB_INCLUDE_DIR=%root%\zlib -DZLIB_LIBRARY=%root%\zlib\contrib\vstudio\vc14\x64\ZlibStatReleaseWithoutAsm\zlibstat.lib -DOPENSSL_FOUND=1 -DOPENSSL_INCLUDE_DIR=%root%/openssl-3.1.4/x64/include -DOPENSSL_CRYPTO_LIBRARY=%root%/openssl-3.1.4/x64/lib/libcrypto_static.lib -DCMAKE_CXX_FLAGS="/DTD_WINDOWS=1 /EHsc /bigobj /W0" ..
cmake --build . --config Release --target storage-daemon storage-daemon-cli blockchain-explorer fift func tonlib tonlibjson tonlib-cli validator-engine lite-client pow-miner validator-engine-console generate-random-id json2tlo dht-server http-proxy rldp-http-proxy adnl-proxy create-state create-hardfork emulator test-ed25519 test-ed25519-crypto test-bigint test-vm test-fift test-cells test-smartcont test-net test-tdactor test-tdutils test-tonlib-offline test-adnl test-dht test-rldp test-rldp2 test-catchain test-fec test-tddb test-db test-validator-session-state
- name: Run tests
run: |
cd build
ctest -C Release --output-on-failure -E "test-catchain|test-actors|test-validator-session-state"
- name: Show executables
run: |
cd build
del Release\test-*
dir *.exe /a-D /S /B
dir *.dll /a-D /S /B
- name: Check if validator-engine.exe exists
run: |
copy %cd%\build\validator-engine\Release\validator-engine.exe test
- name: Find & copy binaries
run: |
mkdir artifacts
mkdir artifacts\smartcont
mkdir artifacts\lib
for %%I in (build\storage\storage-daemon\Release\storage-daemon.exe build\storage\storage-daemon\Release\storage-daemon-cli.exe build\blockchain-explorer\blockchain-explorer.exe build\crypto\Release\fift.exe build\crypto\Release\tlbc.exe build\crypto\Release\func.exe build\crypto\Release\create-state.exe build\validator-engine-console\Release\validator-engine-console.exe build\tonlib\Release\tonlib-cli.exe build\tonlib\Release\tonlibjson.dll build\http\Release\http-proxy.exe build\rldp-http-proxy\Release\rldp-http-proxy.exe build\dht-server\Release\dht-server.exe build\lite-client\Release\lite-client.exe build\validator-engine\Release\validator-engine.exe build\utils\Release\generate-random-id.exe build\utils\Release\json2tlo.exe build\adnl\Release\adnl-proxy.exe build\emulator\Release\emulator.dll) do (strip -g %%I & copy %%I artifacts\)
xcopy /e /k /h /i crypto\smartcont artifacts\smartcont
xcopy /e /k /h /i crypto\fift\lib artifacts\lib
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
name: ton-win-binaries
path: artifacts

.gitignore
View file

@ -12,4 +12,14 @@ test/regression-tests.cache/
*.swp
**/*build*/
.idea
.vscode
.vscode
zlib/
libsodium/
libmicrohttpd-0.9.77-w32-bin/
readline-5.0-1-lib/
secp256k1/
openssl-3.1.4/
libsodium-1.0.18-stable-msvc.zip
libmicrohttpd-0.9.77-w32-bin.zip
openssl-3.1.4.zip
readline-5.0-1-lib.zip

View file

@ -2,23 +2,26 @@
# Once done this will define
#
# MHD_FOUND - system has MHD
# MHD_INCLUDE_DIRS - the MHD include directory
# MHD_INCLUDE_DIR - the MHD include directory
# MHD_LIBRARY - Link these to use MHD
find_path(
MHD_INCLUDE_DIR
NAMES microhttpd.h
DOC "microhttpd include dir"
)
if (NOT MHD_LIBRARY)
find_path(
MHD_INCLUDE_DIR
NAMES microhttpd.h
DOC "microhttpd include dir"
)
find_library(
MHD_LIBRARY
NAMES microhttpd microhttpd-10 libmicrohttpd libmicrohttpd-dll
DOC "microhttpd library"
)
find_library(
MHD_LIBRARY
NAMES microhttpd microhttpd-10 libmicrohttpd libmicrohttpd-dll
DOC "microhttpd library"
)
endif()
set(MHD_INCLUDE_DIRS ${MHD_INCLUDE_DIR})
set(MHD_LIBRARIES ${MHD_LIBRARY})
if (MHD_LIBRARY)
message(STATUS "Found MHD: ${MHD_LIBRARY}")
endif()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(MHD DEFAULT_MSG MHD_INCLUDE_DIR MHD_LIBRARY)

View file

@ -2,28 +2,27 @@
# Once done this will define
#
# SECP256K1_FOUND - system has SECP256K1
# SECP256K1_INCLUDE_DIRS - the SECP256K1 include directory
# SECP256K1_INCLUDE_DIR - the SECP256K1 include directory
# SECP256K1_LIBRARY - Link these to use SECP256K1
find_path(
SECP256K1_INCLUDE_DIR
NAMES secp256k1_recovery.h
DOC "secp256k1_recovery.h include dir"
)
if (NOT SECP256K1_LIBRARY)
find_path(
SECP256K1_INCLUDE_DIR
NAMES secp256k1_recovery.h
DOC "secp256k1_recovery.h include dir"
)
find_library(
SECP256K1_LIBRARY
NAMES secp256k1 libsecp256k1
DOC "secp256k1 library"
)
find_library(
SECP256K1_LIBRARY
NAMES secp256k1 libsecp256k1
DOC "secp256k1 library"
)
endif()
if (SECP256K1_LIBRARY)
message(STATUS "Found Secp256k1: ${SECP256K1_LIBRARY}")
endif()
set(SECP256K1_INCLUDE_DIRS ${SECP256K1_INCLUDE_DIR})
set(SECP256K1_LIBRARIES ${SECP256K1_LIBRARY})
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Secp256k1 DEFAULT_MSG SECP256K1_INCLUDE_DIR SECP256K1_LIBRARY)
mark_as_advanced(SECP256K1_INCLUDE_DIR SECP256K1_LIBRARY)

View file

@ -26,6 +26,7 @@
# Furthermore an imported "sodium" target is created.
#
if (CMAKE_C_COMPILER_ID STREQUAL "GNU"
OR CMAKE_C_COMPILER_ID STREQUAL "Clang")
set(_GCC_COMPATIBLE 1)

View file

@ -211,7 +211,13 @@ set(CMAKE_THREAD_PREFER_PTHREAD ON)
set(THREADS_PREFER_PTHREAD_FLAG ON)
find_package(Threads REQUIRED)
find_package(PkgConfig REQUIRED)
find_package(ZLIB REQUIRED)
if (NOT ZLIB_FOUND)
find_package(ZLIB REQUIRED)
else()
message(STATUS "Using zlib ${ZLIB_LIBRARIES}")
endif()
if (TON_ARCH AND NOT MSVC)
CHECK_CXX_COMPILER_FLAG( "-march=${TON_ARCH}" COMPILER_OPT_ARCH_SUPPORTED )
@ -371,6 +377,9 @@ if (LATEX_FOUND)
add_latex_document(doc/fiftbase.tex TARGET_NAME fift_basic_description)
add_latex_document(doc/catchain.tex TARGET_NAME catchain_consensus_description)
endif()
if (NOT LATEX_FOUND)
message(STATUS "Could NOT find LATEX (this is NOT an error)")
endif()
#END internal
function(target_link_libraries_system target)
@ -580,8 +589,8 @@ if (NOT NIX)
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/crypto/func/auto-tests)
if (WIN32)
set_property(TEST test-func PROPERTY ENVIRONMENT
"FUNC_EXECUTABLE=${CMAKE_CURRENT_BINARY_DIR}/crypto/Release/func.exe"
"FIFT_EXECUTABLE=${CMAKE_CURRENT_BINARY_DIR}/crypto/Release/fift.exe"
"FUNC_EXECUTABLE=${CMAKE_CURRENT_BINARY_DIR}/crypto/func.exe"
"FIFT_EXECUTABLE=${CMAKE_CURRENT_BINARY_DIR}/crypto/fift.exe"
"FIFTPATH=${CMAKE_CURRENT_SOURCE_DIR}/crypto/fift/lib/")
else()
set_property(TEST test-func PROPERTY ENVIRONMENT
@ -596,8 +605,8 @@ if (NOT NIX)
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/crypto/func/auto-tests)
if (WIN32)
set_property(TEST test-func-legacy PROPERTY ENVIRONMENT
"FUNC_EXECUTABLE=${CMAKE_CURRENT_BINARY_DIR}/crypto/Release/func.exe"
"FIFT_EXECUTABLE=${CMAKE_CURRENT_BINARY_DIR}/crypto/Release/fift.exe"
"FUNC_EXECUTABLE=${CMAKE_CURRENT_BINARY_DIR}/crypto/func.exe"
"FIFT_EXECUTABLE=${CMAKE_CURRENT_BINARY_DIR}/crypto/fift.exe"
"FIFTPATH=${CMAKE_CURRENT_SOURCE_DIR}/crypto/fift/lib/")
else()
set_property(TEST test-func-legacy PROPERTY ENVIRONMENT

View file

@ -1,3 +1,15 @@
## 2024.01 Update
1. Fixes in how gas spent by transactions on special accounts is accounted for in the block limit. Previously such gas was counted as usual, so the masterchain block limit had to be set to 37m gas in order to conduct elections, which cost more than 30m gas. To allow lowering the limit for safety reasons, it is proposed not to count gas spent on special accounts. In addition, `gas_max` is set to `special_gas_limit` for all types of transactions on special accounts. The new behavior is activated by setting `gas_prices_v3` in `ConfigParam 20` (an illustrative inspection command is shown after this list).
* Besides, the config update temporarily increases the gas limit on `EQD_v9j1rlsuHHw2FIhcsCFFSD367ldfDdCKcsNmNpIRzUlu` to `special_gas_limit`, see [details](https://t.me/tonstatus/88).
2. Improvements in liteserver (LS) behavior
* Improved detection of the state with all shards applied, to decrease the rate of `Block is not applied` errors
* Better error logs: `block not in db` and `block is not applied` are now reported separately
* Fixed an error in proof generation for blocks after a merge
3. Improvements in DHT operation and storage, CellDb, config.json amendment, peer misbehavior detection, validator session stats collection, and the emulator.
Besides the work of the core team, this update is based on the efforts of @XaBbl4 (peer misbehavior detection).
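As an illustration, the gas configuration referenced above can be inspected with the lite-client once a network global config is at hand; the config filename below is a placeholder, and the flags follow standard lite-client usage.
```bash
# Illustrative only: dump ConfigParam 20 (masterchain gas prices, where gas_prices_v3 is set);
# global.config.json is a placeholder for a network global config file.
lite-client -C global.config.json -c 'getconfig 20'
```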
## 2023.12 Update
1. Optimized message queue handling, now queue cleaning speed doesn't depend on total queue size

View file

@ -1,26 +1,29 @@
FROM ubuntu:20.04 as builder
FROM ubuntu:22.04 as builder
RUN apt-get update && \
DEBIAN_FRONTEND=noninteractive apt-get install -y build-essential cmake clang-6.0 openssl libssl-dev zlib1g-dev gperf wget git ninja-build libsecp256k1-dev libsodium-dev libmicrohttpd-dev pkg-config && \
DEBIAN_FRONTEND=noninteractive apt-get install -y build-essential cmake clang openssl libssl-dev zlib1g-dev gperf wget git ninja-build libsecp256k1-dev libsodium-dev libmicrohttpd-dev pkg-config autoconf automake libtool && \
rm -rf /var/lib/apt/lists/*
ENV CC clang-6.0
ENV CXX clang++-6.0
ENV CC clang
ENV CXX clang++
ENV CCACHE_DISABLE 1
WORKDIR /
RUN git clone --recursive https://github.com/ton-blockchain/ton
RUN mkdir ton
WORKDIR /ton
COPY ./ ./
RUN mkdir build && \
cd build && \
cmake -GNinja -DCMAKE_BUILD_TYPE=Release -DPORTABLE=1 -DTON_ARCH= -DCMAKE_CXX_FLAGS="-mavx2" .. && \
cmake -GNinja -DCMAKE_BUILD_TYPE=Release -DPORTABLE=1 -DTON_ARCH= .. && \
ninja storage-daemon storage-daemon-cli tonlibjson fift func validator-engine validator-engine-console generate-random-id dht-server lite-client
FROM ubuntu:20.04
FROM ubuntu:22.04
RUN apt-get update && \
apt-get install -y openssl wget libatomic1 && \
rm -rf /var/lib/apt/lists/*
apt-get install -y wget libatomic1 openssl libsecp256k1-dev libsodium-dev libmicrohttpd-dev && \
rm -rf /var/lib/apt/lists/*
RUN mkdir -p /var/ton-work/db && \
mkdir -p /var/ton-work/db/static
mkdir -p /var/ton-work/db/static
COPY --from=builder /ton/build/storage/storage-daemon/storage-daemon /usr/local/bin/
COPY --from=builder /ton/build/storage/storage-daemon/storage-daemon-cli /usr/local/bin/
@ -30,7 +33,7 @@ COPY --from=builder /ton/build/validator-engine-console/validator-engine-console
COPY --from=builder /ton/build/utils/generate-random-id /usr/local/bin/
WORKDIR /var/ton-work/db
COPY init.sh control.template ./
COPY ./docker/init.sh ./docker/control.template ./
RUN chmod +x init.sh
ENTRYPOINT ["/var/ton-work/db/init.sh"]
ENTRYPOINT ["/var/ton-work/db/init.sh"]
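For local use, the image can be built from the repository root, mirroring the updated workflow context (`context: ./`); the tag name is illustrative and the Dockerfile is assumed to sit at the repository root, as the new `COPY ./docker/...` paths suggest.
```bash
# Local equivalent of the CI docker/build-push-action step after this change
docker build -t ton-local .
```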

View file

@ -43,7 +43,7 @@ __The Open Network (TON)__ is a fast, secure, scalable blockchain focused on han
- To work on TON check [wallets](https://ton.app/wallets), [explorers](https://ton.app/explorers), [DEXes](https://ton.app/dex) and [utilities](https://ton.app/utilities)
- To interact with TON check [APIs](https://ton.org/docs/develop/dapps/apis/)
## Updates flow:
## Updates flow
* **master branch** - mainnet is running on this stable branch.
@ -61,12 +61,90 @@ Usually, the response to your pull request will indicate which section it falls
* Thou shall not merge your own PRs, at least one person should review the PR and merge it (4-eyes rule)
* Thou shall make sure that workflows are cleanly completed for your PR before considering merge
## Workflows responsibility
If a CI workflow fails not because of your changes but workflow issues, try to fix it yourself or contact one of the persons listed below via Telegram messenger:
* **C/C++ CI (ccpp-linux.yml)**: TBD
* **C/C++ CI Win64 Compile (ccpp-win64.yml)**: TBD
## Build TON blockchain
### Ubuntu 20.04, 22.04 (x86-64, aarch64)
Install additional system libraries
```bash
sudo apt-get update
sudo apt-get install -y build-essential git cmake ninja-build zlib1g-dev libsecp256k1-dev libmicrohttpd-dev libsodium-dev
wget https://apt.llvm.org/llvm.sh
chmod +x llvm.sh
sudo ./llvm.sh 16 all
```
Compile TON binaries
```bash
cp assembly/native/build-ubuntu-shared.sh .
chmod +x build-ubuntu-shared.sh
./build-ubuntu-shared.sh
```
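The CI jobs in this PR run the same script as `./build-ubuntu-shared.sh -t -a`; by analogy with the other build scripts under `assembly/`, `-t` builds and runs the tests and `-a` collects the binaries into an `artifacts/` directory. A quick smoke test, assuming the usual `build/` tree used by this repository's workflows:
```bash
# Smoke test mirroring the "Simple binaries test" CI step;
# paths assume the standard build/ layout used elsewhere in the repository.
build/validator-engine/validator-engine -V
build/lite-client/lite-client -V
build/crypto/fift -V
build/crypto/func -V
```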
### MacOS 11, 12 (x86-64, aarch64)
```bash
cp assembly/native/build-macos-shared.sh .
chmod +x build-macos-shared.sh
./build-macos-shared.sh
```
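As in the CI jobs, `-t` additionally builds and runs the tests and `-a` collects the resulting binaries and libraries into an `artifacts/` directory:
```bash
# Same invocation used by the GitHub Actions and Jenkins jobs in this PR
./build-macos-shared.sh -t -a
```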
### Windows 10, 11, Server (x86-64)
You need to install `MS Visual Studio 2022` first.
Go to https://www.visualstudio.com/downloads/ and download `MS Visual Studio 2022 Community`.
Launch the installer and select `Desktop development with C++`.
After installation, also make sure that `cmake` is globally available by adding
`C:\Program Files\Microsoft Visual Studio\2022\Community\Common7\IDE\CommonExtensions\Microsoft\CMake\CMake\bin` to the system `PATH` (adjust the path per your needs).
Open an elevated (Run as Administrator) `x64 Native Tools Command Prompt for VS 2022`, go to the root folder and execute:
```bash
copy assembly\native\build-windows.bat .
build-windows.bat
```
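The Windows CI job added in this PR drives the build through `build-windows-github.bat`, passing `Enterprise` (the Visual Studio edition available on GitHub-hosted runners):
```bash
copy assembly\native\build-windows-github.bat .
copy assembly\native\build-windows.bat .
build-windows-github.bat Enterprise
```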
### Building TON to WebAssembly
Install additional system libraries on Ubuntu
```bash
sudo apt-get update
sudo apt-get install -y build-essential git cmake ninja-build zlib1g-dev libsecp256k1-dev libmicrohttpd-dev libsodium-dev
wget https://apt.llvm.org/llvm.sh
chmod +x llvm.sh
sudo ./llvm.sh 16 all
```
Compile TON binaries with Emscripten
```bash
cd assembly/wasm
chmod +x fift-func-wasm-build-ubuntu.sh
./fift-func-wasm-build-ubuntu.sh
```
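As in the Emscripten workflow added by this PR, `-a` makes the script collect the resulting wasm/js outputs into an `artifacts/` directory:
```bash
# Same invocation as the "Emscripten TON build (wasm)" workflow
cd assembly/wasm
chmod +x fift-func-wasm-build-ubuntu.sh
./fift-func-wasm-build-ubuntu.sh -a
```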
### Building TON tonlib library for Android (arm64-v8a, armeabi-v7a, x86, x86-64)
Install additional system libraries on Ubuntu
```bash
sudo apt-get update
sudo apt-get install -y build-essential git cmake ninja-build automake libtool texinfo autoconf libgflags-dev \
zlib1g-dev libssl-dev libreadline-dev libmicrohttpd-dev pkg-config libgsl-dev python3 python3-dev \
libtool autoconf libsodium-dev libsecp256k1-dev
```
Compile TON tonlib library
```bash
cp assembly/android/build-android-tonlib.sh .
chmod +x build-android-tonlib.sh
./build-android-tonlib.sh
```
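Passing `-a`, as the CI job does, additionally copies the generated `TonApi.java` and the per-ABI JNI libraries into `artifacts/tonlib-android-jni`:
```bash
# -a collects the JNI bindings and shared libraries (see assembly/android/build-android-tonlib.sh)
./build-android-tonlib.sh -a
ls artifacts/tonlib-android-jni
```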
### Build TON portable binaries with the Nix package manager
You need to install Nix first.
```bash
sh <(curl -L https://nixos.org/nix/install) --daemon
```
Then compile TON with Nix by executing the command below from the root folder:
```bash
cp -r assembly/nix/* .
export NIX_PATH=nixpkgs=https://github.com/nixOS/nixpkgs/archive/23.05.tar.gz
nix-build linux-x86-64-static.nix
```
More examples for other platforms can be found under `assembly/nix`.
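`assembly/nix` also ships wrapper scripts that the CI jobs in this PR invoke directly; for Linux x86-64:
```bash
# Wrapper used by the "Ubuntu TON build (portable, x86-64)" workflow and the Jenkins pipeline
cp assembly/nix/build-linux-x86-64-nix.sh .
chmod +x build-linux-x86-64-nix.sh
./build-linux-x86-64-nix.sh
```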
## Running tests

View file

@ -0,0 +1,55 @@
#!/bin/bash
# -a: copy the generated TonApi.java and per-ABI JNI libraries into artifacts/tonlib-android-jni
with_artifacts=false
while getopts 'a' flag; do
case "${flag}" in
a) with_artifacts=true ;;
*) break
;;
esac
done
if [ ! -d android-ndk-r25b ]; then
rm android-ndk-r25b-linux.zip
wget -q https://dl.google.com/android/repository/android-ndk-r25b-linux.zip
unzip -q android-ndk-r25b-linux.zip
test $? -eq 0 || { echo "Can't unzip android-ndk-r25b-linux.zip"; exit 1; }
echo Android NDK extracted
else
echo Using extracted Android NDK
fi
export JAVA_AWT_LIBRARY=NotNeeded
export JAVA_JVM_LIBRARY=NotNeeded
export JAVA_INCLUDE_PATH=${JAVA_HOME}/include
export JAVA_AWT_INCLUDE_PATH=${JAVA_HOME}/include
export JAVA_INCLUDE_PATH2=${JAVA_HOME}/include/linux
export ANDROID_NDK_ROOT=$(pwd)/android-ndk-r25b
export NDK_PLATFORM="android-21"
export ANDROID_PLATFORM="android-21"
export OPENSSL_DIR=$(pwd)/example/android/third_party/crypto
rm -rf example/android/src/drinkless/org/ton/TonApi.java
cd example/android/
rm CMakeCache.txt .ninja_*
cmake -GNinja -DTON_ONLY_TONLIB=ON .
test $? -eq 0 || { echo "Can't configure TON"; exit 1; }
ninja prepare_cross_compiling
test $? -eq 0 || { echo "Can't compile prepare_cross_compiling"; exit 1; }
rm CMakeCache.txt .ninja_*
. ./build-all.sh
find . -name "*.debug" -type f -delete
if [ "$with_artifacts" = true ]; then
cd ../..
mkdir -p artifacts/tonlib-android-jni
cp example/android/src/drinkless/org/ton/TonApi.java artifacts/tonlib-android-jni/
cp -R example/android/libs/* artifacts/tonlib-android-jni/
fi

View file

@ -0,0 +1,236 @@
pipeline {
agent none
stages {
stage('Run Builds') {
parallel {
stage('Ubuntu 20.04 x86-64 (shared)') {
agent {
label 'Ubuntu_x86-64'
}
steps {
timeout(time: 90, unit: 'MINUTES') {
sh '''
cp assembly/native/build-ubuntu-shared.sh .
chmod +x build-ubuntu-shared.sh
./build-ubuntu-shared.sh -t -a
'''
sh '''
cd artifacts
zip -9r ton-x86_64-linux-shared ./*
'''
archiveArtifacts artifacts: 'artifacts/ton-x86_64-linux-shared.zip'
}
}
}
stage('Ubuntu 20.04 x86-64 (portable)') {
agent {
label 'Ubuntu_x86-64'
}
steps {
timeout(time: 90, unit: 'MINUTES') {
sh '''
cp assembly/nix/build-linux-x86-64-nix.sh .
chmod +x build-linux-x86-64-nix.sh
./build-linux-x86-64-nix.sh
'''
sh '''
cd artifacts
zip -9r ton-x86-64-linux-portable ./*
'''
archiveArtifacts artifacts: 'artifacts/ton-x86-64-linux-portable.zip'
}
}
}
stage('Ubuntu 20.04 aarch64 (shared)') {
agent {
label 'Ubuntu_arm64'
}
steps {
timeout(time: 90, unit: 'MINUTES') {
sh '''
cp assembly/native/build-ubuntu-shared.sh .
chmod +x build-ubuntu-shared.sh
./build-ubuntu-shared.sh -t -a
'''
sh '''
cd artifacts
zip -9r ton-arm64-linux-shared ./*
'''
archiveArtifacts artifacts: 'artifacts/ton-arm64-linux-shared.zip'
}
}
}
stage('Ubuntu 20.04 aarch64 (portable)') {
agent {
label 'Ubuntu_arm64'
}
steps {
timeout(time: 90, unit: 'MINUTES') {
sh '''
cp assembly/nix/build-linux-arm64-nix.sh .
chmod +x build-linux-arm64-nix.sh
./build-linux-arm64-nix.sh
'''
sh '''
cd artifacts
zip -9r ton-arm64-linux-portable ./*
'''
archiveArtifacts artifacts: 'artifacts/ton-arm64-linux-portable.zip'
}
}
}
stage('macOS 12.7 x86-64 (shared)') {
agent {
label 'macOS_12.7_x86-64'
}
steps {
timeout(time: 90, unit: 'MINUTES') {
sh '''
cp assembly/native/build-macos-shared.sh .
chmod +x build-macos-shared.sh
./build-macos-shared.sh -t -a
'''
sh '''
cd artifacts
zip -9r ton-x86-64-macos-shared ./*
'''
archiveArtifacts artifacts: 'artifacts/ton-x86-64-macos-shared.zip'
}
}
}
stage('macOS 12.7 x86-64 (portable)') {
agent {
label 'macOS_12.7_x86-64'
}
steps {
timeout(time: 90, unit: 'MINUTES') {
sh '''
cp assembly/nix/build-macos-nix.sh .
chmod +x build-macos-nix.sh
./build-macos-nix.sh
'''
sh '''
cd artifacts
zip -9r ton-x86-64-macos-portable ./*
'''
archiveArtifacts artifacts: 'artifacts/ton-x86-64-macos-portable.zip'
}
}
}
stage('macOS 12.6 aarch64 (shared)') {
agent {
label 'macOS_12.6-arm64-m1'
}
steps {
timeout(time: 90, unit: 'MINUTES') {
sh '''
cp assembly/native/build-macos-shared.sh .
chmod +x build-macos-shared.sh
./build-macos-shared.sh -t -a
'''
sh '''
cd artifacts
zip -9r ton-arm64-macos-m1-shared ./*
'''
archiveArtifacts artifacts: 'artifacts/ton-arm64-macos-m1-shared.zip'
}
}
}
stage('macOS 12.6 aarch64 (portable)') {
agent {
label 'macOS_12.6-arm64-m1'
}
steps {
timeout(time: 90, unit: 'MINUTES') {
sh '''
cp assembly/nix/build-macos-nix.sh .
chmod +x build-macos-nix.sh
./build-macos-nix.sh
'''
sh '''
cd artifacts
zip -9r ton-arm64-macos-portable ./*
'''
archiveArtifacts artifacts: 'artifacts/ton-arm64-macos-portable.zip'
}
}
}
stage('macOS 13.2 aarch64 (shared)') {
agent {
label 'macOS_13.2-arm64-m2'
}
steps {
timeout(time: 90, unit: 'MINUTES') {
sh '''
cp assembly/native/build-macos-shared.sh .
chmod +x build-macos-shared.sh
./build-macos-shared.sh -t -a
'''
sh '''
cd artifacts
zip -9r ton-arm64-macos-m2-shared ./*
'''
archiveArtifacts artifacts: 'artifacts/ton-arm64-macos-m2-shared.zip'
}
}
}
stage('Windows Server 2022 x86-64') {
agent {
label 'Windows_x86-64'
}
steps {
timeout(time: 90, unit: 'MINUTES') {
bat '''
copy assembly\\native\\build-windows.bat .
build-windows.bat
'''
bat '''
cd artifacts
zip -9r ton-x86-64-windows ./*
'''
archiveArtifacts artifacts: 'artifacts/ton-x86-64-windows.zip'
}
}
}
stage('Android Tonlib') {
agent {
label 'Ubuntu_x86-64'
}
steps {
timeout(time: 90, unit: 'MINUTES') {
sh '''
cp assembly/android/build-android-tonlib.sh .
chmod +x build-android-tonlib.sh
./build-android-tonlib.sh -a
'''
sh '''
cd artifacts/tonlib-android-jni
zip -9r ton-android-tonlib ./*
'''
archiveArtifacts artifacts: 'artifacts/tonlib-android-jni/ton-android-tonlib.zip'
}
}
}
stage('WASM fift func emulator') {
agent {
label 'Ubuntu_x86-64'
}
steps {
timeout(time: 90, unit: 'MINUTES') {
sh '''
cd assembly/wasm
chmod +x fift-func-wasm-build-ubuntu.sh
./fift-func-wasm-build-ubuntu.sh -a
'''
sh '''
cd artifacts
zip -9r ton-wasm-binaries ./*
'''
archiveArtifacts artifacts: 'artifacts/ton-wasm-binaries.zip'
}
}
}
}
}
}
}

View file

@ -0,0 +1,207 @@
#!/bin/bash
# Flags: -t build and run tests, -a collect binaries into artifacts/, -o <version> set the macOS deployment target (default 10.15)
with_tests=false
with_artifacts=false
OSX_TARGET=10.15
while getopts 'tao:' flag; do
case "${flag}" in
t) with_tests=true ;;
a) with_artifacts=true ;;
o) OSX_TARGET=${OPTARG} ;;
*) break
;;
esac
done
if [ ! -d "build" ]; then
mkdir build
cd build
else
cd build
rm -rf .ninja* CMakeCache.txt
fi
export NONINTERACTIVE=1
brew install ninja pkg-config automake libtool autoconf
brew install llvm@16
if [ -f /opt/homebrew/opt/llvm@16/bin/clang ]; then
export CC=/opt/homebrew/opt/llvm@16/bin/clang
export CXX=/opt/homebrew/opt/llvm@16/bin/clang++
else
export CC=/usr/local/opt/llvm@16/bin/clang
export CXX=/usr/local/opt/llvm@16/bin/clang++
fi
export CCACHE_DISABLE=1
if [ ! -d "secp256k1" ]; then
git clone https://github.com/bitcoin-core/secp256k1.git
cd secp256k1
secp256k1Path=`pwd`
git checkout v0.3.2
./autogen.sh
./configure --enable-module-recovery --enable-static --disable-tests --disable-benchmark --with-pic
make -j12
test $? -eq 0 || { echo "Can't compile secp256k1"; exit 1; }
cd ..
else
secp256k1Path=$(pwd)/secp256k1
echo "Using compiled secp256k1"
fi
if [ ! -d "libsodium" ]; then
export LIBSODIUM_FULL_BUILD=1
git clone https://github.com/jedisct1/libsodium.git
cd libsodium
sodiumPath=`pwd`
git checkout 1.0.18
./autogen.sh
./configure --with-pic --enable-static
make -j12
test $? -eq 0 || { echo "Can't compile libsodium"; exit 1; }
cd ..
else
sodiumPath=$(pwd)/libsodium
echo "Using compiled libsodium"
fi
if [ ! -d "openssl_3" ]; then
git clone https://github.com/openssl/openssl openssl_3
cd openssl_3
opensslPath=`pwd`
git checkout openssl-3.1.4
./config -static
make build_libs -j12
test $? -eq 0 || { echo "Can't compile openssl_3"; exit 1; }
cd ..
else
opensslPath=$(pwd)/openssl_3
echo "Using compiled openssl_3"
fi
if [ ! -d "zlib" ]; then
git clone https://github.com/madler/zlib.git
cd zlib
zlibPath=`pwd`
./configure --static
make -j12
test $? -eq 0 || { echo "Can't compile zlib"; exit 1; }
cd ..
else
zlibPath=$(pwd)/zlib
echo "Using compiled zlib"
fi
if [ ! -d "libmicrohttpd" ]; then
git clone https://git.gnunet.org/libmicrohttpd.git
cd libmicrohttpd
libmicrohttpdPath=`pwd`
./autogen.sh
./configure --enable-static --disable-tests --disable-benchmark --disable-shared --disable-https --with-pic
make -j12
test $? -eq 0 || { echo "Can't compile libmicrohttpd"; exit 1; }
cd ..
else
libmicrohttpdPath=$(pwd)/libmicrohttpd
echo "Using compiled libmicrohttpd"
fi
cmake -GNinja .. \
-DPORTABLE=1 \
-DCMAKE_OSX_DEPLOYMENT_TARGET:STRING=$OSX_TARGET \
-DCMAKE_CXX_FLAGS="-stdlib=libc++" \
-DCMAKE_BUILD_TYPE=Release \
-DOPENSSL_FOUND=1 \
-DOPENSSL_INCLUDE_DIR=$opensslPath/include \
-DOPENSSL_CRYPTO_LIBRARY=$opensslPath/libcrypto.a \
-DZLIB_FOUND=1 \
-DZLIB_INCLUDE_DIR=$zlibPath \
-DZLIB_LIBRARIES=$zlibPath/libz.a \
-DSECP256K1_FOUND=1 \
-DSECP256K1_INCLUDE_DIR=$secp256k1Path/include \
-DSECP256K1_LIBRARY=$secp256k1Path/.libs/libsecp256k1.a \
-DSODIUM_FOUND=1 \
-DSODIUM_INCLUDE_DIR=$sodiumPath/src/libsodium/include \
-DSODIUM_LIBRARY_RELEASE=$sodiumPath/src/libsodium/.libs/libsodium.a \
-DMHD_FOUND=1 \
-DMHD_INCLUDE_DIR=$libmicrohttpdPath/src/include \
-DMHD_LIBRARY=$libmicrohttpdPath/src/microhttpd/.libs/libmicrohttpd.a
test $? -eq 0 || { echo "Can't configure ton"; exit 1; }
if [ "$with_tests" = true ]; then
ninja storage-daemon storage-daemon-cli blockchain-explorer \
tonlib tonlibjson tonlib-cli validator-engine func fift \
lite-client pow-miner validator-engine-console generate-random-id json2tlo dht-server \
http-proxy rldp-http-proxy adnl-proxy create-state create-hardfork tlbc emulator \
test-ed25519 test-ed25519-crypto test-bigint test-vm test-fift test-cells test-smartcont \
test-net test-tdactor test-tdutils test-tonlib-offline test-adnl test-dht test-rldp \
test-rldp2 test-catchain test-fec test-tddb test-db test-validator-session-state
test $? -eq 0 || { echo "Can't compile ton"; exit 1; }
else
ninja storage-daemon storage-daemon-cli blockchain-explorer \
tonlib tonlibjson tonlib-cli validator-engine func fift \
lite-client pow-miner validator-engine-console generate-random-id json2tlo dht-server \
http-proxy rldp-http-proxy adnl-proxy create-state create-hardfork tlbc emulator
test $? -eq 0 || { echo "Can't compile ton"; exit 1; }
fi
strip storage/storage-daemon/storage-daemon
strip storage/storage-daemon/storage-daemon-cli
strip blockchain-explorer/blockchain-explorer
strip crypto/fift
strip crypto/func
strip crypto/create-state
strip crypto/tlbc
strip validator-engine-console/validator-engine-console
strip tonlib/tonlib-cli
strip http/http-proxy
strip rldp-http-proxy/rldp-http-proxy
strip dht-server/dht-server
strip lite-client/lite-client
strip validator-engine/validator-engine
strip utils/generate-random-id
strip utils/json2tlo
strip adnl/adnl-proxy
cd ..
if [ "$with_artifacts" = true ]; then
echo Creating artifacts...
rm -rf artifacts
mkdir artifacts
cp crypto/fift/lib artifacts/
cp -R crypto/smartcont/ artifacts/
cp build/storage/storage-daemon/storage-daemon artifacts/
cp build/storage/storage-daemon/storage-daemon-cli artifacts/
cp build/blockchain-explorer/blockchain-explorer artifacts/
cp build/crypto/fift artifacts/
cp build/crypto/func artifacts/
cp build/crypto/create-state artifacts/
cp build/crypto/tlbc artifacts/
cp build/validator-engine-console/validator-engine-console artifacts/
cp build/tonlib/tonlib-cli artifacts/
cp build/tonlib/libtonlibjson.0.5.dylib artifacts/libtonlibjson.dylib
cp build/http/http-proxy artifacts/
cp build/rldp-http-proxy/rldp-http-proxy artifacts/
cp build/dht-server/dht-server artifacts/
cp build/lite-client/lite-client artifacts/
cp build/validator-engine/validator-engine artifacts/
cp build/utils/generate-random-id artifacts/
cp build/utils/json2tlo artifacts/
cp build/adnl/adnl-proxy artifacts/
cp build/emulator/libemulator.dylib artifacts/
chmod +x artifacts/*
rsync -r crypto/smartcont artifacts/
rsync -r crypto/fift/lib artifacts/
fi
if [ "$with_tests" = true ]; then
cd build
# ctest --output-on-failure -E "test-catchain|test-actors"
ctest --output-on-failure
fi

View file

@ -0,0 +1,136 @@
#!/bin/bash
# Flags: -t build and run tests, -a collect binaries into artifacts/, -o <version> override OSX_TARGET (default 10.15)
with_tests=false
with_artifacts=false
OSX_TARGET=10.15
while getopts 'tao:' flag; do
case "${flag}" in
t) with_tests=true ;;
a) with_artifacts=true ;;
o) OSX_TARGET=${OPTARG} ;;
*) break
;;
esac
done
if [ ! -d "build" ]; then
mkdir build
cd build
else
cd build
rm -rf .ninja* CMakeCache.txt
fi
export NONINTERACTIVE=1
brew install ninja libsodium libmicrohttpd pkg-config automake libtool autoconf gnutls
brew install llvm@16
if [ -f /opt/homebrew/opt/llvm@16/bin/clang ]; then
export CC=/opt/homebrew/opt/llvm@16/bin/clang
export CXX=/opt/homebrew/opt/llvm@16/bin/clang++
else
export CC=/usr/local/opt/llvm@16/bin/clang
export CXX=/usr/local/opt/llvm@16/bin/clang++
fi
export CCACHE_DISABLE=1
if [ ! -d "secp256k1" ]; then
git clone https://github.com/bitcoin-core/secp256k1.git
cd secp256k1
secp256k1Path=`pwd`
git checkout v0.3.2
./autogen.sh
./configure --enable-module-recovery --enable-static --disable-tests --disable-benchmark
make -j12
test $? -eq 0 || { echo "Can't compile secp256k1"; exit 1; }
cd ..
else
secp256k1Path=$(pwd)/secp256k1
echo "Using compiled secp256k1"
fi
brew unlink openssl@1.1
brew install openssl@3
brew unlink openssl@3 && brew link --overwrite openssl@3
cmake -GNinja -DCMAKE_BUILD_TYPE=Release .. \
-DCMAKE_CXX_FLAGS="-stdlib=libc++" \
-DSECP256K1_FOUND=1 \
-DSECP256K1_INCLUDE_DIR=$secp256k1Path/include \
-DSECP256K1_LIBRARY=$secp256k1Path/.libs/libsecp256k1.a
test $? -eq 0 || { echo "Can't configure ton"; exit 1; }
if [ "$with_tests" = true ]; then
ninja storage-daemon storage-daemon-cli blockchain-explorer \
tonlib tonlibjson tonlib-cli validator-engine func fift \
lite-client pow-miner validator-engine-console generate-random-id json2tlo dht-server \
http-proxy rldp-http-proxy adnl-proxy create-state create-hardfork tlbc emulator \
test-ed25519 test-ed25519-crypto test-bigint test-vm test-fift test-cells test-smartcont \
test-net test-tdactor test-tdutils test-tonlib-offline test-adnl test-dht test-rldp \
test-rldp2 test-catchain test-fec test-tddb test-db test-validator-session-state
test $? -eq 0 || { echo "Can't compile ton"; exit 1; }
else
ninja storage-daemon storage-daemon-cli blockchain-explorer \
tonlib tonlibjson tonlib-cli validator-engine func fift \
lite-client pow-miner validator-engine-console generate-random-id json2tlo dht-server \
http-proxy rldp-http-proxy adnl-proxy create-state create-hardfork tlbc emulator
test $? -eq 0 || { echo "Can't compile ton"; exit 1; }
fi
strip storage/storage-daemon/storage-daemon
strip storage/storage-daemon/storage-daemon-cli
strip blockchain-explorer/blockchain-explorer
strip crypto/fift
strip crypto/func
strip crypto/create-state
strip crypto/tlbc
strip validator-engine-console/validator-engine-console
strip tonlib/tonlib-cli
strip http/http-proxy
strip rldp-http-proxy/rldp-http-proxy
strip dht-server/dht-server
strip lite-client/lite-client
strip validator-engine/validator-engine
strip utils/generate-random-id
strip utils/json2tlo
strip adnl/adnl-proxy
cd ..
if [ "$with_artifacts" = true ]; then
echo Creating artifacts...
rm -rf artifacts
mkdir artifacts
cp build/storage/storage-daemon/storage-daemon artifacts/
cp build/storage/storage-daemon/storage-daemon-cli artifacts/
cp build/blockchain-explorer/blockchain-explorer artifacts/
cp build/crypto/fift artifacts/
cp build/crypto/func artifacts/
cp build/crypto/create-state artifacts/
cp build/crypto/tlbc artifacts/
cp build/validator-engine-console/validator-engine-console artifacts/
cp build/tonlib/tonlib-cli artifacts/
cp build/tonlib/libtonlibjson.0.5.dylib artifacts/libtonlibjson.dylib
cp build/http/http-proxy artifacts/
cp build/rldp-http-proxy/rldp-http-proxy artifacts/
cp build/dht-server/dht-server artifacts/
cp build/lite-client/lite-client artifacts/
cp build/validator-engine/validator-engine artifacts/
cp build/utils/generate-random-id artifacts/
cp build/utils/json2tlo artifacts/
cp build/adnl/adnl-proxy artifacts/
cp build/emulator/libemulator.dylib artifacts/
chmod +x artifacts/*
rsync -r crypto/smartcont artifacts/
rsync -r crypto/fift/lib artifacts/
fi
if [ "$with_tests" = true ]; then
cd build
# ctest --output-on-failure -E "test-catchain|test-actors"
ctest --output-on-failure --timeout 1800
fi

View file

@ -0,0 +1,198 @@
#!/bin/bash
#sudo apt-get update
#sudo apt-get install -y build-essential git cmake ninja-build automake libtool texinfo autoconf
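# Flags:
#   -t  also build the test targets and run ctest
#   -a  copy the built binaries and libraries into artifacts/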
with_tests=false
with_artifacts=false
while getopts 'ta' flag; do
case "${flag}" in
t) with_tests=true ;;
a) with_artifacts=true ;;
*) break
;;
esac
done
if [ ! -d "build" ]; then
mkdir build
cd build
else
cd build
rm -rf .ninja* CMakeCache.txt
fi
export CC=$(which clang-16)
export CXX=$(which clang++-16)
export CCACHE_DISABLE=1
if [ ! -d "secp256k1" ]; then
git clone https://github.com/bitcoin-core/secp256k1.git
cd secp256k1
secp256k1Path=`pwd`
git checkout v0.3.2
./autogen.sh
./configure --enable-module-recovery --enable-static --disable-tests --disable-benchmark --with-pic
make -j12
test $? -eq 0 || { echo "Can't compile secp256k1"; exit 1; }
cd ..
# ./.libs/libsecp256k1.a
# ./include
else
secp256k1Path=$(pwd)/secp256k1
echo "Using compiled secp256k1"
fi
if [ ! -d "libsodium" ]; then
export LIBSODIUM_FULL_BUILD=1
git clone https://github.com/jedisct1/libsodium.git
cd libsodium
sodiumPath=`pwd`
git checkout 1.0.18
./autogen.sh
./configure --with-pic --enable-static
make -j12
test $? -eq 0 || { echo "Can't compile libsodium"; exit 1; }
cd ..
else
sodiumPath=$(pwd)/libsodium
echo "Using compiled libsodium"
fi
if [ ! -d "openssl_3" ]; then
git clone https://github.com/openssl/openssl openssl_3
cd openssl_3
opensslPath=`pwd`
git checkout openssl-3.1.4
./config -static
make build_libs -j12
test $? -eq 0 || { echo "Can't compile openssl_3"; exit 1; }
cd ..
else
opensslPath=$(pwd)/openssl_3
echo "Using compiled openssl_3"
fi
if [ ! -d "zlib" ]; then
git clone https://github.com/madler/zlib.git
cd zlib
zlibPath=`pwd`
./configure --static
make -j12
test $? -eq 0 || { echo "Can't compile zlib"; exit 1; }
cd ..
else
zlibPath=$(pwd)/zlib
echo "Using compiled zlib"
fi
if [ ! -d "libmicrohttpd" ]; then
git clone https://git.gnunet.org/libmicrohttpd.git
cd libmicrohttpd
libmicrohttpdPath=`pwd`
./autogen.sh
./configure --enable-static --disable-tests --disable-benchmark --disable-shared --disable-https --with-pic
make -j12
test $? -eq 0 || { echo "Can't compile libmicrohttpd"; exit 1; }
cd ..
else
libmicrohttpdPath=$(pwd)/libmicrohttpd
echo "Using compiled libmicrohttpd"
fi
cmake -GNinja .. \
-DPORTABLE=1 \
-DCMAKE_BUILD_TYPE=Release \
-DOPENSSL_FOUND=1 \
-DOPENSSL_INCLUDE_DIR=$opensslPath/include \
-DOPENSSL_CRYPTO_LIBRARY=$opensslPath/libcrypto.a \
-DZLIB_FOUND=1 \
-DZLIB_INCLUDE_DIR=$zlibPath \
-DZLIB_LIBRARIES=$zlibPath/libz.a \
-DSECP256K1_FOUND=1 \
-DSECP256K1_INCLUDE_DIR=$secp256k1Path/include \
-DSECP256K1_LIBRARY=$secp256k1Path/.libs/libsecp256k1.a \
-DSODIUM_FOUND=1 \
-DSODIUM_INCLUDE_DIR=$sodiumPath/src/libsodium/include \
-DSODIUM_LIBRARY_RELEASE=$sodiumPath/src/libsodium/.libs/libsodium.a \
-DMHD_FOUND=1 \
-DMHD_INCLUDE_DIR=$libmicrohttpdPath/src/include \
-DMHD_LIBRARY=$libmicrohttpdPath/src/microhttpd/.libs/libmicrohttpd.a
test $? -eq 0 || { echo "Can't configure ton"; exit 1; }
if [ "$with_tests" = true ]; then
ninja storage-daemon storage-daemon-cli fift func tonlib tonlibjson tonlib-cli \
validator-engine lite-client pow-miner validator-engine-console blockchain-explorer \
generate-random-id json2tlo dht-server http-proxy rldp-http-proxy \
adnl-proxy create-state emulator test-ed25519 test-ed25519-crypto test-bigint \
test-vm test-fift test-cells test-smartcont test-net test-tdactor test-tdutils \
test-tonlib-offline test-adnl test-dht test-rldp test-rldp2 test-catchain \
test-fec test-tddb test-db test-validator-session-state
test $? -eq 0 || { echo "Can't compile ton"; exit 1; }
else
ninja storage-daemon storage-daemon-cli fift func tonlib tonlibjson tonlib-cli \
validator-engine lite-client pow-miner validator-engine-console blockchain-explorer \
generate-random-id json2tlo dht-server http-proxy rldp-http-proxy \
adnl-proxy create-state emulator
test $? -eq 0 || { echo "Can't compile ton"; exit 1; }
fi
strip -g storage/storage-daemon/storage-daemon \
storage/storage-daemon/storage-daemon-cli \
blockchain-explorer/blockchain-explorer \
crypto/fift \
crypto/tlbc \
crypto/func \
crypto/create-state \
validator-engine-console/validator-engine-console \
tonlib/tonlib-cli \
tonlib/libtonlibjson.so.0.5 \
http/http-proxy \
rldp-http-proxy/rldp-http-proxy \
dht-server/dht-server \
lite-client/lite-client \
validator-engine/validator-engine \
utils/generate-random-id \
utils/json2tlo \
adnl/adnl-proxy \
emulator/libemulator.*
test $? -eq 0 || { echo "Can't strip final binaries"; exit 1; }
# quick smoke test of the built binaries
./storage/storage-daemon/storage-daemon -V || exit 1
./validator-engine/validator-engine -V || exit 1
./lite-client/lite-client -V || exit 1
./crypto/fift -V || exit 1
cd ..
if [ "$with_artifacts" = true ]; then
rm -rf artifacts
mkdir artifacts
cp crypto/fift/lib artifacts/
cp -R crypto/smartcont/ artifacts/
mv build/tonlib/libtonlibjson.so.0.5 build/tonlib/libtonlibjson.so
cp build/storage/storage-daemon/storage-daemon build/storage/storage-daemon/storage-daemon-cli \
build/crypto/fift build/crypto/tlbc build/crypto/func build/crypto/create-state build/blockchain-explorer/blockchain-explorer \
build/validator-engine-console/validator-engine-console build/tonlib/tonlib-cli \
build/tonlib/libtonlibjson.so build/http/http-proxy build/rldp-http-proxy/rldp-http-proxy \
build/dht-server/dht-server build/lite-client/lite-client build/validator-engine/validator-engine \
build/utils/generate-random-id build/utils/json2tlo build/adnl/adnl-proxy build/emulator/libemulator.so \
artifacts
test $? -eq 0 || { echo "Can't copy final binaries"; exit 1; }
chmod +x artifacts/*
cp -R crypto/smartcont artifacts
cp -R crypto/fift/lib artifacts
fi
if [ "$with_tests" = true ]; then
cd build
# ctest --output-on-failure -E "test-catchain|test-actors|test-smartcont|test-adnl|test-validator-session-state|test-dht|test-rldp"
ctest --output-on-failure -E "test-adnl"
fi

View file

@ -0,0 +1,122 @@
#!/bin/bash
#sudo apt-get update
#sudo apt-get install -y build-essential git cmake ninja-build zlib1g-dev libsecp256k1-dev libmicrohttpd-dev libsodium-dev
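# Flags:
#   -t  also build the test targets and run ctest
#   -a  copy the built binaries and libraries into artifacts/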
with_tests=false
with_artifacts=false
while getopts 'ta' flag; do
case "${flag}" in
t) with_tests=true ;;
a) with_artifacts=true ;;
*) break
;;
esac
done
if [ ! -d "build" ]; then
mkdir build
cd build
else
cd build
rm -rf .ninja* CMakeCache.txt
fi
export CC=$(which clang-16)
export CXX=$(which clang++-16)
export CCACHE_DISABLE=1
if [ ! -d "openssl_3" ]; then
git clone https://github.com/openssl/openssl openssl_3
cd openssl_3
opensslPath=`pwd`
git checkout openssl-3.1.4
./config
make build_libs -j12
test $? -eq 0 || { echo "Can't compile openssl_3"; exit 1; }
cd ..
else
opensslPath=$(pwd)/openssl_3
echo "Using compiled openssl_3"
fi
cmake -GNinja .. \
-DCMAKE_BUILD_TYPE=Release \
-DOPENSSL_ROOT_DIR=$opensslPath \
-DOPENSSL_INCLUDE_DIR=$opensslPath/include \
-DOPENSSL_CRYPTO_LIBRARY=$opensslPath/libcrypto.so
test $? -eq 0 || { echo "Can't configure ton"; exit 1; }
if [ "$with_tests" = true ]; then
ninja storage-daemon storage-daemon-cli fift func tonlib tonlibjson tonlib-cli \
validator-engine lite-client pow-miner validator-engine-console blockchain-explorer \
generate-random-id json2tlo dht-server http-proxy rldp-http-proxy \
adnl-proxy create-state emulator test-ed25519 test-ed25519-crypto test-bigint \
test-vm test-fift test-cells test-smartcont test-net test-tdactor test-tdutils \
test-tonlib-offline test-adnl test-dht test-rldp test-rldp2 test-catchain \
test-fec test-tddb test-db test-validator-session-state
test $? -eq 0 || { echo "Can't compile ton"; exit 1; }
else
ninja storage-daemon storage-daemon-cli fift func tonlib tonlibjson tonlib-cli \
validator-engine lite-client pow-miner validator-engine-console blockchain-explorer \
generate-random-id json2tlo dht-server http-proxy rldp-http-proxy \
adnl-proxy create-state emulator
test $? -eq 0 || { echo "Can't compile ton"; exit 1; }
fi
strip -g storage/storage-daemon/storage-daemon \
storage/storage-daemon/storage-daemon-cli \
blockchain-explorer/blockchain-explorer \
crypto/fift \
crypto/tlbc \
crypto/func \
crypto/create-state \
validator-engine-console/validator-engine-console \
tonlib/tonlib-cli \
tonlib/libtonlibjson.so.0.5 \
http/http-proxy \
rldp-http-proxy/rldp-http-proxy \
dht-server/dht-server \
lite-client/lite-client \
validator-engine/validator-engine \
utils/generate-random-id \
utils/json2tlo \
adnl/adnl-proxy \
emulator/libemulator.*
test $? -eq 0 || { echo "Can't strip final binaries"; exit 1; }
# quick smoke test of the built binaries
./storage/storage-daemon/storage-daemon -V || exit 1
./validator-engine/validator-engine -V || exit 1
./lite-client/lite-client -V || exit 1
./crypto/fift -V || exit 1
cd ..
if [ "$with_artifacts" = true ]; then
rm -rf artifacts
mkdir artifacts
mv build/tonlib/libtonlibjson.so.0.5 build/tonlib/libtonlibjson.so
cp build/storage/storage-daemon/storage-daemon build/storage/storage-daemon/storage-daemon-cli \
build/crypto/fift build/crypto/tlbc build/crypto/func build/crypto/create-state build/blockchain-explorer/blockchain-explorer \
build/validator-engine-console/validator-engine-console build/tonlib/tonlib-cli \
build/tonlib/libtonlibjson.so build/http/http-proxy build/rldp-http-proxy/rldp-http-proxy \
build/dht-server/dht-server build/lite-client/lite-client build/validator-engine/validator-engine \
build/utils/generate-random-id build/utils/json2tlo build/adnl/adnl-proxy build/emulator/libemulator.so \
artifacts
test $? -eq 0 || { echo "Can't copy final binaries"; exit 1; }
chmod +x artifacts/*
cp -R crypto/smartcont artifacts
cp -R crypto/fift/lib artifacts
fi
if [ "$with_tests" = true ]; then
cd build
# ctest --output-on-failure -E "test-catchain|test-actors|test-smartcont|test-adnl|test-validator-session-state|test-dht|test-rldp"
ctest --output-on-failure --timeout 1800
fi

View file

@ -0,0 +1,2 @@
call "C:\Program Files\Microsoft Visual Studio\2022\%1\VC\Auxiliary\Build\vcvars64.bat"
call build-windows.bat -t

View file

@ -0,0 +1,193 @@
REM execute this script inside elevated (Run as Administrator) console "x64 Native Tools Command Prompt for VS 2022"
echo off
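REM Pass -t as the first argument to additionally build the test targets and run ctest.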
echo Installing chocolatey windows package manager...
@"%SystemRoot%\System32\WindowsPowerShell\v1.0\powershell.exe" -NoProfile -InputFormat None -ExecutionPolicy Bypass -Command "iex ((New-Object System.Net.WebClient).DownloadString('https://chocolatey.org/install.ps1'))" && SET "PATH=%PATH%;%ALLUSERSPROFILE%\chocolatey\bin"
choco -?
IF %errorlevel% NEQ 0 (
echo Can't install chocolatey
exit /b %errorlevel%
)
choco feature enable -n allowEmptyChecksums
echo Installing pkgconfiglite...
choco install -y pkgconfiglite
IF errorlevel 1 (
echo Can't install pkgconfiglite
exit /b %errorlevel%
)
echo Installing ninja...
choco install -y ninja
IF errorlevel 1 (
echo Can't install ninja
exit /b %errorlevel%
)
if not exist "zlib" (
git clone https://github.com/madler/zlib.git
cd zlib\contrib\vstudio\vc14
msbuild zlibstat.vcxproj /p:Configuration=ReleaseWithoutAsm /p:platform=x64 -p:PlatformToolset=v143
IF errorlevel 1 (
echo Can't install zlib
exit /b %errorlevel%
)
cd ..\..\..\..
) else (
echo Using zlib...
)
if not exist "secp256k1" (
git clone https://github.com/libbitcoin/secp256k1.git
cd secp256k1\builds\msvc\vs2017
msbuild /p:Configuration=StaticRelease -p:PlatformToolset=v143 -p:Platform=x64
IF errorlevel 1 (
echo Can't install secp256k1
exit /b %errorlevel%
)
cd ..\..\..\..
) else (
echo Using secp256k1...
)
if not exist "libsodium" (
curl -Lo libsodium-1.0.18-stable-msvc.zip https://download.libsodium.org/libsodium/releases/libsodium-1.0.18-stable-msvc.zip
IF errorlevel 1 (
echo Can't download libsodium
exit /b %errorlevel%
)
unzip libsodium-1.0.18-stable-msvc.zip
) else (
echo Using libsodium...
)
if not exist "openssl-3.1.4" (
curl -Lo openssl-3.1.4.zip https://github.com/neodiX42/precompiled-openssl-win64/raw/main/openssl-3.1.4.zip
IF errorlevel 1 (
echo Can't download OpenSSL
exit /b %errorlevel%
)
unzip -q openssl-3.1.4.zip
) else (
echo Using openssl...
)
if not exist "libmicrohttpd-0.9.77-w32-bin" (
curl -Lo libmicrohttpd-0.9.77-w32-bin.zip https://github.com/neodiX42/precompiled-openssl-win64/raw/main/libmicrohttpd-0.9.77-w32-bin.zip
IF errorlevel 1 (
echo Can't download libmicrohttpd
exit /b %errorlevel%
)
unzip -q libmicrohttpd-0.9.77-w32-bin.zip
) else (
echo Using libmicrohttpd...
)
if not exist "readline-5.0-1-lib" (
curl -Lo readline-5.0-1-lib.zip https://github.com/neodiX42/precompiled-openssl-win64/raw/main/readline-5.0-1-lib.zip
IF errorlevel 1 (
echo Can't download readline
exit /b %errorlevel%
)
unzip -q -d readline-5.0-1-lib readline-5.0-1-lib.zip
) else (
echo Using readline...
)
set root=%cd%
echo %root%
set SODIUM_DIR=%root%\libsodium
mkdir build
cd build
cmake -GNinja -DCMAKE_BUILD_TYPE=Release ^
-DPORTABLE=1 ^
-DSODIUM_USE_STATIC_LIBS=1 ^
-DSECP256K1_FOUND=1 ^
-DSECP256K1_INCLUDE_DIR=%root%\secp256k1\include ^
-DSECP256K1_LIBRARY=%root%\secp256k1\bin\x64\Release\v143\static\secp256k1.lib ^
-DMHD_FOUND=1 ^
-DMHD_LIBRARY=%root%\libmicrohttpd-0.9.77-w32-bin\x86_64\VS2019\Release-static\libmicrohttpd.lib ^
-DMHD_INCLUDE_DIR=%root%\libmicrohttpd-0.9.77-w32-bin\x86_64\VS2019\Release-static ^
-DZLIB_FOUND=1 ^
-DZLIB_INCLUDE_DIR=%root%\zlib ^
-DZLIB_LIBRARIES=%root%\zlib\contrib\vstudio\vc14\x64\ZlibStatReleaseWithoutAsm\zlibstat.lib ^
-DOPENSSL_FOUND=1 ^
-DOPENSSL_INCLUDE_DIR=%root%/openssl-3.1.4/x64/include ^
-DOPENSSL_CRYPTO_LIBRARY=%root%/openssl-3.1.4/x64/lib/libcrypto_static.lib ^
-DCMAKE_CXX_FLAGS="/DTD_WINDOWS=1 /EHsc /bigobj" ..
IF errorlevel 1 (
echo Can't configure TON
exit /b %errorlevel%
)
IF "%1"=="-t" (
ninja storage-daemon storage-daemon-cli blockchain-explorer fift func tonlib tonlibjson ^
tonlib-cli validator-engine lite-client pow-miner validator-engine-console generate-random-id ^
json2tlo dht-server http-proxy rldp-http-proxy adnl-proxy create-state create-hardfork emulator ^
test-ed25519 test-ed25519-crypto test-bigint test-vm test-fift test-cells test-smartcont test-net ^
test-tdactor test-tdutils test-tonlib-offline test-adnl test-dht test-rldp test-rldp2 test-catchain ^
test-fec test-tddb test-db test-validator-session-state
IF errorlevel 1 (
echo Can't compile TON
exit /b %errorlevel%
)
) else (
ninja storage-daemon storage-daemon-cli blockchain-explorer fift func tonlib tonlibjson ^
tonlib-cli validator-engine lite-client pow-miner validator-engine-console generate-random-id ^
json2tlo dht-server http-proxy rldp-http-proxy adnl-proxy create-state create-hardfork emulator
IF errorlevel 1 (
echo Can't compile TON
exit /b %errorlevel%
)
)
copy validator-engine\validator-engine.exe test
IF errorlevel 1 (
echo validator-engine.exe does not exist
exit /b %errorlevel%
)
IF "%1"=="-t" (
echo Running tests...
REM ctest -C Release --output-on-failure -E "test-catchain|test-actors|test-validator-session-state"
ctest -C Release --output-on-failure --timeout 1800
IF errorlevel 1 (
echo Some tests failed
exit /b %errorlevel%
)
)
echo Creating artifacts...
cd ..
mkdir artifacts
mkdir artifacts\smartcont
mkdir artifacts\lib
for %%I in (build\storage\storage-daemon\storage-daemon.exe ^
build\storage\storage-daemon\storage-daemon-cli.exe ^
build\blockchain-explorer\blockchain-explorer.exe ^
build\crypto\fift.exe ^
build\crypto\tlbc.exe ^
build\crypto\func.exe ^
build\crypto\create-state.exe ^
build\validator-engine-console\validator-engine-console.exe ^
build\tonlib\tonlib-cli.exe ^
build\tonlib\tonlibjson.dll ^
build\http\http-proxy.exe ^
build\rldp-http-proxy\rldp-http-proxy.exe ^
build\dht-server\dht-server.exe ^
build\lite-client\lite-client.exe ^
build\validator-engine\validator-engine.exe ^
build\utils\generate-random-id.exe ^
build\utils\json2tlo.exe ^
build\adnl\adnl-proxy.exe ^
build\emulator\emulator.dll) do (strip -g %%I & copy %%I artifacts\)
xcopy /e /k /h /i crypto\smartcont artifacts\smartcont
xcopy /e /k /h /i crypto\fift\lib artifacts\lib

View file

@ -0,0 +1,20 @@
#!/bin/bash
nix-build --version
test $? -eq 0 || { echo "Nix is not installed!"; exit 1; }
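# Builds fully static linux-arm64 TON binaries with nix-build, then the tonlibjson and emulator shared libraries,
# and collects everything in artifacts/.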
cp assembly/nix/linux-arm64* .
cp assembly/nix/microhttpd.nix .
cp assembly/nix/openssl.nix .
export NIX_PATH=nixpkgs=https://github.com/nixOS/nixpkgs/archive/23.05.tar.gz
nix-build linux-arm64-static.nix
mkdir artifacts
cp ./result/bin/* artifacts/
chmod +x artifacts/*
rm -rf result
nix-build linux-arm64-tonlib.nix
cp ./result/lib/libtonlibjson.so.0.5 artifacts/libtonlibjson.so
cp ./result/lib/libemulator.so artifacts/
cp -r crypto/fift/lib artifacts/
cp -r crypto/smartcont artifacts/

View file

@ -0,0 +1,20 @@
#!/bin/bash
nix-build --version
test $? -eq 0 || { echo "Nix is not installed!"; exit 1; }
cp assembly/nix/linux-x86-64* .
cp assembly/nix/microhttpd.nix .
cp assembly/nix/openssl.nix .
export NIX_PATH=nixpkgs=https://github.com/nixOS/nixpkgs/archive/23.05.tar.gz
nix-build linux-x86-64-static.nix
mkdir artifacts
cp ./result/bin/* artifacts/
chmod +x artifacts/*
rm -rf result
nix-build linux-x86-64-tonlib.nix
cp ./result/lib/libtonlibjson.so.0.5 artifacts/libtonlibjson.so
cp ./result/lib/libemulator.so artifacts/
cp -r crypto/fift/lib artifacts/
cp -r crypto/smartcont artifacts/

View file

@ -0,0 +1,17 @@
#!/bin/bash
nix-build --version
test $? -eq 0 || { echo "Nix is not installed!"; exit 1; }
cp assembly/nix/macos-* .
export NIX_PATH=nixpkgs=https://github.com/nixOS/nixpkgs/archive/23.05.tar.gz
nix-build macos-static.nix
mkdir artifacts
cp ./result-bin/bin/* artifacts/
chmod +x artifacts/*
rm -rf result-bin
nix-build macos-tonlib.nix
cp ./result/lib/libtonlibjson.dylib artifacts/
cp ./result/lib/libemulator.dylib artifacts/
cp -r crypto/fift/lib artifacts/
cp -r crypto/smartcont artifacts/

View file

@ -0,0 +1,45 @@
# export NIX_PATH=nixpkgs=https://github.com/nixOS/nixpkgs/archive/23.05.tar.gz
{ pkgs ? import <nixpkgs> { system = builtins.currentSystem; }
, lib ? pkgs.lib
, stdenv ? pkgs.stdenv
}:
let
microhttpdmy = (import ./microhttpd.nix) {};
in
with import microhttpdmy;
stdenv.mkDerivation {
pname = "ton";
version = "dev-bin";
src = ./.;
nativeBuildInputs = with pkgs;
[
cmake ninja git pkg-config
];
buildInputs = with pkgs;
[
pkgsStatic.openssl microhttpdmy pkgsStatic.zlib pkgsStatic.libsodium.dev pkgsStatic.secp256k1 glibc.static
];
makeStatic = true;
doCheck = true;
cmakeFlags = [
"-DTON_USE_ABSEIL=OFF"
"-DNIX=ON"
"-DBUILD_SHARED_LIBS=OFF"
"-DCMAKE_LINK_SEARCH_START_STATIC=ON"
"-DCMAKE_LINK_SEARCH_END_STATIC=ON"
"-DMHD_FOUND=1"
"-DMHD_INCLUDE_DIR=${microhttpdmy}/usr/local/include"
"-DMHD_LIBRARY=${microhttpdmy}/usr/local/lib/libmicrohttpd.a"
"-DCMAKE_CTEST_ARGUMENTS=--timeout;1800"
];
LDFLAGS = [
"-static-libgcc" "-static-libstdc++" "-static"
];
}

View file

@ -0,0 +1,44 @@
# export NIX_PATH=nixpkgs=https://github.com/nixOS/nixpkgs/archive/23.05.tar.gz
{
pkgs ? import <nixpkgs> { system = builtins.currentSystem; }
, lib ? pkgs.lib
, stdenv ? pkgs.stdenv
}:
let
microhttpdmy = (import ./microhttpd.nix) {};
in
with import microhttpdmy;
pkgs.llvmPackages_16.stdenv.mkDerivation {
pname = "ton";
version = "dev-lib";
src = ./.;
nativeBuildInputs = with pkgs;
[
cmake ninja git pkg-config
];
buildInputs = with pkgs;
[
pkgsStatic.openssl microhttpdmy pkgsStatic.zlib pkgsStatic.libsodium.dev pkgsStatic.secp256k1
];
dontAddStaticConfigureFlags = false;
cmakeFlags = [
"-DTON_USE_ABSEIL=OFF"
"-DNIX=ON"
"-DMHD_FOUND=1"
"-DMHD_INCLUDE_DIR=${microhttpdmy}/usr/local/include"
"-DMHD_LIBRARY=${microhttpdmy}/usr/local/lib/libmicrohttpd.a"
];
LDFLAGS = [
"-static-libgcc" "-static-libstdc++" "-fPIC" "-fcommon"
];
ninjaFlags = [
"tonlibjson" "emulator"
];
}

View file

@ -0,0 +1,45 @@
# export NIX_PATH=nixpkgs=https://github.com/nixOS/nixpkgs/archive/23.05.tar.gz
{ pkgs ? import <nixpkgs> { system = builtins.currentSystem; }
, lib ? pkgs.lib
, stdenv ? pkgs.stdenv
}:
let
microhttpdmy = (import ./microhttpd.nix) {};
in
with import microhttpdmy;
stdenv.mkDerivation {
pname = "ton";
version = "dev-bin";
src = ./.;
nativeBuildInputs = with pkgs;
[
cmake ninja git pkg-config
];
buildInputs = with pkgs;
[
pkgsStatic.openssl microhttpdmy pkgsStatic.zlib pkgsStatic.libsodium.dev pkgsStatic.secp256k1 glibc.static
];
makeStatic = true;
doCheck = true;
cmakeFlags = [
"-DTON_USE_ABSEIL=OFF"
"-DNIX=ON"
"-DBUILD_SHARED_LIBS=OFF"
"-DCMAKE_LINK_SEARCH_START_STATIC=ON"
"-DCMAKE_LINK_SEARCH_END_STATIC=ON"
"-DMHD_FOUND=1"
"-DMHD_INCLUDE_DIR=${microhttpdmy}/usr/local/include"
"-DMHD_LIBRARY=${microhttpdmy}/usr/local/lib/libmicrohttpd.a"
"-DCMAKE_CTEST_ARGUMENTS=--timeout;1800"
];
LDFLAGS = [
"-static-libgcc" "-static-libstdc++" "-static"
];
}

View file

@ -0,0 +1,54 @@
# export NIX_PATH=nixpkgs=https://github.com/nixOS/nixpkgs/archive/23.11.tar.gz
# copy linux-x86-64-tonlib.nix to git root directory and execute:
# nix-build linux-x86-64-tonlib.nix
{
pkgs ? import <nixpkgs> { system = builtins.currentSystem; }
, lib ? pkgs.lib
, stdenv ? pkgs.stdenv
}:
let
system = builtins.currentSystem;
nixos1909 = (import (builtins.fetchTarball {
url = "https://channels.nixos.org/nixos-19.09/nixexprs.tar.xz";
sha256 = "1vp1h2gkkrckp8dzkqnpcc6xx5lph5d2z46sg2cwzccpr8ay58zy";
}) { inherit system; });
glibc227 = nixos1909.glibc // { pname = "glibc"; };
stdenv227 = let
cc = pkgs.wrapCCWith {
cc = nixos1909.buildPackages.gcc-unwrapped;
libc = glibc227;
bintools = pkgs.binutils.override { libc = glibc227; };
};
in (pkgs.overrideCC pkgs.stdenv cc);
in
stdenv227.mkDerivation {
pname = "ton";
version = "dev-lib";
src = ./.;
nativeBuildInputs = with pkgs;
[ cmake ninja git pkg-config ];
buildInputs = with pkgs;
[
pkgsStatic.openssl pkgsStatic.zlib pkgsStatic.libmicrohttpd.dev pkgsStatic.libsodium.dev pkgsStatic.secp256k1
];
dontAddStaticConfigureFlags = false;
cmakeFlags = [
"-DTON_USE_ABSEIL=OFF"
"-DNIX=ON"
];
LDFLAGS = [
"-static-libgcc" "-static-libstdc++" "-fPIC"
];
ninjaFlags = [
"tonlibjson" "emulator"
];
}

View file

@ -0,0 +1,65 @@
# export NIX_PATH=nixpkgs=https://github.com/nixOS/nixpkgs/archive/23.05.tar.gz
{ pkgs ? import <nixpkgs> { system = builtins.currentSystem; }
, lib ? pkgs.lib
, stdenv ? pkgs.stdenv
}:
pkgs.llvmPackages_14.stdenv.mkDerivation {
pname = "ton";
version = "dev-bin";
src = ./.;
nativeBuildInputs = with pkgs;
[ cmake ninja git pkg-config ];
buildInputs = with pkgs;
lib.forEach [
secp256k1 libsodium.dev libmicrohttpd.dev gmp.dev nettle.dev libtasn1.dev libidn2.dev libunistring.dev gettext (gnutls.override { withP11-kit = false; }).dev
]
(x: x.overrideAttrs(oldAttrs: rec { configureFlags = (oldAttrs.configureFlags or []) ++ [ "--enable-static" "--disable-shared" "--disable-tests" ]; dontDisableStatic = true; }))
++ [
darwin.apple_sdk.frameworks.CoreFoundation
(openssl.override { static = true; }).dev
(zlib.override { shared = false; }).dev
(libiconv.override { enableStatic = true; enableShared = false; })
];
dontAddStaticConfigureFlags = true;
makeStatic = true;
doCheck = true;
configureFlags = [];
cmakeFlags = [
"-DTON_USE_ABSEIL=OFF"
"-DNIX=ON"
"-DCMAKE_CROSSCOMPILING=OFF"
"-DCMAKE_LINK_SEARCH_START_STATIC=ON"
"-DCMAKE_LINK_SEARCH_END_STATIC=ON"
"-DBUILD_SHARED_LIBS=OFF"
"-DCMAKE_CXX_FLAGS=-stdlib=libc++"
"-DCMAKE_OSX_DEPLOYMENT_TARGET:STRING=11.3"
"-DCMAKE_CTEST_ARGUMENTS=--timeout;1800"
];
LDFLAGS = [
"-static-libstdc++"
"-framework CoreFoundation"
];
postInstall = ''
moveToOutput bin "$bin"
'';
preFixup = ''
for fn in "$bin"/bin/* "$out"/lib/*.dylib; do
echo Fixing libc++ in "$fn"
install_name_tool -change "$(otool -L "$fn" | grep libc++.1 | cut -d' ' -f1 | xargs)" libc++.1.dylib "$fn"
install_name_tool -change "$(otool -L "$fn" | grep libc++abi.1 | cut -d' ' -f1 | xargs)" libc++abi.dylib "$fn"
done
'';
outputs = [ "bin" "out" ];
}

View file

@ -0,0 +1,55 @@
# export NIX_PATH=nixpkgs=https://github.com/nixOS/nixpkgs/archive/23.05.tar.gz
{ pkgs ? import <nixpkgs> { system = builtins.currentSystem; }
, lib ? pkgs.lib
, stdenv ? pkgs.stdenv
}:
pkgs.llvmPackages_14.stdenv.mkDerivation {
pname = "ton";
version = "dev-lib";
src = ./.;
nativeBuildInputs = with pkgs;
[ cmake ninja git pkg-config ];
buildInputs = with pkgs;
lib.forEach [
secp256k1 libsodium.dev libmicrohttpd.dev gmp.dev nettle.dev libtasn1.dev libidn2.dev libunistring.dev gettext (gnutls.override { withP11-kit = false; }).dev
] (x: x.overrideAttrs(oldAttrs: rec { configureFlags = (oldAttrs.configureFlags or []) ++ [ "--enable-static" "--disable-shared" "--disable-tests" ]; dontDisableStatic = true; }))
++ [
darwin.apple_sdk.frameworks.CoreFoundation
(openssl.override { static = true; }).dev
(zlib.override { shared = false; }).dev
(libiconv.override { enableStatic = true; enableShared = false; })
];
dontAddStaticConfigureFlags = true;
configureFlags = [];
cmakeFlags = [
"-DTON_USE_ABSEIL=OFF"
"-DNIX=ON"
"-DCMAKE_CXX_FLAGS=-stdlib=libc++"
"-DCMAKE_OSX_DEPLOYMENT_TARGET:STRING=11.3"
];
LDFLAGS = [
"-static-libstdc++"
"-framework CoreFoundation"
];
ninjaFlags = [
"tonlibjson" "emulator"
];
preFixup = ''
for fn in $out/bin/* $out/lib/*.dylib; do
echo Fixing libc++ in "$fn"
install_name_tool -change "$(otool -L "$fn" | grep libc++.1 | cut -d' ' -f1 | xargs)" libc++.1.dylib "$fn"
install_name_tool -change "$(otool -L "$fn" | grep libc++abi.1 | cut -d' ' -f1 | xargs)" libc++abi.dylib "$fn"
done
'';
}

View file

@ -0,0 +1,28 @@
{ pkgs ? import <nixpkgs> { system = builtins.currentSystem; }
, stdenv ? pkgs.stdenv
, fetchgit ? pkgs.fetchgit
}:
stdenv.mkDerivation rec {
name = "microhttpdmy";
src = fetchgit {
url = "https://git.gnunet.org/libmicrohttpd.git";
rev = "refs/tags/v0.9.77";
sha256 = "sha256-x+nfB07PbZwBlFc6kZZFYiRpk0a3QN/ByHB+hC8na/o=";
};
nativeBuildInputs = with pkgs; [ automake libtool autoconf texinfo ];
buildInputs = with pkgs; [ ];
configurePhase = ''
./autogen.sh
./configure --enable-static --disable-tests --disable-benchmark --disable-shared --disable-https --with-pic
'';
installPhase = ''
make install DESTDIR=$out
'';
}

30
assembly/nix/openssl.nix Normal file
View file

@ -0,0 +1,30 @@
{ pkgs ? import <nixpkgs> { system = builtins.currentSystem; }
, stdenv ? pkgs.stdenv
, fetchFromGitHub ? pkgs.fetchFromGitHub
}:
stdenv.mkDerivation rec {
name = "opensslmy";
src = fetchFromGitHub {
owner = "openssl";
repo = "openssl";
rev = "refs/tags/openssl-3.1.4";
sha256 = "sha256-Vvf1wiNb4ikg1lIS9U137aodZ2JzM711tSWMJFYWtWI=";
};
nativeBuildInputs = with pkgs; [ perl ];
buildInputs = with pkgs; [ ];
postPatch = ''
patchShebangs Configure
'';
configurePhase = ''
./Configure no-shared
'';
installPhase = ''
make install DESTDIR=$out
'';
}

View file

@ -1,7 +1,24 @@
# The script builds funcfift compiler to WASM
# dependencies:
#sudo apt-get install -y build-essential git make cmake clang libgflags-dev zlib1g-dev libssl-dev libreadline-dev libmicrohttpd-dev pkg-config libgsl-dev python3 python3-dev python3-pip nodejs libevent-dev
# Execute these prerequisites first
# sudo apt update
# sudo apt install -y build-essential git make cmake ninja-build clang libgflags-dev zlib1g-dev libssl-dev \
# libreadline-dev libmicrohttpd-dev pkg-config libgsl-dev python3 python3-dev python3-pip \
# nodejs libsecp256k1-dev libsodium-dev automake libtool
# wget https://apt.llvm.org/llvm.sh
# chmod +x llvm.sh
# sudo ./llvm.sh 16 all
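# Pass -a to collect the resulting fift/func/tlbc/emulator-emscripten WASM artifacts into artifacts/ after the build.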
with_artifacts=false
while getopts 'a' flag; do
case "${flag}" in
a) with_artifacts=true ;;
*) break
;;
esac
done
export CC=$(which clang-16)
export CXX=$(which clang++-16)
@ -39,8 +56,7 @@ mkdir build
cd build
cmake -GNinja -DCMAKE_BUILD_TYPE=Release \
-DCMAKE_CXX_STANDARD=17 \
-DZLIB_LIBRARY=/usr/lib/x86_64-linux-gnu/libz.so \
-DZLIB_INCLUDE_DIR=$ZLIB_DIR \
-DOPENSSL_FOUND=1 \
-DOPENSSL_ROOT_DIR=$OPENSSL_DIR \
-DOPENSSL_INCLUDE_DIR=$OPENSSL_DIR/include \
-DOPENSSL_CRYPTO_LIBRARY=$OPENSSL_DIR/libcrypto.so \
@ -62,8 +78,9 @@ cd emsdk
./emsdk install 3.1.19
./emsdk activate 3.1.19
EMSDK_DIR=`pwd`
ls $EMSDK_DIR
source $EMSDK_DIR/emsdk_env.sh
. $EMSDK_DIR/emsdk_env.sh
export CC=$(which emcc)
export CXX=$(which em++)
export CCACHE_DISABLE=1
@ -71,7 +88,7 @@ export CCACHE_DISABLE=1
cd ../openssl
make clean
emconfigure ./Configure linux-generic32 no-shared no-dso no-engine no-unit-test no-ui
emconfigure ./Configure linux-generic32 no-shared no-dso no-engine no-unit-test
sed -i 's/CROSS_COMPILE=.*/CROSS_COMPILE=/g' Makefile
sed -i 's/-ldl//g' Makefile
sed -i 's/-O3/-Os/g' Makefile
@ -101,22 +118,42 @@ test $? -eq 0 || { echo "Can't compile libsodium with emmake "; exit 1; }
cd ../build
emcmake cmake -DUSE_EMSCRIPTEN=ON -DCMAKE_BUILD_TYPE=Release \
-DZLIB_LIBRARY=$ZLIB_DIR/libz.a \
-DZLIB_FOUND=1 \
-DZLIB_LIBRARIES=$ZLIB_DIR/libz.a \
-DZLIB_INCLUDE_DIR=$ZLIB_DIR \
-DOPENSSL_FOUND=1 \
-DOPENSSL_ROOT_DIR=$OPENSSL_DIR \
-DOPENSSL_INCLUDE_DIR=$OPENSSL_DIR/include \
-DOPENSSL_CRYPTO_LIBRARY=$OPENSSL_DIR/libcrypto.a \
-DOPENSSL_SSL_LIBRARY=$OPENSSL_DIR/libssl.a \
-DCMAKE_TOOLCHAIN_FILE=$EMSDK_DIR/upstream/emscripten/cmake/Modules/Platform/Emscripten.cmake \
-DCMAKE_CXX_FLAGS="-sUSE_ZLIB=1" \
-DSECP256K1_FOUND=1 \
-DSECP256K1_INCLUDE_DIR=$SECP256K1_DIR/include \
-DSECP256K1_LIBRARY=$SECP256K1_DIR/.libs/libsecp256k1.a \
-DSODIUM_INCLUDE_DIR=$SODIUM_DIR/src/libsodium/include \
-DSODIUM_LIBRARY_RELEASE=$SODIUM_DIR/src/libsodium/.libs/libsodium.a \
-DSODIUM_LIBRARY_DEBUG=$SODIUM_DIR/src/libsodium/.libs/libsodium.a \
-DSODIUM_USE_STATIC_LIBS=ON ..
test $? -eq 0 || { echo "Can't configure TON with emmake "; exit 1; }
cp -R ../crypto/smartcont ../crypto/fift/lib crypto
emmake make -j16 funcfiftlib func fift tlbc emulator-emscripten
test $? -eq 0 || { echo "Can't compile TON with emmake "; exit 1; }
if [ "$with_artifacts" = true ]; then
echo "Creating artifacts..."
cd ..
rm -rf artifacts
mkdir artifacts
ls build/crypto
cp build/crypto/fift* artifacts
cp build/crypto/func* artifacts
cp build/crypto/tlbc* artifacts
cp build/emulator/emulator-emscripten* artifacts
cp -R crypto/smartcont artifacts
cp -R crypto/fift/lib artifacts
fi

View file

@ -14,15 +14,28 @@ set(BLOCHAIN_EXPLORER_SOURCE
add_executable(blockchain-explorer ${BLOCHAIN_EXPLORER_SOURCE})
if (NIX)
find_package(PkgConfig REQUIRED)
pkg_check_modules(MHD libmicrohttpd)
target_include_directories(blockchain-explorer PUBLIC ${MHD_INCLUDE_DIRS} ${MHD_STATIC_INCLUDE_DIRS})
target_link_libraries(blockchain-explorer tdutils tdactor adnllite tl_lite_api tl-lite-utils ton_crypto ton_block ${MHD_LIBRARIES} ${MHD_STATIC_LIBRARIES})
if (MHD_FOUND)
target_include_directories(blockchain-explorer PUBLIC ${MHD_INCLUDE_DIR})
target_link_libraries(blockchain-explorer tdutils tdactor adnllite tl_lite_api tl-lite-utils ton_crypto ton_block ${MHD_LIBRARY})
else()
find_package(PkgConfig REQUIRED)
pkg_check_modules(MHD libmicrohttpd)
target_include_directories(blockchain-explorer PUBLIC ${MHD_INCLUDE_DIR} ${MHD_STATIC_INCLUDE_DIRS})
target_link_libraries(blockchain-explorer tdutils tdactor adnllite tl_lite_api tl-lite-utils ton_crypto ton_block ${MHD_LIBRARIES} ${MHD_STATIC_LIBRARIES})
endif()
else()
find_package(MHD)
target_include_directories(blockchain-explorer PUBLIC ${MHD_INCLUDE_DIRS})
target_link_libraries(blockchain-explorer tdutils tdactor adnllite tl_lite_api tl-lite-utils ton_crypto ton_block ${MHD_LIBRARIES})
if (MHD_FOUND)
target_include_directories(blockchain-explorer PUBLIC ${MHD_INCLUDE_DIR})
target_link_libraries(blockchain-explorer tdutils tdactor adnllite tl_lite_api tl-lite-utils ton_crypto ton_block ${MHD_LIBRARY})
else()
find_package(MHD)
target_include_directories(blockchain-explorer PUBLIC ${MHD_INCLUDE_DIR})
target_link_libraries(blockchain-explorer tdutils tdactor adnllite tl_lite_api tl-lite-utils ton_crypto ton_block ${MHD_LIBRARY})
endif()
endif()
target_include_directories(blockchain-explorer PUBLIC ${MHD_INCLUDE_DIR})
target_link_libraries(blockchain-explorer tdutils tdactor adnllite tl_lite_api tl-lite-utils ton_crypto ton_block ${MHD_LIBRARY})
install(TARGETS blockchain-explorer RUNTIME DESTINATION bin)

View file

@ -287,7 +287,9 @@ void CatChainReceiverImpl::add_block_cont_3(tl_object_ptr<ton_api::catchain_bloc
run_scheduler();
if (!intentional_fork_) {
CHECK(last_sent_block_->delivered());
LOG_CHECK(last_sent_block_->delivered())
<< "source=" << last_sent_block_->get_source_id() << " ill=" << last_sent_block_->is_ill()
<< " height=" << last_sent_block_->get_height();
}
active_send_ = false;

View file

@ -325,23 +325,20 @@ endif()
if (MSVC)
find_package(Sodium REQUIRED)
target_compile_definitions(ton_crypto PUBLIC SODIUM_STATIC)
target_include_directories(ton_crypto_core PUBLIC $<BUILD_INTERFACE:${SECP256K1_INCLUDE_DIRS}>)
target_link_libraries(ton_crypto PUBLIC ${SECP256K1_LIBRARIES})
elseif (ANDROID)
target_include_directories(ton_crypto_core PUBLIC $<BUILD_INTERFACE:${SECP256K1_INCLUDE_DIR}>)
target_link_libraries(ton_crypto PUBLIC ${SECP256K1_LIBRARY})
elseif (ANDROID OR EMSCRIPTEN)
target_include_directories(ton_crypto_core PUBLIC $<BUILD_INTERFACE:${SECP256K1_INCLUDE_DIR}>)
target_link_libraries(ton_crypto PUBLIC $<BUILD_INTERFACE:${SECP256K1_LIBRARY}>)
else()
if (NOT USE_EMSCRIPTEN)
if (NOT SODIUM_FOUND)
find_package(Sodium REQUIRED)
target_include_directories(ton_crypto_core PUBLIC $<BUILD_INTERFACE:${SECP256K1_INCLUDE_DIRS}>)
target_link_libraries(ton_crypto PUBLIC ${SECP256K1_LIBRARIES})
else()
target_include_directories(ton_crypto_core PUBLIC $<BUILD_INTERFACE:${SECP256K1_INCLUDE_DIR}>)
target_link_libraries(ton_crypto PUBLIC ${SECP256K1_LIBRARY})
endif()
if (NOT APPLE AND NOT USE_EMSCRIPTEN)
target_link_libraries(ton_crypto_core PUBLIC secp256k1)
message(STATUS "Using Sodium ${SODIUM_LIBRARY_RELEASE}")
endif()
target_compile_definitions(ton_crypto PUBLIC SODIUM_STATIC)
target_include_directories(ton_crypto_core PUBLIC $<BUILD_INTERFACE:${SECP256K1_INCLUDE_DIR}>)
target_link_libraries(ton_crypto PUBLIC ${SECP256K1_LIBRARY})
endif()
target_include_directories(ton_crypto_core PUBLIC $<BUILD_INTERFACE:${SODIUM_INCLUDE_DIR}>)
@ -356,7 +353,7 @@ add_executable(test-ed25519-crypto test/test-ed25519-crypto.cpp)
target_include_directories(test-ed25519-crypto PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>)
target_link_libraries(test-ed25519-crypto PUBLIC ton_crypto)
add_library(fift-lib ${FIFT_SOURCE})
add_library(fift-lib STATIC ${FIFT_SOURCE})
target_include_directories(fift-lib PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>)
target_link_libraries(fift-lib PUBLIC ton_crypto ton_db tdutils ton_block)
if (USE_EMSCRIPTEN)
@ -400,6 +397,7 @@ if (USE_EMSCRIPTEN)
target_link_options(funcfiftlib PRIVATE -sIGNORE_MISSING_MAIN=1)
target_link_options(funcfiftlib PRIVATE -sAUTO_NATIVE_LIBRARIES=0)
target_link_options(funcfiftlib PRIVATE -sMODULARIZE=1)
target_link_options(funcfiftlib PRIVATE -sTOTAL_MEMORY=33554432)
target_link_options(funcfiftlib PRIVATE -sALLOW_MEMORY_GROWTH=1)
target_link_options(funcfiftlib PRIVATE -sALLOW_TABLE_GROWTH=1)
target_link_options(funcfiftlib PRIVATE --embed-file ${CMAKE_CURRENT_SOURCE_DIR}/fift/lib@/fiftlib)
@ -506,7 +504,7 @@ if (NOT CMAKE_CROSSCOMPILING OR USE_EMSCRIPTEN)
GenFif(DEST smartcont/auto/simple-wallet-ext-code SOURCE smartcont/simple-wallet-ext-code.fc NAME simple-wallet-ext)
endif()
add_library(smc-envelope ${SMC_ENVELOPE_SOURCE})
add_library(smc-envelope STATIC ${SMC_ENVELOPE_SOURCE})
target_include_directories(smc-envelope PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>)
target_link_libraries(smc-envelope PUBLIC ton_crypto PRIVATE tdutils ton_block)
if (NOT CMAKE_CROSSCOMPILING)

View file

@ -696,6 +696,11 @@ gas_prices_ext#de gas_price:uint64 gas_limit:uint64 special_gas_limit:uint64 gas
block_gas_limit:uint64 freeze_due_limit:uint64 delete_due_limit:uint64
= GasLimitsPrices;
// same fields as gas_prices_ext; behavior differs
gas_prices_v3#df gas_price:uint64 gas_limit:uint64 special_gas_limit:uint64 gas_credit:uint64
block_gas_limit:uint64 freeze_due_limit:uint64 delete_due_limit:uint64
= GasLimitsPrices;
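// gas_prices_v3 (tag #df) additionally enables the full gas limit for special accounts in the masterchain
// (see special_full_limit in GasLimitsPrices and special_gas_full in ComputePhaseConfig)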
gas_flat_pfx#d1 flat_gas_limit:uint64 flat_gas_price:uint64 other:GasLimitsPrices
= GasLimitsPrices;

View file

@ -654,11 +654,16 @@ td::Result<GasLimitsPrices> Config::do_get_gas_limits_prices(td::Ref<vm::Cell> c
res.delete_due_limit = r.delete_due_limit;
};
block::gen::GasLimitsPrices::Record_gas_prices_ext rec;
block::gen::GasLimitsPrices::Record_gas_prices_v3 rec_v3;
vm::CellSlice cs0 = cs;
if (tlb::unpack(cs, rec)) {
f(rec, rec.special_gas_limit);
} else if (tlb::unpack(cs = cs0, rec_v3)) {
f(rec_v3, rec_v3.special_gas_limit);
res.special_full_limit = true;
} else {
block::gen::GasLimitsPrices::Record_gas_prices rec0;
if (tlb::unpack(cs, rec0)) {
if (tlb::unpack(cs = cs0, rec0)) {
f(rec0, rec0.gas_limit);
} else {
return td::Status::Error(PSLICE() << "configuration parameter " << id

View file

@ -349,6 +349,7 @@ struct GasLimitsPrices {
td::uint64 block_gas_limit{0};
td::uint64 freeze_due_limit{0};
td::uint64 delete_due_limit{0};
bool special_full_limit{false};
td::RefInt256 compute_gas_price(td::uint64 gas_used) const;
};

View file

@ -1039,8 +1039,12 @@ bool ComputePhaseConfig::parse_GasLimitsPrices_internal(Ref<vm::CellSlice> cs, t
delete_due_limit = td::make_refint(r.delete_due_limit);
};
block::gen::GasLimitsPrices::Record_gas_prices_ext rec;
block::gen::GasLimitsPrices::Record_gas_prices_v3 rec_v3;
if (tlb::csr_unpack(cs, rec)) {
f(rec, rec.special_gas_limit);
} else if (tlb::csr_unpack(cs, rec_v3)) {
f(rec_v3, rec_v3.special_gas_limit);
special_gas_full = true;
} else {
block::gen::GasLimitsPrices::Record_gas_prices rec0;
if (tlb::csr_unpack(std::move(cs), rec0)) {
@ -1077,6 +1081,25 @@ bool ComputePhaseConfig::is_address_suspended(ton::WorkchainId wc, td::Bits256 a
}
}
/**
* Computes the maximum gas fee based on the gas prices and limits.
*
* @param gas_price256 The gas price from config as RefInt256
* @param gas_limit The gas limit from config
* @param flat_gas_limit The flat gas limit from config
* @param flat_gas_price The flat gas price from config
*
* @returns The maximum gas fee.
*/
static td::RefInt256 compute_max_gas_threshold(const td::RefInt256& gas_price256, td::uint64 gas_limit,
td::uint64 flat_gas_limit, td::uint64 flat_gas_price) {
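  // note: the config gas price is a fixed-point value scaled by 2^16, hence the rshift by 16 below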
if (gas_limit > flat_gas_limit) {
return td::rshift(gas_price256 * (gas_limit - flat_gas_limit), 16, 1) + td::make_bigint(flat_gas_price);
} else {
return td::make_refint(flat_gas_price);
}
}
/**
 * Computes the maximum gas fee threshold based on the gas prices and limits.
*
@ -1084,12 +1107,7 @@ bool ComputePhaseConfig::is_address_suspended(ton::WorkchainId wc, td::Bits256 a
*/
void ComputePhaseConfig::compute_threshold() {
gas_price256 = td::make_refint(gas_price);
if (gas_limit > flat_gas_limit) {
max_gas_threshold =
td::rshift(gas_price256 * (gas_limit - flat_gas_limit), 16, 1) + td::make_bigint(flat_gas_price);
} else {
max_gas_threshold = td::make_refint(flat_gas_price);
}
max_gas_threshold = compute_max_gas_threshold(gas_price256, gas_limit, flat_gas_limit, flat_gas_price);
}
/**
@ -1126,6 +1144,67 @@ td::RefInt256 ComputePhaseConfig::compute_gas_price(td::uint64 gas_used) const {
}
namespace transaction {
/**
 * Checks whether it is required to increase gas_limit (from the GasLimitsPrices config) to special_gas_limit * 2
 * taken from the masterchain GasLimitsPrices config for this transaction.
*
 * In January 2024, a highload wallet of the @wallet Telegram bot in mainnet got stuck because the current gas limit (1M) is
 * not enough to clean up old queries, thus locking funds inside.
* See comment in crypto/smartcont/highload-wallet-v2-code.fc for details on why this happened.
* Account address: EQD_v9j1rlsuHHw2FIhcsCFFSD367ldfDdCKcsNmNpIRzUlu
* It was proposed to validators to increase gas limit for this account for a limited amount of time (until 2024-02-16).
* It is activated by setting gas_prices_v3 in ConfigParam 20 (config_mc_gas_prices).
* This config change also activates new behavior for special accounts in masterchain.
*
* @param cfg The compute phase configuration.
* @param now The Unix time of the transaction.
* @param account The account of the transaction.
*
* @returns True if gas_limit override is required, false otherwise
*/
static bool override_gas_limit(const ComputePhaseConfig& cfg, ton::UnixTime now, const Account& account) {
if (!cfg.mc_gas_prices.special_full_limit) {
return false;
}
ton::UnixTime until = 1708041600; // 2024-02-16 00:00:00 UTC
ton::WorkchainId wc = 0;
const char* addr_hex = "FFBFD8F5AE5B2E1C7C3614885CB02145483DFAEE575F0DD08A72C366369211CD";
return now < until && account.workchain == wc && account.addr.to_hex() == addr_hex;
}
/**
* Computes the amount of gas that can be bought for a given amount of nanograms.
 * Usually equal to `cfg.gas_bought_for(nanograms)`;
 * however, it overrides gas_limit from the config in special cases.
*
* @param cfg The compute phase configuration.
* @param nanograms The amount of nanograms to compute gas for.
*
* @returns The amount of gas.
*/
td::uint64 Transaction::gas_bought_for(const ComputePhaseConfig& cfg, td::RefInt256 nanograms) {
if (override_gas_limit(cfg, now, account)) {
gas_limit_overridden = true;
    // Same as ComputePhaseConfig::gas_bought_for, but with a different gas_limit and max_gas_threshold
auto gas_limit = cfg.mc_gas_prices.special_gas_limit * 2;
auto max_gas_threshold =
compute_max_gas_threshold(cfg.gas_price256, gas_limit, cfg.flat_gas_limit, cfg.flat_gas_price);
if (nanograms.is_null() || sgn(nanograms) < 0) {
return 0;
}
if (nanograms >= max_gas_threshold) {
return gas_limit;
}
if (nanograms < cfg.flat_gas_price) {
return 0;
}
auto res = td::div((std::move(nanograms) - cfg.flat_gas_price) << 16, cfg.gas_price256);
return res->to_long() + cfg.flat_gas_limit;
}
return cfg.gas_bought_for(nanograms);
}
/**
* Computes the gas limits for a transaction.
*
@ -1139,16 +1218,16 @@ bool Transaction::compute_gas_limits(ComputePhase& cp, const ComputePhaseConfig&
if (account.is_special) {
cp.gas_max = cfg.special_gas_limit;
} else {
cp.gas_max = cfg.gas_bought_for(balance.grams);
cp.gas_max = gas_bought_for(cfg, balance.grams);
}
cp.gas_credit = 0;
if (trans_type != tr_ord) {
if (trans_type != tr_ord || (account.is_special && cfg.special_gas_full)) {
// may use all gas that can be bought using remaining balance
cp.gas_limit = cp.gas_max;
} else {
// originally use only gas bought using remaining message balance
// if the message is "accepted" by the smart contract, the gas limit will be set to gas_max
cp.gas_limit = std::min(cfg.gas_bought_for(msg_balance_remaining.grams), cp.gas_max);
cp.gas_limit = std::min(gas_bought_for(cfg, msg_balance_remaining.grams), cp.gas_max);
if (!block::tlb::t_Message.is_internal(in_msg)) {
// external messages carry no balance, give them some credit to check whether they are accepted
cp.gas_credit = std::min(cfg.gas_credit, cp.gas_max);
@ -3203,8 +3282,8 @@ td::Result<vm::NewCellStorageStat::Stat> Transaction::estimate_block_storage_pro
*
* @returns True if the limits were successfully updated, False otherwise.
*/
bool Transaction::update_limits(block::BlockLimitStatus& blimst, bool with_size) const {
if (!(blimst.update_lt(end_lt) && blimst.update_gas(gas_used()))) {
bool Transaction::update_limits(block::BlockLimitStatus& blimst, bool with_gas, bool with_size) const {
if (!(blimst.update_lt(end_lt) && blimst.update_gas(with_gas ? gas_used() : 0))) {
return false;
}
if (with_size) {
@ -3450,6 +3529,9 @@ td::Status FetchConfigParams::fetch_config_params(
storage_phase_cfg->delete_due_limit)) {
return td::Status::Error(-668, "cannot unpack current gas prices and limits from masterchain configuration");
}
TRY_RESULT_PREFIX(mc_gas_prices, config.get_gas_limits_prices(true),
"cannot unpack masterchain gas prices and limits: ");
compute_phase_cfg->mc_gas_prices = std::move(mc_gas_prices);
storage_phase_cfg->enable_due_payment = config.get_global_version() >= 4;
compute_phase_cfg->block_rand_seed = *rand_seed;
compute_phase_cfg->max_vm_data_depth = size_limits.max_vm_data_depth;

View file

@ -104,6 +104,8 @@ struct ComputePhaseConfig {
td::uint64 gas_credit;
td::uint64 flat_gas_limit = 0;
td::uint64 flat_gas_price = 0;
bool special_gas_full = false;
block::GasLimitsPrices mc_gas_prices;
static constexpr td::uint64 gas_infty = (1ULL << 63) - 1;
td::RefInt256 gas_price256;
td::RefInt256 max_gas_threshold;
@ -119,12 +121,7 @@ struct ComputePhaseConfig {
SizeLimitsConfig size_limits;
int vm_log_verbosity = 0;
ComputePhaseConfig(td::uint64 _gas_price = 0, td::uint64 _gas_limit = 0, td::uint64 _gas_credit = 0)
: gas_price(_gas_price), gas_limit(_gas_limit), special_gas_limit(_gas_limit), gas_credit(_gas_credit) {
compute_threshold();
}
ComputePhaseConfig(td::uint64 _gas_price, td::uint64 _gas_limit, td::uint64 _spec_gas_limit, td::uint64 _gas_credit)
: gas_price(_gas_price), gas_limit(_gas_limit), special_gas_limit(_spec_gas_limit), gas_credit(_gas_credit) {
ComputePhaseConfig() : gas_price(0), gas_limit(0), special_gas_limit(0), gas_credit(0) {
compute_threshold();
}
void compute_threshold();
@ -362,12 +359,14 @@ struct Transaction {
std::unique_ptr<ActionPhase> action_phase;
std::unique_ptr<BouncePhase> bounce_phase;
vm::CellStorageStat new_storage_stat;
bool gas_limit_overridden{false};
Transaction(const Account& _account, int ttype, ton::LogicalTime req_start_lt, ton::UnixTime _now,
Ref<vm::Cell> _inmsg = {});
bool unpack_input_msg(bool ihr_delivered, const ActionPhaseConfig* cfg);
bool check_in_msg_state_hash();
bool prepare_storage_phase(const StoragePhaseConfig& cfg, bool force_collect = true, bool adjust_msg_value = false);
bool prepare_credit_phase();
td::uint64 gas_bought_for(const ComputePhaseConfig& cfg, td::RefInt256 nanograms);
bool compute_gas_limits(ComputePhase& cp, const ComputePhaseConfig& cfg);
Ref<vm::Stack> prepare_vm_stack(ComputePhase& cp);
std::vector<Ref<vm::Cell>> compute_vm_libraries(const ComputePhaseConfig& cfg);
@ -383,7 +382,7 @@ struct Transaction {
td::Result<vm::NewCellStorageStat::Stat> estimate_block_storage_profile_incr(
const vm::NewCellStorageStat& store_stat, const vm::CellUsageTree* usage_tree) const;
bool update_limits(block::BlockLimitStatus& blk_lim_st, bool with_size = true) const;
bool update_limits(block::BlockLimitStatus& blk_lim_st, bool with_gas = true, bool with_size = true) const;
Ref<vm::Cell> commit(Account& _account); // _account should point to the same account
LtCellRef extract_out_msg(unsigned i);

View file

@ -3,6 +3,22 @@
;; this version does not use seqno for replay protection; instead, it remembers all recent query_ids
;; in this way several external messages with different query_id can be sent in parallel
;; Note: when dealing with the highload wallet, the following limits need to be checked and taken into account:
;; 1) Storage size limit. Currently, the contract storage must stay below 65535 cells. If the size of
;; old_queries grows above this limit, an exception is thrown in the ActionPhase and the transaction fails.
;; A failed transaction may be replayed.
;; 2) Gas limit. Currently, the gas limit is 1'000'000 gas units, which bounds how many old queries
;; can be cleaned in a single transaction. If the number of expired queries is higher, the contract gets stuck.
;; Therefore it is not recommended to set the expiration date too far in the future:
;; the number of queries sent during one expiration timespan should not exceed 1000.
;; Also, the number of expired queries cleaned in one transaction should stay below 100.
;; These precautions are not easy to follow, so it is recommended to use the highload contract
;; only when strictly necessary and when the developer understands the above details.
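;; (Illustrative numbers, not from the original contract: with a 1-hour expiration timespan this means keeping
;; sustained load below roughly 1000 queries per hour, and spreading cleanup so that no single transaction has
;; to delete more than about 100 expired entries.)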
() recv_internal(slice in_msg) impure {
;; do nothing for internal messages
}

View file

@ -489,7 +489,7 @@ void do_test_wallet(int revision) {
auto address = std::move(res.address);
auto iwallet = std::move(res.wallet);
auto public_key = priv_key.get_public_key().move_as_ok().as_octet_string();
;
check_wallet_state(iwallet, 1, 123, public_key);
// lets send a lot of messages
@ -1026,7 +1026,7 @@ class CheckedDns {
}
return action;
});
auto query = dns_->create_update_query(key_.value(), smc_actions).move_as_ok();
auto query = dns_->create_update_query(key_.value(), smc_actions, query_id_++).move_as_ok();
CHECK(dns_.write().send_external_message(std::move(query)).code == 0);
}
map_dns_.update(entries);
@ -1081,6 +1081,7 @@ class CheckedDns {
using ManualDns = ton::ManualDns;
td::optional<td::Ed25519::PrivateKey> key_;
td::Ref<ManualDns> dns_;
  td::uint32 query_id_ = 1;  // Query ids serve as "valid until", but in tests now() == 0
MapDns map_dns_;
td::optional<MapDns> combined_map_dns_;

View file

@ -930,7 +930,7 @@ unsigned long long BagOfCells::get_idx_entry_raw(int index) {
*
*/
td::Result<Ref<Cell>> std_boc_deserialize(td::Slice data, bool can_be_empty) {
td::Result<Ref<Cell>> std_boc_deserialize(td::Slice data, bool can_be_empty, bool allow_nonzero_level) {
if (data.empty() && can_be_empty) {
return Ref<Cell>();
}
@ -946,7 +946,7 @@ td::Result<Ref<Cell>> std_boc_deserialize(td::Slice data, bool can_be_empty) {
if (root.is_null()) {
return td::Status::Error("bag of cells has null root cell (?)");
}
if (root->get_level() != 0) {
if (!allow_nonzero_level && root->get_level() != 0) {
return td::Status::Error("bag of cells has a root with non-zero level");
}
return std::move(root);

View file

@ -52,6 +52,7 @@ class NewCellStorageStat {
bool operator==(const Stat& other) const {
return key() == other.key();
}
Stat(const Stat& other) = default;
Stat& operator=(const Stat& other) = default;
Stat& operator+=(const Stat& other) {
cells += other.cells;
@ -323,7 +324,7 @@ class BagOfCells {
std::vector<td::uint8>* cell_should_cache);
};
td::Result<Ref<Cell>> std_boc_deserialize(td::Slice data, bool can_be_empty = false);
td::Result<Ref<Cell>> std_boc_deserialize(td::Slice data, bool can_be_empty = false, bool allow_nonzero_level = false);
td::Result<td::BufferSlice> std_boc_serialize(Ref<Cell> root, int mode = 0);
td::Result<std::vector<Ref<Cell>>> std_boc_deserialize_multi(td::Slice data,

View file

@ -98,7 +98,7 @@ class RefcntCellParser {
auto size = parser.get_left_len();
td::Slice data = parser.template fetch_string_raw<td::Slice>(size);
if (stored_boc_) {
TRY_RESULT(boc, vm::std_boc_deserialize(data));
TRY_RESULT(boc, vm::std_boc_deserialize(data, false, true));
TRY_RESULT(loaded_cell, boc->load_cell());
cell = std::move(loaded_cell.data_cell);
return td::Status::OK();

View file

@ -572,6 +572,12 @@ void DhtServer::load_config(td::Promise<td::Unit> promise) {
config_file_ = db_root_ + "/config.json";
}
auto conf_data_R = td::read_file(config_file_);
if (conf_data_R.is_error()) {
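    // fall back to a temporary config left behind by an interrupted write_config() and promote it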
conf_data_R = td::read_file(temp_config_file());
if (conf_data_R.is_ok()) {
td::rename(temp_config_file(), config_file_).ensure();
}
}
if (conf_data_R.is_error()) {
auto P = td::PromiseCreator::lambda(
[name = local_config_, new_name = config_file_, promise = std::move(promise)](td::Result<td::Unit> R) {
@ -620,12 +626,15 @@ void DhtServer::load_config(td::Promise<td::Unit> promise) {
void DhtServer::write_config(td::Promise<td::Unit> promise) {
auto s = td::json_encode<std::string>(td::ToJson(*config_.tl().get()), true);
auto S = td::write_file(config_file_, s);
if (S.is_ok()) {
promise.set_value(td::Unit());
} else {
auto S = td::write_file(temp_config_file(), s);
if (S.is_error()) {
td::unlink(temp_config_file()).ignore();
promise.set_error(std::move(S));
return;
}
td::unlink(config_file_).ignore();
TRY_STATUS_PROMISE(promise, td::rename(temp_config_file(), config_file_));
promise.set_value(td::Unit());
}
td::Promise<ton::PublicKey> DhtServer::get_key_promise(td::MultiPromise::InitGuard &ig) {

View file

@ -109,6 +109,9 @@ class DhtServer : public td::actor::Actor {
std::string local_config_ = "";
std::string global_config_ = "ton-global.config";
std::string config_file_;
std::string temp_config_file() const {
return config_file_ + ".tmp";
}
std::string db_root_ = "/var/ton-work/db/";

View file

@ -155,10 +155,7 @@ class DhtMemberImpl : public DhtMember {
}
}
void add_full_node(DhtKeyId id, DhtNode node) override {
add_full_node_impl(id, std::move(node));
}
void add_full_node_impl(DhtKeyId id, DhtNode node, bool set_active = false);
void add_full_node(DhtKeyId id, DhtNode node, bool set_active) override;
adnl::AdnlNodeIdShort get_id() const override {
return id_;

View file

@ -34,24 +34,33 @@ namespace ton {
namespace dht {
void DhtQuery::send_queries() {
while (pending_queries_.size() > k_ * 2) {
pending_queries_.erase(--pending_queries_.end());
}
VLOG(DHT_EXTRA_DEBUG) << this << ": sending new queries. active=" << active_queries_ << " max_active=" << a_;
while (pending_ids_.size() > 0 && active_queries_ < a_) {
while (pending_queries_.size() > 0 && active_queries_ < a_) {
auto id_xor = *pending_queries_.begin();
if (result_list_.size() == k_ && *result_list_.rbegin() < id_xor) {
break;
}
active_queries_++;
auto id_xor = *pending_ids_.begin();
auto id = id_xor ^ key_;
VLOG(DHT_EXTRA_DEBUG) << this << ": sending " << get_name() << " query to " << id;
pending_ids_.erase(id_xor);
pending_queries_.erase(id_xor);
auto it = list_.find(id_xor);
CHECK(it != list_.end());
td::actor::send_closure(adnl_, &adnl::Adnl::add_peer, get_src(), it->second.adnl_id(), it->second.addr_list());
auto it = nodes_.find(id_xor);
CHECK(it != nodes_.end());
td::actor::send_closure(adnl_, &adnl::Adnl::add_peer, get_src(), it->second.node.adnl_id(),
it->second.node.addr_list());
send_one_query(id.to_adnl());
}
if (active_queries_ == 0) {
CHECK(pending_ids_.size() == 0);
pending_queries_.clear();
DhtNodesList list;
for (auto &node : list_) {
list.push_back(std::move(node.second));
for (auto id_xor : result_list_) {
auto it = nodes_.find(id_xor);
CHECK(it != nodes_.end());
list.push_back(it->second.node.clone());
}
CHECK(list.size() <= k_);
VLOG(DHT_EXTRA_DEBUG) << this << ": finalizing " << get_name() << " query. List size=" << list.size();
@ -65,30 +74,32 @@ void DhtQuery::add_nodes(DhtNodesList list) {
for (auto &node : list.list()) {
auto id = node.get_key();
auto id_xor = key_ ^ id;
if (list_.find(id_xor) != list_.end()) {
if (nodes_.find(id_xor) != nodes_.end()) {
continue;
}
td::actor::send_closure(node_, &DhtMember::add_full_node, id, node.clone());
VLOG(DHT_EXTRA_DEBUG) << this << ": " << get_name() << " query: adding " << id << " key";
td::actor::send_closure(node_, &DhtMember::add_full_node, id, node.clone(), false);
nodes_[id_xor].node = std::move(node);
pending_queries_.insert(id_xor);
}
}
DhtKeyId last_id_xor;
if (list_.size() > 0) {
last_id_xor = list_.rbegin()->first;
void DhtQuery::finish_query(adnl::AdnlNodeIdShort id, bool success) {
active_queries_--;
CHECK(active_queries_ <= k_);
auto id_xor = key_ ^ DhtKeyId(id);
if (success) {
result_list_.insert(id_xor);
if (result_list_.size() > k_) {
result_list_.erase(--result_list_.end());
}
if (list_.size() < k_ || id_xor < last_id_xor) {
list_[id_xor] = std::move(node);
pending_ids_.insert(id_xor);
if (list_.size() > k_) {
CHECK(id_xor != last_id_xor);
VLOG(DHT_EXTRA_DEBUG) << this << ": " << get_name() << " query: replacing " << (last_id_xor ^ key_)
<< " key with " << id;
pending_ids_.erase(last_id_xor);
list_.erase(last_id_xor);
} else {
VLOG(DHT_EXTRA_DEBUG) << this << ": " << get_name() << " query: adding " << id << " key";
}
} else {
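    // the query to this node failed: requeue it for another attempt unless it has already failed MAX_ATTEMPTS times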
NodeInfo &info = nodes_[id_xor];
if (++info.failed_attempts < MAX_ATTEMPTS) {
pending_queries_.insert(id_xor);
}
}
send_queries();
}
void DhtQueryFindNodes::send_one_query(adnl::AdnlNodeIdShort id) {
@ -111,7 +122,7 @@ void DhtQueryFindNodes::send_one_query(adnl::AdnlNodeIdShort id) {
void DhtQueryFindNodes::on_result(td::Result<td::BufferSlice> R, adnl::AdnlNodeIdShort dst) {
if (R.is_error()) {
VLOG(DHT_INFO) << this << ": failed find nodes query " << get_src() << "->" << dst << ": " << R.move_as_error();
finish_query();
finish_query(dst, false);
return;
}
@ -122,7 +133,7 @@ void DhtQueryFindNodes::on_result(td::Result<td::BufferSlice> R, adnl::AdnlNodeI
} else {
add_nodes(DhtNodesList{Res.move_as_ok(), our_network_id()});
}
finish_query();
finish_query(dst);
}
void DhtQueryFindNodes::finish(DhtNodesList list) {
@ -166,14 +177,14 @@ void DhtQueryFindValue::send_one_query_nodes(adnl::AdnlNodeIdShort id) {
void DhtQueryFindValue::on_result(td::Result<td::BufferSlice> R, adnl::AdnlNodeIdShort dst) {
if (R.is_error()) {
VLOG(DHT_INFO) << this << ": failed find value query " << get_src() << "->" << dst << ": " << R.move_as_error();
finish_query();
finish_query(dst, false);
return;
}
auto Res = fetch_tl_object<ton_api::dht_ValueResult>(R.move_as_ok(), true);
if (Res.is_error()) {
VLOG(DHT_WARNING) << this << ": dropping incorrect answer on dht.findValue query from " << dst << ": "
<< Res.move_as_error();
finish_query();
finish_query(dst, false);
return;
}
@ -210,26 +221,26 @@ void DhtQueryFindValue::on_result(td::Result<td::BufferSlice> R, adnl::AdnlNodeI
} else if (send_get_nodes) {
send_one_query_nodes(dst);
} else {
finish_query();
finish_query(dst);
}
}
void DhtQueryFindValue::on_result_nodes(td::Result<td::BufferSlice> R, adnl::AdnlNodeIdShort dst) {
if (R.is_error()) {
VLOG(DHT_INFO) << this << ": failed find nodes query " << get_src() << "->" << dst << ": " << R.move_as_error();
finish_query();
finish_query(dst, false);
return;
}
auto Res = fetch_tl_object<ton_api::dht_nodes>(R.move_as_ok(), true);
if (Res.is_error()) {
VLOG(DHT_WARNING) << this << ": dropping incorrect answer on dht.findNodes query from " << dst << ": "
<< Res.move_as_error();
finish_query();
finish_query(dst, false);
return;
}
auto r = Res.move_as_ok();
add_nodes(DhtNodesList{create_tl_object<ton_api::dht_nodes>(std::move(r->nodes_)), our_network_id()});
finish_query();
finish_query(dst);
}
void DhtQueryFindValue::finish(DhtNodesList list) {
@ -422,14 +433,14 @@ void DhtQueryRequestReversePing::send_one_query(adnl::AdnlNodeIdShort id) {
void DhtQueryRequestReversePing::on_result(td::Result<td::BufferSlice> R, adnl::AdnlNodeIdShort dst) {
if (R.is_error()) {
VLOG(DHT_INFO) << this << ": failed reverse ping query " << get_src() << "->" << dst << ": " << R.move_as_error();
finish_query();
finish_query(dst, false);
return;
}
auto Res = fetch_tl_object<ton_api::dht_ReversePingResult>(R.move_as_ok(), true);
if (Res.is_error()) {
VLOG(DHT_WARNING) << this << ": dropping incorrect answer on dht.requestReversePing query from " << dst << ": "
<< Res.move_as_error();
finish_query();
finish_query(dst, false);
return;
}
@ -441,7 +452,7 @@ void DhtQueryRequestReversePing::on_result(td::Result<td::BufferSlice> R, adnl::
},
[&](ton_api::dht_clientNotFound &v) {
add_nodes(DhtNodesList{std::move(v.nodes_), our_network_id()});
finish_query();
finish_query(dst);
}));
}

View file

@ -63,11 +63,7 @@ class DhtQuery : public td::actor::Actor {
}
void send_queries();
void add_nodes(DhtNodesList list);
void finish_query() {
active_queries_--;
CHECK(active_queries_ <= k_);
send_queries();
}
void finish_query(adnl::AdnlNodeIdShort id, bool success = true);
DhtKeyId get_key() const {
return key_;
}
@ -88,16 +84,22 @@ class DhtQuery : public td::actor::Actor {
virtual std::string get_name() const = 0;
private:
struct NodeInfo {
DhtNode node;
int failed_attempts = 0;
};
DhtMember::PrintId print_id_;
adnl::AdnlNodeIdShort src_;
std::map<DhtKeyId, DhtNode> list_;
std::set<DhtKeyId> pending_ids_;
std::map<DhtKeyId, NodeInfo> nodes_;
std::set<DhtKeyId> result_list_, pending_queries_;
td::uint32 k_;
td::uint32 a_;
td::int32 our_network_id_;
td::actor::ActorId<DhtMember> node_;
td::uint32 active_queries_ = 0;
static const int MAX_ATTEMPTS = 1;
protected:
td::actor::ActorId<adnl::Adnl> adnl_;
};
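
The header change above replaces the old `list_` / `pending_ids_` pair with a `nodes_` map that remembers `failed_attempts` per node, a `pending_queries_` set ordered by XOR distance, and a `result_list_` capped at `k_` nodes that actually answered (the patch sets `MAX_ATTEMPTS = 1`, so a failed node is simply dropped). Below is a minimal standalone sketch of that bookkeeping, assuming simplified stand-in types (`Distance`, `NodeRecord`, `KademliaQuery`) rather than the real TON classes; the retry limit is set to 2 here only to exercise the retry branch.

```cpp
// Minimal sketch of the query bookkeeping above. Distance, NodeRecord and
// KademliaQuery are simplified stand-ins, not the actual TON/DHT classes.
#include <cstdint>
#include <iostream>
#include <map>
#include <set>

using Distance = std::uint64_t;  // stand-in for DhtKeyId ^ key_

struct NodeRecord {
  int failed_attempts = 0;  // mirrors NodeInfo::failed_attempts
};

class KademliaQuery {
 public:
  KademliaQuery(std::size_t k, int max_attempts) : k_(k), max_attempts_(max_attempts) {}

  // A newly discovered node becomes a pending query target.
  void add_node(Distance d) {
    if (nodes_.emplace(d, NodeRecord{}).second) {
      pending_.insert(d);
    }
  }

  // Pop the closest pending node; stop if the k-th best confirmed result is
  // already closer than anything still pending (the `break` in send_queries).
  bool next_target(Distance &d) {
    if (pending_.empty()) {
      return false;
    }
    d = *pending_.begin();
    if (results_.size() == k_ && *results_.rbegin() < d) {
      return false;
    }
    pending_.erase(pending_.begin());
    return true;
  }

  // On success keep at most k closest responders; on failure re-queue the
  // node until it exceeds max_attempts (the patch uses MAX_ATTEMPTS == 1).
  void finish(Distance d, bool success) {
    if (success) {
      results_.insert(d);
      if (results_.size() > k_) {
        results_.erase(--results_.end());
      }
    } else if (++nodes_[d].failed_attempts < max_attempts_) {
      pending_.insert(d);
    }
  }

  const std::set<Distance> &results() const { return results_; }

 private:
  std::size_t k_;
  int max_attempts_;
  std::map<Distance, NodeRecord> nodes_;
  std::set<Distance> pending_, results_;
};

int main() {
  KademliaQuery q(/*k=*/3, /*max_attempts=*/2);
  for (Distance d : {40, 10, 30, 20, 50}) {
    q.add_node(d);
  }
  Distance d;
  while (q.next_target(d)) {
    q.finish(d, /*success=*/d != 10);  // pretend the node at distance 10 never answers
  }
  for (Distance r : q.results()) {
    std::cout << r << ' ';  // prints the three closest responders: 20 30 40
  }
  std::cout << '\n';
}
```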

View file

@ -57,7 +57,7 @@ td::Result<td::actor::ActorOwn<Dht>> Dht::create(adnl::AdnlNodeIdShort id, std::
for (auto &node : nodes.list()) {
auto key = node.get_key();
td::actor::send_closure(D, &DhtMember::add_full_node, key, node.clone());
td::actor::send_closure(D, &DhtMember::add_full_node, key, node.clone(), true);
}
return std::move(D);
}
@ -74,7 +74,7 @@ td::Result<td::actor::ActorOwn<Dht>> Dht::create_client(adnl::AdnlNodeIdShort id
for (auto &node : nodes.list()) {
auto key = node.get_key();
td::actor::send_closure(D, &DhtMember::add_full_node, key, node.clone());
td::actor::send_closure(D, &DhtMember::add_full_node, key, node.clone(), true);
}
return std::move(D);
}
@ -368,7 +368,7 @@ void DhtMemberImpl::receive_query(adnl::AdnlNodeIdShort src, td::BufferSlice dat
auto node = N.move_as_ok();
if (node.adnl_id().compute_short_id() == src) {
auto key = node.get_key();
add_full_node_impl(key, std::move(node), true);
add_full_node(key, std::move(node), true);
} else {
VLOG(DHT_WARNING) << this << ": dropping bad node: unexpected adnl id";
}
@ -398,7 +398,7 @@ void DhtMemberImpl::receive_query(adnl::AdnlNodeIdShort src, td::BufferSlice dat
ton_api::downcast_call(*Q, [&](auto &object) { this->process_query(src, object, std::move(promise)); });
}
void DhtMemberImpl::add_full_node_impl(DhtKeyId key, DhtNode node, bool set_active) {
void DhtMemberImpl::add_full_node(DhtKeyId key, DhtNode node, bool set_active) {
VLOG(DHT_EXTRA_DEBUG) << this << ": adding full node " << key;
auto eid = key ^ key_;
@ -466,7 +466,7 @@ void DhtMemberImpl::set_value(DhtValue value, td::Promise<td::Unit> promise) {
void DhtMemberImpl::get_value_in(DhtKeyId key, td::Promise<DhtValue> result) {
auto P = td::PromiseCreator::lambda([key, promise = std::move(result), SelfId = actor_id(this), print_id = print_id(),
adnl = adnl_, list = get_nearest_nodes(key, k_), k = k_, a = a_,
adnl = adnl_, list = get_nearest_nodes(key, k_ * 2), k = k_, a = a_,
network_id = network_id_, id = id_,
client_only = client_only_](td::Result<DhtNode> R) mutable {
R.ensure();
@ -485,7 +485,7 @@ void DhtMemberImpl::register_reverse_connection(adnl::AdnlNodeIdFull client, td:
auto key_id = get_reverse_connection_key(client_short).compute_key_id();
td::actor::send_closure(keyring_, &keyring::Keyring::sign_message, client_short.pubkey_hash(),
register_reverse_connection_to_sign(client_short, id_, ttl),
[=, print_id = print_id(), list = get_nearest_nodes(key_id, k_), SelfId = actor_id(this),
[=, print_id = print_id(), list = get_nearest_nodes(key_id, k_ * 2), SelfId = actor_id(this),
promise = std::move(promise)](td::Result<td::BufferSlice> R) mutable {
TRY_RESULT_PROMISE_PREFIX(promise, signature, std::move(R), "Failed to sign: ");
td::actor::send_closure(SelfId, &DhtMemberImpl::get_self_node,
@ -532,7 +532,7 @@ void DhtMemberImpl::request_reverse_ping_cont(adnl::AdnlNode target, td::BufferS
}
auto key_id = get_reverse_connection_key(client).compute_key_id();
get_self_node([=, target = std::move(target), signature = std::move(signature), promise = std::move(promise),
SelfId = actor_id(this), print_id = print_id(), list = get_nearest_nodes(key_id, k_),
SelfId = actor_id(this), print_id = print_id(), list = get_nearest_nodes(key_id, k_ * 2),
client_only = client_only_](td::Result<DhtNode> R) mutable {
R.ensure();
td::actor::create_actor<DhtQueryRequestReversePing>(
@ -651,8 +651,8 @@ void DhtMemberImpl::check() {
DhtKeyId key{x};
auto P = td::PromiseCreator::lambda([key, promise = std::move(promise), SelfId = actor_id(this),
print_id = print_id(), adnl = adnl_, list = get_nearest_nodes(key, k_), k = k_,
a = a_, network_id = network_id_, id = id_,
print_id = print_id(), adnl = adnl_, list = get_nearest_nodes(key, k_ * 2),
k = k_, a = a_, network_id = network_id_, id = id_,
client_only = client_only_](td::Result<DhtNode> R) mutable {
R.ensure();
td::actor::create_actor<DhtQueryFindNodes>("FindNodesQuery", key, print_id, id, std::move(list), k, a, network_id,
@ -677,8 +677,8 @@ void DhtMemberImpl::send_store(DhtValue value, td::Promise<td::Unit> promise) {
auto key_id = value.key_id();
auto P = td::PromiseCreator::lambda([value = std::move(value), print_id = print_id(), id = id_,
client_only = client_only_, list = get_nearest_nodes(key_id, k_), k = k_, a = a_,
network_id = network_id_, SelfId = actor_id(this), adnl = adnl_,
client_only = client_only_, list = get_nearest_nodes(key_id, k_ * 2), k = k_,
a = a_, network_id = network_id_, SelfId = actor_id(this), adnl = adnl_,
promise = std::move(promise)](td::Result<DhtNode> R) mutable {
R.ensure();
td::actor::create_actor<DhtQueryStore>("StoreQuery", std::move(value), print_id, id, std::move(list), k, a,

View file

@ -95,7 +95,7 @@ class DhtMember : public Dht {
//virtual void update_addr_list(tl_object_ptr<ton_api::adnl_addressList> addr_list) = 0;
//virtual void add_node(adnl::AdnlNodeIdShort id) = 0;
virtual void add_full_node(DhtKeyId id, DhtNode node) = 0;
virtual void add_full_node(DhtKeyId id, DhtNode node, bool set_active) = 0;
virtual void receive_ping(DhtKeyId id, DhtNode result) = 0;

View file

@ -103,6 +103,7 @@ const char *transaction_emulator_emulate_transaction(void *transaction_emulator,
td::Ref<vm::CellSlice> addr_slice;
auto account_slice = vm::load_cell_slice(shard_account.account);
bool account_exists = block::gen::t_Account.get_tag(account_slice) == block::gen::Account::account;
if (block::gen::t_Account.get_tag(account_slice) == block::gen::Account::account_none) {
if (msg_tag == block::gen::CommonMsgInfo::ext_in_msg_info) {
block::gen::CommonMsgInfo::Record_ext_in_msg_info info;
@ -120,12 +121,14 @@ const char *transaction_emulator_emulate_transaction(void *transaction_emulator,
} else {
ERROR_RESPONSE(PSTRING() << "Only ext in and int message are supported");
}
} else {
} else if (block::gen::t_Account.get_tag(account_slice) == block::gen::Account::account) {
block::gen::Account::Record_account account_record;
if (!tlb::unpack(account_slice, account_record)) {
ERROR_RESPONSE(PSTRING() << "Can't unpack account cell");
}
addr_slice = std::move(account_record.addr);
} else {
ERROR_RESPONSE(PSTRING() << "Can't parse account cell");
}
ton::WorkchainId wc;
ton::StdSmcAddress addr;
@ -139,8 +142,16 @@ const char *transaction_emulator_emulate_transaction(void *transaction_emulator,
now = (unsigned)std::time(nullptr);
}
bool is_special = wc == ton::masterchainId && emulator->get_config().is_special_smartcontract(addr);
if (!account.unpack(vm::load_cell_slice_ref(shard_account_cell.move_as_ok()), now, is_special)) {
ERROR_RESPONSE(PSTRING() << "Can't unpack shard account");
if (account_exists) {
if (!account.unpack(vm::load_cell_slice_ref(shard_account_cell.move_as_ok()), now, is_special)) {
ERROR_RESPONSE(PSTRING() << "Can't unpack shard account");
}
} else {
if (!account.init_new(now)) {
ERROR_RESPONSE(PSTRING() << "Can't init new account");
}
account.last_trans_lt_ = shard_account.last_trans_lt;
account.last_trans_hash_ = shard_account.last_trans_hash;
}
auto result = emulator->emulate_transaction(std::move(account), message_cell, now, 0, block::transaction::Transaction::tr_ord);

View file

@ -42,6 +42,7 @@ td::Result<std::unique_ptr<TransactionEmulator::EmulationResult>> TransactionEmu
if (!lt) {
lt = (account.last_trans_lt_ / block::ConfigInfo::get_lt_align() + 1) * block::ConfigInfo::get_lt_align(); // next block after account_.last_trans_lt_
}
account.block_lt = lt - lt % block::ConfigInfo::get_lt_align();
compute_phase_cfg.libraries = std::make_unique<vm::Dictionary>(libraries_);
compute_phase_cfg.ignore_chksig = ignore_chksig_;
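
A small worked example of the lt rounding introduced above may help; the alignment value 1,000,000 is an assumption for illustration (the code takes it from `block::ConfigInfo::get_lt_align()`), and the account numbers are hypothetical.

```cpp
// Worked example of the logical-time (lt) rounding shown above. The alignment
// value 1'000'000 is assumed here; the real code asks get_lt_align().
#include <cstdint>
#include <iostream>

int main() {
  const std::uint64_t lt_align = 1'000'000;

  // Case 1: the caller did not pass lt, so it is derived from the account state.
  std::uint64_t last_trans_lt = 34'567'890;                      // hypothetical last_trans_lt_
  std::uint64_t lt = (last_trans_lt / lt_align + 1) * lt_align;  // 35'000'000: next aligned block
  std::uint64_t block_lt = lt - lt % lt_align;                   // 35'000'000: already aligned
  std::cout << "derived lt=" << lt << " block_lt=" << block_lt << '\n';

  // Case 2: the caller passed an explicit, unaligned lt; block_lt snaps it
  // down to the start of the enclosing block.
  lt = 35'123'456;
  block_lt = lt - lt % lt_align;                                 // 35'000'000
  std::cout << "explicit lt=" << lt << " block_lt=" << block_lt << '\n';
}
```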

View file

@ -6,27 +6,9 @@ Prerequisite: installed Java and set environment variable JAVA_HOME.
```bash
git clone --recursive https://github.com/ton-blockchain/ton.git
cd ton
wget https://dl.google.com/android/repository/android-ndk-r25b-linux.zip
unzip android-ndk-r25b-linux.zip
export JAVA_AWT_LIBRARY=NotNeeded
export JAVA_JVM_LIBRARY=NotNeeded
export JAVA_INCLUDE_PATH=${JAVA_HOME}/include
export JAVA_AWT_INCLUDE_PATH=${JAVA_HOME}/include
export JAVA_INCLUDE_PATH2=${JAVA_HOME}/include/linux
export ANDROID_NDK_ROOT=$(pwd)/android-ndk-r25b
export OPENSSL_DIR=$(pwd)/example/android/third_party/crypto
export SECP256K1_INCLUDE_DIR=$(pwd)/example/android/third_party/secp256k1/include
export SECP256K1_LIBRARY=$(pwd)/example/android/third_party/secp256k1/.libs/libsecp256k1.a
export SODIUM_INCLUDE_DIR=$(pwd)/example/android/third_party/libsodium/libsodium-android-westmere/include
export SODIUM_LIBRARY=$(pwd)/example/android/third_party/libsodium/libsodium-android-westmere/lib/libsodium.a
rm -rf example/android/src/drinkless/org/ton/TonApi.java
cd example/android/
cmake -GNinja -DTON_ONLY_TONLIB=ON .
ninja prepare_cross_compiling
rm CMakeCache.txt
./build-all.sh
cp assembly/android/build-android-tonlib.sh .
chmod +x build-android-tonlib.sh
sudo -E ./build-android-tonlib.sh
```
# Generation of Tonlib libraries for iOS in Xcode

View file

@ -1,4 +1,6 @@
#!/bin/bash
echo ANDROID_NDK_ROOT = $ANDROID_NDK_ROOT
echo Building tonlib for x86...
echo
ARCH="x86" ./build.sh || exit 1

View file

@ -42,14 +42,22 @@ ARCH=$ABI
mkdir -p build-$ARCH
cd build-$ARCH
cmake .. -GNinja -DPORTABLE=1 \
-DANDROID_ABI=x86 -DANDROID_PLATFORM=android-32 -DANDROID_NDK=${ANDROID_NDK_ROOT} \
-DCMAKE_TOOLCHAIN_FILE=${ANDROID_NDK_ROOT}/build/cmake/android.toolchain.cmake \
-DCMAKE_BUILD_TYPE=Release -DANDROID_ABI=${ABI} \
-DOPENSSL_ROOT_DIR=${OPENSSL_DIR}/${ORIG_ARCH} -DTON_ARCH="" \
cmake .. -GNinja \
-DPORTABLE=1 \
-DTON_ONLY_TONLIB=ON \
-DSECP256K1_INCLUDE_DIR=${SECP256K1_INCLUDE_DIR} -DSECP256K1_LIBRARY=${SECP256K1_LIBRARY} \
-DSODIUM_INCLUDE_DIR=${SODIUM_INCLUDE_DIR} -DSODIUM_LIBRARY_RELEASE=${SODIUM_LIBRARY_RELEASE} \
-DTON_ARCH="" \
-DANDROID_ABI=x86 \
-DANDROID_PLATFORM=android-32 \
-DANDROID_NDK=${ANDROID_NDK_ROOT} \
-DCMAKE_TOOLCHAIN_FILE=${ANDROID_NDK_ROOT}/build/cmake/android.toolchain.cmake \
-DCMAKE_BUILD_TYPE=Release \
-DANDROID_ABI=${ABI} \
-DOPENSSL_ROOT_DIR=${OPENSSL_DIR}/${ORIG_ARCH} \
-DSECP256K1_FOUND=1 \
-DSECP256K1_INCLUDE_DIR=${SECP256K1_INCLUDE_DIR} \
-DSECP256K1_LIBRARY=${SECP256K1_LIBRARY} \
-DSODIUM_INCLUDE_DIR=${SODIUM_INCLUDE_DIR} \
-DSODIUM_LIBRARY_RELEASE=${SODIUM_LIBRARY_RELEASE} \
-DSODIUM_USE_STATIC_LIBS=1 \
-DBLST_LIB=${BLST_LIBRARY} || exit 1

View file

@ -1,6 +1,6 @@
cmake_minimum_required(VERSION 3.5 FATAL_ERROR)
add_library(lite-client-common lite-client-common.cpp lite-client-common.h)
add_library(lite-client-common STATIC lite-client-common.cpp lite-client-common.h)
target_link_libraries(lite-client-common PUBLIC tdutils tdactor adnllite tl_api tl_lite_api tl-lite-utils ton_crypto ton_block)
add_executable(lite-client lite-client.cpp lite-client.h)

View file

@ -93,17 +93,17 @@ void OverlayManager::create_public_overlay(adnl::AdnlNodeIdShort local_id, Overl
std::unique_ptr<Callback> callback, OverlayPrivacyRules rules,
td::string scope) {
create_public_overlay_ex(local_id, std::move(overlay_id), std::move(callback), std::move(rules), std::move(scope),
true);
{});
}
void OverlayManager::create_public_overlay_ex(adnl::AdnlNodeIdShort local_id, OverlayIdFull overlay_id,
std::unique_ptr<Callback> callback, OverlayPrivacyRules rules,
td::string scope, bool announce_self) {
td::string scope, OverlayOptions opts) {
CHECK(!dht_node_.empty());
auto id = overlay_id.compute_short_id();
register_overlay(local_id, id,
Overlay::create(keyring_, adnl_, actor_id(this), dht_node_, local_id, std::move(overlay_id),
std::move(callback), std::move(rules), scope, announce_self));
std::move(callback), std::move(rules), scope, std::move(opts)));
}
void OverlayManager::create_private_overlay(adnl::AdnlNodeIdShort local_id, OverlayIdFull overlay_id,

View file

@ -54,7 +54,7 @@ class OverlayManager : public Overlays {
std::unique_ptr<Callback> callback, OverlayPrivacyRules rules, td::string scope) override;
void create_public_overlay_ex(adnl::AdnlNodeIdShort local_id, OverlayIdFull overlay_id,
std::unique_ptr<Callback> callback, OverlayPrivacyRules rules, td::string scope,
bool announce_self) override;
OverlayOptions opts) override;
void create_private_overlay(adnl::AdnlNodeIdShort local_id, OverlayIdFull overlay_id,
std::vector<adnl::AdnlNodeIdShort> nodes, std::unique_ptr<Callback> callback,
OverlayPrivacyRules rules) override;

View file

@ -37,10 +37,10 @@ td::actor::ActorOwn<Overlay> Overlay::create(td::actor::ActorId<keyring::Keyring
td::actor::ActorId<OverlayManager> manager,
td::actor::ActorId<dht::Dht> dht_node, adnl::AdnlNodeIdShort local_id,
OverlayIdFull overlay_id, std::unique_ptr<Overlays::Callback> callback,
OverlayPrivacyRules rules, td::string scope, bool announce_self) {
OverlayPrivacyRules rules, td::string scope, OverlayOptions opts) {
auto R = td::actor::create_actor<OverlayImpl>("overlay", keyring, adnl, manager, dht_node, local_id,
std::move(overlay_id), true, std::vector<adnl::AdnlNodeIdShort>(),
std::move(callback), std::move(rules), scope, announce_self);
std::move(callback), std::move(rules), scope, opts);
return td::actor::ActorOwn<Overlay>(std::move(R));
}
@ -60,7 +60,7 @@ OverlayImpl::OverlayImpl(td::actor::ActorId<keyring::Keyring> keyring, td::actor
td::actor::ActorId<OverlayManager> manager, td::actor::ActorId<dht::Dht> dht_node,
adnl::AdnlNodeIdShort local_id, OverlayIdFull overlay_id, bool pub,
std::vector<adnl::AdnlNodeIdShort> nodes, std::unique_ptr<Overlays::Callback> callback,
OverlayPrivacyRules rules, td::string scope, bool announce_self)
OverlayPrivacyRules rules, td::string scope, OverlayOptions opts)
: keyring_(keyring)
, adnl_(adnl)
, manager_(manager)
@ -71,7 +71,8 @@ OverlayImpl::OverlayImpl(td::actor::ActorId<keyring::Keyring> keyring, td::actor
, public_(pub)
, rules_(std::move(rules))
, scope_(scope)
, announce_self_(announce_self) {
, announce_self_(opts.announce_self_)
, frequent_dht_lookup_(opts.frequent_dht_lookup_) {
overlay_id_ = id_full_.compute_short_id();
VLOG(OVERLAY_INFO) << this << ": creating " << (public_ ? "public" : "private");
@ -279,13 +280,13 @@ void OverlayImpl::alarm() {
send_random_peers(P->get_id(), {});
}
}
if (next_dht_query_.is_in_past()) {
if (next_dht_query_ && next_dht_query_.is_in_past()) {
next_dht_query_ = td::Timestamp::never();
auto P = td::PromiseCreator::lambda([SelfId = actor_id(this)](td::Result<dht::DhtValue> res) {
td::actor::send_closure(SelfId, &OverlayImpl::receive_dht_nodes, std::move(res), true);
});
td::actor::send_closure(dht_node_, &dht::Dht::get_value, dht::DhtKey{overlay_id_.pubkey_hash(), "nodes", 0},
std::move(P));
next_dht_query_ = td::Timestamp::in(td::Random::fast(60.0, 100.0));
}
if (update_db_at_.is_in_past()) {
if (peers_.size() > 0) {
@ -333,7 +334,13 @@ void OverlayImpl::receive_dht_nodes(td::Result<dht::DhtValue> res, bool dummy) {
VLOG(OVERLAY_NOTICE) << this << ": can not get value from DHT: " << res.move_as_error();
}
if (!(next_dht_store_query_ && next_dht_store_query_.is_in_past())) {
finish_dht_query();
return;
}
next_dht_store_query_ = td::Timestamp::never();
if (!announce_self_) {
finish_dht_query();
return;
}
@ -341,6 +348,7 @@ void OverlayImpl::receive_dht_nodes(td::Result<dht::DhtValue> res, bool dummy) {
auto P = td::PromiseCreator::lambda([SelfId = actor_id(this), oid = print_id()](td::Result<OverlayNode> R) {
if (R.is_error()) {
LOG(ERROR) << oid << "cannot get self node";
td::actor::send_closure(SelfId, &OverlayImpl::finish_dht_query);
return;
}
td::actor::send_closure(SelfId, &OverlayImpl::update_dht_nodes, R.move_as_ok());
@ -365,10 +373,11 @@ void OverlayImpl::update_dht_nodes(OverlayNode node) {
static_cast<td::uint32>(td::Clocks::system() + 3600), td::BufferSlice()};
value.check().ensure();
auto P = td::PromiseCreator::lambda([oid = print_id()](td::Result<td::Unit> res) {
auto P = td::PromiseCreator::lambda([SelfId = actor_id(this), oid = print_id()](td::Result<td::Unit> res) {
if (res.is_error()) {
VLOG(OVERLAY_NOTICE) << oid << ": error storing to DHT: " << res.move_as_error();
}
td::actor::send_closure(SelfId, &OverlayImpl::finish_dht_query);
});
td::actor::send_closure(dht_node_, &dht::Dht::set_value, std::move(value), std::move(P));

View file

@ -42,7 +42,7 @@ class Overlay : public td::actor::Actor {
td::actor::ActorId<OverlayManager> manager,
td::actor::ActorId<dht::Dht> dht_node, adnl::AdnlNodeIdShort local_id,
OverlayIdFull overlay_id, std::unique_ptr<Overlays::Callback> callback,
OverlayPrivacyRules rules, td::string scope, bool announce_self = true);
OverlayPrivacyRules rules, td::string scope, OverlayOptions opts = {});
static td::actor::ActorOwn<Overlay> create(td::actor::ActorId<keyring::Keyring> keyring,
td::actor::ActorId<adnl::Adnl> adnl,
td::actor::ActorId<OverlayManager> manager,

View file

@ -82,12 +82,17 @@ class OverlayPeer {
void on_ping_result(bool success) {
if (success) {
missed_pings_ = 0;
last_ping_at_ = td::Timestamp::now();
is_alive_ = true;
} else {
++missed_pings_;
if (missed_pings_ >= 3 && last_ping_at_.is_in_past(td::Timestamp::in(-15.0))) {
is_alive_ = false;
}
}
}
bool is_alive() const {
return missed_pings_ < 3;
return is_alive_;
}
td::uint32 throughput_out_bytes = 0;
@ -116,6 +121,8 @@ class OverlayPeer {
bool is_neighbour_ = false;
size_t missed_pings_ = 0;
bool is_alive_ = true;
td::Timestamp last_ping_at_ = td::Timestamp::now();
};
class OverlayImpl : public Overlay {
@ -124,7 +131,7 @@ class OverlayImpl : public Overlay {
td::actor::ActorId<OverlayManager> manager, td::actor::ActorId<dht::Dht> dht_node,
adnl::AdnlNodeIdShort local_id, OverlayIdFull overlay_id, bool pub,
std::vector<adnl::AdnlNodeIdShort> nodes, std::unique_ptr<Overlays::Callback> callback,
OverlayPrivacyRules rules, td::string scope = "{ \"type\": \"undefined\" }", bool announce_self = true);
OverlayPrivacyRules rules, td::string scope = "{ \"type\": \"undefined\" }", OverlayOptions opts = {});
void update_dht_node(td::actor::ActorId<dht::Dht> dht) override {
dht_node_ = dht;
}
@ -295,6 +302,17 @@ class OverlayImpl : public Overlay {
void del_peer(adnl::AdnlNodeIdShort id);
OverlayPeer *get_random_peer(bool only_alive = false);
void finish_dht_query() {
if (!next_dht_store_query_) {
next_dht_store_query_ = td::Timestamp::in(td::Random::fast(60.0, 100.0));
}
if (frequent_dht_lookup_ && peers_.size() == bad_peers_.size()) {
next_dht_query_ = td::Timestamp::in(td::Random::fast(6.0, 10.0));
} else {
next_dht_query_ = next_dht_store_query_;
}
}
td::actor::ActorId<keyring::Keyring> keyring_;
td::actor::ActorId<adnl::Adnl> adnl_;
td::actor::ActorId<OverlayManager> manager_;
@ -305,6 +323,7 @@ class OverlayImpl : public Overlay {
td::DecTree<adnl::AdnlNodeIdShort, OverlayPeer> peers_;
td::Timestamp next_dht_query_ = td::Timestamp::in(1.0);
td::Timestamp next_dht_store_query_ = td::Timestamp::in(1.0);
td::Timestamp update_db_at_;
td::Timestamp update_throughput_at_;
td::Timestamp last_throughput_update_;
@ -367,6 +386,7 @@ class OverlayImpl : public Overlay {
OverlayPrivacyRules rules_;
td::string scope_;
bool announce_self_ = true;
bool frequent_dht_lookup_ = false;
std::map<PublicKeyHash, std::shared_ptr<Certificate>> certs_;
class CachedEncryptor : public td::ListNode {
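
The `OverlayPeer` changes above make liveness stickier: a peer is declared dead only after at least three missed pings and no successful ping for 15 seconds, instead of after three missed pings alone. A standalone sketch of that rule, with simplified names and `std::chrono` standing in for `td::Timestamp`:

```cpp
// Sketch of the peer-liveness rule introduced above: a peer goes dead only
// after >= 3 missed pings AND its last successful ping is older than 15 s.
// Names and clock handling are simplified stand-ins, not the td/ton classes.
#include <chrono>
#include <iostream>

using Clock = std::chrono::steady_clock;

class PeerLiveness {
 public:
  void on_ping_result(bool success, Clock::time_point now = Clock::now()) {
    if (success) {
      missed_pings_ = 0;
      last_ping_at_ = now;
      alive_ = true;
    } else if (++missed_pings_ >= 3 && now - last_ping_at_ > std::chrono::seconds(15)) {
      alive_ = false;
    }
  }
  bool is_alive() const { return alive_; }

 private:
  int missed_pings_ = 0;
  bool alive_ = true;
  Clock::time_point last_ping_at_ = Clock::now();
};

int main() {
  PeerLiveness peer;
  auto t0 = Clock::now();
  // Three quick failures alone do not kill the peer...
  for (int i = 0; i < 3; i++) {
    peer.on_ping_result(false, t0 + std::chrono::seconds(i));
  }
  std::cout << std::boolalpha << peer.is_alive() << '\n';  // true
  // ...but a further failure 20 seconds after the last success does.
  peer.on_ping_result(false, t0 + std::chrono::seconds(20));
  std::cout << peer.is_alive() << '\n';                    // false
}
```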

View file

@ -158,6 +158,11 @@ class Certificate {
td::SharedSlice signature_;
};
struct OverlayOptions {
bool announce_self_ = true;
bool frequent_dht_lookup_ = false;
};
class Overlays : public td::actor::Actor {
public:
class Callback {
@ -197,7 +202,7 @@ class Overlays : public td::actor::Actor {
td::string scope) = 0;
virtual void create_public_overlay_ex(adnl::AdnlNodeIdShort local_id, OverlayIdFull overlay_id,
std::unique_ptr<Callback> callback, OverlayPrivacyRules rules,
td::string scope, bool announce_self) = 0;
td::string scope, OverlayOptions opts) = 0;
virtual void create_private_overlay(adnl::AdnlNodeIdShort local_id, OverlayIdFull overlay_id,
std::vector<adnl::AdnlNodeIdShort> nodes, std::unique_ptr<Callback> callback,
OverlayPrivacyRules rules) = 0;

View file

@ -1,10 +1,11 @@
## 2023.12 Update
## 2024.01 Update
1. Optimized message queue handling: queue cleaning speed no longer depends on total queue size
* Cleaning delivered messages using lt augmentation instead of random search / consecutive walk
* Keeping the root cell of a queue message in memory until it becomes outdated (caching)
2. Changes to block collation/validation limits
3. Stop accepting new external messages if the message queue is overloaded
4. Introducing conditions for shard split/merge based on queue size
1. Fixes in how gas spent by transactions on special accounts is counted towards the block limit. Previously, this gas was counted as usual, so to conduct elections (which cost >30m gas) the masterchain block limit had to be set to 37m gas. To lower the limit for safety reasons, it is proposed not to count gas on special accounts. Besides, `gas_max` is set to `special_gas_limit` for all types of transactions on special accounts. The new behavior is activated by setting `gas_prices_v3` in `ConfigParam 20;`.
* In addition, the update of the config temporarily increases the gas limit on `EQD_v9j1rlsuHHw2FIhcsCFFSD367ldfDdCKcsNmNpIRzUlu` to `special_gas_limit`; see [details](https://t.me/tonstatus/88).
2. Improvements in LS behavior
* Improved detection of the state with all shards applied to decrease the rate of the `Block is not applied` error
* Better error logs: separation of `block not in db` and `block is not applied`
* Fixed an error in proof generation for blocks after merge
3. Improvements in DHT operation and storage, CellDb, config.json amendment, peer misbehavior detection, validator session stats collection, and the emulator.
Read [more](https://blog.ton.org/technical-report-december-5-inscriptions-launch-on-ton) on this update.
Besides the work of the core team, this update is based on the efforts of @XaBbl4 (peer misbehavior detection).

View file

@ -54,6 +54,7 @@
#include "git.h"
#include "td/utils/BufferedFd.h"
#include "common/delay.h"
#include "td/utils/port/path.h"
#include "tonlib/tonlib/TonlibClientWrapper.h"
#include "DNSResolver.h"
@ -920,6 +921,12 @@ class RldpHttpProxy : public td::actor::Actor {
}
void run() {
if (!db_root_.empty()) {
td::mkpath(db_root_ + "/").ensure();
} else if (!is_client_) {
LOG(ERROR) << "DB root is required for server proxy";
std::_Exit(2);
}
keyring_ = ton::keyring::Keyring::create(is_client_ ? std::string("") : (db_root_ + "/keyring"));
{
auto S = load_global_config();
@ -955,9 +962,16 @@ class RldpHttpProxy : public td::actor::Actor {
auto conf_dataR = td::read_file(global_config_);
conf_dataR.ensure();
ton::tl_object_ptr<tonlib_api::KeyStoreType> key_store;
if (db_root_.empty()) {
key_store = tonlib_api::make_object<tonlib_api::keyStoreTypeInMemory>();
} else {
td::mkpath(db_root_ + "/tonlib-cache/").ensure();
key_store = tonlib_api::make_object<tonlib_api::keyStoreTypeDirectory>(db_root_ + "/tonlib-cache/");
}
auto tonlib_options = tonlib_api::make_object<tonlib_api::options>(
tonlib_api::make_object<tonlib_api::config>(conf_dataR.move_as_ok().as_slice().str(), "", false, false),
tonlib_api::make_object<tonlib_api::keyStoreTypeInMemory>());
std::move(key_store));
tonlib_client_ = td::actor::create_actor<tonlib::TonlibClientWrapper>("tonlibclient", std::move(tonlib_options));
dns_resolver_ = td::actor::create_actor<DNSResolver>("dnsresolver", tonlib_client_.get());
}

View file

@ -25,6 +25,7 @@
#include "td/utils/overloaded.h"
#include "td/utils/Random.h"
#include "vm/boc.h"
#include "common/delay.h"
namespace ton {
@ -119,9 +120,9 @@ void PeerActor::on_get_piece_result(PartId piece_id, td::Result<td::BufferSlice>
return std::move(res);
}();
if (res.is_error()) {
LOG(DEBUG) << "getPiece " << piece_id << "query: " << res.error();
LOG(DEBUG) << "getPiece " << piece_id << " query: " << res.error();
} else {
LOG(DEBUG) << "getPiece " << piece_id << "query: OK";
LOG(DEBUG) << "getPiece " << piece_id << " query: OK";
}
state_->node_queries_results_.add_element(std::make_pair(piece_id, std::move(res)));
notify_node();
@ -343,11 +344,20 @@ void PeerActor::loop_node_get_piece() {
}
auto piece_size =
std::min<td::uint64>(torrent_info_->piece_size, torrent_info_->file_size - part * torrent_info_->piece_size);
td::actor::send_closure(state_->speed_limiters_.download, &SpeedLimiter::enqueue, (double)piece_size,
td::Timestamp::in(3.0), [part, SelfId = actor_id(this)](td::Result<td::Unit> R) {
td::actor::send_closure(SelfId, &PeerActor::node_get_piece_query_ready, part,
std::move(R));
});
td::Timestamp timeout = td::Timestamp::in(3.0);
td::actor::send_closure(
state_->speed_limiters_.download, &SpeedLimiter::enqueue, (double)piece_size, timeout,
[=, SelfId = actor_id(this)](td::Result<td::Unit> R) {
if (R.is_ok()) {
td::actor::send_closure(SelfId, &PeerActor::node_get_piece_query_ready, part, std::move(R));
} else {
delay_action(
[=, R = std::move(R)]() mutable {
td::actor::send_closure(SelfId, &PeerActor::node_get_piece_query_ready, part, std::move(R));
},
timeout);
}
});
}
}

View file

@ -143,9 +143,11 @@ class PeerManager : public td::actor::Actor {
td::actor::ActorId<PeerManager> peer_manager_;
ton::adnl::AdnlNodeIdShort dst_;
};
ton::overlay::OverlayOptions opts;
opts.announce_self_ = !client_mode_;
opts.frequent_dht_lookup_ = true;
send_closure(overlays_, &ton::overlay::Overlays::create_public_overlay_ex, src_id, overlay_id_.clone(),
std::make_unique<Callback>(actor_id(this), src_id), rules, R"({ "type": "storage" })",
!client_mode_);
std::make_unique<Callback>(actor_id(this), src_id), rules, R"({ "type": "storage" })", opts);
}
promise.set_value({});
}

View file

@ -16,6 +16,7 @@
*/
#include "SpeedLimiter.h"
#include "common/errorcode.h"
namespace ton {
@ -41,11 +42,11 @@ void SpeedLimiter::enqueue(double size, td::Timestamp timeout, td::Promise<td::U
return;
}
if (max_speed_ == 0.0) {
promise.set_error(td::Status::Error("Speed limit is 0"));
promise.set_error(td::Status::Error(ErrorCode::timeout, "Speed limit is 0"));
return;
}
if (timeout < unlock_at_) {
promise.set_error(td::Status::Error("Timeout caused by speed limit"));
promise.set_error(td::Status::Error(ErrorCode::timeout, "Timeout caused by speed limit"));
return;
}
if (queue_.empty() && unlock_at_.is_in_past()) {

View file

@ -927,9 +927,11 @@ class StorageDaemon : public td::actor::Actor {
}
auto r_conf_data = td::read_file(global_config_);
r_conf_data.ensure();
std::string key_store = db_root_ + "/tonlib/";
td::mkpath(key_store).ensure();
auto tonlib_options = tonlib_api::make_object<tonlib_api::options>(
tonlib_api::make_object<tonlib_api::config>(r_conf_data.move_as_ok().as_slice().str(), "", false, false),
tonlib_api::make_object<tonlib_api::keyStoreTypeInMemory>());
tonlib_api::make_object<tonlib_api::keyStoreTypeDirectory>(key_store));
tonlib_client_ = td::actor::create_actor<tonlib::TonlibClientWrapper>("tonlibclient", std::move(tonlib_options));
}

View file

@ -46,9 +46,11 @@ void TcpListener::start_up() {
}
void TcpListener::tear_down() {
// unsubscribe from socket updates
// nb: interface will be changed
td::actor::SchedulerContext::get()->get_poll().unsubscribe(server_socket_fd_.get_poll_info().get_pollable_fd_ref());
if (!server_socket_fd_.empty()) {
// unsubscribe from socket updates
// nb: interface will be changed
td::actor::SchedulerContext::get()->get_poll().unsubscribe(server_socket_fd_.get_poll_info().get_pollable_fd_ref());
}
}
void TcpListener::loop() {

View file

@ -158,9 +158,11 @@ void run_server(int from_port, int to_port, bool is_first, bool use_tcp) {
TEST(Net, PingPong) {
SET_VERBOSITY_LEVEL(VERBOSITY_NAME(ERROR));
int port1 = td::Random::fast(10000, 10999);
int port2 = td::Random::fast(11000, 11999);
for (auto use_tcp : {false, true}) {
auto a = td::thread([use_tcp] { run_server(8091, 8092, true, use_tcp); });
auto b = td::thread([use_tcp] { run_server(8092, 8091, false, use_tcp); });
auto a = td::thread([=] { run_server(port1, port2, true, use_tcp); });
auto b = td::thread([=] { run_server(port2, port1, false, use_tcp); });
a.join();
b.join();
}

View file

@ -76,12 +76,7 @@ class ConstAs {
} // namespace detail
// no std::is_trivially_copyable in libstdc++ before 5.0
#if __GLIBCXX__
#define TD_IS_TRIVIALLY_COPYABLE(T) __has_trivial_copy(T)
#else
#define TD_IS_TRIVIALLY_COPYABLE(T) std::is_trivially_copyable<T>::value
#endif
template <class ToT, class FromT,
std::enable_if_t<TD_IS_TRIVIALLY_COPYABLE(ToT) && TD_IS_TRIVIALLY_COPYABLE(FromT), int> = 0>

View file

@ -186,6 +186,7 @@ class CatChainInst : public td::actor::Actor {
void create_fork() {
auto height = height_ - 1; //td::Random::fast(0, height_ - 1);
LOG(WARNING) << "Creating fork, source_id=" << idx_ << ", height=" << height;
auto sum = prev_values_[height] + 1;
td::uint64 x[2];
@ -241,7 +242,8 @@ int main(int argc, char *argv[]) {
td::actor::send_closure(adnl, &ton::adnl::Adnl::register_network_manager, network_manager.get());
});
for (td::uint32 att = 0; att < 10; att++) {
for (td::uint32 att = 0; att < 20; att++) {
LOG(WARNING) << "Test #" << att;
nodes.resize(total_nodes);
scheduler.run_in_context([&] {
@ -296,7 +298,10 @@ int main(int argc, char *argv[]) {
std::cout << "value=" << n.get_actor_unsafe().value() << std::endl;
}
scheduler.run_in_context([&] { td::actor::send_closure(inst[0], &CatChainInst::create_fork); });
td::uint32 fork_cnt = att < 10 ? 1 : (att - 10) / 5 + 2;
for (td::uint32 idx = 0; idx < fork_cnt; ++idx) {
scheduler.run_in_context([&] { td::actor::send_closure(inst[idx], &CatChainInst::create_fork); });
}
t = td::Timestamp::in(1.0);
while (scheduler.run(1)) {
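
The `fork_cnt` expression above decides how many forks each test attempt creates; a trivial check of the mapping (attempts 0-9 create one fork, 10-14 two, 15-19 three):

```cpp
// Quick check of the fork_cnt formula used in the extended test loop above.
#include <cstdint>
#include <iostream>

int main() {
  for (std::uint32_t att = 0; att < 20; att++) {
    std::uint32_t fork_cnt = att < 10 ? 1 : (att - 10) / 5 + 2;
    std::cout << "att=" << att << " forks=" << fork_cnt << '\n';
  }
}
```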

View file

@ -742,11 +742,12 @@ http.server.config dhs:(vector http.server.dnsEntry) local_hosts:(vector http.se
---types---
validatorSession.statsProducer id:int256 block_status:int block_timestamp:long = validatorSession.StatsProducer;
validatorSession.statsProducer id:int256 candidate_id:int256 block_status:int block_timestamp:long comment:string = validatorSession.StatsProducer;
validatorSession.statsRound timestamp:long producers:(vector validatorSession.statsProducer) = validatorSession.StatsRound;
validatorSession.stats id:tonNode.blockId timestamp:long self:int256 creator:int256 total_validators:int total_weight:long
validatorSession.stats success:Bool id:tonNode.blockIdExt timestamp:long self:int256 session_id:int256 cc_seqno:int
creator:int256 total_validators:int total_weight:long
signatures:int signatures_weight:long approve_signatures:int approve_signatures_weight:long
first_round:int rounds:(vector validatorSession.statsRound) = validatorSession.Stats;

Binary file not shown.

View file

@ -467,15 +467,20 @@ TEST(Tonlib, KeysApi) {
make_object<tonlib_api::deleteKey>(make_object<tonlib_api::key>(key->public_key_, key->secret_.copy())))
.move_as_ok();
auto err1 = sync_send(client, make_object<tonlib_api::importKey>(
new_local_password.copy(), td::SecureString("wrong password"),
make_object<tonlib_api::exportedKey>(copy_word_list())))
.move_as_error();
auto err1 = sync_send(
client, make_object<tonlib_api::importKey>(new_local_password.copy(), td::SecureString("wrong password"),
make_object<tonlib_api::exportedKey>(copy_word_list())));
if (err1.is_ok()) {
if (err1.ok()->public_key_ != key->public_key_) {
err1 = td::Status::Error("imported key successfully, but the public key is different");
}
}
err1.ensure_error();
auto err2 =
sync_send(client, make_object<tonlib_api::importKey>(new_local_password.copy(), td::SecureString(),
make_object<tonlib_api::exportedKey>(copy_word_list())))
.move_as_error();
LOG(INFO) << err1 << " | " << err2;
make_object<tonlib_api::exportedKey>(copy_word_list())));
err2.ensure_error();
LOG(INFO) << err1.move_as_error() << " | " << err2.move_as_error();
auto imported_key =
sync_send(client, make_object<tonlib_api::importKey>(new_local_password.copy(), mnemonic_password.copy(),
make_object<tonlib_api::exportedKey>(copy_word_list())))

View file

@ -4995,6 +4995,8 @@ td::Status TonlibClient::do_request(const tonlib_api::importKey& request,
if (!request.exported_key_) {
return TonlibError::EmptyField("exported_key");
}
// Note: the mnemonic is considered valid if a certain hash starts with zero byte (see Mnemonic::is_basic_seed())
// Therefore, importKey with invalid password has 1/256 chance to return OK
TRY_RESULT(key, key_storage_.import_key(std::move(request.local_password_), std::move(request.mnemonic_password_),
KeyStorage::ExportedKey{std::move(request.exported_key_->word_list_)}));
TRY_RESULT(key_bytes, public_key_from_bytes(key.public_key.as_slice()));
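
The new comment above explains why a wrong-password import may still succeed: `Mnemonic::is_basic_seed()` only checks that the first byte of a derived hash is zero. A tiny illustration of that 1/256 acceptance rate, using a uniform random value as a stand-in for the real key-derivation hash:

```cpp
// Illustration of the 1/256 false-accept rate mentioned in the comment above.
// A uniform random value stands in for the derived seed hash, just to show
// that a "first byte == 0" test accepts roughly 1 in 256 inputs.
#include <cstdint>
#include <iostream>
#include <random>

int main() {
  std::mt19937_64 rng(42);
  const int total = 1 << 20;
  int accepted = 0;
  for (int i = 0; i < total; i++) {
    std::uint64_t derived = rng();           // stand-in for the derived hash
    if ((derived & 0xff) == 0) {
      accepted++;                            // "first byte is zero" acceptance
    }
  }
  std::cout << "accepted " << accepted << " of " << total
            << " (~1/" << total / accepted << ")\n";  // close to 1/256
}
```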

View file

@ -1595,6 +1595,12 @@ void ValidatorEngine::load_config(td::Promise<td::Unit> promise) {
config_file_ = db_root_ + "/config.json";
}
auto conf_data_R = td::read_file(config_file_);
if (conf_data_R.is_error()) {
conf_data_R = td::read_file(temp_config_file());
if (conf_data_R.is_ok()) {
td::rename(temp_config_file(), config_file_).ensure();
}
}
if (conf_data_R.is_error()) {
auto P = td::PromiseCreator::lambda(
[name = local_config_, new_name = config_file_, promise = std::move(promise)](td::Result<td::Unit> R) {
@ -1643,12 +1649,15 @@ void ValidatorEngine::load_config(td::Promise<td::Unit> promise) {
void ValidatorEngine::write_config(td::Promise<td::Unit> promise) {
auto s = td::json_encode<std::string>(td::ToJson(*config_.tl().get()), true);
auto S = td::write_file(config_file_, s);
if (S.is_ok()) {
promise.set_value(td::Unit());
} else {
auto S = td::write_file(temp_config_file(), s);
if (S.is_error()) {
td::unlink(temp_config_file()).ignore();
promise.set_error(std::move(S));
return;
}
td::unlink(config_file_).ignore();
TRY_STATUS_PROMISE(promise, td::rename(temp_config_file(), config_file_));
promise.set_value(td::Unit());
}
td::Promise<ton::PublicKey> ValidatorEngine::get_key_promise(td::MultiPromise::InitGuard &ig) {
@ -3775,11 +3784,15 @@ int main(int argc, char *argv[]) {
acts.push_back([&x, at]() { td::actor::send_closure(x, &ValidatorEngine::schedule_shutdown, (double)at); });
return td::Status::OK();
});
p.add_checked_option('\0', "celldb-compress-depth", "(default: 0)", [&](td::Slice arg) {
TRY_RESULT(value, td::to_integer_safe<td::uint32>(arg));
acts.push_back([&x, value]() { td::actor::send_closure(x, &ValidatorEngine::set_celldb_compress_depth, value); });
return td::Status::OK();
});
p.add_checked_option('\0', "celldb-compress-depth",
"optimize celldb by storing cells of depth X with whole subtrees (experimental, default: 0)",
[&](td::Slice arg) {
TRY_RESULT(value, td::to_integer_safe<td::uint32>(arg));
acts.push_back([&x, value]() {
td::actor::send_closure(x, &ValidatorEngine::set_celldb_compress_depth, value);
});
return td::Status::OK();
});
auto S = p.run(argc, argv);
if (S.is_error()) {
LOG(ERROR) << "failed to parse options: " << S.move_as_error();

View file

@ -1,4 +1,4 @@
/*
/*
This file is part of TON Blockchain source code.
TON Blockchain is free software; you can redistribute it and/or
@ -152,6 +152,9 @@ class ValidatorEngine : public td::actor::Actor {
std::string local_config_ = "";
std::string global_config_ = "ton-global.config";
std::string config_file_;
std::string temp_config_file() const {
return config_file_ + ".tmp";
}
std::string fift_dir_ = "";
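
The `write_config` / `load_config` changes above switch to a write-temp-then-rename scheme: the config is first written to `config.json.tmp`, the old file is unlinked, the temp file is renamed into place, and on startup a leftover `.tmp` is adopted if the main file is missing. A minimal sketch of the same pattern using only the standard library (names and helpers here are illustrative, not the engine's actual API):

```cpp
// Write-temp-then-rename sketch, mirroring the td::write_file / td::unlink /
// td::rename sequence above with std::filesystem.
#include <filesystem>
#include <fstream>
#include <iostream>
#include <iterator>
#include <string>

namespace fs = std::filesystem;

bool write_config_atomically(const std::string &path, const std::string &contents) {
  const std::string tmp = path + ".tmp";  // same ".tmp" convention as temp_config_file()
  std::error_code ec;
  {
    std::ofstream out(tmp, std::ios::trunc);
    out << contents;
    if (!out) {               // write failed: drop the partial temp file
      fs::remove(tmp, ec);
      return false;
    }
  }
  fs::remove(path, ec);       // like td::unlink(config_file_).ignore()
  fs::rename(tmp, path, ec);  // adopt the fully written file
  return !ec;
}

std::string load_config_with_recovery(const std::string &path) {
  // Load-side fallback: if the main file is missing but a complete ".tmp"
  // exists (e.g. a crash between unlink and rename), adopt the temp copy.
  std::error_code ec;
  if (!fs::exists(path) && fs::exists(path + ".tmp")) {
    fs::rename(path + ".tmp", path, ec);
  }
  std::ifstream in(path);
  return std::string(std::istreambuf_iterator<char>(in), {});
}

int main() {
  const std::string cfg = "config.json";  // illustrative path, not the engine's db layout
  if (write_config_atomically(cfg, "{ \"example\": true }\n")) {
    std::cout << load_config_with_recovery(cfg);
  }
}
```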

View file

@ -74,8 +74,10 @@ struct ValidatorSessionStats {
struct Producer {
PublicKeyHash id = PublicKeyHash::zero();
ValidatorSessionCandidateId candidate_id = ValidatorSessionCandidateId::zero();
int block_status = status_none;
td::uint64 block_timestamp = 0;
std::string comment;
};
struct Round {
td::uint64 timestamp = 0;
@ -85,6 +87,9 @@ struct ValidatorSessionStats {
td::uint32 first_round;
std::vector<Round> rounds;
bool success = false;
ValidatorSessionId session_id = ValidatorSessionId::zero();
CatchainSeqno cc_seqno = 0;
td::uint64 timestamp = 0;
PublicKeyHash self = PublicKeyHash::zero();
PublicKeyHash creator = PublicKeyHash::zero();

Some files were not shown because too many files have changed in this diff.