diff --git a/.github/script/amd64-18.04.Dockerfile b/.github/script/amd64-18.04.Dockerfile
deleted file mode 100644
index 3e98b026..00000000
--- a/.github/script/amd64-18.04.Dockerfile
+++ /dev/null
@@ -1,19 +0,0 @@
-FROM ubuntu:18.04
-
-RUN apt update
-RUN DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get -y install tzdata
-RUN apt install -y build-essential cmake clang openssl libssl-dev zlib1g-dev gperf wget git curl libreadline-dev ccache libmicrohttpd-dev ninja-build
-
-WORKDIR /
-
-ARG BRANCH
-RUN git clone --recurse-submodules https://github.com/ton-blockchain/ton.git && cd ton && git checkout $BRANCH
-
-WORKDIR /ton
-RUN mkdir /ton/build
-WORKDIR /ton/build
-ENV CC clang
-ENV CXX clang++
-ENV CCACHE_DISABLE 1
-RUN cmake -GNinja -DCMAKE_BUILD_TYPE=Release -DPORTABLE=1 -DTON_ARCH= -DCMAKE_CXX_FLAGS="-mavx2" ..
-RUN ninja storage-daemon storage-daemon-cli tonlibjson blockchain-explorer fift func validator-engine validator-engine-console create-state generate-random-id create-hardfork dht-server lite-client
\ No newline at end of file
diff --git a/.github/script/amd64-20.04.Dockerfile b/.github/script/amd64-20.04.Dockerfile
index ab71bbf2..cab9caa5 100644
--- a/.github/script/amd64-20.04.Dockerfile
+++ b/.github/script/amd64-20.04.Dockerfile
@@ -2,12 +2,13 @@ FROM ubuntu:20.04
RUN apt update
RUN DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get -y install tzdata
-RUN apt install -y build-essential cmake clang openssl libssl-dev zlib1g-dev gperf wget git curl libreadline-dev ccache libmicrohttpd-dev ninja-build
+RUN apt install -y build-essential cmake clang openssl libssl-dev zlib1g-dev gperf wget git curl libreadline-dev ccache libmicrohttpd-dev ninja-build pkg-config
WORKDIR /
ARG BRANCH
-RUN git clone --recurse-submodules https://github.com/ton-blockchain/ton.git && cd ton && git checkout $BRANCH
+ARG REPO
+RUN git clone --recurse-submodules https://github.com/$REPO && cd ton && git checkout $BRANCH
WORKDIR /ton
RUN mkdir /ton/build
diff --git a/.github/script/amd64-22.04.Dockerfile b/.github/script/amd64-22.04.Dockerfile
index 0479aa0b..1ed30e77 100644
--- a/.github/script/amd64-22.04.Dockerfile
+++ b/.github/script/amd64-22.04.Dockerfile
@@ -2,12 +2,13 @@ FROM ubuntu:22.04
RUN apt update
RUN DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get -y install tzdata
-RUN apt install -y build-essential cmake clang openssl libssl-dev zlib1g-dev gperf wget git curl libreadline-dev ccache libmicrohttpd-dev ninja-build
+RUN apt install -y build-essential cmake clang openssl libssl-dev zlib1g-dev gperf wget git curl libreadline-dev ccache libmicrohttpd-dev ninja-build pkg-config
WORKDIR /
ARG BRANCH
-RUN git clone --recurse-submodules https://github.com/ton-blockchain/ton.git && cd ton && git checkout $BRANCH
+ARG REPO
+RUN git clone --recurse-submodules https://github.com/$REPO && cd ton && git checkout $BRANCH
WORKDIR /ton
RUN mkdir /ton/build
diff --git a/.github/script/arm64-18.04.Dockerfile b/.github/script/arm64-18.04.Dockerfile
deleted file mode 100644
index 6c527a45..00000000
--- a/.github/script/arm64-18.04.Dockerfile
+++ /dev/null
@@ -1,19 +0,0 @@
-FROM ubuntu:18.04
-
-RUN apt update
-RUN DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get -y install tzdata
-RUN apt install -y build-essential cmake clang openssl libssl-dev zlib1g-dev gperf wget git curl libreadline-dev ccache libmicrohttpd-dev ninja-build
-
-WORKDIR /
-
-ARG BRANCH
-RUN git clone --recurse-submodules https://github.com/ton-blockchain/ton.git && cd ton && git checkout $BRANCH
-
-WORKDIR /ton
-RUN mkdir /ton/build
-WORKDIR /ton/build
-ENV CC clang
-ENV CXX clang++
-ENV CCACHE_DISABLE 1
-RUN cmake -GNinja -DCMAKE_BUILD_TYPE=Release -DPORTABLE=1 -DTON_ARCH= ..
-RUN ninja storage-daemon storage-daemon-cli tonlibjson blockchain-explorer fift func validator-engine validator-engine-console create-state generate-random-id dht-server lite-client
\ No newline at end of file
diff --git a/.github/script/arm64-20.04.Dockerfile b/.github/script/arm64-20.04.Dockerfile
index 7b2348fd..9066c0f4 100644
--- a/.github/script/arm64-20.04.Dockerfile
+++ b/.github/script/arm64-20.04.Dockerfile
@@ -2,12 +2,13 @@ FROM ubuntu:20.04
RUN apt update
RUN DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get -y install tzdata
-RUN apt install -y build-essential cmake clang openssl libssl-dev zlib1g-dev gperf wget git curl libreadline-dev ccache libmicrohttpd-dev ninja-build
+RUN apt install -y build-essential cmake clang openssl libssl-dev zlib1g-dev gperf wget git curl libreadline-dev ccache libmicrohttpd-dev ninja-build pkg-config
WORKDIR /
ARG BRANCH
-RUN git clone --recurse-submodules https://github.com/ton-blockchain/ton.git && cd ton && git checkout $BRANCH
+ARG REPO
+RUN git clone --recurse-submodules https://github.com/$REPO && cd ton && git checkout $BRANCH
WORKDIR /ton
RUN mkdir /ton/build
diff --git a/.github/script/arm64-22.04.Dockerfile b/.github/script/arm64-22.04.Dockerfile
index d0ea491b..e2cbfadc 100644
--- a/.github/script/arm64-22.04.Dockerfile
+++ b/.github/script/arm64-22.04.Dockerfile
@@ -2,12 +2,13 @@ FROM ubuntu:22.04
RUN apt update
RUN DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get -y install tzdata
-RUN apt install -y build-essential cmake clang openssl libssl-dev zlib1g-dev gperf wget git curl libreadline-dev ccache libmicrohttpd-dev ninja-build
+RUN apt install -y build-essential cmake clang openssl libssl-dev zlib1g-dev gperf wget git curl libreadline-dev ccache libmicrohttpd-dev ninja-build pkg-config
WORKDIR /
ARG BRANCH
-RUN git clone --recurse-submodules https://github.com/ton-blockchain/ton.git && cd ton && git checkout $BRANCH
+ARG REPO
+RUN git clone --recurse-submodules https://github.com/$REPO && cd ton && git checkout $BRANCH
WORKDIR /ton
RUN mkdir /ton/build
diff --git a/.github/script/fift-func-wasm-build-ubuntu.sh b/.github/script/fift-func-wasm-build-ubuntu.sh
index 505ce137..b02fe3ee 100755
--- a/.github/script/fift-func-wasm-build-ubuntu.sh
+++ b/.github/script/fift-func-wasm-build-ubuntu.sh
@@ -1,50 +1,48 @@
-# The script build funcfift compiler to WASM
+# The script builds the funcfift compiler to WASM
# dependencies:
-#sudo apt-get install -y build-essential git make cmake clang libgflags-dev zlib1g-dev libssl-dev libreadline-dev libmicrohttpd-dev pkg-config libgsl-dev python3 python3-dev python3-pip nodejs
+#sudo apt-get install -y build-essential git make cmake clang libgflags-dev zlib1g-dev libssl-dev libreadline-dev libmicrohttpd-dev pkg-config libgsl-dev python3 python3-dev python3-pip nodejs libevent-dev
export CC=$(which clang)
export CXX=$(which clang++)
export CCACHE_DISABLE=1
+cd ../..
+rm -rf openssl zlib emsdk build
+echo `pwd`
+
git clone https://github.com/openssl/openssl.git
cd openssl
git checkout OpenSSL_1_1_1j
-
./config
-make -j4
-
+make -j16
OPENSSL_DIR=`pwd`
-
cd ..
git clone https://github.com/madler/zlib.git
cd zlib
ZLIB_DIR=`pwd`
-
cd ..
-# clone ton repo
-git clone --recursive https://github.com/the-ton-tech/ton-blockchain.git
-
-# only to generate auto-block.cpp
-
-cd ton-blockchain
-git pull
-git checkout 1566a23b2bece49fd1de9ab2f35e88297d22829f
mkdir build
cd build
-cmake -DCMAKE_BUILD_TYPE=Release -DZLIB_LIBRARY=/usr/lib/x86_64-linux-gnu/libz.so -DZLIB_INCLUDE_DIR=$ZLIB_DIR -DOPENSSL_ROOT_DIR=$OPENSSL_DIR -DOPENSSL_INCLUDE_DIR=$OPENSSL_DIR/include -DOPENSSL_CRYPTO_LIBRARY=$OPENSSL_DIR/libcrypto.so -DOPENSSL_SSL_LIBRARY=$OPENSSL_DIR/libssl.so ..
-make -j4 fift
+cmake -GNinja -DCMAKE_BUILD_TYPE=Release -DZLIB_LIBRARY=/usr/lib/x86_64-linux-gnu/libz.so -DZLIB_INCLUDE_DIR=$ZLIB_DIR -DOPENSSL_ROOT_DIR=$OPENSSL_DIR -DOPENSSL_INCLUDE_DIR=$OPENSSL_DIR/include -DOPENSSL_CRYPTO_LIBRARY=$OPENSSL_DIR/libcrypto.so -DOPENSSL_SSL_LIBRARY=$OPENSSL_DIR/libssl.so -DTON_USE_ABSEIL=OFF ..
+
+test $? -eq 0 || { echo "Can't configure TON build"; exit 1; }
+
+
+ninja fift smc-envelope
+
+test $? -eq 0 || { echo "Can't compile fift "; exit 1; }
rm -rf *
-cd ../..
+cd ..
git clone https://github.com/emscripten-core/emsdk.git
cd emsdk
-./emsdk install latest
-./emsdk activate latest
+./emsdk install 3.1.19
+./emsdk activate 3.1.19
EMSDK_DIR=`pwd`
source $EMSDK_DIR/emsdk_env.sh
@@ -55,7 +53,8 @@ export CCACHE_DISABLE=1
cd ../zlib
emconfigure ./configure --static
-emmake make -j4
+emmake make -j16
+test $? -eq 0 || { echo "Can't compile zlib with emmake "; exit 1; }
ZLIB_DIR=`pwd`
cd ../openssl
@@ -66,14 +65,13 @@ sed -i 's/CROSS_COMPILE=.*/CROSS_COMPILE=/g' Makefile
sed -i 's/-ldl//g' Makefile
sed -i 's/-O3/-Os/g' Makefile
emmake make depend
-emmake make -j4
-
-cd ../ton-blockchain
-
-cd build
-
-emcmake cmake -DUSE_EMSCRIPTEN=ON -DCMAKE_BUILD_TYPE=Release -DZLIB_LIBRARY=$ZLIB_DIR/libz.a -DZLIB_INCLUDE_DIR=$ZLIB_DIR -DOPENSSL_ROOT_DIR=$OPENSSL_DIR -DOPENSSL_INCLUDE_DIR=$OPENSSL_DIR/include -DOPENSSL_CRYPTO_LIBRARY=$OPENSSL_DIR/libcrypto.a -DOPENSSL_SSL_LIBRARY=$OPENSSL_DIR/libssl.a -DCMAKE_TOOLCHAIN_FILE=$EMSDK_DIR/upstream/emscripten/cmake/Modules/Platform/Emscripten.cmake -DCMAKE_CXX_FLAGS="-pthread -sUSE_ZLIB=1" ..
+emmake make -j16
+test $? -eq 0 || { echo "Can't compile OpenSSL with emmake "; exit 1; }
+cd ../build
+emcmake cmake -DUSE_EMSCRIPTEN=ON -DCMAKE_BUILD_TYPE=Release -DZLIB_LIBRARY=$ZLIB_DIR/libz.a -DZLIB_INCLUDE_DIR=$ZLIB_DIR -DOPENSSL_ROOT_DIR=$OPENSSL_DIR -DOPENSSL_INCLUDE_DIR=$OPENSSL_DIR/include -DOPENSSL_CRYPTO_LIBRARY=$OPENSSL_DIR/libcrypto.a -DOPENSSL_SSL_LIBRARY=$OPENSSL_DIR/libssl.a -DCMAKE_TOOLCHAIN_FILE=$EMSDK_DIR/upstream/emscripten/cmake/Modules/Platform/Emscripten.cmake -DCMAKE_CXX_FLAGS="-sUSE_ZLIB=1" ..
+test $? -eq 0 || { echo "Can't configure TON with emcmake"; exit 1; }
cp -R ../crypto/smartcont ../crypto/fift/lib crypto
-emmake make -j4 funcfiftlib
+emmake make -j16 funcfiftlib func fift tlbc emulator-emscripten
+
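Side note on the repeated 'test $? -eq 0 || { echo ...; exit 1; }' guards added above: an equivalent, more compact way to stop on the first failure is bash strict mode. A minimal sketch of the idiom (not what the script uses, shown only for comparison):

    #!/bin/bash
    set -euo pipefail        # abort on the first failing command, unset variable, or failed pipeline stage
    cmake -GNinja ..         # if configuration fails, the script stops right here
    ninja fift smc-envelope  # same for the build step; no explicit $? checks needed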
diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml
index 089af583..85d0dc25 100644
--- a/.github/workflows/create-release.yml
+++ b/.github/workflows/create-release.yml
@@ -11,42 +11,69 @@ jobs:
steps:
- uses: actions/checkout@v3
- - name: Show all artifacts
- run: |
- mkdir artifacts
- ls -lart artifacts
-
- - name: Download Ubuntu x86-64 artifacts
+ - name: Download Linux x86-64 artifacts
uses: dawidd6/action-download-artifact@v2
with:
- workflow: ubuntu-compile.yml
+ workflow: ton-x86-64-linux.yml
path: artifacts
workflow_conclusion: success
skip_unpack: true
- - name: Download Ubuntu arm64 artifacts
+ - name: Download and unzip Linux x86-64 artifacts
uses: dawidd6/action-download-artifact@v2
with:
- workflow: docker-compile-ubuntu.yml
+ workflow: ton-x86-64-linux.yml
+ path: artifacts
+ workflow_conclusion: success
+ skip_unpack: false
+
+# - name: Download Linux arm64 artifacts
+# uses: dawidd6/action-download-artifact@v2
+# with:
+# workflow: ton-aarch64-linux.yml
+# path: artifacts
+# workflow_conclusion: success
+# skip_unpack: true
+#
+# - name: Download and unzip Linux arm64 artifacts
+# uses: dawidd6/action-download-artifact@v2
+# with:
+# workflow: ton-aarch64-linux.yml
+# path: artifacts
+# workflow_conclusion: success
+# skip_unpack: false
+
+ - name: Download Mac x86-64 artifacts
+ uses: dawidd6/action-download-artifact@v2
+ with:
+ workflow: ton-x86-64-macos.yml
path: artifacts
workflow_conclusion: success
skip_unpack: true
- - name: Download MacOS 11.7 artifacts
+ - name: Download and unzip Mac x86-64 artifacts
uses: dawidd6/action-download-artifact@v2
with:
- workflow: macos-11.7-compile.yml
+ workflow: ton-x86-64-macos.yml
path: artifacts
workflow_conclusion: success
- skip_unpack: true
+ skip_unpack: false
- - name: Download MacOS 12.6 artifacts
- uses: dawidd6/action-download-artifact@v2
- with:
- workflow: macos-12.6-compile.yml
- path: artifacts
- workflow_conclusion: success
- skip_unpack: true
+# - name: Download Mac arm64 artifacts
+# uses: dawidd6/action-download-artifact@v2
+# with:
+# workflow: ton-aarch64-macos.yml
+# path: artifacts
+# workflow_conclusion: success
+# skip_unpack: true
+#
+# - name: Download and unzip Mac arm64 artifacts
+# uses: dawidd6/action-download-artifact@v2
+# with:
+# workflow: ton-aarch64-macos.yml
+# path: artifacts
+# workflow_conclusion: success
+# skip_unpack: false
- name: Download Windows artifacts
uses: dawidd6/action-download-artifact@v2
@@ -56,9 +83,26 @@ jobs:
workflow_conclusion: success
skip_unpack: true
+ - name: Download and unzip Windows artifacts
+ uses: dawidd6/action-download-artifact@v2
+ with:
+ workflow: win-2019-compile.yml
+ path: artifacts
+ workflow_conclusion: success
+ skip_unpack: false
+
+ - name: Download WASM artifacts
+ uses: dawidd6/action-download-artifact@v2
+ with:
+ workflow: ton-wasm-emscripten.yml
+ path: artifacts
+ workflow_conclusion: success
+ skip_unpack: true
+
- name: Show all artifacts
run: |
tree artifacts
+
# create release
@@ -79,7 +123,7 @@ jobs:
- name: Get registration token
id: getRegToken
run: |
- curl -X POST -H \"Accept: application/vnd.github+json\" -H 'Authorization: token ${{ secrets.GITHUB_TOKEN }}' https://api.github.com/repos/neodix42/HardTestDevelopment/actions/runners/registration-token
+ curl -X POST -H \"Accept: application/vnd.github+json\" -H 'Authorization: token ${{ secrets.GITHUB_TOKEN }}' https://api.github.com/repos/ton-blockchain/ton/actions/runners/registration-token
- name: Create release
id: create_release
@@ -94,74 +138,250 @@ jobs:
draft: false
prerelease: false
+# upload
+
+# win
+
- name: Upload Windows 2019 artifacts
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-win-binaries.zip
- asset_name: ton-windows-2019-x86-64.zip
+ asset_name: ton-win-x86-64.zip
tag: v${{ steps.date.outputs.date }}
- - name: Upload MacOS 11.7 x86-64 artifacts
+ - name: Upload Windows 2019 single artifact - fift
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
- file: artifacts/ton-macos-11.7.zip
- asset_name: ton-macos-11.7-x86-64.zip
+ file: artifacts/ton-win-binaries/fift.exe
+ asset_name: fift.exe
tag: v${{ steps.date.outputs.date }}
- - name: Upload MacOS 12.6 x86-64 artifacts
+ - name: Upload Windows 2019 single artifact - func
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
- file: artifacts/ton-macos-12.6.zip
- asset_name: ton-macos-12.6-x86-64.zip
+ file: artifacts/ton-win-binaries/func.exe
+ asset_name: func.exe
tag: v${{ steps.date.outputs.date }}
- - name: Upload Ubuntu 18.04 x86-64 artifacts
+ - name: Upload Windows 2019 single artifact - lite-client
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
- file: artifacts/ton-binaries-ubuntu-18.04.zip
- asset_name: ton-ubuntu-18.04-x86-64.zip
+ file: artifacts/ton-win-binaries/lite-client.exe
+ asset_name: lite-client.exe
tag: v${{ steps.date.outputs.date }}
- - name: Upload Ubuntu 20.04 x86-64 artifacts
+ - name: Upload Windows 2019 single artifact - rldp-http-proxy
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
- file: artifacts/ton-binaries-ubuntu-20.04.zip
- asset_name: ton-ubuntu-20.04-x86-64.zip
+ file: artifacts/ton-win-binaries/rldp-http-proxy.exe
+ asset_name: rldp-http-proxy.exe
tag: v${{ steps.date.outputs.date }}
- - name: Upload Ubuntu 22.04 x86-64 artifacts
+ - name: Upload Windows 2019 single artifact - http-proxy
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
- file: artifacts/ton-binaries-ubuntu-22.04.zip
- asset_name: ton-ubuntu-22.04-x86-64.zip
+ file: artifacts/ton-win-binaries/http-proxy.exe
+ asset_name: http-proxy.exe
tag: v${{ steps.date.outputs.date }}
- - name: Upload Ubuntu 18.04 arm64 artifacts
+ - name: Upload Windows 2019 single artifact - storage-daemon-cli
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
- file: artifacts/ton-ubuntu-18.04-arm64.zip
- asset_name: ton-ubuntu-18.04-arm64.zip
+ file: artifacts/ton-win-binaries/storage-daemon-cli.exe
+ asset_name: storage-daemon-cli.exe
tag: v${{ steps.date.outputs.date }}
- - name: Upload Ubuntu 20.04 arm64 artifacts
+ - name: Upload Windows 2019 single artifact - tonlibjson
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
- file: artifacts/ton-ubuntu-20.04-arm64.zip
- asset_name: ton-ubuntu-20.04-arm64.zip
+ file: artifacts/ton-win-binaries/tonlibjson.dll
+ asset_name: tonlibjson.dll
tag: v${{ steps.date.outputs.date }}
- - name: Upload Ubuntu 22.04 arm64 artifacts
+ - name: Upload Windows 2019 single artifact - tonlib-cli
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
- file: artifacts/ton-ubuntu-22.04-arm64.zip
- asset_name: ton-ubuntu-22.04-arm64.zip
- tag: v${{ steps.date.outputs.date }}
\ No newline at end of file
+ file: artifacts/ton-win-binaries/tonlib-cli.exe
+ asset_name: tonlib-cli.exe
+ tag: v${{ steps.date.outputs.date }}
+
+# mac x86-64
+
+ - name: Upload Mac x86-64 artifacts
+ uses: svenstaro/upload-release-action@v2
+ with:
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ file: artifacts/ton-x86_64-macos-binaries.zip
+ asset_name: ton-mac-x86-64.zip
+ tag: v${{ steps.date.outputs.date }}
+
+ - name: Upload Mac x86-64 single artifact - fift
+ uses: svenstaro/upload-release-action@v2
+ with:
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ file: artifacts/ton-x86_64-macos-binaries/fift
+ asset_name: fift-mac-x86-64
+ tag: v${{ steps.date.outputs.date }}
+
+ - name: Upload Mac x86-64 single artifact - func
+ uses: svenstaro/upload-release-action@v2
+ with:
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ file: artifacts/ton-x86_64-macos-binaries/func
+ asset_name: func-mac-x86-64
+ tag: v${{ steps.date.outputs.date }}
+
+ - name: Upload Mac x86-64 single artifact - lite-client
+ uses: svenstaro/upload-release-action@v2
+ with:
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ file: artifacts/ton-x86_64-macos-binaries/lite-client
+ asset_name: lite-client-mac-x86-64
+ tag: v${{ steps.date.outputs.date }}
+
+ - name: Upload Mac x86-64 single artifact - rldp-http-proxy
+ uses: svenstaro/upload-release-action@v2
+ with:
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ file: artifacts/ton-x86_64-macos-binaries/rldp-http-proxy
+ asset_name: rldp-http-proxy-mac-x86-64
+ tag: v${{ steps.date.outputs.date }}
+
+ - name: Upload Mac x86-64 single artifact - http-proxy
+ uses: svenstaro/upload-release-action@v2
+ with:
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ file: artifacts/ton-x86_64-macos-binaries/http-proxy
+ asset_name: http-proxy-mac-x86-64
+ tag: v${{ steps.date.outputs.date }}
+
+ - name: Upload Mac x86-64 single artifact - storage-daemon-cli
+ uses: svenstaro/upload-release-action@v2
+ with:
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ file: artifacts/ton-x86_64-macos-binaries/storage-daemon-cli
+ asset_name: storage-daemon-cli-mac-x86-64
+ tag: v${{ steps.date.outputs.date }}
+
+ - name: Upload Mac x86-64 single artifact - tonlibjson
+ uses: svenstaro/upload-release-action@v2
+ with:
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ file: artifacts/ton-x86_64-macos-binaries/libtonlibjson.dylib
+ asset_name: tonlibjson-mac-x86-64.dylib
+ tag: v${{ steps.date.outputs.date }}
+
+ - name: Upload Mac x86-64 single artifact - tonlib-cli
+ uses: svenstaro/upload-release-action@v2
+ with:
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ file: artifacts/ton-x86_64-macos-binaries/tonlib-cli
+ asset_name: tonlib-cli-mac-x86-64
+ tag: v${{ steps.date.outputs.date }}
+
+# linux x86-64
+
+ - name: Upload Linux x86-64 artifacts
+ uses: svenstaro/upload-release-action@v2
+ with:
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ file: artifacts/ton-x86_64-linux-binaries.zip
+ asset_name: ton-linux-x86_64.zip
+ tag: v${{ steps.date.outputs.date }}
+
+ - name: Upload Linux x86-64 single artifact - fift
+ uses: svenstaro/upload-release-action@v2
+ with:
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ file: artifacts/ton-x86_64-linux-binaries/fift
+ asset_name: fift-linux-x86_64
+ tag: v${{ steps.date.outputs.date }}
+
+ - name: Upload Linux x86-64 single artifact - func
+ uses: svenstaro/upload-release-action@v2
+ with:
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ file: artifacts/ton-x86_64-linux-binaries/func
+ asset_name: func-linux-x86_64
+ tag: v${{ steps.date.outputs.date }}
+
+ - name: Upload Linux x86-64 single artifact - lite-client
+ uses: svenstaro/upload-release-action@v2
+ with:
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ file: artifacts/ton-x86_64-linux-binaries/lite-client
+ asset_name: lite-client-linux-x86_64
+ tag: v${{ steps.date.outputs.date }}
+
+ - name: Upload Linux x86-64 single artifact - rldp-http-proxy
+ uses: svenstaro/upload-release-action@v2
+ with:
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ file: artifacts/ton-x86_64-linux-binaries/rldp-http-proxy
+ asset_name: rldp-http-proxy-linux-x86_64
+ tag: v${{ steps.date.outputs.date }}
+
+ - name: Upload Linux x86-64 single artifact - http-proxy
+ uses: svenstaro/upload-release-action@v2
+ with:
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ file: artifacts/ton-x86_64-linux-binaries/http-proxy
+ asset_name: http-proxy-linux-x86_64
+ tag: v${{ steps.date.outputs.date }}
+
+ - name: Upload Linux x86-64 single artifact - storage-daemon-cli
+ uses: svenstaro/upload-release-action@v2
+ with:
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ file: artifacts/ton-x86_64-linux-binaries/storage-daemon-cli
+ asset_name: storage-daemon-cli-linux-x86_64
+ tag: v${{ steps.date.outputs.date }}
+
+ - name: Upload Linux x86-64 single artifact - tonlibjson
+ uses: svenstaro/upload-release-action@v2
+ with:
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ file: artifacts/ton-x86_64-linux-binaries/libtonlibjson.so.0.5
+ asset_name: tonlibjson-linux-x86_64.so
+ tag: v${{ steps.date.outputs.date }}
+
+ - name: Upload Linux x86-64 single artifact - tonlib-cli
+ uses: svenstaro/upload-release-action@v2
+ with:
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ file: artifacts/ton-x86_64-linux-binaries/tonlib-cli
+ asset_name: tonlib-cli-linux-x86_64
+ tag: v${{ steps.date.outputs.date }}
+
+# - name: Upload Linux arm64 artifacts
+# uses: svenstaro/upload-release-action@v2
+# with:
+# repo_token: ${{ secrets.GITHUB_TOKEN }}
+# file: artifacts/ton-aarch64-linux-binaries.zip
+# asset_name: ton-linux-arm64.zip
+# tag: v${{ steps.date.outputs.date }}
+#
+# - name: Upload Mac arm64 artifacts
+# uses: svenstaro/upload-release-action@v2
+# with:
+# repo_token: ${{ secrets.GITHUB_TOKEN }}
+# file: artifacts/ton-aarch64-macos-binaries
+# asset_name: ton-mac-arm64.zip
+# tag: v${{ steps.date.outputs.date }}
+
+ - name: Upload WASM artifacts
+ uses: svenstaro/upload-release-action@v2
+ with:
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ file: artifacts/ton-wasm-binaries.zip
+ asset_name: ton-wasm-binaries.zip
+ tag: v${{ steps.date.outputs.date }}
diff --git a/.github/workflows/docker-compile-ubuntu.yml b/.github/workflows/docker-compile-ubuntu.yml
index 08caab23..41fbb8e0 100644
--- a/.github/workflows/docker-compile-ubuntu.yml
+++ b/.github/workflows/docker-compile-ubuntu.yml
@@ -9,7 +9,7 @@ jobs:
max-parallel: 3
matrix:
arch: [arm64]
- ver: [22.04, 18.04, 20.04 ]
+ ver: [22.04, 20.04 ]
runs-on: ubuntu-22.04
steps:
@@ -33,7 +33,7 @@ jobs:
run: |
mkdir build-${{matrix.ver}}-${{matrix.arch}}
- docker buildx build --build-arg BRANCH=${{ steps.vars.outputs.short_ref }} --platform=linux/${{matrix.arch}} --progress=plain --load . -t build-${{matrix.ver}}-${{matrix.arch}} -f .github/script/${{matrix.arch}}-${{matrix.ver}}.Dockerfile
+ docker buildx build --build-arg REPO=${{ github.repository }} --build-arg BRANCH=${{ steps.vars.outputs.short_ref }} --platform=linux/${{matrix.arch}} --progress=plain --load . -t build-${{matrix.ver}}-${{matrix.arch}} -f .github/script/${{matrix.arch}}-${{matrix.ver}}.Dockerfile
container_id=$(docker create --platform=linux/${{matrix.arch}} build-${{matrix.ver}}-${{matrix.arch}})
docker cp $container_id:/ton/build/dht-server/dht-server build-${{matrix.ver}}-${{matrix.arch}}/
docker cp -a $container_id:/ton/build/validator-engine/validator-engine build-${{matrix.ver}}-${{matrix.arch}}/
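For reference, the same image build can be reproduced locally with the new build arguments. A minimal sketch, assuming Docker Buildx with QEMU emulation is set up, and using the upstream repository name as an example value for REPO:

    docker buildx build \
      --build-arg REPO=ton-blockchain/ton \
      --build-arg BRANCH=master \
      --platform=linux/arm64 --progress=plain --load . \
      -t build-22.04-arm64 \
      -f .github/script/arm64-22.04.Dockerfile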
diff --git a/.github/workflows/macos-11.7-compile.yml b/.github/workflows/macos-11.7-compile.yml
index 80a57e22..b1c97d3d 100644
--- a/.github/workflows/macos-11.7-compile.yml
+++ b/.github/workflows/macos-11.7-compile.yml
@@ -23,18 +23,19 @@ jobs:
- name: Build all
run: |
export NONINTERACTIVE=1
- brew install ninja
+ brew install ninja libmicrohttpd pkg-config
rootPath=`pwd`
mkdir build
cd build
cmake -GNinja -DOPENSSL_FOUND=1 -DOPENSSL_INCLUDE_DIR=$rootPath/openssl_1_1_1/include -DOPENSSL_CRYPTO_LIBRARY=$rootPath/openssl_1_1_1/libcrypto.a -DCMAKE_OSX_DEPLOYMENT_TARGET:STRING=11.7 -DCMAKE_CXX_FLAGS="-stdlib=libc++" -DCMAKE_BUILD_TYPE=Release ..
- ninja storage-daemon storage-daemon-cli fift func tonlib tonlibjson tonlib-cli validator-engine lite-client pow-miner validator-engine-console generate-random-id json2tlo dht-server http-proxy rldp-http-proxy adnl-proxy create-state create-hardfork tlbc
+ ninja storage-daemon storage-daemon-cli blockchain-explorer fift func tonlib tonlibjson tonlib-cli validator-engine lite-client pow-miner validator-engine-console generate-random-id json2tlo dht-server http-proxy rldp-http-proxy adnl-proxy create-state create-hardfork tlbc
- name: Find & copy binaries
run: |
mkdir artifacts
cp build/storage/storage-daemon/storage-daemon artifacts/
cp build/storage/storage-daemon/storage-daemon-cli artifacts/
+ cp build/blockchain-explorer/blockchain-explorer artifacts/
cp build/crypto/fift artifacts/
cp build/crypto/func artifacts/
cp build/crypto/create-state artifacts/
@@ -50,10 +51,18 @@ jobs:
cp build/utils/generate-random-id artifacts/
cp build/utils/json2tlo artifacts/
cp build/adnl/adnl-proxy artifacts/
+ chmod +x artifacts/*
rsync -r crypto/smartcont artifacts/
rsync -r crypto/fift/lib artifacts/
ls -laRt artifacts
+ - name: Simple binaries test
+ run: |
+ artifacts/validator-engine -V
+ artifacts/lite-client -V
+ artifacts/fift -V
+ artifacts/func -V
+
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
diff --git a/.github/workflows/macos-12.6-compile.yml b/.github/workflows/macos-12.6-compile.yml
index 2656de0a..c7e2e99b 100644
--- a/.github/workflows/macos-12.6-compile.yml
+++ b/.github/workflows/macos-12.6-compile.yml
@@ -23,18 +23,19 @@ jobs:
- name: Build all
run: |
export NONINTERACTIVE=1
- brew install ninja
+ brew install ninja libmicrohttpd pkg-config
rootPath=`pwd`
mkdir build
cd build
cmake -GNinja -DOPENSSL_FOUND=1 -DOPENSSL_INCLUDE_DIR=$rootPath/openssl_1_1_1/include -DOPENSSL_CRYPTO_LIBRARY=$rootPath/openssl_1_1_1/libcrypto.a -DCMAKE_OSX_DEPLOYMENT_TARGET:STRING=12.6 -DCMAKE_CXX_FLAGS="-stdlib=libc++" -DCMAKE_BUILD_TYPE=Release ..
- ninja storage-daemon storage-daemon-cli fift func tonlib tonlibjson tonlib-cli validator-engine lite-client pow-miner validator-engine-console generate-random-id json2tlo dht-server http-proxy rldp-http-proxy adnl-proxy create-state create-hardfork tlbc
+ ninja storage-daemon storage-daemon-cli blockchain-explorer fift func tonlib tonlibjson tonlib-cli validator-engine lite-client pow-miner validator-engine-console generate-random-id json2tlo dht-server http-proxy rldp-http-proxy adnl-proxy create-state create-hardfork tlbc
- name: Find & copy binaries
run: |
mkdir artifacts
cp build/storage/storage-daemon/storage-daemon artifacts/
cp build/storage/storage-daemon/storage-daemon-cli artifacts/
+ cp build/blockchain-explorer/blockchain-explorer artifacts/
cp build/crypto/fift artifacts/
cp build/crypto/func artifacts/
cp build/crypto/create-state artifacts/
@@ -50,10 +51,18 @@ jobs:
cp build/utils/generate-random-id artifacts/
cp build/utils/json2tlo artifacts/
cp build/adnl/adnl-proxy artifacts/
+ chmod +x artifacts/*
rsync -r crypto/smartcont artifacts/
rsync -r crypto/fift/lib artifacts/
ls -laRt artifacts
+ - name: Simple binaries test
+ run: |
+ artifacts/validator-engine -V
+ artifacts/lite-client -V
+ artifacts/fift -V
+ artifacts/func -V
+
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
diff --git a/.github/workflows/ton-aarch64-linux.yml b/.github/workflows/ton-aarch64-linux.yml
new file mode 100644
index 00000000..83ad8694
--- /dev/null
+++ b/.github/workflows/ton-aarch64-linux.yml
@@ -0,0 +1,48 @@
+name: "TON aarch64 Linux binaries"
+
+on: [workflow_dispatch,workflow_call]
+
+jobs:
+ build:
+ runs-on: ubuntu-22.04
+
+ steps:
+ - run: |
+ sudo apt update
+ sudo apt install -y apt-utils
+ sudo apt install -q -y qemu-system-aarch64 qemu-efi binfmt-support qemu-user-static
+
+ - uses: actions/checkout@v3
+ with:
+ submodules: 'recursive'
+
+ - uses: cachix/install-nix-action@v18
+ with:
+ extra_nix_config: |
+ access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
+
+ - name: Compile
+ run: nix build .?submodules=1#packages.aarch64-linux.ton-oldglibc_staticbinaries --print-build-logs --system aarch64-linux -o result-aarch64
+
+ - name: Copy binaries
+ run: |
+ ls -lart
+ mkdir artifacts
+ cp $PWD/result-aarch64-linux/bin/* artifacts/
+ chmod +x artifacts/*
+ cp $PWD/result-aarch64-linux/lib/libtonlibjson.so.0.5 artifacts/
+ cp -R crypto/smartcont artifacts/
+ cp -R crypto/fift/lib artifacts/
+
+ - name: Simple binaries test
+ run: |
+ artifacts/validator-engine -V
+ artifacts/lite-client -V
+ artifacts/fift -V
+ artifacts/func -V
+
+ - name: Upload artifacts
+ uses: actions/upload-artifact@master
+ with:
+ name: ton-aarch64-linux-binaries
+ path: artifacts
\ No newline at end of file
diff --git a/.github/workflows/ton-aarch64-macos.yml b/.github/workflows/ton-aarch64-macos.yml
new file mode 100644
index 00000000..855ffa6c
--- /dev/null
+++ b/.github/workflows/ton-aarch64-macos.yml
@@ -0,0 +1,45 @@
+name: "TON aarch64 macOS binaries"
+
+on: [workflow_dispatch,workflow_call]
+
+jobs:
+ build:
+ runs-on: macos-12
+
+ steps:
+ - run: brew install qemu
+
+ - uses: actions/checkout@v3
+ with:
+ submodules: 'recursive'
+
+ - uses: cachix/install-nix-action@v18
+ with:
+ extra_nix_config: |
+ access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
+
+ - name: Compile
+ run: nix build .?submodules=1#packages.aarch64-darwin.ton-staticbin-dylib --print-build-logs -o result-aarch64-darwin
+
+ - name: Copy binaries
+ run: |
+ ls -lart
+ mkdir artifacts
+ cp $PWD/result-aarch64-darwin/bin/* artifacts/
+ chmod +x artifacts/*
+ cp $PWD/result-aarch64-darwin/lib/libtonlibjson* artifacts/
+ cp -R crypto/smartcont artifacts/
+ cp -R crypto/fift/lib artifacts/
+
+ - name: Simple binaries test
+ run: |
+ artifacts/validator-engine -V
+ artifacts/lite-client -V
+ artifacts/fift -V
+ artifacts/func -V
+
+ - name: Upload artifacts
+ uses: actions/upload-artifact@master
+ with:
+ name: ton-aarch64-macos-binaries
+ path: artifacts
diff --git a/.github/workflows/ubuntu-18.04-ton-ccpcheck.yml b/.github/workflows/ton-ccpcheck.yml
similarity index 95%
rename from .github/workflows/ubuntu-18.04-ton-ccpcheck.yml
rename to .github/workflows/ton-ccpcheck.yml
index f440d7a5..c2295638 100644
--- a/.github/workflows/ubuntu-18.04-ton-ccpcheck.yml
+++ b/.github/workflows/ton-ccpcheck.yml
@@ -5,7 +5,7 @@ on: [push,workflow_dispatch,workflow_call]
jobs:
build:
- runs-on: ubuntu-18.04
+ runs-on: ubuntu-22.04
steps:
- name: Check out repository
diff --git a/.github/workflows/ton-wasm-emscripten.yml b/.github/workflows/ton-wasm-emscripten.yml
new file mode 100644
index 00000000..1ce83ccc
--- /dev/null
+++ b/.github/workflows/ton-wasm-emscripten.yml
@@ -0,0 +1,40 @@
+name: TON WASM Compile
+
+on: [push,workflow_dispatch,workflow_call]
+
+jobs:
+ build:
+ runs-on: ubuntu-22.04
+
+ steps:
+ - name: Check out repository
+ uses: actions/checkout@v3
+ with:
+ submodules: 'recursive'
+
+ - name: Install libraries
+ run: |
+ sudo apt update
+ sudo apt install -y build-essential git make cmake ninja-build clang libgflags-dev zlib1g-dev libssl-dev libreadline-dev libmicrohttpd-dev pkg-config libgsl-dev python3 python3-dev python3-pip nodejs
+
+ - name: Configure & Build
+ run: |
+ cd .github/script
+ ./fift-func-wasm-build-ubuntu.sh
+
+ - name: Find & copy binaries
+ run: |
+ mkdir artifacts
+ ls build/crypto
+ cp build/crypto/fift* artifacts
+ cp build/crypto/func* artifacts
+ cp build/crypto/tlbc* artifacts
+ cp build/emulator/emulator-emscripten* artifacts
+ cp -R crypto/smartcont artifacts
+ cp -R crypto/fift/lib artifacts
+
+ - name: Upload artifacts
+ uses: actions/upload-artifact@master
+ with:
+ name: ton-wasm-binaries
+ path: artifacts
\ No newline at end of file
diff --git a/.github/workflows/ton-x86-64-linux.yml b/.github/workflows/ton-x86-64-linux.yml
new file mode 100644
index 00000000..0af0051b
--- /dev/null
+++ b/.github/workflows/ton-x86-64-linux.yml
@@ -0,0 +1,47 @@
+name: "TON x86_64 Linux binaries"
+
+on: [push,workflow_dispatch,workflow_call]
+
+jobs:
+ build:
+ runs-on: ubuntu-22.04
+
+ steps:
+ - run: |
+ sudo apt update
+ sudo apt install -y apt-utils
+
+ - uses: actions/checkout@v3
+ with:
+ submodules: 'recursive'
+
+ - uses: cachix/install-nix-action@v18
+ with:
+ extra_nix_config: |
+ access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
+
+ - name: Compile
+ run: nix build .?submodules=1#packages.x86_64-linux.ton-oldglibc_staticbinaries --print-build-logs --system x86_64-linux -o result-x86_64
+
+ - name: Copy binaries
+ run: |
+ ls -lart
+ mkdir artifacts
+ cp $PWD/result-x86_64/bin/* artifacts/
+ chmod +x artifacts/*
+ cp $PWD/result-x86_64/lib/libtonlibjson.so.0.5 artifacts/
+ cp -R crypto/smartcont artifacts/
+ cp -R crypto/fift/lib artifacts/
+
+ - name: Simple binaries test
+ run: |
+ artifacts/validator-engine -V
+ artifacts/lite-client -V
+ artifacts/fift -V
+ artifacts/func -V
+
+ - name: Upload artifacts
+ uses: actions/upload-artifact@master
+ with:
+ name: ton-x86_64-linux-binaries
+ path: artifacts
\ No newline at end of file
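The Nix-based build introduced in this workflow can also be run locally. A minimal sketch, assuming Nix is installed with flakes enabled (the flake attribute is taken verbatim from the Compile step above):

    nix build .?submodules=1#packages.x86_64-linux.ton-oldglibc_staticbinaries \
      --print-build-logs -o result-x86_64
    ls result-x86_64/bin result-x86_64/lib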
diff --git a/.github/workflows/ton-x86-64-macos.yml b/.github/workflows/ton-x86-64-macos.yml
new file mode 100644
index 00000000..9d490997
--- /dev/null
+++ b/.github/workflows/ton-x86-64-macos.yml
@@ -0,0 +1,43 @@
+name: "TON x86_64 macOS binaries"
+
+on: [push,workflow_dispatch,workflow_call]
+
+jobs:
+ build:
+ runs-on: macos-12
+
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ submodules: 'recursive'
+
+ - uses: cachix/install-nix-action@v18
+ with:
+ extra_nix_config: |
+ access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
+
+ - name: Compile
+ run: nix build .?submodules=1#packages.x86_64-darwin.ton-staticbin-dylib --print-build-logs -o result-x86_64-darwin
+
+ - name: Copy binaries
+ run: |
+ ls -lart
+ mkdir artifacts
+ cp $PWD/result-x86_64-darwin/bin/* artifacts/
+ chmod +x artifacts/*
+ cp $PWD/result-x86_64-darwin/lib/libtonlibjson.dylib artifacts/
+ cp -R crypto/smartcont artifacts/
+ cp -R crypto/fift/lib artifacts/
+
+ - name: Simple binaries test
+ run: |
+ artifacts/validator-engine -V
+ artifacts/lite-client -V
+ artifacts/fift -V
+ artifacts/func -V
+
+ - name: Upload artifacts
+ uses: actions/upload-artifact@master
+ with:
+ name: ton-x86_64-macos-binaries
+ path: artifacts
\ No newline at end of file
diff --git a/.github/workflows/ubuntu-18.04-compile.yml b/.github/workflows/ubuntu-22.04-compile.yml
similarity index 88%
rename from .github/workflows/ubuntu-18.04-compile.yml
rename to .github/workflows/ubuntu-22.04-compile.yml
index c661a683..d27d1b4d 100644
--- a/.github/workflows/ubuntu-18.04-compile.yml
+++ b/.github/workflows/ubuntu-22.04-compile.yml
@@ -1,11 +1,11 @@
-name: Ubuntu 18.04 Compile
+name: Ubuntu 22.04 Compile
on: [push,workflow_dispatch,workflow_call]
jobs:
build:
- runs-on: ubuntu-18.04
+ runs-on: ubuntu-22.04
steps:
- name: Check out repository
@@ -36,9 +36,17 @@ jobs:
run: |
mkdir artifacts
cp build/storage/storage-daemon/storage-daemon build/storage/storage-daemon/storage-daemon-cli build/crypto/fift build/crypto/tlbc build/crypto/func build/crypto/create-state build/validator-engine-console/validator-engine-console build/tonlib/tonlib-cli build/tonlib/libtonlibjson.so.0.5 build/http/http-proxy build/rldp-http-proxy/rldp-http-proxy build/dht-server/dht-server build/lite-client/lite-client build/validator-engine/validator-engine build/utils/generate-random-id build/utils/json2tlo build/adnl/adnl-proxy artifacts
+ chmod +x artifacts/*
cp -R crypto/smartcont artifacts/
cp -R crypto/fift/lib artifacts/
+ - name: Simple binaries test
+ run: |
+ artifacts/validator-engine -V
+ artifacts/lite-client -V
+ artifacts/fift -V
+ artifacts/func -V
+
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
diff --git a/.github/workflows/ubuntu-compile.yml b/.github/workflows/ubuntu-compile.yml
index a91e7128..dd246a39 100644
--- a/.github/workflows/ubuntu-compile.yml
+++ b/.github/workflows/ubuntu-compile.yml
@@ -7,7 +7,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- os: [ubuntu-18.04, ubuntu-20.04, ubuntu-22.04]
+ os: [ubuntu-20.04, ubuntu-22.04]
runs-on: ${{ matrix.os }}
steps:
@@ -39,8 +39,17 @@ jobs:
run: |
mkdir artifacts-${{ matrix.os }}
cp build-${{ matrix.os }}/storage/storage-daemon/storage-daemon build-${{ matrix.os }}/storage/storage-daemon/storage-daemon-cli build-${{ matrix.os }}/crypto/fift build-${{ matrix.os }}/crypto/tlbc build-${{ matrix.os }}/crypto/func build-${{ matrix.os }}/crypto/create-state build-${{ matrix.os }}/validator-engine-console/validator-engine-console build-${{ matrix.os }}/tonlib/tonlib-cli build-${{ matrix.os }}/tonlib/libtonlibjson.so.0.5 build-${{ matrix.os }}/http/http-proxy build-${{ matrix.os }}/rldp-http-proxy/rldp-http-proxy build-${{ matrix.os }}/dht-server/dht-server build-${{ matrix.os }}/lite-client/lite-client build-${{ matrix.os }}/validator-engine/validator-engine build-${{ matrix.os }}/utils/generate-random-id build-${{ matrix.os }}/utils/json2tlo build-${{ matrix.os }}/adnl/adnl-proxy artifacts-${{ matrix.os }}
+ chmod +x artifacts-${{ matrix.os }}/*
cp -R crypto/smartcont artifacts-${{ matrix.os }}
cp -R crypto/fift/lib artifacts-${{ matrix.os }}
+
+ - name: Simple binaries test
+ run: |
+ artifacts-${{ matrix.os }}/validator-engine -V
+ artifacts-${{ matrix.os }}/lite-client -V
+ artifacts-${{ matrix.os }}/fift -V
+ artifacts-${{ matrix.os }}/func -V
+
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
diff --git a/.github/workflows/win-2019-compile.yml b/.github/workflows/win-2019-compile.yml
index e648dbb7..bd174a15 100644
--- a/.github/workflows/win-2019-compile.yml
+++ b/.github/workflows/win-2019-compile.yml
@@ -28,7 +28,10 @@ jobs:
path: zlib
- name: Setup msbuild.exe
- uses: microsoft/setup-msbuild@v1.0.2
+ uses: microsoft/setup-msbuild@v1.1
+
+ - name: Install Pkg-config Lite
+ run: choco install pkgconfiglite
- name: Compile zlib Win64
run: |
@@ -57,7 +60,7 @@ jobs:
mkdir build
cd build
cmake -DREADLINE_INCLUDE_DIR=%root%\readline-5.0-1-lib\include\readline -DREADLINE_LIBRARY=%root%\readline-5.0-1-lib\lib\readline.lib -DZLIB_FOUND=1 -DMHD_FOUND=1 -DMHD_LIBRARY=%root%\libmicrohttpd-0.9.75-w32-bin\x86_64\VS2019\Release-static\libmicrohttpd.lib -DMHD_INCLUDE_DIR=%root%\libmicrohttpd-0.9.75-w32-bin\x86_64\VS2019\Release-static -DZLIB_INCLUDE_DIR=%root%\zlib -DZLIB_LIBRARY=%root%\zlib\contrib\vstudio\vc14\x64\ZlibStatReleaseWithoutAsm\zlibstat.lib -DOPENSSL_FOUND=1 -DOPENSSL_INCLUDE_DIR=%root%/openssl-1.1/x64/include -DOPENSSL_CRYPTO_LIBRARY=%root%/openssl-1.1/x64/lib/libcrypto.lib -DCMAKE_CXX_FLAGS="/DTD_WINDOWS=1 /EHsc /bigobj /W0" ..
- cmake --build . --target storage-daemon storage-daemon-cli fift func tonlib tonlibjson tonlib-cli validator-engine lite-client pow-miner validator-engine-console generate-random-id json2tlo dht-server http-proxy rldp-http-proxy adnl-proxy create-state create-hardfork --config Release
+ cmake --build . --target storage-daemon storage-daemon-cli blockchain-explorer fift func tonlib tonlibjson tonlib-cli validator-engine lite-client pow-miner validator-engine-console generate-random-id json2tlo dht-server http-proxy rldp-http-proxy adnl-proxy create-state create-hardfork --config Release
- name: Show executables
run: |
@@ -68,8 +71,7 @@ jobs:
- name: Check if validator-engine.exe exists
run: |
- set root=%cd%
- copy %root%\build\validator-engine\Release\validator-engine.exe test
+ copy %cd%\build\validator-engine\Release\validator-engine.exe test
- name: Find & copy binaries
run: |
@@ -77,9 +79,9 @@ jobs:
mkdir artifacts\smartcont
mkdir artifacts\lib
- for %%I in (build\storage\storage-daemon\Release\storage-daemon.exe build\storage\storage-daemon\Release\storage-daemon-cli.exe build\crypto\Release\fift.exe build\crypto\Release\tlbc.exe build\crypto\Release\func.exe build\crypto\Release\create-state.exe build\validator-engine-console\Release\validator-engine-console.exe build\tonlib\Release\tonlib-cli.exe build\tonlib\Release\tonlibjson.dll build\http\Release\http-proxy.exe build\rldp-http-proxy\Release\rldp-http-proxy.exe build\dht-server\Release\dht-server.exe build\lite-client\Release\lite-client.exe build\validator-engine\Release\validator-engine.exe build\utils\Release\generate-random-id.exe build\utils\Release\json2tlo.exe build\adnl\Release\adnl-proxy.exe) do copy %%I artifacts\
+ for %%I in (build\storage\storage-daemon\Release\storage-daemon.exe build\storage\storage-daemon\Release\storage-daemon-cli.exe build\blockchain-explorer\blockchain-explorer.exe build\crypto\Release\fift.exe build\crypto\Release\tlbc.exe build\crypto\Release\func.exe build\crypto\Release\create-state.exe build\validator-engine-console\Release\validator-engine-console.exe build\tonlib\Release\tonlib-cli.exe build\tonlib\Release\tonlibjson.dll build\http\Release\http-proxy.exe build\rldp-http-proxy\Release\rldp-http-proxy.exe build\dht-server\Release\dht-server.exe build\lite-client\Release\lite-client.exe build\validator-engine\Release\validator-engine.exe build\utils\Release\generate-random-id.exe build\utils\Release\json2tlo.exe build\adnl\Release\adnl-proxy.exe) do copy %%I artifacts\
xcopy /e /k /h /i crypto\smartcont artifacts\smartcont
- xcopy /e /k /h /i crypto\fift\lib artifacts\lib
+ xcopy /e /k /h /i crypto\fift\lib artifacts\lib
- name: Upload artifacts
uses: actions/upload-artifact@master
diff --git a/CMake/FindMHD.cmake b/CMake/FindMHD.cmake
index 822714a2..c4b94c0e 100644
--- a/CMake/FindMHD.cmake
+++ b/CMake/FindMHD.cmake
@@ -20,19 +20,6 @@ find_library(
set(MHD_INCLUDE_DIRS ${MHD_INCLUDE_DIR})
set(MHD_LIBRARIES ${MHD_LIBRARY})
-# debug library on windows
-# same naming convention as in qt (appending debug library with d)
-# boost is using the same "hack" as us with "optimized" and "debug"
-# official MHD project actually uses _d suffix
-if (MSVC)
- find_library(
- MHD_LIBRARY_DEBUG
- NAMES microhttpd_d microhttpd-10_d libmicrohttpd_d libmicrohttpd-dll_d
- DOC "mhd debug library"
- )
- set(MHD_LIBRARIES optimized ${MHD_LIBRARIES} debug ${MHD_LIBRARY_DEBUG})
-endif()
-
include(FindPackageHandleStandardArgs)
-find_package_handle_standard_args(mhd DEFAULT_MSG MHD_INCLUDE_DIR MHD_LIBRARY)
+find_package_handle_standard_args(MHD DEFAULT_MSG MHD_INCLUDE_DIR MHD_LIBRARY)
mark_as_advanced(MHD_INCLUDE_DIR MHD_LIBRARY)
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 1e52f969..aafc79b6 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -108,7 +108,8 @@ set(TON_ARCH "native" CACHE STRING "Architecture, will be passed to -march=")
#BEGIN M1 support
EXECUTE_PROCESS( COMMAND uname -m COMMAND tr -d '\n' OUTPUT_VARIABLE ARCHITECTURE )
-if ((ARCHITECTURE MATCHES "arm64") AND (CMAKE_SYSTEM_NAME STREQUAL "Darwin"))
+if ((ARCHITECTURE MATCHES "arm64") AND (CMAKE_SYSTEM_NAME STREQUAL "Darwin") AND
+ (CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 13.0)) # only clang 13+ supports cpu=apple-m1
set(TON_ARCH "apple-m1")
endif()
#END M1 support
@@ -135,7 +136,16 @@ set(CRC32C_BUILD_BENCHMARKS OFF CACHE BOOL "Build CRC32C's benchmarks")
set(CRC32C_USE_GLOG OFF CACHE BOOL "Build CRC32C's tests with Google Logging")
set(CRC32C_INSTALL OFF CACHE BOOL "Install CRC32C's header and library")
message("Add crc32c")
-add_subdirectory(third-party/crc32c EXCLUDE_FROM_ALL)
+if (NOT MSVC)
+ set(OLD_CMAKE_CXX_FLAGS ${CMAKE_CXX_FLAGS})
+ # fix aarch64 build @ crc32c/src/crc32c_arm64_linux_check.h
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-error=address")
+ add_subdirectory(third-party/crc32c EXCLUDE_FROM_ALL)
+ set(CMAKE_CXX_FLAGS ${OLD_CMAKE_CXX_FLAGS})
+ unset(OLD_CMAKE_CXX_FLAGS)
+else()
+ add_subdirectory(third-party/crc32c EXCLUDE_FROM_ALL)
+endif()
set(CRC32C_FOUND 1)
if (TON_USE_ROCKSDB)
@@ -198,6 +208,7 @@ include(CheckCXXCompilerFlag)
set(CMAKE_THREAD_PREFER_PTHREAD ON)
set(THREADS_PREFER_PTHREAD_FLAG ON)
find_package(Threads REQUIRED)
+find_package(PkgConfig REQUIRED)
find_package(ZLIB REQUIRED)
if (TON_ARCH AND NOT MSVC)
@@ -383,6 +394,7 @@ add_subdirectory(tl-utils)
add_subdirectory(adnl)
add_subdirectory(crypto)
add_subdirectory(lite-client)
+add_subdirectory(emulator)
#BEGIN tonlib
add_subdirectory(tonlib)
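Because find_package(PkgConfig REQUIRED) now aborts configuration when pkg-config is absent, local builds need it installed up front. A minimal sketch for a Debian/Ubuntu host, mirroring the packages added to the Dockerfiles and workflows above (package names assumed from those files):

    sudo apt install -y pkg-config libmicrohttpd-dev
    mkdir build && cd build
    cmake -GNinja -DCMAKE_BUILD_TYPE=Release ..
    ninja fift func lite-client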
diff --git a/Changelog.md b/Changelog.md
index ae131e74..be18efc2 100644
--- a/Changelog.md
+++ b/Changelog.md
@@ -1,43 +1,16 @@
-## 05.2022 Update
-* Initial synchronization improved: adjusted timeouts for state download and the way of choosing which state to download. Nodes with low network speed and/or bad connectivity will synchronize faster and consistently.
-* Improved peer-to-peer network stability and DDoS resistance: now peers will only relay valid messages to the network. Large messages, which require splitting for relaying, will be retranslated as well, but only after the node gets all parts, and reassembles and checks them. Validators may sign certificates for network peers, which allow relaying large messages by parts without checks. It is used now by validators to faster relay new blocks. Sign and import certificate commands are exposed via `validator-engine-console`.
-* Fixed some rare edge cases in TVM arithmetic operations related to big numbers (`2**63+`)
-* Improved fixes used to combat wrong activate-destruct-activate contract behavior last November.
-* Improved tonlib: support libraries (with client-side caching), getmethods completely fill c7 register, getmethods support slice arguments, improved messages listing for transactions, added extended block header params, added getConfig method.
-* RocksDB updated to a newer version.
-* Improved persistent state serialization: memory usage during serialization was optimized; the start of serialization on different nodes was sparsed.
-* FunC update: support for string literals and constants (including precompiled constant expressions), semver, `include` expressions.
-* Fixed rarely manifested bugs in `Asm.fif`.
-* LiteClient supports key as cli parameter.
-* Improved Liteserver DoS resistance for running getmethods.
+## 03.2023 Update
+1. Improvement of ADNL connection stability
+2. Transaction emulator support and getAccountStateByTransaction method
+3. Fixes of typos, undefined behavior and timer warnings
+4. Handling of incorrect integer literal values in FunC; FunC version bumped to 0.4.2
+5. FunC Mathlib
-Besides the work of the core team, this update is based on the efforts of @tvorogme (added support for slice arguments and noted bugs in Asm.fif), @akifoq (fixed bug in Asm.fif), @cryshado (noted strange behavior of LS, which, upon inspection, turned out to be a vector of DoS attack).
-## 08.2022 Update
-* Blockchain state serialization now works via separate db-handler which simplfies memory clearing after serialization
-* CellDB now works asynchronously which substantially increase database access throughput
-* Abseil-cpp and crc32 updated: solve issues with compilation on recent OS distributives
-* Fixed a series of UBs and issues for exotic endianness hosts
-* Added detailed network stats for overlays (can be accessed via `validator-console`)
-* Improved auto-builds for wide range of systems.
-* Added extended error information for unaccepted external messages: `exit_code` and TVM trace (where applicable).
-* [Improved catchain DoS resistance](https://github.com/ton-blockchain/ton/blob/master/doc/catchain-dos.md)
-* A series of FunC improvements, summarized [here](https://github.com/ton-blockchain/ton/pull/378)
-#### Update delay
-Update coincided with persistent state serialization event which lead to block production speed deterioration (issue substantially mitigated in update itself). This phenomena was aggravated by the fact that after update some validators lost ability to participate in block creation. The last was caused by threshold based hardcoded protocol version bump, where threshold was set in such manner (based on block height with value higher than 9m), that it eluded detection in private net tests. The update was temporarily paused and resumed after persistent state serialization ended and issues with block creation were resolved.
-
-Besides the work of the core team, this update is based on the efforts of @awesome-doge (help with abseil-cpp upgrade), @rec00rsiff (noted issues for exotic endianess and implemented network stats) and third-party security auditors.
-
-## 10.2022 Update
-* Added extended block creation and general perfomance stats gathering
-* Forbidden report data on blocks not committed to the master chain for LS
-* Improved debug in TVM
-* FunC 0.3.0: multi-line asms, bitwise operations for constants, duplication of identical definition for constants and asms now allowed
-* New tonlib methods: sendMessageReturnHash, getTransactionsV2, getMasterchainBlockSignatures, getShardBlockProof, getLibraries.
-* Fixed bugs related to invalid TVM output (c4, c5, libaries) and non-validated network data; avoided too deep recursion in libraries loading
-* Fixed multiple undefined behavior issues
-* Added build of FunC and Fift to WASM
-
-Besides the work of the core team, this update is based on the efforts of @tvorogme (debug improvements), @AlexeyFSL (WASM builds) and third-party security auditors.
+## 01.2023 Update
+1. Added ConfigParam 44: `SuspendedAddressList`. Once set, this config suspends initialisation of **uninit** addresses from the list for a given time.
+2. FunC: `v0.4.1` added pragmas for precise control of computation order
+3. FunC: fixed compiler crashes for some exotic inputs
+4. FunC: added a legacy tester, a collection of smart contracts used to check whether compiler updates change compilation results
+5. Improved archive manager: proper handling of recently garbage-collected blocks
## 12.2022 Update
Node update:
@@ -53,3 +26,47 @@ Node update:
10. TON Storage: added storage-daemon (create, download bag of Files, storage-provider staff), added storage-daemon-cli
Besides the work of the core team, this update is based on the efforts of @vtamara (help with abseil-cpp upgrade), @krigga(in-place modification of global variables) and third-party security auditors.
+
+## 10.2022 Update
+* Added extended block creation and general performance stats gathering
+* Forbade reporting data on blocks not committed to the master chain for LS
+* Improved debug in TVM
+* FunC 0.3.0: multi-line asms, bitwise operations for constants, duplication of identical definition for constants and asms now allowed
+* New tonlib methods: sendMessageReturnHash, getTransactionsV2, getMasterchainBlockSignatures, getShardBlockProof, getLibraries.
+* Fixed bugs related to invalid TVM output (c4, c5, libraries) and non-validated network data; avoided too deep recursion in library loading
+* Fixed multiple undefined behavior issues
+* Added build of FunC and Fift to WASM
+
+Besides the work of the core team, this update is based on the efforts of @tvorogme (debug improvements), @AlexeyFSL (WASM builds) and third-party security auditors.
+
+## 08.2022 Update
+* Blockchain state serialization now works via a separate db-handler, which simplifies memory clearing after serialization
+* CellDB now works asynchronously, which substantially increases database access throughput
+* Abseil-cpp and crc32 updated: solve issues with compilation on recent OS distributives
+* Fixed a series of UBs and issues for exotic endianness hosts
+* Added detailed network stats for overlays (can be accessed via `validator-console`)
+* Improved auto-builds for a wide range of systems.
+* Added extended error information for unaccepted external messages: `exit_code` and TVM trace (where applicable).
+* [Improved catchain DoS resistance](https://github.com/ton-blockchain/ton/blob/master/doc/catchain-dos.md)
+* A series of FunC improvements, summarized [here](https://github.com/ton-blockchain/ton/pull/378)
+#### Update delay
+The update coincided with a persistent state serialization event which led to block production speed deterioration (an issue substantially mitigated in the update itself). This phenomenon was aggravated by the fact that after the update some validators lost the ability to participate in block creation. The latter was caused by a threshold-based hardcoded protocol version bump, where the threshold was set in such a manner (based on a block height above 9m) that it eluded detection in private-net tests. The update was temporarily paused and resumed after persistent state serialization ended and issues with block creation were resolved.
+
+Besides the work of the core team, this update is based on the efforts of @awesome-doge (help with abseil-cpp upgrade), @rec00rsiff (noted issues for exotic endianness and implemented network stats) and third-party security auditors.
+
+## 05.2022 Update
+* Initial synchronization improved: adjusted timeouts for state download and the way of choosing which state to download. Nodes with low network speed and/or bad connectivity will synchronize faster and consistently.
+* Improved peer-to-peer network stability and DDoS resistance: now peers will only relay valid messages to the network. Large messages, which require splitting for relaying, will be retranslated as well, but only after the node gets all parts, and reassembles and checks them. Validators may sign certificates for network peers, which allow relaying large messages by parts without checks. It is used now by validators to faster relay new blocks. Sign and import certificate commands are exposed via `validator-engine-console`.
+* Fixed some rare edge cases in TVM arithmetic operations related to big numbers (`2**63+`)
+* Improved fixes used to combat wrong activate-destruct-activate contract behavior last November.
+* Improved tonlib: support libraries (with client-side caching), getmethods completely fill c7 register, getmethods support slice arguments, improved messages listing for transactions, added extended block header params, added getConfig method.
+* RocksDB updated to a newer version.
+* Improved persistent state serialization: memory usage during serialization was optimized; the start of serialization on different nodes was sparsed.
+* FunC update: support for string literals and constants (including precompiled constant expressions), semver, `include` expressions.
+* Fixed rarely manifested bugs in `Asm.fif`.
+* LiteClient supports key as cli parameter.
+* Improved Liteserver DoS resistance for running getmethods.
+
+Besides the work of the core team, this update is based on the efforts of @tvorogme (added support for slice arguments and noted bugs in Asm.fif), @akifoq (fixed bug in Asm.fif), @cryshado (noted strange behavior of LS, which, upon inspection, turned out to be a vector of DoS attack).
+
+
diff --git a/README.md b/README.md
index 893717ba..6bc3b1f5 100644
--- a/README.md
+++ b/README.md
@@ -1,21 +1,28 @@
-
-
-
-
+
+
+
+
+
+
+
Reference implementation of TON Node and tools
+##
[![TON Overflow Group][ton-overflow-badge]][ton-overflow-url]
[![Stack Overflow Group][stack-overflow-badge]][stack-overflow-url]
-[![Telegram Foundation Group][telegram-foundation-badge]][telegram-foundation-url]
+[![Telegram Community Chat][telegram-tondev-badge]][telegram-tondev-url]
[![Telegram Community Group][telegram-community-badge]][telegram-community-url]
+[![Telegram Foundation Group][telegram-foundation-badge]][telegram-foundation-url]
[![Twitter Group][twitter-badge]][twitter-url]
-[telegram-foundation-badge]: https://img.shields.io/badge/-TON%20Foundation-2CA5E0?style=flat&logo=telegram&logoColor=white
-[telegram-community-badge]: https://img.shields.io/badge/-TON%20Community-2CA5E0?style=flat&logo=telegram&logoColor=white
+[telegram-foundation-badge]: https://img.shields.io/badge/TON%20Foundation-2CA5E0?logo=telegram&logoColor=white&style=flat
+[telegram-community-badge]: https://img.shields.io/badge/TON%20Community-2CA5E0?logo=telegram&logoColor=white&style=flat
+[telegram-tondev-badge]: https://img.shields.io/badge/chat-TONDev-2CA5E0?logo=telegram&logoColor=white&style=flat
[telegram-foundation-url]: https://t.me/tonblockchain
[telegram-community-url]: https://t.me/toncoin
+[telegram-tondev-url]: https://t.me/tondev_eng
[twitter-badge]: https://img.shields.io/twitter/follow/ton_blockchain
[twitter-url]: https://twitter.com/ton_blockchain
[stack-overflow-badge]: https://img.shields.io/badge/-Stack%20Overflow-FE7A16?style=flat&logo=stack-overflow&logoColor=white
@@ -27,6 +34,15 @@
Main TON monorepo, which includes the code of the node/validator, lite-client, tonlib, FunC compiler, etc.
+## The Open Network
+
+__The Open Network (TON)__ is a fast, secure, scalable blockchain focused on handling _millions of transactions per second_ (TPS) with the goal of reaching hundreds of millions of blockchain users.
+- To learn more about the different aspects of the TON blockchain and its underlying ecosystem, check the [documentation](https://ton.org/docs)
+- To run a node, validator, or lite-server, check the [Participate section](https://ton.org/docs/participate/nodes/run-node)
+- To develop decentralised apps, check the [Tutorials](https://ton.org/docs/develop/smart-contracts/), [FunC docs](https://ton.org/docs/develop/func/overview) and [DApp tutorials](https://ton.org/docs/develop/dapps/)
+- To work on TON, check [wallets](https://ton.app/wallets), [explorers](https://ton.app/explorers), [DEXes](https://ton.app/dex) and [utilities](https://ton.app/utilities)
+- To interact with TON, check the [APIs](https://ton.org/docs/develop/dapps/apis/)
+
## Updates flow:
* **master branch** - mainnet is running on this stable branch.
diff --git a/adnl/CMakeLists.txt b/adnl/CMakeLists.txt
index 954db27b..b287cba0 100644
--- a/adnl/CMakeLists.txt
+++ b/adnl/CMakeLists.txt
@@ -99,6 +99,8 @@ target_link_libraries(adnl-pong PUBLIC tdactor ton_crypto tl_api tdnet common
add_library(adnltest STATIC ${ADNL_TEST_SOURCE})
target_include_directories(adnltest PUBLIC $)
target_link_libraries(adnltest PUBLIC adnl )
+
+install(TARGETS adnl-proxy RUNTIME DESTINATION bin)
endif()
#END internal
diff --git a/adnl/adnl-peer.cpp b/adnl/adnl-peer.cpp
index 528dd78f..5baee64e 100644
--- a/adnl/adnl-peer.cpp
+++ b/adnl/adnl-peer.cpp
@@ -68,7 +68,9 @@ void AdnlPeerPairImpl::alarm() {
}
if (retry_send_at_ && retry_send_at_.is_in_past()) {
retry_send_at_ = td::Timestamp::never();
- send_messages_in(std::move(pending_messages_), false);
+ auto messages = std::move(pending_messages_);
+ pending_messages_.clear();
+ send_messages_in(std::move(messages), false);
}
alarm_timestamp().relax(next_dht_query_at_);
alarm_timestamp().relax(next_db_update_at_);
@@ -113,6 +115,8 @@ void AdnlPeerPairImpl::discover() {
}
void AdnlPeerPairImpl::receive_packet_checked(AdnlPacket packet) {
+ last_received_packet_ = td::Timestamp::now();
+ try_reinit_at_ = td::Timestamp::never();
request_reverse_ping_after_ = td::Timestamp::in(15.0);
auto d = Adnl::adnl_start_time();
if (packet.dst_reinit_date() > d) {
@@ -263,7 +267,9 @@ void AdnlPeerPairImpl::send_messages_in(std::vector message
size_t ptr = 0;
bool first = true;
do {
- size_t s = (channel_ready_ ? channel_packet_header_max_size() : packet_header_max_size());
+ bool try_reinit = try_reinit_at_ && try_reinit_at_.is_in_past();
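+      // try_reinit fires when nothing has been received from the peer for a while; the packet is then
+      // sent outside the channel so that it carries reinit and address data again.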
+ bool via_channel = channel_ready_ && !try_reinit;
+ size_t s = (via_channel ? channel_packet_header_max_size() : packet_header_max_size());
if (first) {
s += 2 * addr_list_max_size();
}
@@ -307,7 +313,7 @@ void AdnlPeerPairImpl::send_messages_in(std::vector message
}
}
- if (!channel_ready_) {
+ if (!via_channel) {
packet.set_reinit_date(Adnl::adnl_start_time(), reinit_date_);
packet.set_source(local_id_);
}
@@ -330,7 +336,7 @@ void AdnlPeerPairImpl::send_messages_in(std::vector message
packet.run_basic_checks().ensure();
auto P = td::PromiseCreator::lambda([SelfId = actor_id(this), conn, id = print_id(),
- via_channel = channel_ready_](td::Result res) {
+ via_channel](td::Result res) {
if (res.is_error()) {
LOG(ERROR) << id << ": dropping OUT message: error while creating packet: " << res.move_as_error();
} else {
@@ -339,9 +345,9 @@ void AdnlPeerPairImpl::send_messages_in(std::vector message
});
td::actor::send_closure(local_actor_, &AdnlLocalId::update_packet, std::move(packet),
- !channel_ready_ && ack_seqno_ == 0 && in_seqno_ == 0, !channel_ready_,
+ (!channel_ready_ && ack_seqno_ == 0 && in_seqno_ == 0) || try_reinit, !via_channel,
(first || s + addr_list_max_size() <= AdnlNetworkManager::get_mtu())
- ? peer_recv_addr_list_version_
+ ? (try_reinit ? 0 : peer_recv_addr_list_version_)
: 0x7fffffff,
(first || s + 2 * addr_list_max_size() <= AdnlNetworkManager::get_mtu())
? peer_recv_priority_addr_list_version_
@@ -388,6 +394,9 @@ void AdnlPeerPairImpl::send_messages(std::vector messages)
void AdnlPeerPairImpl::send_packet_continue(AdnlPacket packet, td::actor::ActorId conn,
bool via_channel) {
+ if (!try_reinit_at_ && last_received_packet_ < td::Timestamp::in(-5.0)) {
+ try_reinit_at_ = td::Timestamp::in(10.0);
+ }
packet.run_basic_checks().ensure();
auto B = serialize_tl_object(packet.tl(), true);
if (via_channel) {
@@ -800,7 +809,9 @@ void AdnlPeerPairImpl::Conn::create_conn(td::actor::ActorId pe
void AdnlPeerPairImpl::conn_change_state(AdnlConnectionIdShort id, bool ready) {
if (ready) {
if (pending_messages_.size() > 0) {
- send_messages_in(std::move(pending_messages_), true);
+ auto messages = std::move(pending_messages_);
+ pending_messages_.clear();
+ send_messages_in(std::move(messages), true);
}
}
}
@@ -949,6 +960,7 @@ void AdnlPeerPairImpl::got_data_from_dht(td::Result R) {
CHECK(dht_query_active_);
dht_query_active_ = false;
next_dht_query_at_ = td::Timestamp::in(td::Random::fast(60.0, 120.0));
+ alarm_timestamp().relax(next_dht_query_at_);
if (R.is_error()) {
VLOG(ADNL_INFO) << this << ": dht query failed: " << R.move_as_error();
return;
diff --git a/adnl/adnl-peer.hpp b/adnl/adnl-peer.hpp
index 4e1d8db7..041de23c 100644
--- a/adnl/adnl-peer.hpp
+++ b/adnl/adnl-peer.hpp
@@ -255,6 +255,9 @@ class AdnlPeerPairImpl : public AdnlPeerPair {
td::Timestamp next_db_update_at_ = td::Timestamp::never();
td::Timestamp retry_send_at_ = td::Timestamp::never();
+ td::Timestamp last_received_packet_ = td::Timestamp::never();
+ td::Timestamp try_reinit_at_ = td::Timestamp::never();
+
bool has_reverse_addr_ = false;
td::Timestamp request_reverse_ping_after_ = td::Timestamp::now();
bool request_reverse_ping_active_ = false;
diff --git a/blockchain-explorer/CMakeLists.txt b/blockchain-explorer/CMakeLists.txt
index 0d02f01f..11328a7a 100644
--- a/blockchain-explorer/CMakeLists.txt
+++ b/blockchain-explorer/CMakeLists.txt
@@ -1,22 +1,28 @@
cmake_minimum_required(VERSION 3.0.2 FATAL_ERROR)
+option(NIX "Use \"ON\" for a static build." OFF)
-find_package(MHD)
-
-if (MHD_FOUND)
-
- set(BLOCHAIN_EXPLORER_SOURCE
+set(BLOCHAIN_EXPLORER_SOURCE
blockchain-explorer.cpp
blockchain-explorer.hpp
blockchain-explorer-http.cpp
blockchain-explorer-http.hpp
blockchain-explorer-query.cpp
blockchain-explorer-query.hpp
- )
+)
- add_executable(blockchain-explorer ${BLOCHAIN_EXPLORER_SOURCE})
+add_executable(blockchain-explorer ${BLOCHAIN_EXPLORER_SOURCE})
+
+if (NIX)
+ find_package(PkgConfig REQUIRED)
+ pkg_check_modules(MHD libmicrohttpd)
+ target_include_directories(blockchain-explorer PUBLIC ${MHD_INCLUDE_DIRS} ${MHD_STATIC_INCLUDE_DIRS})
+ target_link_libraries(blockchain-explorer tdutils tdactor adnllite tl_lite_api tl-lite-utils ton_crypto ton_block ${MHD_LIBRARIES} ${MHD_STATIC_LIBRARIES})
+else()
+ find_package(MHD)
target_include_directories(blockchain-explorer PUBLIC ${MHD_INCLUDE_DIRS})
- target_link_libraries(blockchain-explorer tdutils tdactor adnllite tl_lite_api tl-lite-utils
- ton_crypto ton_block ${MHD_LIBRARY})
-
+ target_link_libraries(blockchain-explorer tdutils tdactor adnllite tl_lite_api tl-lite-utils ton_crypto ton_block ${MHD_LIBRARIES})
endif()
+
+install(TARGETS blockchain-explorer RUNTIME DESTINATION bin)
+
diff --git a/crypto/CMakeLists.txt b/crypto/CMakeLists.txt
index fab75bfc..598169f7 100644
--- a/crypto/CMakeLists.txt
+++ b/crypto/CMakeLists.txt
@@ -143,6 +143,7 @@ set(FIFT_SOURCE
fift/Dictionary.cpp
fift/Fift.cpp
fift/IntCtx.cpp
+ fift/HashMap.cpp
fift/Continuation.cpp
fift/SourceLookup.cpp
fift/utils.cpp
@@ -151,6 +152,7 @@ set(FIFT_SOURCE
fift/Dictionary.h
fift/Fift.h
fift/IntCtx.h
+ fift/HashMap.h
fift/Continuation.h
fift/SourceLookup.h
fift/utils.h
@@ -274,6 +276,10 @@ add_library(ton_crypto STATIC ${TON_CRYPTO_SOURCE})
target_include_directories(ton_crypto PUBLIC $
$)
target_link_libraries(ton_crypto PUBLIC ${OPENSSL_CRYPTO_LIBRARY} tdutils tddb_utils)
+if (USE_EMSCRIPTEN)
+ target_link_options(ton_crypto PRIVATE -fexceptions)
+ target_compile_options(ton_crypto PRIVATE -fexceptions)
+endif()
if (NOT WIN32)
find_library(DL dl)
if (DL)
@@ -296,6 +302,10 @@ target_link_libraries(test-ed25519-crypto PUBLIC ton_crypto)
add_library(fift-lib ${FIFT_SOURCE})
target_include_directories(fift-lib PUBLIC $)
target_link_libraries(fift-lib PUBLIC ton_crypto ton_db tdutils ton_block)
+if (USE_EMSCRIPTEN)
+ target_link_options(fift-lib PRIVATE -fexceptions)
+ target_compile_options(fift-lib PRIVATE -fexceptions)
+endif()
set_target_properties(fift-lib PROPERTIES OUTPUT_NAME fift)
add_executable(fift fift/fift-main.cpp)
@@ -324,17 +334,20 @@ if (USE_EMSCRIPTEN)
add_executable(funcfiftlib funcfiftlib/funcfiftlib.cpp ${FUNC_LIB_SOURCE})
target_include_directories(funcfiftlib PUBLIC $)
target_link_libraries(funcfiftlib PUBLIC fift-lib src_parser git)
- target_link_options(funcfiftlib PRIVATE -sEXPORTED_RUNTIME_METHODS=FS,ccall,cwrap,_malloc,free,UTF8ToString,stringToUTF8)
- target_link_options(funcfiftlib PRIVATE -sEXPORTED_FUNCTIONS=_func_compile,_version)
+ target_link_options(funcfiftlib PRIVATE -sEXPORTED_RUNTIME_METHODS=FS,ccall,cwrap,UTF8ToString,stringToUTF8,lengthBytesUTF8,addFunction,removeFunction,setValue)
+ target_link_options(funcfiftlib PRIVATE -sEXPORTED_FUNCTIONS=_func_compile,_version,_malloc,_free,_setThrew)
target_link_options(funcfiftlib PRIVATE -sEXPORT_NAME=CompilerModule)
target_link_options(funcfiftlib PRIVATE -sERROR_ON_UNDEFINED_SYMBOLS=0)
- target_link_options(funcfiftlib PRIVATE -sFILESYSTEM=1)
+ target_link_options(funcfiftlib PRIVATE -sFILESYSTEM=1 -lnodefs.js)
target_link_options(funcfiftlib PRIVATE -Oz)
target_link_options(funcfiftlib PRIVATE -sIGNORE_MISSING_MAIN=1)
target_link_options(funcfiftlib PRIVATE -sAUTO_NATIVE_LIBRARIES=0)
target_link_options(funcfiftlib PRIVATE -sMODULARIZE=1)
+ target_link_options(funcfiftlib PRIVATE -sALLOW_MEMORY_GROWTH=1)
+ target_link_options(funcfiftlib PRIVATE -sALLOW_TABLE_GROWTH=1)
target_link_options(funcfiftlib PRIVATE --embed-file ${CMAKE_CURRENT_SOURCE_DIR}/fift/lib@/fiftlib)
- target_compile_options(funcfiftlib PRIVATE -sDISABLE_EXCEPTION_CATCHING=0)
+ target_link_options(funcfiftlib PRIVATE -fexceptions)
+ target_compile_options(funcfiftlib PRIVATE -fexceptions)
endif()
add_executable(tlbc tl/tlbc.cpp)
@@ -484,6 +497,6 @@ if (WINGETOPT_FOUND)
target_link_libraries_system(test-weight-distr wingetopt)
endif()
-install(TARGETS fift func pow-miner RUNTIME DESTINATION bin)
+install(TARGETS fift func create-state tlbc RUNTIME DESTINATION bin)
install(DIRECTORY fift/lib/ DESTINATION lib/fift)
install(DIRECTORY smartcont DESTINATION share/ton)
diff --git a/crypto/block/block-db.cpp b/crypto/block/block-db.cpp
index 2be1f580..21c7c0a0 100644
--- a/crypto/block/block-db.cpp
+++ b/crypto/block/block-db.cpp
@@ -624,6 +624,7 @@ void BlockDbImpl::get_block_by_id(ton::BlockId blk_id, bool need_data, td::Promi
}
}
promise(it->second);
+ return;
}
promise(td::Status::Error(-666, "block not found in database"));
}
@@ -642,6 +643,7 @@ void BlockDbImpl::get_state_by_id(ton::BlockId blk_id, bool need_data, td::Promi
}
}
promise(it->second);
+ return;
}
if (zerostate.not_null() && blk_id == zerostate->blk.id) {
LOG(DEBUG) << "get_state_by_id(): zerostate requested";
@@ -666,6 +668,7 @@ void BlockDbImpl::get_out_queue_info_by_id(ton::BlockId blk_id, td::Promisesecond->data.is_null()) {
LOG(DEBUG) << "loading data for state " << blk_id.to_str();
@@ -679,6 +682,7 @@ void BlockDbImpl::get_out_queue_info_by_id(ton::BlockId blk_id, td::Promisesecond->data.clone(), options);
@@ -707,10 +711,12 @@ void BlockDbImpl::get_out_queue_info_by_id(ton::BlockId blk_id, td::Promisesecond->blk.root_hash != state_root->get_hash().bits()) {
promise(td::Status::Error(
-668, std::string{"state for block "} + blk_id.to_str() + " is invalid : state root hash mismatch"));
+ return;
}
vm::CellSlice cs = vm::load_cell_slice(state_root);
if (!cs.have(64, 1) || cs.prefetch_ulong(32) != 0x9023afde) {
promise(td::Status::Error(-668, std::string{"state for block "} + blk_id.to_str() + " is invalid"));
+ return;
}
auto out_queue_info = cs.prefetch_ref();
promise(Ref{true, blk_id, it2->second->blk.root_hash.cbits(), state_root->get_hash().bits(),
@@ -758,6 +764,7 @@ void BlockDbImpl::save_new_block(ton::BlockIdExt id, td::BufferSlice data, int a
auto save_res = save_db_file(id.file_hash, data, FMode::chk_if_exists | FMode::overwrite | FMode::chk_file_hash);
if (save_res.is_error()) {
promise(std::move(save_res));
+ return;
}
auto sz = data.size();
auto lev = bb.alloc(id.id, id.root_hash, id.file_hash, data.size(), authority & 0xff);
@@ -780,6 +787,7 @@ void BlockDbImpl::save_new_state(ton::BlockIdExt id, td::BufferSlice data, int a
auto save_res = save_db_file(id.file_hash, data, FMode::chk_if_exists | FMode::overwrite | FMode::chk_file_hash);
if (save_res.is_error()) {
promise(std::move(save_res));
+ return;
}
auto sz = data.size();
auto lev = bb.alloc(id.id, id.root_hash, id.file_hash, data.size(), authority & 0xff);
diff --git a/crypto/block/block-parse.cpp b/crypto/block/block-parse.cpp
index c62854d4..e9eb8209 100644
--- a/crypto/block/block-parse.cpp
+++ b/crypto/block/block-parse.cpp
@@ -1000,7 +1000,7 @@ bool Account::skip_copy_depth_balance(vm::CellBuilder& cb, vm::CellSlice& cs) co
}
const Account t_Account, t_AccountE{true};
-const RefTo t_Ref_Account;
+const RefTo t_Ref_AccountE{true};
bool ShardAccount::extract_account_state(Ref cs_ref, Ref& acc_state) {
if (cs_ref.is_null()) {
diff --git a/crypto/block/block-parse.h b/crypto/block/block-parse.h
index 25476a64..ad4faec0 100644
--- a/crypto/block/block-parse.h
+++ b/crypto/block/block-parse.h
@@ -536,7 +536,7 @@ struct Account final : TLB_Complex {
};
extern const Account t_Account, t_AccountE;
-extern const RefTo t_Ref_Account;
+extern const RefTo t_Ref_AccountE;
struct AccountStatus final : TLB {
enum { acc_state_uninit, acc_state_frozen, acc_state_active, acc_state_nonexist };
@@ -572,7 +572,7 @@ struct ShardAccount final : TLB_Complex {
return cs.advance_ext(0x140, 1);
}
bool validate_skip(int* ops, vm::CellSlice& cs, bool weak = false) const override {
- return cs.advance(0x140) && t_Ref_Account.validate_skip(ops, cs, weak);
+ return cs.advance(0x140) && t_Ref_AccountE.validate_skip(ops, cs, weak);
}
static bool unpack(vm::CellSlice& cs, Record& info) {
return info.unpack(cs);
diff --git a/crypto/block/block.tlb b/crypto/block/block.tlb
index 82179c40..6f816197 100644
--- a/crypto/block/block.tlb
+++ b/crypto/block/block.tlb
@@ -366,7 +366,7 @@ trans_merge_install$0111 split_info:SplitMergeInfo
smc_info#076ef1ea actions:uint16 msgs_sent:uint16
unixtime:uint32 block_lt:uint64 trans_lt:uint64
rand_seed:bits256 balance_remaining:CurrencyCollection
- myself:MsgAddressInt = SmartContractInfo;
+ myself:MsgAddressInt global_config:(Maybe Cell) = SmartContractInfo;
//
//
out_list_empty$_ = OutList 0;
diff --git a/crypto/block/check-proof.cpp b/crypto/block/check-proof.cpp
index 6720ad40..431a03fe 100644
--- a/crypto/block/check-proof.cpp
+++ b/crypto/block/check-proof.cpp
@@ -315,6 +315,113 @@ td::Result TransactionList::validate() const {
return std::move(res);
}
+td::Result BlockTransaction::validate(bool check_proof) const {
+ if (root.is_null()) {
+ return td::Status::Error("transactions are expected to be non-empty");
+ }
+ if (check_proof && proof->get_hash().bits().compare(root->get_hash().bits(), 256)) {
+ return td::Status::Error(PSLICE() << "transaction hash mismatch: Merkle proof expects "
+ << proof->get_hash().bits().to_hex(256)
+ << " but received data has " << root->get_hash().bits().to_hex(256));
+ }
+ block::gen::Transaction::Record trans;
+ if (!tlb::unpack_cell(root, trans)) {
+ return td::Status::Error("cannot unpack transaction cell");
+ }
+ Info res;
+ res.blkid = blkid;
+ res.now = trans.now;
+ res.lt = trans.lt;
+ res.hash = root->get_hash().bits();
+ res.transaction = root;
+ return std::move(res);
+}
+
+td::Result BlockTransactionList::validate(bool check_proof) const {
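+  // Hard cap on how many transactions the proof walk will accept, independent of req_count.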
+ constexpr int max_answer_transactions = 256;
+
+ TRY_RESULT_PREFIX(list, vm::std_boc_deserialize_multi(std::move(transactions_boc)), "cannot deserialize transactions boc: ");
+ std::vector> tx_proofs(list.size());
+
+ if (check_proof) {
+ try {
+ TRY_RESULT(proof_cell, vm::std_boc_deserialize(std::move(proof_boc)));
+ auto virt_root = vm::MerkleProof::virtualize(proof_cell, 1);
+
+ if (blkid.root_hash != virt_root->get_hash().bits()) {
+ return td::Status::Error("Invalid block proof root hash");
+ }
+ block::gen::Block::Record blk;
+ block::gen::BlockExtra::Record extra;
+ if (!(tlb::unpack_cell(virt_root, blk) && tlb::unpack_cell(std::move(blk.extra), extra))) {
+ return td::Status::Error("Error unpacking proof cell");
+ }
+ vm::AugmentedDictionary acc_dict{vm::load_cell_slice_ref(extra.account_blocks), 256,
+ block::tlb::aug_ShardAccountBlocks};
+
+ bool eof = false;
+ ton::LogicalTime reverse = reverse_mode ? ~0ULL : 0;
+ ton::LogicalTime trans_lt = static_cast(start_lt);
+ td::Bits256 cur_addr = start_addr;
+ bool allow_same = true;
+ int count = 0;
+ while (!eof && count < req_count && count < max_answer_transactions) {
+ auto value = acc_dict.extract_value(
+ acc_dict.vm::DictionaryFixed::lookup_nearest_key(cur_addr.bits(), 256, !reverse, allow_same));
+ if (value.is_null()) {
+ eof = true;
+ break;
+ }
+ allow_same = false;
+ if (cur_addr != start_addr) {
+ trans_lt = reverse;
+ }
+
+ block::gen::AccountBlock::Record acc_blk;
+ if (!tlb::csr_unpack(std::move(value), acc_blk) || acc_blk.account_addr != cur_addr) {
+ return td::Status::Error("Error unpacking proof account block");
+ }
+ vm::AugmentedDictionary trans_dict{vm::DictNonEmpty(), std::move(acc_blk.transactions), 64,
+ block::tlb::aug_AccountTransactions};
+ td::BitArray<64> cur_trans{(long long)trans_lt};
+ while (count < req_count && count < max_answer_transactions) {
+ auto tvalue = trans_dict.extract_value_ref(
+ trans_dict.vm::DictionaryFixed::lookup_nearest_key(cur_trans.bits(), 64, !reverse));
+ if (tvalue.is_null()) {
+ trans_lt = reverse;
+ break;
+ }
+ if (static_cast(count) < tx_proofs.size()) {
+ tx_proofs[count] = std::move(tvalue);
+ }
+ count++;
+ }
+ }
+ if (static_cast(count) != list.size()) {
+ return td::Status::Error(PSLICE() << "Txs count mismatch in proof (" << count << ") and response (" << list.size() << ")");
+ }
+ } catch (vm::VmError& err) {
+ return err.as_status("Couldn't verify proof: ");
+ } catch (vm::VmVirtError& err) {
+ return err.as_status("Couldn't verify proof: ");
+ } catch (...) {
+ return td::Status::Error("Unknown exception raised while verifying proof");
+ }
+ }
+
+ Info res;
+ for (int i = 0; i < static_cast(list.size()); i++) {
+ auto& root = list[i];
+ BlockTransaction transaction;
+ transaction.root = root;
+ transaction.blkid = blkid;
+ transaction.proof = tx_proofs[i];
+ TRY_RESULT(info, transaction.validate(check_proof));
+ res.transactions.push_back(std::move(info));
+ }
+ return std::move(res);
+}
+
td::Status BlockProofLink::validate(td::uint32* save_utime) const {
if (save_utime) {
*save_utime = 0;
@@ -362,7 +469,7 @@ td::Status BlockProofLink::validate(td::uint32* save_utime) const {
if (to.seqno()) {
TRY_STATUS(check_block_header(vd_root, to));
if (!(tlb::unpack_cell(vd_root, blk) && tlb::unpack_cell(blk.info, info))) {
- return td::Status::Error("cannot unpack header for block "s + from.to_str());
+ return td::Status::Error("cannot unpack header for block "s + to.to_str());
}
if (info.key_block != is_key) {
return td::Status::Error(PSTRING() << "incorrect is_key_block value " << is_key << " for destination block "
diff --git a/crypto/block/check-proof.h b/crypto/block/check-proof.h
index 527f3138..497a4eba 100644
--- a/crypto/block/check-proof.h
+++ b/crypto/block/check-proof.h
@@ -88,4 +88,36 @@ struct TransactionList {
td::Result validate() const;
};
+struct BlockTransaction {
+ ton::BlockIdExt blkid;
+ td::Ref root;
+ td::Ref proof;
+
+ struct Info {
+ ton::BlockIdExt blkid;
+ td::uint32 now;
+ ton::LogicalTime lt;
+ ton::Bits256 hash;
+ td::Ref transaction;
+ };
+ td::Result validate(bool check_proof) const;
+};
+
+struct BlockTransactionList {
+ ton::BlockIdExt blkid;
+ td::BufferSlice transactions_boc;
+ td::BufferSlice proof_boc;
+ ton::LogicalTime start_lt;
+ td::Bits256 start_addr;
+ bool reverse_mode;
+ int req_count;
+
+ struct Info {
+ ton::BlockIdExt blkid;
+ std::vector transactions;
+ };
+
+ td::Result validate(bool check_proof) const;
+};
+
} // namespace block
diff --git a/crypto/block/create-state.cpp b/crypto/block/create-state.cpp
index 7a734c3a..183da0a7 100644
--- a/crypto/block/create-state.cpp
+++ b/crypto/block/create-state.cpp
@@ -47,6 +47,7 @@
#include "fift/Fift.h"
#include "fift/Dictionary.h"
#include "fift/SourceLookup.h"
+#include "fift/IntCtx.h"
#include "fift/words.h"
#include "td/utils/logging.h"
@@ -308,7 +309,7 @@ td::RefInt256 create_smartcontract(td::RefInt256 smc_addr, Ref code, R
THRERR("cannot create smart-contract AccountStorage");
Ref storage = cb.finalize();
vm::CellStorageStat stats;
- PDO(stats.compute_used_storage(Ref(storage)));
+ PDO(stats.compute_used_storage(Ref(storage)).is_ok());
if (verbosity > 2) {
std::cerr << "storage is:\n";
vm::load_cell_slice(storage).print_rec(std::cerr);
@@ -866,8 +867,9 @@ int main(int argc, char* const argv[]) {
case 'v':
new_verbosity_level = VERBOSITY_NAME(FATAL) + (verbosity = td::to_integer(td::Slice(optarg)));
break;
- case 'V':
- std::cout << "create-state build information: [ Commit: " << GitMetadata::CommitSHA1() << ", Date: " << GitMetadata::CommitDate() << "]\n";
+ case 'V':
+ std::cout << "create-state build information: [ Commit: " << GitMetadata::CommitSHA1()
+ << ", Date: " << GitMetadata::CommitDate() << "]\n";
std::exit(0);
break;
case 'h':
diff --git a/crypto/block/mc-config.h b/crypto/block/mc-config.h
index 98d5ee32..dc512762 100644
--- a/crypto/block/mc-config.h
+++ b/crypto/block/mc-config.h
@@ -644,7 +644,6 @@ class Config {
static td::Result> unpack_param_dict(vm::Dictionary& dict);
static td::Result> unpack_param_dict(Ref dict_root);
- protected:
Config(int _mode) : mode(_mode) {
config_addr.set_zero();
}
diff --git a/crypto/block/transaction.cpp b/crypto/block/transaction.cpp
index adda48a5..47ea4e47 100644
--- a/crypto/block/transaction.cpp
+++ b/crypto/block/transaction.cpp
@@ -20,6 +20,7 @@
#include "block/block.h"
#include "block/block-parse.h"
#include "block/block-auto.h"
+#include "crypto/openssl/rand.hpp"
#include "td/utils/bits.h"
#include "td/utils/uint128.h"
#include "ton/ton-shard.h"
@@ -513,6 +514,7 @@ td::RefInt256 Account::compute_storage_fees(ton::UnixTime now, const std::vector
return StoragePrices::compute_storage_fees(now, pricing, storage_stat, last_paid, is_special, is_masterchain());
}
+namespace transaction {
Transaction::Transaction(const Account& _account, int ttype, ton::LogicalTime req_start_lt, ton::UnixTime _now,
Ref _inmsg)
: trans_type(ttype)
@@ -588,15 +590,19 @@ bool Transaction::unpack_input_msg(bool ihr_delivered, const ActionPhaseConfig*
in_msg_type = 2;
in_msg_extern = true;
// compute forwarding fees for this external message
- vm::CellStorageStat sstat; // for message size
- sstat.compute_used_storage(cs); // message body
- sstat.bits -= cs.size(); // bits in the root cells are free
- sstat.cells--; // the root cell itself is not counted as a cell
+ vm::CellStorageStat sstat; // for message size
+ auto cell_info = sstat.compute_used_storage(cs).move_as_ok(); // message body
+ sstat.bits -= cs.size(); // bits in the root cells are free
+ sstat.cells--; // the root cell itself is not counted as a cell
LOG(DEBUG) << "storage paid for a message: " << sstat.cells << " cells, " << sstat.bits << " bits";
if (sstat.bits > cfg->size_limits.max_msg_bits || sstat.cells > cfg->size_limits.max_msg_cells) {
LOG(DEBUG) << "inbound external message too large, invalid";
return false;
}
+ if (cell_info.max_merkle_depth > max_allowed_merkle_depth) {
+ LOG(DEBUG) << "inbound external message has too big merkle depth, invalid";
+ return false;
+ }
// fetch message pricing info
CHECK(cfg);
const MsgPrices& msg_prices = cfg->fetch_msg_prices(account.is_masterchain());
@@ -745,6 +751,7 @@ bool Transaction::prepare_credit_phase() {
total_fees += std::move(collected);
return true;
}
+} // namespace transaction
bool ComputePhaseConfig::parse_GasLimitsPrices(Ref cell, td::RefInt256& freeze_due_limit,
td::RefInt256& delete_due_limit) {
@@ -837,6 +844,7 @@ td::RefInt256 ComputePhaseConfig::compute_gas_price(td::uint64 gas_used) const {
: td::rshift(gas_price256 * (gas_used - flat_gas_limit), 16, 1) + flat_gas_price;
}
+namespace transaction {
bool Transaction::compute_gas_limits(ComputePhase& cp, const ComputePhaseConfig& cfg) {
// Compute gas limits
if (account.is_special) {
@@ -1057,13 +1065,21 @@ bool Transaction::prepare_compute_phase(const ComputePhaseConfig& cfg) {
std::unique_ptr logger;
auto vm_log = vm::VmLog();
if (cfg.with_vm_log) {
- logger = std::make_unique();
+ size_t log_max_size = cfg.vm_log_verbosity > 0 ? 1024 * 1024 : 256;
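+    // Keep up to 1 MiB of VM log when verbose logging is requested, otherwise only a short 256-byte tail.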
+ logger = std::make_unique(log_max_size);
vm_log.log_interface = logger.get();
vm_log.log_options = td::LogOptions(VERBOSITY_NAME(DEBUG), true, false);
+ if (cfg.vm_log_verbosity > 1) {
+ vm_log.log_mask |= vm::VmLog::ExecLocation;
+ if (cfg.vm_log_verbosity > 2) {
+ vm_log.log_mask |= vm::VmLog::DumpStack | vm::VmLog::GasRemaining;
+ }
+ }
}
vm::VmState vm{new_code, std::move(stack), gas, 1, new_data, vm_log, compute_vm_libraries(cfg)};
vm.set_max_data_depth(cfg.max_vm_data_depth);
vm.set_c7(prepare_vm_c7(cfg)); // tuple with SmartContractInfo
+ vm.set_chksig_always_succeed(cfg.ignore_chksig);
// vm.incr_stack_trace(1); // enable stack dump after each step
LOG(DEBUG) << "starting VM";
@@ -1145,19 +1161,20 @@ bool Transaction::prepare_action_phase(const ActionPhaseConfig& cfg) {
ap.reserved_balance.set_zero();
td::Ref old_code = new_code, old_data = new_data, old_library = new_library;
- auto enforce_state_size_limits = [&]() {
+ auto enforce_state_limits = [&]() {
if (account.is_special) {
return true;
}
- if (!check_state_size_limit(cfg)) {
+ auto S = check_state_limits(cfg);
+ if (S.is_error()) {
// Rollback changes to state, fail action phase
- LOG(INFO) << "Account state size exceeded limits";
+ LOG(INFO) << "Account state size exceeded limits: " << S.move_as_error();
new_storage_stat.clear();
new_code = old_code;
new_data = old_data;
new_library = old_library;
ap.result_code = 50;
- ap.state_size_too_big = true;
+ ap.state_exceeds_limits = true;
return false;
}
return true;
@@ -1238,8 +1255,8 @@ bool Transaction::prepare_action_phase(const ActionPhaseConfig& cfg) {
ap.no_funds = true;
}
LOG(DEBUG) << "invalid action " << ap.result_arg << " in action list: error code " << ap.result_code;
- // This is reuqired here because changes to libraries are applied even if actipn phase fails
- enforce_state_size_limits();
+      // This is required here because changes to libraries are applied even if action phase fails
+ enforce_state_limits();
return true;
}
}
@@ -1249,7 +1266,7 @@ bool Transaction::prepare_action_phase(const ActionPhaseConfig& cfg) {
new_code = ap.new_code;
}
new_data = compute_phase->new_data; // tentative persistent data update applied
- if (!enforce_state_size_limits()) {
+ if (!enforce_state_limits()) {
return true;
}
@@ -1322,8 +1339,8 @@ int Transaction::try_action_change_library(vm::CellSlice& cs, ActionPhase& ap, c
return 41;
}
vm::CellStorageStat sstat;
- sstat.compute_used_storage(lib_ref);
- if (sstat.cells > cfg.size_limits.max_library_cells) {
+ auto cell_info = sstat.compute_used_storage(lib_ref).move_as_ok();
+ if (sstat.cells > cfg.size_limits.max_library_cells || cell_info.max_merkle_depth > max_allowed_merkle_depth) {
return 43;
}
vm::CellBuilder cb;
@@ -1338,6 +1355,7 @@ int Transaction::try_action_change_library(vm::CellSlice& cs, ActionPhase& ap, c
ap.spec_actions++;
return 0;
}
+} // namespace transaction
// msg_fwd_fees = (lump_price + ceil((bit_price * msg.bits + cell_price * msg.cells)/2^16)) nanograms
// ihr_fwd_fees = ceil((msg_fwd_fees * ihr_price_factor)/2^16) nanograms
@@ -1372,6 +1390,7 @@ td::RefInt256 MsgPrices::get_next_part(td::RefInt256 total) const {
return (std::move(total) * next_frac) >> 16;
}
+namespace transaction {
bool Transaction::check_replace_src_addr(Ref& src_addr) const {
int t = (int)src_addr->prefetch_ulong(2);
if (!t && src_addr->size_ext() == 2) {
@@ -1594,16 +1613,27 @@ int Transaction::try_action_send_msg(const vm::CellSlice& cs0, ActionPhase& ap,
// compute size of message
vm::CellStorageStat sstat; // for message size
// preliminary storage estimation of the resulting message
- sstat.add_used_storage(msg.init, true, 3); // message init
- sstat.add_used_storage(msg.body, true, 3); // message body (the root cell itself is not counted)
+ unsigned max_merkle_depth = 0;
+ auto add_used_storage = [&](const auto& x, unsigned skip_root_count) {
+ if (x.not_null()) {
+ auto res = sstat.add_used_storage(x, true, skip_root_count).move_as_ok();
+ max_merkle_depth = std::max(max_merkle_depth, res.max_merkle_depth);
+ }
+ };
+ add_used_storage(msg.init, 3); // message init
+ add_used_storage(msg.body, 3); // message body (the root cell itself is not counted)
if (!ext_msg) {
- sstat.add_used_storage(info.value->prefetch_ref());
+ add_used_storage(info.value->prefetch_ref(), 0);
}
LOG(DEBUG) << "storage paid for a message: " << sstat.cells << " cells, " << sstat.bits << " bits";
if (sstat.bits > cfg.size_limits.max_msg_bits || sstat.cells > cfg.size_limits.max_msg_cells) {
LOG(DEBUG) << "message too large, invalid";
return skip_invalid ? 0 : 40;
}
+ if (max_merkle_depth > max_allowed_merkle_depth) {
+ LOG(DEBUG) << "message has too big merkle depth, invalid";
+ return skip_invalid ? 0 : 40;
+ }
// compute forwarding fees
auto fees_c = msg_prices.compute_fwd_ihr_fees(sstat.cells, sstat.bits, info.ihr_disabled);
@@ -1855,7 +1885,7 @@ int Transaction::try_action_reserve_currency(vm::CellSlice& cs, ActionPhase& ap,
return 0;
}
-bool Transaction::check_state_size_limit(const ActionPhaseConfig& cfg) {
+td::Status Transaction::check_state_limits(const ActionPhaseConfig& cfg) {
auto cell_equal = [](const td::Ref& a, const td::Ref& b) -> bool {
if (a.is_null()) {
return b.is_null();
@@ -1867,21 +1897,36 @@ bool Transaction::check_state_size_limit(const ActionPhaseConfig& cfg) {
};
if (cell_equal(account.code, new_code) && cell_equal(account.data, new_data) &&
cell_equal(account.library, new_library)) {
- return true;
+ return td::Status::OK();
}
// new_storage_stat is used here beause these stats will be reused in compute_state()
new_storage_stat.limit_cells = cfg.size_limits.max_acc_state_cells;
new_storage_stat.limit_bits = cfg.size_limits.max_acc_state_bits;
- new_storage_stat.add_used_storage(new_code);
- new_storage_stat.add_used_storage(new_data);
- new_storage_stat.add_used_storage(new_library);
+ td::Timer timer;
+ auto add_used_storage = [&](const td::Ref& cell) -> td::Status {
+ if (cell.not_null()) {
+ TRY_RESULT(res, new_storage_stat.add_used_storage(cell));
+ if (res.max_merkle_depth > max_allowed_merkle_depth) {
+ return td::Status::Error("too big merkle depth");
+ }
+ }
+ return td::Status::OK();
+ };
+ TRY_STATUS(add_used_storage(new_code));
+ TRY_STATUS(add_used_storage(new_data));
+ TRY_STATUS(add_used_storage(new_library));
+ if (timer.elapsed() > 0.1) {
+ LOG(INFO) << "Compute used storage took " << timer.elapsed() << "s";
+ }
if (acc_status == Account::acc_active) {
new_storage_stat.clear_limit();
} else {
new_storage_stat.clear();
}
return new_storage_stat.cells <= cfg.size_limits.max_acc_state_cells &&
- new_storage_stat.bits <= cfg.size_limits.max_acc_state_bits;
+ new_storage_stat.bits <= cfg.size_limits.max_acc_state_bits
+ ? td::Status::OK()
+ : td::Status::Error("state too big");
}
bool Transaction::prepare_bounce_phase(const ActionPhaseConfig& cfg) {
@@ -1978,6 +2023,7 @@ bool Transaction::prepare_bounce_phase(const ActionPhaseConfig& cfg) {
bp.ok = true;
return true;
}
+} // namespace transaction
/*
*
@@ -2033,6 +2079,7 @@ static td::optional try_update_storage_stat(const vm::CellS
return new_stat;
}
+namespace transaction {
bool Transaction::compute_state() {
if (new_total_state.not_null()) {
return true;
@@ -2108,7 +2155,7 @@ bool Transaction::compute_state() {
stats = new_stats.unwrap();
} else {
td::Timer timer;
- CHECK(stats.add_used_storage(Ref(storage)));
+ stats.add_used_storage(Ref(storage)).ensure();
if (timer.elapsed() > 0.1) {
LOG(INFO) << "Compute used storage took " << timer.elapsed() << "s";
}
@@ -2460,6 +2507,7 @@ void Transaction::extract_out_msgs(std::vector& list) {
list.emplace_back(start_lt + i + 1, std::move(out_msgs[i]));
}
}
+} // namespace transaction
void Account::push_transaction(Ref trans_root, ton::LogicalTime trans_lt) {
transactions.emplace_back(trans_lt, std::move(trans_root));
@@ -2503,4 +2551,82 @@ bool Account::libraries_changed() const {
}
}
+td::Status FetchConfigParams::fetch_config_params(const block::Config& config,
+ Ref* old_mparams,
+ std::vector* storage_prices,
+ block::StoragePhaseConfig* storage_phase_cfg,
+ td::BitArray<256>* rand_seed,
+ block::ComputePhaseConfig* compute_phase_cfg,
+ block::ActionPhaseConfig* action_phase_cfg,
+ td::RefInt256* masterchain_create_fee,
+ td::RefInt256* basechain_create_fee,
+ ton::WorkchainId wc,
+ ton::UnixTime now) {
+ *old_mparams = config.get_config_param(9);
+ {
+ auto res = config.get_storage_prices();
+ if (res.is_error()) {
+ return res.move_as_error();
+ }
+ *storage_prices = res.move_as_ok();
+ }
+ if (rand_seed->is_zero()) {
+ // generate rand seed
+ prng::rand_gen().strong_rand_bytes(rand_seed->data(), 32);
+ LOG(DEBUG) << "block random seed set to " << rand_seed->to_hex();
+ }
+ TRY_RESULT(size_limits, config.get_size_limits_config());
+ {
+ // compute compute_phase_cfg / storage_phase_cfg
+ auto cell = config.get_config_param(wc == ton::masterchainId ? 20 : 21);
+ if (cell.is_null()) {
+ return td::Status::Error(-668, "cannot fetch current gas prices and limits from masterchain configuration");
+ }
+ if (!compute_phase_cfg->parse_GasLimitsPrices(std::move(cell), storage_phase_cfg->freeze_due_limit,
+ storage_phase_cfg->delete_due_limit)) {
+ return td::Status::Error(-668, "cannot unpack current gas prices and limits from masterchain configuration");
+ }
+ compute_phase_cfg->block_rand_seed = *rand_seed;
+ compute_phase_cfg->max_vm_data_depth = size_limits.max_vm_data_depth;
+ compute_phase_cfg->global_config = config.get_root_cell();
+ compute_phase_cfg->suspended_addresses = config.get_suspended_addresses(now);
+ }
+ {
+ // compute action_phase_cfg
+ block::gen::MsgForwardPrices::Record rec;
+ auto cell = config.get_config_param(24);
+ if (cell.is_null() || !tlb::unpack_cell(std::move(cell), rec)) {
+ return td::Status::Error(-668, "cannot fetch masterchain message transfer prices from masterchain configuration");
+ }
+ action_phase_cfg->fwd_mc =
+ block::MsgPrices{rec.lump_price, rec.bit_price, rec.cell_price, rec.ihr_price_factor,
+ (unsigned)rec.first_frac, (unsigned)rec.next_frac};
+ cell = config.get_config_param(25);
+ if (cell.is_null() || !tlb::unpack_cell(std::move(cell), rec)) {
+ return td::Status::Error(-668, "cannot fetch standard message transfer prices from masterchain configuration");
+ }
+ action_phase_cfg->fwd_std =
+ block::MsgPrices{rec.lump_price, rec.bit_price, rec.cell_price, rec.ihr_price_factor,
+ (unsigned)rec.first_frac, (unsigned)rec.next_frac};
+ action_phase_cfg->workchains = &config.get_workchain_list();
+ action_phase_cfg->bounce_msg_body = (config.has_capability(ton::capBounceMsgBody) ? 256 : 0);
+ action_phase_cfg->size_limits = size_limits;
+ }
+ {
+ // fetch block_grams_created
+ auto cell = config.get_config_param(14);
+ if (cell.is_null()) {
+ *basechain_create_fee = *masterchain_create_fee = td::zero_refint();
+ } else {
+ block::gen::BlockCreateFees::Record create_fees;
+ if (!(tlb::unpack_cell(cell, create_fees) &&
+ block::tlb::t_Grams.as_integer_to(create_fees.masterchain_block_fee, *masterchain_create_fee) &&
+ block::tlb::t_Grams.as_integer_to(create_fees.basechain_block_fee, *basechain_create_fee))) {
+ return td::Status::Error(-668, "cannot unpack BlockCreateFees from configuration parameter #14");
+ }
+ }
+ }
+ return td::Status::OK();
+}
+
} // namespace block
diff --git a/crypto/block/transaction.h b/crypto/block/transaction.h
index 2560c010..6346ddcd 100644
--- a/crypto/block/transaction.h
+++ b/crypto/block/transaction.h
@@ -35,7 +35,10 @@ using td::Ref;
using LtCellRef = std::pair>;
struct Account;
+
+namespace transaction {
struct Transaction;
+} // namespace transaction
struct CollatorError {
std::string msg;
@@ -106,9 +109,11 @@ struct ComputePhaseConfig {
std::unique_ptr libraries;
Ref global_config;
td::BitArray<256> block_rand_seed;
+ bool ignore_chksig{false};
bool with_vm_log{false};
td::uint16 max_vm_data_depth = 512;
std::unique_ptr suspended_addresses;
+ int vm_log_verbosity = 0;
ComputePhaseConfig(td::uint64 _gas_price = 0, td::uint64 _gas_limit = 0, td::uint64 _gas_credit = 0)
: gas_price(_gas_price), gas_limit(_gas_limit), special_gas_limit(_gas_limit), gas_credit(_gas_credit) {
compute_threshold();
@@ -186,7 +191,7 @@ struct ActionPhase {
bool code_changed{false};
bool action_list_invalid{false};
bool acc_delete_req{false};
- bool state_size_too_big{false};
+ bool state_exceeds_limits{false};
enum { acst_unchanged = 0, acst_frozen = 2, acst_deleted = 3 };
int acc_status_change{acst_unchanged};
td::RefInt256 total_fwd_fees; // all fees debited from the account
@@ -273,7 +278,7 @@ struct Account {
bool create_account_block(vm::CellBuilder& cb); // stores an AccountBlock with all transactions
protected:
- friend struct Transaction;
+ friend struct transaction::Transaction;
bool set_split_depth(int split_depth);
bool check_split_depth(int split_depth) const;
bool forget_split_depth();
@@ -288,7 +293,9 @@ struct Account {
bool compute_my_addr(bool force = false);
};
+namespace transaction {
struct Transaction {
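+  // Maximum allowed nesting depth of Merkle proof/update cells in messages, libraries and the new
+  // account state; deeper structures are rejected.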
+ static constexpr unsigned max_allowed_merkle_depth = 2;
enum {
tr_none,
tr_ord,
@@ -354,7 +361,7 @@ struct Transaction {
  std::vector<Ref<vm::Cell>> compute_vm_libraries(const ComputePhaseConfig& cfg);
bool prepare_compute_phase(const ComputePhaseConfig& cfg);
bool prepare_action_phase(const ActionPhaseConfig& cfg);
- bool check_state_size_limit(const ActionPhaseConfig& cfg);
+ td::Status check_state_limits(const ActionPhaseConfig& cfg);
bool prepare_bounce_phase(const ActionPhaseConfig& cfg);
bool compute_state();
bool serialize();
@@ -390,5 +397,20 @@ struct Transaction {
bool serialize_bounce_phase(vm::CellBuilder& cb);
bool unpack_msg_state(bool lib_only = false);
};
+} // namespace transaction
+
+struct FetchConfigParams {
+static td::Status fetch_config_params(const block::Config& config,
+ Ref* old_mparams,
+ std::vector* storage_prices,
+ StoragePhaseConfig* storage_phase_cfg,
+ td::BitArray<256>* rand_seed,
+ ComputePhaseConfig* compute_phase_cfg,
+ ActionPhaseConfig* action_phase_cfg,
+ td::RefInt256* masterchain_create_fee,
+ td::RefInt256* basechain_create_fee,
+ ton::WorkchainId wc,
+ ton::UnixTime now);
+};
} // namespace block
diff --git a/crypto/fift/Continuation.cpp b/crypto/fift/Continuation.cpp
index 7e3b5ea2..e895082f 100644
--- a/crypto/fift/Continuation.cpp
+++ b/crypto/fift/Continuation.cpp
@@ -27,7 +27,7 @@ namespace fift {
//
bool FiftCont::print_dict_name(std::ostream& os, const IntCtx& ctx) const {
std::string word_name;
- if (ctx.dictionary && ctx.dictionary->lookup_def(this, &word_name)) {
+ if (ctx.dictionary.lookup_def(this, &word_name)) {
if (word_name.size() && word_name.back() == ' ') {
word_name.pop_back();
}
@@ -39,7 +39,7 @@ bool FiftCont::print_dict_name(std::ostream& os, const IntCtx& ctx) const {
std::string FiftCont::get_dict_name(const IntCtx& ctx) const {
std::string word_name;
- if (ctx.dictionary && ctx.dictionary->lookup_def(this, &word_name)) {
+ if (ctx.dictionary.lookup_def(this, &word_name)) {
if (word_name.size() && word_name.back() == ' ') {
word_name.pop_back();
}
@@ -63,6 +63,140 @@ bool FiftCont::dump(std::ostream& os, const IntCtx& ctx) const {
return ok;
}
+//
+// StackWord
+//
+Ref StackWord::run_tail(IntCtx& ctx) const {
+ f(ctx.stack);
+ return {};
+}
+
+//
+// CtxWord
+//
+Ref CtxWord::run_tail(IntCtx& ctx) const {
+ f(ctx);
+ return {};
+}
+
+//
+// CtxTailWord
+//
+Ref CtxTailWord::run_tail(IntCtx& ctx) const {
+ return f(ctx);
+}
+
+//
+// WordList
+//
+WordList::WordList(std::vector<Ref<FiftCont>>&& _list) : list(std::move(_list)) {
+}
+
+WordList::WordList(const std::vector<Ref<FiftCont>>& _list) : list(_list) {
+}
+
+WordList& WordList::push_back(Ref word_def) {
+ list.push_back(std::move(word_def));
+ return *this;
+}
+
+WordList& WordList::push_back(FiftCont& wd) {
+ list.emplace_back(&wd);
+ return *this;
+}
+
+Ref WordList::run_tail(IntCtx& ctx) const {
+ if (list.empty()) {
+ return {};
+ }
+ if (list.size() > 1) {
+ ctx.next = td::make_ref(std::move(ctx.next), Ref(this), 1);
+ }
+ return list[0];
+}
+
+void WordList::close() {
+ list.shrink_to_fit();
+}
+
+WordList& WordList::append(const std::vector<Ref<FiftCont>>& other) {
+ list.insert(list.end(), other.begin(), other.end());
+ return *this;
+}
+
+WordList& WordList::append(const Ref* begin, const Ref* end) {
+ list.insert(list.end(), begin, end);
+ return *this;
+}
+
+bool WordList::dump(std::ostream& os, const IntCtx& ctx) const {
+ os << "{";
+ for (auto entry : list) {
+ os << ' ';
+ entry->print_name(os, ctx);
+ }
+ os << " }" << std::endl;
+ return true;
+}
+
+//
+// ListCont
+//
+
+Ref ListCont::run_tail(IntCtx& ctx) const {
+ auto sz = list->size();
+ if (pos >= sz) {
+ return std::move(ctx.next);
+ } else if (ctx.next.not_null()) {
+ ctx.next = td::make_ref(SeqCont::seq(next, std::move(ctx.next)), list, pos + 1);
+ } else if (pos + 1 == sz) {
+ ctx.next = next;
+ } else {
+ ctx.next = td::make_ref(next, list, pos + 1);
+ }
+ return list->at(pos);
+}
+
+Ref ListCont::run_modify(IntCtx& ctx) {
+ auto sz = list->size();
+ if (pos >= sz) {
+ return std::move(ctx.next);
+ }
+ auto cur = list->at(pos++);
+ if (ctx.next.not_null()) {
+ next = SeqCont::seq(next, std::move(ctx.next));
+ }
+ if (pos == sz) {
+ ctx.next = std::move(next);
+ } else {
+ ctx.next = self();
+ }
+ return cur;
+}
+
+bool ListCont::dump(std::ostream& os, const IntCtx& ctx) const {
+ std::string dict_name = list->get_dict_name(ctx);
+ if (!dict_name.empty()) {
+ os << "[in " << dict_name << ":] ";
+ }
+ std::size_t sz = list->size(), i, a = (pos >= 16 ? pos - 16 : 0), b = std::min(pos + 16, sz);
+ if (a > 0) {
+ os << "... ";
+ }
+ for (i = a; i < b; i++) {
+ if (i == pos) {
+ os << "**HERE** ";
+ }
+ list->at(i)->print_name(os, ctx);
+ os << ' ';
+ }
+ if (b < sz) {
+ os << "...";
+ }
+ os << std::endl;
+ return true;
+}
+
//
// QuitCont
//
@@ -295,12 +429,15 @@ bool GenericLitCont::print_name(std::ostream& os, const IntCtx& ctx) const {
bool sp = false;
for (auto entry : list) {
if (sp) {
- os << sp;
+ os << ' ';
}
sp = true;
int tp = entry.type();
if (entry.is_int() || entry.is(vm::StackEntry::t_string) || entry.is(vm::StackEntry::t_bytes)) {
entry.dump(os);
+ } else if (entry.is_atom()) {
+ os << '`';
+ entry.dump(os);
} else {
auto cont_lit = entry.as_object();
if (cont_lit.not_null()) {
diff --git a/crypto/fift/Continuation.h b/crypto/fift/Continuation.h
index f2c44e7b..6623b642 100644
--- a/crypto/fift/Continuation.h
+++ b/crypto/fift/Continuation.h
@@ -17,6 +17,7 @@
Copyright 2020 Telegram Systems LLP
*/
#pragma once
+#include
#include "common/refcnt.hpp"
#include "common/refint.h"
#include "vm/stack.hpp"
@@ -76,6 +77,101 @@ class FiftCont : public td::CntObject {
}
};
+typedef std::function<void(vm::Stack&)> StackWordFunc;
+typedef std::function<void(IntCtx&)> CtxWordFunc;
+typedef std::function<Ref<FiftCont>(IntCtx&)> CtxTailWordFunc;
+
+class NopWord : public FiftCont {
+ public:
+ NopWord() = default;
+ ~NopWord() override = default;
+ Ref run_tail(IntCtx& ctx) const override {
+ return {};
+ }
+};
+
+class StackWord : public FiftCont {
+ StackWordFunc f;
+
+ public:
+ StackWord(StackWordFunc _f) : f(std::move(_f)) {
+ }
+ ~StackWord() override = default;
+ Ref run_tail(IntCtx& ctx) const override;
+};
+
+class CtxWord : public FiftCont {
+ CtxWordFunc f;
+
+ public:
+ CtxWord(CtxWordFunc _f) : f(std::move(_f)) {
+ }
+ ~CtxWord() override = default;
+ Ref run_tail(IntCtx& ctx) const override;
+};
+
+class CtxTailWord : public FiftCont {
+ CtxTailWordFunc f;
+
+ public:
+ CtxTailWord(CtxTailWordFunc _f) : f(std::move(_f)) {
+ }
+ ~CtxTailWord() override = default;
+ Ref run_tail(IntCtx& ctx) const override;
+};
+
+class WordList : public FiftCont {
+  std::vector<Ref<FiftCont>> list;
+
+ public:
+ ~WordList() override = default;
+ WordList() = default;
+  WordList(std::vector<Ref<FiftCont>>&& _list);
+  WordList(const std::vector<Ref<FiftCont>>& _list);
+ WordList& push_back(Ref word_def);
+ WordList& push_back(FiftCont& wd);
+ Ref run_tail(IntCtx& ctx) const override;
+ void close();
+ bool is_list() const override {
+ return true;
+ }
+ long long list_size() const override {
+ return (long long)list.size();
+ }
+ std::size_t size() const {
+ return list.size();
+ }
+ const Ref& at(std::size_t idx) const {
+ return list.at(idx);
+ }
+ const Ref* get_list() const override {
+ return list.data();
+ }
+  WordList& append(const std::vector<Ref<FiftCont>>& other);
+ WordList& append(const Ref* begin, const Ref* end);
+ WordList* make_copy() const override {
+ return new WordList(list);
+ }
+ bool dump(std::ostream& os, const IntCtx& ctx) const override;
+};
+
+class ListCont : public FiftCont {
+ Ref next;
+ Ref list;
+ std::size_t pos;
+
+ public:
+ ListCont(Ref nxt, Ref wl, std::size_t p = 0) : next(std::move(nxt)), list(std::move(wl)), pos(p) {
+ }
+ ~ListCont() override = default;
+ Ref run_tail(IntCtx& ctx) const override;
+ Ref run_modify(IntCtx& ctx) override;
+ Ref up() const override {
+ return next;
+ }
+ bool dump(std::ostream& os, const IntCtx& ctx) const override;
+};
+
class QuitCont : public FiftCont {
int exit_code;
diff --git a/crypto/fift/Dictionary.cpp b/crypto/fift/Dictionary.cpp
index 59da278f..d2eae0a3 100644
--- a/crypto/fift/Dictionary.cpp
+++ b/crypto/fift/Dictionary.cpp
@@ -17,143 +17,10 @@
Copyright 2017-2020 Telegram Systems LLP
*/
#include "Dictionary.h"
+#include "IntCtx.h"
namespace fift {
-//
-// StackWord
-//
-Ref StackWord::run_tail(IntCtx& ctx) const {
- f(ctx.stack);
- return {};
-}
-
-//
-// CtxWord
-//
-Ref CtxWord::run_tail(IntCtx& ctx) const {
- f(ctx);
- return {};
-}
-
-//
-// CtxTailWord
-//
-Ref CtxTailWord::run_tail(IntCtx& ctx) const {
- return f(ctx);
-}
-
-//
-// WordList
-//
-WordList::WordList(std::vector<Ref<FiftCont>>&& _list) : list(std::move(_list)) {
-}
-
-WordList::WordList(const std::vector<Ref<FiftCont>>& _list) : list(_list) {
-}
-
-WordList& WordList::push_back(Ref word_def) {
- list.push_back(std::move(word_def));
- return *this;
-}
-
-WordList& WordList::push_back(FiftCont& wd) {
- list.emplace_back(&wd);
- return *this;
-}
-
-Ref WordList::run_tail(IntCtx& ctx) const {
- if (list.empty()) {
- return {};
- }
- if (list.size() > 1) {
- ctx.next = td::make_ref(std::move(ctx.next), Ref(this), 1);
- }
- return list[0];
-}
-
-void WordList::close() {
- list.shrink_to_fit();
-}
-
-WordList& WordList::append(const std::vector<Ref<FiftCont>>& other) {
- list.insert(list.end(), other.begin(), other.end());
- return *this;
-}
-
-WordList& WordList::append(const Ref* begin, const Ref* end) {
- list.insert(list.end(), begin, end);
- return *this;
-}
-
-bool WordList::dump(std::ostream& os, const IntCtx& ctx) const {
- os << "{";
- for (auto entry : list) {
- os << ' ';
- entry->print_name(os, ctx);
- }
- os << " }" << std::endl;
- return true;
-}
-
-//
-// ListCont
-//
-
-Ref ListCont::run_tail(IntCtx& ctx) const {
- auto sz = list->size();
- if (pos >= sz) {
- return std::move(ctx.next);
- } else if (ctx.next.not_null()) {
- ctx.next = td::make_ref(SeqCont::seq(next, std::move(ctx.next)), list, pos + 1);
- } else if (pos + 1 == sz) {
- ctx.next = next;
- } else {
- ctx.next = td::make_ref(next, list, pos + 1);
- }
- return list->at(pos);
-}
-
-Ref ListCont::run_modify(IntCtx& ctx) {
- auto sz = list->size();
- if (pos >= sz) {
- return std::move(ctx.next);
- }
- auto cur = list->at(pos++);
- if (ctx.next.not_null()) {
- next = SeqCont::seq(next, std::move(ctx.next));
- }
- if (pos == sz) {
- ctx.next = std::move(next);
- } else {
- ctx.next = self();
- }
- return cur;
-}
-
-bool ListCont::dump(std::ostream& os, const IntCtx& ctx) const {
- std::string dict_name = list->get_dict_name(ctx);
- if (!dict_name.empty()) {
- os << "[in " << dict_name << ":] ";
- }
- std::size_t sz = list->size(), i, a = (pos >= 16 ? pos - 16 : 0), b = std::min(pos + 16, sz);
- if (a > 0) {
- os << "... ";
- }
- for (i = a; i < b; i++) {
- if (i == pos) {
- os << "**HERE** ";
- }
- list->at(i)->print_name(os, ctx);
- os << ' ';
- }
- if (b < sz) {
- os << "...";
- }
- os << std::endl;
- return true;
-}
-
//
// DictEntry
//
@@ -167,15 +34,49 @@ DictEntry::DictEntry(CtxWordFunc func, bool _act) : def(Ref{true, std::
DictEntry::DictEntry(CtxTailWordFunc func, bool _act) : def(Ref{true, std::move(func)}), active(_act) {
}
+DictEntry DictEntry::create_from(vm::StackEntry se) {
+ if (se.is_tuple()) {
+ auto& tuple = *se.as_tuple();
+ if (tuple.size() == 1) {
+ auto def = tuple[0].as_object();
+ if (def.not_null()) {
+ return DictEntry{std::move(def), true};
+ }
+ }
+ } else {
+ auto def = std::move(se).as_object();
+ if (def.not_null()) {
+ return DictEntry{std::move(def)};
+ }
+ }
+ return {};
+}
+
+DictEntry::operator vm::StackEntry() const& {
+ if (def.is_null()) {
+ return {};
+ } else if (active) {
+ return vm::make_tuple_ref(vm::StackEntry{vm::from_object, def});
+ } else {
+ return {vm::from_object, def};
+ }
+}
+
+DictEntry::operator vm::StackEntry() && {
+ if (def.is_null()) {
+ return {};
+ } else if (active) {
+ return vm::make_tuple_ref(vm::StackEntry{vm::from_object, std::move(def)});
+ } else {
+ return {vm::from_object, std::move(def)};
+ }
+}
+
//
// Dictionary
//
-DictEntry* Dictionary::lookup(td::Slice name) {
- auto it = words_.find(name);
- if (it == words_.end()) {
- return nullptr;
- }
- return &it->second;
+DictEntry Dictionary::lookup(std::string name) const {
+ return DictEntry::create_from(words().get(name));
}
void Dictionary::def_ctx_word(std::string name, CtxWordFunc func) {
@@ -196,26 +97,27 @@ void Dictionary::def_ctx_tail_word(std::string name, CtxTailWordFunc func) {
}
void Dictionary::def_word(std::string name, DictEntry word) {
- auto res = words_.emplace(name, std::move(word));
- LOG_IF(FATAL, !res.second) << "Cannot redefine word: " << name;
+ auto dict = words();
+ dict.set(std::move(name), vm::StackEntry(std::move(word)));
+ set_words(dict);
}
-void Dictionary::undef_word(td::Slice name) {
- auto it = words_.find(name);
- if (it == words_.end()) {
- return;
+void Dictionary::undef_word(std::string name) {
+ auto dict = words();
+ if (dict.remove(name)) {
+ set_words(dict);
}
- words_.erase(it);
}
bool Dictionary::lookup_def(const FiftCont* cont, std::string* word_ptr) const {
if (!cont) {
return false;
}
- for (const auto& entry : words_) {
- if (entry.second.get_def().get() == cont) {
+ for (auto entry : words()) {
+ auto val = DictEntry::create_from(entry.value());
+ if (val.get_def().get() == cont && entry.key().is_string()) {
if (word_ptr) {
- *word_ptr = entry.first;
+ *word_ptr = vm::StackEntry(entry.key()).as_string();
}
return true;
}
@@ -223,35 +125,4 @@ bool Dictionary::lookup_def(const FiftCont* cont, std::string* word_ptr) const {
return false;
}
-void interpret_nop(vm::Stack& stack) {
-}
-
-Ref Dictionary::nop_word_def = Ref{true, interpret_nop};
-
-//
-// functions for wordef
-//
-Ref pop_exec_token(vm::Stack& stack) {
- stack.check_underflow(1);
- auto wd_ref = stack.pop().as_object();
- if (wd_ref.is_null()) {
- throw IntError{"execution token expected"};
- }
- return wd_ref;
-}
-
-Ref pop_word_list(vm::Stack& stack) {
- stack.check_underflow(1);
- auto wl_ref = stack.pop().as_object();
- if (wl_ref.is_null()) {
- throw IntError{"word list expected"};
- }
- return wl_ref;
-}
-
-void push_argcount(vm::Stack& stack, int args) {
- stack.push_smallint(args);
- stack.push({vm::from_object, Dictionary::nop_word_def});
-}
-
} // namespace fift
diff --git a/crypto/fift/Dictionary.h b/crypto/fift/Dictionary.h
index 7307cdbe..b24bc742 100644
--- a/crypto/fift/Dictionary.h
+++ b/crypto/fift/Dictionary.h
@@ -17,115 +17,27 @@
Copyright 2017-2020 Telegram Systems LLP
*/
#pragma once
-
-#include
-#include ]