Mirror of https://github.com/ton-blockchain/ton

Merge branch 'testnet' into accelerator

Author: SpyCheese, 2025-01-22 11:17:04 +03:00
Commit: a5e814e594
39 changed files with 898 additions and 616 deletions


@ -1,20 +0,0 @@
FROM ubuntu:20.04
RUN apt update
RUN DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get -y install tzdata
RUN apt install -y build-essential cmake clang openssl libssl-dev zlib1g-dev gperf wget git curl libreadline-dev ccache libmicrohttpd-dev ninja-build libsecp256k1-dev libsodium-dev pkg-config
WORKDIR /
ARG BRANCH
ARG REPO
RUN git clone --recurse-submodules https://github.com/$REPO ton && cd ton && git checkout $BRANCH && git submodule update
WORKDIR /ton
RUN mkdir /ton/build
WORKDIR /ton/build
ENV CC clang
ENV CXX clang++
ENV CCACHE_DISABLE 1
RUN cmake -GNinja -DCMAKE_BUILD_TYPE=Release -DPORTABLE=1 -DTON_ARCH= -DCMAKE_CXX_FLAGS="-mavx2" ..
RUN ninja storage-daemon storage-daemon-cli tonlibjson blockchain-explorer fift func tolk validator-engine validator-engine-console create-state generate-random-id create-hardfork dht-server lite-client
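For reference, an image from this removed Dockerfile was parameterized by the two declared build arguments; a minimal sketch of how it would have been built (the repository and branch values here are only examples):

```bash
# Build the image, pointing REPO/BRANCH at the fork and branch to compile
docker build \
  --build-arg REPO=ton-blockchain/ton \
  --build-arg BRANCH=testnet \
  -t ton-build:ubuntu-20.04 .
```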


@ -1,20 +0,0 @@
FROM ubuntu:22.04
RUN apt update
RUN DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get -y install tzdata
RUN apt install -y build-essential cmake clang openssl libssl-dev zlib1g-dev gperf wget git curl libreadline-dev ccache libmicrohttpd-dev ninja-build libsecp256k1-dev libsodium-dev pkg-config
WORKDIR /
ARG BRANCH
ARG REPO
RUN git clone --recurse-submodules https://github.com/$REPO ton && cd ton && git checkout $BRANCH && git submodule update
WORKDIR /ton
RUN mkdir /ton/build
WORKDIR /ton/build
ENV CC clang
ENV CXX clang++
ENV CCACHE_DISABLE 1
RUN cmake -GNinja -DCMAKE_BUILD_TYPE=Release -DPORTABLE=1 -DTON_ARCH= -DCMAKE_CXX_FLAGS="-mavx2" ..
RUN ninja storage-daemon storage-daemon-cli tonlibjson blockchain-explorer fift func tolk validator-engine validator-engine-console create-state generate-random-id create-hardfork dht-server lite-client


@ -1,20 +0,0 @@
FROM ubuntu:20.04
RUN apt update
RUN DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get -y install tzdata
RUN apt install -y build-essential cmake clang openssl libssl-dev zlib1g-dev gperf wget git curl libreadline-dev ccache libmicrohttpd-dev ninja-build libsecp256k1-dev libsodium-dev pkg-config
WORKDIR /
ARG BRANCH
ARG REPO
RUN git clone --recurse-submodules https://github.com/$REPO ton && cd ton && git checkout $BRANCH && git submodule update
WORKDIR /ton
RUN mkdir /ton/build
WORKDIR /ton/build
ENV CC clang
ENV CXX clang++
ENV CCACHE_DISABLE 1
RUN cmake -GNinja -DCMAKE_BUILD_TYPE=Release -DPORTABLE=1 -DTON_ARCH= ..
RUN ninja storage-daemon storage-daemon-cli tonlibjson blockchain-explorer fift func tolk validator-engine validator-engine-console create-state generate-random-id dht-server lite-client


@ -1,20 +0,0 @@
FROM ubuntu:22.04
RUN apt update
RUN DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get -y install tzdata
RUN apt install -y build-essential cmake clang openssl libssl-dev zlib1g-dev gperf wget git curl libreadline-dev ccache libmicrohttpd-dev ninja-build libsecp256k1-dev libsodium-dev pkg-config
WORKDIR /
ARG BRANCH
ARG REPO
RUN git clone --recurse-submodules https://github.com/$REPO ton && cd ton && git checkout $BRANCH && git submodule update
WORKDIR /ton
RUN mkdir /ton/build
WORKDIR /ton/build
ENV CC clang
ENV CXX clang++
ENV CCACHE_DISABLE 1
RUN cmake -GNinja -DCMAKE_BUILD_TYPE=Release -DPORTABLE=1 -DTON_ARCH= ..
RUN ninja storage-daemon storage-daemon-cli tonlibjson blockchain-explorer fift func tolk validator-engine validator-engine-console create-state generate-random-id dht-server lite-client


@ -0,0 +1,57 @@
name: Ubuntu TON build (AppImages, arm64)
on: [push,workflow_dispatch,workflow_call]
jobs:
build:
runs-on: ubuntu-22.04-arm
steps:
- name: Check out repository
uses: actions/checkout@v3
with:
submodules: 'recursive'
- name: Install system libraries
run: |
sudo apt update
sudo apt install -y build-essential git cmake ninja-build zlib1g-dev libsecp256k1-dev libmicrohttpd-dev libsodium-dev liblz4-dev
sudo apt remove libgsl-dev
- name: Install clang-16
run: |
wget https://apt.llvm.org/llvm.sh
chmod +x llvm.sh
sudo ./llvm.sh 16 all
- name: Build TON
run: |
git submodule sync --recursive
git submodule update
cp assembly/native/build-ubuntu-appimages.sh .
chmod +x build-ubuntu-appimages.sh
./build-ubuntu-appimages.sh -a
- name: Make AppImages
run: |
cp assembly/appimage/create-appimages.sh .
cp assembly/appimage/AppRun .
cp assembly/appimage/ton.png .
chmod +x create-appimages.sh
./create-appimages.sh aarch64
rm -rf artifacts
- name: Build TON libs
run: |
cp assembly/native/build-ubuntu-portable-libs.sh .
chmod +x build-ubuntu-portable-libs.sh
./build-ubuntu-portable-libs.sh -a
cp ./artifacts/libtonlibjson.so appimages/artifacts/
cp ./artifacts/libemulator.so appimages/artifacts/
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
name: ton-arm64-linux
path: appimages/artifacts
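The same sequence can be reproduced outside CI on an arm64 Ubuntu 22.04 host; a rough sketch based on the workflow steps above (clang-16 from apt.llvm.org, then the repository's assembly scripts):

```bash
# Install toolchain and dependencies, as in the workflow
sudo apt update
sudo apt install -y build-essential git cmake ninja-build zlib1g-dev \
  libsecp256k1-dev libmicrohttpd-dev libsodium-dev liblz4-dev
wget https://apt.llvm.org/llvm.sh && chmod +x llvm.sh && sudo ./llvm.sh 16 all

# Build binaries, wrap them into AppImages, then add the shared libraries
cp assembly/native/build-ubuntu-appimages.sh .
cp assembly/appimage/{create-appimages.sh,AppRun,ton.png} .
chmod +x build-ubuntu-appimages.sh create-appimages.sh
./build-ubuntu-appimages.sh -a
./create-appimages.sh aarch64
```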


@ -0,0 +1,43 @@
name: Ubuntu TON build (shared, arm64)
on: [push,workflow_dispatch,workflow_call]
jobs:
build:
strategy:
fail-fast: false
matrix:
os: [ubuntu-22.04-arm, ubuntu-24.04-arm]
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
with:
submodules: 'recursive'
- name: Install system libraries
run: |
sudo apt-get update
sudo apt-get install -y build-essential git cmake ninja-build zlib1g-dev libsecp256k1-dev libmicrohttpd-dev libsodium-dev liblz4-dev libjemalloc-dev
- if: matrix.os != 'ubuntu-24.04-arm'
name: Install llvm-16
run: |
wget https://apt.llvm.org/llvm.sh
chmod +x llvm.sh
sudo ./llvm.sh 16 all
- name: Build TON
run: |
git submodule sync --recursive
git submodule update
cp assembly/native/build-ubuntu-shared.sh .
chmod +x build-ubuntu-shared.sh
./build-ubuntu-shared.sh -t -a
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
name: ton-binaries-${{ matrix.os }}
path: artifacts


@ -0,0 +1,63 @@
name: Ubuntu TON build (AppImages, x86-64)
on: [push,workflow_dispatch,workflow_call]
jobs:
build:
runs-on: ubuntu-20.04
steps:
- name: Check out repository
uses: actions/checkout@v3
with:
submodules: 'recursive'
- name: Install system libraries
run: |
sudo apt update
sudo apt install -y build-essential git cmake ninja-build zlib1g-dev libsecp256k1-dev libmicrohttpd-dev libsodium-dev liblz4-dev
sudo apt remove libgsl-dev
- name: Install gcc-11 g++-11
run: |
sudo apt install -y manpages-dev software-properties-common
sudo add-apt-repository ppa:ubuntu-toolchain-r/test
sudo apt update && sudo apt install gcc-11 g++-11
- name: Install clang-16
run: |
wget https://apt.llvm.org/llvm.sh
chmod +x llvm.sh
sudo ./llvm.sh 16 all
- name: Build TON
run: |
git submodule sync --recursive
git submodule update
cp assembly/native/build-ubuntu-appimages.sh .
chmod +x build-ubuntu-appimages.sh
./build-ubuntu-appimages.sh -a
- name: Make AppImages
run: |
cp assembly/appimage/create-appimages.sh .
cp assembly/appimage/AppRun .
cp assembly/appimage/ton.png .
chmod +x create-appimages.sh
./create-appimages.sh x86_64
rm -rf artifacts
- name: Build TON libs
run: |
cp assembly/native/build-ubuntu-portable-libs.sh .
chmod +x build-ubuntu-portable-libs.sh
./build-ubuntu-portable-libs.sh -a
cp ./artifacts/libtonlibjson.so appimages/artifacts/
cp ./artifacts/libemulator.so appimages/artifacts/
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
name: ton-x86_64-linux
path: appimages/artifacts


@ -0,0 +1,27 @@
name: MacOS-13 TON build (portable, x86-64)
on: [push,workflow_dispatch,workflow_call]
jobs:
build:
runs-on: macos-13
steps:
- name: Check out repository
uses: actions/checkout@v3
with:
submodules: 'recursive'
- name: Build TON
run: |
git submodule sync --recursive
git submodule update
cp assembly/native/build-macos-portable.sh .
chmod +x build-macos-portable.sh
./build-macos-portable.sh -t -a
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
name: ton-x86_64-macos
path: artifacts


@ -0,0 +1,27 @@
name: MacOS-14 TON build (portable, arm64)
on: [push,workflow_dispatch,workflow_call]
jobs:
build:
runs-on: macos-14
steps:
- name: Check out repository
uses: actions/checkout@v3
with:
submodules: 'recursive'
- name: Build TON
run: |
git submodule sync --recursive
git submodule update
cp assembly/native/build-macos-portable.sh .
chmod +x build-macos-portable.sh
./build-macos-portable.sh -t -a
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
name: ton-arm64-macos
path: artifacts


@ -28,5 +28,5 @@ jobs:
       - name: Upload artifacts
         uses: actions/upload-artifact@master
         with:
-          name: ton-wasm-binaries
+          name: ton-wasm
           path: artifacts


@ -11,10 +11,26 @@ jobs:
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
- name: Download Linux arm64 artifacts
uses: dawidd6/action-download-artifact@v6
with:
workflow: build-ton-linux-arm64-appimage.yml
path: artifacts
workflow_conclusion: success
skip_unpack: true
- name: Download and unzip Linux arm64 artifacts
uses: dawidd6/action-download-artifact@v6
with:
workflow: build-ton-linux-arm64-appimage.yml
path: artifacts
workflow_conclusion: success
skip_unpack: false
- name: Download Linux x86-64 artifacts - name: Download Linux x86-64 artifacts
uses: dawidd6/action-download-artifact@v6 uses: dawidd6/action-download-artifact@v6
with: with:
workflow: ton-x86-64-linux.yml workflow: build-ton-linux-x86-64-appimage.yml
path: artifacts path: artifacts
workflow_conclusion: success workflow_conclusion: success
skip_unpack: true skip_unpack: true
@ -22,7 +38,7 @@ jobs:
- name: Download and unzip Linux x86-64 artifacts - name: Download and unzip Linux x86-64 artifacts
uses: dawidd6/action-download-artifact@v6 uses: dawidd6/action-download-artifact@v6
with: with:
workflow: ton-x86-64-linux.yml workflow: build-ton-linux-x86-64-appimage.yml
path: artifacts path: artifacts
workflow_conclusion: success workflow_conclusion: success
skip_unpack: false skip_unpack: false
@ -30,7 +46,7 @@ jobs:
- name: Download Mac x86-64 artifacts - name: Download Mac x86-64 artifacts
uses: dawidd6/action-download-artifact@v6 uses: dawidd6/action-download-artifact@v6
with: with:
workflow: ton-x86-64-macos.yml workflow: build-ton-macos-13-x86-64-portable.yml
path: artifacts path: artifacts
workflow_conclusion: success workflow_conclusion: success
skip_unpack: true skip_unpack: true
@ -38,7 +54,7 @@ jobs:
- name: Download Mac arm64 artifacts - name: Download Mac arm64 artifacts
uses: dawidd6/action-download-artifact@v6 uses: dawidd6/action-download-artifact@v6
with: with:
workflow: ton-arm64-macos.yml workflow: build-ton-macos-14-arm64-portable.yml
path: artifacts path: artifacts
workflow_conclusion: success workflow_conclusion: success
skip_unpack: true skip_unpack: true
@ -46,7 +62,7 @@ jobs:
- name: Download and unzip Mac x86-64 artifacts - name: Download and unzip Mac x86-64 artifacts
uses: dawidd6/action-download-artifact@v6 uses: dawidd6/action-download-artifact@v6
with: with:
workflow: ton-x86-64-macos.yml workflow: build-ton-macos-13-x86-64-portable.yml
path: artifacts path: artifacts
workflow_conclusion: success workflow_conclusion: success
skip_unpack: false skip_unpack: false
@ -54,7 +70,7 @@ jobs:
- name: Download and unzip arm64 artifacts - name: Download and unzip arm64 artifacts
uses: dawidd6/action-download-artifact@v6 uses: dawidd6/action-download-artifact@v6
with: with:
workflow: ton-arm64-macos.yml workflow: build-ton-macos-14-arm64-portable.yml
path: artifacts path: artifacts
workflow_conclusion: success workflow_conclusion: success
skip_unpack: false skip_unpack: false
@ -147,7 +163,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-win-binaries.zip file: artifacts/ton-x86-64-windows.zip
asset_name: ton-win-x86-64.zip asset_name: ton-win-x86-64.zip
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -155,7 +171,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-win-binaries/fift.exe file: artifacts/ton-x86-64-windows/fift.exe
asset_name: fift.exe asset_name: fift.exe
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -163,7 +179,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-win-binaries/func.exe file: artifacts/ton-x86-64-windows/func.exe
asset_name: func.exe asset_name: func.exe
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -171,7 +187,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-win-binaries/tolk.exe file: artifacts/ton-x86-64-windows/tolk.exe
asset_name: tolk.exe asset_name: tolk.exe
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -179,7 +195,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-win-binaries/lite-client.exe file: artifacts/ton-x86-64-windows/lite-client.exe
asset_name: lite-client.exe asset_name: lite-client.exe
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -187,7 +203,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-win-binaries/proxy-liteserver.exe file: artifacts/ton-x86-64-windows/proxy-liteserver.exe
asset_name: proxy-liteserver.exe asset_name: proxy-liteserver.exe
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -195,7 +211,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-win-binaries/rldp-http-proxy.exe file: artifacts/ton-x86-64-windows/rldp-http-proxy.exe
asset_name: rldp-http-proxy.exe asset_name: rldp-http-proxy.exe
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -203,7 +219,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-win-binaries/http-proxy.exe file: artifacts/ton-x86-64-windows/http-proxy.exe
asset_name: http-proxy.exe asset_name: http-proxy.exe
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -211,7 +227,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-win-binaries/storage-daemon-cli.exe file: artifacts/ton-x86-64-windows/storage-daemon-cli.exe
asset_name: storage-daemon-cli.exe asset_name: storage-daemon-cli.exe
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -219,7 +235,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-win-binaries/storage-daemon.exe file: artifacts/ton-x86-64-windows/storage-daemon.exe
asset_name: storage-daemon.exe asset_name: storage-daemon.exe
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -227,7 +243,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-win-binaries/tonlibjson.dll file: artifacts/ton-x86-64-windows/tonlibjson.dll
asset_name: tonlibjson.dll asset_name: tonlibjson.dll
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -235,7 +251,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-win-binaries/emulator.dll file: artifacts/ton-x86-64-windows/emulator.dll
asset_name: libemulator.dll asset_name: libemulator.dll
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -243,7 +259,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-win-binaries/tonlib-cli.exe file: artifacts/ton-x86-64-windows/tonlib-cli.exe
asset_name: tonlib-cli.exe asset_name: tonlib-cli.exe
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -253,7 +269,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-macos-binaries.zip file: artifacts/ton-x86_64-macos.zip
asset_name: ton-mac-x86-64.zip asset_name: ton-mac-x86-64.zip
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -261,7 +277,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-macos-binaries/fift file: artifacts/ton-x86_64-macos/fift
asset_name: fift-mac-x86-64 asset_name: fift-mac-x86-64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -269,7 +285,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-macos-binaries/func file: artifacts/ton-x86_64-macos/func
asset_name: func-mac-x86-64 asset_name: func-mac-x86-64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -277,7 +293,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-macos-binaries/tolk file: artifacts/ton-x86_64-macos/tolk
asset_name: tolk-mac-x86-64 asset_name: tolk-mac-x86-64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -285,7 +301,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-macos-binaries/lite-client file: artifacts/ton-x86_64-macos/lite-client
asset_name: lite-client-mac-x86-64 asset_name: lite-client-mac-x86-64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -293,7 +309,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-macos-binaries/proxy-liteserver file: artifacts/ton-x86_64-macos/proxy-liteserver
asset_name: proxy-liteserver-mac-x86-64 asset_name: proxy-liteserver-mac-x86-64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -301,7 +317,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-macos-binaries/rldp-http-proxy file: artifacts/ton-x86_64-macos/rldp-http-proxy
asset_name: rldp-http-proxy-mac-x86-64 asset_name: rldp-http-proxy-mac-x86-64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -309,7 +325,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-macos-binaries/http-proxy file: artifacts/ton-x86_64-macos/http-proxy
asset_name: http-proxy-mac-x86-64 asset_name: http-proxy-mac-x86-64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -317,7 +333,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-macos-binaries/storage-daemon-cli file: artifacts/ton-x86_64-macos/storage-daemon-cli
asset_name: storage-daemon-cli-mac-x86-64 asset_name: storage-daemon-cli-mac-x86-64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -325,7 +341,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-macos-binaries/storage-daemon file: artifacts/ton-x86_64-macos/storage-daemon
asset_name: storage-daemon-mac-x86-64 asset_name: storage-daemon-mac-x86-64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -333,7 +349,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-macos-binaries/libtonlibjson.dylib file: artifacts/ton-x86_64-macos/libtonlibjson.dylib
asset_name: tonlibjson-mac-x86-64.dylib asset_name: tonlibjson-mac-x86-64.dylib
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -341,7 +357,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-macos-binaries/libemulator.dylib file: artifacts/ton-x86_64-macos/libemulator.dylib
asset_name: libemulator-mac-x86-64.dylib asset_name: libemulator-mac-x86-64.dylib
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -349,7 +365,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-macos-binaries/tonlib-cli file: artifacts/ton-x86_64-macos/tonlib-cli
asset_name: tonlib-cli-mac-x86-64 asset_name: tonlib-cli-mac-x86-64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -360,7 +376,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-macos-binaries.zip file: artifacts/ton-arm64-macos.zip
asset_name: ton-mac-arm64.zip asset_name: ton-mac-arm64.zip
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -368,7 +384,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-macos-binaries/fift file: artifacts/ton-arm64-macos/fift
asset_name: fift-mac-arm64 asset_name: fift-mac-arm64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -376,7 +392,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-macos-binaries/func file: artifacts/ton-arm64-macos/func
asset_name: func-mac-arm64 asset_name: func-mac-arm64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -384,7 +400,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-macos-binaries/tolk file: artifacts/ton-arm64-macos/tolk
asset_name: tolk-mac-arm64 asset_name: tolk-mac-arm64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -392,7 +408,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-macos-binaries/lite-client file: artifacts/ton-arm64-macos/lite-client
asset_name: lite-client-mac-arm64 asset_name: lite-client-mac-arm64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -400,7 +416,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-macos-binaries/proxy-liteserver file: artifacts/ton-arm64-macos/proxy-liteserver
asset_name: proxy-liteserver-mac-arm64 asset_name: proxy-liteserver-mac-arm64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -408,7 +424,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-macos-binaries/rldp-http-proxy file: artifacts/ton-arm64-macos/rldp-http-proxy
asset_name: rldp-http-proxy-mac-arm64 asset_name: rldp-http-proxy-mac-arm64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -416,7 +432,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-macos-binaries/http-proxy file: artifacts/ton-arm64-macos/http-proxy
asset_name: http-proxy-mac-arm64 asset_name: http-proxy-mac-arm64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -424,7 +440,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-macos-binaries/storage-daemon-cli file: artifacts/ton-arm64-macos/storage-daemon-cli
asset_name: storage-daemon-cli-mac-arm64 asset_name: storage-daemon-cli-mac-arm64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -432,7 +448,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-macos-binaries/storage-daemon file: artifacts/ton-arm64-macos/storage-daemon
asset_name: storage-daemon-mac-arm64 asset_name: storage-daemon-mac-arm64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -440,7 +456,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-macos-binaries/libtonlibjson.dylib file: artifacts/ton-arm64-macos/libtonlibjson.dylib
asset_name: tonlibjson-mac-arm64.dylib asset_name: tonlibjson-mac-arm64.dylib
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -448,7 +464,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-macos-binaries/libemulator.dylib file: artifacts/ton-arm64-macos/libemulator.dylib
asset_name: libemulator-mac-arm64.dylib asset_name: libemulator-mac-arm64.dylib
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -456,7 +472,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-macos-binaries/tonlib-cli file: artifacts/ton-arm64-macos/tonlib-cli
asset_name: tonlib-cli-mac-arm64 asset_name: tonlib-cli-mac-arm64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -466,7 +482,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-linux-binaries.zip file: artifacts/ton-x86_64-linux.zip
asset_name: ton-linux-x86_64.zip asset_name: ton-linux-x86_64.zip
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -474,7 +490,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-linux-binaries/fift file: artifacts/ton-x86_64-linux/fift
asset_name: fift-linux-x86_64 asset_name: fift-linux-x86_64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -482,7 +498,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-linux-binaries/func file: artifacts/ton-x86_64-linux/func
asset_name: func-linux-x86_64 asset_name: func-linux-x86_64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -490,7 +506,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-linux-binaries/tolk file: artifacts/ton-x86_64-linux/tolk
asset_name: tolk-linux-x86_64 asset_name: tolk-linux-x86_64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -498,7 +514,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-linux-binaries/lite-client file: artifacts/ton-x86_64-linux/lite-client
asset_name: lite-client-linux-x86_64 asset_name: lite-client-linux-x86_64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -506,7 +522,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-linux-binaries/proxy-liteserver file: artifacts/ton-x86_64-linux/proxy-liteserver
asset_name: proxy-liteserver-linux-x86_64 asset_name: proxy-liteserver-linux-x86_64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -514,7 +530,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-linux-binaries/rldp-http-proxy file: artifacts/ton-x86_64-linux/rldp-http-proxy
asset_name: rldp-http-proxy-linux-x86_64 asset_name: rldp-http-proxy-linux-x86_64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -522,7 +538,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-linux-binaries/http-proxy file: artifacts/ton-x86_64-linux/http-proxy
asset_name: http-proxy-linux-x86_64 asset_name: http-proxy-linux-x86_64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -530,7 +546,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-linux-binaries/storage-daemon-cli file: artifacts/ton-x86_64-linux/storage-daemon-cli
asset_name: storage-daemon-cli-linux-x86_64 asset_name: storage-daemon-cli-linux-x86_64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -538,7 +554,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-linux-binaries/storage-daemon file: artifacts/ton-x86_64-linux/storage-daemon
asset_name: storage-daemon-linux-x86_64 asset_name: storage-daemon-linux-x86_64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -546,7 +562,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-linux-binaries/libtonlibjson.so file: artifacts/ton-x86_64-linux/libtonlibjson.so
asset_name: tonlibjson-linux-x86_64.so asset_name: tonlibjson-linux-x86_64.so
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -554,7 +570,7 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-linux-binaries/libemulator.so file: artifacts/ton-x86_64-linux/libemulator.so
asset_name: libemulator-linux-x86_64.so asset_name: libemulator-linux-x86_64.so
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
@ -562,16 +578,124 @@ jobs:
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-x86_64-linux-binaries/tonlib-cli file: artifacts/ton-x86_64-linux/tonlib-cli
asset_name: tonlib-cli-linux-x86_64 asset_name: tonlib-cli-linux-x86_64
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
# linux arm64
- name: Upload Linux arm64 artifacts
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-linux.zip
asset_name: ton-linux-arm64.zip
tag: ${{ steps.tag.outputs.TAG }}
- name: Upload Linux arm64 single artifact - fift
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-linux/fift
asset_name: fift-linux-arm64
tag: ${{ steps.tag.outputs.TAG }}
- name: Upload Linux arm64 single artifact - func
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-linux/func
asset_name: func-linux-arm64
tag: ${{ steps.tag.outputs.TAG }}
- name: Upload Linux arm64 single artifact - tolk
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-linux/tolk
asset_name: tolk-linux-arm64
tag: ${{ steps.tag.outputs.TAG }}
- name: Upload Linux arm64 single artifact - lite-client
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-linux/lite-client
asset_name: lite-client-linux-arm64
tag: ${{ steps.tag.outputs.TAG }}
- name: Upload Linux arm64 single artifact - proxy-liteserver
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-linux/proxy-liteserver
asset_name: proxy-liteserver-linux-arm64
tag: ${{ steps.tag.outputs.TAG }}
- name: Upload Linux arm64 single artifact - rldp-http-proxy
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-linux/rldp-http-proxy
asset_name: rldp-http-proxy-linux-arm64
tag: ${{ steps.tag.outputs.TAG }}
- name: Upload Linux arm64 single artifact - http-proxy
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-linux/http-proxy
asset_name: http-proxy-linux-arm64
tag: ${{ steps.tag.outputs.TAG }}
- name: Upload Linux arm64 single artifact - storage-daemon-cli
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-linux/storage-daemon-cli
asset_name: storage-daemon-cli-linux-arm64
tag: ${{ steps.tag.outputs.TAG }}
- name: Upload Linux arm64 single artifact - storage-daemon
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-linux/storage-daemon
asset_name: storage-daemon-linux-arm64
tag: ${{ steps.tag.outputs.TAG }}
- name: Upload Linux arm64 single artifact - tonlibjson
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-linux/libtonlibjson.so
asset_name: tonlibjson-linux-arm64.so
tag: ${{ steps.tag.outputs.TAG }}
- name: Upload Linux arm64 single artifact - libemulator
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-linux/libemulator.so
asset_name: libemulator-linux-arm64.so
tag: ${{ steps.tag.outputs.TAG }}
- name: Upload Linux arm64 single artifact - tonlib-cli
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-arm64-linux/tonlib-cli
asset_name: tonlib-cli-linux-arm64
tag: ${{ steps.tag.outputs.TAG }}
- name: Upload WASM artifacts - name: Upload WASM artifacts
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2
with: with:
repo_token: ${{ secrets.GITHUB_TOKEN }} repo_token: ${{ secrets.GITHUB_TOKEN }}
file: artifacts/ton-wasm-binaries.zip file: artifacts/ton-wasm.zip
asset_name: ton-wasm-binaries.zip asset_name: ton-wasm.zip
tag: ${{ steps.tag.outputs.TAG }} tag: ${{ steps.tag.outputs.TAG }}
- name: Upload Android Tonlib artifacts - name: Upload Android Tonlib artifacts


@ -1,40 +0,0 @@
name: MacOS TON build (portable, arm64)
on: [push,workflow_dispatch,workflow_call]
jobs:
build:
runs-on: macos-14
steps:
- uses: actions/checkout@v3
with:
submodules: 'recursive'
- uses: cachix/install-nix-action@v23
with:
extra_nix_config: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
- name: Build TON
run: |
git submodule sync --recursive
git submodule update
cp assembly/nix/build-macos-nix.sh .
chmod +x build-macos-nix.sh
./build-macos-nix.sh -t
- name: Simple binaries test
run: |
sudo mv /nix/store /nix/store2
artifacts/validator-engine -V
artifacts/lite-client -V
artifacts/fift -V
artifacts/func -V
artifacts/tolk -v
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
name: ton-arm64-macos-binaries
path: artifacts


@ -1,44 +0,0 @@
name: Ubuntu TON build (portable, x86-64)
on: [push,workflow_dispatch,workflow_call]
jobs:
build:
runs-on: ubuntu-22.04
steps:
- run: |
sudo apt update
sudo apt install -y apt-utils
- uses: actions/checkout@v3
with:
submodules: 'recursive'
- uses: cachix/install-nix-action@v23
with:
extra_nix_config: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
- name: Build TON
run: |
git submodule sync --recursive
git submodule update
cp assembly/nix/build-linux-x86-64-nix.sh .
chmod +x build-linux-x86-64-nix.sh
./build-linux-x86-64-nix.sh -t
- name: Simple binaries test
run: |
sudo mv /nix/store /nix/store2
artifacts/validator-engine -V
artifacts/lite-client -V
artifacts/fift -V
artifacts/func -V
artifacts/tolk -v
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
name: ton-x86_64-linux-binaries
path: artifacts


@ -1,40 +0,0 @@
name: MacOS TON build (portable, x86-64)
on: [push,workflow_dispatch,workflow_call]
jobs:
build:
runs-on: macos-13
steps:
- uses: actions/checkout@v3
with:
submodules: 'recursive'
- uses: cachix/install-nix-action@v23
with:
extra_nix_config: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
- name: Build TON
run: |
git submodule sync --recursive
git submodule update
cp assembly/nix/build-macos-nix.sh .
chmod +x build-macos-nix.sh
./build-macos-nix.sh -t
- name: Simple binaries test
run: |
sudo mv /nix/store /nix/store2
artifacts/validator-engine -V
artifacts/lite-client -V
artifacts/fift -V
artifacts/func -V
artifacts/tolk -v
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
name: ton-x86_64-macos-binaries
path: artifacts


@ -32,5 +32,5 @@ jobs:
       - name: Upload artifacts
         uses: actions/upload-artifact@master
         with:
-          name: ton-win-binaries
+          name: ton-x86-64-windows
           path: artifacts


@ -71,7 +71,7 @@ Usually, the response to your pull request will indicate which section it falls
 ## Build TON blockchain
-### Ubuntu 20.04, 22.04 (x86-64, aarch64)
+### Ubuntu 20.04, 22.04, 24.04 (x86-64, aarch64)
 Install additional system libraries
 ```bash
 sudo apt-get update
@ -141,18 +141,10 @@ Compile TON tonlib library
 ./build-android-tonlib.sh
 ```
-### Build TON portable binaries with Nix package manager
-You need to install Nix first.
-```bash
-sh <(curl -L https://nixos.org/nix/install) --daemon
-```
-Then compile TON with Nix by executing below command from the root folder:
-```bash
-cp -r assembly/nix/* .
-export NIX_PATH=nixpkgs=https://github.com/nixOS/nixpkgs/archive/23.05.tar.gz
-nix-build linux-x86-64-static.nix
-```
-More examples for other platforms can be found under `assembly/nix`.
+### TON portable binaries
+Linux portable binaries are wrapped into AppImages, while macOS portable binaries are statically linked executables.
+Linux and macOS binaries are available for both x86-64 and arm64 architectures.
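To make the new instructions concrete, here is a minimal sketch of how a published Linux AppImage could be fetched and run. It assumes a release asset named `fift-linux-x86_64` (one of the asset names used by the release workflow in this commit); the exact download URL depends on the release tag, which is hypothetical here.

```bash
# Download a single-binary AppImage from a GitHub release (hypothetical tag v2025.01)
wget https://github.com/ton-blockchain/ton/releases/download/v2025.01/fift-linux-x86_64 -O fift
chmod +x fift

# AppImages are self-contained: bundled libraries are resolved via the embedded AppRun
./fift -V
```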
 ## Running tests

assembly/appimage/AppRun Normal file

@ -0,0 +1,3 @@
#!/bin/sh
export LD_LIBRARY_PATH="${APPDIR}/usr/lib:${LD_LIBRARY_PATH}"
exec "$(dirname $0)"/usr/bin/app "$@"


@ -0,0 +1,50 @@
#!/bin/bash
if [ ! -d "artifacts" ]; then
echo "No artifacts found."
exit 2
fi
# x86_64 or aarch64
ARCH=$1
rm -rf appimages
mkdir -p appimages/artifacts
wget -nc https://github.com/AppImage/appimagetool/releases/download/continuous/appimagetool-$ARCH.AppImage
chmod +x ./appimagetool-$ARCH.AppImage
cd appimages
for file in ../artifacts/*; do
if [[ -f "$file" && "$file" != *.so ]]; then
appName=$(basename "$file")
echo $appName
# prepare AppDir
mkdir -p $appName.AppDir/usr/{bin,lib}
cp ../AppRun $appName.AppDir/AppRun
sed -i "s/app/$appName/g" $appName.AppDir/AppRun
chmod +x ./$appName.AppDir/AppRun
printf '[Desktop Entry]\nName='$appName'\nExec='$appName'\nIcon='$appName'\nType=Application\nCategories=Utility;\n' > $appName.AppDir/$appName.desktop
cp ../ton.png $appName.AppDir/$appName.png
cp $file $appName.AppDir/usr/bin/
cp ../build/openssl_3/libcrypto.so.3 \
/lib/$ARCH-linux-gnu/libatomic.so.1 \
/lib/$ARCH-linux-gnu/libsodium.so.23 \
/lib/$ARCH-linux-gnu/libz.so.1 \
/lib/$ARCH-linux-gnu/liblz4.so.1 \
/lib/$ARCH-linux-gnu/libmicrohttpd.so.12 \
/lib/$ARCH-linux-gnu/libreadline.so.8 \
/lib/$ARCH-linux-gnu/libstdc++.so.6 \
$appName.AppDir/usr/lib/
chmod +x ./$appName.AppDir/usr/bin/$appName
# create AppImage
./../appimagetool-$ARCH.AppImage -l $appName.AppDir
mv $appName-$ARCH.AppImage artifacts/$appName
fi
done
ls -larth artifacts
cp -r ../artifacts/{smartcont,lib} artifacts/
pwd
ls -larth artifacts
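As a usage note, the script expects a populated `artifacts/` directory (produced by `build-ubuntu-appimages.sh -a`) next to it and emits one AppImage per executable; a sketch of the AppDir it assembles for a single binary such as `fift`:

```bash
./create-appimages.sh x86_64   # or aarch64

# Layout assembled for each binary before appimagetool runs (illustrative):
# fift.AppDir/
# ├── AppRun              # sets LD_LIBRARY_PATH and execs usr/bin/fift
# ├── fift.desktop
# ├── fift.png
# └── usr/
#     ├── bin/fift
#     └── lib/libcrypto.so.3, libsodium.so.23, libz.so.1, ...
```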

assembly/appimage/ton.png Normal file (binary, 5.4 KiB, not shown)


@ -1,237 +0,0 @@
pipeline {
agent none
stages {
stage('Run Builds') {
parallel {
stage('Ubuntu 20.04 x86-64 (shared)') {
agent {
label 'Ubuntu_x86-64'
}
steps {
timeout(time: 180, unit: 'MINUTES') {
sh '''
cp assembly/native/build-ubuntu-shared.sh .
chmod +x build-ubuntu-shared.sh
./build-ubuntu-shared.sh -a
'''
sh '''
cd artifacts
zip -9r ton-x86_64-linux-shared ./*
'''
archiveArtifacts artifacts: 'artifacts/ton-x86_64-linux-shared.zip'
}
}
}
stage('Ubuntu 20.04 x86-64 (portable)') {
agent {
label 'Ubuntu_x86-64'
}
steps {
timeout(time: 180, unit: 'MINUTES') {
sh '''
cp assembly/nix/build-linux-x86-64-nix.sh .
chmod +x build-linux-x86-64-nix.sh
./build-linux-x86-64-nix.sh
'''
sh '''
cd artifacts
zip -9r ton-x86-64-linux-portable ./*
'''
archiveArtifacts artifacts: 'artifacts/ton-x86-64-linux-portable.zip'
}
}
}
stage('Ubuntu 20.04 aarch64 (shared)') {
agent {
label 'Ubuntu_arm64'
}
steps {
timeout(time: 180, unit: 'MINUTES') {
sh '''
cp assembly/native/build-ubuntu-shared.sh .
chmod +x build-ubuntu-shared.sh
./build-ubuntu-shared.sh -a
'''
sh '''
cd artifacts
zip -9r ton-arm64-linux-shared ./*
'''
archiveArtifacts artifacts: 'artifacts/ton-arm64-linux-shared.zip'
}
}
}
stage('Ubuntu 20.04 aarch64 (portable)') {
agent {
label 'Ubuntu_arm64'
}
steps {
timeout(time: 180, unit: 'MINUTES') {
sh '''
cp assembly/nix/build-linux-arm64-nix.sh .
chmod +x build-linux-arm64-nix.sh
./build-linux-arm64-nix.sh
'''
sh '''
cd artifacts
zip -9r ton-arm64-linux-portable ./*
'''
archiveArtifacts artifacts: 'artifacts/ton-arm64-linux-portable.zip'
}
}
}
stage('macOS 12.7 x86-64 (shared)') {
agent {
label 'macOS_12.7_x86-64'
}
steps {
timeout(time: 180, unit: 'MINUTES') {
sh '''
cp assembly/native/build-macos-shared.sh .
chmod +x build-macos-shared.sh
./build-macos-shared.sh -a
'''
sh '''
cd artifacts
zip -9r ton-x86-64-macos-shared ./*
'''
archiveArtifacts artifacts: 'artifacts/ton-x86-64-macos-shared.zip'
}
}
}
stage('macOS 12.7 x86-64 (portable)') {
agent {
label 'macOS_12.7_x86-64'
}
steps {
timeout(time: 180, unit: 'MINUTES') {
sh '''
cp assembly/nix/build-macos-nix.sh .
chmod +x build-macos-nix.sh
./build-macos-nix.sh
'''
sh '''
cd artifacts
zip -9r ton-x86-64-macos-portable ./*
'''
archiveArtifacts artifacts: 'artifacts/ton-x86-64-macos-portable.zip'
}
}
}
stage('macOS 12.6 aarch64 (shared)') {
agent {
label 'macOS_12.6-arm64-m1'
}
steps {
timeout(time: 180, unit: 'MINUTES') {
sh '''
cp assembly/native/build-macos-shared.sh .
chmod +x build-macos-shared.sh
./build-macos-shared.sh -a
'''
sh '''
cd artifacts
zip -9r ton-arm64-macos-m1-shared ./*
'''
archiveArtifacts artifacts: 'artifacts/ton-arm64-macos-m1-shared.zip'
}
}
}
stage('macOS 12.6 aarch64 (portable)') {
agent {
label 'macOS_12.6-arm64-m1'
}
steps {
timeout(time: 180, unit: 'MINUTES') {
sh '''
cp assembly/nix/build-macos-nix.sh .
chmod +x build-macos-nix.sh
./build-macos-nix.sh
'''
sh '''
cd artifacts
zip -9r ton-arm64-macos-portable ./*
'''
archiveArtifacts artifacts: 'artifacts/ton-arm64-macos-portable.zip'
}
}
}
stage('macOS 13.2 aarch64 (shared)') {
agent {
label 'macOS_13.2-arm64-m2'
}
steps {
timeout(time: 180, unit: 'MINUTES') {
sh '''
cp assembly/native/build-macos-shared.sh .
chmod +x build-macos-shared.sh
./build-macos-shared.sh -a
'''
sh '''
cd artifacts
zip -9r ton-arm64-macos-m2-shared ./*
'''
archiveArtifacts artifacts: 'artifacts/ton-arm64-macos-m2-shared.zip'
}
}
}
stage('Windows Server 2022 x86-64') {
agent {
label 'Windows_x86-64'
}
steps {
timeout(time: 180, unit: 'MINUTES') {
bat '''
copy assembly\\native\\build-windows.bat .
build-windows.bat
'''
bat '''
cd artifacts
zip -9r ton-x86-64-windows ./*
'''
archiveArtifacts artifacts: 'artifacts/ton-x86-64-windows.zip'
}
}
}
stage('Android Tonlib') {
agent {
label 'Ubuntu_x86-64'
}
steps {
timeout(time: 180, unit: 'MINUTES') {
sh '''
cp assembly/android/build-android-tonlib.sh .
chmod +x build-android-tonlib.sh
./build-android-tonlib.sh -a
'''
sh '''
cd artifacts/tonlib-android-jni
zip -9r ton-android-tonlib ./*
'''
archiveArtifacts artifacts: 'artifacts/tonlib-android-jni/ton-android-tonlib.zip'
}
}
}
stage('WASM fift func emulator') {
agent {
label 'Ubuntu_x86-64'
}
steps {
timeout(time: 180, unit: 'MINUTES') {
sh '''
cp assembly/wasm/fift-func-wasm-build-ubuntu.sh .
chmod +x fift-func-wasm-build-ubuntu.sh
./fift-func-wasm-build-ubuntu.sh -a
'''
sh '''
cd artifacts
zip -9r ton-wasm-binaries ./*
'''
archiveArtifacts artifacts: 'artifacts/ton-wasm-binaries.zip'
}
}
}
}
}
}
}


@ -0,0 +1,109 @@
#/bin/bash
with_tests=false
with_artifacts=false
while getopts 'ta' flag; do
case "${flag}" in
t) with_tests=true ;;
a) with_artifacts=true ;;
*) break
;;
esac
done
if [ ! -d "build" ]; then
mkdir build
cd build
else
cd build
rm -rf .ninja* CMakeCache.txt
fi
export CC=$(which clang-16)
export CXX=$(which clang++-16)
export CCACHE_DISABLE=1
if [ ! -d "openssl_3" ]; then
git clone https://github.com/openssl/openssl openssl_3
cd openssl_3
opensslPath=`pwd`
git checkout openssl-3.1.4
./config
make build_libs -j12
test $? -eq 0 || { echo "Can't compile openssl_3"; exit 1; }
cd ..
else
opensslPath=$(pwd)/openssl_3
echo "Using compiled openssl_3"
fi
cmake -GNinja .. \
-DCMAKE_BUILD_TYPE=Release \
-DPORTABLE=1 \
-DOPENSSL_ROOT_DIR=$opensslPath \
-DOPENSSL_INCLUDE_DIR=$opensslPath/include \
-DOPENSSL_CRYPTO_LIBRARY=$opensslPath/libcrypto.so
test $? -eq 0 || { echo "Can't configure ton"; exit 1; }
if [ "$with_tests" = true ]; then
ninja storage-daemon storage-daemon-cli fift func tolk tonlib tonlibjson tonlib-cli \
validator-engine lite-client pow-miner validator-engine-console blockchain-explorer \
generate-random-id json2tlo dht-server http-proxy rldp-http-proxy \
adnl-proxy create-state emulator test-ed25519 test-ed25519-crypto test-bigint \
test-vm test-fift test-cells test-smartcont test-net test-tdactor test-tdutils \
test-tonlib-offline test-adnl test-dht test-rldp test-rldp2 test-catchain \
test-fec test-tddb test-db test-validator-session-state test-emulator proxy-liteserver
test $? -eq 0 || { echo "Can't compile ton"; exit 1; }
else
ninja storage-daemon storage-daemon-cli fift func tolk tonlib tonlibjson tonlib-cli \
validator-engine lite-client pow-miner validator-engine-console blockchain-explorer \
generate-random-id json2tlo dht-server http-proxy rldp-http-proxy \
adnl-proxy create-state emulator proxy-liteserver
test $? -eq 0 || { echo "Can't compile ton"; exit 1; }
fi
# simple binaries' test
./storage/storage-daemon/storage-daemon -V || exit 1
./validator-engine/validator-engine -V || exit 1
./lite-client/lite-client -V || exit 1
./crypto/fift -V || exit 1
echo validator-engine
ldd ./validator-engine/validator-engine || exit 1
ldd ./validator-engine-console/validator-engine-console || exit 1
ldd ./crypto/fift || exit 1
echo blockchain-explorer
ldd ./blockchain-explorer/blockchain-explorer || exit 1
echo libtonlibjson.so
ldd ./tonlib/libtonlibjson.so.0.5 || exit 1
echo libemulator.so
ldd ./emulator/libemulator.so || exit 1
cd ..
if [ "$with_artifacts" = true ]; then
rm -rf artifacts
mkdir artifacts
mv build/tonlib/libtonlibjson.so.0.5 build/tonlib/libtonlibjson.so
cp build/storage/storage-daemon/storage-daemon build/storage/storage-daemon/storage-daemon-cli \
build/crypto/fift build/crypto/tlbc build/crypto/func build/tolk/tolk build/crypto/create-state build/blockchain-explorer/blockchain-explorer \
build/validator-engine-console/validator-engine-console build/tonlib/tonlib-cli build/utils/proxy-liteserver \
build/tonlib/libtonlibjson.so build/http/http-proxy build/rldp-http-proxy/rldp-http-proxy \
build/dht-server/dht-server build/lite-client/lite-client build/validator-engine/validator-engine \
build/utils/generate-random-id build/utils/json2tlo build/adnl/adnl-proxy build/emulator/libemulator.so \
artifacts
test $? -eq 0 || { echo "Can't copy final binaries"; exit 1; }
cp -R crypto/smartcont artifacts
cp -R crypto/fift/lib artifacts
chmod -R +x artifacts/*
fi
if [ "$with_tests" = true ]; then
cd build
# ctest --output-on-failure -E "test-catchain|test-actors|test-smartcont|test-adnl|test-validator-session-state|test-dht|test-rldp"
ctest --output-on-failure --timeout 1800
fi
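A short usage sketch for this script's two getopts flags (run from the repository root, where the workflows copy it):

```bash
./build-ubuntu-appimages.sh        # build binaries only
./build-ubuntu-appimages.sh -a     # also collect binaries, smartcont/ and lib/ into artifacts/
./build-ubuntu-appimages.sh -t -a  # additionally run the ctest suite after building
```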


@ -0,0 +1,132 @@
#/bin/bash
#sudo apt-get update
#sudo apt-get install -y build-essential git cmake ninja-build automake libtool texinfo autoconf libc++-dev libc++abi-dev
with_artifacts=false
while getopts 'ta' flag; do
case "${flag}" in
a) with_artifacts=true ;;
*) break
;;
esac
done
if [ ! -d "build" ]; then
mkdir build
cd build
else
cd build
rm -rf .ninja* CMakeCache.txt
fi
export CC=$(which clang)
export CXX=$(which clang++)
export CCACHE_DISABLE=1
if [ ! -d "lz4" ]; then
git clone https://github.com/lz4/lz4.git
cd lz4
lz4Path=`pwd`
git checkout v1.9.4
CFLAGS="-fPIC" make -j12
test $? -eq 0 || { echo "Can't compile lz4"; exit 1; }
cd ..
# ./lib/liblz4.a
# ./lib
else
lz4Path=$(pwd)/lz4
echo "Using compiled lz4"
fi
if [ ! -d "libsodium" ]; then
export LIBSODIUM_FULL_BUILD=1
git clone https://github.com/jedisct1/libsodium.git
cd libsodium
sodiumPath=`pwd`
git checkout 1.0.18
./autogen.sh
./configure --with-pic --enable-static
make -j12
test $? -eq 0 || { echo "Can't compile libsodium"; exit 1; }
cd ..
else
sodiumPath=$(pwd)/libsodium
echo "Using compiled libsodium"
fi
if [ ! -d "openssl_3" ]; then
git clone https://github.com/openssl/openssl openssl_3
cd openssl_3
opensslPath=`pwd`
git checkout openssl-3.1.4
./config
make build_libs -j12
test $? -eq 0 || { echo "Can't compile openssl_3"; exit 1; }
cd ..
else
opensslPath=$(pwd)/openssl_3
echo "Using compiled openssl_3"
fi
if [ ! -d "zlib" ]; then
git clone https://github.com/madler/zlib.git
cd zlib
zlibPath=`pwd`
./configure --static
make -j12
test $? -eq 0 || { echo "Can't compile zlib"; exit 1; }
cd ..
else
zlibPath=$(pwd)/zlib
echo "Using compiled zlib"
fi
if [ ! -d "libmicrohttpd" ]; then
git clone https://git.gnunet.org/libmicrohttpd.git
cd libmicrohttpd
libmicrohttpdPath=`pwd`
./autogen.sh
./configure --enable-static --disable-tests --disable-benchmark --disable-shared --disable-https --with-pic
make -j12
test $? -eq 0 || { echo "Can't compile libmicrohttpd"; exit 1; }
cd ..
else
libmicrohttpdPath=$(pwd)/libmicrohttpd
echo "Using compiled libmicrohttpd"
fi
cmake -GNinja .. \
-DPORTABLE=1 \
-DCMAKE_BUILD_TYPE=Release \
-DOPENSSL_FOUND=1 \
-DOPENSSL_INCLUDE_DIR=$opensslPath/include \
-DOPENSSL_CRYPTO_LIBRARY=$opensslPath/libcrypto.a \
-DZLIB_FOUND=1 \
-DZLIB_INCLUDE_DIR=$zlibPath \
-DZLIB_LIBRARIES=$zlibPath/libz.a \
-DSODIUM_FOUND=1 \
-DSODIUM_INCLUDE_DIR=$sodiumPath/src/libsodium/include \
-DSODIUM_LIBRARY_RELEASE=$sodiumPath/src/libsodium/.libs/libsodium.a \
-DMHD_FOUND=1 \
-DMHD_INCLUDE_DIR=$libmicrohttpdPath/src/include \
-DMHD_LIBRARY=$libmicrohttpdPath/src/microhttpd/.libs/libmicrohttpd.a \
-DLZ4_FOUND=1 \
-DLZ4_INCLUDE_DIRS=$lz4Path/lib \
-DLZ4_LIBRARIES=$lz4Path/lib/liblz4.a
test $? -eq 0 || { echo "Can't configure ton"; exit 1; }
ninja tonlibjson emulator
test $? -eq 0 || { echo "Can't compile ton"; exit 1; }
cd ..
mkdir artifacts
mv build/tonlib/libtonlibjson.so.0.5 build/tonlib/libtonlibjson.so
cp build/tonlib/libtonlibjson.so \
build/emulator/libemulator.so \
artifacts
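After a successful run the two shared libraries end up in `artifacts/`; a quick way to sanity-check them, mirroring the ldd checks used in the sibling build script:

```bash
./build-ubuntu-portable-libs.sh
ldd artifacts/libtonlibjson.so   # should resolve only against system libraries
ldd artifacts/libemulator.so
```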


@ -1,7 +1,7 @@
 #/bin/bash
 #sudo apt-get update
-#sudo apt-get install -y build-essential git cmake ninja-build automake libtool texinfo autoconf
+#sudo apt-get install -y build-essential git cmake ninja-build automake libtool texinfo autoconf libc++-dev libc++abi-dev
 with_tests=false
 with_artifacts=false
@ -24,8 +24,8 @@ else
 rm -rf .ninja* CMakeCache.txt
 fi
-export CC=$(which clang-16)
-export CXX=$(which clang++-16)
+export CC=$(which clang)
+export CXX=$(which clang++)
 export CCACHE_DISABLE=1
 if [ ! -d "lz4" ]; then
@ -33,7 +33,7 @@ git clone https://github.com/lz4/lz4.git
 cd lz4
 lz4Path=`pwd`
 git checkout v1.9.4
-make -j12
+CFLAGS="-fPIC" make -j12
 test $? -eq 0 || { echo "Can't compile lz4"; exit 1; }
 cd ..
 # ./lib/liblz4.a


@ -2292,7 +2292,8 @@ Ref<vm::Cell> ConfigInfo::lookup_library(td::ConstBitPtr root_hash) const {
td::Result<Ref<vm::Tuple>> ConfigInfo::get_prev_blocks_info() const { td::Result<Ref<vm::Tuple>> ConfigInfo::get_prev_blocks_info() const {
// [ wc:Integer shard:Integer seqno:Integer root_hash:Integer file_hash:Integer] = BlockId; // [ wc:Integer shard:Integer seqno:Integer root_hash:Integer file_hash:Integer] = BlockId;
// [ last_mc_blocks:[BlockId...] // [ last_mc_blocks:[BlockId...]
// prev_key_block:BlockId ] : PrevBlocksInfo // prev_key_block:BlockId
// last_mc_blocks_100[BlockId...] ] : PrevBlocksInfo
auto block_id_to_tuple = [](const ton::BlockIdExt& block_id) -> vm::Ref<vm::Tuple> { auto block_id_to_tuple = [](const ton::BlockIdExt& block_id) -> vm::Ref<vm::Tuple> {
td::RefInt256 shard = td::make_refint(block_id.id.shard); td::RefInt256 shard = td::make_refint(block_id.id.shard);
if (shard->sgn() < 0) { if (shard->sgn() < 0) {
@ -2302,25 +2303,44 @@ td::Result<Ref<vm::Tuple>> ConfigInfo::get_prev_blocks_info() const {
td::make_refint(block_id.id.seqno), td::bits_to_refint(block_id.root_hash.bits(), 256), td::make_refint(block_id.id.seqno), td::bits_to_refint(block_id.root_hash.bits(), 256),
td::bits_to_refint(block_id.file_hash.bits(), 256)); td::bits_to_refint(block_id.file_hash.bits(), 256));
}; };
std::vector<vm::StackEntry> last_mc_blocks; std::vector<vm::StackEntry> tuple;
std::vector<vm::StackEntry> last_mc_blocks;
last_mc_blocks.push_back(block_id_to_tuple(block_id)); last_mc_blocks.push_back(block_id_to_tuple(block_id));
for (ton::BlockSeqno seqno = block_id.id.seqno; seqno > 0 && last_mc_blocks.size() < 16;) { for (ton::BlockSeqno seqno = block_id.id.seqno; seqno > 0 && last_mc_blocks.size() < 16;) {
--seqno; --seqno;
ton::BlockIdExt block_id; ton::BlockIdExt id;
if (!get_old_mc_block_id(seqno, block_id)) { if (!get_old_mc_block_id(seqno, id)) {
return td::Status::Error("cannot fetch old mc block"); return td::Status::Error("cannot fetch old mc block");
} }
last_mc_blocks.push_back(block_id_to_tuple(block_id)); last_mc_blocks.push_back(block_id_to_tuple(id));
} }
tuple.push_back(td::make_cnt_ref<std::vector<vm::StackEntry>>(std::move(last_mc_blocks)));
ton::BlockIdExt last_key_block; ton::BlockIdExt last_key_block;
ton::LogicalTime last_key_block_lt; ton::LogicalTime last_key_block_lt;
if (!get_last_key_block(last_key_block, last_key_block_lt)) { if (!get_last_key_block(last_key_block, last_key_block_lt)) {
return td::Status::Error("cannot fetch last key block"); return td::Status::Error("cannot fetch last key block");
} }
return vm::make_tuple_ref(td::make_cnt_ref<std::vector<vm::StackEntry>>(std::move(last_mc_blocks)), tuple.push_back(block_id_to_tuple(last_key_block));
block_id_to_tuple(last_key_block));
if (get_global_version() >= 9) {
std::vector<vm::StackEntry> last_mc_blocks_100;
for (ton::BlockSeqno seqno = block_id.id.seqno / 100 * 100; last_mc_blocks_100.size() < 16;) {
ton::BlockIdExt id;
if (!get_old_mc_block_id(seqno, id)) {
return td::Status::Error("cannot fetch old mc block");
}
last_mc_blocks_100.push_back(block_id_to_tuple(id));
if (seqno < 100) {
break;
}
seqno -= 100;
}
tuple.push_back(td::make_cnt_ref<std::vector<vm::StackEntry>>(std::move(last_mc_blocks_100)));
}
return td::make_cnt_ref<std::vector<vm::StackEntry>>(std::move(tuple));
} }
td::optional<PrecompiledContractsConfig::Contract> PrecompiledContractsConfig::get_contract( td::optional<PrecompiledContractsConfig::Contract> PrecompiledContractsConfig::get_contract(
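
The seqno selection behind the new last_mc_blocks_100 list can be reproduced in isolation; a minimal standalone sketch (plain C++, not the ConfigInfo code itself, with the arithmetic taken from the loop above):

#include <cstdint>
#include <cstdio>
#include <vector>

// Starting from the last masterchain seqno rounded down to a multiple of 100,
// collect up to 16 seqnos, stepping down by 100.
std::vector<std::uint32_t> last_mc_blocks_100_seqnos(std::uint32_t last_seqno) {
  std::vector<std::uint32_t> result;
  std::uint32_t seqno = last_seqno / 100 * 100;
  while (result.size() < 16) {
    result.push_back(seqno);
    if (seqno < 100) {
      break;
    }
    seqno -= 100;
  }
  return result;
}

int main() {
  // For last seqno 19071 this prints 19000, 18900, ..., 17500.
  for (std::uint32_t s : last_mc_blocks_100_seqnos(19071)) {
    std::printf("%u\n", s);
  }
}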

View file

@ -1145,31 +1145,64 @@ td::RefInt256 ComputePhaseConfig::compute_gas_price(td::uint64 gas_used) const {
namespace transaction { namespace transaction {
/** /**
* Checks if it is required to increase gas_limit (from GasLimitsPrices config) to special_gas_limit * 2 * Checks if it is required to increase gas_limit (from GasLimitsPrices config) for the transaction
* from masterchain GasLimitsPrices config for the transaction.
* *
* In January 2024 a highload wallet of @wallet Telegram bot in mainnet was stuck because current gas limit (1M) is * In January 2024 a highload wallet of @wallet Telegram bot in mainnet was stuck because current gas limit (1M) is
* not enough to clean up old queries, thus locking funds inside. * not enough to clean up old queries, thus locking funds inside.
* See comment in crypto/smartcont/highload-wallet-v2-code.fc for details on why this happened. * See comment in crypto/smartcont/highload-wallet-v2-code.fc for details on why this happened.
* Account address: EQD_v9j1rlsuHHw2FIhcsCFFSD367ldfDdCKcsNmNpIRzUlu * Account address: EQD_v9j1rlsuHHw2FIhcsCFFSD367ldfDdCKcsNmNpIRzUlu
* It was proposed to validators to increase gas limit for this account for a limited amount of time (until 2024-02-29). * It was proposed to validators to increase gas limit for this account to 70M for a limited amount
* of time (until 2024-02-29).
* It is activated by setting global version to 5 in ConfigParam 8. * It is activated by setting global version to 5 in ConfigParam 8.
* This config change also activates new behavior for special accounts in masterchain. * This config change also activates new behavior for special accounts in masterchain.
* *
* In August 2024 it was decided to unlock other old highload wallets that got into the same situation.
* See https://t.me/tondev_news/129
* It is activated by setting global version to 9.
*
* @param cfg The compute phase configuration. * @param cfg The compute phase configuration.
* @param now The Unix time of the transaction. * @param now The Unix time of the transaction.
* @param account The account of the transaction. * @param account The account of the transaction.
* *
* @returns True if gas_limit override is required, false otherwise * @returns Overridden gas limit or empty td::optional
*/ */
static bool override_gas_limit(const ComputePhaseConfig& cfg, ton::UnixTime now, const Account& account) { static td::optional<td::uint64> override_gas_limit(const ComputePhaseConfig& cfg, ton::UnixTime now,
if (!cfg.special_gas_full) { const Account& account) {
return false; struct OverridenGasLimit {
td::uint64 new_limit;
int from_version;
ton::UnixTime until;
};
static std::map<std::pair<ton::WorkchainId, ton::StdSmcAddress>, OverridenGasLimit> accounts = []() {
auto parse_addr = [](const char* s) -> std::pair<ton::WorkchainId, ton::StdSmcAddress> {
auto r_addr = StdAddress::parse(td::Slice(s));
r_addr.ensure();
return {r_addr.ok().workchain, r_addr.ok().addr};
};
std::map<std::pair<ton::WorkchainId, ton::StdSmcAddress>, OverridenGasLimit> accounts;
// Increase limit for EQD_v9j1rlsuHHw2FIhcsCFFSD367ldfDdCKcsNmNpIRzUlu until 2024-02-29 00:00:00 UTC
accounts[parse_addr("0:FFBFD8F5AE5B2E1C7C3614885CB02145483DFAEE575F0DD08A72C366369211CD")] = {
.new_limit = 70'000'000, .from_version = 5, .until = 1709164800};
// Increase limit for multiple accounts (https://t.me/tondev_news/129) until 2025-03-01 00:00:00 UTC
accounts[parse_addr("UQBeSl-dumOHieZ3DJkNKVkjeso7wZ0VpzR4LCbLGTQ8xr57")] = {
.new_limit = 70'000'000, .from_version = 9, .until = 1740787200};
accounts[parse_addr("EQC3VcQ-43klww9UfimR58TBjBzk7GPupXQ3CNuthoNp-uTR")] = {
.new_limit = 70'000'000, .from_version = 9, .until = 1740787200};
accounts[parse_addr("EQBhwBb8jvokGvfreHRRoeVxI237PrOJgyrsAhLA-4rBC_H5")] = {
.new_limit = 70'000'000, .from_version = 9, .until = 1740787200};
accounts[parse_addr("EQCkoRp4OE-SFUoMEnYfL3vF43T3AzNfW8jyTC4yzk8cJqMS")] = {
.new_limit = 70'000'000, .from_version = 9, .until = 1740787200};
accounts[parse_addr("EQBDanbCeUqI4_v-xrnAN0_I2wRvEIaLg1Qg2ZN5c6Zl1KOh")] = {
.new_limit = 225'000'000, .from_version = 9, .until = 1740787200};
return accounts;
}();
auto it = accounts.find({account.workchain, account.addr});
if (it == accounts.end() || cfg.global_version < it->second.from_version || now >= it->second.until) {
return {};
} }
ton::UnixTime until = 1709164800; // 2024-02-29 00:00:00 UTC return it->second.new_limit;
ton::WorkchainId wc = 0;
const char* addr_hex = "FFBFD8F5AE5B2E1C7C3614885CB02145483DFAEE575F0DD08A72C366369211CD";
return now < until && account.workchain == wc && account.addr.to_hex() == addr_hex;
} }
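
The whitelist above is keyed by raw workchain:hash pairs derived from the user-friendly addresses; a minimal standalone sketch of that mapping (the header path is an assumption, the API usage mirrors the parse_addr lambda above):

#include "block/block.h"     // block::StdAddress (assumed header path)
#include "td/utils/Slice.h"
#include <cstdio>

int main() {
  auto r_addr = block::StdAddress::parse(
      td::Slice("EQD_v9j1rlsuHHw2FIhcsCFFSD367ldfDdCKcsNmNpIRzUlu"));
  if (r_addr.is_error()) {
    std::fprintf(stderr, "parse failed\n");
    return 1;
  }
  auto addr = r_addr.move_as_ok();
  // Expected: 0:FFBFD8F5AE5B2E1C7C3614885CB02145483DFAEE575F0DD08A72C366369211CD
  std::printf("%d:%s\n", addr.workchain, addr.addr.to_hex().c_str());
}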
/** /**
@ -1183,10 +1216,12 @@ static bool override_gas_limit(const ComputePhaseConfig& cfg, ton::UnixTime now,
* @returns The amount of gas. * @returns The amount of gas.
*/ */
td::uint64 Transaction::gas_bought_for(const ComputePhaseConfig& cfg, td::RefInt256 nanograms) { td::uint64 Transaction::gas_bought_for(const ComputePhaseConfig& cfg, td::RefInt256 nanograms) {
if (override_gas_limit(cfg, now, account)) { if (auto new_limit = override_gas_limit(cfg, now, account)) {
gas_limit_overridden = true; gas_limit_overridden = true;
// Same as ComputePhaseConfig::gas_bought_for, but with other gas_limit and max_gas_threshold // Same as ComputePhaseConfig::gas_bought_for, but with other gas_limit and max_gas_threshold
auto gas_limit = cfg.mc_gas_prices.special_gas_limit * 2; auto gas_limit = new_limit.value();
LOG(INFO) << "overridding gas limit for account " << account.workchain << ":" << account.addr.to_hex() << " to "
<< gas_limit;
auto max_gas_threshold = auto max_gas_threshold =
compute_max_gas_threshold(cfg.gas_price256, gas_limit, cfg.flat_gas_limit, cfg.flat_gas_price); compute_max_gas_threshold(cfg.gas_price256, gas_limit, cfg.flat_gas_limit, cfg.flat_gas_price);
if (nanograms.is_null() || sgn(nanograms) < 0) { if (nanograms.is_null() || sgn(nanograms) < 0) {
@ -1336,7 +1371,8 @@ Ref<vm::Tuple> Transaction::prepare_vm_c7(const ComputePhaseConfig& cfg) const {
// See crypto/block/mc-config.cpp#2223 (get_prev_blocks_info) // See crypto/block/mc-config.cpp#2223 (get_prev_blocks_info)
// [ wc:Integer shard:Integer seqno:Integer root_hash:Integer file_hash:Integer] = BlockId; // [ wc:Integer shard:Integer seqno:Integer root_hash:Integer file_hash:Integer] = BlockId;
// [ last_mc_blocks:[BlockId...] // [ last_mc_blocks:[BlockId...]
// prev_key_block:BlockId ] : PrevBlocksInfo // prev_key_block:BlockId
// last_mc_blocks_100:[BlockId...] ] : PrevBlocksInfo
// The only context where PrevBlocksInfo (13 parameter of c7) is null is inside emulator // The only context where PrevBlocksInfo (13 parameter of c7) is null is inside emulator
// where it need to be set via transaction_emulator_set_prev_blocks_info (see emulator/emulator-extern.cpp) // where it need to be set via transaction_emulator_set_prev_blocks_info (see emulator/emulator-extern.cpp)
// Inside validator, collator and liteserver checking external message contexts // Inside validator, collator and liteserver checking external message contexts
@ -1691,9 +1727,8 @@ bool Transaction::prepare_compute_phase(const ComputePhaseConfig& cfg) {
} }
} }
} }
vm::VmState vm{new_code, std::move(stack), gas, 1, new_data, vm_log, compute_vm_libraries(cfg)}; vm::VmState vm{new_code, cfg.global_version, std::move(stack), gas, 1, new_data, vm_log, compute_vm_libraries(cfg)};
vm.set_max_data_depth(cfg.max_vm_data_depth); vm.set_max_data_depth(cfg.max_vm_data_depth);
vm.set_global_version(cfg.global_version);
vm.set_c7(prepare_vm_c7(cfg)); // tuple with SmartContractInfo vm.set_c7(prepare_vm_c7(cfg)); // tuple with SmartContractInfo
vm.set_chksig_always_succeed(cfg.ignore_chksig); vm.set_chksig_always_succeed(cfg.ignore_chksig);
vm.set_stop_on_accept_message(cfg.stop_on_accept_message); vm.set_stop_on_accept_message(cfg.stop_on_accept_message);

View file

@ -1312,6 +1312,7 @@ x{F832} @Defop CONFIGPARAM
x{F833} @Defop CONFIGOPTPARAM x{F833} @Defop CONFIGOPTPARAM
x{F83400} @Defop PREVMCBLOCKS x{F83400} @Defop PREVMCBLOCKS
x{F83401} @Defop PREVKEYBLOCK x{F83401} @Defop PREVKEYBLOCK
x{F83402} @Defop PREVMCBLOCKS_100
x{F835} @Defop GLOBALID x{F835} @Defop GLOBALID
x{F836} @Defop GETGASFEE x{F836} @Defop GETGASFEE
x{F837} @Defop GETSTORAGEFEE x{F837} @Defop GETSTORAGEFEE

View file

@ -223,14 +223,14 @@ SmartContract::Answer run_smartcont(SmartContract::State state, td::Ref<vm::Stac
stack->dump(os, 2); stack->dump(os, 2);
LOG(DEBUG) << "VM stack:\n" << os.str(); LOG(DEBUG) << "VM stack:\n" << os.str();
} }
vm::VmState vm{state.code, std::move(stack), gas, 1, state.data, log}; int global_version = config ? config->get_global_version() : 0;
vm::VmState vm{state.code, global_version, std::move(stack), gas, 1, state.data, log};
vm.set_c7(std::move(c7)); vm.set_c7(std::move(c7));
vm.set_chksig_always_succeed(ignore_chksig); vm.set_chksig_always_succeed(ignore_chksig);
if (!libraries.is_null()) { if (!libraries.is_null()) {
vm.register_library_collection(libraries); vm.register_library_collection(libraries);
} }
if (config) { if (config) {
vm.set_global_version(config->get_global_version());
auto r_limits = config->get_size_limits_config(); auto r_limits = config->get_size_limits_config();
if (r_limits.is_ok()) { if (r_limits.is_ok()) {
vm.set_max_data_depth(r_limits.ok().max_vm_data_depth); vm.set_max_data_depth(r_limits.ok().max_vm_data_depth);

View file

@ -261,10 +261,10 @@ int exec_runvm_common(VmState* st, unsigned mode) {
vm::GasLimits gas{gas_limit, gas_max}; vm::GasLimits gas{gas_limit, gas_max};
VmStateInterface::Guard guard{nullptr}; // Don't consume gas for creating/loading cells during VM init VmStateInterface::Guard guard{nullptr}; // Don't consume gas for creating/loading cells during VM init
VmState new_state{std::move(code), std::move(new_stack), gas, (int)mode & 3, std::move(data), VmState new_state{
std::move(code), st->get_global_version(), std::move(new_stack), gas, (int)mode & 3, std::move(data),
VmLog{}, std::vector<Ref<Cell>>{}, std::move(c7)}; VmLog{}, std::vector<Ref<Cell>>{}, std::move(c7)};
new_state.set_chksig_always_succeed(st->get_chksig_always_succeed()); new_state.set_chksig_always_succeed(st->get_chksig_always_succeed());
new_state.set_global_version(st->get_global_version());
st->run_child_vm(std::move(new_state), with_data, mode & 32, mode & 8, mode & 128, ret_vals); st->run_child_vm(std::move(new_state), with_data, mode & 32, mode & 8, mode & 128, ret_vals);
return 0; return 0;
} }

View file

@ -309,7 +309,9 @@ class StaticBagOfCellsDbLazyImpl : public StaticBagOfCellsDb {
return 0; return 0;
} }
td::Slice offset_view; td::Slice offset_view;
CHECK(info_.offset_byte_size <= 8); if (info_.offset_byte_size > 8) {
return td::Status::Error(PSTRING() << "bag-of-cell error: invalid offset_byte_size " << info_.offset_byte_size);
}
char arr[8]; char arr[8];
td::RwMutex::ReadLock guard; td::RwMutex::ReadLock guard;
if (info_.has_index) { if (info_.has_index) {
@ -321,19 +323,25 @@ class StaticBagOfCellsDbLazyImpl : public StaticBagOfCellsDb {
offset_view = td::Slice(index_data_).substr((td::int64)idx * info_.offset_byte_size, info_.offset_byte_size); offset_view = td::Slice(index_data_).substr((td::int64)idx * info_.offset_byte_size, info_.offset_byte_size);
} }
CHECK(offset_view.size() == (size_t)info_.offset_byte_size); if (offset_view.size() != (size_t)info_.offset_byte_size) {
return td::Status::Error(PSTRING() << "bag-of-cell error: invalid offset view size" << offset_view.size());
}
return td::narrow_cast<std::size_t>(info_.read_offset(offset_view.ubegin())); return td::narrow_cast<std::size_t>(info_.read_offset(offset_view.ubegin()));
} }
td::Result<td::int64> load_root_idx(int root_i) { td::Result<td::int64> load_root_idx(int root_i) {
CHECK(root_i >= 0 && root_i < info_.root_count); if (root_i < 0 || root_i >= info_.root_count) {
return td::Status::Error(PSTRING() << "bag-of-cell error: invalid root index " << root_i);
}
if (!info_.has_roots) { if (!info_.has_roots) {
return 0; return 0;
} }
char arr[8]; char arr[8];
TRY_RESULT(idx_view, data_.view(td::MutableSlice(arr, info_.ref_byte_size), TRY_RESULT(idx_view, data_.view(td::MutableSlice(arr, info_.ref_byte_size),
info_.roots_offset + (td::int64)root_i * info_.ref_byte_size)); info_.roots_offset + (td::int64)root_i * info_.ref_byte_size));
CHECK(idx_view.size() == (size_t)info_.ref_byte_size); if (idx_view.size() != (size_t)info_.ref_byte_size) {
return td::Status::Error(PSTRING() << "bag-of-cell error: invalid idx_view size" << idx_view.size());
}
return info_.read_ref(idx_view.ubegin()); return info_.read_ref(idx_view.ubegin());
} }
@ -343,8 +351,9 @@ class StaticBagOfCellsDbLazyImpl : public StaticBagOfCellsDb {
bool should_cache; bool should_cache;
}; };
td::Result<CellLocation> get_cell_location(int idx) { td::Result<CellLocation> get_cell_location(int idx) {
CHECK(idx >= 0); if (idx < 0 || idx >= info_.cell_count) {
CHECK(idx < info_.cell_count); return td::Status::Error(PSTRING() << "bag-of-cell error: invalid cell index " << idx);
}
TRY_STATUS(preload_index(idx)); TRY_STATUS(preload_index(idx));
TRY_RESULT(from, load_idx_offset(idx - 1)); TRY_RESULT(from, load_idx_offset(idx - 1));
TRY_RESULT(till, load_idx_offset(idx)); TRY_RESULT(till, load_idx_offset(idx));
@ -357,10 +366,15 @@ class StaticBagOfCellsDbLazyImpl : public StaticBagOfCellsDb {
res.should_cache = res.end % 2 == 1; res.should_cache = res.end % 2 == 1;
res.end /= 2; res.end /= 2;
} }
CHECK(std::numeric_limits<std::size_t>::max() - res.begin >= info_.data_offset); if (std::numeric_limits<std::size_t>::max() - res.begin < info_.data_offset ||
CHECK(std::numeric_limits<std::size_t>::max() - res.end >= info_.data_offset); std::numeric_limits<std::size_t>::max() - res.end < info_.data_offset) {
return td::Status::Error(PSTRING() << "bag-of-cell error: invalid cell location (1) " << res.begin << ":" << res.end);
}
res.begin += static_cast<std::size_t>(info_.data_offset); res.begin += static_cast<std::size_t>(info_.data_offset);
res.end += static_cast<std::size_t>(info_.data_offset); res.end += static_cast<std::size_t>(info_.data_offset);
if (res.begin > res.end) {
return td::Status::Error(PSTRING() << "bag-of-cell error: invalid cell location (2) " << res.begin << ":" << res.end);
}
return res; return res;
} }
@ -396,8 +410,6 @@ class StaticBagOfCellsDbLazyImpl : public StaticBagOfCellsDb {
if (info_.has_index) { if (info_.has_index) {
return td::Status::OK(); return td::Status::OK();
} }
CHECK(idx < info_.cell_count);
if (index_i_.load(std::memory_order_relaxed) > idx) { if (index_i_.load(std::memory_order_relaxed) > idx) {
return td::Status::OK(); return td::Status::OK();
} }
@ -407,12 +419,17 @@ class StaticBagOfCellsDbLazyImpl : public StaticBagOfCellsDb {
auto buf_slice = td::MutableSlice(buf.data(), buf.size()); auto buf_slice = td::MutableSlice(buf.data(), buf.size());
for (; index_i_ <= idx; index_i_++) { for (; index_i_ <= idx; index_i_++) {
auto offset = td::narrow_cast<size_t>(info_.data_offset + index_offset_); auto offset = td::narrow_cast<size_t>(info_.data_offset + index_offset_);
CHECK(data_.size() >= offset); if (data_.size() < offset) {
return td::Status::Error(PSLICE() << "bag-of-cells error: invalid offset " << offset
<< " (size=" << data_.size() << ")");
}
TRY_RESULT(cell, data_.view(buf_slice.copy().truncate(data_.size() - offset), offset)); TRY_RESULT(cell, data_.view(buf_slice.copy().truncate(data_.size() - offset), offset));
CellSerializationInfo cell_info; CellSerializationInfo cell_info;
TRY_STATUS(cell_info.init(cell, info_.ref_byte_size)); TRY_STATUS(cell_info.init(cell, info_.ref_byte_size));
index_offset_ += cell_info.end_offset; index_offset_ += cell_info.end_offset;
LOG_CHECK((unsigned)info_.offset_byte_size <= 8) << info_.offset_byte_size; if ((unsigned)info_.offset_byte_size > 8) {
return td::Status::Error(PSTRING() << "bag-of-cell error: invalid offset_byte_size " << info_.offset_byte_size);
}
td::uint8 tmp[8]; td::uint8 tmp[8];
info_.write_offset(tmp, index_offset_); info_.write_offset(tmp, index_offset_);
auto guard = index_data_rw_mutex_.lock_write(); auto guard = index_data_rw_mutex_.lock_write();
@ -488,7 +505,10 @@ class StaticBagOfCellsDbLazyImpl : public StaticBagOfCellsDb {
bool should_cache) { bool should_cache) {
deserialize_cell_cnt_.add(1); deserialize_cell_cnt_.add(1);
Ref<Cell> refs[4]; Ref<Cell> refs[4];
CHECK(cell_info.refs_cnt <= 4); if (cell_info.refs_cnt > 4) {
return td::Status::Error(PSLICE() << "invalid bag-of-cells cell #" << idx << " has " << cell_info.refs_cnt
<< " refs");
}
auto* ref_ptr = cell_slice.ubegin() + cell_info.refs_offset; auto* ref_ptr = cell_slice.ubegin() + cell_info.refs_offset;
for (int k = 0; k < cell_info.refs_cnt; k++, ref_ptr += info_.ref_byte_size) { for (int k = 0; k < cell_info.refs_cnt; k++, ref_ptr += info_.ref_byte_size) {
int ref_idx = td::narrow_cast<int>(info_.read_ref(ref_ptr)); int ref_idx = td::narrow_cast<int>(info_.read_ref(ref_ptr));
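
The pattern applied throughout this file is to turn hard CHECK() assertions on untrusted bag-of-cells input into recoverable td::Status errors, so malformed data fails the load instead of aborting the process; a simplified standalone sketch of that validation style (assumed include paths, big-endian offset decoding chosen for illustration):

#include "td/utils/Slice.h"
#include "td/utils/Status.h"
#include "td/utils/StringBuilder.h"  // PSTRING()
#include <cstddef>

// Validates the serialized offset field before decoding it, returning an error
// instead of asserting when the input is malformed.
td::Result<std::size_t> read_offset_checked(td::Slice offset_view, int offset_byte_size) {
  if (offset_byte_size < 1 || offset_byte_size > 8) {
    return td::Status::Error(PSTRING() << "bag-of-cells error: invalid offset_byte_size "
                                       << offset_byte_size);
  }
  if (offset_view.size() != (std::size_t)offset_byte_size) {
    return td::Status::Error(PSTRING() << "bag-of-cells error: invalid offset view size "
                                       << offset_view.size());
  }
  std::size_t offset = 0;
  for (int i = 0; i < offset_byte_size; i++) {
    offset = (offset << 8) | offset_view.ubegin()[i];  // big-endian field
  }
  return offset;
}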

View file

@ -566,7 +566,7 @@ int exec_dict_getnear(VmState* st, unsigned args) {
int exec_pfx_dict_set(VmState* st, Dictionary::SetMode mode, const char* name) { int exec_pfx_dict_set(VmState* st, Dictionary::SetMode mode, const char* name) {
Stack& stack = st->get_stack(); Stack& stack = st->get_stack();
VM_LOG(st) << "execute PFXDICT" << name; VM_LOG(st) << "execute PFXDICT" << name;
stack.check_underflow(3); stack.check_underflow(st->get_global_version() >= 9 ? 4 : 3);
int n = stack.pop_smallint_range(PrefixDictionary::max_key_bits); int n = stack.pop_smallint_range(PrefixDictionary::max_key_bits);
PrefixDictionary dict{stack.pop_maybe_cell(), n}; PrefixDictionary dict{stack.pop_maybe_cell(), n};
auto key_slice = stack.pop_cellslice(); auto key_slice = stack.pop_cellslice();
@ -580,7 +580,7 @@ int exec_pfx_dict_set(VmState* st, Dictionary::SetMode mode, const char* name) {
int exec_pfx_dict_delete(VmState* st) { int exec_pfx_dict_delete(VmState* st) {
Stack& stack = st->get_stack(); Stack& stack = st->get_stack();
VM_LOG(st) << "execute PFXDICTDEL\n"; VM_LOG(st) << "execute PFXDICTDEL\n";
stack.check_underflow(2); stack.check_underflow(st->get_global_version() >= 9 ? 3 : 2);
int n = stack.pop_smallint_range(PrefixDictionary::max_key_bits); int n = stack.pop_smallint_range(PrefixDictionary::max_key_bits);
PrefixDictionary dict{stack.pop_maybe_cell(), n}; PrefixDictionary dict{stack.pop_maybe_cell(), n};
auto key_slice = stack.pop_cellslice(); auto key_slice = stack.pop_cellslice();

View file

@ -279,6 +279,7 @@ int exec_get_global_id(VmState* st) {
int exec_get_gas_fee(VmState* st) { int exec_get_gas_fee(VmState* st) {
VM_LOG(st) << "execute GETGASFEE"; VM_LOG(st) << "execute GETGASFEE";
Stack& stack = st->get_stack(); Stack& stack = st->get_stack();
stack.check_underflow(st->get_global_version() >= 9 ? 2 : 0);
bool is_masterchain = stack.pop_bool(); bool is_masterchain = stack.pop_bool();
td::uint64 gas = stack.pop_long_range(std::numeric_limits<td::int64>::max(), 0); td::uint64 gas = stack.pop_long_range(std::numeric_limits<td::int64>::max(), 0);
block::GasLimitsPrices prices = util::get_gas_prices(get_unpacked_config_tuple(st), is_masterchain); block::GasLimitsPrices prices = util::get_gas_prices(get_unpacked_config_tuple(st), is_masterchain);
@ -289,6 +290,7 @@ int exec_get_gas_fee(VmState* st) {
int exec_get_storage_fee(VmState* st) { int exec_get_storage_fee(VmState* st) {
VM_LOG(st) << "execute GETSTORAGEFEE"; VM_LOG(st) << "execute GETSTORAGEFEE";
Stack& stack = st->get_stack(); Stack& stack = st->get_stack();
stack.check_underflow(st->get_global_version() >= 9 ? 4 : 0);
bool is_masterchain = stack.pop_bool(); bool is_masterchain = stack.pop_bool();
td::int64 delta = stack.pop_long_range(std::numeric_limits<td::int64>::max(), 0); td::int64 delta = stack.pop_long_range(std::numeric_limits<td::int64>::max(), 0);
td::uint64 bits = stack.pop_long_range(std::numeric_limits<td::int64>::max(), 0); td::uint64 bits = stack.pop_long_range(std::numeric_limits<td::int64>::max(), 0);
@ -302,6 +304,7 @@ int exec_get_storage_fee(VmState* st) {
int exec_get_forward_fee(VmState* st) { int exec_get_forward_fee(VmState* st) {
VM_LOG(st) << "execute GETFORWARDFEE"; VM_LOG(st) << "execute GETFORWARDFEE";
Stack& stack = st->get_stack(); Stack& stack = st->get_stack();
stack.check_underflow(st->get_global_version() >= 9 ? 3 : 0);
bool is_masterchain = stack.pop_bool(); bool is_masterchain = stack.pop_bool();
td::uint64 bits = stack.pop_long_range(std::numeric_limits<td::int64>::max(), 0); td::uint64 bits = stack.pop_long_range(std::numeric_limits<td::int64>::max(), 0);
td::uint64 cells = stack.pop_long_range(std::numeric_limits<td::int64>::max(), 0); td::uint64 cells = stack.pop_long_range(std::numeric_limits<td::int64>::max(), 0);
@ -320,6 +323,7 @@ int exec_get_precompiled_gas(VmState* st) {
int exec_get_original_fwd_fee(VmState* st) { int exec_get_original_fwd_fee(VmState* st) {
VM_LOG(st) << "execute GETORIGINALFWDFEE"; VM_LOG(st) << "execute GETORIGINALFWDFEE";
Stack& stack = st->get_stack(); Stack& stack = st->get_stack();
stack.check_underflow(st->get_global_version() >= 9 ? 2 : 0);
bool is_masterchain = stack.pop_bool(); bool is_masterchain = stack.pop_bool();
td::RefInt256 fwd_fee = stack.pop_int_finite(); td::RefInt256 fwd_fee = stack.pop_int_finite();
if (fwd_fee->sgn() < 0) { if (fwd_fee->sgn() < 0) {
@ -333,6 +337,7 @@ int exec_get_original_fwd_fee(VmState* st) {
int exec_get_gas_fee_simple(VmState* st) { int exec_get_gas_fee_simple(VmState* st) {
VM_LOG(st) << "execute GETGASFEESIMPLE"; VM_LOG(st) << "execute GETGASFEESIMPLE";
Stack& stack = st->get_stack(); Stack& stack = st->get_stack();
stack.check_underflow(st->get_global_version() >= 9 ? 2 : 0);
bool is_masterchain = stack.pop_bool(); bool is_masterchain = stack.pop_bool();
td::uint64 gas = stack.pop_long_range(std::numeric_limits<td::int64>::max(), 0); td::uint64 gas = stack.pop_long_range(std::numeric_limits<td::int64>::max(), 0);
block::GasLimitsPrices prices = util::get_gas_prices(get_unpacked_config_tuple(st), is_masterchain); block::GasLimitsPrices prices = util::get_gas_prices(get_unpacked_config_tuple(st), is_masterchain);
@ -343,6 +348,7 @@ int exec_get_gas_fee_simple(VmState* st) {
int exec_get_forward_fee_simple(VmState* st) { int exec_get_forward_fee_simple(VmState* st) {
VM_LOG(st) << "execute GETFORWARDFEESIMPLE"; VM_LOG(st) << "execute GETFORWARDFEESIMPLE";
Stack& stack = st->get_stack(); Stack& stack = st->get_stack();
stack.check_underflow(st->get_global_version() >= 9 ? 3 : 0);
bool is_masterchain = stack.pop_bool(); bool is_masterchain = stack.pop_bool();
td::uint64 bits = stack.pop_long_range(std::numeric_limits<td::int64>::max(), 0); td::uint64 bits = stack.pop_long_range(std::numeric_limits<td::int64>::max(), 0);
td::uint64 cells = stack.pop_long_range(std::numeric_limits<td::int64>::max(), 0); td::uint64 cells = stack.pop_long_range(std::numeric_limits<td::int64>::max(), 0);
@ -373,6 +379,7 @@ void register_ton_config_ops(OpcodeTable& cp0) {
.insert(OpcodeInstr::mksimple(0xf833, 16, "CONFIGOPTPARAM", std::bind(exec_get_config_param, _1, true))) .insert(OpcodeInstr::mksimple(0xf833, 16, "CONFIGOPTPARAM", std::bind(exec_get_config_param, _1, true)))
.insert(OpcodeInstr::mksimple(0xf83400, 24, "PREVMCBLOCKS", std::bind(exec_get_prev_blocks_info, _1, 0, "PREVMCBLOCKS"))->require_version(4)) .insert(OpcodeInstr::mksimple(0xf83400, 24, "PREVMCBLOCKS", std::bind(exec_get_prev_blocks_info, _1, 0, "PREVMCBLOCKS"))->require_version(4))
.insert(OpcodeInstr::mksimple(0xf83401, 24, "PREVKEYBLOCK", std::bind(exec_get_prev_blocks_info, _1, 1, "PREVKEYBLOCK"))->require_version(4)) .insert(OpcodeInstr::mksimple(0xf83401, 24, "PREVKEYBLOCK", std::bind(exec_get_prev_blocks_info, _1, 1, "PREVKEYBLOCK"))->require_version(4))
.insert(OpcodeInstr::mksimple(0xf83402, 24, "PREVMCBLOCKS_100", std::bind(exec_get_prev_blocks_info, _1, 2, "PREVMCBLOCKS_100"))->require_version(9))
.insert(OpcodeInstr::mksimple(0xf835, 16, "GLOBALID", exec_get_global_id)->require_version(4)) .insert(OpcodeInstr::mksimple(0xf835, 16, "GLOBALID", exec_get_global_id)->require_version(4))
.insert(OpcodeInstr::mksimple(0xf836, 16, "GETGASFEE", exec_get_gas_fee)->require_version(6)) .insert(OpcodeInstr::mksimple(0xf836, 16, "GETGASFEE", exec_get_gas_fee)->require_version(6))
.insert(OpcodeInstr::mksimple(0xf837, 16, "GETSTORAGEFEE", exec_get_storage_fee)->require_version(6)) .insert(OpcodeInstr::mksimple(0xf837, 16, "GETSTORAGEFEE", exec_get_storage_fee)->require_version(6))
@ -538,9 +545,10 @@ int exec_hash_ext(VmState* st, unsigned args) {
VM_LOG(st) << "execute HASHEXT" << (append ? "A" : "") << (rev ? "R" : "") << " " << (hash_id == 255 ? -1 : hash_id); VM_LOG(st) << "execute HASHEXT" << (append ? "A" : "") << (rev ? "R" : "") << " " << (hash_id == 255 ? -1 : hash_id);
Stack& stack = st->get_stack(); Stack& stack = st->get_stack();
if (hash_id == 255) { if (hash_id == 255) {
stack.check_underflow(st->get_global_version() >= 9 ? 2 : 0);
hash_id = stack.pop_smallint_range(254); hash_id = stack.pop_smallint_range(254);
} }
int cnt = stack.pop_smallint_range(stack.depth() - 1); int cnt = stack.pop_smallint_range(stack.depth() - 1 - (st->get_global_version() >= 9 ? (int)append : 0));
Hasher hasher{hash_id}; Hasher hasher{hash_id};
size_t total_bits = 0; size_t total_bits = 0;
long long gas_consumed = 0; long long gas_consumed = 0;

View file

@ -22,6 +22,8 @@
#include "vm/log.h" #include "vm/log.h"
#include "vm/vm.h" #include "vm/vm.h"
#include "cp0.h" #include "cp0.h"
#include "memo.h"
#include <sodium.h> #include <sodium.h>
namespace vm { namespace vm {
@ -31,33 +33,8 @@ VmState::VmState() : cp(-1), dispatch(&dummy_dispatch_table), quit0(true, 0), qu
init_cregs(); init_cregs();
} }
VmState::VmState(Ref<CellSlice> _code) VmState::VmState(Ref<CellSlice> _code, int global_version, Ref<Stack> _stack, const GasLimits& gas, int flags,
: code(std::move(_code)), cp(-1), dispatch(&dummy_dispatch_table), quit0(true, 0), quit1(true, 1) { Ref<Cell> _data, VmLog log, std::vector<Ref<Cell>> _libraries, Ref<Tuple> init_c7)
ensure_throw(init_cp(0));
init_cregs();
}
VmState::VmState(Ref<CellSlice> _code, Ref<Stack> _stack, int flags, Ref<Cell> _data, VmLog log,
std::vector<Ref<Cell>> _libraries, Ref<Tuple> init_c7)
: code(std::move(_code))
, stack(std::move(_stack))
, cp(-1)
, dispatch(&dummy_dispatch_table)
, quit0(true, 0)
, quit1(true, 1)
, log(log)
, libraries(std::move(_libraries))
, stack_trace((flags >> 2) & 1) {
ensure_throw(init_cp(0));
set_c4(std::move(_data));
if (init_c7.not_null()) {
set_c7(std::move(init_c7));
}
init_cregs(flags & 1, flags & 2);
}
VmState::VmState(Ref<CellSlice> _code, Ref<Stack> _stack, const GasLimits& gas, int flags, Ref<Cell> _data, VmLog log,
std::vector<Ref<Cell>> _libraries, Ref<Tuple> init_c7)
: code(std::move(_code)) : code(std::move(_code))
, stack(std::move(_stack)) , stack(std::move(_stack))
, cp(-1) , cp(-1)
@ -67,7 +44,8 @@ VmState::VmState(Ref<CellSlice> _code, Ref<Stack> _stack, const GasLimits& gas,
, log(log) , log(log)
, gas(gas) , gas(gas)
, libraries(std::move(_libraries)) , libraries(std::move(_libraries))
, stack_trace((flags >> 2) & 1) { , stack_trace((flags >> 2) & 1)
, global_version(global_version) {
ensure_throw(init_cp(0)); ensure_throw(init_cp(0));
set_c4(std::move(_data)); set_c4(std::move(_data));
if (init_c7.not_null()) { if (init_c7.not_null()) {
@ -102,12 +80,24 @@ void VmState::init_cregs(bool same_c3, bool push_0) {
} }
} }
Ref<CellSlice> VmState::convert_code_cell(Ref<Cell> code_cell) { Ref<CellSlice> VmState::convert_code_cell(Ref<Cell> code_cell, int global_version,
const std::vector<Ref<Cell>>& libraries) {
if (code_cell.is_null()) { if (code_cell.is_null()) {
return {}; return {};
} }
Ref<CellSlice> csr{true, NoVmOrd(), code_cell}; Ref<CellSlice> csr;
if (csr->is_valid()) { if (global_version >= 9) {
// Use DummyVmState instead of this to avoid consuming gas for cell loading
DummyVmState dummy{libraries, global_version};
Guard guard(&dummy);
try {
csr = load_cell_slice_ref(code_cell);
} catch (VmError&) { // NOLINT(*-empty-catch)
}
} else {
csr = td::Ref<CellSlice>{true, NoVmOrd(), code_cell};
}
if (csr.not_null() && csr->is_valid()) {
return csr; return csr;
} }
return load_cell_slice_ref(CellBuilder{}.store_ref(std::move(code_cell)).finalize()); return load_cell_slice_ref(CellBuilder{}.store_ref(std::move(code_cell)).finalize());
@ -577,6 +567,7 @@ int run_vm_code(Ref<CellSlice> code, Ref<Stack>& stack, int flags, Ref<Cell>* da
GasLimits* gas_limits, std::vector<Ref<Cell>> libraries, Ref<Tuple> init_c7, Ref<Cell>* actions_ptr, GasLimits* gas_limits, std::vector<Ref<Cell>> libraries, Ref<Tuple> init_c7, Ref<Cell>* actions_ptr,
int global_version) { int global_version) {
VmState vm{code, VmState vm{code,
global_version,
std::move(stack), std::move(stack),
gas_limits ? *gas_limits : GasLimits{}, gas_limits ? *gas_limits : GasLimits{},
flags, flags,
@ -584,7 +575,6 @@ int run_vm_code(Ref<CellSlice> code, Ref<Stack>& stack, int flags, Ref<Cell>* da
log, log,
std::move(libraries), std::move(libraries),
std::move(init_c7)}; std::move(init_c7)};
vm.set_global_version(global_version);
int res = vm.run(); int res = vm.run();
stack = vm.get_stack_ref(); stack = vm.get_stack_ref();
if (vm.committed() && data_ptr) { if (vm.committed() && data_ptr) {

View file

@ -164,14 +164,12 @@ class VmState final : public VmStateInterface {
bls_pairing_element_gas_price = 11800 bls_pairing_element_gas_price = 11800
}; };
VmState(); VmState();
VmState(Ref<CellSlice> _code); VmState(Ref<CellSlice> _code, int global_version, Ref<Stack> _stack, const GasLimits& _gas, int flags = 0, Ref<Cell> _data = {},
VmState(Ref<CellSlice> _code, Ref<Stack> _stack, int flags = 0, Ref<Cell> _data = {}, VmLog log = {},
std::vector<Ref<Cell>> _libraries = {}, Ref<Tuple> init_c7 = {});
VmState(Ref<CellSlice> _code, Ref<Stack> _stack, const GasLimits& _gas, int flags = 0, Ref<Cell> _data = {},
VmLog log = {}, std::vector<Ref<Cell>> _libraries = {}, Ref<Tuple> init_c7 = {}); VmLog log = {}, std::vector<Ref<Cell>> _libraries = {}, Ref<Tuple> init_c7 = {});
template <typename... Args> VmState(Ref<Cell> _code, int global_version, Ref<Stack> _stack, const GasLimits& _gas, int flags = 0,
VmState(Ref<Cell> code_cell, Args&&... args) Ref<Cell> _data = {}, VmLog log = {}, std::vector<Ref<Cell>> _libraries = {}, Ref<Tuple> init_c7 = {})
: VmState(convert_code_cell(std::move(code_cell)), std::forward<Args>(args)...) { : VmState(convert_code_cell(std::move(_code), global_version, _libraries), global_version, std::move(_stack),
_gas, flags, std::move(_data), std::move(log), _libraries, std::move(init_c7)) {
} }
VmState(const VmState&) = delete; VmState(const VmState&) = delete;
VmState(VmState&&) = default; VmState(VmState&&) = default;
@ -345,9 +343,6 @@ class VmState final : public VmStateInterface {
int get_global_version() const override { int get_global_version() const override {
return global_version; return global_version;
} }
void set_global_version(int version) {
global_version = version;
}
int call(Ref<Continuation> cont); int call(Ref<Continuation> cont);
int call(Ref<Continuation> cont, int pass_args, int ret_args = -1); int call(Ref<Continuation> cont, int pass_args, int ret_args = -1);
int jump(Ref<Continuation> cont); int jump(Ref<Continuation> cont);
@ -382,7 +377,8 @@ class VmState final : public VmStateInterface {
} }
return res; return res;
} }
static Ref<CellSlice> convert_code_cell(Ref<Cell> code_cell); static Ref<CellSlice> convert_code_cell(Ref<Cell> code_cell, int global_version,
const std::vector<Ref<Cell>>& libraries);
bool try_commit(); bool try_commit();
void force_commit(); void force_commit();
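
With the constructor change above, call sites pass the global version directly instead of calling set_global_version() afterwards; a minimal usage sketch modeled on the call sites updated in this commit (include paths and ton::SUPPORTED_VERSION header are assumptions):

#include "vm/vm.h"
#include "vm/cellslice.h"    // vm::load_cell_slice_ref (assumed header path)
#include "ton/ton-types.h"   // ton::SUPPORTED_VERSION (assumed header path)

// Runs a code cell on the given stack with the global version supplied up front.
int run_code(td::Ref<vm::Cell> code_cell, td::Ref<vm::Stack> stack,
             td::Ref<vm::Cell> data, td::Ref<vm::Tuple> c7) {
  vm::GasLimits gas{1'000'000};
  vm::VmState vm{vm::load_cell_slice_ref(code_cell),
                 ton::SUPPORTED_VERSION,  // global version is now a constructor argument
                 std::move(stack),
                 gas,
                 /*flags=*/1,
                 std::move(data),
                 vm::VmLog{},
                 /*libraries=*/{},
                 std::move(c7)};
  return ~vm.run();  // same exit-code convention as the updated call sites
}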

View file

@ -3,6 +3,7 @@ Global version is a parameter specified in `ConfigParam 8` ([block.tlb](https://
Various features are enabled depending on the global version. Various features are enabled depending on the global version.
## Version 4 ## Version 4
New features of version 4 are described in detail in [the documentation](https://docs.ton.org/v3/documentation/tvm/changelog/tvm-upgrade-2023-07).
### New TVM instructions ### New TVM instructions
* `PREVMCBLOCKS`, `PREVKEYBLOCK` * `PREVMCBLOCKS`, `PREVKEYBLOCK`
@ -47,7 +48,7 @@ Version 5 enables higher gas limits for special contracts.
Previously only ticktock transactions had this limit, while ordinary transactions had a default limit of `gas_limit` gas (1M). Previously only ticktock transactions had this limit, while ordinary transactions had a default limit of `gas_limit` gas (1M).
* Gas usage of special contracts is not taken into account when checking block limits. This allows keeping masterchain block limits low * Gas usage of special contracts is not taken into account when checking block limits. This allows keeping masterchain block limits low
while having high gas limits for elector. while having high gas limits for elector.
* Gas limit on `EQD_v9j1rlsuHHw2FIhcsCFFSD367ldfDdCKcsNmNpIRzUlu` is increased to `special_gas_limit * 2` until 2024-02-29. * Gas limit on `EQD_v9j1rlsuHHw2FIhcsCFFSD367ldfDdCKcsNmNpIRzUlu` is increased to 70M (`special_gas_limit * 2`) until 2024-02-29.
See [this post](https://t.me/tonstatus/88) for details. See [this post](https://t.me/tonstatus/88) for details.
### Loading libraries ### Loading libraries
@ -113,14 +114,23 @@ Operations for working with Merkle proofs, where cells can have non-zero level a
## Version 9 ## Version 9
### c7 tuple
c7 tuple parameter number **13** (previous blocks info tuple) now has a third element. It contains the ids of the last 16 masterchain blocks with seqno divisible by 100.
Example: if the last masterchain block seqno is `19071` then the list contains block ids with seqnos `19000`, `18900`, ..., `17500`.
### New TVM instructions ### New TVM instructions
- `SECP256K1_XONLY_PUBKEY_TWEAK_ADD` (`key tweak - 0 or f x y -1`) - performs [`secp256k1_xonly_pubkey_tweak_add`](https://github.com/bitcoin-core/secp256k1/blob/master/include/secp256k1_extrakeys.h#L120). - `SECP256K1_XONLY_PUBKEY_TWEAK_ADD` (`key tweak - 0 or f x y -1`) - performs [`secp256k1_xonly_pubkey_tweak_add`](https://github.com/bitcoin-core/secp256k1/blob/master/include/secp256k1_extrakeys.h#L120).
`key` and `tweak` are 256-bit unsigned integers. 65-byte public key is returned as `uint8 f`, `uint256 x, y` (as in `ECRECOVER`). Gas cost: `1276`. `key` and `tweak` are 256-bit unsigned integers. 65-byte public key is returned as `uint8 f`, `uint256 x, y` (as in `ECRECOVER`). Gas cost: `1276`.
- `mask SETCONTCTRMANY` (`cont - cont'`) - takes continuation, performs the equivalent of `c[i] PUSHCTR SWAP c[i] SETCONTCTR` for each `i` that is set in `mask` (mask is in `0..255`). - `mask SETCONTCTRMANY` (`cont - cont'`) - takes continuation, performs the equivalent of `c[i] PUSHCTR SWAP c[i] SETCONTCTR` for each `i` that is set in `mask` (mask is in `0..255`).
- `SETCONTCTRMANYX` (`cont mask - cont'`) - same as `SETCONTCTRMANY`, but takes `mask` from stack. - `SETCONTCTRMANYX` (`cont mask - cont'`) - same as `SETCONTCTRMANY`, but takes `mask` from stack.
- `PREVMCBLOCKS_100` returns the third element of the previous block info tuple (see above).
### Other changes ### Other changes
- Fix `RAWRESERVE` action with flag `4` (use original balance of the account) by explicitly setting `original_balance` to `balance - msg_balance_remaining`. - Fix `RAWRESERVE` action with flag `4` (use original balance of the account) by explicitly setting `original_balance` to `balance - msg_balance_remaining`.
- Previously it did not work if storage fee was greater than the original balance. - Previously it did not work if storage fee was greater than the original balance.
- Jumps to nested continuations of depth more than 8 consume 1 gas for each subsequent continuation (this does not affect most of TVM code). - Jumps to nested continuations of depth more than 8 consume 1 gas for each subsequent continuation (this does not affect most of TVM code).
- Support extra currencies in reserve action with `+2` mode. - Support extra currencies in reserve action with `+2` mode.
- Fix exception code in some TVM instructions: now `stk_und` has priority over other error codes.
- `PFXDICTADD`, `PFXDICTSET`, `PFXDICTREPLACE`, `PFXDICTDEL`, `GETGASFEE`, `GETSTORAGEFEE`, `GETFORWARDFEE`, `GETORIGINALFWDFEE`, `GETGASFEESIMPLE`, `GETFORWARDFEESIMPLE`, `HASHEXT`
- Now setting the contract code to a library cell does not consume additional gas on execution of the code.
- Temporarily increase the gas limit for some accounts (see [this post](https://t.me/tondev_news/129) for details and `override_gas_limit` in `transaction.cpp` for the list of accounts).

View file

@ -35,7 +35,12 @@ else()
add_library(emulator STATIC ${EMULATOR_SOURCE} ${EMULATOR_HEADERS}) add_library(emulator STATIC ${EMULATOR_SOURCE} ${EMULATOR_HEADERS})
endif() endif()
if (PORTABLE AND NOT APPLE)
target_link_libraries(emulator PUBLIC emulator_static git -static-libgcc -static-libstdc++)
else()
target_link_libraries(emulator PUBLIC emulator_static git) target_link_libraries(emulator PUBLIC emulator_static git)
endif()
generate_export_header(emulator EXPORT_FILE_NAME ${CMAKE_CURRENT_BINARY_DIR}/emulator_export.h) generate_export_header(emulator EXPORT_FILE_NAME ${CMAKE_CURRENT_BINARY_DIR}/emulator_export.h)
target_include_directories(emulator PUBLIC target_include_directories(emulator PUBLIC
$<BUILD_INTERFACE:${CMAKE_CURRENT_BINARY_DIR}> $<BUILD_INTERFACE:${CMAKE_CURRENT_BINARY_DIR}>

View file

@ -2293,7 +2293,7 @@ void TestNode::run_smc_method(int mode, ton::BlockIdExt ref_blk, ton::BlockIdExt
// auto log = create_vm_log(ctx.error_stream ? &ostream_logger : nullptr); // auto log = create_vm_log(ctx.error_stream ? &ostream_logger : nullptr);
vm::GasLimits gas{gas_limit}; vm::GasLimits gas{gas_limit};
LOG(DEBUG) << "creating VM"; LOG(DEBUG) << "creating VM";
vm::VmState vm{code, std::move(stack), gas, 1, data, vm::VmLog()}; vm::VmState vm{code, ton::SUPPORTED_VERSION, std::move(stack), gas, 1, data, vm::VmLog()};
vm.set_c7(liteclient::prepare_vm_c7(info.gen_utime, info.gen_lt, td::make_ref<vm::CellSlice>(acc.addr->clone()), vm.set_c7(liteclient::prepare_vm_c7(info.gen_utime, info.gen_lt, td::make_ref<vm::CellSlice>(acc.addr->clone()),
balance)); // tuple with SmartContractInfo balance)); // tuple with SmartContractInfo
// vm.incr_stack_trace(1); // enable stack dump after each step // vm.incr_stack_trace(1); // enable stack dump after each step

View file

@ -98,7 +98,12 @@ else()
add_library(tonlibjson STATIC ${TONLIB_JSON_SOURCE} ${TONLIB_JSON_HEADERS}) add_library(tonlibjson STATIC ${TONLIB_JSON_SOURCE} ${TONLIB_JSON_HEADERS})
endif() endif()
if (PORTABLE AND NOT APPLE)
target_link_libraries(tonlibjson PRIVATE tonlibjson_private -static-libgcc -static-libstdc++)
else()
target_link_libraries(tonlibjson PRIVATE tonlibjson_private) target_link_libraries(tonlibjson PRIVATE tonlibjson_private)
endif()
generate_export_header(tonlibjson EXPORT_FILE_NAME ${CMAKE_CURRENT_BINARY_DIR}/tonlib/tonlibjson_export.h) generate_export_header(tonlibjson EXPORT_FILE_NAME ${CMAKE_CURRENT_BINARY_DIR}/tonlib/tonlibjson_export.h)
if (!BUILD_SHARED_LIBS) if (!BUILD_SHARED_LIBS)
target_compile_definitions(tonlibjson PUBLIC TONLIBJSON_STATIC_DEFINE) target_compile_definitions(tonlibjson PUBLIC TONLIBJSON_STATIC_DEFINE)

View file

@ -135,8 +135,8 @@ runInfo time_run_vm(td::Slice command, td::Ref<vm::Stack> stack) {
CHECK(stack.is_unique()); CHECK(stack.is_unique());
try { try {
vm::GasLimits gas_limit; vm::GasLimits gas_limit;
vm::VmState vm{vm::load_cell_slice_ref(cell), std::move(stack), gas_limit, 0, {}, vm::VmLog{}, {}, c7}; vm::VmState vm{
vm.set_global_version(ton::SUPPORTED_VERSION); vm::load_cell_slice_ref(cell), ton::SUPPORTED_VERSION, std::move(stack), gas_limit, 0, {}, vm::VmLog{}, {}, c7};
std::clock_t cStart = std::clock(); std::clock_t cStart = std::clock();
int ret = ~vm.run(); int ret = ~vm.run();
std::clock_t cEnd = std::clock(); std::clock_t cEnd = std::clock();

View file

@ -1520,11 +1520,17 @@ void LiteQuery::finish_runSmcMethod(td::BufferSlice shard_proof, td::BufferSlice
libraries.push_back(acc_libs); libraries.push_back(acc_libs);
} }
vm::GasLimits gas{gas_limit, gas_limit}; vm::GasLimits gas{gas_limit, gas_limit};
vm::VmState vm{code, std::move(stack_), gas, 1, std::move(data), vm::VmLog::Null(), std::move(libraries)}; vm::VmState vm{code,
config->get_global_version(),
std::move(stack_),
gas,
1,
std::move(data),
vm::VmLog::Null(),
std::move(libraries)};
auto c7 = prepare_vm_c7(gen_utime, gen_lt, td::make_ref<vm::CellSlice>(acc.addr->clone()), balance, config.get(), auto c7 = prepare_vm_c7(gen_utime, gen_lt, td::make_ref<vm::CellSlice>(acc.addr->clone()), balance, config.get(),
std::move(code), due_payment); std::move(code), due_payment);
vm.set_c7(c7); // tuple with SmartContractInfo vm.set_c7(c7); // tuple with SmartContractInfo
vm.set_global_version(config->get_global_version());
// vm.incr_stack_trace(1); // enable stack dump after each step // vm.incr_stack_trace(1); // enable stack dump after each step
LOG(INFO) << "starting VM to run GET-method of smart contract " << acc_workchain_ << ":" << acc_addr_.to_hex(); LOG(INFO) << "starting VM to run GET-method of smart contract " << acc_workchain_ << ":" << acc_addr_.to_hex();
// **** RUN VM **** // **** RUN VM ****