mirror of https://github.com/ton-blockchain/ton synced 2025-02-12 19:22:37 +00:00

Merge pull request #340 from newton-blockchain/rebase_with_dates

Rebase newton history to original repository
Commit d01bcee5d4 by EmelyanenkoK, 2022-05-02 15:42:54 +03:00 (committed via GitHub)
80 changed files with 9780 additions and 237 deletions


@ -0,0 +1,44 @@
name: Build Docker Ubuntu Image
on:
push:
branches:
- 'master'
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
jobs:
build-and-push:
runs-on: ubuntu-latest
steps:
-
name: Checkout
uses: actions/checkout@v2
-
name: Set up QEMU
uses: docker/setup-qemu-action@v1
-
name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
-
name: Login to GitHub Container Registry
uses: docker/login-action@v1
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
-
name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
push: true
context: ./docker
tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest


@ -0,0 +1,60 @@
name: C/C++ CI macOS-10.15 Compile
on: [push,workflow_dispatch]
jobs:
build:
runs-on: macos-10.15
steps:
- name: Check out repository
uses: actions/checkout@v2
with:
submodules: 'recursive'
- name: Compile OpenSSL
run: |
git clone https://github.com/openssl/openssl openssl_1_1_1
cd openssl_1_1_1
git checkout OpenSSL_1_1_1-stable
./Configure --prefix=/usr/local/macos darwin64-x86_64-cc -static -mmacosx-version-min=10.15
make build_libs -j4
- name: Build all
run: |
rootPath=`pwd`
mkdir build
cd build
cmake -DOPENSSL_FOUND=1 -DOPENSSL_INCLUDE_DIR=$rootPath/openssl_1_1_1/include -DOPENSSL_CRYPTO_LIBRARY=$rootPath/openssl_1_1_1/libcrypto.a -DCMAKE_OSX_DEPLOYMENT_TARGET:STRING=10.15 -DCMAKE_CXX_FLAGS="-stdlib=libc++" -DCMAKE_BUILD_TYPE=Release ..
make -j4
- name: Build native-lib
run: |
rootPath=`pwd`
export JAVA_AWT_LIBRARY=NotNeeded
export JAVA_JVM_LIBRARY=NotNeeded
export JAVA_INCLUDE_PATH=${JAVA_HOME}/include
export JAVA_AWT_INCLUDE_PATH=${JAVA_HOME}/include
export JAVA_INCLUDE_PATH2=${JAVA_HOME}/include/darwin
git fetch --all
git pull --all
git checkout wallets
cd example/android/
mkdir build
cd build
cmake -DOPENSSL_FOUND=1 -DOPENSSL_INCLUDE_DIR=$rootPath/openssl_1_1_1/include -DOPENSSL_CRYPTO_LIBRARY=$rootPath/openssl_1_1_1/libcrypto.a -DCMAKE_OSX_DEPLOYMENT_TARGET:STRING=10.15 -DCMAKE_CXX_FLAGS="-stdlib=libc++" -DCMAKE_BUILD_TYPE=Release -DTON_ONLY_TONLIB=ON ..
cmake --build . --target prepare_cross_compiling
cmake --build . --target native-lib
- name: Find & copy binaries
run: |
mkdir -p artifacts/tonlib-java
rsync -r --exclude 'CMakeFiles' --exclude 'Makefile' --exclude '*.a' --exclude '*.cmake' --exclude 'third-party' --exclude 'test-*' --exclude '*.cc' --exclude '*.json' --exclude '*.txt' build/* artifacts/
cp example/android/src/drinkless/org/ton/TonApi.java artifacts/tonlib-java/
cp example/android/build/libnative-lib.dylib artifacts/tonlib-java/
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
name: ton-macos-binaries
path: artifacts


@ -0,0 +1,66 @@
name: C/C++ CI Ubuntu 18.04 Compile
on: [push,workflow_dispatch]
jobs:
build:
runs-on: ubuntu-18.04
steps:
- name: Check out repository
uses: actions/checkout@v2
with:
submodules: 'recursive'
- name: Run Cppcheck
uses: Bedzior/run-cppcheck@master
with:
enabled checks: all
enable inconclusive: true
generate report: true
- name: Upload report
uses: actions/upload-artifact@v1
with:
name: report
path: output
- name: mkdir
run: |
mkdir build
- name: cmake all
run: |
cd build
cmake -DCMAKE_BUILD_TYPE=Release ..
- name: make all
run: |
cd build
make -j4 fift func tonlib tonlibjson tonlib-cli validator-engine lite-client pow-miner validator-engine-console generate-random-id json2tlo dht-server http-proxy rldp-http-proxy adnl-proxy create-state create-hardfork
- name: build native-lib
run: |
export JAVA_AWT_LIBRARY=NotNeeded
export JAVA_JVM_LIBRARY=NotNeeded
export JAVA_INCLUDE_PATH=${JAVA_HOME}/include
export JAVA_AWT_INCLUDE_PATH=${JAVA_HOME}/include
export JAVA_INCLUDE_PATH2=${JAVA_HOME}/include/linux
git fetch --all
git pull --all
git checkout wallets
cd example/android/
mkdir build
cd build
cmake -DCMAKE_BUILD_TYPE=Release -DTON_ONLY_TONLIB=ON ..
cmake --build . --target prepare_cross_compiling
cmake --build . --target native-lib
- name: find & copy binaries
run: |
mkdir -p artifacts/tonlib-java
cp --parents build/crypto/fift build/crypto/tlbc build/crypto/func build/crypto/create-state build/validator-engine-console/validator-engine-console build/tonlib/tonlib-cli build/tonlib/libtonlibjson.so.0.5 build/http/http-proxy build/rldp-http-proxy/rldp-http-proxy build/dht-server/dht-server build/lite-client/lite-client build/validator-engine/validator-engine build/utils/generate-random-id build/utils/json2tlo build/adnl/adnl-proxy artifacts
cp example/android/src/drinkless/org/ton/TonApi.java artifacts/tonlib-java/
cp example/android/build/libnative-lib.so artifacts/tonlib-java/
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
name: ton-binaries
path: artifacts


@ -0,0 +1,98 @@
name: C/C++ CI Windows Server 2019 x64 Compile
on: [push,workflow_dispatch]
defaults:
run:
shell: cmd
jobs:
build:
runs-on: windows-2019
steps:
- name: Get Current OS version
run: |
systeminfo | findstr /B /C:"OS Name" /C:"OS Version"
- name: Check out current repository
uses: actions/checkout@v2
with:
submodules: 'recursive'
- name: Check out zlib repository
uses: actions/checkout@v2
with:
repository: desktop-app/zlib
path: zlib
- name: Setup msbuild.exe
uses: microsoft/setup-msbuild@v1.0.2
- name: Compile zlib Win64
run: |
cd zlib\contrib\vstudio\vc14
msbuild zlibstat.vcxproj /p:Configuration=Debug /p:platform=x64 -p:PlatformToolset=v142
msbuild zlibstat.vcxproj /p:Configuration=ReleaseWithoutAsm /p:platform=x64 -p:PlatformToolset=v142
- name: Install OpenSSL Win64
run: |
curl -o openssl.msi https://slproweb.com/download/Win64OpenSSL-1_1_1n.msi
msiexec /i openssl.msi /quiet /qn /norestart
- name: Build ton
run: |
set root=%cd%
echo %root%
mkdir build
cd build
cmake -DZLIB_FOUND=1 -DZLIB_INCLUDE_DIR=%root%\zlib -DZLIB_LIBRARY=%root%\zlib\contrib\vstudio\vc14\x64\ZlibStatReleaseWithoutAsm\zlibstat.lib -DCMAKE_CXX_FLAGS="/DTD_WINDOWS=1 /EHsc /bigobj /W0" ..
- name: Compile ton
run: |
cd build
cmake --build . --config Release
continue-on-error: true
- name: Compile native-lib
run: |
set root=%cd%
set JAVA_AWT_LIBRARY=NotNeeded
set JAVA_JVM_LIBRARY=NotNeeded
set JAVA_INCLUDE_PATH=${JAVA_HOME}/include
set JAVA_AWT_INCLUDE_PATH=${JAVA_HOME}/include
set JAVA_INCLUDE_PATH2=${JAVA_HOME}/include/win32
git fetch --all
git pull --all
git checkout wallets
cd example/android/
mkdir build
cd build
cmake -DZLIB_FOUND=1 -DZLIB_INCLUDE_DIR=%root%\zlib -DZLIB_LIBRARY=%root%\zlib\contrib\vstudio\vc14\x64\ZlibStatReleaseWithoutAsm\zlibstat.lib -DTON_ONLY_TONLIB=ON ..
cmake --build . --target native-lib --config Release
- name: Show executables
run: |
cd build
del Release\test-*
dir *.exe /a-D /S /B
dir *.dll /a-D /S /B
- name: Check if validator-engine.exe exists
run: |
set root=%cd%
copy %root%\build\validator-engine\Release\validator-engine.exe test
- name: Find & copy binaries
run: |
mkdir artifacts
for /f %%a in ('dir *.exe /b /a /s') do copy /Y %%a artifacts
copy build\tonlib\Release\tonlibjson.dll artifacts
copy example\android\build\Release\native-lib.dll artifacts
- name: Upload artifacts
uses: actions/upload-artifact@master
with:
name: ton-win64-binaries
path: artifacts

.gitignore (1 line changed)

@ -11,3 +11,4 @@ crypto/smartcont/auto/
test/regression-tests.cache/
*.swp
**/*build*/
.idea


@ -4,6 +4,16 @@ project(TON VERSION 0.5 LANGUAGES C CXX)
set(CMAKE_POSITION_INDEPENDENT_CODE ON)
#set(OPENSSL_USE_STATIC_LIBS TRUE)
# Define the two required variables before including the source code for watching a git repository.
set(PRE_CONFIGURE_FILE "git.cc.in")
set(POST_CONFIGURE_FILE "${CMAKE_CURRENT_BINARY_DIR}/git.cc")
include(git_watcher.cmake)
# Create a library out of the compiled post-configure file.
add_library(git STATIC ${POST_CONFIGURE_FILE})
target_include_directories(git PUBLIC ${CMAKE_CURRENT_SOURCE_DIR})
add_dependencies(git check_git)
# Prevent in-source build
get_filename_component(TON_REAL_SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}" REALPATH)
get_filename_component(TON_REAL_BINARY_DIR "${CMAKE_CURRENT_BINARY_DIR}" REALPATH)

README.md (new file, 10 lines)

@ -0,0 +1,10 @@
## "Soft" Pull Request rules
* Thou shalt not merge your own PRs: at least one other person should review the PR and merge it (4-eyes rule)
* Thou shalt make sure that all workflows have completed cleanly for your PR before considering a merge
## Workflows responsibility
If a CI workflow fails because of workflow issues rather than your changes, try to fix it yourself or contact one of the people listed below via Telegram:
* **C/C++ CI (ccpp-linux.yml)**: TBD
* **C/C++ CI Win64 Compile (ccpp-win64.yml)**: TBD


@ -89,12 +89,12 @@ target_link_libraries(adnl PUBLIC tdactor ton_crypto tl_api tdnet tddb keys keyr
add_executable(adnl-proxy ${ADNL_PROXY_SOURCE})
target_include_directories(adnl-proxy PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/..>)
target_link_libraries(adnl-proxy PUBLIC tdactor ton_crypto tl_api tdnet common
tl-utils)
tl-utils git)
add_executable(adnl-pong adnl-pong.cpp)
target_include_directories(adnl-pong PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/..>)
target_link_libraries(adnl-pong PUBLIC tdactor ton_crypto tl_api tdnet common
tl-utils adnl dht)
tl-utils adnl dht git)
add_library(adnltest STATIC ${ADNL_TEST_SOURCE})
target_include_directories(adnltest PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/..>)
@ -105,4 +105,4 @@ endif()
add_library(adnllite STATIC ${ADNL_LITE_SOURCE})
target_include_directories(adnllite PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/..>)
target_link_libraries(adnllite PUBLIC tdactor ton_crypto tl_lite_api tdnet keys )
target_link_libraries(adnllite PUBLIC tdactor ton_crypto tl_lite_api tdnet keys)


@ -41,6 +41,7 @@
#include "auto/tl/ton_api_json.h"
#include "adnl/adnl.h"
#include <map>
#include "git.h"
#if TD_DARWIN || TD_LINUX
#include <unistd.h>
@ -97,6 +98,10 @@ int main(int argc, char *argv[]) {
int v = VERBOSITY_NAME(FATAL) + (td::to_integer<int>(arg));
SET_VERBOSITY_LEVEL(v);
});
p.add_option('V', "version", "shows adnl-pong build information", [&]() {
std::cout << "adnl-pong build information: [ Commit: " << GitMetadata::CommitSHA1() << ", Date: " << GitMetadata::CommitDate() << "]\n";
std::exit(0);
});
p.add_option('h', "help", "prints_help", [&]() {
char b[10240];
td::StringBuilder sb(td::MutableSlice{b, 10000});


@ -43,6 +43,7 @@
#include "adnl-proxy-types.h"
#include "adnl-received-mask.h"
#include <map>
#include "git.h"
#if TD_DARWIN || TD_LINUX
#include <unistd.h>
@ -303,6 +304,10 @@ int main(int argc, char *argv[]) {
int v = VERBOSITY_NAME(FATAL) + (td::to_integer<int>(arg));
SET_VERBOSITY_LEVEL(v);
});
p.add_option('V', "version", "shows adnl-proxy build information", [&]() {
std::cout << "adnl-proxy build information: [ Commit: " << GitMetadata::CommitSHA1() << ", Date: " << GitMetadata::CommitDate() << "]\n";
std::exit(0);
});
p.add_option('h', "help", "prints_help", [&]() {
char b[10240];
td::StringBuilder sb(td::MutableSlice{b, 10000});


@ -260,7 +260,7 @@ class CoreActor : public CoreActorInterface {
CoreActor() {
}
static int get_arg_iterate(void* cls, enum MHD_ValueKind kind, const char* key, const char* value) {
static MHD_RESULT get_arg_iterate(void* cls, enum MHD_ValueKind kind, const char* key, const char* value) {
auto X = static_cast<std::map<std::string, std::string>*>(cls);
if (key && value && std::strlen(key) > 0 && std::strlen(value) > 0) {
X->emplace(key, urldecode(td::Slice{value}, false));
@ -277,7 +277,7 @@ class CoreActor : public CoreActorInterface {
~HttpRequestExtra() {
MHD_destroy_post_processor(postprocessor);
}
static int iterate_post(void* coninfo_cls, enum MHD_ValueKind kind, const char* key, const char* filename,
static MHD_RESULT iterate_post(void* coninfo_cls, enum MHD_ValueKind kind, const char* key, const char* filename,
const char* content_type, const char* transfer_encoding, const char* data, uint64_t off,
size_t size) {
auto ptr = static_cast<HttpRequestExtra*>(coninfo_cls);
@ -305,10 +305,10 @@ class CoreActor : public CoreActorInterface {
}
}
static int process_http_request(void* cls, struct MHD_Connection* connection, const char* url, const char* method,
static MHD_RESULT process_http_request(void* cls, struct MHD_Connection* connection, const char* url, const char* method,
const char* version, const char* upload_data, size_t* upload_data_size, void** ptr) {
struct MHD_Response* response = nullptr;
int ret;
MHD_RESULT ret;
bool is_post = false;
if (std::strcmp(method, "GET") == 0) {


@ -31,9 +31,21 @@
#include "td/utils/buffer.h"
#include "ton/ton-types.h"
#include "td/utils/port/IPAddress.h"
#include <microhttpd.h>
#define MAX_POST_SIZE (64 << 10)
// Beginning with v0.9.71, libmicrohttpd changed the return type of most
// functions from int to enum MHD_Result
// https://git.gnunet.org/gnunet.git/tree/src/include/gnunet_mhd_compat.h
// proposes to define a constant for the return type so it works well
// with all versions of libmicrohttpd
#if MHD_VERSION >= 0x00097002
#define MHD_RESULT enum MHD_Result
#else
#define MHD_RESULT int
#endif
extern bool local_scripts_;
class CoreActorInterface : public td::actor::Actor {


@ -12,6 +12,6 @@ set(CREATE_HARDFORK_SOURCE
add_executable(create-hardfork ${CREATE_HARDFORK_SOURCE})
target_link_libraries(create-hardfork overlay tdutils tdactor adnl tl_api dht
rldp catchain validatorsession full-node validator-hardfork ton_validator
validator-hardfork fift-lib memprof ${JEMALLOC_LIBRARIES})
validator-hardfork fift-lib memprof git ${JEMALLOC_LIBRARIES})
install(TARGETS create-hardfork RUNTIME DESTINATION bin)


@ -43,6 +43,10 @@
#include "td/utils/filesystem.h"
#include "td/utils/port/path.h"
#include "ton/ton-types.h"
#include "ton/ton-tl.hpp"
#include "ton/ton-io.hpp"
#include "validator/fabric.h"
#include "validator/impl/collator.h"
#include "crypto/vm/cp0.h"
@ -55,6 +59,7 @@
#endif
#include <iostream>
#include <sstream>
#include "git.h"
int verbosity;
@ -76,6 +81,8 @@ class HardforkCreator : public td::actor::Actor {
td::actor::ActorOwn<ton::validator::ValidatorManagerInterface> validator_manager_;
std::string db_root_ = "/var/ton-work/db/";
std::string global_config_;
td::Ref<ton::validator::ValidatorManagerOptions> opts_;
td::BufferSlice bs_;
std::vector<td::BufferSlice> ext_msgs_;
std::vector<td::BufferSlice> top_shard_descrs_;
@ -90,6 +97,9 @@ class HardforkCreator : public td::actor::Actor {
void set_db_root(std::string db_root) {
db_root_ = db_root;
}
void set_global_config_path(std::string path) {
global_config_ = path;
}
void set_shard(ton::ShardIdFull shard) {
LOG(DEBUG) << "setting shard to " << shard.to_str();
shard_ = shard;
@ -140,6 +150,49 @@ class HardforkCreator : public td::actor::Actor {
void do_save_file() {
}
td::Status create_validator_options() {
if(!global_config_.length()) {
opts_ = ton::validator::ValidatorManagerOptions::create(
ton::BlockIdExt{ton::masterchainId, ton::shardIdAll, 0, ton::RootHash::zero(), ton::FileHash::zero()},
ton::BlockIdExt{ton::masterchainId, ton::shardIdAll, 0, ton::RootHash::zero(), ton::FileHash::zero()});
return td::Status::OK();
}
TRY_RESULT_PREFIX(conf_data, td::read_file(global_config_), "failed to read: ");
TRY_RESULT_PREFIX(conf_json, td::json_decode(conf_data.as_slice()), "failed to parse json: ");
ton::ton_api::config_global conf;
TRY_STATUS_PREFIX(ton::ton_api::from_json(conf, conf_json.get_object()), "json does not fit TL scheme: ");
auto zero_state = ton::create_block_id(conf.validator_->zero_state_);
ton::BlockIdExt init_block;
if (!conf.validator_->init_block_) {
LOG(INFO) << "no init block in config. using zero state";
init_block = zero_state;
} else {
init_block = ton::create_block_id(conf.validator_->init_block_);
}
opts_ = ton::validator::ValidatorManagerOptions::create(zero_state, init_block);
std::vector<ton::BlockIdExt> h;
for (auto &x : conf.validator_->hardforks_) {
auto b = ton::create_block_id(x);
if (!b.is_masterchain()) {
return td::Status::Error(ton::ErrorCode::error,
"[validator/hardforks] section contains not masterchain block id");
}
if (!b.is_valid_full()) {
return td::Status::Error(ton::ErrorCode::error, "[validator/hardforks] section contains invalid block_id");
}
for (auto &y : h) {
if (y.is_valid() && y.seqno() >= b.seqno()) {
y.invalidate();
}
}
h.push_back(b);
}
opts_.write().set_hardforks(std::move(h));
return td::Status::OK();
}
void run() {
td::mkdir(db_root_).ensure();
ton::errorlog::ErrorLog::create(db_root_);
@ -148,9 +201,13 @@ class HardforkCreator : public td::actor::Actor {
do_save_file();
}
auto opts = ton::validator::ValidatorManagerOptions::create(
ton::BlockIdExt{ton::masterchainId, ton::shardIdAll, 0, ton::RootHash::zero(), ton::FileHash::zero()},
ton::BlockIdExt{ton::masterchainId, ton::shardIdAll, 0, ton::RootHash::zero(), ton::FileHash::zero()});
auto Sr = create_validator_options();
if (Sr.is_error()) {
LOG(ERROR) << "failed to load global config'" << global_config_ << "': " << Sr;
std::_Exit(2);
}
auto opts = opts_;
opts.write().set_initial_sync_disabled(true);
validator_manager_ =
ton::validator::ValidatorManagerHardforkFactory::create(opts, shard_, shard_top_block_id_, db_root_);
@ -263,8 +320,14 @@ int main(int argc, char *argv[]) {
std::cout << sb.as_cslice().c_str();
std::exit(2);
});
p.add_option('V', "version", "shows create-hardfork build information", [&]() {
std::cout << "create-hardfork build information: [ Commit: " << GitMetadata::CommitSHA1() << ", Date: " << GitMetadata::CommitDate() << "]\n";
std::exit(0);
});
p.add_option('D', "db", "root for dbs",
[&](td::Slice fname) { td::actor::send_closure(x, &HardforkCreator::set_db_root, fname.str()); });
p.add_option('C', "config", "global config path",
[&](td::Slice fname) { td::actor::send_closure(x, &HardforkCreator::set_global_config_path, fname.str()); });
p.add_option('m', "ext-message", "binary file with serialized inbound external message",
[&](td::Slice fname) { td::actor::send_closure(x, &HardforkCreator::load_ext_message, fname.str()); });
p.add_option(


@ -284,7 +284,7 @@ target_link_libraries(fift-lib PUBLIC ton_crypto ton_db tdutils ton_block)
set_target_properties(fift-lib PROPERTIES OUTPUT_NAME fift)
add_executable(fift fift/fift-main.cpp)
target_link_libraries(fift PUBLIC fift-lib)
target_link_libraries(fift PUBLIC fift-lib git)
if (WINGETOPT_FOUND)
target_link_libraries_system(fift wingetopt)
endif()
@ -295,7 +295,7 @@ target_link_libraries(src_parser PUBLIC ton_crypto)
add_executable(func func/func.cpp ${FUNC_LIB_SOURCE})
target_include_directories(func PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>)
target_link_libraries(func PUBLIC ton_crypto src_parser)
target_link_libraries(func PUBLIC ton_crypto src_parser git)
if (WINGETOPT_FOUND)
target_link_libraries_system(func wingetopt)
endif()
@ -312,10 +312,11 @@ target_include_directories(pow-miner-lib PUBLIC $<BUILD_INTERFACE:${CMAKE_CURREN
target_link_libraries(pow-miner-lib PUBLIC ton_crypto ton_block)
add_executable(pow-miner util/pow-miner.cpp)
target_link_libraries(pow-miner PRIVATE ton_crypto ton_block pow-miner-lib)
target_link_libraries(pow-miner PRIVATE ton_crypto ton_block pow-miner-lib git)
if (WINGETOPT_FOUND)
target_link_libraries_system(fift wingetopt)
target_link_libraries_system(pow-miner wingetopt)
endif()
add_library(ton_block ${BLOCK_SOURCE})
@ -404,12 +405,12 @@ add_executable(create-state block/create-state.cpp)
target_include_directories(create-state PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/..>)
if (INTERNAL_COMPILE)
target_link_libraries(create-state PUBLIC ton_crypto fift-lib ton_block tonlib)
target_link_libraries(create-state PUBLIC ton_crypto fift-lib ton_block tonlib git)
else()
if (TONLIB_COMPILE)
target_link_libraries(create-state PUBLIC ton_crypto fift-lib ton_block tonlib)
target_link_libraries(create-state PUBLIC ton_crypto fift-lib ton_block tonlib git)
else()
target_link_libraries(create-state PUBLIC ton_crypto fift-lib ton_block)
target_link_libraries(create-state PUBLIC ton_crypto fift-lib ton_block git)
endif()
endif()
if (WINGETOPT_FOUND)
@ -419,7 +420,7 @@ endif()
add_executable(dump-block block/dump-block.cpp)
target_include_directories(dump-block PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/..>)
target_link_libraries(dump-block PUBLIC ton_crypto fift-lib ton_block)
target_link_libraries(dump-block PUBLIC ton_crypto fift-lib ton_block git)
if (WINGETOPT_FOUND)
target_link_libraries_system(dump-block wingetopt)
endif()
@ -427,18 +428,20 @@ endif()
add_executable(adjust-block block/adjust-block.cpp)
target_include_directories(adjust-block PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/..>)
target_link_libraries(adjust-block PUBLIC ton_crypto fift-lib ton_block)
target_link_libraries(adjust-block PUBLIC ton_crypto fift-lib ton_block git)
if (WINGETOPT_FOUND)
target_link_libraries_system(dump-block wingetopt)
target_link_libraries_system(adjust-block wingetopt)
endif()
add_executable(test-weight-distr block/test-weight-distr.cpp)
target_include_directories(test-weight-distr PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/..>)
target_link_libraries(test-weight-distr PUBLIC ton_crypto fift-lib ton_block)
target_link_libraries(test-weight-distr PUBLIC ton_crypto fift-lib ton_block git)
if (WINGETOPT_FOUND)
target_link_libraries_system(test-weight-distr wingetopt)
endif()
install(TARGETS fift func RUNTIME DESTINATION bin)
install(TARGETS fift func pow-miner RUNTIME DESTINATION bin)
install(DIRECTORY fift/lib/ DESTINATION lib/fift)
install(DIRECTORY smartcont DESTINATION share/ton)


@ -34,6 +34,7 @@
#include "vm/cp0.h"
#include "td/utils/crypto.h"
#include <getopt.h>
#include "git.h"
using td::Ref;
using namespace std::literals::string_literals;
@ -144,7 +145,7 @@ int main(int argc, char* const argv[]) {
int i, vseqno_incr = 1;
int new_verbosity_level = VERBOSITY_NAME(INFO);
std::string in_fname, out_fname;
while ((i = getopt(argc, argv, "hi:v:")) != -1) {
while ((i = getopt(argc, argv, "hi:v:V")) != -1) {
switch (i) {
case 'h':
usage();
@ -156,6 +157,10 @@ int main(int argc, char* const argv[]) {
case 'v':
new_verbosity_level = VERBOSITY_NAME(FATAL) + (verbosity = td::to_integer<int>(td::Slice(optarg)));
break;
case 'V':
std::cout << "adjust-block build information: [ Commit: " << GitMetadata::CommitSHA1() << ", Date: " << GitMetadata::CommitDate() << "]\n";
std::exit(0);
break;
default:
usage();
break;


@ -696,6 +696,13 @@ consensus_config_new#d7 flags:(## 7) { flags = 0 } new_catchain_ids:Bool
fast_attempts:uint32 attempt_duration:uint32 catchain_max_deps:uint32
max_block_bytes:uint32 max_collated_bytes:uint32 = ConsensusConfig;
consensus_config_v3#d8 flags:(## 7) { flags = 0 } new_catchain_ids:Bool
round_candidates:(## 8) { round_candidates >= 1 }
next_candidate_delay_ms:uint32 consensus_timeout_ms:uint32
fast_attempts:uint32 attempt_duration:uint32 catchain_max_deps:uint32
max_block_bytes:uint32 max_collated_bytes:uint32
proto_version:uint16 = ConsensusConfig;
_ CatchainConfig = ConfigParam 28;
_ ConsensusConfig = ConfigParam 29;
@ -711,6 +718,20 @@ validator_temp_key#3 adnl_addr:bits256 temp_public_key:SigPubKey seqno:# valid_u
signed_temp_key#4 key:^ValidatorTempKey signature:CryptoSignature = ValidatorSignedTempKey;
_ (HashmapE 256 ValidatorSignedTempKey) = ConfigParam 39;
misbehaviour_punishment_config_v1#01
default_flat_fine:Grams default_proportional_fine:uint32
severity_flat_mult:uint16 severity_proportional_mult:uint16
unpunishable_interval:uint16
long_interval:uint16 long_flat_mult:uint16 long_proportional_mult:uint16
medium_interval:uint16 medium_flat_mult:uint16 medium_proportional_mult:uint16
= MisbehaviourPunishmentConfig;
_ MisbehaviourPunishmentConfig = ConfigParam 40;
oracle_bridge_params#_ bridge_address:bits256 oracle_mutlisig_address:bits256 oracles:(HashmapE 256 uint256) external_chain_address:bits256 = OracleBridgeParams;
_ OracleBridgeParams = ConfigParam 71; // Ethereum bridge
_ OracleBridgeParams = ConfigParam 72; // Binance Smart Chain bridge
_ OracleBridgeParams = ConfigParam 73; // Polygon bridge
//
// PROOFS
//


@ -59,6 +59,7 @@
#include "block-parse.h"
#include "block-auto.h"
#include "mc-config.h"
#include "git.h"
#if defined(_INTERNAL_COMPILE) || defined(_TONLIB_COMPILE)
#define WITH_TONLIB
@ -804,7 +805,8 @@ void usage(const char* progname) {
"\t-I<source-search-path>\tSets colon-separated library source include path. If not indicated, "
"$FIFTPATH is used instead.\n"
"\t-L<library-fif-file>\tPre-loads a library source file\n"
"\t-v<verbosity-level>\tSet verbosity level\n";
"\t-v<verbosity-level>\tSet verbosity level\n"
"\t-V<version>\tShow create-state build information\n";
std::exit(2);
}
@ -842,7 +844,7 @@ int main(int argc, char* const argv[]) {
int i;
int new_verbosity_level = VERBOSITY_NAME(INFO);
while (!script_mode && (i = getopt(argc, argv, "hinsI:L:v:")) != -1) {
while (!script_mode && (i = getopt(argc, argv, "hinsI:L:v:V")) != -1) {
switch (i) {
case 'i':
interactive = true;
@ -864,6 +866,10 @@ int main(int argc, char* const argv[]) {
case 'v':
new_verbosity_level = VERBOSITY_NAME(FATAL) + (verbosity = td::to_integer<int>(td::Slice(optarg)));
break;
case 'V':
std::cout << "create-state build information: [ Commit: " << GitMetadata::CommitSHA1() << ", Date: " << GitMetadata::CommitDate() << "]\n";
std::exit(0);
break;
case 'h':
default:
usage(argv[0]);


@ -34,6 +34,7 @@
#include "mc-config.h"
#include "vm/cp0.h"
#include <getopt.h>
#include "git.h"
using td::Ref;
using namespace std::literals::string_literals;
@ -246,7 +247,8 @@ td::Status test_vset() {
void usage() {
std::cout << "usage: dump-block [-t<typename>][-S][<boc-file>]\n\tor dump-block -h\n\tDumps specified blockchain "
"block or state "
"from <boc-file>, or runs some tests\n\t-S\tDump a blockchain state instead of a block\n";
"from <boc-file>, or runs some tests\n\t-S\tDump a blockchain state instead of a block\n"
"\t-V<version>\tShow fift build information\n";
std::exit(2);
}
@ -259,7 +261,7 @@ int main(int argc, char* const argv[]) {
bool store_loaded = false;
int dump = 3;
auto zerostate = std::make_unique<block::ZerostateInfo>();
while ((i = getopt(argc, argv, "CSt:hqv:")) != -1) {
while ((i = getopt(argc, argv, "CSt:hqv:V")) != -1) {
switch (i) {
case 'C':
type = &block::gen::t_VmCont;
@ -280,6 +282,10 @@ int main(int argc, char* const argv[]) {
store_loaded = true;
dump = 0;
break;
case 'V':
std::cout << "dump-block build information: [ Commit: " << GitMetadata::CommitSHA1() << ", Date: " << GitMetadata::CommitDate() << "]\n";
std::exit(0);
break;
case 'h':
usage();
std::exit(2);


@ -310,7 +310,7 @@ td::Status Config::visit_validator_params() const {
ton::ValidatorSessionConfig Config::get_consensus_config() const {
auto cc = get_config_param(29);
ton::ValidatorSessionConfig c;
auto set = [&c](auto& r, bool new_cc_ids) {
auto set = [&c](auto& r) {
c.catchain_idle_timeout = r.consensus_timeout_ms * 0.001;
c.catchain_max_deps = r.catchain_max_deps;
c.round_candidates = r.round_candidates;
@ -319,13 +319,23 @@ ton::ValidatorSessionConfig Config::get_consensus_config() const {
c.max_round_attempts = r.fast_attempts;
c.max_block_size = r.max_block_bytes;
c.max_collated_data_size = r.max_collated_bytes;
c.new_catchain_ids = new_cc_ids;
return true;
};
auto set_new_cc_ids = [&c] (auto& r) {
c.new_catchain_ids = r.new_catchain_ids;
return true;
};
auto set_proto = [&c](auto& r) {
c.proto_version = r.proto_version;
return true;
};
if (cc.not_null()) {
block::gen::ConsensusConfig::Record_consensus_config_v3 r2;
block::gen::ConsensusConfig::Record_consensus_config_new r1;
block::gen::ConsensusConfig::Record_consensus_config r0;
(tlb::unpack_cell(cc, r1) && set(r1, r1.new_catchain_ids)) || (tlb::unpack_cell(cc, r0) && set(r0, false));
(tlb::unpack_cell(cc, r2) && set(r2) && set_new_cc_ids(r2) && set_proto(r2)) ||
(tlb::unpack_cell(cc, r1) && set(r1) && set_new_cc_ids(r1)) ||
(tlb::unpack_cell(cc, r0) && set(r0));
}
return c;
}


@ -395,6 +395,7 @@ bool Account::init_new(ton::UnixTime now) {
}
state_hash = addr_orig;
status = orig_status = acc_nonexist;
split_depth_set_ = false;
created = true;
return true;
}
@ -2243,6 +2244,8 @@ Ref<vm::Cell> Transaction::commit(Account& acc) {
acc.tock = new_tock;
} else {
acc.tick = acc.tock = false;
acc.split_depth_set_ = false;
acc.created = true;
}
end_lt = 0;
acc.push_transaction(root, start_lt);


@ -1312,7 +1312,10 @@ bool AnyIntView<Tr>::mod_div_any(const AnyIntView<Tr>& yp, AnyIntView<Tr>& quot,
if (k > quot.max_size()) {
return invalidate_bool();
}
quot.set_size(k);
quot.set_size(max(k,1));
for(int qi=0; qi< max(k,1); qi++) {
quot.digits[qi]=0;
}
} else {
if (k >= quot.max_size()) {
return invalidate_bool();


@ -53,6 +53,8 @@
#include "td/utils/Parser.h"
#include "td/utils/port/path.h"
#include "git.h"
void usage(const char* progname) {
std::cerr << "A simple Fift interpreter. Type `bye` to quit, or `words` to get a list of all commands\n";
std::cerr
@ -65,7 +67,8 @@ void usage(const char* progname) {
"\t-L<library-fif-file>\tPre-loads a library source file\n"
"\t-d<ton-db-path>\tUse a ton database\n"
"\t-s\tScript mode: use first argument as a fift source file and import remaining arguments as $n)\n"
"\t-v<verbosity-level>\tSet verbosity level\n";
"\t-v<verbosity-level>\tSet verbosity level\n"
"\t-V<version>\tShow fift build information\n";
std::exit(2);
}
@ -92,7 +95,7 @@ int main(int argc, char* const argv[]) {
int i;
int new_verbosity_level = VERBOSITY_NAME(INFO);
while (!script_mode && (i = getopt(argc, argv, "hinI:L:d:sv:")) != -1) {
while (!script_mode && (i = getopt(argc, argv, "hinI:L:d:sv:V")) != -1) {
switch (i) {
case 'i':
interactive = true;
@ -116,6 +119,11 @@ int main(int argc, char* const argv[]) {
case 'v':
new_verbosity_level = VERBOSITY_NAME(FATAL) + td::to_integer<int>(td::Slice(optarg));
break;
case 'V':
std::cout << "Fift build information: [ Commit: " << GitMetadata::CommitSHA1() << ", Date: " << GitMetadata::CommitDate() << "]\n";
std::exit(0);
break;
case 'h':
default:
usage(argv[0]);


@ -31,6 +31,7 @@
#include "parser/symtable.h"
#include <getopt.h>
#include <fstream>
#include "git.h"
namespace funC {
@ -171,7 +172,8 @@ void usage(const char* progname) {
"-S\tInclude stack layout comments in the output code\n"
"-R\tInclude operation rewrite comments in the output code\n"
"-W<output-boc-file>\tInclude Fift code to serialize and save generated code into specified BoC file. Enables "
"-A and -P.\n";
"-A and -P.\n"
"\t-V<version>\tShow func build information\n";
std::exit(2);
}
@ -180,7 +182,7 @@ std::string output_filename;
int main(int argc, char* const argv[]) {
int i;
bool interactive = false;
while ((i = getopt(argc, argv, "Ahi:Io:O:PRSvW:")) != -1) {
while ((i = getopt(argc, argv, "Ahi:Io:O:PRSvW:V")) != -1) {
switch (i) {
case 'A':
funC::asm_preamble = true;
@ -213,6 +215,10 @@ int main(int argc, char* const argv[]) {
funC::boc_output_filename = optarg;
funC::asm_preamble = funC::program_envelope = true;
break;
case 'V':
std::cout << "Func build information: [ Commit: " << GitMetadata::CommitSHA1() << ", Date: " << GitMetadata::CommitDate() << "]\n";
std::exit(0);
break;
case 'h':
default:
usage(argv[0]);


@ -601,7 +601,9 @@ _ unpack_proposal(slice pstatus) inline_ref {
voters_list = cons(voter_id, voters_list);
}
} until (~ f);
var (rounds_remaining, losses, wins) = (rest~load_uint(8), rest~load_uint(8), rest~load_uint(8));
;; Note there is a bug in config contract currently deployed in testnet2:
;; wins and losses are messed up
var (rounds_remaining, wins, losses) = (rest~load_uint(8), rest~load_uint(8), rest~load_uint(8));
rest.end_parse();
var (param_id, param_val, param_hash) = parse_config_proposal(proposal);
return [expires, critical?, [param_id, param_val, param_hash], vset_id, voters_list, weight_remaining, rounds_remaining, losses, wins];


@ -16,7 +16,7 @@ begin-options
"Creates a critical parameter change proposal" option-help
"x" "--expires-in" { parse-int =: expire-in } short-long-option-arg
"Sets proposal expiration time in seconds (default " expire-in (.) $+ +")" option-help
"H" "--old-hash" { (hex-number) not abort"256-bit hex number expected as hash" =: old-hash }
"H" "--old-hash" { (hex-number) 1 = not .s abort"256-bit hex number expected as hash" =: old-hash }
short-long-option-arg
"Sets the required cell hash of existing parameter value (0 means no value)" option-help
"h" "--help" { usage } short-long-option


@ -36,6 +36,7 @@ class WalletInterface : public SmartContract {
struct Gift {
block::StdAddress destination;
td::int64 gramms;
td::int32 send_mode{-1};
bool is_encrypted{false};
std::string message;


@ -40,6 +40,9 @@ td::Result<td::Ref<vm::Cell>> WalletV3::make_a_gift_message(const td::Ed25519::P
if (gift.gramms == -1) {
send_mode += 128;
}
if (gift.send_mode > -1) {
send_mode = gift.send_mode;
}
cb.store_long(send_mode, 8).store_ref(create_int_message(gift));
}


@ -37,7 +37,7 @@
#include <thread>
#include <cstdlib>
#include <getopt.h>
#include "git.h"
#include "Miner.h"
const char* progname;
@ -46,7 +46,7 @@ int usage() {
std::cerr
<< "usage: " << progname
<< " [-v][-B][-w<threads>] [-t<timeout>] <my-address> <pow-seed> <pow-complexity> <iterations> [<miner-addr> "
"<output-ext-msg-boc>]\n"
"<output-ext-msg-boc>] [-V]\n"
"Outputs a valid <rdata> value for proof-of-work testgiver after computing at most <iterations> hashes "
"or terminates with non-zero exit code\n";
std::exit(2);
@ -156,14 +156,14 @@ int main(int argc, char* const argv[]) {
progname = argv[0];
int i, threads = 0;
bool bounce = false, benchmark = false;
while ((i = getopt(argc, argv, "bnvw:t:Bh")) != -1) {
while ((i = getopt(argc, argv, "bnvw:t:Bh:V")) != -1) {
switch (i) {
case 'v':
++verbosity;
break;
case 'w':
threads = atoi(optarg);
CHECK(threads > 0 && threads <= 128);
CHECK(threads > 0 && threads <= 256);
break;
case 't': {
int timeout = atoi(optarg);
@ -180,6 +180,10 @@ int main(int argc, char* const argv[]) {
case 'n':
bounce = false;
break;
case 'V':
std::cout << "pow-miner build information: [ Commit: " << GitMetadata::CommitSHA1() << ", Date: " << GitMetadata::CommitDate() << "]\n";
std::exit(0);
break;
case 'h':
return usage();
default:


@ -34,7 +34,7 @@ class DataCell : public Cell {
td::bitstring::bits_store_long(dest, depth, depth_bits);
}
static td::uint16 load_depth(const td::uint8* src) {
return td::bitstring::bits_load_ulong(src, depth_bits) & 0xff;
return td::bitstring::bits_load_ulong(src, depth_bits) & 0xffff;
}
protected:


@ -11,4 +11,6 @@ set (DHT_SERVER_SOURCE
)
add_executable (dht-server ${DHT_SERVER_SOURCE})
target_link_libraries(dht-server tdutils tdactor adnl tl_api dht memprof ${JEMALLOC_LIBRARIES})
target_link_libraries(dht-server tdutils tdactor adnl tl_api dht memprof git ${JEMALLOC_LIBRARIES})
install(TARGETS dht-server RUNTIME DESTINATION bin)


@ -48,6 +48,7 @@
#include <sstream>
#include <cstdlib>
#include <set>
#include "git.h"
Config::Config() {
out_port = 3278;
@ -1182,6 +1183,10 @@ int main(int argc, char *argv[]) {
int v = VERBOSITY_NAME(FATAL) + (td::to_integer<int>(arg));
SET_VERBOSITY_LEVEL(v);
});
p.add_option('V', "version", "shows dht-server build information", [&]() {
std::cout << "dht-server build information: [ Commit: " << GitMetadata::CommitSHA1() << ", Date: " << GitMetadata::CommitDate() << "]\n";
std::exit(0);
});
p.add_option('h', "help", "prints_help", [&]() {
char b[10240];
td::StringBuilder sb(td::MutableSlice{b, 10000});

docker/Dockerfile (new file, 32 lines)

@ -0,0 +1,32 @@
FROM ubuntu:20.04 as builder
RUN apt-get update && \
DEBIAN_FRONTEND=noninteractive apt-get install -y build-essential cmake clang-6.0 openssl libssl-dev zlib1g-dev gperf wget git && \
rm -rf /var/lib/apt/lists/*
ENV CC clang-6.0
ENV CXX clang++-6.0
WORKDIR /
RUN git clone --recursive https://github.com/newton-blockchain/ton
WORKDIR /ton
RUN mkdir build && \
cd build && \
cmake .. -DCMAKE_BUILD_TYPE=Release && \
make -j 4
FROM ubuntu:20.04
RUN apt-get update && \
apt-get install -y openssl wget&& \
rm -rf /var/lib/apt/lists/*
RUN mkdir -p /var/ton-work/db && \
mkdir -p /var/ton-work/db/static
COPY --from=builder /ton/build/lite-client/lite-client /usr/local/bin/
COPY --from=builder /ton/build/validator-engine/validator-engine /usr/local/bin/
COPY --from=builder /ton/build/validator-engine-console/validator-engine-console /usr/local/bin/
COPY --from=builder /ton/build/utils/generate-random-id /usr/local/bin/
WORKDIR /var/ton-work/db
COPY init.sh control.template ./
RUN chmod +x init.sh
ENTRYPOINT ["/var/ton-work/db/init.sh"]

docker/README.md (new file, 28 lines)

@ -0,0 +1,28 @@
# The Open Network Node
Dockerfile for The Open Network Node
#### Install
```docker pull ghcr.io/newton-blockchain/ton```
#### Create volume
```docker volume create ton-db```
#### Run
```docker run -d --name ton-node --mount source=ton-db,target=/var/ton-work/db --network host -e "PUBLIC_IP=<YOUR_PUBLIC_IP>" -e "CONSOLE_PORT=<TCP-PORT1>" -e "LITESERVER=true" -e "LITE_PORT=<TCP-PORT2>" -it ghcr.io/newton-blockchain/ton```
If you don't need a liteserver, remove `-e "LITESERVER=true"` from the run command.
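For reference, the same run configuration can be written as a minimal docker-compose sketch. This assumes the image above and the default ports that init.sh falls back to (43678 for the console, 43679 for the liteserver); adjust them to your setup.
```yaml
# Minimal docker-compose sketch, equivalent to the docker run command above.
version: "3"
services:
  ton-node:
    image: ghcr.io/newton-blockchain/ton:latest
    network_mode: host
    volumes:
      - ton-db:/var/ton-work/db
    environment:
      PUBLIC_IP: "<YOUR_PUBLIC_IP>"
      CONSOLE_PORT: "43678"
      LITESERVER: "true"   # drop this variable if you don't need a liteserver
      LITE_PORT: "43679"
volumes:
  ton-db:
    external: true         # created beforehand with `docker volume create ton-db`
```
Start it with `docker compose up -d`.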
#### Use
```docker exec -ti <container-id> /bin/bash```
```./validator-engine-console -k client -p server.pub -a <IP>:<TCP-PORT1>```
The IP:PORT pair is printed when the container starts.
#### Lite-client
To use lite-client, first copy liteserver.pub out of the container.
```docker cp <container-id>:/var/ton-work/db/liteserver.pub /your/path```
Then you can connect to it; make sure you use the right port, which is different from the full node console port.
```lite-client -a <IP>:<TCP-PORT2> -p liteserver.pub```

docker/control.template (new file, 9 lines)

@ -0,0 +1,9 @@
"control" : [
{ "id" : SERVER-ID,
"port" : CONSOLE-PORT,
"allowed" : [
{ "id" : CLIENT-ID,
"permissions" : 15
}
]
}

docker/init.sh (new file, 69 lines)

@ -0,0 +1,69 @@
#!/usr/bin/env bash
# global config
if [ ! -z "$GCONFURL" ]; then
echo -e "\e[1;32m[+]\e[0m Downloading provided global config."
wget -q $GCONFURL -O /var/ton-work/db/ton-global.config
else
echo -e "\e[1;33m[=]\e[0m No global config provided, downloading default."
wget -q https://api.tontech.io/ton/wallet-mainnet.autoconf.json -O /var/ton-work/db/ton-global.config
fi
# Init local config with IP:PORT
if [ ! -z "$PUBLIC_IP" ]; then
if [ -z "$CONSOLE_PORT" ]; then
CONSOLE_PORT="43678"
fi
echo -e "\e[1;32m[+]\e[0m Using provided IP: $PUBLIC_IP:$CONSOLE_PORT"
validator-engine -C /var/ton-work/db/ton-global.config --db /var/ton-work/db --ip "$PUBLIC_IP:$CONSOLE_PORT"
else
echo -e "\e[1;31m[!]\e[0m No IP:PORT provided, exiting"
exit 1
fi
# Generating server certificate
if [ -f "./server" ]; then
echo -e "\e[1;33m[=]\e[0m Found existing server certificate, skipping"
else
echo -e "\e[1;32m[+]\e[0m Generating and installing server certificate for remote control"
read -r SERVER_ID1 SERVER_ID2 <<< $(generate-random-id -m keys -n server)
echo "Server IDs: $SERVER_ID1 $SERVER_ID2"
cp server /var/ton-work/db/keyring/$SERVER_ID1
fi
# Generating client certificate
if [ -f "./client" ]; then
echo -e "\e[1;33m[=]\e[0m Found existing client certificate, skipping"
else
read -r CLIENT_ID1 CLIENT_ID2 <<< $(generate-random-id -m keys -n client)
echo -e "\e[1;32m[+]\e[0m Generated client private certificate $CLIENT_ID1 $CLIENT_ID2"
echo -e "\e[1;32m[+]\e[0m Generated client public certificate"
# Adding client permissions
sed -e "s/CONSOLE-PORT/\"$(printf "%q" $CONSOLE_PORT)\"/g" -e "s~SERVER-ID~\"$(printf "%q" $SERVER_ID2)\"~g" -e "s~CLIENT-ID~\"$(printf "%q" $CLIENT_ID2)\"~g" control.template > control.new
sed -e "s~\"control\"\ \:\ \[~$(printf "%q" $(cat control.new))~g" config.json > config.json.new
mv config.json.new config.json
fi
# Liteserver
if [ -z "$LITESERVER" ]; then
echo -e "\e[1;33m[=]\e[0m Liteserver disabled"
else
if [ -f "./liteserver" ]; then
echo -e "\e[1;33m[=]\e[0m Found existing liteserver certificate, skipping"
else
echo -e "\e[1;32m[+]\e[0m Generating and installing liteserver certificate for remote control"
read -r LITESERVER_ID1 LITESERVER_ID2 <<< $(generate-random-id -m keys -n liteserver)
echo "Liteserver IDs: $LITESERVER_ID1 $LITESERVER_ID2"
cp liteserver /var/ton-work/db/keyring/$LITESERVER_ID1
if [ -z "$LITE_PORT" ]; then
LITE_PORT="43679"
fi
LITESERVERS=$(printf "%q" "\"liteservers\":[{\"id\":\"$LITESERVER_ID2\",\"port\":\"$LITE_PORT\"}")
sed -e "s~\"liteservers\"\ \:\ \[~$LITESERVERS~g" config.json > config.json.liteservers
mv config.json.liteservers config.json
fi
fi
echo -e "\e[1;32m[+]\e[0m Running validator-engine"
exec validator-engine -c /var/ton-work/db/config.json -C /var/ton-work/db/ton-global.config --db /var/ton-work/db

git.cc.in (new file, 29 lines)

@ -0,0 +1,29 @@
#include "git.h"
bool GitMetadata::Populated() {
return @GIT_RETRIEVED_STATE@;
}
bool GitMetadata::AnyUncommittedChanges() {
return @GIT_IS_DIRTY@;
}
std::string GitMetadata::AuthorName() {
return "@GIT_AUTHOR_NAME@";
}
std::string GitMetadata::AuthorEmail() {
return "@GIT_AUTHOR_EMAIL@";
}
std::string GitMetadata::CommitSHA1() {
return "@GIT_HEAD_SHA1@";
}
std::string GitMetadata::CommitDate() {
return "@GIT_COMMIT_DATE_ISO8601@";
}
std::string GitMetadata::CommitSubject() {
return "@GIT_COMMIT_SUBJECT@";
}
std::string GitMetadata::CommitBody() {
return @GIT_COMMIT_BODY@;
}
std::string GitMetadata::Describe() {
return "@GIT_DESCRIBE@";
}

git.h (new file, 29 lines)

@ -0,0 +1,29 @@
#pragma once
#include <string>
class GitMetadata {
public:
// Is the metadata populated? We may not have metadata if
// there wasn't a .git directory (e.g. downloaded source
// code without revision history).
static bool Populated();
// Were there any uncommitted changes that won't be reflected
// in the CommitID?
static bool AnyUncommittedChanges();
// The commit author's name.
static std::string AuthorName();
// The commit author's email.
static std::string AuthorEmail();
// The commit SHA1.
static std::string CommitSHA1();
// The ISO8601 commit date.
static std::string CommitDate();
// The commit subject.
static std::string CommitSubject();
// The commit body.
static std::string CommitBody();
// The commit describe.
static std::string Describe();
};

git_watcher.cmake (new file, 314 lines)

@ -0,0 +1,314 @@
# git_watcher.cmake
# https://raw.githubusercontent.com/andrew-hardin/cmake-git-version-tracking/master/git_watcher.cmake
#
# Released under the MIT License.
# https://raw.githubusercontent.com/andrew-hardin/cmake-git-version-tracking/master/LICENSE
# This file defines a target that monitors the state of a git repo.
# If the state changes (e.g. a commit is made), then a file gets reconfigured.
# Here are the primary variables that control script behavior:
#
# PRE_CONFIGURE_FILE (REQUIRED)
# -- The path to the file that'll be configured.
#
# POST_CONFIGURE_FILE (REQUIRED)
# -- The path to the configured PRE_CONFIGURE_FILE.
#
# GIT_STATE_FILE (OPTIONAL)
# -- The path to the file used to store the previous build's git state.
# Defaults to the current binary directory.
#
# GIT_WORKING_DIR (OPTIONAL)
# -- The directory from which git commands will be run.
# Defaults to the directory with the top level CMakeLists.txt.
#
# GIT_EXECUTABLE (OPTIONAL)
# -- The path to the git executable. It'll automatically be set if the
# user doesn't supply a path.
#
# DESIGN
# - This script was designed similar to a Python application
# with a Main() function. I wanted to keep it compact to
# simplify "copy + paste" usage.
#
# - This script is invoked under two CMake contexts:
# 1. Configure time (when build files are created).
# 2. Build time (called via CMake -P).
# The first invocation is what registers the script to
# be executed at build time.
#
# MODIFICATIONS
# You may wish to track other git properties like when the last
# commit was made. There are two sections you need to modify,
# and they're tagged with a ">>>" header.
# Short hand for converting paths to absolute.
macro(PATH_TO_ABSOLUTE var_name)
get_filename_component(${var_name} "${${var_name}}" ABSOLUTE)
endmacro()
# Check that a required variable is set.
macro(CHECK_REQUIRED_VARIABLE var_name)
if(NOT DEFINED ${var_name})
message(FATAL_ERROR "The \"${var_name}\" variable must be defined.")
endif()
PATH_TO_ABSOLUTE(${var_name})
endmacro()
# Check that an optional variable is set, or, set it to a default value.
macro(CHECK_OPTIONAL_VARIABLE var_name default_value)
if(NOT DEFINED ${var_name})
set(${var_name} ${default_value})
endif()
PATH_TO_ABSOLUTE(${var_name})
endmacro()
CHECK_REQUIRED_VARIABLE(PRE_CONFIGURE_FILE)
CHECK_REQUIRED_VARIABLE(POST_CONFIGURE_FILE)
CHECK_OPTIONAL_VARIABLE(GIT_STATE_FILE "${CMAKE_BINARY_DIR}/git-state-hash")
CHECK_OPTIONAL_VARIABLE(GIT_WORKING_DIR "${CMAKE_SOURCE_DIR}")
# Check the optional git variable.
# If it's not set, we'll try to find it using the CMake packaging system.
if(NOT DEFINED GIT_EXECUTABLE)
find_package(Git QUIET REQUIRED)
endif()
CHECK_REQUIRED_VARIABLE(GIT_EXECUTABLE)
set(_state_variable_names
GIT_RETRIEVED_STATE
GIT_HEAD_SHA1
GIT_IS_DIRTY
GIT_AUTHOR_NAME
GIT_AUTHOR_EMAIL
GIT_COMMIT_DATE_ISO8601
GIT_COMMIT_SUBJECT
GIT_COMMIT_BODY
GIT_DESCRIBE
# >>>
# 1. Add the name of the additional git variable you're interested in monitoring
# to this list.
)
# Macro: RunGitCommand
# Description: short-hand macro for calling a git function. Outputs are the
# "exit_code" and "output" variables.
macro(RunGitCommand)
execute_process(COMMAND
"${GIT_EXECUTABLE}" ${ARGV}
WORKING_DIRECTORY "${_working_dir}"
RESULT_VARIABLE exit_code
OUTPUT_VARIABLE output
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE)
if(NOT exit_code EQUAL 0)
set(ENV{GIT_RETRIEVED_STATE} "false")
endif()
endmacro()
# Function: GetGitState
# Description: gets the current state of the git repo.
# Args:
# _working_dir (in) string; the directory from which git commands will be executed.
function(GetGitState _working_dir)
# This is an error code that'll be set to FALSE if the
# RunGitCommand ever returns a non-zero exit code.
set(ENV{GIT_RETRIEVED_STATE} "true")
# Get whether or not the working tree is dirty.
RunGitCommand(status --porcelain)
if(NOT exit_code EQUAL 0)
set(ENV{GIT_IS_DIRTY} "false")
else()
if(NOT "${output}" STREQUAL "")
set(ENV{GIT_IS_DIRTY} "true")
else()
set(ENV{GIT_IS_DIRTY} "false")
endif()
endif()
# There's a long list of attributes grabbed from git show.
set(object HEAD)
RunGitCommand(show -s "--format=%H" ${object})
if(exit_code EQUAL 0)
set(ENV{GIT_HEAD_SHA1} ${output})
endif()
RunGitCommand(show -s "--format=%an" ${object})
if(exit_code EQUAL 0)
set(ENV{GIT_AUTHOR_NAME} "${output}")
endif()
RunGitCommand(show -s "--format=%ae" ${object})
if(exit_code EQUAL 0)
set(ENV{GIT_AUTHOR_EMAIL} "${output}")
endif()
RunGitCommand(show -s "--format=%ci" ${object})
if(exit_code EQUAL 0)
set(ENV{GIT_COMMIT_DATE_ISO8601} "${output}")
endif()
RunGitCommand(show -s "--format=%s" ${object})
if(exit_code EQUAL 0)
# Escape quotes
string(REPLACE "\"" "\\\"" output "${output}")
set(ENV{GIT_COMMIT_SUBJECT} "${output}")
endif()
RunGitCommand(show -s "--format=%b" ${object})
if(exit_code EQUAL 0)
if(output)
# Escape quotes
string(REPLACE "\"" "\\\"" output "${output}")
# Escape line breaks in the commit message.
string(REPLACE "\r\n" "\\r\\n\\\r\n" safe "${output}")
if(safe STREQUAL output)
# Didn't have windows lines - try unix lines.
string(REPLACE "\n" "\\n\\\n" safe "${output}")
endif()
else()
# There was no commit body - set the safe string to empty.
set(safe "")
endif()
set(ENV{GIT_COMMIT_BODY} "\"${safe}\"")
else()
set(ENV{GIT_COMMIT_BODY} "\"\"") # empty string.
endif()
# Get output of git describe
RunGitCommand(describe --always ${object})
if(NOT exit_code EQUAL 0)
set(ENV{GIT_DESCRIBE} "unknown")
else()
set(ENV{GIT_DESCRIBE} "${output}")
endif()
# >>>
# 2. Additional git properties can be added here via the
# "execute_process()" command. Be sure to set them in
# the environment using the same variable name you added
# to the "_state_variable_names" list.
endfunction()
# Function: GitStateChangedAction
# Description: this function is executed when the state of the git
# repository changes (e.g. a commit is made).
function(GitStateChangedAction)
foreach(var_name ${_state_variable_names})
set(${var_name} $ENV{${var_name}})
endforeach()
configure_file("${PRE_CONFIGURE_FILE}" "${POST_CONFIGURE_FILE}" @ONLY)
endfunction()
# Function: HashGitState
# Description: loop through the git state variables and compute a unique hash.
# Args:
# _state (out) string; a hash computed from the current git state.
function(HashGitState _state)
set(ans "")
foreach(var_name ${_state_variable_names})
string(SHA256 ans "${ans}$ENV{${var_name}}")
endforeach()
set(${_state} ${ans} PARENT_SCOPE)
endfunction()
# Function: CheckGit
# Description: check if the git repo has changed. If so, update the state file.
# Args:
# _working_dir (in) string; the directory from which git commands will be run.
# _state_changed (out) bool; whether or not the state of the repo has changed.
function(CheckGit _working_dir _state_changed)
# Get the current state of the repo.
GetGitState("${_working_dir}")
# Convert that state into a hash that we can compare against
# the hash stored on-disk.
HashGitState(state)
# Issue 14: post-configure file isn't being regenerated.
#
# Update the state to include the SHA256 for the pre-configure file.
# This forces the post-configure file to be regenerated if the
# pre-configure file has changed.
file(SHA256 ${PRE_CONFIGURE_FILE} preconfig_hash)
string(SHA256 state "${preconfig_hash}${state}")
# Check if the state has changed compared to the backup on disk.
if(EXISTS "${GIT_STATE_FILE}")
file(READ "${GIT_STATE_FILE}" OLD_HEAD_CONTENTS)
if(OLD_HEAD_CONTENTS STREQUAL "${state}")
# State didn't change.
set(${_state_changed} "false" PARENT_SCOPE)
return()
endif()
endif()
# The state has changed.
# We need to update the state file on disk.
# Future builds will compare their state to this file.
file(WRITE "${GIT_STATE_FILE}" "${state}")
set(${_state_changed} "true" PARENT_SCOPE)
endfunction()
# Function: SetupGitMonitoring
# Description: this function sets up custom commands that make the build system
# check the state of git before every build. If the state has
# changed, then a file is configured.
function(SetupGitMonitoring)
add_custom_target(check_git
ALL
DEPENDS ${PRE_CONFIGURE_FILE}
BYPRODUCTS
${POST_CONFIGURE_FILE}
${GIT_STATE_FILE}
COMMENT "Checking the git repository for changes..."
COMMAND
${CMAKE_COMMAND}
-D_BUILD_TIME_CHECK_GIT=TRUE
-DGIT_WORKING_DIR=${GIT_WORKING_DIR}
-DGIT_EXECUTABLE=${GIT_EXECUTABLE}
-DGIT_STATE_FILE=${GIT_STATE_FILE}
-DPRE_CONFIGURE_FILE=${PRE_CONFIGURE_FILE}
-DPOST_CONFIGURE_FILE=${POST_CONFIGURE_FILE}
-P "${CMAKE_CURRENT_LIST_FILE}")
endfunction()
# Function: Main
# Description: primary entry-point to the script. Functions are selected based
# on whether it's configure or build time.
function(Main)
if(_BUILD_TIME_CHECK_GIT)
# Check if the repo has changed.
# If so, run the change action.
CheckGit("${GIT_WORKING_DIR}" changed)
if(changed OR NOT EXISTS "${POST_CONFIGURE_FILE}")
GitStateChangedAction()
endif()
else()
# >> Executes at configure time.
SetupGitMonitoring()
endif()
endfunction()
# And off we go...
Main()


@ -22,4 +22,4 @@ target_link_libraries(tonhttp PUBLIC tdactor ton_crypto tl_api tdnet )
add_executable(http-proxy http-proxy.cpp)
target_include_directories(http-proxy PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/..>)
target_link_libraries(http-proxy PRIVATE tonhttp)
target_link_libraries(http-proxy PRIVATE tonhttp git)


@ -34,6 +34,7 @@
#include <algorithm>
#include <list>
#include "git.h"
#if TD_DARWIN || TD_LINUX
#include <unistd.h>
@ -264,6 +265,10 @@ int main(int argc, char *argv[]) {
int v = VERBOSITY_NAME(FATAL) + (td::to_integer<int>(arg));
SET_VERBOSITY_LEVEL(v);
});
p.add_option('V', "version", "shows http-proxy build version", [&]() {
std::cout << "http-proxy build information: [ Commit: " << GitMetadata::CommitSHA1() << ", Date: " << GitMetadata::CommitDate() << "]\n";
std::exit(0);
});
p.add_option('h', "help", "prints_help", [&]() {
char b[10240];
td::StringBuilder sb(td::MutableSlice{b, 10000});


@ -5,4 +5,6 @@ target_link_libraries(lite-client-common PUBLIC tdutils tdactor adnllite tl_api
add_executable(lite-client lite-client.cpp lite-client.h)
target_link_libraries(lite-client tdutils tdactor adnllite tl_api tl_lite_api tl-lite-utils ton_crypto ton_block
terminal lite-client-common)
terminal lite-client-common git)
install(TARGETS lite-client RUNTIME DESTINATION bin)


@ -69,6 +69,7 @@
#endif
#include <iostream>
#include <sstream>
#include "git.h"
using namespace std::literals::string_literals;
using td::Ref;
@ -3461,7 +3462,7 @@ bool TestNode::ValidatorLoadInfo::store_record(const td::Bits256& key, const blo
if (it == vset_map.end()) {
return false;
}
created.at(it->second) = std::make_pair<td::int64, td::int64>(mc_cnt.total, shard_cnt.total);
created.at(it->second) = std::make_pair(mc_cnt.total, shard_cnt.total);
return true;
}
@ -3674,10 +3675,16 @@ void TestNode::continue_check_validator_load3(std::unique_ptr<TestNode::Validato
}
}
bool compute_punishment(int interval, bool severe, td::RefInt256& fine, unsigned& fine_part) {
bool compute_punishment_default(int interval, bool severe, td::RefInt256& fine, unsigned& fine_part) {
if (interval <= 1000) {
return false; // no punishments for less than 1000 seconds
}
fine = td::make_refint(101 * 1000000000LL); // 101
fine_part = 0;
return true; // todo: (tolya-yanot) temporary reduction of fine
if (severe) {
fine = td::make_refint(2500 * 1000000000LL); // GR$2500
fine_part = (1 << 30); // 1/4 of stake
@ -3698,10 +3705,44 @@ bool compute_punishment(int interval, bool severe, td::RefInt256& fine, unsigned
return true;
}
bool check_punishment(int interval, bool severe, td::RefInt256 fine, unsigned fine_part) {
bool compute_punishment(int interval, bool severe, td::RefInt256& fine, unsigned& fine_part, Ref<vm::Cell> punishment_params) {
if(punishment_params.is_null()) {
return compute_punishment_default(interval, severe, fine, fine_part);
}
block::gen::MisbehaviourPunishmentConfig::Record rec;
if (!tlb::unpack_cell(punishment_params, rec)) {
return false;
}
if(interval <= rec.unpunishable_interval) {
return false;
}
fine = block::tlb::t_Grams.as_integer(rec.default_flat_fine);
fine_part = rec.default_proportional_fine;
if (severe) {
fine = fine * rec.severity_flat_mult; fine >>= 8;
fine_part = fine_part * rec.severity_proportional_mult; fine_part >>= 8;
}
if (interval >= rec.long_interval) {
fine = fine * rec.long_flat_mult; fine >>= 8;
fine_part = fine_part * rec.long_proportional_mult; fine_part >>= 8;
return true;
}
if (interval >= rec.medium_interval) {
fine = fine * rec.medium_flat_mult; fine >>= 8;
fine_part = fine_part * rec.medium_proportional_mult; fine_part >>= 8;
return true;
}
return true;
}
bool check_punishment(int interval, bool severe, td::RefInt256 fine, unsigned fine_part, Ref<vm::Cell> punishment_params) {
td::RefInt256 computed_fine;
unsigned computed_fine_part;
return compute_punishment(interval, severe, computed_fine, computed_fine_part) &&
return compute_punishment(interval, severe, computed_fine, computed_fine_part, punishment_params) &&
std::llabs((long long)fine_part - (long long)computed_fine_part) <=
(std::max(fine_part, computed_fine_part) >> 3) &&
fine * 7 <= computed_fine * 8 && computed_fine * 7 <= fine * 8;
@ -3729,10 +3770,13 @@ td::Status TestNode::write_val_create_proof(TestNode::ValidatorLoadInfo& info1,
if (interval <= 0) {
return td::Status::Error("non-positive time interval");
}
auto punishment_params = info2.config->get_config_param(40);
int severity = (severe ? 2 : 1);
td::RefInt256 fine = td::make_refint(1000000000);
unsigned fine_part = 0xffffffff / 16; // 1/16
if (!compute_punishment(interval, severe, fine, fine_part)) {
td::RefInt256 fine = td::make_refint(101000000000);
unsigned fine_part = 0; // todo: (tolya-yanot) temporary reduction of fine // 0xffffffff / 16; // 1/16
if (!compute_punishment(interval, severe, fine, fine_part, punishment_params)) {
return td::Status::Error("cannot compute adequate punishment");
}
Ref<vm::Cell> cpl_descr, complaint;
@ -4046,7 +4090,7 @@ td::Status TestNode::continue_check_validator_load_proof(std::unique_ptr<Validat
if (suggested_fine.is_null()) {
return td::Status::Error("cannot parse suggested fine");
}
if (!check_punishment(interval, severe, suggested_fine, rec.suggested_fine_part)) {
if (!check_punishment(interval, severe, suggested_fine, rec.suggested_fine_part, info2->config->get_config_param(40))) {
LOG(ERROR) << "proposed punishment (fine " << td::dec_string(suggested_fine)
<< ", fine_part=" << (double)rec.suggested_fine_part / (1LL << 32) << " is too harsh";
show_vote(root->get_hash().bits(), false);
@ -4194,6 +4238,11 @@ int main(int argc, char* argv[]) {
SET_VERBOSITY_LEVEL(VERBOSITY_NAME(FATAL) + verbosity);
return (verbosity >= 0 && verbosity <= 9) ? td::Status::OK() : td::Status::Error("verbosity must be 0..9");
});
p.add_option('V', "version", "shows lite-client build information", [&]() {
std::cout << "lite-client build information: [ Commit: " << GitMetadata::CommitSHA1() << ", Date: " << GitMetadata::CommitDate() << "]\n";
std::exit(0);
});
p.add_option('i', "idx", "set liteserver idx", [&](td::Slice arg) {
auto idx = td::to_integer<int>(arg);
td::actor::send_closure(x, &TestNode::set_liteserver_idx, idx);

View file

@ -2,4 +2,4 @@ cmake_minimum_required(VERSION 3.0.2 FATAL_ERROR)
add_executable(rldp-http-proxy rldp-http-proxy.cpp)
target_include_directories(rldp-http-proxy PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/..>)
target_link_libraries(rldp-http-proxy PRIVATE tonhttp rldp dht tonlib)
target_link_libraries(rldp-http-proxy PRIVATE tonhttp rldp dht tonlib git)

View file

@ -51,6 +51,7 @@
#include <algorithm>
#include <list>
#include <set>
#include "git.h"
#if TD_DARWIN || TD_LINUX
#include <unistd.h>
@ -1133,6 +1134,10 @@ int main(int argc, char *argv[]) {
int v = VERBOSITY_NAME(FATAL) + (td::to_integer<int>(arg));
SET_VERBOSITY_LEVEL(v);
});
p.add_option('V', "version", "shows rldp-http-proxy build information", [&]() {
std::cout << "rldp-http-proxy build information: [ Commit: " << GitMetadata::CommitSHA1() << ", Date: " << GitMetadata::CommitDate() << "]\n";
std::exit(0);
});
p.add_option('h', "help", "prints a help message", [&]() {
char b[10240];
td::StringBuilder sb(td::MutableSlice{b, 10000});

View file

@ -278,7 +278,7 @@ void RldpConnection::receive_raw_obj(ton::ton_api::rldp2_messagePart &part) {
return;
}
auto r_total_size = td::narrow_cast_safe<td::size_t>(part.total_size_);
auto r_total_size = td::narrow_cast_safe<std::size_t>(part.total_size_);
if (r_total_size.is_error()) {
return;
}

View file

@ -45,7 +45,7 @@ target_include_directories(storage PUBLIC
add_executable(storage-cli ${STORAGE_CLI_SOURCE})
target_link_libraries(storage-cli storage overlay tdutils tdactor adnl tl_api dht
rldp rldp2 catchain validatorsession full-node validator ton_validator validator
fift-lib memprof terminal ${JEMALLOC_LIBRARIES})
fift-lib memprof terminal git ${JEMALLOC_LIBRARIES})
set(STORAGE_TEST_SOURCE
${CMAKE_CURRENT_SOURCE_DIR}/test/storage.cpp

View file

@ -22,7 +22,7 @@
#include "td/utils/format.h"
namespace ton {
void LoadSpeed::add(td::size_t size, td::Timestamp now) {
void LoadSpeed::add(std::size_t size, td::Timestamp now) {
total_size_ += size;
events_.push(Event{size, now});
update(now);

View file

@ -26,17 +26,17 @@
namespace ton {
class LoadSpeed {
public:
void add(td::size_t size, td::Timestamp now);
void add(std::size_t size, td::Timestamp now);
double speed(td::Timestamp now = td::Timestamp::now()) const;
friend td::StringBuilder &operator<<(td::StringBuilder &sb, const LoadSpeed &speed);
private:
struct Event {
td::size_t size;
std::size_t size;
td::Timestamp at;
};
mutable td::VectorQueue<Event> events_;
mutable td::size_t total_size_{0};
mutable std::size_t total_size_{0};
double duration() const;
void update(td::Timestamp now) const;

View file

@ -138,7 +138,7 @@ void MerkleTree::init_finish() {
CHECK(root_hash_);
}
void MerkleTree::remove_chunk(td::size_t index) {
void MerkleTree::remove_chunk(std::size_t index) {
CHECK(index < n_);
index += n_;
while (proof_[index].not_null()) {
@ -147,13 +147,13 @@ void MerkleTree::remove_chunk(td::size_t index) {
}
}
bool MerkleTree::has_chunk(td::size_t index) const {
bool MerkleTree::has_chunk(std::size_t index) const {
CHECK(index < n_);
index += n_;
return proof_[index].not_null();
}
void MerkleTree::add_chunk(td::size_t index, td::Slice hash) {
void MerkleTree::add_chunk(std::size_t index, td::Slice hash) {
CHECK(hash.size() == 32);
CHECK(index < n_);
index += n_;

View file

@ -39,7 +39,7 @@ class MerkleTree {
MerkleTree(size_t chunks_count, td::Ref<vm::Cell> root_proof);
struct Chunk {
td::size_t index{0};
std::size_t index{0};
td::Bits256 hash;
};
@ -47,7 +47,7 @@ class MerkleTree {
MerkleTree() = default;
void init_begin(size_t chunks_count);
void init_add_chunk(td::size_t index, td::Slice hash);
void init_add_chunk(std::size_t index, td::Slice hash);
void init_finish();
// merge external proof with an existing proof
@ -70,20 +70,20 @@ class MerkleTree {
private:
td::uint64 total_blocks_;
td::size_t n_; // n = 2^log_n
std::size_t n_; // n = 2^log_n
td::uint32 log_n_;
td::size_t mark_id_{0};
std::vector<td::size_t> mark_; // n_ * 2
std::size_t mark_id_{0};
std::vector<std::size_t> mark_; // n_ * 2
std::vector<td::Ref<vm::Cell>> proof_; // n_ * 2
td::optional<td::Bits256> root_hash_;
td::Ref<vm::Cell> root_proof_;
td::Status validate_proof(td::Ref<vm::Cell> new_root);
bool has_chunk(td::size_t index) const;
void remove_chunk(td::size_t index);
bool has_chunk(std::size_t index) const;
void remove_chunk(std::size_t index);
void add_chunk(td::size_t index, td::Slice hash);
void add_chunk(std::size_t index, td::Slice hash);
void init_proof();
td::Ref<vm::Cell> merge(td::Ref<vm::Cell> root, size_t index);

View file

@ -51,6 +51,7 @@
#include <limits>
#include <map>
#include <set>
#include "git.h"
namespace ton_rldp = ton::rldp2;
@ -749,7 +750,7 @@ class StorageCli : public td::actor::Actor {
auto file_id_str = parser.read_word();
size_t file_id = std::numeric_limits<size_t>::max();
if (file_id_str != "*") {
TRY_RESULT_PROMISE_ASSIGN(promise, file_id, td::to_integer_safe<td::size_t>(file_id_str));
TRY_RESULT_PROMISE_ASSIGN(promise, file_id, td::to_integer_safe<std::size_t>(file_id_str));
}
TRY_RESULT_PROMISE(promise, priority, td::to_integer_safe<td::uint8>(parser.read_word()));
if (priority == 255) {
@ -837,6 +838,10 @@ int main(int argc, char *argv[]) {
SET_VERBOSITY_LEVEL(VERBOSITY_NAME(FATAL) + verbosity);
return (verbosity >= 0 && verbosity <= 20) ? td::Status::OK() : td::Status::Error("verbosity must be 0..20");
});
p.add_option('V', "version", "shows storage-cli build information", [&]() {
std::cout << "storage-cli build information: [ Commit: " << GitMetadata::CommitSHA1() << ", Date: " << GitMetadata::CommitDate() << "]\n";
std::exit(0);
});
p.add_option('C', "config", "set ton config", [&](td::Slice arg) { options.config = arg.str(); });
p.add_option('D', "db", "root for dbs", [&](td::Slice fname) { options.db_root = fname.str(); });
p.add_checked_option('I', "ip", "set ip:port", [&](td::Slice arg) {

View file

@ -251,6 +251,7 @@ set(TDUTILS_SOURCE
td/utils/ThreadLocalStorage.h
td/utils/ThreadSafeCounter.h
td/utils/Time.h
td/utils/date.h
td/utils/TimedStat.h
td/utils/Timer.h
td/utils/TsFileLog.h

tdutils/td/utils/date.h (new file, 8007 lines)

File diff suppressed because it is too large

View file

@ -23,6 +23,7 @@
#include "td/utils/port/thread_local.h"
#include "td/utils/Slice.h"
#include "td/utils/Time.h"
#include "td/utils/date.h"
#include <atomic>
#include <cstdlib>
@ -65,6 +66,8 @@ Logger::Logger(LogInterface &log, const LogOptions &options, int log_level, Slic
return;
}
using namespace date;
// log level
sb_ << '[';
if (log_level < 10) {
@ -81,7 +84,8 @@ Logger::Logger(LogInterface &log, const LogOptions &options, int log_level, Slic
sb_ << thread_id << ']';
// timestamp
sb_ << '[' << StringBuilder::FixedDouble(Clocks::system(), 9) << ']';
//sb_ << '[' << StringBuilder::FixedDouble(Clocks::system(), 9) << ']';
sb_ << '[' << date::format("%F %T", std::chrono::system_clock::now()) << ']';
// file : line
if (!file_name.empty()) {
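The timestamp in the log prefix switches from a raw FixedDouble of Clocks::system() to a calendar date rendered by the newly vendored td/utils/date.h (Howard Hinnant's single-header date library); "%F %T" expands to an ISO date plus a time of day at the clock's native precision. A minimal sketch of the same call in isolation, assuming only that header is on the include path:

#include <chrono>
#include <iostream>
#include "td/utils/date.h"  // single-header date library added in this commit

int main() {
  using namespace date;  // stream operators / format() for std::chrono time points
  // prints something like [2022-05-02 12:42:54.123456789]; precision depends on system_clock
  std::cout << '[' << date::format("%F %T", std::chrono::system_clock::now()) << "]\n";
  return 0;
}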

View file

@ -58,6 +58,8 @@
#define PSAPI_VERSION 1
#endif
#include <psapi.h>
#pragma comment( lib, "psapi.lib" )
#endif

View file

@ -43,6 +43,11 @@
#include "td/utils/filesystem.h"
#include "td/utils/port/path.h"
#include "ton/ton-types.h"
#include "ton/ton-tl.hpp"
#include "ton/ton-io.hpp"
#include "validator/fabric.h"
#include "validator/impl/collator.h"
#include "crypto/vm/cp0.h"
@ -76,6 +81,9 @@ class TestNode : public td::actor::Actor {
td::actor::ActorOwn<ton::validator::ValidatorManagerInterface> validator_manager_;
std::string db_root_ = "/var/ton-work/db/";
std::string global_config_;
td::Ref<ton::validator::ValidatorManagerOptions> opts_;
ton::ZeroStateIdExt zero_id_;
td::BufferSlice bs_;
std::vector<td::BufferSlice> ext_msgs_;
@ -92,6 +100,10 @@ class TestNode : public td::actor::Actor {
void set_db_root(std::string db_root) {
db_root_ = db_root;
}
void set_global_config_path(std::string path) {
global_config_ = path;
}
void set_zero_root_hash(td::Bits256 hash) {
zero_id_.root_hash = hash;
}
@ -218,6 +230,54 @@ class TestNode : public td::actor::Actor {
}
}
td::Status create_validator_options() {
if(!global_config_.length()) {
LOG(INFO) << "no global config file passed. Using zero-init config";
opts_ = ton::validator::ValidatorManagerOptions::create(
ton::BlockIdExt{ton::masterchainId, ton::shardIdAll, 0, ton::RootHash::zero(), ton::FileHash::zero()},
ton::BlockIdExt{ton::masterchainId, ton::shardIdAll, 0, ton::RootHash::zero(), ton::FileHash::zero()});
return td::Status::OK();
}
TRY_RESULT_PREFIX(conf_data, td::read_file(global_config_), "failed to read: ");
TRY_RESULT_PREFIX(conf_json, td::json_decode(conf_data.as_slice()), "failed to parse json: ");
ton::ton_api::config_global conf;
TRY_STATUS_PREFIX(ton::ton_api::from_json(conf, conf_json.get_object()), "json does not fit TL scheme: ");
auto zero_state = ton::create_block_id(conf.validator_->zero_state_);
ton::BlockIdExt init_block;
if (!conf.validator_->init_block_) {
LOG(INFO) << "no init block in config. using zero state";
init_block = zero_state;
} else {
init_block = ton::create_block_id(conf.validator_->init_block_);
}
opts_ = ton::validator::ValidatorManagerOptions::create(zero_state, init_block);
std::vector<ton::BlockIdExt> h;
for (auto &x : conf.validator_->hardforks_) {
auto b = ton::create_block_id(x);
if (!b.is_masterchain()) {
return td::Status::Error(ton::ErrorCode::error,
"[validator/hardforks] section contains not masterchain block id");
}
if (!b.is_valid_full()) {
return td::Status::Error(ton::ErrorCode::error, "[validator/hardforks] section contains invalid block_id");
}
for (auto &y : h) {
if (y.is_valid() && y.seqno() >= b.seqno()) {
y.invalidate();
}
}
h.push_back(b);
}
opts_.write().set_hardforks(std::move(h));
LOG(INFO) << "Hardforks num in config: "<< opts_->get_hardforks().size();
return td::Status::OK();
}
void run() {
zero_id_.workchain = ton::masterchainId;
td::mkdir(db_root_).ensure();
@ -227,9 +287,14 @@ class TestNode : public td::actor::Actor {
do_save_file();
}
auto opts = ton::validator::ValidatorManagerOptions::create(
ton::BlockIdExt{ton::masterchainId, ton::shardIdAll, 0, zero_id_.root_hash, zero_id_.file_hash},
ton::BlockIdExt{ton::masterchainId, ton::shardIdAll, 0, zero_id_.root_hash, zero_id_.file_hash});
auto Sr = create_validator_options();
if (Sr.is_error()) {
LOG(ERROR) << "failed to load global config'" << global_config_ << "': " << Sr;
std::_Exit(2);
}
auto opts = opts_;
opts.write().set_initial_sync_disabled(true);
validator_manager_ = ton::validator::ValidatorManagerDiskFactory::create(ton::PublicKeyHash::zero(), opts, shard_,
shard_top_block_id_, db_root_);
@ -366,6 +431,8 @@ int main(int argc, char *argv[]) {
[&](td::Slice fname) { td::actor::send_closure(x, &TestNode::set_zero_file, fname.str()); });
p.add_option('D', "db", "root for dbs",
[&](td::Slice fname) { td::actor::send_closure(x, &TestNode::set_db_root, fname.str()); });
p.add_option('C', "config", "global config path",
[&](td::Slice fname) { td::actor::send_closure(x, &TestNode::set_global_config_path, fname.str()); });
p.add_option('m', "ext-message", "binary file with serialized inbound external message",
[&](td::Slice fname) { td::actor::send_closure(x, &TestNode::load_ext_message, fname.str()); });
p.add_option('M', "top-shard-message", "binary file with serialized shard top block description",

View file

@ -307,6 +307,8 @@ validatorSession.candidate src:int256 round:int root_hash:int256 data:bytes coll
validatorSession.config catchain_idle_timeout:double catchain_max_deps:int round_candidates:int next_candidate_delay:double round_attempt_duration:int
max_round_attempts:int max_block_size:int max_collated_data_size:int = validatorSession.Config;
validatorSession.configVersioned catchain_idle_timeout:double catchain_max_deps:int round_candidates:int next_candidate_delay:double round_attempt_duration:int
max_round_attempts:int max_block_size:int max_collated_data_size:int version:int = validatorSession.Config;
validatorSession.configNew catchain_idle_timeout:double catchain_max_deps:int round_candidates:int next_candidate_delay:double round_attempt_duration:int
max_round_attempts:int max_block_size:int max_collated_data_size:int new_catchain_ids:Bool = validatorSession.Config;

Binary file not shown.

View file

@ -105,7 +105,7 @@ msg.dataDecrypted proof:bytes data:msg.Data = msg.DataDecrypted;
msg.dataEncryptedArray elements:vector<msg.dataEncrypted> = msg.DataEncryptedArray;
msg.dataDecryptedArray elements:vector<msg.dataDecrypted> = msg.DataDecryptedArray;
msg.message destination:accountAddress public_key:string amount:int64 data:msg.Data = msg.Message;
msg.message destination:accountAddress public_key:string amount:int64 data:msg.Data send_mode:int32 = msg.Message;
//
// DNS
@ -202,6 +202,14 @@ data bytes:secureBytes = Data;
liteServer.info now:int53 version:int32 capabilities:int64 = liteServer.Info;
blocks.masterchainInfo last:ton.BlockIdExt state_root_hash:bytes init:ton.BlockIdExt = blocks.MasterchainInfo;
blocks.shards shards:vector<ton.BlockIdExt> = blocks.Shards;
blocks.accountTransactionId account:bytes lt:int64 = blocks.AccountTransactionId;
blocks.shortTxId mode:# account:mode.0?bytes lt:mode.1?int64 hash:mode.2?bytes = liteServer.TransactionId;
blocks.transactions id:ton.blockIdExt req_count:int32 incomplete:Bool transactions:vector<blocks.shortTxId> = blocks.Transactions;
blocks.header id:ton.blockIdExt global_id:int32 version:int32 after_merge:Bool after_split:Bool before_split:Bool want_merge:Bool want_split:Bool validator_list_hash_short:int32 catchain_seqno:int32 min_ref_mc_seqno:int32 is_key_block:Bool prev_key_block_seqno:int32 start_lt:int64 end_lt:int64 vert_seqno:# prev_blocks:vector<ton.blockIdExt> = blocks.Header;
//blocks.shortData header:blocks.Header transactions:blocks.Header = blocks.BlockData;
---functions---
init options:options = options.Info;
@ -278,6 +286,13 @@ pchan.validatePromise public_key:bytes promise:pchan.promise = Ok;
pchan.packPromise promise:pchan.promise = Data;
pchan.unpackPromise data:secureBytes = pchan.Promise;
blocks.getMasterchainInfo = blocks.MasterchainInfo;
blocks.getShards id:ton.blockIdExt = blocks.Shards;
blocks.lookupBlock mode:int32 id:ton.blockId lt:int64 utime:int32 = ton.BlockIdExt;
blocks.getTransactions id:ton.blockIdExt mode:# count:# after:blocks.accountTransactionId = blocks.Transactions;
blocks.getBlockHeader id:ton.blockIdExt = blocks.Header;
onLiteServerQueryResult id:int64 bytes:bytes = Ok;
onLiteServerQueryError id:int64 error:error = Ok;

Binary file not shown.

View file

@ -69,7 +69,7 @@ if (TONLIB_ENABLE_JNI AND NOT ANDROID) # jni is available by default on Android
endif()
add_executable(tonlib-cli tonlib/tonlib-cli.cpp)
target_link_libraries(tonlib-cli tonlib tdactor tdutils terminal pow-miner-lib)
target_link_libraries(tonlib-cli tonlib tdactor tdutils terminal pow-miner-lib git)
if (NOT CMAKE_CROSSCOMPILING)
if (TONLIB_ENABLE_JNI)
@ -165,3 +165,4 @@ install(FILES "TonlibConfig.cmake" "${CMAKE_CURRENT_BINARY_DIR}/TonlibConfigVers
# Add SOVERSION to shared libraries
set_property(TARGET tonlibjson PROPERTY SOVERSION ${TON_VERSION})
install(TARGETS tonlib-cli RUNTIME DESTINATION bin)

View file

@ -264,7 +264,7 @@ td::Result<QueryId> create_send_grams_query(Client& client, const Wallet& source
data = tonlib_api::make_object<tonlib_api::msg_dataRaw>(message.raw.unwrap(), message.init_state.unwrap());
}
msgs.push_back(tonlib_api::make_object<tonlib_api::msg_message>(
tonlib_api::make_object<tonlib_api::accountAddress>(destination), "", amount, std::move(data)));
tonlib_api::make_object<tonlib_api::accountAddress>(destination), "", amount, std::move(data), -1));
auto r_id =
sync_send(client, tonlib_api::make_object<tonlib_api::createQuery>(
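The extra -1 argument above is the new send_mode field: a negative value appears to keep the previous default behaviour (the wallet code picks its usual mode), while a non-negative value is passed through to the wallet as the blockchain send mode. A minimal sketch of setting a mode explicitly; the value 3 (commonly understood as pay-fees-separately plus ignore-errors) and the destination/amount variables are purely illustrative:

// Sketch only: assumes a std::string destination and td::int64 amount are in scope,
// mirroring create_send_grams_query() above; send_mode 3 is just an example value.
msgs.push_back(tonlib_api::make_object<tonlib_api::msg_message>(
    tonlib_api::make_object<tonlib_api::accountAddress>(destination), /*public_key=*/"", amount,
    tonlib_api::make_object<tonlib_api::msg_dataText>("illustrative transfer"),
    /*send_mode=*/3));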

View file

@ -43,6 +43,11 @@
#include "ton/ton-shard.h"
#include "vm/boc.h"
#include "vm/cellops.h"
#include "vm/cells/MerkleProof.h"
#include "vm/vm.h"
#include "vm/cp0.h"
#include "vm/memo.h"
#include "td/utils/as.h"
#include "td/utils/Random.h"
@ -54,6 +59,11 @@
#include "common/util.h"
template <class Type>
using lite_api_ptr = ton::lite_api::object_ptr<Type>;
template <class Type>
using tonlib_api_ptr = ton::tonlib_api::object_ptr<Type>;
namespace tonlib {
namespace int_api {
struct GetAccountState {
@ -193,6 +203,14 @@ td::Result<block::PublicKey> public_key_from_bytes(td::Slice bytes) {
return key_bytes;
}
td::Result<ton::WalletV3::InitData> to_init_data(const tonlib_api::wallet_v3_initialAccountState& wallet_state) {
TRY_RESULT(key_bytes, get_public_key(wallet_state.public_key_));
ton::WalletV3::InitData init_data;
init_data.public_key = td::SecureString(key_bytes.key);
init_data.wallet_id = static_cast<td::uint32>(wallet_state.wallet_id_);
return std::move(init_data);
}
td::Result<ton::RestrictedWallet::InitData> to_init_data(const tonlib_api::rwallet_initialAccountState& rwallet_state) {
TRY_RESULT(init_key_bytes, get_public_key(rwallet_state.init_public_key_));
TRY_RESULT(key_bytes, get_public_key(rwallet_state.public_key_));
@ -485,6 +503,23 @@ class AccountState {
initial_account_state,
td::overloaded(
[](auto& x) {},
[&](tonlib_api::wallet_v3_initialAccountState& v3wallet) {
for (auto revision : ton::SmartContractCode::get_revisions(ton::SmartContractCode::WalletV3)) {
auto init_data = to_init_data(v3wallet);
if (init_data.is_error()) {
continue;
}
auto wallet = ton::WalletV3::create(init_data.move_as_ok(), revision);
if (!(wallet->get_address(ton::masterchainId) == address_ ||
wallet->get_address(ton::basechainId) == address_)) {
continue;
}
wallet_type_ = WalletType::WalletV3;
wallet_revision_ = revision;
set_new_state(wallet->get_state());
break;
}
},
[&](tonlib_api::rwallet_initialAccountState& rwallet) {
for (auto revision : ton::SmartContractCode::get_revisions(ton::SmartContractCode::RestrictedWallet)) {
auto r_init_data = to_init_data(rwallet);
@ -1903,17 +1938,68 @@ const MasterConfig& get_default_master_config() {
res.add_config("testnet", R"abc({
"liteservers": [
],
"validator": {
"@type": "validator.config.global",
"zero_state": {
"file_hash": "BzgfRpFgyNjHI7aR5KQhq4Wtr2wN+sXzLaW+rfHvP5A=",
"seqno": 0,
"root_hash": "WPsS1IiRjT0MSD6Xvys4QYQh7rrc9x0ybzXojwJ4gH0=",
"workchain": -1,
"shard": -9223372036854775808
},
"init_block":{
"file_hash":"CXvKA37GFugspx6U7sv4Sh7KIVqR1R+Pj4AmGx2Ezi0=",
"seqno":2908533,
"root_hash":"oFi1uMHmELXV/AfjuvTZREmRY7TtNgq608sFjnRJCAo=",
"workchain":-1,
"shard":-9223372036854775808
},
"hardforks":[
{
"file_hash":"x97xobbkPDggz7/u6KQzlT3pBmM5XfGE6vN7AiuTMU0=",
"seqno":1907338,
"root_hash":"BuZOCJvbg0ys2JBnfocqCf1T4rt0OzLLslOG3CjcSgU=",
"workchain":-1,
"shard":-9223372036854775808
},
{
"file_hash": "CXvKA37GFugspx6U7sv4Sh7KIVqR1R+Pj4AmGx2Ezi0=",
"seqno": 2908533,
"root_hash": "oFi1uMHmELXV/AfjuvTZREmRY7TtNgq608sFjnRJCAo=",
"workchain": -1,
"shard": -9223372036854775808
}
]
}
})abc");
res.add_config("mainnet", R"abc({
"liteservers": [
],
"validator": {
"@type": "validator.config.global",
"zero_state": {
"workchain": -1,
"shard": -9223372036854775808,
"seqno": 0,
"root_hash": "VCSXxDHhTALFxReyTZRd8E4Ya3ySOmpOWAS4rBX9XBY=",
"file_hash": "eh9yveSz1qMdJ7mOsO+I+H77jkLr9NpAuEkoJuseXBo="
"root_hash": "F6OpKZKqvqeFp6CQmFomXNMfMj2EnaUSOXN+Mh+wVWk=",
"file_hash": "XplPz01CXAps5qeSWUtxcyBfdAo5zVb1N979KLSKD24="
},
"init_block":
{"workchain":-1,"shard":-9223372036854775808,"seqno":870721,"root_hash":"jYKhSQ1xeSPprzgjqiUOnAWwc2yqs7nCVAU21k922s4=","file_hash":"kHidF02CZpaz2ia9jtXUJLp0AiWMWwfzprTUIsddHSo="}
"init_block" : {
"root_hash": "irEt9whDfgaYwD+8AzBlYzrMZHhrkhSVp3PU1s4DOz4=",
"seqno": 10171687,
"file_hash": "lay/bUKUUFDJXU9S6gx9GACQFl+uK+zX8SqHWS9oLZc=",
"workchain": -1,
"shard": -9223372036854775808
},
"hardforks": [
{
"file_hash": "t/9VBPODF7Zdh4nsnA49dprO69nQNMqYL+zk5bCjV/8=",
"seqno": 8536841,
"root_hash": "08Kpc9XxrMKC6BF/FeNHPS3MEL1/Vi/fQU/C9ELUrkc=",
"workchain": -1,
"shard": -9223372036854775808
}
]
}
})abc");
res.add_config("testnet2", R"abc({
@ -1928,13 +2014,13 @@ const MasterConfig& get_default_master_config() {
"root_hash": "F6OpKZKqvqeFp6CQmFomXNMfMj2EnaUSOXN+Mh+wVWk=",
"file_hash": "XplPz01CXAps5qeSWUtxcyBfdAo5zVb1N979KLSKD24="
},
"init_block" : {
"file_hash": "t/9VBPODF7Zdh4nsnA49dprO69nQNMqYL+zk5bCjV/8=",
"seqno": 8536841,
"root_hash": "08Kpc9XxrMKC6BF/FeNHPS3MEL1/Vi/fQU/C9ELUrkc=",
"workchain": -1,
"shard": -9223372036854775808
},
"init_block" : {
"root_hash": "irEt9whDfgaYwD+8AzBlYzrMZHhrkhSVp3PU1s4DOz4=",
"seqno": 10171687,
"file_hash": "lay/bUKUUFDJXU9S6gx9GACQFl+uK+zX8SqHWS9oLZc=",
"workchain": -1,
"shard": -9223372036854775808
},
"hardforks": [
{
"file_hash": "t/9VBPODF7Zdh4nsnA49dprO69nQNMqYL+zk5bCjV/8=",
@ -2027,14 +2113,15 @@ td::Result<TonlibClient::FullConfig> TonlibClient::validate_config(tonlib_api::o
}
state.vert_seqno = vert_seqno;
//TODO: this could be useful to override master config
if (false && new_config.init_block_id.is_valid() &&
bool user_defined_init_block = false;
if (new_config.init_block_id.is_valid() &&
state.last_key_block_id.id.seqno < new_config.init_block_id.id.seqno) {
state.last_key_block_id = new_config.init_block_id;
user_defined_init_block = true;
LOG(INFO) << "Use init block from USER config: " << new_config.init_block_id.to_str();
}
if (o_master_config) {
if (o_master_config && !user_defined_init_block) {
auto master_config = o_master_config.unwrap();
if (master_config.init_block_id.is_valid() &&
state.last_key_block_id.id.seqno < master_config.init_block_id.id.seqno) {
@ -2054,9 +2141,6 @@ td::Result<TonlibClient::FullConfig> TonlibClient::validate_config(tonlib_api::o
res.config = std::move(new_config);
res.use_callbacks_for_network = config->use_callbacks_for_network_;
res.wallet_id = td::as<td::uint32>(res.config.zero_state_id.root_hash.as_slice().data());
if (res.config.name == "mainnet") {
res.wallet_id = 0x4BA92D89 + 1; // user will subtract -1 for basechain
}
res.rwallet_init_public_key = "Puasxr0QfFZZnYISRphVse7XHKfW7pZU5SJarVHXvQ+rpzkD";
res.last_state_key = std::move(last_state_key);
res.last_state = std::move(state);
@ -2482,6 +2566,7 @@ class GenericCreateSendGrams : public TonlibQueryActor {
struct Action {
block::StdAddress destination;
td::int64 amount;
td::int32 send_mode{-1};
bool is_encrypted{false};
bool should_encrypt;
@ -2533,6 +2618,7 @@ class GenericCreateSendGrams : public TonlibQueryActor {
auto key = td::Ed25519::PublicKey(td::SecureString(public_key.key));
res.o_public_key = std::move(key);
}
res.send_mode = message.send_mode_;
auto status = downcast_call2<td::Status>(
*message.data_, td::overloaded(
[&](tonlib_api::msg_dataRaw& text) {
@ -2968,6 +3054,7 @@ class GenericCreateSendGrams : public TonlibQueryActor {
auto& destination = destinations_[i];
gift.destination = destinations_[i]->get_address();
gift.gramms = action.amount;
gift.send_mode = action.send_mode;
// Temporary turn off this dangerous transfer
if (false && action.amount == source_->get_balance()) {
@ -3000,9 +3087,9 @@ class GenericCreateSendGrams : public TonlibQueryActor {
}
}
if (!o_public_key) {
// if (!o_public_key) { // todo: (tolya-yanot) temporary disable msg comment encryption (The exchanges/payment services needs to read the comment of incoming messages). This will be uncommented when a general standard is developed.
return TonlibError::MessageEncryption("Get public key (in destination)");
}
// }
auto addr = source_->get_address();
addr.bounceable = true;
@ -3928,17 +4015,18 @@ td::Status TonlibClient::do_request(const tonlib_api::liteServer_getInfo& reques
return td::Status::OK();
}
auto to_bits256(td::Slice data, td::Slice name) -> td::Result<td::Bits256> {
if (data.size() != 32) {
return TonlibError::InvalidField(name, "wrong length (not 32 bytes)");
}
return td::Bits256(data.ubegin());
}
td::Status TonlibClient::do_request(tonlib_api::withBlock& request,
td::Promise<object_ptr<tonlib_api::Object>>&& promise) {
if (!request.id_) {
return TonlibError::EmptyField("id");
}
auto to_bits256 = [](td::Slice data, td::Slice name) -> td::Result<td::Bits256> {
if (data.size() != 32) {
return TonlibError::InvalidField(name, "wrong length (not 32 bytes)");
}
return td::Bits256(data.ubegin());
};
TRY_RESULT(root_hash, to_bits256(request.id_->root_hash_, "root_hash"));
TRY_RESULT(file_hash, to_bits256(request.id_->file_hash_, "file_hash"));
ton::BlockIdExt block_id(request.id_->workchain_, request.id_->shard_, request.id_->seqno_, root_hash, file_hash);
@ -3946,6 +4034,202 @@ td::Status TonlibClient::do_request(tonlib_api::withBlock& request,
return td::Status::OK();
}
auto to_tonlib_api(const ton::lite_api::tonNode_blockIdExt& blk) -> tonlib_api_ptr<tonlib_api::ton_blockIdExt> {
return tonlib_api::make_object<tonlib_api::ton_blockIdExt>(
blk.workchain_, blk.shard_, blk.seqno_, blk.root_hash_.as_slice().str(), blk.file_hash_.as_slice().str());
}
/*auto to_tonlib_api(const ton::BlockIdExt& blk) -> tonlib_api_ptr<tonlib_api::ton_blockIdExt> {
return tonlib_api::make_object<tonlib_api::ton_blockIdExt>(
blk.workchain, blk.shard, blk.seqno, blk.root_hash.as_slice().str(), blk.file_hash.as_slice().str());
}*/
auto to_tonlib_api(const ton::lite_api::tonNode_zeroStateIdExt& zeroStateId)
-> tonlib_api_ptr<tonlib_api::ton_blockIdExt> {
return tonlib_api::make_object<tonlib_api::ton_blockIdExt>(  // TODO: check whether shard is indeed 0
zeroStateId.workchain_, 0, 0, zeroStateId.root_hash_.as_slice().str(), zeroStateId.file_hash_.as_slice().str());
}
auto to_lite_api(const tonlib_api::ton_blockIdExt& blk) -> td::Result<lite_api_ptr<ton::lite_api::tonNode_blockIdExt>> {
TRY_RESULT(root_hash, to_bits256(blk.root_hash_, "blk.root_hash"))
TRY_RESULT(file_hash, to_bits256(blk.file_hash_, "blk.file_hash"))
return ton::lite_api::make_object<ton::lite_api::tonNode_blockIdExt>(
blk.workchain_, blk.shard_, blk.seqno_, root_hash, file_hash);
}
td::Status TonlibClient::do_request(const tonlib_api::blocks_getMasterchainInfo& masterchain_info,
td::Promise<object_ptr<tonlib_api::blocks_masterchainInfo>>&& promise) {
client_.send_query(ton::lite_api::liteServer_getMasterchainInfo(),
promise.wrap([](lite_api_ptr<ton::lite_api::liteServer_masterchainInfo>&& masterchain_info) {
return tonlib_api::make_object<tonlib_api::blocks_masterchainInfo>(
to_tonlib_api(*masterchain_info->last_), masterchain_info->state_root_hash_.as_slice().str(),
to_tonlib_api(*masterchain_info->init_));
}));
return td::Status::OK();
}
td::Status TonlibClient::do_request(const tonlib_api::blocks_getShards& request,
td::Promise<object_ptr<tonlib_api::blocks_shards>>&& promise) {
TRY_RESULT(block, to_lite_api(*request.id_))
client_.send_query(ton::lite_api::liteServer_getAllShardsInfo(std::move(block)),
promise.wrap([](lite_api_ptr<ton::lite_api::liteServer_allShardsInfo>&& all_shards_info) {
td::BufferSlice proof = std::move((*all_shards_info).proof_);
td::BufferSlice data = std::move((*all_shards_info).data_);
if (data.empty()) {
//return td::Status::Error("shard configuration is empty");
} else {
auto R = vm::std_boc_deserialize(data.clone());
if (R.is_error()) {
//return td::Status::Error("cannot deserialize shard configuration");
}
auto root = R.move_as_ok();
block::ShardConfig sh_conf;
if (!sh_conf.unpack(vm::load_cell_slice_ref(root))) {
//return td::Status::Error("cannot extract shard block list from shard configuration");
} else {
auto ids = sh_conf.get_shard_hash_ids(true);
tonlib_api::blocks_shards shards;
for (auto id : ids) {
auto ref = sh_conf.get_shard_hash(ton::ShardIdFull(id));
if (ref.not_null()) {
shards.shards_.push_back(to_tonlib_api(ref->top_block_id()));
}
}
return tonlib_api::make_object<tonlib_api::blocks_shards>(std::move(shards));
}
}
}));
return td::Status::OK();
}
td::Status TonlibClient::do_request(const tonlib_api::blocks_lookupBlock& request,
td::Promise<object_ptr<tonlib_api::ton_blockIdExt>>&& promise) {
client_.send_query(ton::lite_api::liteServer_lookupBlock(
request.mode_,
ton::lite_api::make_object<ton::lite_api::tonNode_blockId>((*request.id_).workchain_, (*request.id_).shard_, (*request.id_).seqno_),
(td::uint64)(request.lt_),
(td::uint32)(request.utime_)),
promise.wrap([](lite_api_ptr<ton::lite_api::liteServer_blockHeader>&& header) {
const auto& id = header->id_;
return to_tonlib_api(*id);
//tonlib_api::make_object<tonlib_api::ton_blockIdExt>(
// ton::tonlib_api::ton_blockIdExt(id->workchain_, id->)
//);
}));
return td::Status::OK();
}
auto to_tonlib_api(const ton::lite_api::liteServer_transactionId& txid)
-> tonlib_api_ptr<tonlib_api::blocks_shortTxId> {
return tonlib_api::make_object<tonlib_api::blocks_shortTxId>(
txid.mode_, txid.account_.as_slice().str(), txid.lt_, txid.hash_.as_slice().str());
}
td::Status TonlibClient::do_request(const tonlib_api::blocks_getTransactions& request,
td::Promise<object_ptr<tonlib_api::blocks_transactions>>&& promise) {
TRY_RESULT(block, to_lite_api(*request.id_))
TRY_RESULT(account, to_bits256((*request.after_).account_, "account"));
auto after = ton::lite_api::make_object<ton::lite_api::liteServer_transactionId3>(account, (*request.after_).lt_);
client_.send_query(ton::lite_api::liteServer_listBlockTransactions(
std::move(block),
request.mode_,
request.count_,
std::move(after),
false,
false),
promise.wrap([](lite_api_ptr<ton::lite_api::liteServer_blockTransactions>&& bTxes) {
const auto& id = bTxes->id_;
//for (auto id : ids) {
tonlib_api::blocks_transactions r;
r.id_ = to_tonlib_api(*id);
r.req_count_ = bTxes->req_count_;
r.incomplete_ = bTxes->incomplete_;
for (auto& id: bTxes->ids_) {
//tonlib_api::blocks_shortTxId txid = tonlib_api::blocks_shortTxId(id->mode_, id->account_.as_slice().str(), id->lt_, id->hash_.as_slice().str());
//r.transactions_.push_back(txid);
r.transactions_.push_back(to_tonlib_api(*id));
}
return tonlib_api::make_object<tonlib_api::blocks_transactions>(std::move(r));
}));
return td::Status::OK();
}
td::Status TonlibClient::do_request(const tonlib_api::blocks_getBlockHeader& request,
td::Promise<object_ptr<tonlib_api::blocks_header>>&& promise) {
TRY_RESULT(block, to_lite_api(*request.id_))
client_.send_query(ton::lite_api::liteServer_getBlockHeader(
std::move(block),
0xffff),
promise.wrap([](lite_api_ptr<ton::lite_api::liteServer_blockHeader>&& hdr) {
auto blk_id = ton::create_block_id(hdr->id_);
auto R = vm::std_boc_deserialize(std::move(hdr->header_proof_));
tonlib_api::blocks_header header;
if (R.is_error()) {
LOG(WARNING) << "R.is_error() ";
} else {
auto root = R.move_as_ok();
try {
ton::RootHash vhash{root->get_hash().bits()};
auto virt_root = vm::MerkleProof::virtualize(root, 1);
if (virt_root.is_null()) {
LOG(WARNING) << "virt root is null";
} else {
std::vector<ton::BlockIdExt> prev;
ton::BlockIdExt mc_blkid;
bool after_split;
auto res = block::unpack_block_prev_blk_ext(virt_root, blk_id, prev, mc_blkid, after_split);
if (res.is_error()) {
LOG(WARNING) << "res.is_error() ";
} else {
block::gen::Block::Record blk;
block::gen::BlockInfo::Record info;
if (!(tlb::unpack_cell(virt_root, blk) && tlb::unpack_cell(blk.info, info))) {
LOG(WARNING) << "unpack failed";
} else {
header.id_ = to_tonlib_api(blk_id);
header.global_id_ = blk.global_id;
header.version_ = info.version;
header.after_merge_ = info.after_merge;
header.after_split_ = info.after_split;
header.before_split_ = info.before_split;
header.want_merge_ = info.want_merge;
header.want_split_ = info.want_split;
header.validator_list_hash_short_ = info.gen_validator_list_hash_short;
header.catchain_seqno_ = info.gen_catchain_seqno;
header.min_ref_mc_seqno_ = info.min_ref_mc_seqno;
header.start_lt_ = info.start_lt;
header.end_lt_ = info.end_lt;
header.vert_seqno_ = info.vert_seq_no;
if(!info.not_master) {
header.prev_key_block_seqno_ = info.prev_key_block_seqno;
}
for (auto id : prev) {
header.prev_blocks_.push_back(to_tonlib_api(id));
}
//if(info.before_split) {
//} else {
//}
return tonlib_api::make_object<tonlib_api::blocks_header>(std::move(header));
}
}
}
} catch (vm::VmError& err) {
auto E = err.as_status(PSLICE() << "error processing header for " << blk_id.to_str() << " :");
LOG(ERROR) << std::move(E);
} catch (vm::VmVirtError& err) {
auto E = err.as_status(PSLICE() << "error processing header for " << blk_id.to_str() << " :");
LOG(ERROR) << std::move(E);
} catch (...) {
LOG(WARNING) << "exception catched ";
}
}
return tonlib_api::make_object<tonlib_api::blocks_header>(std::move(header));
}));
return td::Status::OK();
}
template <class P>
td::Status TonlibClient::do_request(const tonlib_api::runTests& request, P&&) {
UNREACHABLE();
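The new liteserver-backed queries implemented above (blocks_getMasterchainInfo, blocks_getShards, blocks_lookupBlock, blocks_getTransactions, blocks_getBlockHeader) are ordinary tonlib_api requests, so a client drives them like any other tonlib call. A minimal sketch of building the request objects; how they are dispatched depends on the client wrapper in use, and 'info' stands for a previously received reply:

// Sketch only (not part of this commit): building the new block-level requests.
// 'info' is assumed to be a tonlib_api::blocks_masterchainInfo obtained earlier;
// the mode/count values for blocks_getTransactions are illustrative.
auto get_info = tonlib_api::make_object<tonlib_api::blocks_getMasterchainInfo>();
// ... dispatch get_info through the client, obtaining 'info' ...
auto blk = [&] {
  return tonlib_api::make_object<tonlib_api::ton_blockIdExt>(
      info->last_->workchain_, info->last_->shard_, info->last_->seqno_,
      info->last_->root_hash_, info->last_->file_hash_);
};
auto get_shards = tonlib_api::make_object<tonlib_api::blocks_getShards>(blk());
auto get_header = tonlib_api::make_object<tonlib_api::blocks_getBlockHeader>(blk());
auto get_txs = tonlib_api::make_object<tonlib_api::blocks_getTransactions>(
    blk(), /*mode=*/7, /*count=*/40,
    tonlib_api::make_object<tonlib_api::blocks_accountTransactionId>(std::string(32, '\0'), 0));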

View file

@ -321,6 +321,8 @@ class TonlibClient : public td::actor::Actor {
td::Status do_request(tonlib_api::pchan_unpackPromise& request,
td::Promise<object_ptr<tonlib_api::pchan_promise>>&& promise);
void do_dns_request(std::string name, td::int32 category, td::int32 ttl, td::optional<ton::BlockIdExt> block_id,
block::StdAddress address, td::Promise<object_ptr<tonlib_api::dns_resolved>>&& promise);
struct DnsFinishData {
@ -342,6 +344,17 @@ class TonlibClient : public td::actor::Actor {
td::Status do_request(tonlib_api::withBlock& request, td::Promise<object_ptr<tonlib_api::Object>>&& promise);
td::Status do_request(const tonlib_api::blocks_getMasterchainInfo& masterchain_info,
td::Promise<object_ptr<tonlib_api::blocks_masterchainInfo>>&& promise);
td::Status do_request(const tonlib_api::blocks_getShards& request,
td::Promise<object_ptr<tonlib_api::blocks_shards>>&& promise);
td::Status do_request(const tonlib_api::blocks_lookupBlock& block_header,
td::Promise<object_ptr<tonlib_api::ton_blockIdExt>>&& promise);
td::Status do_request(const tonlib_api::blocks_getTransactions& block_data,
td::Promise<object_ptr<tonlib_api::blocks_transactions>>&& promise);
td::Status do_request(const tonlib_api::blocks_getBlockHeader& request,
td::Promise<object_ptr<tonlib_api::blocks_header>>&& promise);
void proxy_request(td::int64 query_id, std::string data);
friend class TonlibQueryActor;

View file

@ -61,6 +61,7 @@
#include <cinttypes>
#include <iostream>
#include <map>
#include "git.h"
using tonlib_api::make_object;
@ -1095,7 +1096,7 @@ class TonlibCli : public td::actor::Actor {
std::vector<tonlib_api::object_ptr<tonlib_api::msg_message>> messages;
messages.push_back(
make_object<tonlib_api::msg_message>(channels_[pchan_id].to_address(), "", value,
make_object<tonlib_api::msg_dataRaw>(query->body_, query->init_state_)));
make_object<tonlib_api::msg_dataRaw>(query->body_, query->init_state_), -1));
auto action = make_object<tonlib_api::actionMsg>(std::move(messages), true);
send_query(
make_object<tonlib_api::createQuery>(addr.input_key(), std::move(addr.address), 60, std::move(action), nullptr),
@ -2216,7 +2217,7 @@ class TonlibCli : public td::actor::Actor {
data = make_object<tonlib_api::msg_dataText>(message.str());
}
messages.push_back(
make_object<tonlib_api::msg_message>(std::move(address.address), "", amount.nano, std::move(data)));
make_object<tonlib_api::msg_message>(std::move(address.address), "", amount.nano, std::move(data), -1));
return td::Status::OK();
};
@ -2308,6 +2309,10 @@ int main(int argc, char* argv[]) {
SET_VERBOSITY_LEVEL(VERBOSITY_NAME(FATAL) + verbosity);
return (verbosity >= 0 && verbosity <= 20) ? td::Status::OK() : td::Status::Error("verbosity must be 0..20");
});
p.add_option('V', "version", "show tonlib-cli build information", [&]() {
std::cout << "tonlib-cli build information: [ Commit: " << GitMetadata::CommitSHA1() << ", Date: " << GitMetadata::CommitDate() << "]\n";
std::exit(0);
});
p.add_checked_option('C', "config-force", "set lite server config, drop config related blockchain cache",
[&](td::Slice arg) {
TRY_RESULT(data, td::read_file_str(arg.str()));

View file

@ -5,17 +5,19 @@ if (NOT OPENSSL_FOUND)
endif()
add_executable(generate-random-id generate-random-id.cpp )
target_link_libraries(generate-random-id tl_api ton_crypto keys adnl)
target_link_libraries(generate-random-id tl_api ton_crypto keys adnl git)
target_include_directories(generate-random-id PUBLIC
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>/..)
add_executable(json2tlo json2tlo.cpp )
target_link_libraries(json2tlo tl_api ton_crypto keys )
target_link_libraries(json2tlo tl_api ton_crypto keys git)
target_include_directories(json2tlo PUBLIC
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>/..)
add_executable(pack-viewer pack-viewer.cpp )
target_link_libraries(pack-viewer tl_api ton_crypto keys validator tddb )
target_link_libraries(pack-viewer tl_api ton_crypto keys validator tddb)
target_include_directories(pack-viewer PUBLIC
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>/..)
install(TARGETS generate-random-id RUNTIME DESTINATION bin)

View file

@ -39,6 +39,7 @@
#include "td/utils/filesystem.h"
#include "keys/encryptor.h"
#include "keys/keys.hpp"
#include "git.h"
int main(int argc, char *argv[]) {
ton::PrivateKey pk;
@ -59,6 +60,10 @@ int main(int argc, char *argv[]) {
std::cout << sb.as_cslice().c_str();
std::exit(2);
});
p.add_option('V', "version", "shows generate-random-id build information", [&]() {
std::cout << "generate-random-id build information: [ Commit: " << GitMetadata::CommitSHA1() << ", Date: " << GitMetadata::CommitDate() << "]\n";
std::exit(0);
});
p.add_option('n', "name", "path to save private keys to", [&](td::Slice arg) { name = arg.str(); });
p.add_checked_option('k', "key", "path to private key to import", [&](td::Slice key) {
if (!pk.empty()) {

View file

@ -39,6 +39,7 @@
#include "tl/tl_json.h"
#include "auto/tl/ton_api.h"
#include "auto/tl/ton_api_json.h"
#include "git.h"
int main(int argc, char *argv[]) {
std::string in_f;
@ -51,6 +52,10 @@ int main(int argc, char *argv[]) {
p.add_option('i', "in", "input", [&](td::Slice key) { in_f = key.str(); });
p.add_option('o', "out", "output", [&](td::Slice key) { out_f = key.str(); });
p.add_option('r', "reverse", "read tlo, print json", [&]() { reverse_ = !reverse_; });
p.add_option('V', "version", "shows json2tlo build information", [&]() {
std::cout << "json2tlo build information: [ Commit: " << GitMetadata::CommitSHA1() << ", Date: " << GitMetadata::CommitDate() << "]\n";
std::exit(0);
});
p.add_option('h', "help", "prints_help", [&]() {
char b[10240];
td::StringBuilder sb(td::MutableSlice{b, 10000});

View file

@ -3,7 +3,7 @@ cmake_minimum_required(VERSION 3.0.2 FATAL_ERROR)
add_executable (validator-engine-console validator-engine-console.cpp
validator-engine-console.h validator-engine-console-query.cpp
validator-engine-console-query.h )
target_link_libraries(validator-engine-console tdutils tdactor adnllite tl_api tl_lite_api tl-lite-utils ton_crypto ton_block terminal)
target_link_libraries(validator-engine-console tdutils tdactor adnllite tl_api tl_lite_api tl-lite-utils ton_crypto ton_block terminal git)
install(TARGETS validator-engine-console RUNTIME DESTINATION bin)

View file

@ -59,6 +59,7 @@
#endif
#include <iostream>
#include <sstream>
#include "git.h"
int verbosity;
@ -256,6 +257,10 @@ int main(int argc, char* argv[]) {
std::cout << sb.as_cslice().c_str();
std::exit(2);
});
p.add_option('V', "version", "shows validator-engine-console build information", [&]() {
std::cout << "validator-engine-console build information: [ Commit: " << GitMetadata::CommitSHA1() << ", Date: " << GitMetadata::CommitDate() << "]\n";
std::exit(0);
});
p.add_checked_option('a', "address", "server address", [&](td::Slice arg) {
td::IPAddress addr;
TRY_STATUS(addr.init_host_port(arg.str()));

View file

@ -13,6 +13,6 @@ set(VALIDATOR_ENGINE_SOURCE
add_executable(validator-engine ${VALIDATOR_ENGINE_SOURCE})
target_link_libraries(validator-engine overlay tdutils tdactor adnl tl_api dht
rldp catchain validatorsession full-node validator ton_validator validator
fift-lib memprof ${JEMALLOC_LIBRARIES})
fift-lib memprof git ${JEMALLOC_LIBRARIES})
install(TARGETS validator-engine RUNTIME DESTINATION bin)

View file

@ -62,6 +62,7 @@
#include <sstream>
#include <cstdlib>
#include <set>
#include "git.h"
Config::Config() {
out_port = 3278;
@ -1323,6 +1324,9 @@ td::Status ValidatorEngine::load_global_config() {
for (auto seq : unsafe_catchains_) {
validator_options_.write().add_unsafe_resync_catchain(seq);
}
for (auto rot : unsafe_catchain_rotations_) {
validator_options_.write().add_unsafe_catchain_rotate(rot.first, rot.second.first, rot.second.second);
}
if (truncate_seqno_ > 0) {
validator_options_.write().truncate_db(truncate_seqno_);
}
@ -3276,7 +3280,7 @@ int main(int argc, char *argv[]) {
td::log_interface = td::default_log_interface;
};
LOG_STATUS(td::change_maximize_rlimit(td::RlimitType::nofile, 65536));
LOG_STATUS(td::change_maximize_rlimit(td::RlimitType::nofile, 786432));
std::vector<std::function<void()>> acts;
@ -3286,6 +3290,10 @@ int main(int argc, char *argv[]) {
int v = VERBOSITY_NAME(FATAL) + (td::to_integer<int>(arg));
SET_VERBOSITY_LEVEL(v);
});
p.add_option('V', "version", "shows validator-engine build information", [&]() {
std::cout << "validator-engine build information: [ Commit: " << GitMetadata::CommitSHA1() << ", Date: " << GitMetadata::CommitDate() << "]\n";
std::exit(0);
});
p.add_option('h', "help", "prints_help", [&]() {
char b[10240];
td::StringBuilder sb(td::MutableSlice{b, 10000});
@ -3359,6 +3367,18 @@ int main(int argc, char *argv[]) {
acts.push_back([&x, seq]() { td::actor::send_closure(x, &ValidatorEngine::add_unsafe_catchain, seq); });
return td::Status::OK();
});
p.add_checked_option(
'F', "unsafe-catchain-rotate", "use forceful and DANGEROUS catchain rotation", [&](td::Slice params) {
auto pos1 = params.find(':');
TRY_RESULT(b_seq, td::to_integer_safe<ton::BlockSeqno>(params.substr(0, pos1)));
params = params.substr(++pos1, params.size());
auto pos2 = params.find(':');
TRY_RESULT(cc_seq, td::to_integer_safe<ton::CatchainSeqno>(params.substr(0, pos2)));
params = params.substr(++pos2, params.size());
auto h = std::stoi(params.substr(0, params.size()).str());
acts.push_back([&x, b_seq, cc_seq, h]() { td::actor::send_closure(x, &ValidatorEngine::add_unsafe_catchain_rotation, b_seq, cc_seq, h); });
return td::Status::OK();
});
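The new option takes three colon-separated integers: the masterchain block seqno at which the rotation becomes active, the catchain seqno it applies to, and the replacement value handed to add_unsafe_catchain_rotation(). With purely illustrative numbers, the flag would be passed on the validator-engine command line as -F 1234567:42:2.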
td::uint32 threads = 7;
p.add_checked_option(
't', "threads", PSTRING() << "number of threads (default=" << threads << ")", [&](td::Slice fname) {

View file

@ -201,6 +201,7 @@ class ValidatorEngine : public td::actor::Actor {
ton::BlockSeqno truncate_seqno_{0};
std::set<ton::CatchainSeqno> unsafe_catchains_;
std::map<ton::BlockSeqno, std::pair<ton::CatchainSeqno, td::uint32>> unsafe_catchain_rotations_;
public:
static constexpr td::uint8 max_cat() {
@ -210,6 +211,9 @@ class ValidatorEngine : public td::actor::Actor {
void add_unsafe_catchain(ton::CatchainSeqno seq) {
unsafe_catchains_.insert(seq);
}
void add_unsafe_catchain_rotation(ton::BlockSeqno b_seqno, ton::CatchainSeqno cc_seqno, td::uint32 value) {
unsafe_catchain_rotations_.insert({b_seqno, {cc_seqno, value}});
}
void set_local_config(std::string str);
void set_global_config(std::string str);
void set_fift_dir(std::string str) {

View file

@ -53,6 +53,8 @@ struct ValidatorSessionOptions {
bool new_catchain_ids = false;
td::uint32 proto_version = 0;
td::Bits256 get_hash() const;
};

View file

@ -881,19 +881,26 @@ td::actor::ActorOwn<ValidatorSession> ValidatorSession::create(
}
td::Bits256 ValidatorSessionOptions::get_hash() const {
if (!new_catchain_ids) {
return create_hash_tl_object<ton_api::validatorSession_config>(
catchain_idle_timeout, catchain_max_deps, round_candidates, next_candidate_delay, round_attempt_duration,
max_round_attempts, max_block_size, max_collated_data_size);
if(!proto_version) {
if (!new_catchain_ids) {
return create_hash_tl_object<ton_api::validatorSession_config>(
catchain_idle_timeout, catchain_max_deps, round_candidates, next_candidate_delay, round_attempt_duration,
max_round_attempts, max_block_size, max_collated_data_size);
} else {
return create_hash_tl_object<ton_api::validatorSession_configNew>(
catchain_idle_timeout, catchain_max_deps, round_candidates, next_candidate_delay, round_attempt_duration,
max_round_attempts, max_block_size, max_collated_data_size, new_catchain_ids);
}
} else {
return create_hash_tl_object<ton_api::validatorSession_configNew>(
catchain_idle_timeout, catchain_max_deps, round_candidates, next_candidate_delay, round_attempt_duration,
max_round_attempts, max_block_size, max_collated_data_size, new_catchain_ids);
}
return create_hash_tl_object<ton_api::validatorSession_configVersioned>(
catchain_idle_timeout, catchain_max_deps, round_candidates, next_candidate_delay, round_attempt_duration,
max_round_attempts, max_block_size, max_collated_data_size, proto_version);
}
}
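In short, after this change the options hash is derived from validatorSession.config when proto_version is 0 and new_catchain_ids is false, from validatorSession.configNew when proto_version is 0 and new_catchain_ids is true, and from validatorSession.configVersioned whenever proto_version is non-zero.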
ValidatorSessionOptions::ValidatorSessionOptions(const ValidatorSessionConfig &conf) {
CHECK(!conf.proto_version);
proto_version = conf.proto_version;
catchain_idle_timeout = conf.catchain_idle_timeout;
catchain_max_deps = conf.catchain_max_deps;
max_block_size = conf.max_block_size;

View file

@ -960,7 +960,7 @@ void FullNodeShardImpl::got_neighbour_capabilities(adnl::AdnlNodeIdShort adnl_id
it->second.query_failed();
} else {
it->second.update_proto_version(*F.move_as_ok().get());
it->second.update_roundtrip(t);
it->second.query_success(t);
}
}

View file

@ -2298,6 +2298,10 @@ Ref<vm::Cell> Collator::create_ordinary_transaction(Ref<vm::Cell> msg_root) {
}
register_new_msgs(*trans);
update_max_lt(acc->last_trans_end_lt_);
// temporary patch to stop producing dangerous block
if (acc->status == block::Account::acc_nonexist) {
block_full_ = true;
}
return trans_root;
}
@ -2447,6 +2451,10 @@ int Collator::process_one_new_message(block::NewOutMsg msg, bool enqueue_only, R
if (!insert_out_msg(cb.finalize())) {
return -1;
}
// 6.5. check for temporary patch can be left here
if (block_full_) {
return 3;
}
// 7. check whether the block is full now
if (!block_limit_status_->fits(block::ParamLimits::cl_normal)) {
block_full_ = true;

View file

@ -1692,6 +1692,12 @@ void ValidatorManagerImpl::update_shards() {
auto exp_vec = last_masterchain_state_->get_shards();
auto config = last_masterchain_state_->get_consensus_config();
validatorsession::ValidatorSessionOptions opts{config};
td::uint32 threshold = 9407194;
bool force_group_id_upgrade = last_masterchain_seqno_ == threshold;
auto legacy_opts_hash = opts.get_hash();
if(last_masterchain_seqno_ >= threshold) { //TODO move to get_consensus_config()
opts.proto_version = 1;
}
auto opts_hash = opts.get_hash();
std::map<ShardIdFull, std::vector<BlockIdExt>> new_shards;
@ -1766,6 +1772,45 @@ void ValidatorManagerImpl::update_shards() {
BlockSeqno key_seqno = last_key_block_handle_->id().seqno();
if (force_group_id_upgrade) {
for (auto &desc : new_shards) {
auto shard = desc.first;
auto prev = desc.second;
for (auto &p : prev) {
CHECK(p.is_valid());
}
auto val_set = last_masterchain_state_->get_validator_set(shard);
auto validator_id = get_validator(shard, val_set);
if (!validator_id.is_zero()) {
auto legacy_val_group_id = get_validator_set_id(shard, val_set, legacy_opts_hash, key_seqno, opts);
auto val_group_id = get_validator_set_id(shard, val_set, opts_hash, key_seqno, opts);
auto it = validator_groups_.find(legacy_val_group_id);
if (it != validator_groups_.end()) {
new_validator_groups_.emplace(val_group_id, std::move(it->second));
} else {
auto it2 = next_validator_groups_.find(legacy_val_group_id);
if (it2 != next_validator_groups_.end()) {
if (!it2->second.empty()) {
td::actor::send_closure(it2->second, &ValidatorGroup::start, prev, last_masterchain_block_id_,
last_masterchain_state_->get_unix_time());
}
new_validator_groups_.emplace(val_group_id, std::move(it2->second));
} else {
auto G = create_validator_group(val_group_id, shard, val_set, opts, started_);
if (!G.empty()) {
td::actor::send_closure(G, &ValidatorGroup::start, prev, last_masterchain_block_id_,
last_masterchain_state_->get_unix_time());
}
new_validator_groups_.emplace(val_group_id, std::move(G));
}
}
}
}
}
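The intent of the block above: at masterchain seqno 9407194 the session options gain proto_version = 1, which changes opts.get_hash() and therefore every validator group id. Exactly at that threshold the existing (or queued) validator groups are re-registered under the new group id computed from the new hash, rather than being torn down and recreated, so validation continues across the protocol-version bump.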
if (allow_validate_) {
for (auto &desc : new_shards) {
auto shard = desc.first;

View file

@ -146,6 +146,7 @@ struct ValidatorManagerOptionsImpl : public ValidatorManagerOptions {
unsafe_catchains_.insert(seqno);
}
void add_unsafe_catchain_rotate(BlockSeqno seqno, CatchainSeqno cc_seqno, td::uint32 value) override {
VLOG(INFO) << "Add unsafe catchain rotation: Master block seqno " << seqno<<" Catchain seqno " << cc_seqno << " New value "<< value;
unsafe_catchain_rotates_[cc_seqno] = std::make_pair(seqno, value);
}
void truncate_db(BlockSeqno seqno) override {