Mirror of https://github.com/ton-blockchain/ton (synced 2025-03-09 15:40:10 +00:00)
Merge branch 'testnet' into accelerator
Commit 8c1692fd30
8 changed files with 178 additions and 119 deletions
@@ -843,23 +843,27 @@ td::Status GetOverlaysStatsQuery::receive(td::BufferSlice data) {
    td::StringBuilder sb;
    sb << "overlay_id: " << s->overlay_id_ << " adnl_id: " << s->adnl_id_ << " scope: " << s->scope_ << "\n";
    sb << " nodes:\n";

-   td::uint32 overlay_t_out_bytes = 0;
-   td::uint32 overlay_t_out_pckts = 0;
-   td::uint32 overlay_t_in_bytes = 0;
-   td::uint32 overlay_t_in_pckts = 0;
+   auto print_traffic = [&](const char *name, const char *indent,
+                            ton::tl_object_ptr<ton::ton_api::engine_validator_overlayStatsTraffic> &t) {
+     sb << indent << name << ":\n"
+        << indent << " out: " << t->t_out_bytes_ << " bytes/sec, " << t->t_out_pckts_ << " pckts/sec\n"
+        << indent << " in: " << t->t_in_bytes_ << " bytes/sec, " << t->t_in_pckts_ << " pckts/sec\n";
+   };
    for (auto &n : s->nodes_) {
-     sb << " adnl_id: " << n->adnl_id_ << " ip_addr: " << n->ip_addr_ << " broadcast_errors: " << n->bdcst_errors_ << " fec_broadcast_errors: " << n->fec_bdcst_errors_ << " last_in_query: " << n->last_in_query_ << " (" << time_to_human(n->last_in_query_) << ")" << " last_out_query: " << n->last_out_query_ << " (" << time_to_human(n->last_out_query_) << ")" << "\n throughput:\n out: " << n->t_out_bytes_ << " bytes/sec, " << n->t_out_pckts_ << " pckts/sec\n in: " << n->t_in_bytes_ << " bytes/sec, " << n->t_in_pckts_ << " pckts/sec\n";
-     overlay_t_out_bytes += n->t_out_bytes_;
-     overlay_t_out_pckts += n->t_out_pckts_;
-     overlay_t_in_bytes += n->t_in_bytes_;
-     overlay_t_in_pckts += n->t_in_pckts_;
+     sb << " adnl_id: " << n->adnl_id_ << " ip_addr: " << n->ip_addr_ << " broadcast_errors: " << n->bdcst_errors_
+        << " fec_broadcast_errors: " << n->fec_bdcst_errors_ << " last_in_query: " << n->last_in_query_ << " ("
+        << time_to_human(n->last_in_query_) << ")"
+        << " last_out_query: " << n->last_out_query_ << " (" << time_to_human(n->last_out_query_) << ")"
+        << "\n";
+     sb << " is_neighbour: " << n->is_neighbour_ << " is_alive: " << n->is_alive_
+        << " node_flags: " << n->node_flags_ << "\n";
+     print_traffic("throughput", " ", n->traffic_);
+     print_traffic("throughput (responses only)", " ", n->traffic_responses_);
    }
-   sb << " total_throughput:\n out: " << overlay_t_out_bytes << " bytes/sec, " << overlay_t_out_pckts << " pckts/sec\n in: " << overlay_t_in_bytes << " bytes/sec, " << overlay_t_in_pckts << " pckts/sec\n";
+   print_traffic("total_throughput", " ", s->total_traffic_);
+   print_traffic("total_throughput (responses only)", " ", s->total_traffic_responses_);

    sb << " stats:\n";
    for (auto &t : s->stats_) {
      sb << " " << t->key_ << "\t" << t->value_ << "\n";
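The hunk above replaces the hand-rolled per-node and per-overlay throughput lines with a single print_traffic lambda that captures the local td::StringBuilder by reference and is fed either a node's traffic_/traffic_responses_ object or the overlay-wide total_traffic_/total_traffic_responses_. A minimal, self-contained sketch of that pattern, with std::ostringstream and a hypothetical Traffic struct standing in for td::StringBuilder and the TL object:

    #include <iostream>
    #include <sstream>

    // Hypothetical stand-in for ton_api::engine_validator_overlayStatsTraffic.
    struct Traffic {
      unsigned out_bytes, out_pckts, in_bytes, in_pckts;
    };

    int main() {
      std::ostringstream sb;  // stand-in for td::StringBuilder

      // Capture sb by reference so every call appends to the same buffer.
      auto print_traffic = [&](const char *name, const char *indent, const Traffic &t) {
        sb << indent << name << ":\n"
           << indent << " out: " << t.out_bytes << " bytes/sec, " << t.out_pckts << " pckts/sec\n"
           << indent << " in: " << t.in_bytes << " bytes/sec, " << t.in_pckts << " pckts/sec\n";
      };

      Traffic node{100, 5, 200, 7}, total{1000, 50, 2000, 70};
      print_traffic("throughput", "    ", node);       // per-node block, deeper indent
      print_traffic("total_throughput", "  ", total);  // overlay-wide block
      std::cout << sb.str();
      return 0;
    }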
@@ -885,70 +889,64 @@ td::Status GetOverlaysStatsJsonQuery::receive(td::BufferSlice data) {
  TRY_RESULT_PREFIX(f, ton::fetch_tl_object<ton::ton_api::engine_validator_overlaysStats>(data.as_slice(), true),
                    "received incorrect answer: ");
  std::ofstream sb(file_name_);

  sb << "[\n";
  bool rtail = false;
  for (auto &s : f->overlays_) {
-   if(rtail) {
+   if (rtail) {
      sb << ",\n";
    } else {
      rtail = true;
    }

-   sb << "{\n \"overlay_id\": \"" << s->overlay_id_ << "\",\n \"adnl_id\": \"" << s->adnl_id_ << "\",\n \"scope\": " << s->scope_ << ",\n";
+   sb << "{\n \"overlay_id\": \"" << s->overlay_id_ << "\",\n \"adnl_id\": \"" << s->adnl_id_
+      << "\",\n \"scope\": " << s->scope_ << ",\n";
    sb << " \"nodes\": [\n";

-   td::uint32 overlay_t_out_bytes = 0;
-   td::uint32 overlay_t_out_pckts = 0;
-   td::uint32 overlay_t_in_bytes = 0;
-   td::uint32 overlay_t_in_pckts = 0;
+   auto print_traffic = [&](const char *name,
+                            ton::tl_object_ptr<ton::ton_api::engine_validator_overlayStatsTraffic> &t) {
+     sb << "\"" << name << "\": { \"out_bytes_sec\": " << t->t_out_bytes_ << ", \"out_pckts_sec\": " << t->t_out_pckts_
+        << ", \"in_bytes_sec\": " << t->t_in_bytes_ << ", \"in_pckts_sec\": " << t->t_in_pckts_ << " }";
+   };

    bool tail = false;
    for (auto &n : s->nodes_) {
-     if(tail) {
+     if (tail) {
        sb << ",\n";
      } else {
        tail = true;
      }

-     sb << " {\n";
-     sb << R"( "adnl_id": ")" << n->adnl_id_ << "\",\n";
-     sb << R"( "ip_addr": ")" << n->ip_addr_ << "\",\n";
-     sb << R"( "is_neighbour": )" << (n->is_neighbour_ ? "true" : "false") << ",\n";
-     sb << R"( "is_alive": )" << (n->is_alive_ ? "true" : "false") << ",\n";
-     sb << R"( "node_flags": )" << n->node_flags_ << ",\n";
-     sb << R"( "broadcast_errors": )" << n->bdcst_errors_ << ",\n";
-     sb << R"( "fec_broadcast_errors": )" << n->fec_bdcst_errors_ << ",\n";
-     sb << R"( "last_in_query_unix": )" << n->last_in_query_ << ",\n";
-     sb << R"( "last_in_query_human": ")" << time_to_human(n->last_in_query_) << "\",\n";
-     sb << R"( "last_out_query_unix": )" << n->last_out_query_ << ",\n";
-     sb << R"( "last_out_query_human": ")" << time_to_human(n->last_out_query_) << "\",\n\n";
-     sb << R"( "throughput": { "out_bytes_sec": )" << n->t_out_bytes_ << R"(, "out_pckts_sec": )" << n->t_out_pckts_
-        << R"(, "in_bytes_sec": )" << n->t_in_bytes_ << R"(, "in_pckts_sec": )" << n->t_in_pckts_ << " }\n";
-     sb << " }";
-     overlay_t_out_bytes += n->t_out_bytes_;
-     overlay_t_out_pckts += n->t_out_pckts_;
-     overlay_t_in_bytes += n->t_in_bytes_;
-     overlay_t_in_pckts += n->t_in_pckts_;
+     sb << " {\n \"adnl_id\": \"" << n->adnl_id_ << "\",\n \"ip_addr\": \"" << n->ip_addr_
+        << "\",\n \"broadcast_errors\": " << n->bdcst_errors_
+        << ",\n \"fec_broadcast_errors\": " << n->fec_bdcst_errors_
+        << ",\n \"last_in_query_unix\": " << n->last_in_query_ << ",\n \"last_in_query_human\": \""
+        << time_to_human(n->last_in_query_) << "\",\n"
+        << " \"last_out_query_unix\": " << n->last_out_query_ << ",\n \"last_out_query_human\": \""
+        << time_to_human(n->last_out_query_) << "\",\n"
+        << "\n ";
+     print_traffic("throughput", n->traffic_);
+     sb << ",\n ";
+     print_traffic("throughput_responses", n->traffic_responses_);
+     sb << "\n }";
    }
-   sb << " ],\n";
+   sb << " ],\n ";
+   print_traffic("total_throughput", s->total_traffic_);
+   sb << ",\n ";
+   print_traffic("total_throughput_responses", s->total_traffic_responses_);
+   sb << ",\n";

-   sb << " \"total_throughput\": { \"out_bytes_sec\": " << overlay_t_out_bytes
-      << ", \"out_pckts_sec\": " << overlay_t_out_pckts << ", \"in_bytes_sec\": " << overlay_t_in_bytes
-      << ", \"in_pckts_sec\": " << overlay_t_in_pckts << " },\n";

    sb << " \"stats\": {\n";

    tail = false;
    for (auto &t : s->stats_) {
-     if(tail) {
+     if (tail) {
        sb << ",\n";
      } else {
        tail = true;
      }

      sb << " \"" << t->key_ << "\": \"" << t->value_ << "\"";
    }
    sb << "\n }";
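All three loops in the JSON query (overlays, nodes, stats) guard the separator with the same rtail / tail flag: a comma is written before every element except the first, so the streamed output never ends with a trailing comma and stays valid JSON. A minimal sketch of that pattern, with a hypothetical node list and std::ostringstream standing in for the std::ofstream opened on file_name_:

    #include <iostream>
    #include <sstream>
    #include <string>
    #include <utility>
    #include <vector>

    int main() {
      // Illustrative data only; the real query iterates s->nodes_ from the TL response.
      std::vector<std::pair<std::string, int>> nodes = {{"adnl1", 3}, {"adnl2", 0}};

      std::ostringstream sb;  // the query writes to std::ofstream(file_name_) instead
      sb << "[\n";
      bool tail = false;  // becomes true once the first element has been written
      for (auto &n : nodes) {
        if (tail) {
          sb << ",\n";  // separator goes before every element except the first
        } else {
          tail = true;
        }
        sb << "  { \"adnl_id\": \"" << n.first << "\", \"broadcast_errors\": " << n.second << " }";
      }
      sb << "\n]\n";
      std::cout << sb.str();  // prints a valid JSON array
      return 0;
    }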
@@ -966,7 +964,7 @@ td::Status GetOverlaysStatsJsonQuery::receive(td::BufferSlice data) {
  }
  sb << "]\n";
  sb << std::flush;

  td::TerminalIO::output(std::string("wrote stats to " + file_name_ + "\n"));
  return td::Status::OK();
}