Mirror of https://github.com/ton-blockchain/ton (synced 2025-03-09 15:40:10 +00:00)
Allow constants with the same name and value (#462)
Co-authored-by: legaii <jgates.ardux@gmail.com>
commit 3b1d33f543
parent 2512f0287b
2 changed files with 30 additions and 6 deletions
@@ -254,9 +254,7 @@ void parse_const_decl(Lexer& lex) {
   if (!sym_def) {
     lex.cur().error_at("cannot define global symbol `", "`");
   }
-  if (sym_def->value) {
-    lex.cur().error_at("global symbol `", "` already exists");
-  }
+  Lexem ident = lex.cur();
   lex.next();
   if (lex.tp() != '=') {
     lex.cur().error_at("expected = instead of ", "");
@@ -273,10 +271,11 @@ void parse_const_decl(Lexer& lex) {
   if ((wanted_type != Expr::_None) && (x->cls != wanted_type)) {
     lex.cur().error("expression type does not match wanted type");
   }
+  SymValConst* new_value = nullptr;
   if (x->cls == Expr::_Const) { // Integer constant
-    sym_def->value = new SymValConst{const_cnt++, x->intval};
+    new_value = new SymValConst{const_cnt++, x->intval};
   } else if (x->cls == Expr::_SliceConst) { // Slice constant (string)
-    sym_def->value = new SymValConst{const_cnt++, x->strval};
+    new_value = new SymValConst{const_cnt++, x->strval};
   } else if (x->cls == Expr::_Apply) {
     code.emplace_back(loc, Op::_Import, std::vector<var_idx_t>());
     auto tmp_vars = x->pre_compile(code);
@@ -304,10 +303,20 @@ void parse_const_decl(Lexer& lex) {
     if (op.origin.is_null() || !op.origin->is_valid()) {
       lex.cur().error("precompiled expression did not result in a valid integer constant");
     }
-    sym_def->value = new SymValConst{const_cnt++, op.origin};
+    new_value = new SymValConst{const_cnt++, op.origin};
   } else {
     lex.cur().error("integer or slice literal or constant expected");
   }
+  if (sym_def->value) {
+    SymValConst* old_value = dynamic_cast<SymValConst*>(sym_def->value);
+    Keyword new_type = new_value->get_type();
+    if (!old_value || old_value->get_type() != new_type ||
+        (new_type == _Int && *old_value->get_int_value() != *new_value->get_int_value()) ||
+        (new_type == _Slice && old_value->get_str_value() != new_value->get_str_value())) {
+      ident.error_at("global symbol `", "` already exists");
+    }
+  }
+  sym_def->value = new_value;
 }
 
 FormalArgList parse_formal_args(Lexer& lex) {
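The net effect of the change above: instead of rejecting any redeclaration outright, parse_const_decl now builds the new constant value first and only reports `global symbol ... already exists` when an existing constant has a different kind or a different value. The following is a minimal standalone sketch of that acceptance rule; ConstKind, ConstVal and redeclaration_allowed are simplified stand-ins invented for illustration, not the compiler's Keyword/SymValConst types.

// Standalone sketch only: simplified stand-in types, with long long in place of
// the compiler's big-integer values.
#include <cassert>
#include <string>

enum class ConstKind { Int, Slice };

struct ConstVal {
  ConstKind kind;
  long long int_value;     // meaningful only when kind == ConstKind::Int
  std::string str_value;   // meaningful only when kind == ConstKind::Slice
};

// A redeclaration is accepted only when the existing constant has the same
// kind and the same value; anything else is reported as a duplicate symbol.
bool redeclaration_allowed(const ConstVal& old_value, const ConstVal& new_value) {
  if (old_value.kind != new_value.kind) {
    return false;
  }
  if (new_value.kind == ConstKind::Int) {
    return old_value.int_value == new_value.int_value;
  }
  return old_value.str_value == new_value.str_value;
}

int main() {
  ConstVal x5{ConstKind::Int, 5, ""};
  ConstVal x5_again{ConstKind::Int, 5, ""};
  ConstVal x6{ConstKind::Int, 6, ""};
  ConstVal s{ConstKind::Slice, 0, "abacaba"};

  assert(redeclaration_allowed(x5, x5_again)); // same kind and value: accepted
  assert(!redeclaration_allowed(x5, x6));      // value differs: still an error
  assert(!redeclaration_allowed(x5, s));       // kind differs: still an error
  return 0;
}

Redeclarations with an identical kind and value pass the check, which is exactly what the new test file below relies on.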
crypto/func/test/co2.fc (new file, 15 lines)
@@ -0,0 +1,15 @@
+const int x = 5;
+const slice s = "abacaba";
+const int y = 3;
+const slice s = "abacaba";
+const int x = 5;
+const int z = 4, z = 4;
+
+int sdeq (slice s1, slice s2) asm "SDEQ";
+
+() main() {
+    throw_unless(101, x == 5);
+    throw_unless(102, y == 3);
+    throw_unless(103, z == 4);
+    throw_unless(104, sdeq(s, "abacaba"));
+}
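Every duplicated declaration in co2.fc repeats the original value exactly (x = 5, s = "abacaba", z = 4), so the whole file compiles under the new rule; judging by the comparison in parse_const_decl, a mismatching redeclaration such as `const int x = 6;` after `const int x = 5;` would still be rejected with the `already exists` error.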