Barretenberg
The ZK-SNARK library at the core of Aztec
Loading...
Searching...
No Matches
instr_fetching.test.cpp
Go to the documentation of this file.
1#include <gmock/gmock.h>
2#include <gtest/gtest.h>
3
4#include <cstdint>
5#include <memory>
6#include <vector>
7
31
32namespace bb::avm2::constraining {
33namespace {
34
35using tracegen::BytecodeTraceBuilder;
36using tracegen::PrecomputedTraceBuilder;
37using tracegen::RangeCheckTraceBuilder;
38using tracegen::TestTraceContainer;
39
41using C = Column;
42
43using instr_fetching = instr_fetching<FF>;
44
45using simulation::BytecodeDecompositionEvent;
47using simulation::Instruction;
48using simulation::InstructionFetchingEvent;
50using simulation::RangeCheckEvent;
51
52TEST(InstrFetchingConstrainingTest, EmptyRow)
53{
54 check_relation<instr_fetching>(testing::empty_trace());
55}
56
57// Basic positive test with a hardcoded bytecode for ADD_8
58TEST(InstrFetchingConstrainingTest, Add8WithTraceGen)
59{
60    TestTraceContainer trace;
61    BytecodeTraceBuilder builder;
62    PrecomputedTraceBuilder precomputed_builder;
63
    // Hand-built ADD_8 instruction: addressing mode 3 and three 8-bit operands.
64    Instruction add_8_instruction = {
65        .opcode = WireOpCode::ADD_8,
66        .addressing_mode = 3,
67        .operands = { Operand::from<uint8_t>(0x34), Operand::from<uint8_t>(0x35), Operand::from<uint8_t>(0x36) },
68    };
69
70    std::vector<uint8_t> bytecode = add_8_instruction.serialize();
71
    // NOTE(review): original line 75 (presumably the .bytecode initializer closing the
    // event braces) is missing from this listing — extraction artifact; verify upstream.
72    builder.process_instruction_fetching({ { .bytecode_id = 1,
73                                             .pc = 0,
74                                             .instruction = add_8_instruction,
76                                           trace);
77    precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.
78
    // One row for the fetched instruction plus the first row.
79    EXPECT_EQ(trace.get_num_rows(), 2);
80    check_relation<instr_fetching>(trace);
81}
82
83// Basic positive test with a hardcoded bytecode for ECADD
84// Cover the longest amount of operands.
85TEST(InstrFetchingConstrainingTest, EcaddWithTraceGen)
86{
87    TestTraceContainer trace;
88    BytecodeTraceBuilder builder;
89    PrecomputedTraceBuilder precomputed_builder;
90
    // ECADD exercises the longest operand list: seven 16-bit operands.
91    Instruction ecadd_instruction = {
92        .opcode = WireOpCode::ECADD,
93        .addressing_mode = 0x1f1f,
94        .operands = { Operand::from<uint16_t>(0x1279),
95                      Operand::from<uint16_t>(0x127a),
96                      Operand::from<uint16_t>(0x127b),
97                      Operand::from<uint16_t>(0x127c),
98                      Operand::from<uint16_t>(0x127d),
99                      Operand::from<uint16_t>(0x127e),
100                      Operand::from<uint16_t>(0x127f) },
101    };
102
103    std::vector<uint8_t> bytecode = ecadd_instruction.serialize();
    // NOTE(review): original line 107 (presumably the .bytecode initializer closing the
    // event braces) is missing from this listing — extraction artifact; verify upstream.
104    builder.process_instruction_fetching({ { .bytecode_id = 1,
105                                           .pc = 0,
106                                           .instruction = ecadd_instruction,
108                                         trace);
109    precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.
110
    // One row for the fetched instruction plus the first row.
111    EXPECT_EQ(trace.get_num_rows(), 2);
112    check_relation<instr_fetching>(trace);
113}
114
115// Helper routine generating a vector of instruction fetching events for each
116// opcode.
117std::vector<InstructionFetchingEvent> gen_instr_events_each_opcode()
118{
119    std::vector<uint8_t> bytecode;
120    std::vector<Instruction> instructions;
121    constexpr auto num_opcodes = static_cast<size_t>(WireOpCode::LAST_OPCODE_SENTINEL);
122    instructions.reserve(num_opcodes);
    // NOTE(review): original line 123 is missing from this listing — presumably the
    // declaration of pc_positions used below. Verify against the repository.
124
    // Serialize one random instruction per wire opcode, recording each instruction's
    // starting pc within the concatenated bytecode.
125    for (size_t i = 0; i < num_opcodes; i++) {
126        pc_positions.at(i) = static_cast<uint32_t>(bytecode.size());
127        const auto instr = testing::random_instruction(static_cast<WireOpCode>(i));
128        instructions.emplace_back(instr);
129        const auto instruction_bytes = instr.serialize();
130        bytecode.insert(bytecode.end(),
131                        std::make_move_iterator(instruction_bytes.begin()),
132                        std::make_move_iterator(instruction_bytes.end()));
133    }
134
135    const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(std::move(bytecode));
136    // Always use *bytecode_ptr from now on instead of bytecode as this one was moved.
137
    // NOTE(review): original line 138 is missing — presumably the declaration of
    // instr_events used below. Verify against the repository.
139    instr_events.reserve(num_opcodes);
    // One event per opcode; all events share the same bytecode (id 1).
140    for (size_t i = 0; i < num_opcodes; i++) {
141        instr_events.emplace_back(InstructionFetchingEvent{
142            .bytecode_id = 1, .pc = pc_positions.at(i), .instruction = instructions.at(i), .bytecode = bytecode_ptr });
143    }
144    return instr_events;
145}
146
147// Positive test for each opcode. We assume that decode instruction is working correctly.
148// It works as long as the relations are not constraining the correct range for TAG nor indirect.
149TEST(InstrFetchingConstrainingTest, EachOpcodeWithTraceGen)
150{
151 TestTraceContainer trace;
152 BytecodeTraceBuilder builder;
153 PrecomputedTraceBuilder precomputed_builder;
154
155 builder.process_instruction_fetching(gen_instr_events_each_opcode(), trace);
156 precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.
157
158 constexpr auto num_opcodes = static_cast<size_t>(WireOpCode::LAST_OPCODE_SENTINEL);
159 EXPECT_EQ(trace.get_num_rows(), num_opcodes + 1);
160 check_relation<instr_fetching>(trace);
161}
162
163// Negative test about decomposition of operands. We mutate correct operand values in the trace.
164// This also covers wrong operands which are not "involved" by the instruction.
165// We perform this for a random instruction for opcodes: REVERT_16, CAST_8, TORADIXBE
166TEST(InstrFetchingConstrainingTest, NegativeWrongOperand)
167{
168    BytecodeTraceBuilder builder;
169    PrecomputedTraceBuilder precomputed_builder;
170
    // NOTE(review): original line 171 (the `opcodes` vector iterated below) and lines
    // 173-180 (the sub-relation identifiers) are missing from this listing — extraction
    // artifact; verify against the repository.
172    std::vector<size_t> sub_relations = {
181    };
182
    // One column per mutated operand; index i pairs with sub_relations.at(i).
183    constexpr std::array<C, 8> operand_cols = {
184        C::instr_fetching_addressing_mode,
185        C::instr_fetching_op1,
186        C::instr_fetching_op2,
187        C::instr_fetching_op3,
188        C::instr_fetching_op4,
189        C::instr_fetching_op5,
190        C::instr_fetching_op6,
191        C::instr_fetching_op7,
192    };
193
194    for (const auto& opcode : opcodes) {
195        TestTraceContainer trace;
196        const auto instr = testing::random_instruction(opcode);
197        builder.process_instruction_fetching(
198            { { .bytecode_id = 1,
199                .pc = 0,
200                .instruction = instr,
201                .bytecode = std::make_shared<std::vector<uint8_t>>(instr.serialize()) } },
202            trace);
203        precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.
204
        // Sanity check: the unmutated trace satisfies the relation.
205        check_relation<instr_fetching>(trace);
206
207        EXPECT_EQ(trace.get_num_rows(), 2);
208
        // Mutate each operand column in turn and expect the paired sub-relation to fail.
209        for (size_t i = 0; i < operand_cols.size(); i++) {
210            auto mutated_trace = trace;
211            const FF mutated_operand = trace.get(operand_cols.at(i), 0) + 1; // Mutate to value + 1
212            mutated_trace.set(operand_cols.at(i), 0, mutated_operand);
213            EXPECT_THROW_WITH_MESSAGE(check_relation<instr_fetching>(mutated_trace, sub_relations.at(i)),
214                                      instr_fetching::get_subrelation_label(sub_relations.at(i)));
215        }
216    }
217}
218
219// Positive test for interaction with instruction spec table using same events as for the test
220// EachOpcodeWithTraceGen, i.e., one event/row is generated per wire opcode.
221// It works as long as the relations are not constraining the correct range for TAG nor indirect.
222TEST(InstrFetchingConstrainingTest, WireInstructionSpecInteractions)
223{
224    TestTraceContainer trace;
225    BytecodeTraceBuilder bytecode_builder;
226    PrecomputedTraceBuilder precomputed_builder;
227
    // NOTE(review): original lines 228-229 are missing from this listing (likely
    // comment lines) — extraction artifact; verify against the repository.
230    bytecode_builder.process_instruction_fetching(gen_instr_events_each_opcode(), trace);
231    precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.
232
233    EXPECT_EQ(trace.get_num_rows(), 1 << 8); // 2^8 for selector against wire_instruction_spec
234
    // Verify both the lookup into the wire instruction spec and the base relation.
235    check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_wire_instruction_info_settings>(trace);
236    check_relation<instr_fetching>(trace);
237}
238
239std::vector<RangeCheckEvent> gen_range_check_events(const std::vector<InstructionFetchingEvent>& instr_events)
240{
241 std::vector<RangeCheckEvent> range_check_events;
242 range_check_events.reserve(instr_events.size());
243
244 for (const auto& instr_event : instr_events) {
245 range_check_events.emplace_back(RangeCheckEvent{
246 .value =
247 (instr_event.error.has_value() && instr_event.error == InstrDeserializationEventError::PC_OUT_OF_RANGE)
248 ? instr_event.pc - instr_event.bytecode->size()
249 : instr_event.bytecode->size() - instr_event.pc - 1,
250 .num_bits = AVM_PC_SIZE_IN_BITS,
251 });
252 }
253 return range_check_events;
254}
255
256// Positive test for the interaction with bytecode decomposition table.
257// One event/row is generated per wire opcode (same as for test WireInstructionSpecInteractions).
258TEST(InstrFetchingConstrainingTest, BcDecompositionInteractions)
259{
260    TestTraceContainer trace;
261    BytecodeTraceBuilder bytecode_builder;
262    PrecomputedTraceBuilder precomputed_builder;
263
    // All events from gen_instr_events_each_opcode() share one bytecode, so a single
    // decomposition event (taken from event 0) covers them all.
264    const auto instr_fetch_events = gen_instr_events_each_opcode();
265    bytecode_builder.process_instruction_fetching(instr_fetch_events, trace);
266    bytecode_builder.process_decomposition({ {
267                                               .bytecode_id = instr_fetch_events.at(0).bytecode_id,
268                                               .bytecode = instr_fetch_events.at(0).bytecode,
269                                           } },
270                                           trace);
271    precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.
272
    // NOTE(review): original lines 274-275 are missing from this listing — presumably
    // the lookup settings template argument and the call's closing — extraction artifact.
273    check_interaction<BytecodeTraceBuilder,
276
277    // BC Decomposition trace is the longest here.
278    EXPECT_EQ(trace.get_num_rows(), instr_fetch_events.at(0).bytecode->size() + 1);
279
280    check_relation<instr_fetching>(trace);
281}
282
// Helper: populates a trace from the given events, then checks the instruction-fetching
// relation together with its interactions (including range checks).
283void check_all(const std::vector<InstructionFetchingEvent>& instr_events,
284               const std::vector<RangeCheckEvent>& range_check_events,
    // NOTE(review): original line 285 is missing — presumably the decomposition_events
    // parameter used below. Lines 292-295 and 302-307 are missing too (extraction artifact).
286{
287    TestTraceContainer trace;
288    BytecodeTraceBuilder bytecode_builder;
289    PrecomputedTraceBuilder precomputed_builder;
290    RangeCheckTraceBuilder range_check_builder;
291
296    bytecode_builder.process_instruction_fetching(instr_events, trace);
297    bytecode_builder.process_decomposition(decomposition_events, trace);
298    range_check_builder.process(range_check_events, trace);
299    precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.
300
301    check_interaction<BytecodeTraceBuilder,
308
309    EXPECT_EQ(trace.get_num_rows(), 1 << 16); // 2^16 for range checks
310
311    check_relation<instr_fetching>(trace);
312}
313
// Helper: same as check_all but without processing/checking range-check events,
// for error cases whose events carry no range-check obligation.
314void check_without_range_check(const std::vector<InstructionFetchingEvent>& instr_events,
    // NOTE(review): original line 315 is missing — presumably the decomposition_events
    // parameter used below. Lines 321-323 and 329-333 are missing too (extraction artifact).
316{
317    TestTraceContainer trace;
318    BytecodeTraceBuilder bytecode_builder;
319    PrecomputedTraceBuilder precomputed_builder;
320
324    bytecode_builder.process_instruction_fetching(instr_events, trace);
325    bytecode_builder.process_decomposition(decomposition_events, trace);
326    precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.
327
328    check_interaction<BytecodeTraceBuilder,
334
335    EXPECT_EQ(trace.get_num_rows(), 1 << 8); // 2^8 for range checks
336
337    check_relation<instr_fetching>(trace);
338}
339
340// Positive test with 5 five bytecodes and bytecode_id = 0,1,2,3,4
341// Bytecode i is generated by truncating instr_fetch_events to i * 6 instructions.
342// Check relations and all interactions.
343TEST(InstrFetchingConstrainingTest, MultipleBytecodes)
344{
345    const auto instr_fetch_events = gen_instr_events_each_opcode();
346    constexpr size_t num_of_bytecodes = 5;
    // NOTE(review): original lines 347-348 are missing — presumably the declarations of
    // instr_events and decomposition_events used below. Extraction artifact; verify upstream.
349
    // Bytecode i consists of the first i * 6 instructions; bytecode 0 is empty.
350    for (size_t i = 0; i < num_of_bytecodes; i++) {
351        std::vector<uint8_t> bytecode;
352        const auto num_of_instr = i * 6;
353
354        for (size_t j = 0; j < num_of_instr; j++) {
355            const auto& instr = instr_fetch_events.at(j).instruction;
356            const auto instruction_bytes = instr.serialize();
357            bytecode.insert(bytecode.end(),
358                            std::make_move_iterator(instruction_bytes.begin()),
359                            std::make_move_iterator(instruction_bytes.end()));
360        }
361
362        const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(std::move(bytecode));
363
        // Re-target the original events at this bytecode id and shared bytecode.
364        for (size_t j = 0; j < num_of_instr; j++) {
365            auto instr_event = instr_fetch_events.at(j);
366            instr_event.bytecode_id = static_cast<BytecodeId>(i);
367            instr_event.bytecode = bytecode_ptr;
368            instr_events.emplace_back(instr_event);
369        }
370
371        decomposition_events.emplace_back(BytecodeDecompositionEvent{
372            .bytecode_id = static_cast<BytecodeId>(i),
373            .bytecode = bytecode_ptr,
374        });
375    }
376
377    check_all(instr_events, gen_range_check_events(instr_events), decomposition_events);
378}
379
380// Positive test with one single instruction with error INSTRUCTION_OUT_OF_RANGE.
381// The bytecode consists into a serialized single instruction with pc = 0 and
382// the bytecode had the last byte removed. This byte corresponds to a full operand.
383TEST(InstrFetchingConstrainingTest, SingleInstructionOutOfRange)
384{
    // ADD_8 instruction whose serialization we will truncate by one byte.
385    Instruction add_8_instruction = {
386        .opcode = WireOpCode::ADD_8,
387        .addressing_mode = 3,
388        .operands = { Operand::from<uint8_t>(0x34), Operand::from<uint8_t>(0x35), Operand::from<uint8_t>(0x36) },
389    };
390
391    std::vector<uint8_t> bytecode = add_8_instruction.serialize();
392    bytecode.pop_back(); // Remove last byte
393    const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(std::move(bytecode));
394
    // The event carries no .instruction: fetching fails with INSTRUCTION_OUT_OF_RANGE.
395    const std::vector<InstructionFetchingEvent> instr_events = {
396        {
397            .bytecode_id = 1,
398            .pc = 0,
399            .bytecode = bytecode_ptr,
400            .error = InstrDeserializationEventError::INSTRUCTION_OUT_OF_RANGE,
401        },
402    };
403
    // NOTE(review): original line 404 is missing — presumably the declaration opening
    // of decomposition_events. Extraction artifact; verify upstream.
405        {
406            .bytecode_id = 1,
407            .bytecode = bytecode_ptr,
408        },
409    };
410
411    check_without_range_check(instr_events, decomposition_events);
412}
413
414// Positive test with one single instruction (SET_FF) with error INSTRUCTION_OUT_OF_RANGE.
415// The bytecode consists into a serialized single instruction with pc = 0 and
416// the bytecode had the two last bytes removed. The truncated instruction is cut
417// in the middle of an operand.
418TEST(InstrFetchingConstrainingTest, SingleInstructionOutOfRangeSplitOperand)
419{
    // SET_FF carries a full field-element operand; truncating two bytes cuts
    // the instruction in the middle of that operand.
420    Instruction set_ff_instruction = {
421        .opcode = WireOpCode::SET_FF,
422        .addressing_mode = 0x01,
423        .operands = { Operand::from<uint16_t>(0x1279),
424                      Operand::from<uint8_t>(static_cast<uint8_t>(MemoryTag::FF)),
425                      Operand::from<FF>(FF::modulus_minus_two) },
426    };
427
428    std::vector<uint8_t> bytecode = set_ff_instruction.serialize();
429    bytecode.resize(bytecode.size() - 2); // Remove last two bytes)
430    const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(std::move(bytecode));
431
    // The event carries no .instruction: fetching fails with INSTRUCTION_OUT_OF_RANGE.
432    const std::vector<InstructionFetchingEvent> instr_events = {
433        {
434            .bytecode_id = 1,
435            .pc = 0,
436            .bytecode = bytecode_ptr,
437            .error = InstrDeserializationEventError::INSTRUCTION_OUT_OF_RANGE,
438        },
439    };
440
    // NOTE(review): original line 441 is missing — presumably the declaration opening
    // of decomposition_events. Extraction artifact; verify upstream.
442        {
443            .bytecode_id = 1,
444            .bytecode = bytecode_ptr,
445        },
446    };
447
448    check_without_range_check(instr_events, decomposition_events);
449}
450
451// Positive test with error case PC_OUT_OF_RANGE. We pass a pc which is out of range.
452TEST(InstrFetchingConstrainingTest, SingleInstructionPcOutOfRange)
453{
    // NOTE(review): variable is named add_8_instruction but the opcode is SUB_8 —
    // behavior is unaffected, but the name is misleading; consider renaming upstream.
454    Instruction add_8_instruction = {
455        .opcode = WireOpCode::SUB_8,
456        .addressing_mode = 3,
457        .operands = { Operand::from<uint8_t>(0x34), Operand::from<uint8_t>(0x35), Operand::from<uint8_t>(0x36) },
458    };
459
460    std::vector<uint8_t> bytecode = add_8_instruction.serialize();
461    const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(std::move(bytecode));
462
463    const std::vector<InstructionFetchingEvent> instr_events = {
464        // We first need a first instruction at pc == 0 as the trace assumes this.
465        {
466            .bytecode_id = 1,
467            .pc = 0,
468            .instruction = add_8_instruction,
469            .bytecode = bytecode_ptr,
470        },
        // Second event: pc points one byte past the end of the bytecode.
471        {
472            .bytecode_id = 1,
473            .pc = static_cast<uint32_t>(bytecode_ptr->size() + 1),
474            .bytecode = bytecode_ptr,
475            .error = InstrDeserializationEventError::PC_OUT_OF_RANGE,
476        },
477    };
478
    // NOTE(review): original line 479 is missing — presumably the declaration opening
    // of decomposition_events. Extraction artifact; verify upstream.
480        {
481            .bytecode_id = 1,
482            .bytecode = bytecode_ptr,
483        },
484    };
485
486    check_all(instr_events, gen_range_check_events(instr_events), decomposition_events);
487}
488
489// Positive test with error case OPCODE_OUT_OF_RANGE. We generate bytecode of a SET_128 instruction and
490// move the PC to a position corresponding to the beginning of the 128-bit immediate value of SET_128.
491// The immediate value in SET_128 starts with byte 0xFF (which we know is not a valid opcode).
492TEST(InstrFetchingConstrainingTest, SingleInstructionOpcodeOutOfRange)
493{
    // SET_128 whose 128-bit immediate starts with byte 0xFF (not a valid opcode).
494    Instruction set_128_instruction = {
495        .opcode = WireOpCode::SET_128,
496        .addressing_mode = 0,
497        .operands = { Operand::from<uint16_t>(0x1234),
498                      Operand::from<uint8_t>(static_cast<uint8_t>(MemoryTag::U128)),
499                      Operand::from<uint128_t>(static_cast<uint128_t>(0xFF) << 120) },
500    };
501
502    std::vector<uint8_t> bytecode = set_128_instruction.serialize();
503    const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(std::move(bytecode));
504
505    const std::vector<InstructionFetchingEvent> instr_events = {
        // First event fetches the valid instruction at pc == 0.
506        {
507            .bytecode_id = 1,
508            .pc = 0,
509            .instruction = set_128_instruction,
510            .bytecode = bytecode_ptr,
511        },
512        {
513            .bytecode_id = 1,
514            .pc = 5, // We move pc to the beginning of the 128-bit immediate value.
515            .bytecode = bytecode_ptr,
516            .error = InstrDeserializationEventError::OPCODE_OUT_OF_RANGE,
517        },
518    };
519
    // NOTE(review): original line 520 is missing — presumably the declaration opening
    // of decomposition_events. Extraction artifact; verify upstream.
521        {
522            .bytecode_id = 1,
523            .bytecode = bytecode_ptr,
524        },
525    };
526
527    check_without_range_check(instr_events, decomposition_events);
528}
529
530// Positive test with one single instruction (SET_16) with error TAG_OUT_OF_RANGE.
531// The bytecode consists into a serialized single instruction with pc = 0.
532// The operand at index 1 is wrongly set to value 12
533TEST(InstrFetchingConstrainingTest, SingleInstructionTagOutOfRange)
534{
    // SET_16 whose tag operand (index 1) is 12, outside the valid MemoryTag range.
535    Instruction set_16_instruction = {
536        .opcode = WireOpCode::SET_16,
537        .addressing_mode = 0,
538        .operands = { Operand::from<uint16_t>(0x1234), Operand::from<uint8_t>(12), Operand::from<uint16_t>(0x5678) },
539    };
540
541    std::vector<uint8_t> bytecode = set_16_instruction.serialize();
542    const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(std::move(bytecode));
543
544    const std::vector<InstructionFetchingEvent> instr_events = {
545        {
546            .bytecode_id = 1,
547            .pc = 0,
548            .instruction = set_16_instruction,
549            .bytecode = bytecode_ptr,
550            .error = InstrDeserializationEventError::TAG_OUT_OF_RANGE,
551        },
552    };
553
    // NOTE(review): original line 554 is missing — presumably the declaration opening
    // of decomposition_events. Extraction artifact; verify upstream.
555        {
556            .bytecode_id = 1,
557            .bytecode = bytecode_ptr,
558        },
559    };
560
561    check_without_range_check(instr_events, decomposition_events);
562}
563
564// Negative interaction test with some values not matching the instruction spec table.
565TEST(InstrFetchingConstrainingTest, NegativeWrongWireInstructionSpecInteractions)
566{
567    BytecodeTraceBuilder bytecode_builder;
568    PrecomputedTraceBuilder precomputed_builder;
569
570    // Some arbitrary chosen opcodes. We limit to one as this unit test is costly.
571    // Test works if the following vector is extended to other opcodes though.
    // NOTE(review): original line 572 (the `opcodes` vector) and lines 583-584 are
    // missing from this listing — extraction artifact; verify upstream.
573
574    for (const auto& opcode : opcodes) {
575        TestTraceContainer trace;
576        const auto instr = testing::random_instruction(opcode);
577        bytecode_builder.process_instruction_fetching(
578            { { .bytecode_id = 1,
579                .pc = 0,
580                .instruction = instr,
581                .bytecode = std::make_shared<std::vector<uint8_t>>(instr.serialize()) } },
582            trace);
585        precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.
586
        // Sanity check: the unmutated trace passes the lookup.
587        check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_wire_instruction_info_settings>(trace);
588
589        ASSERT_EQ(trace.get(C::lookup_instr_fetching_wire_instruction_info_counts, static_cast<uint32_t>(opcode)), 1);
590
        // Every column participating in the wire-instruction-spec lookup tuple.
591        constexpr std::array<C, 21> mutated_cols = {
592            C::instr_fetching_exec_opcode,     C::instr_fetching_instr_size,      C::instr_fetching_sel_has_tag,
593            C::instr_fetching_sel_tag_is_op2,  C::instr_fetching_sel_op_dc_0,     C::instr_fetching_sel_op_dc_1,
594            C::instr_fetching_sel_op_dc_2,     C::instr_fetching_sel_op_dc_3,     C::instr_fetching_sel_op_dc_4,
595            C::instr_fetching_sel_op_dc_5,     C::instr_fetching_sel_op_dc_6,     C::instr_fetching_sel_op_dc_7,
596            C::instr_fetching_sel_op_dc_8,     C::instr_fetching_sel_op_dc_9,     C::instr_fetching_sel_op_dc_10,
597            C::instr_fetching_sel_op_dc_11,    C::instr_fetching_sel_op_dc_12,    C::instr_fetching_sel_op_dc_13,
598            C::instr_fetching_sel_op_dc_14,    C::instr_fetching_sel_op_dc_15,    C::instr_fetching_sel_op_dc_16,
599        };
600
601        // Mutate execution opcode
602        for (const auto& col : mutated_cols) {
603            auto mutated_trace = trace;
604            const FF mutated_value = trace.get(col, 1) + 1; // Mutate to value + 1
605            mutated_trace.set(col, 1, mutated_value);
606
    // NOTE(review): original line 607 is missing — presumably the
    // EXPECT_THROW_WITH_MESSAGE( opening of the statement completed below.
608                (check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_wire_instruction_info_settings>(
609                    mutated_trace)),
610                "Failed.*LOOKUP_INSTR_FETCHING_WIRE_INSTRUCTION_INFO.*Could not find tuple in destination.");
611        }
612    }
613}
614
615// Negative interaction test with some values not matching the bytecode decomposition table.
616TEST(InstrFetchingConstrainingTest, NegativeWrongBcDecompositionInteractions)
617{
    // NOTE(review): this outer trace is shadowed by the per-opcode trace inside the
    // loop below and appears unused — candidate for removal upstream.
618    TestTraceContainer trace;
619    BytecodeTraceBuilder bytecode_builder;
620
621    // Some arbitrary chosen opcodes. We limit to one as this unit test is costly.
622    // Test works if the following vector is extended to other opcodes though.
    // NOTE(review): original line 623 (the `opcodes` vector) is missing from this
    // listing — extraction artifact; verify upstream.
624
625    for (const auto& opcode : opcodes) {
626        TestTraceContainer trace;
627        const auto instr = testing::random_instruction(opcode);
628        auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(instr.serialize());
629        bytecode_builder.process_instruction_fetching({ {
630                                                          .bytecode_id = 1,
631                                                          .pc = 0,
632                                                          .instruction = instr,
633                                                          .bytecode = bytecode_ptr,
634                                                      } },
635                                                      trace);
636        bytecode_builder.process_decomposition({ {
637                                                   .bytecode_id = 1,
638                                                   .bytecode = bytecode_ptr,
639                                               } },
640                                               trace);
641
642        auto valid_trace = trace; // Keep original trace before lookup processing
643        check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_bytes_from_bc_dec_settings>(valid_trace);
644
        // Every column participating in the bytes-from-bc-decomposition lookup tuple.
645        constexpr std::array<C, 39> mutated_cols = {
646            C::instr_fetching_pc,   C::instr_fetching_bytecode_id, C::instr_fetching_bd0,  C::instr_fetching_bd1,
647            C::instr_fetching_bd2,  C::instr_fetching_bd3,  C::instr_fetching_bd4,  C::instr_fetching_bd5,
648            C::instr_fetching_bd6,  C::instr_fetching_bd7,  C::instr_fetching_bd8,  C::instr_fetching_bd9,
649            C::instr_fetching_bd10, C::instr_fetching_bd11, C::instr_fetching_bd12, C::instr_fetching_bd13,
650            C::instr_fetching_bd14, C::instr_fetching_bd15, C::instr_fetching_bd16, C::instr_fetching_bd17,
651            C::instr_fetching_bd18, C::instr_fetching_bd19, C::instr_fetching_bd20, C::instr_fetching_bd21,
652            C::instr_fetching_bd22, C::instr_fetching_bd23, C::instr_fetching_bd24, C::instr_fetching_bd25,
653            C::instr_fetching_bd26, C::instr_fetching_bd27, C::instr_fetching_bd28, C::instr_fetching_bd29,
654            C::instr_fetching_bd30, C::instr_fetching_bd31, C::instr_fetching_bd32, C::instr_fetching_bd33,
655            C::instr_fetching_bd34, C::instr_fetching_bd35, C::instr_fetching_bd36,
656        };
657
658        // Mutate execution opcode
659        for (const auto& col : mutated_cols) {
660            auto mutated_trace = trace;
661            const FF mutated_value = trace.get(col, 1) + 1; // Mutate to value + 1
662            mutated_trace.set(col, 1, mutated_value);
663
    // NOTE(review): original line 664 is missing — presumably the
    // EXPECT_THROW_WITH_MESSAGE( opening of the statement completed below.
665                (check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_bytes_from_bc_dec_settings>(
666                    mutated_trace)),
667                "Failed.*BYTES_FROM_BC_DEC. Could not find tuple in destination.");
668        }
669    }
670}
671
672// Negative interaction test for #[BYTECODE_SIZE_FROM_BC_DEC] where bytecode_size has the wrong value.
673// We set pc different from zero.
674TEST(InstrFetchingConstrainingTest, NegativeWrongBytecodeSizeBcDecompositionInteractions)
675{
676    TestTraceContainer trace;
677    BytecodeTraceBuilder bytecode_builder;
678    PrecomputedTraceBuilder precomputed_builder;
679
    // Pad the bytecode with `pc` filler bytes so the fetched instruction sits at pc != 0.
680    const uint32_t pc = 15;
681    std::vector<uint8_t> bytecode(pc, 0x23);
682
683    // Some arbitrary chosen opcodes. We limit to one as this unit test is costly.
684    // Test works if the following vector is extended to other opcodes though.
    // NOTE(review): original line 685 (the `opcodes` vector) is missing from this
    // listing. Note also that `bytecode` is declared outside the loop and appended to
    // inside it — presumably fine only while `opcodes` holds a single opcode; verify.
686
687    for (const auto& opcode : opcodes) {
688        TestTraceContainer trace;
689
690        const auto instr = testing::random_instruction(opcode);
691        const auto instr_bytecode = instr.serialize();
692        bytecode.insert(bytecode.end(),
693                        std::make_move_iterator(instr_bytecode.begin()),
694                        std::make_move_iterator(instr_bytecode.end()));
    // NOTE(review): original line 695 is missing — presumably the creation of
    // bytecode_ptr from `bytecode`. Extraction artifact; verify upstream.
696
697        bytecode_builder.process_instruction_fetching({ {
698                                                          .bytecode_id = 1,
699                                                          .pc = pc,
700                                                          .instruction = instr,
701                                                          .bytecode = bytecode_ptr,
702                                                      } },
703                                                      trace);
704        bytecode_builder.process_decomposition({ {
705                                                   .bytecode_id = 1,
706                                                   .bytecode = bytecode_ptr,
707                                               } },
708                                               trace);
709        precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.
710
711        auto valid_trace = trace; // Keep original trace before lookup processing
712        check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_bytecode_size_from_bc_dec_settings>(valid_trace);
713
        // Mutate bytecode_size so the lookup tuple no longer exists in the destination.
714        auto mutated_trace = trace;
715        const FF mutated_value = trace.get(C::instr_fetching_bytecode_size, 1) + 1; // Mutate to value + 1
716        mutated_trace.set(C::instr_fetching_bytecode_size, 1, mutated_value);
717
    // NOTE(review): original line 718 is missing — presumably the
    // EXPECT_THROW_WITH_MESSAGE( opening of the statement completed below.
719            (check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_bytecode_size_from_bc_dec_settings>(
720                mutated_trace)),
721            "Failed.*BYTECODE_SIZE_FROM_BC_DEC. Could not find tuple in destination.");
722    }
723}
724
725using ::bb::avm2::testing::InstructionBuilder;
726using simulation::EventEmitter;
727using simulation::MockExecutionIdManager;
728using simulation::MockGreaterThan;
729using simulation::Poseidon2;
730using simulation::Poseidon2HashEvent;
731using simulation::Poseidon2PermutationEvent;
732using simulation::Poseidon2PermutationMemoryEvent;
733using ::testing::StrictMock;
734using tracegen::Poseidon2TraceBuilder;
735
// Repro for the truncated-bytecode attack closed by #20254: hashing a truncated
// bytecode must no longer collide with the hash of the full bytecode.
736TEST(InstrFetchingConstrainingTest, NegativeTruncatedBytecodeRepro)
737{
738    TestTraceContainer trace;
739    BytecodeTraceBuilder bytecode_builder;
740    PrecomputedTraceBuilder precomputed_builder;
741    RangeCheckTraceBuilder range_check_builder;
742    EventEmitter<Poseidon2HashEvent> hash_event_emitter;
743    EventEmitter<Poseidon2PermutationEvent> perm_event_emitter;
744    EventEmitter<Poseidon2PermutationMemoryEvent> perm_mem_event_emitter;
745    StrictMock<MockGreaterThan> mock_gt;
746    StrictMock<MockExecutionIdManager> mock_execution_id_manager;
747    // Note: this helper expects bytecode fields without the prepended separator and does not complete decomposition
    // NOTE(review): original lines 748-749 are missing — presumably the construction of
    // the `poseidon2` simulator used below. Extraction artifact; verify upstream.
750
751    Poseidon2TraceBuilder poseidon2_builder;
752
753    // Build some good bytecode:
754    const uint32_t pc = 15;
755    std::vector<uint8_t> bytecode(pc, 0x23);
    // NOTE(review): named add_instr but the opcode is SUB_8 — misleading name.
756    const auto add_instr =
757        InstructionBuilder(WireOpCode::SUB_8).operand<uint8_t>(5).operand<uint8_t>(5).operand<uint8_t>(0).build();
758    const auto instr_bytecode = add_instr.serialize();
759    bytecode.insert(
760        bytecode.end(), std::make_move_iterator(instr_bytecode.begin()), std::make_move_iterator(instr_bytecode.end()));
761
762    std::vector<FF> fields = simulation::encode_bytecode(bytecode);
763    std::vector<FF> prepended_fields = { simulation::compute_public_bytecode_first_field(bytecode.size()) };
764    prepended_fields.insert(prepended_fields.end(), fields.begin(), fields.end());
    // NOTE(review): original line 765 is missing — presumably the computation of `hash`
    // (poseidon2 hash of prepended_fields) used below. Extraction artifact.
766
767    // Remove the final byte (which has a value of zero)
768    std::vector<uint8_t> trunc_bytecode(pc, 0x23);
769    trunc_bytecode.insert(trunc_bytecode.end(),
770                          std::make_move_iterator(instr_bytecode.begin()),
771                          std::make_move_iterator(instr_bytecode.end()));
772    trunc_bytecode.resize(trunc_bytecode.size() - 1);
773    std::vector<FF> trunc_fields = simulation::encode_bytecode(trunc_bytecode);
774    std::vector<FF> trunc_prepended_fields = { DOM_SEP__PUBLIC_BYTECODE };
775    trunc_prepended_fields.insert(trunc_prepended_fields.end(), trunc_fields.begin(), trunc_fields.end());
776    FF trunc_hash = poseidon2.hash(trunc_prepended_fields);
777    // 'Real' bytecode: [ 23 23 23 23 23 23 23 23 23 23 23 23 23 23 23 02 00 05 05 00 ] of length 20 bytes
778    // We could previously process a truncated bytecode with the same id:
779    // 'Fake' bytecode: [ 23 23 23 23 23 23 23 23 23 23 23 23 23 23 23 02 00 05 05 ] of length 19 bytes
780    // Before introducing #[BYTECODE_LENGTH_BYTES] in bc_hashing.pil and including the size in
781    // compute_public_bytecode_first_field(), (#20254) trunc_hash == hash, meaning we could use truncated bytecode.
782    ASSERT_NE(hash, trunc_hash);
783
784    // Now, we cannot process the truncated bytecode and force a good instruction on the full bytecode to fail:
785    auto trunc_bytecode_ptr = std::make_shared<std::vector<uint8_t>>(trunc_bytecode);
    // NOTE(review): original line 786 is missing — presumably the creation of
    // bytecode_ptr (for the full bytecode) used below. Extraction artifact.
787    InstructionFetchingEvent instr_event = {
788        .bytecode_id = hash,
789        .pc = pc,
790        .instruction = add_instr,
791        .bytecode = bytecode_ptr,
792    };
793    bytecode_builder.process_instruction_fetching({ instr_event }, trace);
794    bytecode_builder.process_hashing({ {
795                                         .bytecode_id = hash,
796                                         .bytecode_length = static_cast<uint32_t>(trunc_bytecode.size()),
797                                         .bytecode_fields = trunc_fields,
798                                     } },
799                                     trace);
800
801    bytecode_builder.process_decomposition({ {
802                                               .bytecode_id = hash,
803                                               .bytecode = trunc_bytecode_ptr,
804                                           } },
805                                           trace);
806
807    // Prep trace:
808    range_check_builder.process(gen_range_check_events({ instr_event }), trace);
    // NOTE(review): original lines 809-813 and 815-816 and 819 are missing — presumably
    // poseidon2/precomputed processing, the remaining MultiPermutationBuilder template
    // arguments, and the failing check. Extraction artifact; verify upstream.
814    tracegen::MultiPermutationBuilder<perm_bc_hashing_get_packed_field_0_settings,
817        perm_builder(C::bc_decomposition_sel_packed);
818    perm_builder.process(trace);
820}
821
822TEST(InstrFetchingConstrainingTest, NegativeWrongTagValidationInteractions)
823{
824    TestTraceContainer trace;
825    BytecodeTraceBuilder bytecode_builder;
826    PrecomputedTraceBuilder precomputed_builder;
827
828    // Some chosen opcode with a tag. We limit to one as this unit test is costly.
829    // Test works if the following vector is extended to other opcodes though.
    // NOTE(review): original line 830 (the `opcodes` vector) and lines 841-842 are
    // missing from this listing — extraction artifact; verify upstream.
831
832    for (const auto& opcode : opcodes) {
833        TestTraceContainer trace;
834        const auto instr = testing::random_instruction(opcode);
835        bytecode_builder.process_instruction_fetching(
836            { { .bytecode_id = 1,
837                .pc = 0,
838                .instruction = instr,
839                .bytecode = std::make_shared<std::vector<uint8_t>>(instr.serialize()) } },
840            trace);
843        precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.
844
        // Sanity check: the unmutated trace passes the tag-validation lookup.
845        check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_tag_value_validation_settings>(trace);
846
847        auto valid_trace = trace; // Keep original trace before lookup processing
848
849        // Mutate tag out-of-range error
850        auto mutated_trace = trace;
851        ASSERT_EQ(trace.get(C::instr_fetching_tag_out_of_range, 1), 0);
852        mutated_trace.set(C::instr_fetching_tag_out_of_range, 1, 1); // Mutate by toggling the error.
853
    // NOTE(review): original line 854 is missing — presumably the
    // EXPECT_THROW_WITH_MESSAGE( opening of the statement completed below.
855            (check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_tag_value_validation_settings>(
856                mutated_trace)),
857            "Failed.*LOOKUP_INSTR_FETCHING_TAG_VALUE_VALIDATION.*Could not find tuple in destination.");
858    }
859}
860
861// Negative test on not toggling instr_out_of_range when instr_size > bytes_to_read
862TEST(InstrFetchingConstrainingTest, NegativeNotTogglingInstrOutOfRange)
863{
864 TestTraceContainer trace({
865 { { C::precomputed_first_row, 1 } },
866 {
867 { C::instr_fetching_bytes_to_read, 11 },
868 { C::instr_fetching_instr_abs_diff, 0 },
869 { C::instr_fetching_instr_out_of_range, 1 }, // Will be mutated to zero
870 { C::instr_fetching_instr_size, 12 },
871 { C::instr_fetching_sel, 1 },
872 },
873 });
874
875 check_relation<instr_fetching>(trace, instr_fetching::SR_INSTR_OUT_OF_RANGE_TOGGLE);
876
877 trace.set(C::instr_fetching_instr_out_of_range, 1, 0); // Mutate to wrong value
878
879 EXPECT_THROW_WITH_MESSAGE(check_relation<instr_fetching>(trace, instr_fetching::SR_INSTR_OUT_OF_RANGE_TOGGLE),
880 "INSTR_OUT_OF_RANGE_TOGGLE");
881}
882
883// Negative test on wrongly toggling instr_out_of_range when instr_size <= bytes_to_read
884TEST(InstrFetchingConstrainingTest, NegativeTogglingInstrInRange)
885{
886 TestTraceContainer trace({
887 { { C::precomputed_first_row, 1 } },
888 {
889 { C::instr_fetching_bytes_to_read, 12 },
890 { C::instr_fetching_instr_abs_diff, 0 },
891 { C::instr_fetching_instr_out_of_range, 0 }, // Will be mutated to 1
892 { C::instr_fetching_instr_size, 12 },
893 { C::instr_fetching_sel, 1 },
894 },
895 });
896
897 check_relation<instr_fetching>(trace, instr_fetching::SR_INSTR_OUT_OF_RANGE_TOGGLE);
898
899 trace.set(C::instr_fetching_instr_out_of_range, 1, 1); // Mutate to wrong value
900
901 EXPECT_THROW_WITH_MESSAGE(check_relation<instr_fetching>(trace, instr_fetching::SR_INSTR_OUT_OF_RANGE_TOGGLE),
902 "INSTR_OUT_OF_RANGE_TOGGLE");
903}
904
905// Negative test on not toggling pc_out_of_range when pc >= bytecode_size
906TEST(InstrFetchingConstrainingTest, NegativeNotTogglingPcOutOfRange)
907{
908 TestTraceContainer trace({
909 { { C::precomputed_first_row, 1 } },
910 {
911 { C::instr_fetching_bytecode_size, 12 },
912 { C::instr_fetching_pc, 12 },
913 { C::instr_fetching_pc_abs_diff, 0 },
914 { C::instr_fetching_pc_out_of_range, 1 }, // Will be mutated to 0
915 { C::instr_fetching_sel, 1 },
916 },
917 });
918
919 check_relation<instr_fetching>(trace, instr_fetching::SR_PC_OUT_OF_RANGE_TOGGLE);
920
921 trace.set(C::instr_fetching_pc_out_of_range, 1, 0); // Mutate to wrong value
922
923 EXPECT_THROW_WITH_MESSAGE(check_relation<instr_fetching>(trace, instr_fetching::SR_PC_OUT_OF_RANGE_TOGGLE),
924 "PC_OUT_OF_RANGE_TOGGLE");
925}
926
927// Negative test on wrongly toggling pc_out_of_range when pc < bytecode_size
928TEST(InstrFetchingConstrainingTest, NegativeTogglingPcInRange)
929{
930 TestTraceContainer trace({
931 { { C::precomputed_first_row, 1 } },
932 {
933 { C::instr_fetching_bytecode_size, 12 },
934 { C::instr_fetching_pc, 11 },
935 { C::instr_fetching_pc_abs_diff, 0 },
936 { C::instr_fetching_pc_out_of_range, 0 }, // Will be mutated to 1
937 { C::instr_fetching_sel, 1 },
938 },
939 });
940
941 check_relation<instr_fetching>(trace, instr_fetching::SR_PC_OUT_OF_RANGE_TOGGLE);
942
943 trace.set(C::instr_fetching_pc_out_of_range, 1, 1); // Mutate to wrong value
944
945 EXPECT_THROW_WITH_MESSAGE(check_relation<instr_fetching>(trace, instr_fetching::SR_PC_OUT_OF_RANGE_TOGGLE),
946 "PC_OUT_OF_RANGE_TOGGLE");
947}
948
949TEST(InstrFetchingConstrainingTest, ErrorFlagSetButSelParsingErrIsZero)
950{
951 // Create a minimal trace that satisfies all constraints EXCEPT the (commented out) one
952 // that should enforce sel_parsing_err = pc_out_of_range + opcode_out_of_range + instr_out_of_range +
953 // tag_out_of_range
954 TestTraceContainer trace({
955 { { C::precomputed_first_row, 1 } },
956 {
957 { C::instr_fetching_sel, 1 },
958 // Error flags - pc_out_of_range is SET to 1
959 { C::instr_fetching_pc_out_of_range, 1 },
960 { C::instr_fetching_opcode_out_of_range, 0 },
961 { C::instr_fetching_instr_out_of_range, 0 },
962 { C::instr_fetching_tag_out_of_range, 0 },
963 // sel_parsing_err should be 1 (since pc_out_of_range = 1) but we set it to 0
964 { C::instr_fetching_sel_parsing_err, 0 },
965 // Values to satisfy PC_OUT_OF_RANGE_TOGGLE constraint (subrelation 4):
966 // pc_abs_diff = sel * ((2 * pc_out_of_range - 1) * (pc - bytecode_size) - 1 + pc_out_of_range)
967 // With pc_out_of_range = 1: pc_abs_diff = (2*1-1) * (pc - bytecode_size) - 1 + 1 = pc - bytecode_size
968 { C::instr_fetching_bytecode_size, 10 },
969 { C::instr_fetching_pc, 15 }, // pc > bytecode_size
970 { C::instr_fetching_pc_abs_diff, 5 }, // pc - bytecode_size = 15 - 10 = 5
971 { C::instr_fetching_pc_size_in_bits, 32 }, // AVM_PC_SIZE_IN_BITS constant
972 // Values to satisfy INSTR_OUT_OF_RANGE_TOGGLE constraint (subrelation 6):
973 // instr_abs_diff = (2 * instr_out_of_range - 1) * (instr_size - bytes_to_read) - instr_out_of_range
974 // With instr_out_of_range = 0: instr_abs_diff = (-1) * (instr_size - bytes_to_read) = bytes_to_read -
975 // instr_size
976 { C::instr_fetching_bytes_to_read, 10 },
977 { C::instr_fetching_instr_size, 5 },
978 { C::instr_fetching_instr_abs_diff, 5 }, // bytes_to_read - instr_size = 10 - 5 = 5
979 },
980 });
981
982 EXPECT_THROW_WITH_MESSAGE(check_relation<instr_fetching>(trace),
983 "Relation instr_fetching, subrelation 5 failed at row 1");
984}
985
990TEST(InstrFetchingConstrainingTest, CorrectBehavior_SelParsingErrMatchesErrors)
991{
992 TestTraceContainer trace({
993 { { C::precomputed_first_row, 1 } },
994 {
995 { C::instr_fetching_sel, 1 },
996 { C::instr_fetching_pc_out_of_range, 1 },
997 { C::instr_fetching_opcode_out_of_range, 0 },
998 { C::instr_fetching_instr_out_of_range, 0 },
999 { C::instr_fetching_tag_out_of_range, 0 },
1000 { C::instr_fetching_sel_parsing_err, 1 }, // Correctly set to 1
1001 // Supporting values
1002 { C::instr_fetching_bytecode_size, 10 },
1003 { C::instr_fetching_pc, 15 },
1004 { C::instr_fetching_pc_abs_diff, 5 },
1005 { C::instr_fetching_pc_size_in_bits, 32 },
1006 { C::instr_fetching_bytes_to_read, 10 },
1007 { C::instr_fetching_instr_size, 5 },
1008 { C::instr_fetching_instr_abs_diff, 5 }, // bytes_to_read - instr_size = 10 - 5 = 5
1009 },
1010 });
1011
1012 // This should pass both before and after the fix.
1013 check_relation<instr_fetching>(trace);
1014}
1015
1019TEST(InstrFetchingConstrainingTest, CorrectBehavior_NoErrorsMeansSelParsingErrIsZero)
1020{
1021 TestTraceContainer trace({
1022 { { C::precomputed_first_row, 1 } },
1023 {
1024 { C::instr_fetching_sel, 1 },
1025 { C::instr_fetching_pc_out_of_range, 0 },
1026 { C::instr_fetching_opcode_out_of_range, 0 },
1027 { C::instr_fetching_instr_out_of_range, 0 },
1028 { C::instr_fetching_tag_out_of_range, 0 },
1029 { C::instr_fetching_sel_parsing_err, 0 }, // Correctly set to 0
1030 { C::instr_fetching_sel_pc_in_range, 1 }, // sel * (1 - pc_out_of_range) = 1 * 1 = 1
1031 // pc_abs_diff = sel * ((2 * pc_out_of_range - 1) * (pc - bytecode_size) - 1 + pc_out_of_range)
1032 // With pc_out_of_range = 0: pc_abs_diff = (2*0-1) * (pc - bytecode_size) - 1 + 0
1033 // = -(pc - bytecode_size) - 1 = bytecode_size - pc - 1
1034 { C::instr_fetching_bytecode_size, 20 },
1035 { C::instr_fetching_pc, 5 },
1036 { C::instr_fetching_pc_abs_diff, 14 }, // bytecode_size - pc - 1 = 20 - 5 - 1 = 14
1037 { C::instr_fetching_pc_size_in_bits, 32 },
1038 // instr_abs_diff = bytes_to_read - instr_size (when instr_out_of_range = 0)
1039 { C::instr_fetching_bytes_to_read, 15 },
1040 { C::instr_fetching_instr_size, 10 },
1041 { C::instr_fetching_instr_abs_diff, 5 }, // bytes_to_read - instr_size = 15 - 10 = 5
1042 },
1043 });
1044
1045 // This should pass both before and after the fix.
1046 check_relation<instr_fetching>(trace);
1047}
1048
1049} // namespace
1050} // namespace bb::avm2::constraining
#define EXPECT_THROW_WITH_MESSAGE(code, expectedMessageRegex)
Definition assert.hpp:193
std::shared_ptr< Napi::ThreadSafeFunction > bytecode
#define DOM_SEP__PUBLIC_BYTECODE
#define AVM_PC_SIZE_IN_BITS
StrictMock< MockGreaterThan > mock_gt
EventEmitter< Poseidon2PermutationMemoryEvent > perm_mem_event_emitter
EventEmitter< Poseidon2PermutationEvent > perm_event_emitter
EventEmitter< Poseidon2HashEvent > hash_event_emitter
Poseidon2TraceBuilder poseidon2_builder
StrictMock< MockExecutionIdManager > mock_execution_id_manager
EventEmitter< BytecodeDecompositionEvent > decomposition_events
static constexpr size_t SR_OP1_BYTES_DECOMPOSITION
static constexpr size_t SR_OP3_BYTES_DECOMPOSITION
static constexpr size_t SR_OP6_BYTES_DECOMPOSITION
static constexpr size_t SR_OP4_BYTES_DECOMPOSITION
static constexpr size_t SR_ADDRESSING_MODE_BYTES_DECOMPOSITION
static constexpr size_t SR_INSTR_OUT_OF_RANGE_TOGGLE
static std::string get_subrelation_label(size_t index)
static constexpr size_t SR_OP7_BYTES_DECOMPOSITION
static constexpr size_t SR_OP5_BYTES_DECOMPOSITION
static constexpr size_t SR_PC_OUT_OF_RANGE_TOGGLE
static constexpr size_t SR_OP2_BYTES_DECOMPOSITION
void process_hash(const simulation::EventEmitterInterface< simulation::Poseidon2HashEvent >::Container &hash_events, TraceContainer &trace)
Processes the hash events for the Poseidon2 hash function. It populates the columns for the poseidon2...
void process_misc(TraceContainer &trace, const uint32_t num_rows=PRECOMPUTED_TRACE_SIZE)
void process_wire_instruction_spec(TraceContainer &trace)
void process_memory_tag_range(TraceContainer &trace)
void process(const simulation::EventEmitterInterface< simulation::RangeCheckEvent >::Container &events, TraceContainer &trace)
Processes range check events and populates the trace with decomposed value columns.
const FF & get(Column col, uint32_t row) const
void set(Column col, uint32_t row, const FF &value)
Native Poseidon2 hash function implementation.
Definition poseidon2.hpp:22
static FF hash(const std::vector< FF > &input)
Hashes a vector of field elements.
RangeCheckTraceBuilder range_check_builder
Definition alu.test.cpp:121
PrecomputedTraceBuilder precomputed_builder
Definition alu.test.cpp:120
AluTraceBuilder builder
Definition alu.test.cpp:124
TestTraceContainer trace
void check_interaction(tracegen::TestTraceContainer &trace)
TEST(AvmFixedVKTests, FixedVKCommitments)
Test that the fixed VK commitments agree with the ones computed from precomputed columns.
void check_relation(const tracegen::TestTraceContainer &trace, Ts... subrelation)
std::vector< FF > encode_bytecode(std::span< const uint8_t > bytecode)
FF compute_public_bytecode_first_field(size_t bytecode_size)
Instruction random_instruction(WireOpCode w_opcode)
Definition fixtures.cpp:125
TestTraceContainer empty_trace()
Definition fixtures.cpp:153
lookup_settings< lookup_instr_fetching_wire_instruction_info_settings_ > lookup_instr_fetching_wire_instruction_info_settings
lookup_settings< lookup_instr_fetching_bytecode_size_from_bc_dec_settings_ > lookup_instr_fetching_bytecode_size_from_bc_dec_settings
lookup_settings< lookup_instr_fetching_bytes_from_bc_dec_settings_ > lookup_instr_fetching_bytes_from_bc_dec_settings
permutation_settings< perm_bc_hashing_get_packed_field_2_settings_ > perm_bc_hashing_get_packed_field_2_settings
permutation_settings< perm_bc_hashing_get_packed_field_1_settings_ > perm_bc_hashing_get_packed_field_1_settings
permutation_settings< perm_bc_hashing_get_packed_field_0_settings_ > perm_bc_hashing_get_packed_field_0_settings
lookup_settings< lookup_instr_fetching_instr_abs_diff_positive_settings_ > lookup_instr_fetching_instr_abs_diff_positive_settings
lookup_settings< lookup_instr_fetching_pc_abs_diff_positive_settings_ > lookup_instr_fetching_pc_abs_diff_positive_settings
lookup_settings< lookup_instr_fetching_tag_value_validation_settings_ > lookup_instr_fetching_tag_value_validation_settings
Instruction
Enumeration of VM instructions that can be executed.
constexpr decltype(auto) get(::tuplet::tuple< T... > &&t) noexcept
Definition tuple.hpp:13
unsigned __int128 uint128_t
Definition serialize.hpp:45
static constexpr uint256_t modulus_minus_two