#include <gmock/gmock.h>
#include <gtest/gtest.h>

#include <algorithm>
#include <array>
#include <cstdint>
#include <memory>
#include <vector>

namespace bb::avm2::tracegen {
namespace {

using ::testing::AllOf;

using C = Column;
using simulation::Instruction;
using simulation::InstructionFetchingEvent;
using simulation::Operand;
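// A single retrieval event should yield exactly one bc_retrieval row carrying
// the before/after snapshots of the retrieved-bytecodes tree.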
TEST(BytecodeTraceGenTest, BasicRetrieval)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    const AppendOnlyTreeSnapshot snapshot_before = { .root = 12, .next_available_leaf_index = 1 };
    const AppendOnlyTreeSnapshot snapshot_after = { .root = 34, .next_available_leaf_index = 2 };

    builder.process_retrieval({ simulation::BytecodeRetrievalEvent{
                                  .current_class_id = 34,
                                  .contract_class = { .artifact_hash = 100, .private_functions_root = 200 },
                                  .nullifier_root = 300,
                                  .public_data_tree_root = 400,
                                  .retrieved_bytecodes_snapshot_before = snapshot_before,
                                  .retrieved_bytecodes_snapshot_after = snapshot_after,
                                  .is_new_class = true,
                              } },
                              trace);

    const auto rows = trace.as_rows();
    ASSERT_EQ(rows.size(), 1);

    EXPECT_THAT(rows.at(0),
                AllOf(ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_root, snapshot_before.root),
                      ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_size, snapshot_before.next_available_leaf_index),
                      ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_root, snapshot_after.root),
                      ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_size, snapshot_after.next_available_leaf_index)));
}
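// Retrieving a class that is already in the tree: the snapshot is unchanged,
// so the prev_* and next_* columns must coincide.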
TEST(BytecodeTraceGenTest, RetrievalExistingClass)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    const AppendOnlyTreeSnapshot snapshot = { .root = FF(12), .next_available_leaf_index = 2 };

    builder.process_retrieval({ simulation::BytecodeRetrievalEvent{
                                  .current_class_id = 34,
                                  .contract_class = { .artifact_hash = 100, .private_functions_root = 200 },
                                  .nullifier_root = 300,
                                  .public_data_tree_root = 400,
                                  .retrieved_bytecodes_snapshot_before = snapshot,
                                  .retrieved_bytecodes_snapshot_after = snapshot,
                                  .is_new_class = false,
                              } },
                              trace);

    const auto rows = trace.as_rows();
    ASSERT_EQ(rows.size(), 1);

    EXPECT_THAT(rows.at(0),
                AllOf(ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_root, snapshot.root),
                      ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_size, snapshot.next_available_leaf_index),
                      ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_root, snapshot.root),
                      ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_size, snapshot.next_available_leaf_index)));
}
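// Three chained retrievals: each row's next_* snapshot must equal the following
// row's prev_* snapshot, and a repeated class (is_new_class = false) must leave
// the tree untouched.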
TEST(BytecodeTraceGenTest, MultipleRetrievalEvents)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    const AppendOnlyTreeSnapshot snapshot_before = { .root = 12, .next_available_leaf_index = 1 };
    const AppendOnlyTreeSnapshot snapshot_after_0 = { .root = 34, .next_available_leaf_index = 2 };
    const AppendOnlyTreeSnapshot snapshot_after_1 = { .root = 56, .next_available_leaf_index = 3 };

    builder.process_retrieval(
        {
            simulation::BytecodeRetrievalEvent{
                .current_class_id = 34,
                .contract_class = { .artifact_hash = 100, .private_functions_root = 200 },
                .nullifier_root = 300,
                .public_data_tree_root = 400,
                .retrieved_bytecodes_snapshot_before = snapshot_before,
                .retrieved_bytecodes_snapshot_after = snapshot_after_0,
                .is_new_class = true,
            },
            simulation::BytecodeRetrievalEvent{
                .address = 0xdeadbeef,
                .current_class_id = 56,
                .contract_class = { .artifact_hash = 100, .private_functions_root = 200 },
                .nullifier_root = 300,
                .public_data_tree_root = 400,
                .retrieved_bytecodes_snapshot_before = snapshot_after_0,
                .retrieved_bytecodes_snapshot_after = snapshot_after_1,
                .is_new_class = true,
            },
            simulation::BytecodeRetrievalEvent{
                .address = 0xdeadb33f,
                .current_class_id = 56,
                .contract_class = { .artifact_hash = 100, .private_functions_root = 200 },
                .nullifier_root = 300,
                .public_data_tree_root = 400,
                .retrieved_bytecodes_snapshot_before = snapshot_after_1,
                .retrieved_bytecodes_snapshot_after = snapshot_after_1,
                .is_new_class = false,
            },
        },
        trace);

    const auto rows = trace.as_rows();
    ASSERT_EQ(rows.size(), 3);

    EXPECT_THAT(rows.at(0),
                AllOf(ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_root, snapshot_before.root),
                      ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_size, snapshot_before.next_available_leaf_index),
                      ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_root, snapshot_after_0.root),
                      ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_size, snapshot_after_0.next_available_leaf_index)));

    EXPECT_THAT(rows.at(1),
                AllOf(ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_root, snapshot_after_0.root),
                      ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_size, snapshot_after_0.next_available_leaf_index),
                      ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_root, snapshot_after_1.root),
                      ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_size, snapshot_after_1.next_available_leaf_index)));

    EXPECT_THAT(rows.at(2),
                AllOf(ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_root, snapshot_after_1.root),
                      ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_size, snapshot_after_1.next_available_leaf_index),
                      ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_root, snapshot_after_1.root),
                      ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_size, snapshot_after_1.next_available_leaf_index)));
}
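// A lookup of an unknown contract instance (class id 0, empty class) still
// produces a row, with the tree snapshot unchanged.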
TEST(BytecodeTraceGenTest, RetrievalInstanceNotFoundError)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    const AppendOnlyTreeSnapshot snapshot = { .root = FF(12), .next_available_leaf_index = 1 };

    builder.process_retrieval({ simulation::BytecodeRetrievalEvent{
                                  .current_class_id = 0,
                                  .contract_class = {},
                                  .nullifier_root = 300,
                                  .public_data_tree_root = 400,
                                  .retrieved_bytecodes_snapshot_before = snapshot,
                                  .retrieved_bytecodes_snapshot_after = snapshot,
                                  .is_new_class = false,
                              } },
                              trace);

    const auto rows = trace.as_rows();
    ASSERT_EQ(rows.size(), 1);

    EXPECT_THAT(rows.at(0),
                AllOf(ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_root, snapshot.root),
                      ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_size, snapshot.next_available_leaf_index),
                      ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_root, snapshot.root),
                      ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_size, snapshot.next_available_leaf_index)));
}
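// Hitting MAX_PUBLIC_CALLS_TO_UNIQUE_CONTRACT_CLASS_IDS: a new class cannot be
// inserted, so the snapshot must stay put.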
TEST(BytecodeTraceGenTest, RetrievalLimitError)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    // The retrieved-bytecodes tree is already at the class-id limit.
    const AppendOnlyTreeSnapshot snapshot = { .root = FF(12),
                                              .next_available_leaf_index = MAX_PUBLIC_CALLS_TO_UNIQUE_CONTRACT_CLASS_IDS };

    builder.process_retrieval({ simulation::BytecodeRetrievalEvent{
                                  .current_class_id = 34,
                                  .contract_class = {},
                                  .nullifier_root = 300,
                                  .public_data_tree_root = 400,
                                  .retrieved_bytecodes_snapshot_before = snapshot,
                                  .retrieved_bytecodes_snapshot_after = snapshot,
                                  .is_new_class = true,
                              } },
                              trace);

    const auto rows = trace.as_rows();
    ASSERT_EQ(rows.size(), 1);

    EXPECT_THAT(rows.at(0),
                AllOf(ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_root, snapshot.root),
                      ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_size, snapshot.next_available_leaf_index),
                      ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_root, snapshot.root),
                      ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_size, snapshot.next_available_leaf_index)));
}
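// Decomposition of a 4-byte bytecode: every row keeps sel_windows_gt_remaining
// set, and next_packed_pc_min_pc_inv tracks the inverse of the distance to the
// next 31-byte packing boundary.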
TEST(BytecodeTraceGenTest, BasicShortLength)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    // 4 arbitrary bytes: shorter than both the 31-byte packing field and the window.
    builder.process_decomposition({ simulation::BytecodeDecompositionEvent{
                                      .bytecode_id = 1,
                                      .bytecode = std::make_shared<std::vector<uint8_t>>(
                                          std::vector<uint8_t>{ 0x12, 0x34, 0x56, 0x78 }),
                                  } },
                                  trace);

    const auto rows = trace.as_rows();
    ASSERT_EQ(rows.size(), 4 + 1); // One row per byte, plus row 0.

    EXPECT_THAT(rows.at(1),
                AllOf(ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_eq_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv, 0)));

    EXPECT_THAT(rows.at(2),
                AllOf(ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_eq_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv, FF(31 - 1).invert())));

    EXPECT_THAT(rows.at(3),
                AllOf(ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_eq_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv, FF(31 - 2).invert())));

    EXPECT_THAT(rows.at(4),
                AllOf(ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_eq_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv, FF(31 - 3).invert())));
}
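// Degenerate case: a single-byte bytecode yields one decomposition row (plus row 0).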
TEST(BytecodeTraceGenTest, BasicSingleByte)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    builder.process_decomposition({ simulation::BytecodeDecompositionEvent{
                                      .bytecode_id = 1,
                                      .bytecode = std::make_shared<std::vector<uint8_t>>(std::vector<uint8_t>{ 0x7f }),
                                  } },
                                  trace);

    const auto rows = trace.as_rows();
    ASSERT_EQ(rows.size(), 1 + 1);

    EXPECT_THAT(rows.at(1),
                AllOf(ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_eq_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv, 0)));
}
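// Bytecode 8 bytes longer than DECOMPOSE_WINDOW_SIZE: walks the window through
// the "greater than", "equal to", and "less than" regimes against the number of
// remaining bytes.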
TEST(BytecodeTraceGenTest, BasicLongerThanWindowSize)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    const uint32_t bytecode_size = DECOMPOSE_WINDOW_SIZE + 8;
    std::vector<uint8_t> bytecode(bytecode_size);
    const uint8_t first_byte = 17;

    for (uint8_t i = 0; i < bytecode_size; i++) {
        bytecode[i] = static_cast<uint8_t>(first_byte + i);
    }

    builder.process_decomposition({ simulation::BytecodeDecompositionEvent{
                                      .bytecode_id = 1,
                                      .bytecode = std::make_shared<std::vector<uint8_t>>(bytecode),
                                  } },
                                  trace);

    const auto rows = trace.as_rows();
    ASSERT_EQ(rows.size(), bytecode_size + 1);

    // pc = 0: there are 8 more remaining bytes than the window covers.
    EXPECT_THAT(rows.at(1),
                AllOf(ROW_FIELD_EQ(bc_decomposition_bytes_remaining, bytecode_size),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, FF(-8).invert()),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_eq_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv, 0)));

    // pc = 8: the remaining bytes exactly fill the window.
    EXPECT_THAT(rows.at(9),
                AllOf(ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, 0),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_eq_remaining, 1)));

    // pc = 9: the window now overshoots the remaining bytes by 1.
    EXPECT_THAT(rows.at(10),
                AllOf(ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
                      ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, 1),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_eq_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv, FF(31 - 9).invert())));

    // Last byte of the bytecode.
    EXPECT_THAT(rows.at(bytecode_size),
                AllOf(ROW_FIELD_EQ(bc_decomposition_bytes, first_byte + bytecode_size - 1),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_eq_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv,
                                   FF(62 - (bytecode_size - 1)).invert())));
}
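// Four bytecodes decomposed back to back; checks packing selectors and
// last_of_contract across contract boundaries. The concrete sizes below are
// assumptions of this sketch, chosen to straddle the 31-byte packing boundary.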
TEST(BytecodeTraceGenTest, MultipleEvents)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    const std::array<uint32_t, 4> bc_sizes = { 5, 31, 32, 100 };
    std::array<std::vector<uint8_t>, 4> bytecodes;
    std::transform(bc_sizes.begin(), bc_sizes.end(), bytecodes.begin(), [](uint32_t bc_size) -> std::vector<uint8_t> {
        std::vector<uint8_t> bytecode(bc_size);
        for (uint8_t i = 0; i < static_cast<uint8_t>(bc_size); i++) {
            bytecode[i] = i;
        }
        return bytecode;
    });

    builder.process_decomposition(
        { simulation::BytecodeDecompositionEvent{ .bytecode_id = 1,
                                                  .bytecode = std::make_shared<std::vector<uint8_t>>(bytecodes[0]) },
          simulation::BytecodeDecompositionEvent{ .bytecode_id = 2,
                                                  .bytecode = std::make_shared<std::vector<uint8_t>>(bytecodes[1]) },
          simulation::BytecodeDecompositionEvent{ .bytecode_id = 3,
                                                  .bytecode = std::make_shared<std::vector<uint8_t>>(bytecodes[2]) },
          simulation::BytecodeDecompositionEvent{ .bytecode_id = 4,
                                                  .bytecode = std::make_shared<std::vector<uint8_t>>(bytecodes[3]) } },
        trace);

    const auto rows = trace.as_rows();

    uint32_t row_idx = 1;
    for (uint32_t i = 0; i < 4; i++) {
        uint32_t next_packed_pc = 0;
        for (uint32_t j = 0; j < bc_sizes[i]; j++) {
            const auto bytes_rem = bc_sizes[i] - j;
            EXPECT_THAT(rows.at(row_idx++),
                        AllOf(ROW_FIELD_EQ(bc_decomposition_bytes_remaining, bytes_rem),
                              // Inverse of (window - remaining); 0 when they coincide.
                              ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv,
                                           bytes_rem == DECOMPOSE_WINDOW_SIZE
                                               ? 0
                                               : (FF(DECOMPOSE_WINDOW_SIZE) - FF(bytes_rem)).invert()),
                              ROW_FIELD_EQ(bc_decomposition_sel_packed, j == next_packed_pc ? 1 : 0),
                              ROW_FIELD_EQ(bc_decomposition_next_packed_pc, next_packed_pc),
                              ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv,
                                           j == next_packed_pc ? 0 : FF(next_packed_pc - j).invert()),
                              ROW_FIELD_EQ(bc_decomposition_last_of_contract, j == bc_sizes[i] - 1 ? 1 : 0)));
            next_packed_pc += j % 31 == 0 ? 31 : 0;
        }
    }
}
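// Hashing event for a 93-byte bytecode split into three field elements.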
TEST(BytecodeTraceGenTest, BasicHashing)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    builder.process_hashing({ simulation::BytecodeHashingEvent{
                                .bytecode_id = 1,
                                .bytecode_length = 93,
                                .bytecode_fields = { 10, 20, 30 },
                            } },
                            trace);

    EXPECT_GT(trace.as_rows().size(), 0);
}
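// Helpers shared by the instruction-fetching tests below. gen_pcs and
// create_instruction_fetching_events follow their call sites; gen_instructions
// is a sketch built on random_instruction() and create_bytecode().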
// Generates one instruction per opcode, with randomized operands.
std::vector<Instruction> gen_instructions(const std::vector<WireOpCode>& opcodes)
{
    std::vector<Instruction> instructions;
    instructions.reserve(opcodes.size());
    for (const auto& opcode : opcodes) {
        instructions.emplace_back(random_instruction(opcode));
    }
    return instructions;
}

// Concatenates the serialized instructions into a single bytecode.
std::vector<uint8_t> create_bytecode(const std::vector<Instruction>& instructions)
{
    std::vector<uint8_t> bytecode;
    for (const auto& instruction : instructions) {
        auto serialized_instruction = instruction.serialize();
        bytecode.insert(bytecode.end(), serialized_instruction.begin(), serialized_instruction.end());
    }
    return bytecode;
}

// Derives each instruction's pc by accumulating wire-format sizes.
std::vector<size_t> gen_pcs(const std::vector<WireOpCode>& opcodes)
{
    std::vector<size_t> pcs;
    pcs.reserve(opcodes.size());
    size_t pc = 0;
    for (const auto& opcode : opcodes) {
        pcs.emplace_back(pc);
        pc += get_wire_instruction_spec().at(opcode).size_in_bytes;
    }
    return pcs;
}

std::vector<InstructionFetchingEvent> create_instruction_fetching_events(
    const std::vector<Instruction>& instructions,
    const std::vector<size_t>& pcs,
    const std::shared_ptr<std::vector<uint8_t>>& bytecode_ptr,
    BytecodeId bytecode_id)
{
    std::vector<InstructionFetchingEvent> events;
    events.reserve(instructions.size());
    for (size_t i = 0; i < instructions.size(); i++) {
        events.emplace_back(InstructionFetchingEvent{
            .bytecode_id = bytecode_id,
            .pc = static_cast<uint32_t>(pcs.at(i)),
            .instruction = instructions.at(i),
            .bytecode = bytecode_ptr,
        });
    }
    return events;
}
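// Exhaustive per-opcode check: each serialized instruction byte must land in
// its bd column, alongside size, exec opcode, addressing mode, pc and operands.
// Assumes the wire opcode enum is dense up to LAST_OPCODE_SENTINEL.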
TEST(BytecodeTraceGenTest, InstrDecompositionInBytesEachOpcode)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    constexpr std::array<C, 37> bd_columns = {
        C::instr_fetching_bd0,  C::instr_fetching_bd1,  C::instr_fetching_bd2,  C::instr_fetching_bd3,
        C::instr_fetching_bd4,  C::instr_fetching_bd5,  C::instr_fetching_bd6,  C::instr_fetching_bd7,
        C::instr_fetching_bd8,  C::instr_fetching_bd9,  C::instr_fetching_bd10, C::instr_fetching_bd11,
        C::instr_fetching_bd12, C::instr_fetching_bd13, C::instr_fetching_bd14, C::instr_fetching_bd15,
        C::instr_fetching_bd16, C::instr_fetching_bd17, C::instr_fetching_bd18, C::instr_fetching_bd19,
        C::instr_fetching_bd20, C::instr_fetching_bd21, C::instr_fetching_bd22, C::instr_fetching_bd23,
        C::instr_fetching_bd24, C::instr_fetching_bd25, C::instr_fetching_bd26, C::instr_fetching_bd27,
        C::instr_fetching_bd28, C::instr_fetching_bd29, C::instr_fetching_bd30, C::instr_fetching_bd31,
        C::instr_fetching_bd32, C::instr_fetching_bd33, C::instr_fetching_bd34, C::instr_fetching_bd35,
        C::instr_fetching_bd36,
    };

    constexpr std::array<C, 7> operand_columns = {
        C::instr_fetching_op1, C::instr_fetching_op2, C::instr_fetching_op3, C::instr_fetching_op4,
        C::instr_fetching_op5, C::instr_fetching_op6, C::instr_fetching_op7,
    };

    constexpr auto num_opcodes = static_cast<size_t>(WireOpCode::LAST_OPCODE_SENTINEL);
    std::vector<WireOpCode> opcodes;
    opcodes.reserve(num_opcodes);
    for (size_t i = 0; i < num_opcodes; i++) {
        opcodes.emplace_back(static_cast<WireOpCode>(i));
    }

    std::vector<size_t> pcs = gen_pcs(opcodes);
    std::vector<Instruction> instructions = gen_instructions(opcodes);
    auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(create_bytecode(instructions));
    constexpr BytecodeId bytecode_id = 1;
    const auto events = create_instruction_fetching_events(instructions, pcs, bytecode_ptr, bytecode_id);

    builder.process_instruction_fetching(events, trace);

    for (uint32_t i = 0; i < num_opcodes; i++) {
        const auto& instr = instructions.at(i);
        const auto instr_encoded = instr.serialize();
        const auto w_opcode = static_cast<WireOpCode>(i);
        const auto expected_size_in_bytes = get_wire_instruction_spec().at(w_opcode).size_in_bytes;

        ASSERT_EQ(instr_encoded.size(), expected_size_in_bytes);
        EXPECT_EQ(FF(expected_size_in_bytes), trace.get(C::instr_fetching_instr_size, i + 1));

        // Each serialized byte must appear in its bd column.
        for (size_t j = 0; j < static_cast<size_t>(expected_size_in_bytes); j++) {
            EXPECT_EQ(FF(instr_encoded.at(j)), trace.get(bd_columns.at(j), i + 1));
        }

        // Assumption: the wire spec exposes the corresponding execution opcode.
        EXPECT_EQ(FF(static_cast<uint32_t>(get_wire_instruction_spec().at(w_opcode).exec_opcode)),
                  trace.get(C::instr_fetching_exec_opcode, i + 1));
        EXPECT_EQ(FF(instr.addressing_mode), trace.get(C::instr_fetching_addressing_mode, i + 1));
        EXPECT_EQ(FF(pcs.at(i)), trace.get(C::instr_fetching_pc, i + 1));

        size_t operand_idx = 0;
        for (const auto& operand : instr.operands) {
            EXPECT_EQ(FF(operand), trace.get(operand_columns.at(operand_idx++), i + 1));
        }
    }
}
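// Ten instructions in one bytecode: per-row byte/window bookkeeping
// (bytes_to_read, abs diffs, tag selector). The concrete opcode selection and
// the 37-byte window (bd0..bd36) are assumptions of this sketch.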
TEST(BytecodeTraceGenTest, InstrFetchingSingleBytecode)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    constexpr size_t num_of_opcodes = 10;
    // Arbitrary choice: the first ten wire opcodes.
    std::vector<WireOpCode> opcodes;
    opcodes.reserve(num_of_opcodes);
    for (size_t i = 0; i < num_of_opcodes; i++) {
        opcodes.emplace_back(static_cast<WireOpCode>(i));
    }

    std::vector<size_t> pcs = gen_pcs(opcodes);
    std::vector<Instruction> instructions = gen_instructions(opcodes);
    const auto bytecode = create_bytecode(instructions);
    auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(bytecode);
    const auto events = create_instruction_fetching_events(instructions, pcs, bytecode_ptr, /*bytecode_id=*/1);

    builder.process_instruction_fetching(events, trace);

    const auto rows = trace.as_rows();
    const auto bytecode_size = bytecode.size();
    EXPECT_EQ(rows.size(), num_of_opcodes + 1);

    for (size_t i = 0; i < num_of_opcodes; i++) {
        const auto pc = pcs.at(i);
        const auto instr_size = get_wire_instruction_spec().at(opcodes.at(i)).size_in_bytes;
        // Assumption: none of the selected opcodes keeps its tag in operand 2.
        const auto tag_is_op2 = 0;

        const auto bytes_remaining = bytecode_size - pc;
        // The fetching window spans bd0..bd36 (37 bytes).
        const auto bytes_to_read = std::min<size_t>(bytes_remaining, 37);
        EXPECT_LE(instr_size, bytes_to_read);
        const auto instr_abs_diff = bytes_to_read - instr_size;

        EXPECT_LT(pc, bytecode_size);
        const auto pc_abs_diff = bytecode_size - pc - 1;

        ASSERT_LE(bytecode_size, UINT16_MAX);

        EXPECT_THAT(rows.at(i + 1),
                    AllOf(ROW_FIELD_EQ(instr_fetching_bd0, static_cast<uint8_t>(opcodes.at(i))),
                          ROW_FIELD_EQ(instr_fetching_bytes_to_read, bytes_to_read),
                          ROW_FIELD_EQ(instr_fetching_bytecode_size, bytecode_size),
                          ROW_FIELD_EQ(instr_fetching_instr_abs_diff, instr_abs_diff),
                          ROW_FIELD_EQ(instr_fetching_pc_abs_diff, pc_abs_diff),
                          ROW_FIELD_EQ(instr_fetching_sel_tag_is_op2, tag_is_op2)));
    }
}
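// The same instruction pair registered under three bytecode ids: 6 rows in
// total, each bytecode restarting at pc 0.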
TEST(BytecodeTraceGenTest, InstrFetchingMultipleBytecodes)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    constexpr size_t num_of_opcodes = 2;
    // Arbitrary pair of wire opcodes.
    std::vector<WireOpCode> opcodes;
    opcodes.reserve(num_of_opcodes);
    for (size_t i = 0; i < num_of_opcodes; i++) {
        opcodes.emplace_back(static_cast<WireOpCode>(i));
    }

    std::vector<size_t> pcs = gen_pcs(opcodes);
    std::vector<Instruction> instructions = gen_instructions(opcodes);
    auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(create_bytecode(instructions));

    std::vector<InstructionFetchingEvent> events;
    for (size_t i = 0; i < 3; i++) {
        const auto new_events =
            create_instruction_fetching_events(instructions, pcs, bytecode_ptr, static_cast<BytecodeId>(i + 1));
        events.insert(events.end(), new_events.begin(), new_events.end());
    }

    builder.process_instruction_fetching(events, trace);

    const auto rows = trace.as_rows();
    EXPECT_EQ(rows.size(), 6 + 1);

    // The first row of each bytecode starts at pc 0.
    for (size_t i = 0; i < 3; i++) {
        EXPECT_THAT(rows.at((2 * i) + 1), ROW_FIELD_EQ(instr_fetching_pc, 0));
    }
}
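// Deliberately unparseable fetches (truncated instruction, pc past the end)
// must still produce one row per event.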
TEST(BytecodeTraceGenTest, InstrFetchingParsingErrors)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    constexpr size_t bytecode_size = 20;
    std::vector<uint8_t> bytecode(bytecode_size);
    for (size_t i = 0; i < bytecode_size; i++) {
        bytecode[i] = static_cast<uint8_t>(i);
    }
    auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(bytecode);
    constexpr BytecodeId bytecode_id = 1;

    // The pcs below are placeholders for positions whose parse fails, e.g. an
    // instruction truncated by the end of the bytecode or a pc past the end.
    std::vector<InstructionFetchingEvent> events;
    events.emplace_back(InstructionFetchingEvent{
        .bytecode_id = bytecode_id,
        .pc = 18,
        .bytecode = bytecode_ptr,
    });
    events.emplace_back(InstructionFetchingEvent{
        .bytecode_id = bytecode_id,
        .pc = 19,
        .bytecode = bytecode_ptr,
    });
    events.emplace_back(InstructionFetchingEvent{
        .bytecode_id = bytecode_id,
        .pc = 25,
        .bytecode = bytecode_ptr,
    });

    builder.process_instruction_fetching(events, trace);

    const auto rows = trace.as_rows();
    ASSERT_EQ(rows.size(), 3 + 1);

    // One row per failing fetch, each recording the pc it was asked to parse.
    EXPECT_THAT(rows.at(1), ROW_FIELD_EQ(instr_fetching_pc, 18));
    EXPECT_THAT(rows.at(2), ROW_FIELD_EQ(instr_fetching_pc, 19));
    EXPECT_THAT(rows.at(3), ROW_FIELD_EQ(instr_fetching_pc, 25));
}
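// Structurally valid instructions whose tag operands hold out-of-range values:
// CAST_8 carries its tag in operand 2, SET_64 in operand 1. The rows must still
// record the window bookkeeping for both fetches.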
TEST(BytecodeTraceGenTest, InstrFetchingErrorTagOutOfRange)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    auto instr_cast = random_instruction(WireOpCode::CAST_8);
    auto instr_set = random_instruction(WireOpCode::SET_64);

    constexpr uint32_t cast_size = 7;
    constexpr uint32_t set_64_size = 13;

    // Corrupt the tag operands with out-of-range values.
    instr_cast.operands.at(2) = Operand::from<uint8_t>(0x09);
    instr_set.operands.at(1) = Operand::from<uint8_t>(0x0A);

    auto bytecode = instr_cast.serialize();
    ASSERT_EQ(bytecode.size(), cast_size);

    auto instr_set_bytecode = instr_set.serialize();
    ASSERT_EQ(instr_set_bytecode.size(), set_64_size);

    bytecode.insert(bytecode.end(), instr_set_bytecode.begin(), instr_set_bytecode.end());
    auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(bytecode);

    std::vector<InstructionFetchingEvent> events;
    events.emplace_back(InstructionFetchingEvent{
        .bytecode_id = 1,
        .pc = 0,
        .instruction = instr_cast,
        .bytecode = bytecode_ptr,
    });
    events.emplace_back(InstructionFetchingEvent{
        .bytecode_id = 1,
        .pc = cast_size,
        .instruction = instr_set,
        .bytecode = bytecode_ptr,
    });

    builder.process_instruction_fetching(events, trace);

    const auto rows = trace.as_rows();
    ASSERT_EQ(rows.size(), 2 + 1);

    // CAST_8 at pc 0: the whole 20-byte bytecode is readable.
    EXPECT_THAT(rows.at(1),
                AllOf(ROW_FIELD_EQ(instr_fetching_bytes_to_read, cast_size + set_64_size),
                      ROW_FIELD_EQ(instr_fetching_pc_abs_diff, cast_size + set_64_size - 1)));

    // SET_64 at pc 7: only its own 13 bytes remain.
    EXPECT_THAT(rows.at(2),
                AllOf(ROW_FIELD_EQ(instr_fetching_bytes_to_read, set_64_size),
                      ROW_FIELD_EQ(instr_fetching_pc_abs_diff, set_64_size - 1)));
}
} // namespace
} // namespace bb::avm2::tracegen