Barretenberg
The ZK-SNARK library at the core of Aztec
Loading...
Searching...
No Matches
bytecode_trace.test.cpp
Go to the documentation of this file.
1#include <algorithm>
2#include <cstddef>
3#include <gmock/gmock.h>
4#include <gtest/gtest.h>
5
6#include <cstdint>
7#include <memory>
8#include <sys/types.h>
9#include <vector>
10
22
23namespace bb::avm2::tracegen {
24namespace {
25
26using C = Column;
28
29using simulation::Instruction;
30using simulation::InstructionFetchingEvent;
31
// Single successful retrieval of a bytecode for a NEW class: one event in,
// exactly one trace row out, with every bc_retrieval_* column populated and
// the retrieved-bytecodes tree advancing from snapshot_before to snapshot_after.
32TEST(BytecodeTraceGenTest, BasicRetrieval)
33{
34 TestTraceContainer trace;
35 BytecodeTraceBuilder builder;
36
37 const AppendOnlyTreeSnapshot snapshot_before = { .root = 12, .next_available_leaf_index = 1 };
38 const AppendOnlyTreeSnapshot snapshot_after = { .root = 34, .next_available_leaf_index = 2 };
39
40 builder.process_retrieval({ {
41 .bytecode_id = 43,
42 .address = 0xc0ffee,
43 .current_class_id = 34,
44 .contract_class = { .artifact_hash = 100, .private_functions_root = 200 },
45 .nullifier_root = 300,
46 .public_data_tree_root = 400,
47 .retrieved_bytecodes_snapshot_before = snapshot_before,
48 .retrieved_bytecodes_snapshot_after = snapshot_after,
49 .is_new_class = true,
50 } },
51 trace);
52 const auto rows = trace.as_rows();
53
54 // One retrieval event.
55 ASSERT_EQ(rows.size(), 1);
56
57 EXPECT_THAT(
58 rows.at(0),
59 AllOf(ROW_FIELD_EQ(bc_retrieval_sel, 1),
60 ROW_FIELD_EQ(bc_retrieval_bytecode_id, 43),
61 ROW_FIELD_EQ(bc_retrieval_address, 0xc0ffee),
62 ROW_FIELD_EQ(bc_retrieval_current_class_id, 34),
63 ROW_FIELD_EQ(bc_retrieval_artifact_hash, 100),
64 ROW_FIELD_EQ(bc_retrieval_private_functions_root, 200),
65 ROW_FIELD_EQ(bc_retrieval_nullifier_tree_root, 300),
66 ROW_FIELD_EQ(bc_retrieval_public_data_tree_root, 400),
67 ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_root, snapshot_before.root),
68 ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_size, snapshot_before.next_available_leaf_index),
69 ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_root, snapshot_after.root),
70 ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_size, snapshot_after.next_available_leaf_index),
71 ROW_FIELD_EQ(bc_retrieval_instance_exists, 1),
72 ROW_FIELD_EQ(bc_retrieval_is_new_class, 1),
73 ROW_FIELD_EQ(bc_retrieval_no_remaining_bytecodes, 0),
74 ROW_FIELD_EQ(bc_retrieval_remaining_bytecodes_inv,
// NOTE(review): the expected-value argument of the matcher above (original line 75)
// was lost in extraction -- recover it from the repository source before compiling.
76 ROW_FIELD_EQ(bc_retrieval_error, 0),
77 ROW_FIELD_EQ(bc_retrieval_should_retrieve, 1)));
78}
79
// Retrieval of a bytecode whose class was already seen (is_new_class = false):
// the retrieved-bytecodes tree snapshot must be unchanged (prev == next), and
// no error/limit flags are set.
80TEST(BytecodeTraceGenTest, RetrievalExistingClass)
81{
82 TestTraceContainer trace;
83 BytecodeTraceBuilder builder;
84
85 const AppendOnlyTreeSnapshot snapshot = { .root = FF(12), .next_available_leaf_index = 2 };
86
87 builder.process_retrieval({ {
88 .bytecode_id = 43,
89 .address = 0xc0ffee,
90 .current_class_id = 34,
91 .contract_class = { .artifact_hash = 100, .private_functions_root = 200 },
92 .nullifier_root = 300,
93 .public_data_tree_root = 400,
94 .retrieved_bytecodes_snapshot_before = snapshot,
95 .retrieved_bytecodes_snapshot_after = snapshot,
96 .is_new_class = false,
97 } },
98 trace);
99 const auto rows = trace.as_rows();
100
101 // One retrieval event.
102 ASSERT_EQ(rows.size(), 1);
103
104 EXPECT_THAT(
105 rows.at(0),
106 AllOf(ROW_FIELD_EQ(bc_retrieval_sel, 1),
107 ROW_FIELD_EQ(bc_retrieval_bytecode_id, 43),
108 ROW_FIELD_EQ(bc_retrieval_address, 0xc0ffee),
109 ROW_FIELD_EQ(bc_retrieval_current_class_id, 34),
110 ROW_FIELD_EQ(bc_retrieval_artifact_hash, 100),
111 ROW_FIELD_EQ(bc_retrieval_private_functions_root, 200),
112 ROW_FIELD_EQ(bc_retrieval_nullifier_tree_root, 300),
113 ROW_FIELD_EQ(bc_retrieval_public_data_tree_root, 400),
114 ROW_FIELD_EQ(bc_retrieval_instance_exists, 1),
115 ROW_FIELD_EQ(bc_retrieval_is_new_class, 0),
116 ROW_FIELD_EQ(bc_retrieval_no_remaining_bytecodes, 0),
117 ROW_FIELD_EQ(bc_retrieval_remaining_bytecodes_inv,
// NOTE(review): the expected-value argument of the matcher above (original line 118)
// was lost in extraction -- recover it from the repository source before compiling.
119 ROW_FIELD_EQ(bc_retrieval_error, 0),
120 ROW_FIELD_EQ(bc_retrieval_should_retrieve, 1),
121 ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_root, snapshot.root),
122 ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_size, snapshot.next_available_leaf_index),
123 ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_root, snapshot.root),
124 ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_size, snapshot.next_available_leaf_index)));
125}
126
// Three retrieval events processed in one call: two new classes (the tree
// snapshot advances before -> after_0 -> after_1) followed by one existing
// class (snapshot stays at after_1). Each event must produce one row whose
// prev/next tree columns chain through the snapshots in order.
127TEST(BytecodeTraceGenTest, MultipleRetrievalEvents)
128{
129 TestTraceContainer trace;
130 BytecodeTraceBuilder builder;
131
132 const AppendOnlyTreeSnapshot snapshot_before = { .root = 12, .next_available_leaf_index = 1 };
133 const AppendOnlyTreeSnapshot snapshot_after_0 = { .root = 34, .next_available_leaf_index = 2 };
134 const AppendOnlyTreeSnapshot snapshot_after_1 = { .root = 56, .next_available_leaf_index = 3 };
135
136 // Two new bytecodes, one existing:
137 builder.process_retrieval(
138 {
139 simulation::BytecodeRetrievalEvent{
140 .bytecode_id = 43,
141 .address = 0xc0ffee,
142 .current_class_id = 34,
143 .contract_class = { .artifact_hash = 100, .private_functions_root = 200 },
144 .nullifier_root = 300,
145 .public_data_tree_root = 400,
146 .retrieved_bytecodes_snapshot_before = snapshot_before,
147 .retrieved_bytecodes_snapshot_after = snapshot_after_0,
148 .is_new_class = true,
149 },
150 simulation::BytecodeRetrievalEvent{
151 .bytecode_id = 21,
152 .address = 0xdeadbeef,
153 .current_class_id = 56,
154 .contract_class = { .artifact_hash = 100, .private_functions_root = 200 },
155 .nullifier_root = 300,
156 .public_data_tree_root = 400,
157 .retrieved_bytecodes_snapshot_before = snapshot_after_0,
158 .retrieved_bytecodes_snapshot_after = snapshot_after_1,
159 .is_new_class = true,
160 },
161 simulation::BytecodeRetrievalEvent{
162 .bytecode_id = 21,
163 .address = 0xdeadb33f,
164 .current_class_id = 56,
165 .contract_class = { .artifact_hash = 100, .private_functions_root = 200 },
166 .nullifier_root = 300,
167 .public_data_tree_root = 400,
168 .retrieved_bytecodes_snapshot_before = snapshot_after_1,
169 .retrieved_bytecodes_snapshot_after = snapshot_after_1,
170 .is_new_class = false,
171 },
172 },
173 trace);
174 const auto rows = trace.as_rows();
175
176 // Three retrieval events.
177 ASSERT_EQ(rows.size(), 3);
178
179 EXPECT_THAT(
180 rows.at(0),
181 AllOf(ROW_FIELD_EQ(bc_retrieval_sel, 1),
182 ROW_FIELD_EQ(bc_retrieval_bytecode_id, 43),
183 ROW_FIELD_EQ(bc_retrieval_address, 0xc0ffee),
184 ROW_FIELD_EQ(bc_retrieval_current_class_id, 34),
185 ROW_FIELD_EQ(bc_retrieval_artifact_hash, 100),
186 ROW_FIELD_EQ(bc_retrieval_private_functions_root, 200),
187 ROW_FIELD_EQ(bc_retrieval_nullifier_tree_root, 300),
188 ROW_FIELD_EQ(bc_retrieval_public_data_tree_root, 400),
189 ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_root, snapshot_before.root),
190 ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_size, snapshot_before.next_available_leaf_index),
191 ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_root, snapshot_after_0.root),
192 ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_size, snapshot_after_0.next_available_leaf_index),
193 ROW_FIELD_EQ(bc_retrieval_instance_exists, 1),
194 ROW_FIELD_EQ(bc_retrieval_is_new_class, 1),
195 ROW_FIELD_EQ(bc_retrieval_no_remaining_bytecodes, 0),
196 ROW_FIELD_EQ(bc_retrieval_remaining_bytecodes_inv,
// NOTE(review): the expected-value argument of the matcher above (original line 197)
// was lost in extraction -- recover it from the repository source before compiling.
198 ROW_FIELD_EQ(bc_retrieval_error, 0),
199 ROW_FIELD_EQ(bc_retrieval_should_retrieve, 1)));
200
201 EXPECT_THAT(
202 rows.at(1),
203 AllOf(ROW_FIELD_EQ(bc_retrieval_sel, 1),
204 ROW_FIELD_EQ(bc_retrieval_bytecode_id, 21),
205 ROW_FIELD_EQ(bc_retrieval_address, 0xdeadbeef),
206 ROW_FIELD_EQ(bc_retrieval_current_class_id, 56),
207 ROW_FIELD_EQ(bc_retrieval_artifact_hash, 100),
208 ROW_FIELD_EQ(bc_retrieval_private_functions_root, 200),
209 ROW_FIELD_EQ(bc_retrieval_nullifier_tree_root, 300),
210 ROW_FIELD_EQ(bc_retrieval_public_data_tree_root, 400),
211 ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_root, snapshot_after_0.root),
212 ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_size, snapshot_after_0.next_available_leaf_index),
213 ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_root, snapshot_after_1.root),
214 ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_size, snapshot_after_1.next_available_leaf_index),
215 ROW_FIELD_EQ(bc_retrieval_instance_exists, 1),
216 ROW_FIELD_EQ(bc_retrieval_is_new_class, 1),
217 ROW_FIELD_EQ(bc_retrieval_no_remaining_bytecodes, 0),
218 ROW_FIELD_EQ(bc_retrieval_remaining_bytecodes_inv,
// NOTE(review): the expected-value argument of the matcher above (original line 219)
// was lost in extraction -- recover it from the repository source before compiling.
220 ROW_FIELD_EQ(bc_retrieval_error, 0),
221 ROW_FIELD_EQ(bc_retrieval_should_retrieve, 1)));
222
223 EXPECT_THAT(
224 rows.at(2),
225 AllOf(ROW_FIELD_EQ(bc_retrieval_sel, 1),
226 ROW_FIELD_EQ(bc_retrieval_bytecode_id, 21),
227 ROW_FIELD_EQ(bc_retrieval_address, 0xdeadb33f),
228 ROW_FIELD_EQ(bc_retrieval_current_class_id, 56),
229 ROW_FIELD_EQ(bc_retrieval_artifact_hash, 100),
230 ROW_FIELD_EQ(bc_retrieval_private_functions_root, 200),
231 ROW_FIELD_EQ(bc_retrieval_nullifier_tree_root, 300),
232 ROW_FIELD_EQ(bc_retrieval_public_data_tree_root, 400),
233 ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_root, snapshot_after_1.root),
234 ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_size, snapshot_after_1.next_available_leaf_index),
235 ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_root, snapshot_after_1.root),
236 ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_size, snapshot_after_1.next_available_leaf_index),
237 ROW_FIELD_EQ(bc_retrieval_instance_exists, 1),
238 ROW_FIELD_EQ(bc_retrieval_is_new_class, 0),
239 ROW_FIELD_EQ(bc_retrieval_no_remaining_bytecodes, 0),
240 ROW_FIELD_EQ(bc_retrieval_remaining_bytecodes_inv,
// NOTE(review): the expected-value argument of the matcher above (original line 241)
// was lost in extraction -- recover it from the repository source before compiling.
242 ROW_FIELD_EQ(bc_retrieval_error, 0),
243 ROW_FIELD_EQ(bc_retrieval_should_retrieve, 1)));
244}
245
// Error path: contract instance does not exist at the address. The builder
// must raise bc_retrieval_error, clear should_retrieve, zero all class-related
// columns, and leave the retrieved-bytecodes tree snapshot untouched.
246TEST(BytecodeTraceGenTest, RetrievalInstanceNotFoundError)
247{
248 TestTraceContainer trace;
249 BytecodeTraceBuilder builder;
250
251 const AppendOnlyTreeSnapshot snapshot = { .root = FF(12), .next_available_leaf_index = 1 };
252
253 // The simulation sets class-related fields == 0 when the instance is not found.
254 builder.process_retrieval({ {
255 .bytecode_id = 0,
256 .address = 0xc0ffee,
257 .current_class_id = 0,
258 .contract_class = {},
259 .nullifier_root = 300,
260 .public_data_tree_root = 400,
261 .retrieved_bytecodes_snapshot_before = snapshot,
262 .retrieved_bytecodes_snapshot_after = snapshot,
263 .is_new_class = false,
// NOTE(review): original line 264 is missing from this extraction -- verify
// against the repository source whether a designated initializer was dropped here.
265 } },
266 trace);
267 const auto rows = trace.as_rows();
268
269 // One retrieval event.
270 ASSERT_EQ(rows.size(), 1);
271
272 EXPECT_THAT(
273 rows.at(0),
274 AllOf(ROW_FIELD_EQ(bc_retrieval_sel, 1),
275 ROW_FIELD_EQ(bc_retrieval_address, 0xc0ffee),
276 ROW_FIELD_EQ(bc_retrieval_instance_exists, 0),
277 ROW_FIELD_EQ(bc_retrieval_is_new_class, 0),
278 ROW_FIELD_EQ(bc_retrieval_no_remaining_bytecodes, 0),
279 ROW_FIELD_EQ(bc_retrieval_remaining_bytecodes_inv,
// NOTE(review): the expected-value argument of the matcher above (original line 280)
// was lost in extraction -- recover it from the repository source before compiling.
281 ROW_FIELD_EQ(bc_retrieval_error, 1),
282 ROW_FIELD_EQ(bc_retrieval_should_retrieve, 0),
283 // Class-related fields are zeroed:
284 ROW_FIELD_EQ(bc_retrieval_bytecode_id, 0),
285 ROW_FIELD_EQ(bc_retrieval_current_class_id, 0),
286 ROW_FIELD_EQ(bc_retrieval_artifact_hash, 0),
287 ROW_FIELD_EQ(bc_retrieval_private_functions_root, 0),
288 ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_root, snapshot.root),
289 ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_size, snapshot.next_available_leaf_index),
290 ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_root, snapshot.root),
291 ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_size, snapshot.next_available_leaf_index)));
292}
293
// Error path: the retrieved-bytecodes tree is full. With no leaf slots left,
// no_remaining_bytecodes is set (and its inverse column is 0), the error flag
// is raised, should_retrieve is cleared, and class fields are zeroed while the
// current_class_id from the instance is still reported.
294TEST(BytecodeTraceGenTest, RetrievalLimitError)
295{
296 TestTraceContainer trace;
297 BytecodeTraceBuilder builder;
298
299 // Set next leaf index to max tree size + 1:
300 const AppendOnlyTreeSnapshot snapshot = { .root = FF(12),
301 .next_available_leaf_index =
// NOTE(review): original line 302 (the leaf-index value and closing brace of
// the initializer) is missing from this extraction -- recover it from the
// repository source before compiling.
303 // The simulation sets class-related fields == 0 when the limit is reached.
304 builder.process_retrieval({ {
305 .bytecode_id = 0,
306 .address = 0xc0ffee,
307 .current_class_id = 34,
308 .contract_class = {},
309 .nullifier_root = 300,
310 .public_data_tree_root = 400,
311 .retrieved_bytecodes_snapshot_before = snapshot,
312 .retrieved_bytecodes_snapshot_after = snapshot,
313 .is_new_class = true,
// NOTE(review): original line 314 is missing from this extraction -- verify
// against the repository source whether a designated initializer was dropped here.
315 } },
316 trace);
317 const auto rows = trace.as_rows();
318
319 // One retrieval event.
320 ASSERT_EQ(rows.size(), 1);
321
322 EXPECT_THAT(
323 rows.at(0),
324 AllOf(ROW_FIELD_EQ(bc_retrieval_sel, 1),
325 ROW_FIELD_EQ(bc_retrieval_address, 0xc0ffee),
326 ROW_FIELD_EQ(bc_retrieval_instance_exists, 1),
327 ROW_FIELD_EQ(bc_retrieval_is_new_class, 1),
328 ROW_FIELD_EQ(bc_retrieval_no_remaining_bytecodes, 1),
329 ROW_FIELD_EQ(bc_retrieval_remaining_bytecodes_inv, 0),
330 ROW_FIELD_EQ(bc_retrieval_error, 1),
331 ROW_FIELD_EQ(bc_retrieval_should_retrieve, 0),
332 ROW_FIELD_EQ(bc_retrieval_current_class_id, 34),
333 // Class-related fields are zeroed:
334 ROW_FIELD_EQ(bc_retrieval_bytecode_id, 0),
335 ROW_FIELD_EQ(bc_retrieval_artifact_hash, 0),
336 ROW_FIELD_EQ(bc_retrieval_private_functions_root, 0),
337 ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_root, snapshot.root),
338 ROW_FIELD_EQ(bc_retrieval_prev_retrieved_bytecodes_tree_size, snapshot.next_available_leaf_index),
339 ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_root, snapshot.root),
340 ROW_FIELD_EQ(bc_retrieval_next_retrieved_bytecodes_tree_size, snapshot.next_available_leaf_index)));
341}
342
// Decomposition of a 4-byte bytecode (shorter than DECOMPOSE_WINDOW_SIZE):
// one row per byte, pc increasing and bytes_remaining decreasing, with the
// sliding-window byte columns (bytes_pc_plus_N) shifting left each row.
// The first row is the packed row (sel_packed/start = 1); the final row sets
// last_of_contract. next_packed_pc = 31 for pc >= 1 matches the 31-byte field
// packing used by the hashing subtrace.
343TEST(BytecodeTraceGenTest, BasicShortLength)
344{
345 TestTraceContainer trace;
346 BytecodeTraceBuilder builder;
347
348 builder.process_decomposition(
349 {
350 simulation::BytecodeDecompositionEvent{
351 .bytecode_id = 43,
352 .bytecode = std::make_shared<std::vector<uint8_t>>(std::vector<uint8_t>{ 12, 31, 5, 2 }),
353 },
354 },
355 trace);
356 auto rows = trace.as_rows();
357
358 // One extra empty row is prepended. Note that precomputed_first_row is not set through process_decomposition()
359 // because it pertains to another subtrace.
360 ASSERT_EQ(rows.size(), 4 + 1);
361
362 // We do not inspect row at index 0 as it is completely empty.
363 EXPECT_THAT(rows.at(1),
364 AllOf(ROW_FIELD_EQ(bc_decomposition_sel, 1),
365 ROW_FIELD_EQ(bc_decomposition_id, 43),
366 ROW_FIELD_EQ(bc_decomposition_bytes, 12),
367 ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_1, 31),
368 ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_2, 5),
369 ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_3, 2),
370 ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_4, 0),
371 ROW_FIELD_EQ(bc_decomposition_pc, 0),
372 ROW_FIELD_EQ(bc_decomposition_bytes_remaining, 4),
373 ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
374 ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, FF(DECOMPOSE_WINDOW_SIZE - 4).invert()),
375 ROW_FIELD_EQ(bc_decomposition_sel_windows_eq_remaining, 0),
376 ROW_FIELD_EQ(bc_decomposition_bytes_to_read, 4),
377 ROW_FIELD_EQ(bc_decomposition_last_of_contract, 0),
378 ROW_FIELD_EQ(bc_decomposition_sel_packed, 1),
379 ROW_FIELD_EQ(bc_decomposition_start, 1),
380 ROW_FIELD_EQ(bc_decomposition_next_packed_pc, 0),
381 ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv, 0)));
382
383 EXPECT_THAT(rows.at(2),
384 AllOf(ROW_FIELD_EQ(bc_decomposition_sel, 1),
385 ROW_FIELD_EQ(bc_decomposition_id, 43),
386 ROW_FIELD_EQ(bc_decomposition_bytes, 31),
387 ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_1, 5),
388 ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_2, 2),
389 ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_3, 0),
390 ROW_FIELD_EQ(bc_decomposition_pc, 1),
391 ROW_FIELD_EQ(bc_decomposition_bytes_remaining, 3),
392 ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
393 ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, FF(DECOMPOSE_WINDOW_SIZE - 3).invert()),
394 ROW_FIELD_EQ(bc_decomposition_sel_windows_eq_remaining, 0),
395 ROW_FIELD_EQ(bc_decomposition_bytes_to_read, 3),
396 ROW_FIELD_EQ(bc_decomposition_sel_packed, 0),
397 ROW_FIELD_EQ(bc_decomposition_next_packed_pc, 31),
398 ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv, FF(31 - 1).invert()),
399 ROW_FIELD_EQ(bc_decomposition_last_of_contract, 0)));
400
401 EXPECT_THAT(rows.at(3),
402 AllOf(ROW_FIELD_EQ(bc_decomposition_sel, 1),
403 ROW_FIELD_EQ(bc_decomposition_id, 43),
404 ROW_FIELD_EQ(bc_decomposition_bytes, 5),
405 ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_1, 2),
406 ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_2, 0),
407 ROW_FIELD_EQ(bc_decomposition_pc, 2),
408 ROW_FIELD_EQ(bc_decomposition_bytes_remaining, 2),
409 ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
410 ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, FF(DECOMPOSE_WINDOW_SIZE - 2).invert()),
411 ROW_FIELD_EQ(bc_decomposition_sel_windows_eq_remaining, 0),
412 ROW_FIELD_EQ(bc_decomposition_bytes_to_read, 2),
413 ROW_FIELD_EQ(bc_decomposition_sel_packed, 0),
414 ROW_FIELD_EQ(bc_decomposition_next_packed_pc, 31),
415 ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv, FF(31 - 2).invert()),
416 ROW_FIELD_EQ(bc_decomposition_last_of_contract, 0)));
417
418 EXPECT_THAT(rows.at(4),
419 AllOf(ROW_FIELD_EQ(bc_decomposition_sel, 1),
420 ROW_FIELD_EQ(bc_decomposition_id, 43),
421 ROW_FIELD_EQ(bc_decomposition_bytes, 2),
422 ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_1, 0),
423 ROW_FIELD_EQ(bc_decomposition_pc, 3),
424 ROW_FIELD_EQ(bc_decomposition_bytes_remaining, 1),
425 ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
426 ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, FF(DECOMPOSE_WINDOW_SIZE - 1).invert()),
427 ROW_FIELD_EQ(bc_decomposition_sel_windows_eq_remaining, 0),
428 ROW_FIELD_EQ(bc_decomposition_bytes_to_read, 1),
429 ROW_FIELD_EQ(bc_decomposition_sel_packed, 0),
430 ROW_FIELD_EQ(bc_decomposition_next_packed_pc, 31),
431 ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv, FF(31 - 3).invert()),
432 ROW_FIELD_EQ(bc_decomposition_last_of_contract, 1)));
433}
434
// Decomposition edge case: a one-byte bytecode. The single data row is
// simultaneously the start row, the packed row, and the last_of_contract row,
// with all lookahead byte columns zeroed.
435TEST(BytecodeTraceGenTest, BasicSingleByte)
436{
437 TestTraceContainer trace;
438 BytecodeTraceBuilder builder;
439
440 builder.process_decomposition(
441 {
442 simulation::BytecodeDecompositionEvent{
443 .bytecode_id = 43,
444 .bytecode = std::make_shared<std::vector<uint8_t>>(std::vector<uint8_t>{ 24 }),
445 },
446 },
447 trace);
448 auto rows = trace.as_rows();
449
450 // One extra empty row is prepended. Note that precomputed_first_row is not set through process_decomposition()
451 // because it pertains to another subtrace.
452 ASSERT_EQ(rows.size(), 1 + 1);
453
454 // We do not inspect row at index 0 as it is completely empty.
455 EXPECT_THAT(rows.at(1),
456 AllOf(ROW_FIELD_EQ(bc_decomposition_sel, 1),
457 ROW_FIELD_EQ(bc_decomposition_id, 43),
458 ROW_FIELD_EQ(bc_decomposition_bytes, 24),
459 ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_1, 0),
460 ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_2, 0),
461 ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_3, 0),
462 ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_4, 0),
463 ROW_FIELD_EQ(bc_decomposition_pc, 0),
464 ROW_FIELD_EQ(bc_decomposition_bytes_remaining, 1),
465 ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
466 ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, FF(DECOMPOSE_WINDOW_SIZE - 1).invert()),
467 ROW_FIELD_EQ(bc_decomposition_sel_windows_eq_remaining, 0),
468 ROW_FIELD_EQ(bc_decomposition_bytes_to_read, 1),
469 ROW_FIELD_EQ(bc_decomposition_last_of_contract, 1),
470 ROW_FIELD_EQ(bc_decomposition_sel_packed, 1),
471 ROW_FIELD_EQ(bc_decomposition_start, 1),
472 ROW_FIELD_EQ(bc_decomposition_next_packed_pc, 0),
473 ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv, 0)));
474}
475
// Decomposition of a bytecode longer than DECOMPOSE_WINDOW_SIZE (by 8 bytes),
// exercising the three regimes of the window/remaining comparison:
// remaining > window (gt=0, eq=0), remaining == window (eq=1, inverse column
// is 0), and remaining < window (gt=1). Rows 9/10 straddle the boundary.
476TEST(BytecodeTraceGenTest, BasicLongerThanWindowSize)
477{
478 TestTraceContainer trace;
479 BytecodeTraceBuilder builder;
480
481 constexpr auto bytecode_size = DECOMPOSE_WINDOW_SIZE + 8;
482 std::vector<uint8_t> bytecode(bytecode_size);
483 const uint8_t first_byte = 17; // Arbitrary start value and we increment by one. We will hit invalid opcodes
484 // but it should not matter.
485
486 for (uint8_t i = 0; i < bytecode_size; i++) {
487 bytecode[i] = i + first_byte;
488 }
489
490 builder.process_decomposition(
491 {
492 simulation::BytecodeDecompositionEvent{
493 .bytecode_id = 7,
// NOTE(review): original line 494 (presumably the `.bytecode = ...` initializer
// wrapping `bytecode` in a shared_ptr) is missing from this extraction --
// recover it from the repository source before compiling.
495 },
496 },
497 trace);
498 auto rows = trace.as_rows();
499
500 // One extra empty row is prepended. Note that precomputed_first_row is not set through process_decomposition()
501 // because it pertains to another subtrace.
502 ASSERT_EQ(rows.size(), bytecode_size + 1);
503
504 // We do not inspect row at index 0 as it is completely empty.
505 EXPECT_THAT(rows.at(1),
506 AllOf(ROW_FIELD_EQ(bc_decomposition_sel, 1),
507 ROW_FIELD_EQ(bc_decomposition_id, 7),
508 ROW_FIELD_EQ(bc_decomposition_bytes, first_byte),
509 ROW_FIELD_EQ(bc_decomposition_pc, 0),
510 ROW_FIELD_EQ(bc_decomposition_bytes_remaining, bytecode_size),
511 ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 0),
512 ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, FF(-8).invert()),
513 ROW_FIELD_EQ(bc_decomposition_sel_windows_eq_remaining, 0),
514 ROW_FIELD_EQ(bc_decomposition_bytes_to_read, DECOMPOSE_WINDOW_SIZE),
515 ROW_FIELD_EQ(bc_decomposition_sel_packed, 1),
516 ROW_FIELD_EQ(bc_decomposition_start, 1),
517 ROW_FIELD_EQ(bc_decomposition_next_packed_pc, 0),
518 ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv, 0),
519 ROW_FIELD_EQ(bc_decomposition_last_of_contract, 0)));
520
521 // We are interested to inspect the boundary around bytes_remaining == window size.
522
523 EXPECT_THAT(rows.at(9),
524 AllOf(ROW_FIELD_EQ(bc_decomposition_sel, 1),
525 ROW_FIELD_EQ(bc_decomposition_id, 7),
526 ROW_FIELD_EQ(bc_decomposition_bytes, first_byte + 8),
527 ROW_FIELD_EQ(bc_decomposition_pc, 8),
528 ROW_FIELD_EQ(bc_decomposition_bytes_remaining, DECOMPOSE_WINDOW_SIZE),
529 ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 0),
530 ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, 0),
531 ROW_FIELD_EQ(bc_decomposition_sel_windows_eq_remaining, 1),
532 ROW_FIELD_EQ(bc_decomposition_bytes_to_read, DECOMPOSE_WINDOW_SIZE),
533 ROW_FIELD_EQ(bc_decomposition_last_of_contract, 0)));
534
535 EXPECT_THAT(rows.at(10),
536 AllOf(ROW_FIELD_EQ(bc_decomposition_sel, 1),
537 ROW_FIELD_EQ(bc_decomposition_id, 7),
538 ROW_FIELD_EQ(bc_decomposition_bytes, first_byte + 9),
539 ROW_FIELD_EQ(bc_decomposition_pc, 9),
540 ROW_FIELD_EQ(bc_decomposition_bytes_remaining, DECOMPOSE_WINDOW_SIZE - 1),
541 ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
542 ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, 1),
543 ROW_FIELD_EQ(bc_decomposition_sel_windows_eq_remaining, 0),
544 ROW_FIELD_EQ(bc_decomposition_bytes_to_read, DECOMPOSE_WINDOW_SIZE - 1),
545 ROW_FIELD_EQ(bc_decomposition_sel_packed, 0),
546 ROW_FIELD_EQ(bc_decomposition_next_packed_pc, 31),
547 ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv, FF(31 - 9).invert()),
548 ROW_FIELD_EQ(bc_decomposition_last_of_contract, 0)));
549
550 // Last row
551 EXPECT_THAT(rows.at(bytecode_size),
552 AllOf(ROW_FIELD_EQ(bc_decomposition_sel, 1),
553 ROW_FIELD_EQ(bc_decomposition_id, 7),
554 ROW_FIELD_EQ(bc_decomposition_bytes, first_byte + bytecode_size - 1),
555 ROW_FIELD_EQ(bc_decomposition_pc, bytecode_size - 1),
556 ROW_FIELD_EQ(bc_decomposition_bytes_remaining, 1),
557 ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
558 ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, FF(DECOMPOSE_WINDOW_SIZE - 1).invert()),
559 ROW_FIELD_EQ(bc_decomposition_sel_windows_eq_remaining, 0),
560 ROW_FIELD_EQ(bc_decomposition_bytes_to_read, 1),
561 ROW_FIELD_EQ(bc_decomposition_sel_packed, 0),
562 ROW_FIELD_EQ(bc_decomposition_next_packed_pc, 62),
563 ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv, FF(62 - (bytecode_size - 1)).invert()),
564 ROW_FIELD_EQ(bc_decomposition_last_of_contract, 1)));
565}
566
// Four decomposition events of varying sizes (longer than, shorter than, and
// exactly equal to DECOMPOSE_WINDOW_SIZE, plus a single byte) processed
// together. Every row of every bytecode is checked generically: remaining-byte
// bookkeeping, window comparison selectors and inverse, packed-pc tracking
// (next packed boundary every 31 bytes), and start/last_of_contract flags.
567TEST(BytecodeTraceGenTest, MultipleEvents)
568{
569 TestTraceContainer trace;
570 BytecodeTraceBuilder builder;
571
572 std::vector<uint32_t> bc_sizes = { DECOMPOSE_WINDOW_SIZE + 2, 17, DECOMPOSE_WINDOW_SIZE, 1 };
// NOTE(review): original line 573 (presumably the declaration of `bytecodes`,
// the destination container of the std::transform below) is missing from this
// extraction -- recover it from the repository source before compiling.
575 std::transform(bc_sizes.begin(), bc_sizes.end(), bytecodes.begin(), [](uint32_t bc_size) -> std::vector<uint8_t> {
576 std::vector<uint8_t> bytecode(bc_size);
577 for (uint8_t i = 0; i < static_cast<uint8_t>(bc_size); i++) {
578 bytecode[i] = i * i; // Arbitrary bytecode that we will not inspect below
579 }
580
581 return bytecode;
582 });
583
584 builder.process_decomposition(
585 {
586 simulation::BytecodeDecompositionEvent{
587 .bytecode_id = 0,
588 .bytecode = std::make_shared<std::vector<uint8_t>>(bytecodes[0]),
589 },
590 simulation::BytecodeDecompositionEvent{
591 .bytecode_id = 1,
592 .bytecode = std::make_shared<std::vector<uint8_t>>(bytecodes[1]),
593 },
594 simulation::BytecodeDecompositionEvent{
595 .bytecode_id = 2,
596 .bytecode = std::make_shared<std::vector<uint8_t>>(bytecodes[2]),
597 },
598 simulation::BytecodeDecompositionEvent{
599 .bytecode_id = 3,
600 .bytecode = std::make_shared<std::vector<uint8_t>>(bytecodes[3]),
601 },
602 },
603 trace);
604 auto rows = trace.as_rows();
605
606 // One extra empty row is prepended.
607 ASSERT_EQ(rows.size(), (2 * DECOMPOSE_WINDOW_SIZE) + 20 + 1);
608
609 size_t row_pos = 1;
610 for (uint32_t i = 0; i < 4; i++) {
611 uint32_t next_packed_pc = 0;
612 for (uint32_t j = 0; j < bc_sizes[i]; j++) {
613 const auto bytes_rem = bc_sizes[i] - j;
614 EXPECT_THAT(
615 rows.at(row_pos),
616 AllOf(
617 ROW_FIELD_EQ(bc_decomposition_sel, 1),
618 ROW_FIELD_EQ(bc_decomposition_id, i),
619 ROW_FIELD_EQ(bc_decomposition_pc, j),
620 ROW_FIELD_EQ(bc_decomposition_bytes_remaining, bytes_rem),
621 ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, DECOMPOSE_WINDOW_SIZE > bytes_rem ? 1 : 0),
// NOTE(review): original line 622 (presumably the `ROW_FIELD_EQ(` opener for
// the matcher continued on the next two lines) is missing from this
// extraction -- recover it from the repository source before compiling.
623 bc_decomposition_windows_min_remaining_inv,
624 bytes_rem == DECOMPOSE_WINDOW_SIZE ? 0 : (FF(DECOMPOSE_WINDOW_SIZE) - FF(bytes_rem)).invert()),
625 ROW_FIELD_EQ(bc_decomposition_sel_windows_eq_remaining, bytes_rem == DECOMPOSE_WINDOW_SIZE ? 1 : 0),
626 ROW_FIELD_EQ(bc_decomposition_bytes_to_read, std::min(DECOMPOSE_WINDOW_SIZE, bytes_rem)),
627 ROW_FIELD_EQ(bc_decomposition_sel_packed, j == next_packed_pc ? 1 : 0),
628 ROW_FIELD_EQ(bc_decomposition_next_packed_pc, next_packed_pc),
629 ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv,
630 j == next_packed_pc ? 0 : FF(next_packed_pc - j).invert()),
631 ROW_FIELD_EQ(bc_decomposition_start, j == 0 ? 1 : 0),
632 ROW_FIELD_EQ(bc_decomposition_last_of_contract, j == bc_sizes[i] - 1 ? 1 : 0)));
633 row_pos++;
634 next_packed_pc += j % 31 == 0 ? 31 : 0;
635 }
636 }
637}
638
// Hashing of a 93-byte bytecode packed into three 31-byte fields plus the
// prepended length|separator field (input_len = 4, processed over two rounds).
// Row 1 is the start row; row 2 absorbs the final field, is padded, and
// latches the output hash.
639TEST(BytecodeTraceGenTest, BasicHashing)
640{
641 TestTraceContainer trace;
642 BytecodeTraceBuilder builder;
643
644 builder.process_hashing(
645 {
646 simulation::BytecodeHashingEvent{
647 .bytecode_id = 1,
648 .bytecode_length = 93,
649 .bytecode_fields = { 10, 20, 30 },
650 },
651 },
652 trace);
653 const auto rows = trace.as_rows();
654
655 // One extra empty row is prepended.
656 EXPECT_THAT(
657 rows.at(1),
658 AllOf(ROW_FIELD_EQ(bc_hashing_sel, 1),
659 ROW_FIELD_EQ(bc_hashing_start, 1),
660 ROW_FIELD_EQ(bc_hashing_sel_not_start, 0),
661 ROW_FIELD_EQ(bc_hashing_sel_not_padding_1, 1),
662 ROW_FIELD_EQ(bc_hashing_sel_not_padding_2, 1),
663 ROW_FIELD_EQ(bc_hashing_latch, 0),
664 ROW_FIELD_EQ(bc_hashing_bytecode_id, 1),
665 ROW_FIELD_EQ(bc_hashing_pc_index, 0),
666 // We don't increment at start to account for the prepended first field length | separator:
667 ROW_FIELD_EQ(bc_hashing_pc_index_1, 0),
668 ROW_FIELD_EQ(bc_hashing_pc_index_2, 31),
669 ROW_FIELD_EQ(bc_hashing_packed_fields_0, simulation::compute_public_bytecode_first_field(93)),
670 ROW_FIELD_EQ(bc_hashing_packed_fields_1, 10),
671 ROW_FIELD_EQ(bc_hashing_packed_fields_2, 20),
672 ROW_FIELD_EQ(bc_hashing_size_in_bytes, 93),
673 ROW_FIELD_EQ(bc_hashing_input_len, 4),
674 ROW_FIELD_EQ(bc_hashing_rounds_rem, 2),
675 ROW_FIELD_EQ(bc_hashing_output_hash,
// NOTE(review): the expected-value argument of the matcher above (original
// line 676) was lost in extraction -- recover it from the repository source.
677 ROW_FIELD_EQ(bc_hashing_pc_at_final_field, 0)));
678
679 // Latched row
680 EXPECT_THAT(
681 rows.at(2),
682 AllOf(ROW_FIELD_EQ(bc_hashing_sel, 1),
683 ROW_FIELD_EQ(bc_hashing_start, 0),
684 ROW_FIELD_EQ(bc_hashing_sel_not_start, 1),
685 ROW_FIELD_EQ(bc_hashing_sel_not_padding_1, 0),
686 ROW_FIELD_EQ(bc_hashing_sel_not_padding_2, 0),
687 ROW_FIELD_EQ(bc_hashing_latch, 1),
688 ROW_FIELD_EQ(bc_hashing_bytecode_id, 1),
689 ROW_FIELD_EQ(bc_hashing_pc_index, 62),
690 ROW_FIELD_EQ(bc_hashing_pc_index_1, 93),
691 ROW_FIELD_EQ(bc_hashing_pc_index_2, 124),
692 ROW_FIELD_EQ(bc_hashing_packed_fields_0, 30),
693 ROW_FIELD_EQ(bc_hashing_packed_fields_1, 0),
694 ROW_FIELD_EQ(bc_hashing_packed_fields_2, 0),
695 ROW_FIELD_EQ(bc_hashing_input_len, 4),
696 ROW_FIELD_EQ(bc_hashing_rounds_rem, 1),
697 ROW_FIELD_EQ(bc_hashing_output_hash,
// NOTE(review): the expected-value argument of the matcher above (original
// line 698) was lost in extraction -- recover it from the repository source.
699 ROW_FIELD_EQ(bc_hashing_pc_at_final_field, 62)));
700}
701
702std::vector<Instruction> gen_random_instructions(std::span<const WireOpCode> opcodes)
703{
704 std::vector<Instruction> instructions;
705 instructions.reserve(opcodes.size());
706 for (const auto& opcode : opcodes) {
707 instructions.emplace_back(testing::random_instruction(opcode));
708 }
709 return instructions;
710}
711
712std::vector<uint8_t> create_bytecode(std::span<const Instruction> instructions)
713{
714 std::vector<uint8_t> bytecode;
715 for (const auto& instruction : instructions) {
716 auto serialized_instruction = instruction.serialize();
717 bytecode.insert(bytecode.end(),
718 std::make_move_iterator(serialized_instruction.begin()),
719 std::make_move_iterator(serialized_instruction.end()));
720 }
721 return bytecode;
722}
723
724std::vector<size_t> gen_pcs(std::span<const WireOpCode> opcodes)
725{
726 std::vector<size_t> pcs;
727 pcs.reserve(opcodes.size());
728 size_t pc = 0;
729 for (const auto& opcode : opcodes) {
730 pcs.emplace_back(pc);
731 pc += get_wire_instruction_spec().at(opcode).size_in_bytes;
732 }
733 return pcs;
734}
735
// Pairs each instruction with its pre-computed pc (the two vectors must be the
// same length) and wraps them into InstructionFetchingEvents that all share the
// same bytecode id and bytecode pointer.
736std::vector<InstructionFetchingEvent> create_instruction_fetching_events(
737 const std::vector<Instruction>& instructions,
738 const std::vector<size_t>& pcs,
739 const std::shared_ptr<std::vector<uint8_t>>& bytecode_ptr,
740 const BytecodeId bytecode_id)
741{
// NOTE(review): original line 742 (presumably the declaration of the `events`
// vector reserved and returned below) is missing from this extraction --
// recover it from the repository source before compiling.
743 events.reserve(instructions.size());
744
745 for (size_t i = 0; i < instructions.size(); i++) {
746 events.emplace_back(InstructionFetchingEvent{
747 .bytecode_id = bytecode_id,
748 .pc = static_cast<uint32_t>(pcs.at(i)),
749 .instruction = instructions.at(i),
750 .bytecode = bytecode_ptr,
751 });
752 }
753 return events;
754}
755
756// We build a random InstructionFetchingEvent for each wire opcode.
757// We then verify that the bytes (bd0, bd1, ...) correspond to the serialized instruction.
// For every opcode we also cross-check instr_size against the wire instruction
// spec, plus the exec opcode, addressing mode, pc, and operand columns.
758TEST(BytecodeTraceGenTest, InstrDecompositionInBytesEachOpcode)
759{
760 TestTraceContainer trace;
761 BytecodeTraceBuilder builder;
762
// bd0..bd36: one column per byte of the longest possible wire instruction.
763 constexpr std::array<C, 37> bd_columns = {
764 C::instr_fetching_bd0, C::instr_fetching_bd1, C::instr_fetching_bd2, C::instr_fetching_bd3,
765 C::instr_fetching_bd4, C::instr_fetching_bd5, C::instr_fetching_bd6, C::instr_fetching_bd7,
766 C::instr_fetching_bd8, C::instr_fetching_bd9, C::instr_fetching_bd10, C::instr_fetching_bd11,
767 C::instr_fetching_bd12, C::instr_fetching_bd13, C::instr_fetching_bd14, C::instr_fetching_bd15,
768 C::instr_fetching_bd16, C::instr_fetching_bd17, C::instr_fetching_bd18, C::instr_fetching_bd19,
769 C::instr_fetching_bd20, C::instr_fetching_bd21, C::instr_fetching_bd22, C::instr_fetching_bd23,
770 C::instr_fetching_bd24, C::instr_fetching_bd25, C::instr_fetching_bd26, C::instr_fetching_bd27,
771 C::instr_fetching_bd28, C::instr_fetching_bd29, C::instr_fetching_bd30, C::instr_fetching_bd31,
772 C::instr_fetching_bd32, C::instr_fetching_bd33, C::instr_fetching_bd34, C::instr_fetching_bd35,
773 C::instr_fetching_bd36,
774 };
775
776 constexpr std::array<C, 7> operand_columns = {
777 C::instr_fetching_op1, C::instr_fetching_op2, C::instr_fetching_op3, C::instr_fetching_op4,
778 C::instr_fetching_op5, C::instr_fetching_op6, C::instr_fetching_op7,
779 };
780
781 constexpr BytecodeId bytecode_id = 1;
782 constexpr auto num_opcodes = static_cast<size_t>(WireOpCode::LAST_OPCODE_SENTINEL);
// NOTE(review): original line 784 (presumably the declaration of the `opcodes`
// vector reserved and filled below) is missing from this extraction --
// recover it from the repository source before compiling.
785 opcodes.reserve(num_opcodes);
786 for (size_t i = 0; i < num_opcodes; i++) {
787 opcodes.emplace_back(static_cast<WireOpCode>(i));
788 }
789
790 std::vector<Instruction> instructions = gen_random_instructions(opcodes);
791 std::vector<size_t> pcs = gen_pcs(opcodes);
792 std::vector<uint8_t> bytecode = create_bytecode(instructions);
793
// NOTE(review): original lines 794-795 (presumably the shared_ptr wrapping of
// `bytecode` into `bytecode_ptr` and the declaration of `events` assigned
// below) are missing from this extraction -- recover them from the repository
// source before compiling.
796 create_instruction_fetching_events(instructions, pcs, bytecode_ptr, bytecode_id);
797
798 builder.process_instruction_fetching(events, trace);
799
// Row i + 1 corresponds to opcode i (row 0 is the prepended empty row).
800 for (uint32_t i = 0; i < num_opcodes; i++) {
801 const auto& instr = instructions.at(i);
802 const auto instr_encoded = instr.serialize();
803 const auto w_opcode = static_cast<WireOpCode>(i);
804
805 // Check size_in_bytes column
806 const auto expected_size_in_bytes = get_wire_instruction_spec().at(w_opcode).size_in_bytes;
807 ASSERT_EQ(instr_encoded.size(), expected_size_in_bytes);
808 EXPECT_EQ(FF(expected_size_in_bytes), trace.get(C::instr_fetching_instr_size, i + 1));
809
810 // Inspect each byte
811 for (size_t j = 0; j < static_cast<size_t>(expected_size_in_bytes); j++) {
812 EXPECT_EQ(FF(instr_encoded.at(j)), trace.get(bd_columns.at(j), i + 1));
813 }
814
815 // Check execution opcode
816 EXPECT_EQ(FF(static_cast<uint8_t>(get_wire_instruction_spec().at(w_opcode).exec_opcode)),
817 trace.get(C::instr_fetching_exec_opcode, i + 1));
818
819 // Check indirect
820 EXPECT_EQ(FF(instr.addressing_mode), trace.get(C::instr_fetching_addressing_mode, i + 1));
821
822 // Check PCs
823 EXPECT_EQ(FF(pcs.at(i)), trace.get(C::instr_fetching_pc, i + 1));
824
825 // Check operands
826 size_t operand_idx = 0;
827 for (const auto& operand : instr.operands) {
828 EXPECT_EQ(FF(operand), trace.get(operand_columns.at(operand_idx++), i + 1));
829 }
830 }
831}
832
833TEST(BytecodeTraceGenTest, InstrFetchingSingleBytecode)
834{
835 TestTraceContainer trace;
836 BytecodeTraceBuilder builder;
837
838 constexpr BytecodeId bytecode_id = 1;
839 constexpr size_t num_of_opcodes = 10;
840 constexpr std::array<WireOpCode, num_of_opcodes> opcodes = {
845 };
846
847 std::vector<Instruction> instructions = gen_random_instructions(opcodes);
848 std::vector<size_t> pcs = gen_pcs(opcodes);
849 std::vector<uint8_t> bytecode = create_bytecode(instructions);
850
851 std::vector<InstructionFetchingEvent> events = create_instruction_fetching_events(
852 instructions, pcs, std::make_shared<std::vector<uint8_t>>(bytecode), bytecode_id);
853
854 builder.process_instruction_fetching(events, trace);
855
856 // One extra empty row is prepended.
857 const auto rows = trace.as_rows();
858 const auto bytecode_size = bytecode.size();
859 EXPECT_EQ(rows.size(), num_of_opcodes + 1);
860
861 for (size_t i = 0; i < num_of_opcodes; i++) {
862 const auto pc = pcs.at(i);
863 const auto instr_size = get_wire_instruction_spec().at(opcodes.at(i)).size_in_bytes;
864 const auto has_tag = get_wire_instruction_spec().at(opcodes.at(i)).tag_operand_idx.has_value();
865 const auto tag_is_op2 =
866 has_tag ? static_cast<int>(get_wire_instruction_spec().at(opcodes.at(i)).tag_operand_idx.value() == 2) : 0;
867 const auto bytes_remaining = bytecode_size - pc;
868 const auto bytes_to_read = std::min<size_t>(DECOMPOSE_WINDOW_SIZE, bytes_remaining);
869
870 EXPECT_LE(instr_size, bytes_to_read);
871 const auto instr_abs_diff = bytes_to_read - instr_size;
872
873 EXPECT_LT(pc, bytecode_size);
874 const auto pc_abs_diff = bytecode_size - pc - 1;
875
876 ASSERT_LE(bytecode_size, UINT16_MAX);
877
878 EXPECT_THAT(rows.at(i + 1),
879 AllOf(ROW_FIELD_EQ(instr_fetching_sel, 1),
880 ROW_FIELD_EQ(instr_fetching_pc, pc),
881 ROW_FIELD_EQ(instr_fetching_bd0, static_cast<uint8_t>(opcodes.at(i))),
882 ROW_FIELD_EQ(instr_fetching_bytecode_id, bytecode_id),
883 ROW_FIELD_EQ(instr_fetching_bytes_to_read, bytes_to_read),
884 ROW_FIELD_EQ(instr_fetching_bytecode_size, bytecode_size),
885 ROW_FIELD_EQ(instr_fetching_instr_size, instr_size),
886 ROW_FIELD_EQ(instr_fetching_instr_abs_diff, instr_abs_diff),
887 ROW_FIELD_EQ(instr_fetching_pc_abs_diff, pc_abs_diff),
888 ROW_FIELD_EQ(instr_fetching_pc_out_of_range, 0),
889 ROW_FIELD_EQ(instr_fetching_opcode_out_of_range, 0),
890 ROW_FIELD_EQ(instr_fetching_instr_out_of_range, 0),
891 ROW_FIELD_EQ(instr_fetching_tag_out_of_range, 0),
892 ROW_FIELD_EQ(instr_fetching_sel_parsing_err, 0),
893 ROW_FIELD_EQ(instr_fetching_sel_pc_in_range, 1),
894 ROW_FIELD_EQ(instr_fetching_sel_has_tag, has_tag),
895 ROW_FIELD_EQ(instr_fetching_sel_tag_is_op2, tag_is_op2),
896 ROW_FIELD_EQ(instr_fetching_sel_pc_in_range, 1)));
897 }
898}
899
900// Test involving 3 different bytecode_id's for each 2 opcodes (same bytecode).
901TEST(BytecodeTraceGenTest, InstrFetchingMultipleBytecodes)
902{
903 TestTraceContainer trace;
904 BytecodeTraceBuilder builder;
905
906 constexpr size_t num_of_opcodes = 2;
907 constexpr std::array<WireOpCode, num_of_opcodes> opcodes = {
910 };
911
912 std::vector<Instruction> instructions = gen_random_instructions(opcodes);
913 std::vector<size_t> pcs = gen_pcs(opcodes);
914 std::vector<uint8_t> bytecode = create_bytecode(instructions);
915
917 for (size_t i = 0; i < 3; i++) {
919 auto new_events =
920 create_instruction_fetching_events(instructions, pcs, bytecode_ptr, static_cast<BytecodeId>(i + 1));
921 events.insert(events.end(), new_events.begin(), new_events.end());
922 }
923
924 builder.process_instruction_fetching(events, trace);
925
926 // One extra empty row is prepended.
927 const auto rows = trace.as_rows();
928 EXPECT_EQ(rows.size(), 6 + 1);
929
930 for (size_t i = 0; i < 3; i++) {
931 EXPECT_THAT(rows.at((2 * i) + 1), ROW_FIELD_EQ(instr_fetching_pc, 0));
932 }
933}
934
935// Test which processes three single instruction events, each of one with a different parsing error.
936// The bytecode can be filled with trivial bytes of size 20 with all bytes being increasing from 0 to 19.
937// First byte at index 0 is set to LAST_OPCODE_SENTINEL + 1.
938// Then consider for the instruction events pc = 0, pc = 19, pc = 38.
939// pc == 0 will correspond to the error OPCODE_OUT_OF_RANGE
940// pc == 19 will have INSTRUCTION_OUT_OF_RANGE
941// pc == 38 will have PC_OUT_OF_RANGE
942// Check for each row that column instr_fetching_parsing_err in addition to the column of the respective error.
943// It is not an issue that the instruction is generated at random in the event and is not consistent with the
944// bytecode for this test case.
945TEST(BytecodeTraceGenTest, InstrFetchingParsingErrors)
946{
947 TestTraceContainer trace;
948 BytecodeTraceBuilder builder;
949
950 constexpr BytecodeId bytecode_id = 1;
951 constexpr size_t bytecode_size = 20;
952 std::vector<uint8_t> bytecode(bytecode_size);
953 for (size_t i = 0; i < bytecode_size; i++) {
954 bytecode[i] = static_cast<uint8_t>(i);
955 }
956 bytecode[0] = static_cast<uint8_t>(WireOpCode::LAST_OPCODE_SENTINEL) + 1;
957
960 events.emplace_back(InstructionFetchingEvent{
961 .bytecode_id = bytecode_id,
962 .pc = 0,
963 .bytecode = bytecode_ptr,
965 });
966 events.emplace_back(InstructionFetchingEvent{
967 .bytecode_id = bytecode_id,
968 .pc = 19,
969 .bytecode = bytecode_ptr,
971 });
972 events.emplace_back(InstructionFetchingEvent{
973 .bytecode_id = bytecode_id,
974 .pc = 38,
975 .bytecode = bytecode_ptr,
977 });
978
979 builder.process_instruction_fetching(events, trace);
980
981 // One extra empty row is prepended.
982 const auto rows = trace.as_rows();
983 ASSERT_EQ(rows.size(), 3 + 1);
984
985 EXPECT_THAT(rows.at(1),
986 AllOf(ROW_FIELD_EQ(instr_fetching_sel, 1),
987 ROW_FIELD_EQ(instr_fetching_sel_pc_in_range, 1),
988 ROW_FIELD_EQ(instr_fetching_pc, 0),
989 ROW_FIELD_EQ(instr_fetching_bytes_to_read, 20),
990 ROW_FIELD_EQ(instr_fetching_instr_size, 0),
991 ROW_FIELD_EQ(instr_fetching_instr_abs_diff,
992 20), // instr_size <= bytes_to_read: bytes_to_read - instr_size
993 ROW_FIELD_EQ(instr_fetching_sel_parsing_err, 1),
994 ROW_FIELD_EQ(instr_fetching_pc_abs_diff, 19), // bytecode_size - pc - 1 if bytecode_size > pc
995 ROW_FIELD_EQ(instr_fetching_opcode_out_of_range, 1)));
996
997 EXPECT_THAT(rows.at(2),
998 AllOf(ROW_FIELD_EQ(instr_fetching_sel, 1),
999 ROW_FIELD_EQ(instr_fetching_sel_pc_in_range, 1),
1000 ROW_FIELD_EQ(instr_fetching_pc, 19), // OR_16 opcode
1001 ROW_FIELD_EQ(instr_fetching_bytes_to_read, 1),
1002 ROW_FIELD_EQ(instr_fetching_instr_size, 8), // OR_16 is 8 bytes long
1003 ROW_FIELD_EQ(instr_fetching_instr_abs_diff,
1004 6), // instr_size > bytes_to_read: instr_size - bytes_to_read - 1
1005 ROW_FIELD_EQ(instr_fetching_sel_parsing_err, 1),
1006 ROW_FIELD_EQ(instr_fetching_pc_abs_diff, 0), // bytecode_size - pc - 1 if bytecode_size > pc
1007 ROW_FIELD_EQ(instr_fetching_instr_out_of_range, 1)));
1008
1009 EXPECT_THAT(
1010 rows.at(3),
1011 AllOf(ROW_FIELD_EQ(instr_fetching_sel, 1),
1012 ROW_FIELD_EQ(instr_fetching_sel_pc_in_range, 0),
1013 ROW_FIELD_EQ(instr_fetching_pc, 38),
1014 ROW_FIELD_EQ(instr_fetching_bytes_to_read, 0),
1015 ROW_FIELD_EQ(instr_fetching_instr_size, 0),
1016 ROW_FIELD_EQ(instr_fetching_instr_abs_diff, 0), // instr_size <= bytes_to_read: bytes_to_read - instr_size
1017 ROW_FIELD_EQ(instr_fetching_sel_parsing_err, 1),
1018 ROW_FIELD_EQ(instr_fetching_pc_abs_diff, 18), // pc - bytecode_size if bytecode_size <= pc
1019 ROW_FIELD_EQ(instr_fetching_pc_out_of_range, 1)));
1020}
1021
1022// Test on error tag out of range
1023TEST(BytecodeTraceGenTest, InstrFetchingErrorTagOutOfRange)
1024{
1026 using simulation::Operand;
1028 TestTraceContainer trace;
1029 BytecodeTraceBuilder builder;
1030
1031 auto instr_cast = random_instruction(WireOpCode::CAST_16);
1032 auto instr_set = random_instruction(WireOpCode::SET_64);
1033 constexpr uint32_t cast_size = 7;
1034 constexpr uint32_t set_64_size = 13;
1035
1036 instr_cast.operands.at(2) = Operand::from<uint8_t>(0x09); // tag operand mutation to 0x09 which is out of range
1037 instr_set.operands.at(1) = Operand::from<uint8_t>(0x0A); // tag operand mutation to 0x0A which is out of range
1038
1039 auto bytecode = instr_cast.serialize();
1040 ASSERT_EQ(bytecode.size(), cast_size);
1041
1042 auto instr_set_bytecode = instr_set.serialize();
1043 ASSERT_EQ(instr_set_bytecode.size(), set_64_size);
1044
1045 bytecode.insert(bytecode.end(), instr_set_bytecode.begin(), instr_set_bytecode.end());
1046
1047 const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(bytecode);
1048
1050 events.emplace_back(InstructionFetchingEvent{
1051 .bytecode_id = 1,
1052 .pc = 0,
1053 .instruction = deserialize_instruction(bytecode, 0), // Reflect more the real code path than passing instr_cast.
1054 .bytecode = bytecode_ptr,
1056 });
1057
1058 events.emplace_back(InstructionFetchingEvent{
1059 .bytecode_id = 1,
1060 .pc = cast_size,
1061 .instruction =
1062 deserialize_instruction(bytecode, cast_size), // Reflect more the real code path than passing instr_set.
1063 .bytecode = bytecode_ptr,
1065 });
1066
1067 builder.process_instruction_fetching(events, trace);
1068
1069 // One extra empty row is prepended.
1070 const auto rows = trace.as_rows();
1071 ASSERT_EQ(rows.size(), 2 + 1);
1072
1073 EXPECT_THAT(rows.at(1),
1074 AllOf(ROW_FIELD_EQ(instr_fetching_sel, 1),
1075 ROW_FIELD_EQ(instr_fetching_sel_pc_in_range, 1),
1076 ROW_FIELD_EQ(instr_fetching_sel_has_tag, 1),
1077 ROW_FIELD_EQ(instr_fetching_sel_tag_is_op2, 0),
1078 ROW_FIELD_EQ(instr_fetching_tag_value, 9),
1079 ROW_FIELD_EQ(instr_fetching_pc, 0),
1080 ROW_FIELD_EQ(instr_fetching_bytes_to_read, cast_size + set_64_size),
1081 ROW_FIELD_EQ(instr_fetching_instr_size, cast_size),
1082 ROW_FIELD_EQ(instr_fetching_instr_abs_diff,
1083 set_64_size), // instr_size <= bytes_to_read: bytes_to_read - instr_size
1084 ROW_FIELD_EQ(instr_fetching_sel_parsing_err, 1),
1085 ROW_FIELD_EQ(instr_fetching_pc_abs_diff,
1086 cast_size + set_64_size - 1), // bytecode_size - pc - 1 if bytecode_size > pc
1087 ROW_FIELD_EQ(instr_fetching_tag_out_of_range, 1)));
1088
1089 EXPECT_THAT(
1090 rows.at(2),
1091 AllOf(ROW_FIELD_EQ(instr_fetching_sel, 1),
1092 ROW_FIELD_EQ(instr_fetching_sel_pc_in_range, 1),
1093 ROW_FIELD_EQ(instr_fetching_sel_has_tag, 1),
1094 ROW_FIELD_EQ(instr_fetching_sel_tag_is_op2, 1),
1095 ROW_FIELD_EQ(instr_fetching_tag_value, 10),
1096 ROW_FIELD_EQ(instr_fetching_pc, cast_size),
1097 ROW_FIELD_EQ(instr_fetching_bytes_to_read, set_64_size),
1098 ROW_FIELD_EQ(instr_fetching_instr_size, set_64_size),
1099 ROW_FIELD_EQ(instr_fetching_instr_abs_diff, 0), // instr_size <= bytes_to_read: bytes_to_read - instr_size
1100 ROW_FIELD_EQ(instr_fetching_sel_parsing_err, 1),
1101 ROW_FIELD_EQ(instr_fetching_pc_abs_diff, set_64_size - 1), // bytecode_size - pc - 1 if bytecode_size > pc
1102 ROW_FIELD_EQ(instr_fetching_tag_out_of_range, 1)));
1103}
1104
1105} // namespace
1106} // namespace bb::avm2::tracegen
std::shared_ptr< Napi::ThreadSafeFunction > bytecode
#define MAX_PUBLIC_CALLS_TO_UNIQUE_CONTRACT_CLASS_IDS
std::vector< AvmFullRowConstRef > as_rows() const
const FF & get(Column col, uint32_t row) const
static FF hash(const std::vector< FF > &input)
Hashes a vector of field elements.
AluTraceBuilder builder
Definition alu.test.cpp:124
TestTraceContainer trace
std::vector< uint8_t > create_bytecode(const std::vector< bb::avm2::simulation::Instruction > &instructions)
Instruction instruction
#define ROW_FIELD_EQ(field_name, expression)
Definition macros.hpp:7
FF compute_public_bytecode_first_field(size_t bytecode_size)
Instruction deserialize_instruction(std::span< const uint8_t > bytecode, size_t pos)
Parsing of an instruction in the supplied bytecode at byte position pos. This checks that the WireOpC...
Instruction random_instruction(WireOpCode w_opcode)
Definition fixtures.cpp:125
AvmFlavorSettings::FF FF
Definition field.hpp:10
const std::unordered_map< WireOpCode, WireInstructionSpec > & get_wire_instruction_spec()
constexpr uint32_t DECOMPOSE_WINDOW_SIZE
TEST(BoomerangMegaCircuitBuilder, BasicCircuit)
STL namespace.
constexpr decltype(auto) get(::tuplet::tuple< T... > &&t) noexcept
Definition tuple.hpp:13