Barretenberg
The ZK-SNARK library at the core of Aztec
bytecode_trace.test.cpp
#include <algorithm>
#include <cstddef>
#include <gmock/gmock.h>
#include <gtest/gtest.h>

#include <array>
#include <cstdint>
#include <memory>
#include <span>
#include <sys/types.h>
#include <vector>

// Project-internal vm2 headers (tracegen builder, simulation events/serialization,
// testing fixtures and macros) are elided in this listing.

namespace bb::avm2::tracegen {
namespace {

using C = Column;
using ::testing::AllOf;

using simulation::Instruction;
using simulation::InstructionFetchingEvent;
TEST(BytecodeTraceGenTest, BasicShortLength)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    builder.process_decomposition(
        {
            simulation::BytecodeDecompositionEvent{
                .bytecode_id = 43,
                .bytecode = std::make_shared<std::vector<uint8_t>>(std::vector<uint8_t>{ 12, 31, 5, 2 }),
            },
        },
        trace);
    auto rows = trace.as_rows();

    // One extra empty row is prepended. Note that precomputed_first_row is not set through process_decomposition()
    // because it pertains to another subtrace.
    ASSERT_EQ(rows.size(), 4 + 1);

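    // windows_min_remaining_inv below holds the inverse of (DECOMPOSE_WINDOW_SIZE - bytes_remaining):
    // the usual inverse witness backing the strict-inequality selector sel_windows_gt_remaining,
    // with is_windows_eq_remaining taking over when the difference is zero.
    // next_packed_pc_min_pc_inv plays the same role for next_packed_pc - pc and sel_packed.
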
    // We do not inspect row at index 0 as it is completely empty.
    EXPECT_THAT(rows.at(1),
                AllOf(ROW_FIELD_EQ(bc_decomposition_sel, 1),
                      ROW_FIELD_EQ(bc_decomposition_id, 43),
                      ROW_FIELD_EQ(bc_decomposition_bytes, 12),
                      ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_1, 31),
                      ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_2, 5),
                      ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_3, 2),
                      ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_4, 0),
                      ROW_FIELD_EQ(bc_decomposition_pc, 0),
                      ROW_FIELD_EQ(bc_decomposition_bytes_remaining, 4),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
                      ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, FF(DECOMPOSE_WINDOW_SIZE - 4).invert()),
                      ROW_FIELD_EQ(bc_decomposition_is_windows_eq_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_bytes_to_read, 4),
                      ROW_FIELD_EQ(bc_decomposition_last_of_contract, 0),
                      ROW_FIELD_EQ(bc_decomposition_sel_packed, 1),
                      ROW_FIELD_EQ(bc_decomposition_next_packed_pc, 0),
                      ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv, 0)));

    EXPECT_THAT(rows.at(2),
                AllOf(ROW_FIELD_EQ(bc_decomposition_sel, 1),
                      ROW_FIELD_EQ(bc_decomposition_id, 43),
                      ROW_FIELD_EQ(bc_decomposition_bytes, 31),
                      ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_1, 5),
                      ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_2, 2),
                      ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_3, 0),
                      ROW_FIELD_EQ(bc_decomposition_pc, 1),
                      ROW_FIELD_EQ(bc_decomposition_bytes_remaining, 3),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
                      ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, FF(DECOMPOSE_WINDOW_SIZE - 3).invert()),
                      ROW_FIELD_EQ(bc_decomposition_is_windows_eq_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_bytes_to_read, 3),
                      ROW_FIELD_EQ(bc_decomposition_sel_packed, 0),
                      ROW_FIELD_EQ(bc_decomposition_next_packed_pc, 31),
                      ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv, FF(31 - 1).invert()),
                      ROW_FIELD_EQ(bc_decomposition_last_of_contract, 0)));

    EXPECT_THAT(rows.at(3),
                AllOf(ROW_FIELD_EQ(bc_decomposition_sel, 1),
                      ROW_FIELD_EQ(bc_decomposition_id, 43),
                      ROW_FIELD_EQ(bc_decomposition_bytes, 5),
                      ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_1, 2),
                      ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_2, 0),
                      ROW_FIELD_EQ(bc_decomposition_pc, 2),
                      ROW_FIELD_EQ(bc_decomposition_bytes_remaining, 2),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
                      ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, FF(DECOMPOSE_WINDOW_SIZE - 2).invert()),
                      ROW_FIELD_EQ(bc_decomposition_is_windows_eq_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_bytes_to_read, 2),
                      ROW_FIELD_EQ(bc_decomposition_sel_packed, 0),
                      ROW_FIELD_EQ(bc_decomposition_next_packed_pc, 31),
                      ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv, FF(31 - 2).invert()),
                      ROW_FIELD_EQ(bc_decomposition_last_of_contract, 0)));

    EXPECT_THAT(rows.at(4),
                AllOf(ROW_FIELD_EQ(bc_decomposition_sel, 1),
                      ROW_FIELD_EQ(bc_decomposition_id, 43),
                      ROW_FIELD_EQ(bc_decomposition_bytes, 2),
                      ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_1, 0),
                      ROW_FIELD_EQ(bc_decomposition_pc, 3),
                      ROW_FIELD_EQ(bc_decomposition_bytes_remaining, 1),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
                      ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, FF(DECOMPOSE_WINDOW_SIZE - 1).invert()),
                      ROW_FIELD_EQ(bc_decomposition_is_windows_eq_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_bytes_to_read, 1),
                      ROW_FIELD_EQ(bc_decomposition_sel_packed, 0),
                      ROW_FIELD_EQ(bc_decomposition_next_packed_pc, 31),
                      ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv, FF(31 - 3).invert()),
                      ROW_FIELD_EQ(bc_decomposition_last_of_contract, 1)));
}

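// Packing cadence: a new 31-byte packed field starts at every pc that is a multiple of 31,
// so next_packed_pc advances 0 -> 31 -> 62 -> ... In the test above, only pc = 0 opens a
// packed field (sel_packed = 1) and next_packed_pc stays at 31 for the remaining rows.
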
TEST(BytecodeTraceGenTest, BasicLongerThanWindowSize)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    constexpr auto bytecode_size = DECOMPOSE_WINDOW_SIZE + 8;
    std::vector<uint8_t> bytecode(bytecode_size);
    const uint8_t first_byte = 17; // Arbitrary start value; each subsequent byte increments by one.
                                   // We will hit invalid opcodes, but that does not matter here.

    for (uint8_t i = 0; i < bytecode_size; i++) {
        bytecode[i] = i + first_byte;
    }

    builder.process_decomposition(
        {
            simulation::BytecodeDecompositionEvent{
                .bytecode_id = 7,
                .bytecode = std::make_shared<std::vector<uint8_t>>(bytecode),
            },
        },
        trace);
    auto rows = trace.as_rows();

    // One extra empty row is prepended. Note that precomputed_first_row is not set through process_decomposition()
    // because it pertains to another subtrace.
    ASSERT_EQ(rows.size(), bytecode_size + 1);

    // We do not inspect row at index 0 as it is completely empty.
    EXPECT_THAT(rows.at(1),
                AllOf(ROW_FIELD_EQ(bc_decomposition_sel, 1),
                      ROW_FIELD_EQ(bc_decomposition_id, 7),
                      ROW_FIELD_EQ(bc_decomposition_bytes, first_byte),
                      ROW_FIELD_EQ(bc_decomposition_pc, 0),
                      ROW_FIELD_EQ(bc_decomposition_bytes_remaining, bytecode_size),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, FF(-8).invert()),
                      ROW_FIELD_EQ(bc_decomposition_is_windows_eq_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_bytes_to_read, DECOMPOSE_WINDOW_SIZE),
                      ROW_FIELD_EQ(bc_decomposition_sel_packed, 1),
                      ROW_FIELD_EQ(bc_decomposition_next_packed_pc, 0),
                      ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv, 0),
                      ROW_FIELD_EQ(bc_decomposition_last_of_contract, 0)));

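    // In the first row above, bytes_remaining = DECOMPOSE_WINDOW_SIZE + 8, so the expected
    // inverse witness is FF(DECOMPOSE_WINDOW_SIZE - (DECOMPOSE_WINDOW_SIZE + 8)).invert()
    // = FF(-8).invert().
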
    // We are interested in inspecting the boundary around bytes_remaining == window size.

    EXPECT_THAT(rows.at(9),
                AllOf(ROW_FIELD_EQ(bc_decomposition_sel, 1),
                      ROW_FIELD_EQ(bc_decomposition_id, 7),
                      ROW_FIELD_EQ(bc_decomposition_bytes, first_byte + 8),
                      ROW_FIELD_EQ(bc_decomposition_pc, 8),
                      ROW_FIELD_EQ(bc_decomposition_bytes_remaining, DECOMPOSE_WINDOW_SIZE),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, 0),
                      ROW_FIELD_EQ(bc_decomposition_is_windows_eq_remaining, 1),
                      ROW_FIELD_EQ(bc_decomposition_bytes_to_read, DECOMPOSE_WINDOW_SIZE),
                      ROW_FIELD_EQ(bc_decomposition_last_of_contract, 0)));

    EXPECT_THAT(rows.at(10),
                AllOf(ROW_FIELD_EQ(bc_decomposition_sel, 1),
                      ROW_FIELD_EQ(bc_decomposition_id, 7),
                      ROW_FIELD_EQ(bc_decomposition_bytes, first_byte + 9),
                      ROW_FIELD_EQ(bc_decomposition_pc, 9),
                      ROW_FIELD_EQ(bc_decomposition_bytes_remaining, DECOMPOSE_WINDOW_SIZE - 1),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
                      ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, 1),
                      ROW_FIELD_EQ(bc_decomposition_is_windows_eq_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_bytes_to_read, DECOMPOSE_WINDOW_SIZE - 1),
                      ROW_FIELD_EQ(bc_decomposition_sel_packed, 0),
                      ROW_FIELD_EQ(bc_decomposition_next_packed_pc, 31),
                      ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv, FF(31 - 9).invert()),
                      ROW_FIELD_EQ(bc_decomposition_last_of_contract, 0)));

    // Last row
    EXPECT_THAT(rows.at(bytecode_size),
                AllOf(ROW_FIELD_EQ(bc_decomposition_sel, 1),
                      ROW_FIELD_EQ(bc_decomposition_id, 7),
                      ROW_FIELD_EQ(bc_decomposition_bytes, first_byte + bytecode_size - 1),
                      ROW_FIELD_EQ(bc_decomposition_pc, bytecode_size - 1),
                      ROW_FIELD_EQ(bc_decomposition_bytes_remaining, 1),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
                      ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, FF(DECOMPOSE_WINDOW_SIZE - 1).invert()),
                      ROW_FIELD_EQ(bc_decomposition_is_windows_eq_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_bytes_to_read, 1),
                      ROW_FIELD_EQ(bc_decomposition_sel_packed, 0),
                      ROW_FIELD_EQ(bc_decomposition_next_packed_pc, 62),
                      ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv, FF(62 - (bytecode_size - 1)).invert()),
                      ROW_FIELD_EQ(bc_decomposition_last_of_contract, 1)));
}

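// In the last row of the test above, the final pc is DECOMPOSE_WINDOW_SIZE + 7 and the
// expected next_packed_pc of 62 = 2 * 31 is the next packing boundary strictly past that pc
// (which holds for the current window size).
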
TEST(BytecodeTraceGenTest, MultipleEvents)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    std::vector<uint32_t> bc_sizes = { DECOMPOSE_WINDOW_SIZE + 2, 17, DECOMPOSE_WINDOW_SIZE, 1 };
    std::vector<std::vector<uint8_t>> bytecodes(bc_sizes.size());

    std::transform(bc_sizes.begin(), bc_sizes.end(), bytecodes.begin(), [](uint32_t bc_size) -> std::vector<uint8_t> {
        std::vector<uint8_t> bytecode(bc_size);
        for (uint8_t i = 0; i < bc_size; i++) {
            bytecode[i] = i * i; // Arbitrary bytecode that we will not inspect below
        }

        return bytecode;
    });

    builder.process_decomposition(
        {
            simulation::BytecodeDecompositionEvent{
                .bytecode_id = 0,
                .bytecode = std::make_shared<std::vector<uint8_t>>(bytecodes[0]),
            },
            simulation::BytecodeDecompositionEvent{
                .bytecode_id = 1,
                .bytecode = std::make_shared<std::vector<uint8_t>>(bytecodes[1]),
            },
            simulation::BytecodeDecompositionEvent{
                .bytecode_id = 2,
                .bytecode = std::make_shared<std::vector<uint8_t>>(bytecodes[2]),
            },
            simulation::BytecodeDecompositionEvent{
                .bytecode_id = 3,
                .bytecode = std::make_shared<std::vector<uint8_t>>(bytecodes[3]),
            },
        },
        trace);
    auto rows = trace.as_rows();

    // One extra empty row is prepended.
    ASSERT_EQ(rows.size(), 2 * DECOMPOSE_WINDOW_SIZE + 20 + 1);

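    // The asserted row count above decomposes as (DECOMPOSE_WINDOW_SIZE + 2) + 17
    // + DECOMPOSE_WINDOW_SIZE + 1 = 2 * DECOMPOSE_WINDOW_SIZE + 20, plus the empty row.
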
    size_t row_pos = 1;
    for (uint32_t i = 0; i < 4; i++) {
        uint32_t next_packed_pc = 0;
        for (uint32_t j = 0; j < bc_sizes[i]; j++) {
            const auto bytes_rem = bc_sizes[i] - j;
            EXPECT_THAT(
                rows.at(row_pos),
                AllOf(
                    ROW_FIELD_EQ(bc_decomposition_sel, 1),
                    ROW_FIELD_EQ(bc_decomposition_id, i),
                    ROW_FIELD_EQ(bc_decomposition_pc, j),
                    ROW_FIELD_EQ(bc_decomposition_bytes_remaining, bytes_rem),
                    ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, DECOMPOSE_WINDOW_SIZE > bytes_rem ? 1 : 0),
                    ROW_FIELD_EQ(
                        bc_decomposition_windows_min_remaining_inv,
                        bytes_rem == DECOMPOSE_WINDOW_SIZE ? 0 : (FF(DECOMPOSE_WINDOW_SIZE) - FF(bytes_rem)).invert()),
                    ROW_FIELD_EQ(bc_decomposition_is_windows_eq_remaining, bytes_rem == DECOMPOSE_WINDOW_SIZE ? 1 : 0),
                    ROW_FIELD_EQ(bc_decomposition_bytes_to_read, std::min(DECOMPOSE_WINDOW_SIZE, bytes_rem)),
                    ROW_FIELD_EQ(bc_decomposition_sel_packed, j == next_packed_pc ? 1 : 0),
                    ROW_FIELD_EQ(bc_decomposition_next_packed_pc, next_packed_pc),
                    ROW_FIELD_EQ(bc_decomposition_next_packed_pc_min_pc_inv,
                                 j == next_packed_pc ? 0 : FF(next_packed_pc - j).invert()),
                    ROW_FIELD_EQ(bc_decomposition_last_of_contract, j == bc_sizes[i] - 1 ? 1 : 0)));
            row_pos++;
            next_packed_pc += j % 31 == 0 ? 31 : 0;
        }
    }
}

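// Bytecode hashing operates on the bytecode packed into 31-byte field chunks (31 bytes always
// fit below the field modulus), which is why the pc_index columns below advance in steps of 31.
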
TEST(BytecodeTraceGenTest, BasicHashing)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    builder.process_hashing(
        {
            simulation::BytecodeHashingEvent{
                .bytecode_id = 1,
                .bytecode_length = 9,
                .bytecode_fields = { 10, 20, 30 },
            },
        },
        trace);
    const auto rows = trace.as_rows();

    // One extra empty row is prepended.
    EXPECT_THAT(rows.at(1),
                AllOf(ROW_FIELD_EQ(bc_hashing_sel, 1),
                      ROW_FIELD_EQ(bc_hashing_start, 1),
                      ROW_FIELD_EQ(bc_hashing_sel_not_start, 0),
                      ROW_FIELD_EQ(bc_hashing_sel_not_padding_1, 1),
                      ROW_FIELD_EQ(bc_hashing_sel_not_padding_2, 1),
                      ROW_FIELD_EQ(bc_hashing_latch, 0),
                      ROW_FIELD_EQ(bc_hashing_bytecode_id, 1),
                      ROW_FIELD_EQ(bc_hashing_pc_index, 0),
                      // We don't increment at start to account for the prepended separator:
                      ROW_FIELD_EQ(bc_hashing_pc_index_1, 0),
                      ROW_FIELD_EQ(bc_hashing_pc_index_2, 31),
                      ROW_FIELD_EQ(bc_hashing_packed_fields_0, GENERATOR_INDEX__PUBLIC_BYTECODE),
                      ROW_FIELD_EQ(bc_hashing_packed_fields_1, 10),
                      ROW_FIELD_EQ(bc_hashing_packed_fields_2, 20),
                      ROW_FIELD_EQ(bc_hashing_input_len, 4),
                      ROW_FIELD_EQ(bc_hashing_rounds_rem, 2),
                      // (expected bc_hashing_output_hash matcher elided in this listing)
                      ROW_FIELD_EQ(bc_hashing_pc_at_final_field, 0)));

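    // input_len = 4 counts the prepended separator plus the three bytecode fields; three fields
    // are absorbed per row, so the padded input takes two rows (rounds_rem = 2, then 1).
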
    // Latched row
    EXPECT_THAT(rows.at(2),
                AllOf(ROW_FIELD_EQ(bc_hashing_sel, 1),
                      ROW_FIELD_EQ(bc_hashing_start, 0),
                      ROW_FIELD_EQ(bc_hashing_sel_not_start, 1),
                      ROW_FIELD_EQ(bc_hashing_sel_not_padding_1, 0),
                      ROW_FIELD_EQ(bc_hashing_sel_not_padding_2, 0),
                      ROW_FIELD_EQ(bc_hashing_latch, 1),
                      ROW_FIELD_EQ(bc_hashing_bytecode_id, 1),
                      ROW_FIELD_EQ(bc_hashing_pc_index, 62),
                      ROW_FIELD_EQ(bc_hashing_pc_index_1, 93),
                      ROW_FIELD_EQ(bc_hashing_pc_index_2, 124),
                      ROW_FIELD_EQ(bc_hashing_packed_fields_0, 30),
                      ROW_FIELD_EQ(bc_hashing_packed_fields_1, 0),
                      ROW_FIELD_EQ(bc_hashing_packed_fields_2, 0),
                      ROW_FIELD_EQ(bc_hashing_input_len, 4),
                      ROW_FIELD_EQ(bc_hashing_rounds_rem, 1),
                      // (expected bc_hashing_output_hash matcher elided in this listing)
                      ROW_FIELD_EQ(bc_hashing_pc_at_final_field, 62)));
}

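// For intuition, the 31-byte packing boundary arithmetic exercised above; a minimal
// illustrative sketch, not part of the production code:
[[maybe_unused]] constexpr uint32_t packed_field_start(uint32_t pc)
{
    return pc - (pc % 31); // start pc of the 31-byte chunk containing pc
}
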
std::vector<Instruction> gen_random_instructions(std::span<const WireOpCode> opcodes)
{
    std::vector<Instruction> instructions;
    instructions.reserve(opcodes.size());
    for (const auto& opcode : opcodes) {
        instructions.emplace_back(testing::random_instruction(opcode));
    }
    return instructions;
}

std::vector<uint8_t> create_bytecode(std::span<const Instruction> instructions)
{
    std::vector<uint8_t> bytecode;
    for (const auto& instruction : instructions) {
        auto serialized_instruction = instruction.serialize();
        bytecode.insert(bytecode.end(),
                        std::make_move_iterator(serialized_instruction.begin()),
                        std::make_move_iterator(serialized_instruction.end()));
    }
    return bytecode;
}

std::vector<size_t> gen_pcs(std::span<const WireOpCode> opcodes)
{
    std::vector<size_t> pcs;
    pcs.reserve(opcodes.size());
    size_t pc = 0;
    for (const auto& opcode : opcodes) {
        pcs.emplace_back(pc);
        pc += WIRE_INSTRUCTION_SPEC.at(opcode).size_in_bytes;
    }
    return pcs;
}

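// Example: if three opcodes had WIRE_INSTRUCTION_SPEC sizes of, say, 5, 3 and 7 bytes, gen_pcs
// would return { 0, 5, 8 }: each pc is the running sum of the preceding instruction sizes.
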
std::vector<InstructionFetchingEvent> create_instruction_fetching_events(
    const std::vector<Instruction>& instructions,
    const std::vector<size_t>& pcs,
    const std::shared_ptr<std::vector<uint8_t>>& bytecode_ptr,
    const BytecodeId bytecode_id)
{
    std::vector<InstructionFetchingEvent> events;
    events.reserve(instructions.size());

    for (size_t i = 0; i < instructions.size(); i++) {
        events.emplace_back(InstructionFetchingEvent{
            .bytecode_id = bytecode_id,
            .pc = static_cast<uint32_t>(pcs.at(i)),
            .instruction = instructions.at(i),
            .bytecode = bytecode_ptr,
        });
    }
    return events;
}

// We build a random InstructionFetchingEvent for each wire opcode.
// We then verify that the bytes (bd0, bd1, ...) correspond to the serialized instruction.
TEST(BytecodeTraceGenTest, InstrDecompositionInBytesEachOpcode)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    constexpr std::array<C, 37> bd_columns = {
        C::instr_fetching_bd0,  C::instr_fetching_bd1,  C::instr_fetching_bd2,  C::instr_fetching_bd3,
        C::instr_fetching_bd4,  C::instr_fetching_bd5,  C::instr_fetching_bd6,  C::instr_fetching_bd7,
        C::instr_fetching_bd8,  C::instr_fetching_bd9,  C::instr_fetching_bd10, C::instr_fetching_bd11,
        C::instr_fetching_bd12, C::instr_fetching_bd13, C::instr_fetching_bd14, C::instr_fetching_bd15,
        C::instr_fetching_bd16, C::instr_fetching_bd17, C::instr_fetching_bd18, C::instr_fetching_bd19,
        C::instr_fetching_bd20, C::instr_fetching_bd21, C::instr_fetching_bd22, C::instr_fetching_bd23,
        C::instr_fetching_bd24, C::instr_fetching_bd25, C::instr_fetching_bd26, C::instr_fetching_bd27,
        C::instr_fetching_bd28, C::instr_fetching_bd29, C::instr_fetching_bd30, C::instr_fetching_bd31,
        C::instr_fetching_bd32, C::instr_fetching_bd33, C::instr_fetching_bd34, C::instr_fetching_bd35,
        C::instr_fetching_bd36,
    };

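    // 37 byte-decomposition (bd) columns: one per byte of the widest serialized instruction,
    // which is therefore at most 37 bytes.
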
    constexpr std::array<C, 7> operand_columns = {
        C::instr_fetching_op1, C::instr_fetching_op2, C::instr_fetching_op3, C::instr_fetching_op4,
        C::instr_fetching_op5, C::instr_fetching_op6, C::instr_fetching_op7,
    };

    constexpr BytecodeId bytecode_id = 1;
    constexpr auto num_opcodes = static_cast<size_t>(WireOpCode::LAST_OPCODE_SENTINEL);

    std::vector<WireOpCode> opcodes;
    opcodes.reserve(num_opcodes);
    for (size_t i = 0; i < num_opcodes; i++) {
        opcodes.emplace_back(static_cast<WireOpCode>(i));
    }

    std::vector<Instruction> instructions = gen_random_instructions(opcodes);
    std::vector<size_t> pcs = gen_pcs(opcodes);
    std::vector<uint8_t> bytecode = create_bytecode(instructions);

    auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(bytecode);
    std::vector<InstructionFetchingEvent> events =
        create_instruction_fetching_events(instructions, pcs, bytecode_ptr, bytecode_id);

    builder.process_instruction_fetching(events, trace);

    for (uint32_t i = 0; i < num_opcodes; i++) {
        const auto instr = instructions.at(i);
        const auto instr_encoded = instr.serialize();
        const auto w_opcode = static_cast<WireOpCode>(i);

        // Check size_in_bytes column
        const auto expected_size_in_bytes = WIRE_INSTRUCTION_SPEC.at(w_opcode).size_in_bytes;
        ASSERT_EQ(instr_encoded.size(), expected_size_in_bytes);
        EXPECT_EQ(FF(expected_size_in_bytes), trace.get(C::instr_fetching_instr_size, i + 1));

        // Inspect each byte
        for (size_t j = 0; j < static_cast<size_t>(expected_size_in_bytes); j++) {
            EXPECT_EQ(FF(instr_encoded.at(j)), trace.get(bd_columns.at(j), i + 1));
        }

        // Check execution opcode
        EXPECT_EQ(FF(static_cast<uint8_t>(WIRE_INSTRUCTION_SPEC.at(w_opcode).exec_opcode)),
                  trace.get(C::instr_fetching_exec_opcode, i + 1));

        // Check indirect
        EXPECT_EQ(FF(instr.indirect), trace.get(C::instr_fetching_indirect, i + 1));

        // Check PCs
        EXPECT_EQ(FF(pcs.at(i)), trace.get(C::instr_fetching_pc, i + 1));

        // Check operands
        size_t operand_idx = 0;
        for (const auto& operand : instr.operands) {
            EXPECT_EQ(FF(operand), trace.get(operand_columns.at(operand_idx++), i + 1));
        }
    }
}

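// The *_abs_diff columns feed range checks: instr_abs_diff is bytes_to_read - instr_size when
// the instruction fits (instr_size - bytes_to_read - 1 otherwise), and pc_abs_diff is
// bytecode_size - pc - 1 when pc is in range (pc - bytecode_size otherwise).
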
TEST(BytecodeTraceGenTest, InstrFetchingSingleBytecode)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    constexpr BytecodeId bytecode_id = 1;
    constexpr size_t num_of_opcodes = 10;
    constexpr std::array<WireOpCode, num_of_opcodes> opcodes = {
        // (the ten WireOpCode values are elided in this listing)
    };

    std::vector<Instruction> instructions = gen_random_instructions(opcodes);
    std::vector<size_t> pcs = gen_pcs(opcodes);
    std::vector<uint8_t> bytecode = create_bytecode(instructions);

    std::vector<InstructionFetchingEvent> events = create_instruction_fetching_events(
        instructions, pcs, std::make_shared<std::vector<uint8_t>>(bytecode), bytecode_id);

    builder.process_instruction_fetching(events, trace);

    // One extra empty row is prepended.
    const auto rows = trace.as_rows();
    const auto bytecode_size = bytecode.size();
    EXPECT_EQ(rows.size(), num_of_opcodes + 1);

    for (size_t i = 0; i < num_of_opcodes; i++) {
        const auto pc = pcs.at(i);
        const auto instr_size = WIRE_INSTRUCTION_SPEC.at(opcodes.at(i)).size_in_bytes;
        const auto has_tag = WIRE_INSTRUCTION_SPEC.at(opcodes.at(i)).tag_operand_idx.has_value();
        const auto tag_is_op2 = has_tag ? WIRE_INSTRUCTION_SPEC.at(opcodes.at(i)).tag_operand_idx.value() == 2 : 0;
        const auto bytes_remaining = bytecode_size - pc;
        const auto bytes_to_read = std::min<size_t>(DECOMPOSE_WINDOW_SIZE, bytes_remaining);

        EXPECT_LE(instr_size, bytes_to_read);
        const auto instr_abs_diff = bytes_to_read - instr_size;

        EXPECT_LT(pc, bytecode_size);
        const auto pc_abs_diff = bytecode_size - pc - 1;

        ASSERT_LE(bytecode_size, UINT16_MAX);

        EXPECT_THAT(rows.at(i + 1),
                    AllOf(ROW_FIELD_EQ(instr_fetching_sel, 1),
                          ROW_FIELD_EQ(instr_fetching_pc, pc),
                          ROW_FIELD_EQ(instr_fetching_bd0, static_cast<uint8_t>(opcodes.at(i))),
                          ROW_FIELD_EQ(instr_fetching_bytecode_id, bytecode_id),
                          ROW_FIELD_EQ(instr_fetching_bytes_to_read, bytes_to_read),
                          ROW_FIELD_EQ(instr_fetching_bytecode_size, bytecode_size),
                          ROW_FIELD_EQ(instr_fetching_instr_size, instr_size),
                          ROW_FIELD_EQ(instr_fetching_instr_abs_diff, instr_abs_diff),
                          ROW_FIELD_EQ(instr_fetching_pc_abs_diff, pc_abs_diff),
                          ROW_FIELD_EQ(instr_fetching_pc_out_of_range, 0),
                          ROW_FIELD_EQ(instr_fetching_opcode_out_of_range, 0),
                          ROW_FIELD_EQ(instr_fetching_instr_out_of_range, 0),
                          ROW_FIELD_EQ(instr_fetching_tag_out_of_range, 0),
                          ROW_FIELD_EQ(instr_fetching_sel_parsing_err, 0),
                          ROW_FIELD_EQ(instr_fetching_sel_pc_in_range, 1),
                          ROW_FIELD_EQ(instr_fetching_sel_has_tag, has_tag),
                          ROW_FIELD_EQ(instr_fetching_sel_tag_is_op2, tag_is_op2)));
    }
}

// Test involving three different bytecode_ids sharing the same two-opcode bytecode.
TEST(BytecodeTraceGenTest, InstrFetchingMultipleBytecodes)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    constexpr size_t num_of_opcodes = 2;
    constexpr std::array<WireOpCode, num_of_opcodes> opcodes = {
        // (the two WireOpCode values are elided in this listing)
    };

    std::vector<Instruction> instructions = gen_random_instructions(opcodes);
    std::vector<size_t> pcs = gen_pcs(opcodes);
    std::vector<uint8_t> bytecode = create_bytecode(instructions);

    std::vector<InstructionFetchingEvent> events;
    for (size_t i = 0; i < 3; i++) {
        auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(bytecode);
        auto new_events =
            create_instruction_fetching_events(instructions, pcs, bytecode_ptr, static_cast<BytecodeId>(i + 1));
        events.insert(events.end(), new_events.begin(), new_events.end());
    }

    builder.process_instruction_fetching(events, trace);

    // One extra empty row is prepended.
    const auto rows = trace.as_rows();
    EXPECT_EQ(rows.size(), 6 + 1);

    for (size_t i = 0; i < 3; i++) {
        EXPECT_THAT(rows.at(2 * i + 1), ROW_FIELD_EQ(instr_fetching_pc, 0));
    }
}

// Test which processes three single-instruction events, each with a different parsing error.
// The bytecode consists of 20 trivial bytes increasing from 0 to 19, except that the byte at
// index 0 is set to LAST_OPCODE_SENTINEL + 1.
// The instruction events then use pc = 0, pc = 19 and pc = 38:
// pc == 0 corresponds to the error OPCODE_OUT_OF_RANGE,
// pc == 19 to INSTRUCTION_OUT_OF_RANGE,
// pc == 38 to PC_OUT_OF_RANGE.
// For each row we check the column instr_fetching_sel_parsing_err in addition to the column of
// the respective error. It is not an issue that the instruction is generated at random in the
// event and is not consistent with the bytecode for this test case.
TEST(BytecodeTraceGenTest, InstrFetchingParsingErrors)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    constexpr BytecodeId bytecode_id = 1;
    constexpr size_t bytecode_size = 20;
    std::vector<uint8_t> bytecode(bytecode_size);
    for (size_t i = 0; i < bytecode_size; i++) {
        bytecode[i] = static_cast<uint8_t>(i);
    }
    bytecode[0] = static_cast<uint8_t>(WireOpCode::LAST_OPCODE_SENTINEL) + 1;

    std::vector<InstructionFetchingEvent> events;
    auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(bytecode);
    events.emplace_back(InstructionFetchingEvent{
        .bytecode_id = bytecode_id,
        .pc = 0,
        .bytecode = bytecode_ptr,
    });
    events.emplace_back(InstructionFetchingEvent{
        .bytecode_id = bytecode_id,
        .pc = 19,
        .bytecode = bytecode_ptr,
    });
    events.emplace_back(InstructionFetchingEvent{
        .bytecode_id = bytecode_id,
        .pc = 38,
        .bytecode = bytecode_ptr,
    });

    builder.process_instruction_fetching(events, trace);

    // One extra empty row is prepended.
    const auto rows = trace.as_rows();
    ASSERT_EQ(rows.size(), 3 + 1);

    EXPECT_THAT(rows.at(1),
                AllOf(ROW_FIELD_EQ(instr_fetching_sel, 1),
                      ROW_FIELD_EQ(instr_fetching_sel_pc_in_range, 1),
                      ROW_FIELD_EQ(instr_fetching_pc, 0),
                      ROW_FIELD_EQ(instr_fetching_bytes_to_read, 20),
                      ROW_FIELD_EQ(instr_fetching_instr_size, 0),
                      ROW_FIELD_EQ(instr_fetching_instr_abs_diff,
                                   20), // instr_size <= bytes_to_read: bytes_to_read - instr_size
                      ROW_FIELD_EQ(instr_fetching_sel_parsing_err, 1),
                      ROW_FIELD_EQ(instr_fetching_pc_abs_diff, 19), // bytecode_size - pc - 1 if bytecode_size > pc
                      ROW_FIELD_EQ(instr_fetching_opcode_out_of_range, 1)));

    EXPECT_THAT(rows.at(2),
                AllOf(ROW_FIELD_EQ(instr_fetching_sel, 1),
                      ROW_FIELD_EQ(instr_fetching_sel_pc_in_range, 1),
                      ROW_FIELD_EQ(instr_fetching_pc, 19), // OR_16 opcode
                      ROW_FIELD_EQ(instr_fetching_bytes_to_read, 1),
                      ROW_FIELD_EQ(instr_fetching_instr_size, 8), // OR_16 is 8 bytes long
                      ROW_FIELD_EQ(instr_fetching_instr_abs_diff,
                                   6), // instr_size > bytes_to_read: instr_size - bytes_to_read - 1
                      ROW_FIELD_EQ(instr_fetching_sel_parsing_err, 1),
                      ROW_FIELD_EQ(instr_fetching_pc_abs_diff, 0), // bytecode_size - pc - 1 if bytecode_size > pc
                      ROW_FIELD_EQ(instr_fetching_instr_out_of_range, 1)));

    EXPECT_THAT(
        rows.at(3),
        AllOf(ROW_FIELD_EQ(instr_fetching_sel, 1),
              ROW_FIELD_EQ(instr_fetching_sel_pc_in_range, 0),
              ROW_FIELD_EQ(instr_fetching_pc, 38),
              ROW_FIELD_EQ(instr_fetching_bytes_to_read, 0),
              ROW_FIELD_EQ(instr_fetching_instr_size, 0),
              ROW_FIELD_EQ(instr_fetching_instr_abs_diff, 0), // instr_size <= bytes_to_read: bytes_to_read - instr_size
              ROW_FIELD_EQ(instr_fetching_sel_parsing_err, 1),
              ROW_FIELD_EQ(instr_fetching_pc_abs_diff, 18), // pc - bytecode_size if bytecode_size <= pc
              ROW_FIELD_EQ(instr_fetching_pc_out_of_range, 1)));
}

// Test on the tag-out-of-range parsing error.
TEST(BytecodeTraceGenTest, InstrFetchingErrorTagOutOfRange)
{
    // (Reconstructed using-declarations for the unqualified names used below; the exact
    // namespaces are assumed.)
    using simulation::deserialize_instruction;
    using simulation::Operand;
    using testing::random_instruction;

    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    auto instr_cast = random_instruction(WireOpCode::CAST_16);
    auto instr_set = random_instruction(WireOpCode::SET_64);
    constexpr uint32_t cast_size = 7;
    constexpr uint32_t set_64_size = 13;

    instr_cast.operands.at(2) = Operand::from<uint8_t>(0x09); // tag operand mutation to 0x09 which is out of range
    instr_set.operands.at(1) = Operand::from<uint8_t>(0x0A);  // tag operand mutation to 0x0A which is out of range

    auto bytecode = instr_cast.serialize();
    ASSERT_EQ(bytecode.size(), cast_size);

    auto instr_set_bytecode = instr_set.serialize();
    ASSERT_EQ(instr_set_bytecode.size(), set_64_size);

    bytecode.insert(bytecode.end(), instr_set_bytecode.begin(), instr_set_bytecode.end());

    const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(bytecode);

    std::vector<InstructionFetchingEvent> events;
    events.emplace_back(InstructionFetchingEvent{
        .bytecode_id = 1,
        .pc = 0,
        .instruction = deserialize_instruction(bytecode, 0), // Reflects the real code path better than passing
                                                             // instr_cast directly.
        .bytecode = bytecode_ptr,
    });

    events.emplace_back(InstructionFetchingEvent{
        .bytecode_id = 1,
        .pc = cast_size,
        .instruction = deserialize_instruction(bytecode, cast_size), // Reflects the real code path better than
                                                                     // passing instr_set directly.
        .bytecode = bytecode_ptr,
    });

    builder.process_instruction_fetching(events, trace);

    // One extra empty row is prepended.
    const auto rows = trace.as_rows();
    ASSERT_EQ(rows.size(), 2 + 1);

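    // Tag values 9 and 10 lie outside the valid memory-tag range, so both rows must flag
    // tag_out_of_range while the pc, size and diff columns are still populated normally.
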
    EXPECT_THAT(rows.at(1),
                AllOf(ROW_FIELD_EQ(instr_fetching_sel, 1),
                      ROW_FIELD_EQ(instr_fetching_sel_pc_in_range, 1),
                      ROW_FIELD_EQ(instr_fetching_sel_has_tag, 1),
                      ROW_FIELD_EQ(instr_fetching_sel_tag_is_op2, 0),
                      ROW_FIELD_EQ(instr_fetching_tag_value, 9),
                      ROW_FIELD_EQ(instr_fetching_pc, 0),
                      ROW_FIELD_EQ(instr_fetching_bytes_to_read, cast_size + set_64_size),
                      ROW_FIELD_EQ(instr_fetching_instr_size, cast_size),
                      ROW_FIELD_EQ(instr_fetching_instr_abs_diff,
                                   set_64_size), // instr_size <= bytes_to_read: bytes_to_read - instr_size
                      ROW_FIELD_EQ(instr_fetching_sel_parsing_err, 1),
                      ROW_FIELD_EQ(instr_fetching_pc_abs_diff,
                                   cast_size + set_64_size - 1), // bytecode_size - pc - 1 if bytecode_size > pc
                      ROW_FIELD_EQ(instr_fetching_tag_out_of_range, 1)));

    EXPECT_THAT(
        rows.at(2),
        AllOf(ROW_FIELD_EQ(instr_fetching_sel, 1),
              ROW_FIELD_EQ(instr_fetching_sel_pc_in_range, 1),
              ROW_FIELD_EQ(instr_fetching_sel_has_tag, 1),
              ROW_FIELD_EQ(instr_fetching_sel_tag_is_op2, 1),
              ROW_FIELD_EQ(instr_fetching_tag_value, 10),
              ROW_FIELD_EQ(instr_fetching_pc, cast_size),
              ROW_FIELD_EQ(instr_fetching_bytes_to_read, set_64_size),
              ROW_FIELD_EQ(instr_fetching_instr_size, set_64_size),
              ROW_FIELD_EQ(instr_fetching_instr_abs_diff, 0), // instr_size <= bytes_to_read: bytes_to_read - instr_size
              ROW_FIELD_EQ(instr_fetching_sel_parsing_err, 1),
              ROW_FIELD_EQ(instr_fetching_pc_abs_diff, set_64_size - 1), // bytecode_size - pc - 1 if bytecode_size > pc
              ROW_FIELD_EQ(instr_fetching_tag_out_of_range, 1)));
}

} // namespace
} // namespace bb::avm2::tracegen