#include <gmock/gmock.h>
#include <gtest/gtest.h>

// ... (project-specific includes elided)

using ::testing::_;
using ::testing::NiceMock;
using ::testing::Return;
using ::testing::ReturnRef;
using ::testing::StrictMock;
class TxExecutionTest : public ::testing::Test {
  protected:
    TxExecutionTest() = default;

    // Collaborators of the TxExecution under test.
    NiceMock<MockExecution> execution;
    NiceMock<MockContextProvider> context_provider;
    StrictMock<MockHighLevelMerkleDB> merkle_db;
    NiceMock<MockWrittenPublicDataSlotsTreeCheck> written_public_data_slots_tree_check;
    StrictMock<MockRetrievedBytecodesTreeCheck> retrieved_bytecodes_tree_check;
    EventEmitter<TxEvent> tx_event_emitter;

    // The TxExecution under test is wired to the collaborators above; the full
    // constructor call is elided in this listing, but its arguments include
    //     written_public_data_slots_tree_check,
    //     retrieved_bytecodes_tree_check,
    TxExecution tx_execution = TxExecution(/* ... */);
};
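// Happy path: simulates a full tx across the setup, app-logic and teardown phases,
// with every phase context reporting itself as halted and execution returning a
// successful result, then checks that the emitted TxEvents account for the tx's
// note hashes, nullifiers, L2-to-L1 messages, public calls and fee collection.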
TEST_F(TxExecutionTest, simulateTx)
{
    Tx tx = {
        .hash = "0x1234567890abcdef",
        .nonRevertibleAccumulatedData = { /* note hashes, nullifiers, L2-to-L1 messages (elided) */ },
        .revertibleAccumulatedData = { /* note hashes, nullifiers, L2-to-L1 messages (elided) */ },
        // ... (remaining fields, including the enqueued public call requests, elided)
    };

    AppendOnlyTreeSnapshot dummy_snapshot = {
        // ...
        .nextAvailableLeafIndex = 0,
    };
    TreeStates tree_state = {
        .noteHashTree = { .tree = dummy_snapshot, .counter = 0 },
        .nullifierTree = { .tree = dummy_snapshot, .counter = 0 },
        .l1ToL2MessageTree = { .tree = dummy_snapshot, .counter = 0 },
        .publicDataTree = { .tree = dummy_snapshot, .counter = 0 },
    };
    ON_CALL(merkle_db, get_tree_state()).WillByDefault([&]() { return tree_state; });
    ON_CALL(merkle_db, siloed_nullifier_write(_)).WillByDefault(Return());

    // setup_context, app_logic_context and teardown_context are heap-allocated mock
    // phase contexts (their construction is elided). Each reports itself as halted
    // so that every phase terminates immediately.
    ON_CALL(*setup_context, halted()).WillByDefault(Return(true));
    ON_CALL(*app_logic_context, halted()).WillByDefault(Return(true));
    ON_CALL(*teardown_context, halted()).WillByDefault(Return(true));

    ExecutionResult successful_result = {
        // ...
        .gas_used = Gas{ 100, 100 },
        .side_effect_states = SideEffectStates{},
    };
    ON_CALL(execution, execute(_)).WillByDefault(Return(successful_result));

    // The context provider hands out the setup, app-logic and teardown contexts in
    // order (the expectation's exact factory call is elided).
    EXPECT_CALL(context_provider, /* ... */)
        .WillOnce(Return(std::move(setup_context)))
        .WillOnce(Return(std::move(app_logic_context)))
        .WillOnce(Return(std::move(teardown_context)));
    EXPECT_CALL(merkle_db, create_checkpoint()).Times(1);
    // ...
    EXPECT_CALL(merkle_db, pad_trees()).Times(1);

    tx_execution.simulate(tx);
    auto events = tx_event_emitter.dump_events(); // accessor name assumed

    bool has_startup_event = false;
    auto expected_private_append_tree_events =
        tx.nonRevertibleAccumulatedData.noteHashes.size() + tx.nonRevertibleAccumulatedData.nullifiers.size() +
        tx.revertibleAccumulatedData.noteHashes.size() + tx.revertibleAccumulatedData.nullifiers.size();
    auto actual_private_append_tree_events = 0;

    auto expected_l2_l1_msg_events =
        tx.nonRevertibleAccumulatedData.l2ToL1Messages.size() + tx.revertibleAccumulatedData.l2ToL1Messages.size();
    auto actual_l2_l1_msg_events = 0;

    auto expected_public_call_events = 3;
    auto actual_public_call_events = 0;

    bool has_collect_fee_event = false;

    for (const auto& tx_event : events) {
        // Dispatch on the concrete TxEvent alternative (the exact checks are elided).
        if (/* startup event */) {
            has_startup_event = true;
        } else if (/* private append-tree event */) {
            actual_private_append_tree_events++;
        } else if (/* L2-to-L1 message event */) {
            actual_l2_l1_msg_events++;
        } else if (/* public call event */) {
            actual_public_call_events++;
        } else if (/* collect-fee event */) {
            has_collect_fee_event = true;
        }
    }

    EXPECT_TRUE(has_startup_event);
    EXPECT_EQ(actual_private_append_tree_events, expected_private_append_tree_events);
    EXPECT_EQ(expected_l2_l1_msg_events, actual_l2_l1_msg_events);
    EXPECT_EQ(expected_public_call_events, actual_public_call_events);
    EXPECT_TRUE(has_collect_fee_event);
}
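// Reaching the note hash limit: the tx presumably carries enough note hashes that
// the note hash tree counter (bumped by the write mocks below) exceeds
// MAX_NOTE_HASHES_PER_TX, so exactly one phase reverts while the emitted events
// are still accounted for.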
TEST_F(TxExecutionTest, NoteHashLimitReached)
{
    Tx tx = {
        .hash = "0x1234567890abcdef",
        .nonRevertibleAccumulatedData = { /* note hashes, nullifiers, L2-to-L1 messages (elided) */ },
        .revertibleAccumulatedData = { /* note hashes, nullifiers, L2-to-L1 messages (elided) */ },
        // ...
    };

    AppendOnlyTreeSnapshot dummy_snapshot = {
        // ...
        .nextAvailableLeafIndex = 0,
    };
    TreeStates tree_state = {
        .noteHashTree = { .tree = dummy_snapshot, .counter = 0 },
        .nullifierTree = { .tree = dummy_snapshot, .counter = 0 },
        .l1ToL2MessageTree = { .tree = dummy_snapshot, .counter = 0 },
        .publicDataTree = { .tree = dummy_snapshot, .counter = 0 },
    };

    // Tree writes bump the corresponding counters so that the limit check can trip.
    ON_CALL(merkle_db, get_tree_state()).WillByDefault([&]() { return tree_state; });
    ON_CALL(merkle_db, siloed_nullifier_write(_)).WillByDefault([&](const auto&) {
        tree_state.nullifierTree.counter++;
    });
    ON_CALL(merkle_db, siloed_note_hash_write(_)).WillByDefault([&](const auto&) {
        tree_state.noteHashTree.counter++;
    });
    ON_CALL(merkle_db, unique_note_hash_write(_)).WillByDefault([&](const auto&) {
        tree_state.noteHashTree.counter++;
    });

    EXPECT_CALL(merkle_db, create_checkpoint()).Times(2);
    // ... (further merkle_db expectations elided)

    tx_execution.simulate(tx);
    auto events = tx_event_emitter.dump_events(); // accessor name assumed

    bool has_startup_event = false;
    auto expected_private_append_tree_events =
        tx.nonRevertibleAccumulatedData.noteHashes.size() + tx.nonRevertibleAccumulatedData.nullifiers.size() +
        tx.revertibleAccumulatedData.noteHashes.size() + tx.revertibleAccumulatedData.nullifiers.size();
    auto actual_private_append_tree_events = 0;

    auto expected_l2_l1_msg_events =
        tx.nonRevertibleAccumulatedData.l2ToL1Messages.size() + tx.revertibleAccumulatedData.l2ToL1Messages.size();
    auto actual_l2_l1_msg_events = 0;

    auto expected_public_call_events = 0;
    auto actual_public_call_events = 0;

    auto reverts = 0;
    bool has_collect_fee_event = false;

    for (const auto& tx_event : events) {
        if (/* startup event */) {
            has_startup_event = true;
            continue;
        }
        // Remaining events are per-phase events; extraction of phase_event is elided.
        if (phase_event.reverted) {
            reverts++;
        }
        auto event = phase_event.event;
        if (/* private append-tree event */) {
            actual_private_append_tree_events++;
        } else if (/* L2-to-L1 message event */) {
            actual_l2_l1_msg_events++;
        } else if (/* public call event */) {
            actual_public_call_events++;
        } else if (/* collect-fee event */) {
            has_collect_fee_event = true;
        }
    }

    EXPECT_TRUE(has_startup_event);
    EXPECT_EQ(actual_private_append_tree_events, expected_private_append_tree_events);
    EXPECT_EQ(expected_l2_l1_msg_events, actual_l2_l1_msg_events);
    EXPECT_EQ(expected_public_call_events, actual_public_call_events);
    EXPECT_TRUE(has_collect_fee_event);
    EXPECT_EQ(reverts, 1);
}
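// Same shape as NoteHashLimitReached, but the overflow presumably comes from the
// nullifier count, again producing exactly one reverted phase.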
TEST_F(TxExecutionTest, NullifierLimitReached)
{
    Tx tx = {
        .hash = "0x1234567890abcdef",
        .nonRevertibleAccumulatedData = { /* note hashes, nullifiers, L2-to-L1 messages (elided) */ },
        .revertibleAccumulatedData = { /* note hashes, nullifiers, L2-to-L1 messages (elided) */ },
        // ...
    };

    AppendOnlyTreeSnapshot dummy_snapshot = {
        // ...
        .nextAvailableLeafIndex = 0,
    };
    TreeStates tree_state = {
        .noteHashTree = { .tree = dummy_snapshot, .counter = 0 },
        .nullifierTree = { .tree = dummy_snapshot, .counter = 0 },
        .l1ToL2MessageTree = { .tree = dummy_snapshot, .counter = 0 },
        .publicDataTree = { .tree = dummy_snapshot, .counter = 0 },
    };

    // Tree writes bump the corresponding counters so that the limit check can trip.
    ON_CALL(merkle_db, get_tree_state()).WillByDefault([&]() { return tree_state; });
    ON_CALL(merkle_db, siloed_nullifier_write(_)).WillByDefault([&](const auto&) {
        tree_state.nullifierTree.counter++;
    });
    ON_CALL(merkle_db, siloed_note_hash_write(_)).WillByDefault([&](const auto&) {
        tree_state.noteHashTree.counter++;
    });
    ON_CALL(merkle_db, unique_note_hash_write(_)).WillByDefault([&](const auto&) {
        tree_state.noteHashTree.counter++;
    });

    EXPECT_CALL(merkle_db, create_checkpoint()).Times(2);
    // ... (further merkle_db expectations elided)

    tx_execution.simulate(tx);
    auto events = tx_event_emitter.dump_events(); // accessor name assumed

    bool has_startup_event = false;
    auto expected_private_append_tree_events =
        tx.nonRevertibleAccumulatedData.noteHashes.size() + tx.nonRevertibleAccumulatedData.nullifiers.size() +
        tx.revertibleAccumulatedData.noteHashes.size() + tx.revertibleAccumulatedData.nullifiers.size();
    auto actual_private_append_tree_events = 0;

    auto expected_l2_l1_msg_events =
        tx.nonRevertibleAccumulatedData.l2ToL1Messages.size() + tx.revertibleAccumulatedData.l2ToL1Messages.size();
    auto actual_l2_l1_msg_events = 0;

    auto expected_public_call_events = 0;
    auto actual_public_call_events = 0;

    auto reverts = 0;
    bool has_collect_fee_event = false;

    for (const auto& tx_event : events) {
        if (/* startup event */) {
            has_startup_event = true;
            continue;
        }
        // Remaining events are per-phase events; extraction of phase_event is elided.
        if (phase_event.reverted) {
            reverts++;
        }
        auto event = phase_event.event;
        if (/* private append-tree event */) {
            actual_private_append_tree_events++;
        } else if (/* L2-to-L1 message event */) {
            actual_l2_l1_msg_events++;
        } else if (/* public call event */) {
            actual_public_call_events++;
        } else if (/* collect-fee event */) {
            has_collect_fee_event = true;
        }
    }

    EXPECT_TRUE(has_startup_event);
    EXPECT_EQ(actual_private_append_tree_events, expected_private_append_tree_events);
    EXPECT_EQ(expected_l2_l1_msg_events, actual_l2_l1_msg_events);
    EXPECT_EQ(expected_public_call_events, actual_public_call_events);
    EXPECT_TRUE(has_collect_fee_event);
    EXPECT_EQ(reverts, 1);
}
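// Same shape again, with the L2-to-L1 message count presumably exceeding
// MAX_L2_TO_L1_MSGS_PER_TX and producing exactly one reverted phase.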
TEST_F(TxExecutionTest, L2ToL1MessageLimitReached)
{
    Tx tx = {
        .hash = "0x1234567890abcdef",
        .nonRevertibleAccumulatedData = { /* note hashes, nullifiers, L2-to-L1 messages (elided) */ },
        .revertibleAccumulatedData = { /* note hashes, nullifiers, L2-to-L1 messages (elided) */ },
        // ...
    };

    AppendOnlyTreeSnapshot dummy_snapshot = {
        // ...
        .nextAvailableLeafIndex = 0,
    };
    TreeStates tree_state = {
        .noteHashTree = { .tree = dummy_snapshot, .counter = 0 },
        .nullifierTree = { .tree = dummy_snapshot, .counter = 0 },
        .l1ToL2MessageTree = { .tree = dummy_snapshot, .counter = 0 },
        .publicDataTree = { .tree = dummy_snapshot, .counter = 0 },
    };

    // Tree writes bump the corresponding counters so that the limit check can trip.
    ON_CALL(merkle_db, get_tree_state()).WillByDefault([&]() { return tree_state; });
    ON_CALL(merkle_db, siloed_nullifier_write(_)).WillByDefault([&](const auto&) {
        tree_state.nullifierTree.counter++;
    });
    ON_CALL(merkle_db, siloed_note_hash_write(_)).WillByDefault([&](const auto&) {
        tree_state.noteHashTree.counter++;
    });
    ON_CALL(merkle_db, unique_note_hash_write(_)).WillByDefault([&](const auto&) {
        tree_state.noteHashTree.counter++;
    });

    EXPECT_CALL(merkle_db, create_checkpoint()).Times(2);
    // ... (further merkle_db expectations elided)

    tx_execution.simulate(tx);
    auto events = tx_event_emitter.dump_events(); // accessor name assumed

    bool has_startup_event = false;
    auto expected_private_append_tree_events =
        tx.nonRevertibleAccumulatedData.noteHashes.size() + tx.nonRevertibleAccumulatedData.nullifiers.size() +
        tx.revertibleAccumulatedData.noteHashes.size() + tx.revertibleAccumulatedData.nullifiers.size();
    auto actual_private_append_tree_events = 0;

    auto expected_l2_l1_msg_events =
        tx.nonRevertibleAccumulatedData.l2ToL1Messages.size() + tx.revertibleAccumulatedData.l2ToL1Messages.size();
    auto actual_l2_l1_msg_events = 0;

    auto expected_public_call_events = 0;
    auto actual_public_call_events = 0;

    auto reverts = 0;
    bool has_collect_fee_event = false;

    for (const auto& tx_event : events) {
        if (/* startup event */) {
            has_startup_event = true;
            continue;
        }
        // Remaining events are per-phase events; extraction of phase_event is elided.
        if (phase_event.reverted) {
            reverts++;
        }
        auto event = phase_event.event;
        if (/* private append-tree event */) {
            actual_private_append_tree_events++;
        } else if (/* L2-to-L1 message event */) {
            actual_l2_l1_msg_events++;
        } else if (/* public call event */) {
            actual_public_call_events++;
        } else if (/* collect-fee event */) {
            has_collect_fee_event = true;
        }
    }

    EXPECT_TRUE(has_startup_event);
    EXPECT_EQ(actual_private_append_tree_events, expected_private_append_tree_events);
    EXPECT_EQ(expected_l2_l1_msg_events, actual_l2_l1_msg_events);
    EXPECT_EQ(expected_public_call_events, actual_public_call_events);
    EXPECT_TRUE(has_collect_fee_event);
    EXPECT_EQ(reverts, 1);
}