uint32_t round_constant,
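// Sigma_1 of the SHA-256 round function (FIPS 180-4): S1 = ROTR^6(e) ^ ROTR^11(e) ^ ROTR^25(e).
// Each rotation is witnessed via ror_with_witness; the two_pow_* columns hold the constants
// 2^6, 2^11 and 2^25, presumably consumed by the rotation decomposition constraints.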
uint32_t rot_6 = ror_with_witness(state[4], 6, C::sha256_e_rotr_6, C::sha256_lhs_e_6, C::sha256_rhs_e_6, trace);
trace.set(C::sha256_two_pow_6, row, 64);
trace.set(C::sha256_two_pow_11, row, 2048);
trace.set(C::sha256_two_pow_25, row, 33554432);
trace.set(C::sha256_e_rotr_6_xor_e_rotr_11, row, rot_6 ^ rot_11);
uint64_t S1 = rot_6 ^ rot_11 ^ rot_25;
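// Choice function: ch(e, f, g) = (e & f) ^ (~e & g).
// The negation and both ANDs are committed as separate witness columns.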
uint32_t not_e = ~state[4];
trace.set(C::sha256_not_e, row, not_e);
uint32_t e_and_f = state[4] & state[5];
trace.set(C::sha256_e_and_f, row, e_and_f);
uint32_t not_e_and_g = not_e & state[6];
trace.set(C::sha256_not_e_and_g, row, not_e_and_g);
uint64_t ch = e_and_f ^ not_e_and_g;
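// Sigma_0 of the round function: S0 = ROTR^2(a) ^ ROTR^13(a) ^ ROTR^22(a), with the helper
// constants 2^2, 2^13 and 2^22 committed alongside the witnessed rotations.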
uint32_t rot_2 = ror_with_witness(state[0], 2, C::sha256_a_rotr_2, C::sha256_lhs_a_2, C::sha256_rhs_a_2, trace);
trace.set(C::sha256_two_pow_2, row, 4);
trace.set(C::sha256_two_pow_13, row, 8192);
trace.set(C::sha256_two_pow_22, row, 4194304);
trace.set(C::sha256_a_rotr_2_xor_a_rotr_13, row, rot_2 ^ rot_13);
uint64_t S0 = rot_2 ^ rot_13 ^ rot_22;
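// Majority function: maj(a, b, c) = (a & b) ^ (a & c) ^ (b & c), again with each pairwise AND
// and the intermediate XOR committed as its own column.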
uint32_t a_and_b = state[0] & state[1];
trace.set(C::sha256_a_and_b, row, a_and_b);
uint32_t a_and_c = state[0] & state[2];
trace.set(C::sha256_a_and_c, row, a_and_c);
uint32_t b_and_c = state[1] & state[2];
trace.set(C::sha256_b_and_c, row, b_and_c);
trace.set(C::sha256_a_and_b_xor_a_and_c, row, a_and_b ^ a_and_c);
uint64_t maj = a_and_b ^ a_and_c ^ b_and_c;
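// Standard SHA-256 state update: temp1 = h + S1 + ch + K[i] + W[i], temp2 = S0 + maj.
// The new a is temp1 + temp2 and the new e is d + temp1; both are computed in 64 bits and
// truncated back to 32 bits, which is exactly the mod-2^32 addition the spec requires.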
uint64_t temp1 = static_cast<uint64_t>(state[7]) + S1 + ch + round_constant + round_w;
uint64_t temp2 = S0 + maj;
uint64_t next_a = temp1 + temp2;
trace.set(C::sha256_round_constant, row, round_constant);
uint32_t a = static_cast<uint32_t>(next_a);
uint64_t next_e = state[3] + temp1;
uint32_t e = static_cast<uint32_t>(next_e);
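// Per SHA-256 compression event, the builder lays down a start row, a state-read row,
// input-loading rows, 64 round rows, and output write-back rows (as seen below).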
for (const auto& event : events) {
uint64_t state_addr = static_cast<uint64_t>(event.state_addr);
uint64_t input_addr = static_cast<uint64_t>(event.input_addr);
uint64_t output_addr = static_cast<uint64_t>(event.output_addr);
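// Highest address touched by each region: 8 state words, 16 input words, 8 output words.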
uint64_t max_state_addr = state_addr + 7;
uint64_t max_input_addr = input_addr + 15;
uint64_t max_output_addr = output_addr + 7;
{ C::sha256_sel, 1 },
{ C::sha256_start, 1 },
{ C::sha256_execution_clk, event.execution_clk },
{ C::sha256_space_id, event.space_id },
{ C::sha256_state_addr, state_addr },
{ C::sha256_input_addr, input_addr },
{ C::sha256_output_addr, output_addr },
{ C::sha256_max_state_addr, max_state_addr },
{ C::sha256_max_input_addr, max_input_addr },
{ C::sha256_max_output_addr, max_output_addr },
{ C::sha256_input_rounds_rem, 16 },
{ C::sha256_sel_is_input_round, 1 },
{ C::sha256_rounds_remaining, 64 },
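// If any of the three regions is out of range (presumably when its top address exceeds the
// addressable memory space), emit an error row flagging which region overflowed, set the
// general error, and latch.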
bool out_of_range_err = output_out_of_range || input_out_of_range || state_out_of_range;
if (out_of_range_err) {
{ C::sha256_sel_state_out_of_range_err, state_out_of_range ? 1 : 0 },
{ C::sha256_sel_input_out_of_range_err, input_out_of_range ? 1 : 0 },
{ C::sha256_sel_output_out_of_range_err, output_out_of_range ? 1 : 0 },
{ C::sha256_mem_out_of_range_err, 1 },
{ C::sha256_err, 1 },
{ C::sha256_latch, 1 },
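// Happy path: the first memory row reads the eight 32-bit initial-state words from
// state_addr..state_addr+7, records their observed tags, and pins the values as
// init_a..init_h for the round constraints.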
{ C::sha256_sel_mem_state_or_output, 1 },
{ C::sha256_memory_address_0_, state_addr },
{ C::sha256_memory_address_1_, state_addr + 1 },
{ C::sha256_memory_address_2_, state_addr + 2 },
{ C::sha256_memory_address_3_, state_addr + 3 },
{ C::sha256_memory_address_4_, state_addr + 4 },
{ C::sha256_memory_address_5_, state_addr + 5 },
{ C::sha256_memory_address_6_, state_addr + 6 },
{ C::sha256_memory_address_7_, state_addr + 7 },
{ C::sha256_memory_register_0_, event.state[0].as_ff() },
{ C::sha256_memory_register_1_, event.state[1].as_ff() },
{ C::sha256_memory_register_2_, event.state[2].as_ff() },
{ C::sha256_memory_register_3_, event.state[3].as_ff() },
{ C::sha256_memory_register_4_, event.state[4].as_ff() },
{ C::sha256_memory_register_5_, event.state[5].as_ff() },
{ C::sha256_memory_register_6_, event.state[6].as_ff() },
{ C::sha256_memory_register_7_, event.state[7].as_ff() },
{ C::sha256_init_a, event.state[0].as_ff() },
{ C::sha256_init_b, event.state[1].as_ff() },
{ C::sha256_init_c, event.state[2].as_ff() },
{ C::sha256_init_d, event.state[3].as_ff() },
{ C::sha256_init_e, event.state[4].as_ff() },
{ C::sha256_init_f, event.state[5].as_ff() },
{ C::sha256_init_g, event.state[6].as_ff() },
{ C::sha256_init_h, event.state[7].as_ff() },
{ C::sha256_memory_tag_0_, static_cast<uint8_t>(event.state[0].get_tag()) },
{ C::sha256_memory_tag_1_, static_cast<uint8_t>(event.state[1].get_tag()) },
{ C::sha256_memory_tag_2_, static_cast<uint8_t>(event.state[2].get_tag()) },
{ C::sha256_memory_tag_3_, static_cast<uint8_t>(event.state[3].get_tag()) },
{ C::sha256_memory_tag_4_, static_cast<uint8_t>(event.state[4].get_tag()) },
{ C::sha256_memory_tag_5_, static_cast<uint8_t>(event.state[5].get_tag()) },
{ C::sha256_memory_tag_6_, static_cast<uint8_t>(event.state[6].get_tag()) },
{ C::sha256_memory_tag_7_, static_cast<uint8_t>(event.state[7].get_tag()) },
bool invalid_state_tag_err = std::ranges::any_of(
    event.state, [](const MemoryValue& state) { return state.get_tag() != MemoryTag::U32; });
if (invalid_state_tag_err) {
uint64_t batched_check = 0;
for (uint32_t i = 0; i < event.state.size(); i++) {
(static_cast<uint64_t>(event.state[i].get_tag()) - static_cast<uint64_t>(MemoryTag::U32))
{ C::sha256_sel_invalid_state_tag_err, 1 },
{ C::sha256_batch_tag_inv, FF(batched_check).invert() },
{ C::sha256_latch, 1 },
{ C::sha256_err, 1 },
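// Input-loading phase: one row per 32-bit message word, with inverse hints for the remaining
// input counter and the tag difference. The error check looks only at the last fetched word,
// presumably because simulation stops reading as soon as a tag is not U32; in that case every
// input row raises the error selector and the final row latches it.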
bool invalid_tag_err = event.input.back().get_tag() != MemoryTag::U32;
for (uint32_t i = 0; i < event.input.size(); i++) {
uint32_t input_rounds_rem = 16 - i;
FF input_rounds_rem_inv = input_rounds_rem == 0 ? 0 : FF(input_rounds_rem).invert();
FF input_tag = FF(static_cast<uint8_t>(round_input.get_tag()));
FF input_tag_diff = input_tag - expected_tag;
FF input_tag_diff_inv = input_tag_diff == 0 ? 0 : input_tag_diff.invert();
bool is_last = (i == event.input.size() - 1);
{ C::sha256_sel, 1 },
{ C::sha256_execution_clk, event.execution_clk },
{ C::sha256_space_id, event.space_id },
{ C::sha256_output_addr, output_addr },
{ C::sha256_sel_is_input_round, 1 },
{ C::sha256_u32_tag, expected_tag },
{ C::sha256_sel_read_input_from_memory, 1 },
{ C::sha256_input_rounds_rem, input_rounds_rem },
{ C::sha256_input_rounds_rem_inv, input_rounds_rem_inv },
{ C::sha256_input_addr, input_addr + i },
{ C::sha256_input, round_input.as_ff() },
{ C::sha256_input_tag, input_tag },
{ C::sha256_input_tag_diff_inv, input_tag_diff_inv },
{ C::sha256_w, round_input.as_ff() },
{ C::sha256_sel_invalid_input_tag_err, invalid_tag_err ? 1 : 0 },
{ C::sha256_sel_invalid_input_row_tag_err, (is_last && invalid_tag_err) ? 1 : 0 },
{ C::sha256_err, invalid_tag_err ? 1 : 0 },
{ C::sha256_latch, (is_last && invalid_tag_err) ? 1 : 0 },
if (invalid_tag_err) {
row += event.input.size();
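// Unpack the tagged memory values into plain uint32_t words: the eight-word working state
// and the sixteen-slot message-schedule window (prev_w_helpers).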
std::array<uint32_t, 8> state;
std::ranges::transform(event.state.begin(), event.state.end(), state.begin(), [](const MemoryValue& val) {
    return val.as<uint32_t>();
std::ranges::transform(event.input.begin(),
                       prev_w_helpers.begin(),
                       [](const MemoryValue& val) { return val.as<uint32_t>(); });
std::array<uint32_t, 8> round_state = state;
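// 64 compression rounds. The first 16 rounds consume the loaded message words directly;
// from round 16 onwards sel_compute_w is set and W[t] is extended from the schedule, per
// FIPS 180-4: W[t] = sigma_1(W[t-2]) + W[t-7] + sigma_0(W[t-15]) + W[t-16] (mod 2^32).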
for (size_t i = 0; i < 64; i++) {
bool is_an_input_round = i < 16;
FF inv = FF(64 - i).invert();
{ C::sha256_sel, 1 },
{ C::sha256_execution_clk, event.execution_clk },
{ C::sha256_space_id, event.space_id },
{ C::sha256_output_addr, output_addr },
{ C::sha256_two_pow_32, 1UL << 32 },
{ C::sha256_xor_sel, 2 },
{ C::sha256_perform_round, 1 },
{ C::sha256_round_count, i },
{ C::sha256_rounds_remaining, 64 - i },
{ C::sha256_rounds_remaining_inv, inv },
{ C::sha256_w, round_w },
{ C::sha256_sel_compute_w, is_an_input_round ? 0 : 1 },
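// Slide the 16-word message-schedule window: drop the oldest word and append the word
// consumed by this round.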
for (size_t j = 0; j < 15; j++) {
prev_w_helpers[j] = prev_w_helpers[j + 1];
prev_w_helpers[15] = round_w;
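// After the 64th round, a latch row with round_count == 64 marks the end of the round phase.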
{ C::sha256_latch, 1 },
{ C::sha256_sel, 1 },
{ C::sha256_xor_sel, 2 },
{ C::sha256_round_count, 64 },
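// Output write-back: store round_state[i] + state[i] (the feed-forward addition of the
// initial state into the compressed state, wrapping mod 2^32 via uint32_t arithmetic) to
// output_addr..output_addr+7 as U32 values. two_pow_32 presumably feeds the in-circuit
// wrap-around check on these additions.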
{ C::sha256_execution_clk, event.execution_clk },
{ C::sha256_space_id, event.space_id },
{ C::sha256_sel_mem_state_or_output, 1 },
{ C::sha256_two_pow_32, 1UL << 32 },
{ C::sha256_output_addr, output_addr },
{ C::sha256_memory_address_0_, output_addr },
{ C::sha256_memory_address_1_, output_addr + 1 },
{ C::sha256_memory_address_2_, output_addr + 2 },
{ C::sha256_memory_address_3_, output_addr + 3 },
{ C::sha256_memory_address_4_, output_addr + 4 },
{ C::sha256_memory_address_5_, output_addr + 5 },
{ C::sha256_memory_address_6_, output_addr + 6 },
{ C::sha256_memory_address_7_, output_addr + 7 },
{ C::sha256_memory_register_0_, round_state[0] + state[0] },
{ C::sha256_memory_register_1_, round_state[1] + state[1] },
{ C::sha256_memory_register_2_, round_state[2] + state[2] },
{ C::sha256_memory_register_3_, round_state[3] + state[3] },
{ C::sha256_memory_register_4_, round_state[4] + state[4] },
{ C::sha256_memory_register_5_, round_state[5] + state[5] },
{ C::sha256_memory_register_6_, round_state[6] + state[6] },
{ C::sha256_memory_register_7_, round_state[7] + state[7] },
{ C::sha256_memory_tag_0_, static_cast<uint8_t>(MemoryTag::U32) },
{ C::sha256_memory_tag_1_, static_cast<uint8_t>(MemoryTag::U32) },
{ C::sha256_memory_tag_2_, static_cast<uint8_t>(MemoryTag::U32) },
{ C::sha256_memory_tag_3_, static_cast<uint8_t>(MemoryTag::U32) },
{ C::sha256_memory_tag_4_, static_cast<uint8_t>(MemoryTag::U32) },
{ C::sha256_memory_tag_5_, static_cast<uint8_t>(MemoryTag::U32) },
{ C::sha256_memory_tag_6_, static_cast<uint8_t>(MemoryTag::U32) },
{ C::sha256_memory_tag_7_, static_cast<uint8_t>(MemoryTag::U32) },