/* Mirror of https://github.com/ehw-fit/ariths-gen.git (synced 2025-04-20). */
#include <stdio.h>
#include <stdint.h>

/* One-bit logic gates; each consumes only bit 0 of its operands. */
uint8_t and_gate(uint8_t a, uint8_t b){
  return ((a >> 0) & 0x01) & ((b >> 0) & 0x01);
}

uint8_t xor_gate(uint8_t a, uint8_t b){
  return ((a >> 0) & 0x01) ^ ((b >> 0) & 0x01);
}

uint8_t or_gate(uint8_t a, uint8_t b){
  return ((a >> 0) & 0x01) | ((b >> 0) & 0x01);
}
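
/* Added note (not in the generated source): the gates mask bit 0 of each
 * argument, so any operand with its low bit set acts as logical 1, e.g.
 * and_gate(3, 1) == 1 and xor_gate(2, 1) == 1 (bit 1 of the 2 is ignored). */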
/* Half adder: bit 0 of the result is the sum (a XOR b), bit 1 is the
   carry (a AND b). */
uint8_t ha(uint8_t a, uint8_t b){
  uint8_t ha_out = 0;
  uint8_t ha_xor0 = 0;
  uint8_t ha_and0 = 0;

  ha_xor0 = xor_gate(((a >> 0) & 0x01), ((b >> 0) & 0x01));
  ha_and0 = and_gate(((a >> 0) & 0x01), ((b >> 0) & 0x01));

  ha_out |= ((ha_xor0 >> 0) & 0x01ull) << 0;
  ha_out |= ((ha_and0 >> 0) & 0x01ull) << 1;
  return ha_out;
}
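
/* Illustration (added): for one-bit inputs ha(a, b) == a + b, packed as
 * carry:sum -- ha(0,0) == 0x0, ha(0,1) == ha(1,0) == 0x1, ha(1,1) == 0x2. */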
/* Full adder: the chained XORs produce the sum bit, and the OR of the two
   AND terms produces the carry-out (packed into bit 1 of the result). */
uint8_t fa(uint8_t a, uint8_t b, uint8_t cin){
  uint8_t fa_out = 0;
  uint8_t fa_xor0 = 0;
  uint8_t fa_and0 = 0;
  uint8_t fa_xor1 = 0;
  uint8_t fa_and1 = 0;
  uint8_t fa_or0 = 0;

  fa_xor0 = xor_gate(((a >> 0) & 0x01), ((b >> 0) & 0x01));
  fa_and0 = and_gate(((a >> 0) & 0x01), ((b >> 0) & 0x01));
  fa_xor1 = xor_gate(((fa_xor0 >> 0) & 0x01), ((cin >> 0) & 0x01));
  fa_and1 = and_gate(((fa_xor0 >> 0) & 0x01), ((cin >> 0) & 0x01));
  fa_or0 = or_gate(((fa_and0 >> 0) & 0x01), ((fa_and1 >> 0) & 0x01));

  fa_out |= ((fa_xor1 >> 0) & 0x01ull) << 0;
  fa_out |= ((fa_or0 >> 0) & 0x01ull) << 1;
  return fa_out;
}
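
/* Minimal self-check sketch (an addition for illustration, not part of the
 * generated netlist): exhaustively verifies that fa() returns the 2-bit sum
 * a + b + cin (sum in bit 0, carry-out in bit 1) for all one-bit inputs. */
static int fa_selftest(void){
  for (uint8_t v = 0; v < 8; v++){
    uint8_t a = v & 0x01, b = (v >> 1) & 0x01, cin = (v >> 2) & 0x01;
    if (fa(a, b, cin) != (uint8_t)(a + b + cin))
      return 0; /* mismatch against the arithmetic reference */
  }
  return 1; /* all 8 input combinations agree */
}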
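/* Added overview (inferred from the generated structure, not original
 * commentary): u_arrmul32 is a plain unsigned array multiplier. Row j ANDs
 * every bit a[i] with b[j] and folds the partial products into the running
 * sum through a row of ha/fa cells; wires are named <cell><column>_<row>. */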
uint64_t u_arrmul32(uint64_t a, uint64_t b){
  uint64_t u_arrmul32_out = 0;

uint8_t u_arrmul32_and0_0 = 0;
|
|
uint8_t u_arrmul32_and1_0 = 0;
|
|
uint8_t u_arrmul32_and2_0 = 0;
|
|
uint8_t u_arrmul32_and3_0 = 0;
|
|
uint8_t u_arrmul32_and4_0 = 0;
|
|
uint8_t u_arrmul32_and5_0 = 0;
|
|
uint8_t u_arrmul32_and6_0 = 0;
|
|
uint8_t u_arrmul32_and7_0 = 0;
|
|
uint8_t u_arrmul32_and8_0 = 0;
|
|
uint8_t u_arrmul32_and9_0 = 0;
|
|
uint8_t u_arrmul32_and10_0 = 0;
|
|
uint8_t u_arrmul32_and11_0 = 0;
|
|
uint8_t u_arrmul32_and12_0 = 0;
|
|
uint8_t u_arrmul32_and13_0 = 0;
|
|
uint8_t u_arrmul32_and14_0 = 0;
|
|
uint8_t u_arrmul32_and15_0 = 0;
|
|
uint8_t u_arrmul32_and16_0 = 0;
|
|
uint8_t u_arrmul32_and17_0 = 0;
|
|
uint8_t u_arrmul32_and18_0 = 0;
|
|
uint8_t u_arrmul32_and19_0 = 0;
|
|
uint8_t u_arrmul32_and20_0 = 0;
|
|
uint8_t u_arrmul32_and21_0 = 0;
|
|
uint8_t u_arrmul32_and22_0 = 0;
|
|
uint8_t u_arrmul32_and23_0 = 0;
|
|
uint8_t u_arrmul32_and24_0 = 0;
|
|
uint8_t u_arrmul32_and25_0 = 0;
|
|
uint8_t u_arrmul32_and26_0 = 0;
|
|
uint8_t u_arrmul32_and27_0 = 0;
|
|
uint8_t u_arrmul32_and28_0 = 0;
|
|
uint8_t u_arrmul32_and29_0 = 0;
|
|
uint8_t u_arrmul32_and30_0 = 0;
|
|
uint8_t u_arrmul32_and31_0 = 0;
|
|
uint8_t u_arrmul32_and0_1 = 0;
|
|
uint8_t u_arrmul32_ha0_1_xor0 = 0;
|
|
uint8_t u_arrmul32_ha0_1_and0 = 0;
|
|
uint8_t u_arrmul32_and1_1 = 0;
|
|
uint8_t u_arrmul32_fa1_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa1_1_or0 = 0;
|
|
uint8_t u_arrmul32_and2_1 = 0;
|
|
uint8_t u_arrmul32_fa2_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa2_1_or0 = 0;
|
|
uint8_t u_arrmul32_and3_1 = 0;
|
|
uint8_t u_arrmul32_fa3_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa3_1_or0 = 0;
|
|
uint8_t u_arrmul32_and4_1 = 0;
|
|
uint8_t u_arrmul32_fa4_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa4_1_or0 = 0;
|
|
uint8_t u_arrmul32_and5_1 = 0;
|
|
uint8_t u_arrmul32_fa5_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa5_1_or0 = 0;
|
|
uint8_t u_arrmul32_and6_1 = 0;
|
|
uint8_t u_arrmul32_fa6_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa6_1_or0 = 0;
|
|
uint8_t u_arrmul32_and7_1 = 0;
|
|
uint8_t u_arrmul32_fa7_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa7_1_or0 = 0;
|
|
uint8_t u_arrmul32_and8_1 = 0;
|
|
uint8_t u_arrmul32_fa8_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa8_1_or0 = 0;
|
|
uint8_t u_arrmul32_and9_1 = 0;
|
|
uint8_t u_arrmul32_fa9_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa9_1_or0 = 0;
|
|
uint8_t u_arrmul32_and10_1 = 0;
|
|
uint8_t u_arrmul32_fa10_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa10_1_or0 = 0;
|
|
uint8_t u_arrmul32_and11_1 = 0;
|
|
uint8_t u_arrmul32_fa11_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa11_1_or0 = 0;
|
|
uint8_t u_arrmul32_and12_1 = 0;
|
|
uint8_t u_arrmul32_fa12_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa12_1_or0 = 0;
|
|
uint8_t u_arrmul32_and13_1 = 0;
|
|
uint8_t u_arrmul32_fa13_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa13_1_or0 = 0;
|
|
uint8_t u_arrmul32_and14_1 = 0;
|
|
uint8_t u_arrmul32_fa14_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa14_1_or0 = 0;
|
|
uint8_t u_arrmul32_and15_1 = 0;
|
|
uint8_t u_arrmul32_fa15_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa15_1_or0 = 0;
|
|
uint8_t u_arrmul32_and16_1 = 0;
|
|
uint8_t u_arrmul32_fa16_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa16_1_or0 = 0;
|
|
uint8_t u_arrmul32_and17_1 = 0;
|
|
uint8_t u_arrmul32_fa17_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa17_1_or0 = 0;
|
|
uint8_t u_arrmul32_and18_1 = 0;
|
|
uint8_t u_arrmul32_fa18_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa18_1_or0 = 0;
|
|
uint8_t u_arrmul32_and19_1 = 0;
|
|
uint8_t u_arrmul32_fa19_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa19_1_or0 = 0;
|
|
uint8_t u_arrmul32_and20_1 = 0;
|
|
uint8_t u_arrmul32_fa20_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa20_1_or0 = 0;
|
|
uint8_t u_arrmul32_and21_1 = 0;
|
|
uint8_t u_arrmul32_fa21_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa21_1_or0 = 0;
|
|
uint8_t u_arrmul32_and22_1 = 0;
|
|
uint8_t u_arrmul32_fa22_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa22_1_or0 = 0;
|
|
uint8_t u_arrmul32_and23_1 = 0;
|
|
uint8_t u_arrmul32_fa23_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa23_1_or0 = 0;
|
|
uint8_t u_arrmul32_and24_1 = 0;
|
|
uint8_t u_arrmul32_fa24_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa24_1_or0 = 0;
|
|
uint8_t u_arrmul32_and25_1 = 0;
|
|
uint8_t u_arrmul32_fa25_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa25_1_or0 = 0;
|
|
uint8_t u_arrmul32_and26_1 = 0;
|
|
uint8_t u_arrmul32_fa26_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa26_1_or0 = 0;
|
|
uint8_t u_arrmul32_and27_1 = 0;
|
|
uint8_t u_arrmul32_fa27_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa27_1_or0 = 0;
|
|
uint8_t u_arrmul32_and28_1 = 0;
|
|
uint8_t u_arrmul32_fa28_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa28_1_or0 = 0;
|
|
uint8_t u_arrmul32_and29_1 = 0;
|
|
uint8_t u_arrmul32_fa29_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa29_1_or0 = 0;
|
|
uint8_t u_arrmul32_and30_1 = 0;
|
|
uint8_t u_arrmul32_fa30_1_xor1 = 0;
|
|
uint8_t u_arrmul32_fa30_1_or0 = 0;
|
|
uint8_t u_arrmul32_and31_1 = 0;
|
|
uint8_t u_arrmul32_ha31_1_xor0 = 0;
|
|
uint8_t u_arrmul32_ha31_1_and0 = 0;
|
|
uint8_t u_arrmul32_and0_2 = 0;
|
|
uint8_t u_arrmul32_ha0_2_xor0 = 0;
|
|
uint8_t u_arrmul32_ha0_2_and0 = 0;
|
|
uint8_t u_arrmul32_and1_2 = 0;
|
|
uint8_t u_arrmul32_fa1_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa1_2_or0 = 0;
|
|
uint8_t u_arrmul32_and2_2 = 0;
|
|
uint8_t u_arrmul32_fa2_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa2_2_or0 = 0;
|
|
uint8_t u_arrmul32_and3_2 = 0;
|
|
uint8_t u_arrmul32_fa3_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa3_2_or0 = 0;
|
|
uint8_t u_arrmul32_and4_2 = 0;
|
|
uint8_t u_arrmul32_fa4_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa4_2_or0 = 0;
|
|
uint8_t u_arrmul32_and5_2 = 0;
|
|
uint8_t u_arrmul32_fa5_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa5_2_or0 = 0;
|
|
uint8_t u_arrmul32_and6_2 = 0;
|
|
uint8_t u_arrmul32_fa6_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa6_2_or0 = 0;
|
|
uint8_t u_arrmul32_and7_2 = 0;
|
|
uint8_t u_arrmul32_fa7_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa7_2_or0 = 0;
|
|
uint8_t u_arrmul32_and8_2 = 0;
|
|
uint8_t u_arrmul32_fa8_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa8_2_or0 = 0;
|
|
uint8_t u_arrmul32_and9_2 = 0;
|
|
uint8_t u_arrmul32_fa9_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa9_2_or0 = 0;
|
|
uint8_t u_arrmul32_and10_2 = 0;
|
|
uint8_t u_arrmul32_fa10_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa10_2_or0 = 0;
|
|
uint8_t u_arrmul32_and11_2 = 0;
|
|
uint8_t u_arrmul32_fa11_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa11_2_or0 = 0;
|
|
uint8_t u_arrmul32_and12_2 = 0;
|
|
uint8_t u_arrmul32_fa12_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa12_2_or0 = 0;
|
|
uint8_t u_arrmul32_and13_2 = 0;
|
|
uint8_t u_arrmul32_fa13_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa13_2_or0 = 0;
|
|
uint8_t u_arrmul32_and14_2 = 0;
|
|
uint8_t u_arrmul32_fa14_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa14_2_or0 = 0;
|
|
uint8_t u_arrmul32_and15_2 = 0;
|
|
uint8_t u_arrmul32_fa15_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa15_2_or0 = 0;
|
|
uint8_t u_arrmul32_and16_2 = 0;
|
|
uint8_t u_arrmul32_fa16_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa16_2_or0 = 0;
|
|
uint8_t u_arrmul32_and17_2 = 0;
|
|
uint8_t u_arrmul32_fa17_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa17_2_or0 = 0;
|
|
uint8_t u_arrmul32_and18_2 = 0;
|
|
uint8_t u_arrmul32_fa18_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa18_2_or0 = 0;
|
|
uint8_t u_arrmul32_and19_2 = 0;
|
|
uint8_t u_arrmul32_fa19_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa19_2_or0 = 0;
|
|
uint8_t u_arrmul32_and20_2 = 0;
|
|
uint8_t u_arrmul32_fa20_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa20_2_or0 = 0;
|
|
uint8_t u_arrmul32_and21_2 = 0;
|
|
uint8_t u_arrmul32_fa21_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa21_2_or0 = 0;
|
|
uint8_t u_arrmul32_and22_2 = 0;
|
|
uint8_t u_arrmul32_fa22_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa22_2_or0 = 0;
|
|
uint8_t u_arrmul32_and23_2 = 0;
|
|
uint8_t u_arrmul32_fa23_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa23_2_or0 = 0;
|
|
uint8_t u_arrmul32_and24_2 = 0;
|
|
uint8_t u_arrmul32_fa24_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa24_2_or0 = 0;
|
|
uint8_t u_arrmul32_and25_2 = 0;
|
|
uint8_t u_arrmul32_fa25_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa25_2_or0 = 0;
|
|
uint8_t u_arrmul32_and26_2 = 0;
|
|
uint8_t u_arrmul32_fa26_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa26_2_or0 = 0;
|
|
uint8_t u_arrmul32_and27_2 = 0;
|
|
uint8_t u_arrmul32_fa27_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa27_2_or0 = 0;
|
|
uint8_t u_arrmul32_and28_2 = 0;
|
|
uint8_t u_arrmul32_fa28_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa28_2_or0 = 0;
|
|
uint8_t u_arrmul32_and29_2 = 0;
|
|
uint8_t u_arrmul32_fa29_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa29_2_or0 = 0;
|
|
uint8_t u_arrmul32_and30_2 = 0;
|
|
uint8_t u_arrmul32_fa30_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa30_2_or0 = 0;
|
|
uint8_t u_arrmul32_and31_2 = 0;
|
|
uint8_t u_arrmul32_fa31_2_xor1 = 0;
|
|
uint8_t u_arrmul32_fa31_2_or0 = 0;
|
|
uint8_t u_arrmul32_and0_3 = 0;
|
|
uint8_t u_arrmul32_ha0_3_xor0 = 0;
|
|
uint8_t u_arrmul32_ha0_3_and0 = 0;
|
|
uint8_t u_arrmul32_and1_3 = 0;
|
|
uint8_t u_arrmul32_fa1_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa1_3_or0 = 0;
|
|
uint8_t u_arrmul32_and2_3 = 0;
|
|
uint8_t u_arrmul32_fa2_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa2_3_or0 = 0;
|
|
uint8_t u_arrmul32_and3_3 = 0;
|
|
uint8_t u_arrmul32_fa3_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa3_3_or0 = 0;
|
|
uint8_t u_arrmul32_and4_3 = 0;
|
|
uint8_t u_arrmul32_fa4_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa4_3_or0 = 0;
|
|
uint8_t u_arrmul32_and5_3 = 0;
|
|
uint8_t u_arrmul32_fa5_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa5_3_or0 = 0;
|
|
uint8_t u_arrmul32_and6_3 = 0;
|
|
uint8_t u_arrmul32_fa6_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa6_3_or0 = 0;
|
|
uint8_t u_arrmul32_and7_3 = 0;
|
|
uint8_t u_arrmul32_fa7_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa7_3_or0 = 0;
|
|
uint8_t u_arrmul32_and8_3 = 0;
|
|
uint8_t u_arrmul32_fa8_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa8_3_or0 = 0;
|
|
uint8_t u_arrmul32_and9_3 = 0;
|
|
uint8_t u_arrmul32_fa9_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa9_3_or0 = 0;
|
|
uint8_t u_arrmul32_and10_3 = 0;
|
|
uint8_t u_arrmul32_fa10_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa10_3_or0 = 0;
|
|
uint8_t u_arrmul32_and11_3 = 0;
|
|
uint8_t u_arrmul32_fa11_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa11_3_or0 = 0;
|
|
uint8_t u_arrmul32_and12_3 = 0;
|
|
uint8_t u_arrmul32_fa12_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa12_3_or0 = 0;
|
|
uint8_t u_arrmul32_and13_3 = 0;
|
|
uint8_t u_arrmul32_fa13_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa13_3_or0 = 0;
|
|
uint8_t u_arrmul32_and14_3 = 0;
|
|
uint8_t u_arrmul32_fa14_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa14_3_or0 = 0;
|
|
uint8_t u_arrmul32_and15_3 = 0;
|
|
uint8_t u_arrmul32_fa15_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa15_3_or0 = 0;
|
|
uint8_t u_arrmul32_and16_3 = 0;
|
|
uint8_t u_arrmul32_fa16_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa16_3_or0 = 0;
|
|
uint8_t u_arrmul32_and17_3 = 0;
|
|
uint8_t u_arrmul32_fa17_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa17_3_or0 = 0;
|
|
uint8_t u_arrmul32_and18_3 = 0;
|
|
uint8_t u_arrmul32_fa18_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa18_3_or0 = 0;
|
|
uint8_t u_arrmul32_and19_3 = 0;
|
|
uint8_t u_arrmul32_fa19_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa19_3_or0 = 0;
|
|
uint8_t u_arrmul32_and20_3 = 0;
|
|
uint8_t u_arrmul32_fa20_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa20_3_or0 = 0;
|
|
uint8_t u_arrmul32_and21_3 = 0;
|
|
uint8_t u_arrmul32_fa21_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa21_3_or0 = 0;
|
|
uint8_t u_arrmul32_and22_3 = 0;
|
|
uint8_t u_arrmul32_fa22_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa22_3_or0 = 0;
|
|
uint8_t u_arrmul32_and23_3 = 0;
|
|
uint8_t u_arrmul32_fa23_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa23_3_or0 = 0;
|
|
uint8_t u_arrmul32_and24_3 = 0;
|
|
uint8_t u_arrmul32_fa24_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa24_3_or0 = 0;
|
|
uint8_t u_arrmul32_and25_3 = 0;
|
|
uint8_t u_arrmul32_fa25_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa25_3_or0 = 0;
|
|
uint8_t u_arrmul32_and26_3 = 0;
|
|
uint8_t u_arrmul32_fa26_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa26_3_or0 = 0;
|
|
uint8_t u_arrmul32_and27_3 = 0;
|
|
uint8_t u_arrmul32_fa27_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa27_3_or0 = 0;
|
|
uint8_t u_arrmul32_and28_3 = 0;
|
|
uint8_t u_arrmul32_fa28_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa28_3_or0 = 0;
|
|
uint8_t u_arrmul32_and29_3 = 0;
|
|
uint8_t u_arrmul32_fa29_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa29_3_or0 = 0;
|
|
uint8_t u_arrmul32_and30_3 = 0;
|
|
uint8_t u_arrmul32_fa30_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa30_3_or0 = 0;
|
|
uint8_t u_arrmul32_and31_3 = 0;
|
|
uint8_t u_arrmul32_fa31_3_xor1 = 0;
|
|
uint8_t u_arrmul32_fa31_3_or0 = 0;
|
|
uint8_t u_arrmul32_and0_4 = 0;
|
|
uint8_t u_arrmul32_ha0_4_xor0 = 0;
|
|
uint8_t u_arrmul32_ha0_4_and0 = 0;
|
|
uint8_t u_arrmul32_and1_4 = 0;
|
|
uint8_t u_arrmul32_fa1_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa1_4_or0 = 0;
|
|
uint8_t u_arrmul32_and2_4 = 0;
|
|
uint8_t u_arrmul32_fa2_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa2_4_or0 = 0;
|
|
uint8_t u_arrmul32_and3_4 = 0;
|
|
uint8_t u_arrmul32_fa3_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa3_4_or0 = 0;
|
|
uint8_t u_arrmul32_and4_4 = 0;
|
|
uint8_t u_arrmul32_fa4_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa4_4_or0 = 0;
|
|
uint8_t u_arrmul32_and5_4 = 0;
|
|
uint8_t u_arrmul32_fa5_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa5_4_or0 = 0;
|
|
uint8_t u_arrmul32_and6_4 = 0;
|
|
uint8_t u_arrmul32_fa6_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa6_4_or0 = 0;
|
|
uint8_t u_arrmul32_and7_4 = 0;
|
|
uint8_t u_arrmul32_fa7_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa7_4_or0 = 0;
|
|
uint8_t u_arrmul32_and8_4 = 0;
|
|
uint8_t u_arrmul32_fa8_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa8_4_or0 = 0;
|
|
uint8_t u_arrmul32_and9_4 = 0;
|
|
uint8_t u_arrmul32_fa9_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa9_4_or0 = 0;
|
|
uint8_t u_arrmul32_and10_4 = 0;
|
|
uint8_t u_arrmul32_fa10_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa10_4_or0 = 0;
|
|
uint8_t u_arrmul32_and11_4 = 0;
|
|
uint8_t u_arrmul32_fa11_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa11_4_or0 = 0;
|
|
uint8_t u_arrmul32_and12_4 = 0;
|
|
uint8_t u_arrmul32_fa12_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa12_4_or0 = 0;
|
|
uint8_t u_arrmul32_and13_4 = 0;
|
|
uint8_t u_arrmul32_fa13_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa13_4_or0 = 0;
|
|
uint8_t u_arrmul32_and14_4 = 0;
|
|
uint8_t u_arrmul32_fa14_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa14_4_or0 = 0;
|
|
uint8_t u_arrmul32_and15_4 = 0;
|
|
uint8_t u_arrmul32_fa15_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa15_4_or0 = 0;
|
|
uint8_t u_arrmul32_and16_4 = 0;
|
|
uint8_t u_arrmul32_fa16_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa16_4_or0 = 0;
|
|
uint8_t u_arrmul32_and17_4 = 0;
|
|
uint8_t u_arrmul32_fa17_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa17_4_or0 = 0;
|
|
uint8_t u_arrmul32_and18_4 = 0;
|
|
uint8_t u_arrmul32_fa18_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa18_4_or0 = 0;
|
|
uint8_t u_arrmul32_and19_4 = 0;
|
|
uint8_t u_arrmul32_fa19_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa19_4_or0 = 0;
|
|
uint8_t u_arrmul32_and20_4 = 0;
|
|
uint8_t u_arrmul32_fa20_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa20_4_or0 = 0;
|
|
uint8_t u_arrmul32_and21_4 = 0;
|
|
uint8_t u_arrmul32_fa21_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa21_4_or0 = 0;
|
|
uint8_t u_arrmul32_and22_4 = 0;
|
|
uint8_t u_arrmul32_fa22_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa22_4_or0 = 0;
|
|
uint8_t u_arrmul32_and23_4 = 0;
|
|
uint8_t u_arrmul32_fa23_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa23_4_or0 = 0;
|
|
uint8_t u_arrmul32_and24_4 = 0;
|
|
uint8_t u_arrmul32_fa24_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa24_4_or0 = 0;
|
|
uint8_t u_arrmul32_and25_4 = 0;
|
|
uint8_t u_arrmul32_fa25_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa25_4_or0 = 0;
|
|
uint8_t u_arrmul32_and26_4 = 0;
|
|
uint8_t u_arrmul32_fa26_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa26_4_or0 = 0;
|
|
uint8_t u_arrmul32_and27_4 = 0;
|
|
uint8_t u_arrmul32_fa27_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa27_4_or0 = 0;
|
|
uint8_t u_arrmul32_and28_4 = 0;
|
|
uint8_t u_arrmul32_fa28_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa28_4_or0 = 0;
|
|
uint8_t u_arrmul32_and29_4 = 0;
|
|
uint8_t u_arrmul32_fa29_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa29_4_or0 = 0;
|
|
uint8_t u_arrmul32_and30_4 = 0;
|
|
uint8_t u_arrmul32_fa30_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa30_4_or0 = 0;
|
|
uint8_t u_arrmul32_and31_4 = 0;
|
|
uint8_t u_arrmul32_fa31_4_xor1 = 0;
|
|
uint8_t u_arrmul32_fa31_4_or0 = 0;
|
|
uint8_t u_arrmul32_and0_5 = 0;
|
|
uint8_t u_arrmul32_ha0_5_xor0 = 0;
|
|
uint8_t u_arrmul32_ha0_5_and0 = 0;
|
|
uint8_t u_arrmul32_and1_5 = 0;
|
|
uint8_t u_arrmul32_fa1_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa1_5_or0 = 0;
|
|
uint8_t u_arrmul32_and2_5 = 0;
|
|
uint8_t u_arrmul32_fa2_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa2_5_or0 = 0;
|
|
uint8_t u_arrmul32_and3_5 = 0;
|
|
uint8_t u_arrmul32_fa3_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa3_5_or0 = 0;
|
|
uint8_t u_arrmul32_and4_5 = 0;
|
|
uint8_t u_arrmul32_fa4_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa4_5_or0 = 0;
|
|
uint8_t u_arrmul32_and5_5 = 0;
|
|
uint8_t u_arrmul32_fa5_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa5_5_or0 = 0;
|
|
uint8_t u_arrmul32_and6_5 = 0;
|
|
uint8_t u_arrmul32_fa6_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa6_5_or0 = 0;
|
|
uint8_t u_arrmul32_and7_5 = 0;
|
|
uint8_t u_arrmul32_fa7_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa7_5_or0 = 0;
|
|
uint8_t u_arrmul32_and8_5 = 0;
|
|
uint8_t u_arrmul32_fa8_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa8_5_or0 = 0;
|
|
uint8_t u_arrmul32_and9_5 = 0;
|
|
uint8_t u_arrmul32_fa9_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa9_5_or0 = 0;
|
|
uint8_t u_arrmul32_and10_5 = 0;
|
|
uint8_t u_arrmul32_fa10_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa10_5_or0 = 0;
|
|
uint8_t u_arrmul32_and11_5 = 0;
|
|
uint8_t u_arrmul32_fa11_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa11_5_or0 = 0;
|
|
uint8_t u_arrmul32_and12_5 = 0;
|
|
uint8_t u_arrmul32_fa12_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa12_5_or0 = 0;
|
|
uint8_t u_arrmul32_and13_5 = 0;
|
|
uint8_t u_arrmul32_fa13_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa13_5_or0 = 0;
|
|
uint8_t u_arrmul32_and14_5 = 0;
|
|
uint8_t u_arrmul32_fa14_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa14_5_or0 = 0;
|
|
uint8_t u_arrmul32_and15_5 = 0;
|
|
uint8_t u_arrmul32_fa15_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa15_5_or0 = 0;
|
|
uint8_t u_arrmul32_and16_5 = 0;
|
|
uint8_t u_arrmul32_fa16_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa16_5_or0 = 0;
|
|
uint8_t u_arrmul32_and17_5 = 0;
|
|
uint8_t u_arrmul32_fa17_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa17_5_or0 = 0;
|
|
uint8_t u_arrmul32_and18_5 = 0;
|
|
uint8_t u_arrmul32_fa18_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa18_5_or0 = 0;
|
|
uint8_t u_arrmul32_and19_5 = 0;
|
|
uint8_t u_arrmul32_fa19_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa19_5_or0 = 0;
|
|
uint8_t u_arrmul32_and20_5 = 0;
|
|
uint8_t u_arrmul32_fa20_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa20_5_or0 = 0;
|
|
uint8_t u_arrmul32_and21_5 = 0;
|
|
uint8_t u_arrmul32_fa21_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa21_5_or0 = 0;
|
|
uint8_t u_arrmul32_and22_5 = 0;
|
|
uint8_t u_arrmul32_fa22_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa22_5_or0 = 0;
|
|
uint8_t u_arrmul32_and23_5 = 0;
|
|
uint8_t u_arrmul32_fa23_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa23_5_or0 = 0;
|
|
uint8_t u_arrmul32_and24_5 = 0;
|
|
uint8_t u_arrmul32_fa24_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa24_5_or0 = 0;
|
|
uint8_t u_arrmul32_and25_5 = 0;
|
|
uint8_t u_arrmul32_fa25_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa25_5_or0 = 0;
|
|
uint8_t u_arrmul32_and26_5 = 0;
|
|
uint8_t u_arrmul32_fa26_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa26_5_or0 = 0;
|
|
uint8_t u_arrmul32_and27_5 = 0;
|
|
uint8_t u_arrmul32_fa27_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa27_5_or0 = 0;
|
|
uint8_t u_arrmul32_and28_5 = 0;
|
|
uint8_t u_arrmul32_fa28_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa28_5_or0 = 0;
|
|
uint8_t u_arrmul32_and29_5 = 0;
|
|
uint8_t u_arrmul32_fa29_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa29_5_or0 = 0;
|
|
uint8_t u_arrmul32_and30_5 = 0;
|
|
uint8_t u_arrmul32_fa30_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa30_5_or0 = 0;
|
|
uint8_t u_arrmul32_and31_5 = 0;
|
|
uint8_t u_arrmul32_fa31_5_xor1 = 0;
|
|
uint8_t u_arrmul32_fa31_5_or0 = 0;
|
|
uint8_t u_arrmul32_and0_6 = 0;
|
|
uint8_t u_arrmul32_ha0_6_xor0 = 0;
|
|
uint8_t u_arrmul32_ha0_6_and0 = 0;
|
|
uint8_t u_arrmul32_and1_6 = 0;
|
|
uint8_t u_arrmul32_fa1_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa1_6_or0 = 0;
|
|
uint8_t u_arrmul32_and2_6 = 0;
|
|
uint8_t u_arrmul32_fa2_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa2_6_or0 = 0;
|
|
uint8_t u_arrmul32_and3_6 = 0;
|
|
uint8_t u_arrmul32_fa3_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa3_6_or0 = 0;
|
|
uint8_t u_arrmul32_and4_6 = 0;
|
|
uint8_t u_arrmul32_fa4_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa4_6_or0 = 0;
|
|
uint8_t u_arrmul32_and5_6 = 0;
|
|
uint8_t u_arrmul32_fa5_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa5_6_or0 = 0;
|
|
uint8_t u_arrmul32_and6_6 = 0;
|
|
uint8_t u_arrmul32_fa6_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa6_6_or0 = 0;
|
|
uint8_t u_arrmul32_and7_6 = 0;
|
|
uint8_t u_arrmul32_fa7_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa7_6_or0 = 0;
|
|
uint8_t u_arrmul32_and8_6 = 0;
|
|
uint8_t u_arrmul32_fa8_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa8_6_or0 = 0;
|
|
uint8_t u_arrmul32_and9_6 = 0;
|
|
uint8_t u_arrmul32_fa9_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa9_6_or0 = 0;
|
|
uint8_t u_arrmul32_and10_6 = 0;
|
|
uint8_t u_arrmul32_fa10_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa10_6_or0 = 0;
|
|
uint8_t u_arrmul32_and11_6 = 0;
|
|
uint8_t u_arrmul32_fa11_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa11_6_or0 = 0;
|
|
uint8_t u_arrmul32_and12_6 = 0;
|
|
uint8_t u_arrmul32_fa12_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa12_6_or0 = 0;
|
|
uint8_t u_arrmul32_and13_6 = 0;
|
|
uint8_t u_arrmul32_fa13_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa13_6_or0 = 0;
|
|
uint8_t u_arrmul32_and14_6 = 0;
|
|
uint8_t u_arrmul32_fa14_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa14_6_or0 = 0;
|
|
uint8_t u_arrmul32_and15_6 = 0;
|
|
uint8_t u_arrmul32_fa15_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa15_6_or0 = 0;
|
|
uint8_t u_arrmul32_and16_6 = 0;
|
|
uint8_t u_arrmul32_fa16_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa16_6_or0 = 0;
|
|
uint8_t u_arrmul32_and17_6 = 0;
|
|
uint8_t u_arrmul32_fa17_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa17_6_or0 = 0;
|
|
uint8_t u_arrmul32_and18_6 = 0;
|
|
uint8_t u_arrmul32_fa18_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa18_6_or0 = 0;
|
|
uint8_t u_arrmul32_and19_6 = 0;
|
|
uint8_t u_arrmul32_fa19_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa19_6_or0 = 0;
|
|
uint8_t u_arrmul32_and20_6 = 0;
|
|
uint8_t u_arrmul32_fa20_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa20_6_or0 = 0;
|
|
uint8_t u_arrmul32_and21_6 = 0;
|
|
uint8_t u_arrmul32_fa21_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa21_6_or0 = 0;
|
|
uint8_t u_arrmul32_and22_6 = 0;
|
|
uint8_t u_arrmul32_fa22_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa22_6_or0 = 0;
|
|
uint8_t u_arrmul32_and23_6 = 0;
|
|
uint8_t u_arrmul32_fa23_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa23_6_or0 = 0;
|
|
uint8_t u_arrmul32_and24_6 = 0;
|
|
uint8_t u_arrmul32_fa24_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa24_6_or0 = 0;
|
|
uint8_t u_arrmul32_and25_6 = 0;
|
|
uint8_t u_arrmul32_fa25_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa25_6_or0 = 0;
|
|
uint8_t u_arrmul32_and26_6 = 0;
|
|
uint8_t u_arrmul32_fa26_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa26_6_or0 = 0;
|
|
uint8_t u_arrmul32_and27_6 = 0;
|
|
uint8_t u_arrmul32_fa27_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa27_6_or0 = 0;
|
|
uint8_t u_arrmul32_and28_6 = 0;
|
|
uint8_t u_arrmul32_fa28_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa28_6_or0 = 0;
|
|
uint8_t u_arrmul32_and29_6 = 0;
|
|
uint8_t u_arrmul32_fa29_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa29_6_or0 = 0;
|
|
uint8_t u_arrmul32_and30_6 = 0;
|
|
uint8_t u_arrmul32_fa30_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa30_6_or0 = 0;
|
|
uint8_t u_arrmul32_and31_6 = 0;
|
|
uint8_t u_arrmul32_fa31_6_xor1 = 0;
|
|
uint8_t u_arrmul32_fa31_6_or0 = 0;
|
|
uint8_t u_arrmul32_and0_7 = 0;
|
|
uint8_t u_arrmul32_ha0_7_xor0 = 0;
|
|
uint8_t u_arrmul32_ha0_7_and0 = 0;
|
|
uint8_t u_arrmul32_and1_7 = 0;
|
|
uint8_t u_arrmul32_fa1_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa1_7_or0 = 0;
|
|
uint8_t u_arrmul32_and2_7 = 0;
|
|
uint8_t u_arrmul32_fa2_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa2_7_or0 = 0;
|
|
uint8_t u_arrmul32_and3_7 = 0;
|
|
uint8_t u_arrmul32_fa3_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa3_7_or0 = 0;
|
|
uint8_t u_arrmul32_and4_7 = 0;
|
|
uint8_t u_arrmul32_fa4_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa4_7_or0 = 0;
|
|
uint8_t u_arrmul32_and5_7 = 0;
|
|
uint8_t u_arrmul32_fa5_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa5_7_or0 = 0;
|
|
uint8_t u_arrmul32_and6_7 = 0;
|
|
uint8_t u_arrmul32_fa6_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa6_7_or0 = 0;
|
|
uint8_t u_arrmul32_and7_7 = 0;
|
|
uint8_t u_arrmul32_fa7_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa7_7_or0 = 0;
|
|
uint8_t u_arrmul32_and8_7 = 0;
|
|
uint8_t u_arrmul32_fa8_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa8_7_or0 = 0;
|
|
uint8_t u_arrmul32_and9_7 = 0;
|
|
uint8_t u_arrmul32_fa9_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa9_7_or0 = 0;
|
|
uint8_t u_arrmul32_and10_7 = 0;
|
|
uint8_t u_arrmul32_fa10_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa10_7_or0 = 0;
|
|
uint8_t u_arrmul32_and11_7 = 0;
|
|
uint8_t u_arrmul32_fa11_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa11_7_or0 = 0;
|
|
uint8_t u_arrmul32_and12_7 = 0;
|
|
uint8_t u_arrmul32_fa12_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa12_7_or0 = 0;
|
|
uint8_t u_arrmul32_and13_7 = 0;
|
|
uint8_t u_arrmul32_fa13_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa13_7_or0 = 0;
|
|
uint8_t u_arrmul32_and14_7 = 0;
|
|
uint8_t u_arrmul32_fa14_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa14_7_or0 = 0;
|
|
uint8_t u_arrmul32_and15_7 = 0;
|
|
uint8_t u_arrmul32_fa15_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa15_7_or0 = 0;
|
|
uint8_t u_arrmul32_and16_7 = 0;
|
|
uint8_t u_arrmul32_fa16_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa16_7_or0 = 0;
|
|
uint8_t u_arrmul32_and17_7 = 0;
|
|
uint8_t u_arrmul32_fa17_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa17_7_or0 = 0;
|
|
uint8_t u_arrmul32_and18_7 = 0;
|
|
uint8_t u_arrmul32_fa18_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa18_7_or0 = 0;
|
|
uint8_t u_arrmul32_and19_7 = 0;
|
|
uint8_t u_arrmul32_fa19_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa19_7_or0 = 0;
|
|
uint8_t u_arrmul32_and20_7 = 0;
|
|
uint8_t u_arrmul32_fa20_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa20_7_or0 = 0;
|
|
uint8_t u_arrmul32_and21_7 = 0;
|
|
uint8_t u_arrmul32_fa21_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa21_7_or0 = 0;
|
|
uint8_t u_arrmul32_and22_7 = 0;
|
|
uint8_t u_arrmul32_fa22_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa22_7_or0 = 0;
|
|
uint8_t u_arrmul32_and23_7 = 0;
|
|
uint8_t u_arrmul32_fa23_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa23_7_or0 = 0;
|
|
uint8_t u_arrmul32_and24_7 = 0;
|
|
uint8_t u_arrmul32_fa24_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa24_7_or0 = 0;
|
|
uint8_t u_arrmul32_and25_7 = 0;
|
|
uint8_t u_arrmul32_fa25_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa25_7_or0 = 0;
|
|
uint8_t u_arrmul32_and26_7 = 0;
|
|
uint8_t u_arrmul32_fa26_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa26_7_or0 = 0;
|
|
uint8_t u_arrmul32_and27_7 = 0;
|
|
uint8_t u_arrmul32_fa27_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa27_7_or0 = 0;
|
|
uint8_t u_arrmul32_and28_7 = 0;
|
|
uint8_t u_arrmul32_fa28_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa28_7_or0 = 0;
|
|
uint8_t u_arrmul32_and29_7 = 0;
|
|
uint8_t u_arrmul32_fa29_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa29_7_or0 = 0;
|
|
uint8_t u_arrmul32_and30_7 = 0;
|
|
uint8_t u_arrmul32_fa30_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa30_7_or0 = 0;
|
|
uint8_t u_arrmul32_and31_7 = 0;
|
|
uint8_t u_arrmul32_fa31_7_xor1 = 0;
|
|
uint8_t u_arrmul32_fa31_7_or0 = 0;
|
|
uint8_t u_arrmul32_and0_8 = 0;
|
|
uint8_t u_arrmul32_ha0_8_xor0 = 0;
|
|
uint8_t u_arrmul32_ha0_8_and0 = 0;
|
|
uint8_t u_arrmul32_and1_8 = 0;
|
|
uint8_t u_arrmul32_fa1_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa1_8_or0 = 0;
|
|
uint8_t u_arrmul32_and2_8 = 0;
|
|
uint8_t u_arrmul32_fa2_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa2_8_or0 = 0;
|
|
uint8_t u_arrmul32_and3_8 = 0;
|
|
uint8_t u_arrmul32_fa3_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa3_8_or0 = 0;
|
|
uint8_t u_arrmul32_and4_8 = 0;
|
|
uint8_t u_arrmul32_fa4_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa4_8_or0 = 0;
|
|
uint8_t u_arrmul32_and5_8 = 0;
|
|
uint8_t u_arrmul32_fa5_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa5_8_or0 = 0;
|
|
uint8_t u_arrmul32_and6_8 = 0;
|
|
uint8_t u_arrmul32_fa6_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa6_8_or0 = 0;
|
|
uint8_t u_arrmul32_and7_8 = 0;
|
|
uint8_t u_arrmul32_fa7_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa7_8_or0 = 0;
|
|
uint8_t u_arrmul32_and8_8 = 0;
|
|
uint8_t u_arrmul32_fa8_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa8_8_or0 = 0;
|
|
uint8_t u_arrmul32_and9_8 = 0;
|
|
uint8_t u_arrmul32_fa9_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa9_8_or0 = 0;
|
|
uint8_t u_arrmul32_and10_8 = 0;
|
|
uint8_t u_arrmul32_fa10_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa10_8_or0 = 0;
|
|
uint8_t u_arrmul32_and11_8 = 0;
|
|
uint8_t u_arrmul32_fa11_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa11_8_or0 = 0;
|
|
uint8_t u_arrmul32_and12_8 = 0;
|
|
uint8_t u_arrmul32_fa12_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa12_8_or0 = 0;
|
|
uint8_t u_arrmul32_and13_8 = 0;
|
|
uint8_t u_arrmul32_fa13_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa13_8_or0 = 0;
|
|
uint8_t u_arrmul32_and14_8 = 0;
|
|
uint8_t u_arrmul32_fa14_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa14_8_or0 = 0;
|
|
uint8_t u_arrmul32_and15_8 = 0;
|
|
uint8_t u_arrmul32_fa15_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa15_8_or0 = 0;
|
|
uint8_t u_arrmul32_and16_8 = 0;
|
|
uint8_t u_arrmul32_fa16_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa16_8_or0 = 0;
|
|
uint8_t u_arrmul32_and17_8 = 0;
|
|
uint8_t u_arrmul32_fa17_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa17_8_or0 = 0;
|
|
uint8_t u_arrmul32_and18_8 = 0;
|
|
uint8_t u_arrmul32_fa18_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa18_8_or0 = 0;
|
|
uint8_t u_arrmul32_and19_8 = 0;
|
|
uint8_t u_arrmul32_fa19_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa19_8_or0 = 0;
|
|
uint8_t u_arrmul32_and20_8 = 0;
|
|
uint8_t u_arrmul32_fa20_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa20_8_or0 = 0;
|
|
uint8_t u_arrmul32_and21_8 = 0;
|
|
uint8_t u_arrmul32_fa21_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa21_8_or0 = 0;
|
|
uint8_t u_arrmul32_and22_8 = 0;
|
|
uint8_t u_arrmul32_fa22_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa22_8_or0 = 0;
|
|
uint8_t u_arrmul32_and23_8 = 0;
|
|
uint8_t u_arrmul32_fa23_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa23_8_or0 = 0;
|
|
uint8_t u_arrmul32_and24_8 = 0;
|
|
uint8_t u_arrmul32_fa24_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa24_8_or0 = 0;
|
|
uint8_t u_arrmul32_and25_8 = 0;
|
|
uint8_t u_arrmul32_fa25_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa25_8_or0 = 0;
|
|
uint8_t u_arrmul32_and26_8 = 0;
|
|
uint8_t u_arrmul32_fa26_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa26_8_or0 = 0;
|
|
uint8_t u_arrmul32_and27_8 = 0;
|
|
uint8_t u_arrmul32_fa27_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa27_8_or0 = 0;
|
|
uint8_t u_arrmul32_and28_8 = 0;
|
|
uint8_t u_arrmul32_fa28_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa28_8_or0 = 0;
|
|
uint8_t u_arrmul32_and29_8 = 0;
|
|
uint8_t u_arrmul32_fa29_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa29_8_or0 = 0;
|
|
uint8_t u_arrmul32_and30_8 = 0;
|
|
uint8_t u_arrmul32_fa30_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa30_8_or0 = 0;
|
|
uint8_t u_arrmul32_and31_8 = 0;
|
|
uint8_t u_arrmul32_fa31_8_xor1 = 0;
|
|
uint8_t u_arrmul32_fa31_8_or0 = 0;
|
|
uint8_t u_arrmul32_and0_9 = 0;
|
|
uint8_t u_arrmul32_ha0_9_xor0 = 0;
|
|
uint8_t u_arrmul32_ha0_9_and0 = 0;
|
|
uint8_t u_arrmul32_and1_9 = 0;
|
|
uint8_t u_arrmul32_fa1_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa1_9_or0 = 0;
|
|
uint8_t u_arrmul32_and2_9 = 0;
|
|
uint8_t u_arrmul32_fa2_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa2_9_or0 = 0;
|
|
uint8_t u_arrmul32_and3_9 = 0;
|
|
uint8_t u_arrmul32_fa3_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa3_9_or0 = 0;
|
|
uint8_t u_arrmul32_and4_9 = 0;
|
|
uint8_t u_arrmul32_fa4_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa4_9_or0 = 0;
|
|
uint8_t u_arrmul32_and5_9 = 0;
|
|
uint8_t u_arrmul32_fa5_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa5_9_or0 = 0;
|
|
uint8_t u_arrmul32_and6_9 = 0;
|
|
uint8_t u_arrmul32_fa6_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa6_9_or0 = 0;
|
|
uint8_t u_arrmul32_and7_9 = 0;
|
|
uint8_t u_arrmul32_fa7_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa7_9_or0 = 0;
|
|
uint8_t u_arrmul32_and8_9 = 0;
|
|
uint8_t u_arrmul32_fa8_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa8_9_or0 = 0;
|
|
uint8_t u_arrmul32_and9_9 = 0;
|
|
uint8_t u_arrmul32_fa9_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa9_9_or0 = 0;
|
|
uint8_t u_arrmul32_and10_9 = 0;
|
|
uint8_t u_arrmul32_fa10_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa10_9_or0 = 0;
|
|
uint8_t u_arrmul32_and11_9 = 0;
|
|
uint8_t u_arrmul32_fa11_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa11_9_or0 = 0;
|
|
uint8_t u_arrmul32_and12_9 = 0;
|
|
uint8_t u_arrmul32_fa12_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa12_9_or0 = 0;
|
|
uint8_t u_arrmul32_and13_9 = 0;
|
|
uint8_t u_arrmul32_fa13_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa13_9_or0 = 0;
|
|
uint8_t u_arrmul32_and14_9 = 0;
|
|
uint8_t u_arrmul32_fa14_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa14_9_or0 = 0;
|
|
uint8_t u_arrmul32_and15_9 = 0;
|
|
uint8_t u_arrmul32_fa15_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa15_9_or0 = 0;
|
|
uint8_t u_arrmul32_and16_9 = 0;
|
|
uint8_t u_arrmul32_fa16_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa16_9_or0 = 0;
|
|
uint8_t u_arrmul32_and17_9 = 0;
|
|
uint8_t u_arrmul32_fa17_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa17_9_or0 = 0;
|
|
uint8_t u_arrmul32_and18_9 = 0;
|
|
uint8_t u_arrmul32_fa18_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa18_9_or0 = 0;
|
|
uint8_t u_arrmul32_and19_9 = 0;
|
|
uint8_t u_arrmul32_fa19_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa19_9_or0 = 0;
|
|
uint8_t u_arrmul32_and20_9 = 0;
|
|
uint8_t u_arrmul32_fa20_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa20_9_or0 = 0;
|
|
uint8_t u_arrmul32_and21_9 = 0;
|
|
uint8_t u_arrmul32_fa21_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa21_9_or0 = 0;
|
|
uint8_t u_arrmul32_and22_9 = 0;
|
|
uint8_t u_arrmul32_fa22_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa22_9_or0 = 0;
|
|
uint8_t u_arrmul32_and23_9 = 0;
|
|
uint8_t u_arrmul32_fa23_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa23_9_or0 = 0;
|
|
uint8_t u_arrmul32_and24_9 = 0;
|
|
uint8_t u_arrmul32_fa24_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa24_9_or0 = 0;
|
|
uint8_t u_arrmul32_and25_9 = 0;
|
|
uint8_t u_arrmul32_fa25_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa25_9_or0 = 0;
|
|
uint8_t u_arrmul32_and26_9 = 0;
|
|
uint8_t u_arrmul32_fa26_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa26_9_or0 = 0;
|
|
uint8_t u_arrmul32_and27_9 = 0;
|
|
uint8_t u_arrmul32_fa27_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa27_9_or0 = 0;
|
|
uint8_t u_arrmul32_and28_9 = 0;
|
|
uint8_t u_arrmul32_fa28_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa28_9_or0 = 0;
|
|
uint8_t u_arrmul32_and29_9 = 0;
|
|
uint8_t u_arrmul32_fa29_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa29_9_or0 = 0;
|
|
uint8_t u_arrmul32_and30_9 = 0;
|
|
uint8_t u_arrmul32_fa30_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa30_9_or0 = 0;
|
|
uint8_t u_arrmul32_and31_9 = 0;
|
|
uint8_t u_arrmul32_fa31_9_xor1 = 0;
|
|
uint8_t u_arrmul32_fa31_9_or0 = 0;
|
|
uint8_t u_arrmul32_and0_10 = 0;
|
|
uint8_t u_arrmul32_ha0_10_xor0 = 0;
|
|
uint8_t u_arrmul32_ha0_10_and0 = 0;
|
|
uint8_t u_arrmul32_and1_10 = 0;
|
|
uint8_t u_arrmul32_fa1_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa1_10_or0 = 0;
|
|
uint8_t u_arrmul32_and2_10 = 0;
|
|
uint8_t u_arrmul32_fa2_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa2_10_or0 = 0;
|
|
uint8_t u_arrmul32_and3_10 = 0;
|
|
uint8_t u_arrmul32_fa3_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa3_10_or0 = 0;
|
|
uint8_t u_arrmul32_and4_10 = 0;
|
|
uint8_t u_arrmul32_fa4_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa4_10_or0 = 0;
|
|
uint8_t u_arrmul32_and5_10 = 0;
|
|
uint8_t u_arrmul32_fa5_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa5_10_or0 = 0;
|
|
uint8_t u_arrmul32_and6_10 = 0;
|
|
uint8_t u_arrmul32_fa6_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa6_10_or0 = 0;
|
|
uint8_t u_arrmul32_and7_10 = 0;
|
|
uint8_t u_arrmul32_fa7_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa7_10_or0 = 0;
|
|
uint8_t u_arrmul32_and8_10 = 0;
|
|
uint8_t u_arrmul32_fa8_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa8_10_or0 = 0;
|
|
uint8_t u_arrmul32_and9_10 = 0;
|
|
uint8_t u_arrmul32_fa9_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa9_10_or0 = 0;
|
|
uint8_t u_arrmul32_and10_10 = 0;
|
|
uint8_t u_arrmul32_fa10_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa10_10_or0 = 0;
|
|
uint8_t u_arrmul32_and11_10 = 0;
|
|
uint8_t u_arrmul32_fa11_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa11_10_or0 = 0;
|
|
uint8_t u_arrmul32_and12_10 = 0;
|
|
uint8_t u_arrmul32_fa12_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa12_10_or0 = 0;
|
|
uint8_t u_arrmul32_and13_10 = 0;
|
|
uint8_t u_arrmul32_fa13_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa13_10_or0 = 0;
|
|
uint8_t u_arrmul32_and14_10 = 0;
|
|
uint8_t u_arrmul32_fa14_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa14_10_or0 = 0;
|
|
uint8_t u_arrmul32_and15_10 = 0;
|
|
uint8_t u_arrmul32_fa15_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa15_10_or0 = 0;
|
|
uint8_t u_arrmul32_and16_10 = 0;
|
|
uint8_t u_arrmul32_fa16_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa16_10_or0 = 0;
|
|
uint8_t u_arrmul32_and17_10 = 0;
|
|
uint8_t u_arrmul32_fa17_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa17_10_or0 = 0;
|
|
uint8_t u_arrmul32_and18_10 = 0;
|
|
uint8_t u_arrmul32_fa18_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa18_10_or0 = 0;
|
|
uint8_t u_arrmul32_and19_10 = 0;
|
|
uint8_t u_arrmul32_fa19_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa19_10_or0 = 0;
|
|
uint8_t u_arrmul32_and20_10 = 0;
|
|
uint8_t u_arrmul32_fa20_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa20_10_or0 = 0;
|
|
uint8_t u_arrmul32_and21_10 = 0;
|
|
uint8_t u_arrmul32_fa21_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa21_10_or0 = 0;
|
|
uint8_t u_arrmul32_and22_10 = 0;
|
|
uint8_t u_arrmul32_fa22_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa22_10_or0 = 0;
|
|
uint8_t u_arrmul32_and23_10 = 0;
|
|
uint8_t u_arrmul32_fa23_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa23_10_or0 = 0;
|
|
uint8_t u_arrmul32_and24_10 = 0;
|
|
uint8_t u_arrmul32_fa24_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa24_10_or0 = 0;
|
|
uint8_t u_arrmul32_and25_10 = 0;
|
|
uint8_t u_arrmul32_fa25_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa25_10_or0 = 0;
|
|
uint8_t u_arrmul32_and26_10 = 0;
|
|
uint8_t u_arrmul32_fa26_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa26_10_or0 = 0;
|
|
uint8_t u_arrmul32_and27_10 = 0;
|
|
uint8_t u_arrmul32_fa27_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa27_10_or0 = 0;
|
|
uint8_t u_arrmul32_and28_10 = 0;
|
|
uint8_t u_arrmul32_fa28_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa28_10_or0 = 0;
|
|
uint8_t u_arrmul32_and29_10 = 0;
|
|
uint8_t u_arrmul32_fa29_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa29_10_or0 = 0;
|
|
uint8_t u_arrmul32_and30_10 = 0;
|
|
uint8_t u_arrmul32_fa30_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa30_10_or0 = 0;
|
|
uint8_t u_arrmul32_and31_10 = 0;
|
|
uint8_t u_arrmul32_fa31_10_xor1 = 0;
|
|
uint8_t u_arrmul32_fa31_10_or0 = 0;
|
|
uint8_t u_arrmul32_and0_11 = 0;
|
|
uint8_t u_arrmul32_ha0_11_xor0 = 0;
|
|
uint8_t u_arrmul32_ha0_11_and0 = 0;
|
|
uint8_t u_arrmul32_and1_11 = 0;
|
|
uint8_t u_arrmul32_fa1_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa1_11_or0 = 0;
|
|
uint8_t u_arrmul32_and2_11 = 0;
|
|
uint8_t u_arrmul32_fa2_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa2_11_or0 = 0;
|
|
uint8_t u_arrmul32_and3_11 = 0;
|
|
uint8_t u_arrmul32_fa3_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa3_11_or0 = 0;
|
|
uint8_t u_arrmul32_and4_11 = 0;
|
|
uint8_t u_arrmul32_fa4_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa4_11_or0 = 0;
|
|
uint8_t u_arrmul32_and5_11 = 0;
|
|
uint8_t u_arrmul32_fa5_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa5_11_or0 = 0;
|
|
uint8_t u_arrmul32_and6_11 = 0;
|
|
uint8_t u_arrmul32_fa6_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa6_11_or0 = 0;
|
|
uint8_t u_arrmul32_and7_11 = 0;
|
|
uint8_t u_arrmul32_fa7_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa7_11_or0 = 0;
|
|
uint8_t u_arrmul32_and8_11 = 0;
|
|
uint8_t u_arrmul32_fa8_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa8_11_or0 = 0;
|
|
uint8_t u_arrmul32_and9_11 = 0;
|
|
uint8_t u_arrmul32_fa9_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa9_11_or0 = 0;
|
|
uint8_t u_arrmul32_and10_11 = 0;
|
|
uint8_t u_arrmul32_fa10_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa10_11_or0 = 0;
|
|
uint8_t u_arrmul32_and11_11 = 0;
|
|
uint8_t u_arrmul32_fa11_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa11_11_or0 = 0;
|
|
uint8_t u_arrmul32_and12_11 = 0;
|
|
uint8_t u_arrmul32_fa12_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa12_11_or0 = 0;
|
|
uint8_t u_arrmul32_and13_11 = 0;
|
|
uint8_t u_arrmul32_fa13_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa13_11_or0 = 0;
|
|
uint8_t u_arrmul32_and14_11 = 0;
|
|
uint8_t u_arrmul32_fa14_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa14_11_or0 = 0;
|
|
uint8_t u_arrmul32_and15_11 = 0;
|
|
uint8_t u_arrmul32_fa15_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa15_11_or0 = 0;
|
|
uint8_t u_arrmul32_and16_11 = 0;
|
|
uint8_t u_arrmul32_fa16_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa16_11_or0 = 0;
|
|
uint8_t u_arrmul32_and17_11 = 0;
|
|
uint8_t u_arrmul32_fa17_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa17_11_or0 = 0;
|
|
uint8_t u_arrmul32_and18_11 = 0;
|
|
uint8_t u_arrmul32_fa18_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa18_11_or0 = 0;
|
|
uint8_t u_arrmul32_and19_11 = 0;
|
|
uint8_t u_arrmul32_fa19_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa19_11_or0 = 0;
|
|
uint8_t u_arrmul32_and20_11 = 0;
|
|
uint8_t u_arrmul32_fa20_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa20_11_or0 = 0;
|
|
uint8_t u_arrmul32_and21_11 = 0;
|
|
uint8_t u_arrmul32_fa21_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa21_11_or0 = 0;
|
|
uint8_t u_arrmul32_and22_11 = 0;
|
|
uint8_t u_arrmul32_fa22_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa22_11_or0 = 0;
|
|
uint8_t u_arrmul32_and23_11 = 0;
|
|
uint8_t u_arrmul32_fa23_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa23_11_or0 = 0;
|
|
uint8_t u_arrmul32_and24_11 = 0;
|
|
uint8_t u_arrmul32_fa24_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa24_11_or0 = 0;
|
|
uint8_t u_arrmul32_and25_11 = 0;
|
|
uint8_t u_arrmul32_fa25_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa25_11_or0 = 0;
|
|
uint8_t u_arrmul32_and26_11 = 0;
|
|
uint8_t u_arrmul32_fa26_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa26_11_or0 = 0;
|
|
uint8_t u_arrmul32_and27_11 = 0;
|
|
uint8_t u_arrmul32_fa27_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa27_11_or0 = 0;
|
|
uint8_t u_arrmul32_and28_11 = 0;
|
|
uint8_t u_arrmul32_fa28_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa28_11_or0 = 0;
|
|
uint8_t u_arrmul32_and29_11 = 0;
|
|
uint8_t u_arrmul32_fa29_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa29_11_or0 = 0;
|
|
uint8_t u_arrmul32_and30_11 = 0;
|
|
uint8_t u_arrmul32_fa30_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa30_11_or0 = 0;
|
|
uint8_t u_arrmul32_and31_11 = 0;
|
|
uint8_t u_arrmul32_fa31_11_xor1 = 0;
|
|
uint8_t u_arrmul32_fa31_11_or0 = 0;
|
|
uint8_t u_arrmul32_and0_12 = 0;
|
|
uint8_t u_arrmul32_ha0_12_xor0 = 0;
|
|
uint8_t u_arrmul32_ha0_12_and0 = 0;
|
|
uint8_t u_arrmul32_and1_12 = 0;
|
|
uint8_t u_arrmul32_fa1_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa1_12_or0 = 0;
|
|
uint8_t u_arrmul32_and2_12 = 0;
|
|
uint8_t u_arrmul32_fa2_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa2_12_or0 = 0;
|
|
uint8_t u_arrmul32_and3_12 = 0;
|
|
uint8_t u_arrmul32_fa3_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa3_12_or0 = 0;
|
|
uint8_t u_arrmul32_and4_12 = 0;
|
|
uint8_t u_arrmul32_fa4_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa4_12_or0 = 0;
|
|
uint8_t u_arrmul32_and5_12 = 0;
|
|
uint8_t u_arrmul32_fa5_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa5_12_or0 = 0;
|
|
uint8_t u_arrmul32_and6_12 = 0;
|
|
uint8_t u_arrmul32_fa6_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa6_12_or0 = 0;
|
|
uint8_t u_arrmul32_and7_12 = 0;
|
|
uint8_t u_arrmul32_fa7_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa7_12_or0 = 0;
|
|
uint8_t u_arrmul32_and8_12 = 0;
|
|
uint8_t u_arrmul32_fa8_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa8_12_or0 = 0;
|
|
uint8_t u_arrmul32_and9_12 = 0;
|
|
uint8_t u_arrmul32_fa9_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa9_12_or0 = 0;
|
|
uint8_t u_arrmul32_and10_12 = 0;
|
|
uint8_t u_arrmul32_fa10_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa10_12_or0 = 0;
|
|
uint8_t u_arrmul32_and11_12 = 0;
|
|
uint8_t u_arrmul32_fa11_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa11_12_or0 = 0;
|
|
uint8_t u_arrmul32_and12_12 = 0;
|
|
uint8_t u_arrmul32_fa12_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa12_12_or0 = 0;
|
|
uint8_t u_arrmul32_and13_12 = 0;
|
|
uint8_t u_arrmul32_fa13_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa13_12_or0 = 0;
|
|
uint8_t u_arrmul32_and14_12 = 0;
|
|
uint8_t u_arrmul32_fa14_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa14_12_or0 = 0;
|
|
uint8_t u_arrmul32_and15_12 = 0;
|
|
uint8_t u_arrmul32_fa15_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa15_12_or0 = 0;
|
|
uint8_t u_arrmul32_and16_12 = 0;
|
|
uint8_t u_arrmul32_fa16_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa16_12_or0 = 0;
|
|
uint8_t u_arrmul32_and17_12 = 0;
|
|
uint8_t u_arrmul32_fa17_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa17_12_or0 = 0;
|
|
uint8_t u_arrmul32_and18_12 = 0;
|
|
uint8_t u_arrmul32_fa18_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa18_12_or0 = 0;
|
|
uint8_t u_arrmul32_and19_12 = 0;
|
|
uint8_t u_arrmul32_fa19_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa19_12_or0 = 0;
|
|
uint8_t u_arrmul32_and20_12 = 0;
|
|
uint8_t u_arrmul32_fa20_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa20_12_or0 = 0;
|
|
uint8_t u_arrmul32_and21_12 = 0;
|
|
uint8_t u_arrmul32_fa21_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa21_12_or0 = 0;
|
|
uint8_t u_arrmul32_and22_12 = 0;
|
|
uint8_t u_arrmul32_fa22_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa22_12_or0 = 0;
|
|
uint8_t u_arrmul32_and23_12 = 0;
|
|
uint8_t u_arrmul32_fa23_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa23_12_or0 = 0;
|
|
uint8_t u_arrmul32_and24_12 = 0;
|
|
uint8_t u_arrmul32_fa24_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa24_12_or0 = 0;
|
|
uint8_t u_arrmul32_and25_12 = 0;
|
|
uint8_t u_arrmul32_fa25_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa25_12_or0 = 0;
|
|
uint8_t u_arrmul32_and26_12 = 0;
|
|
uint8_t u_arrmul32_fa26_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa26_12_or0 = 0;
|
|
uint8_t u_arrmul32_and27_12 = 0;
|
|
uint8_t u_arrmul32_fa27_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa27_12_or0 = 0;
|
|
uint8_t u_arrmul32_and28_12 = 0;
|
|
uint8_t u_arrmul32_fa28_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa28_12_or0 = 0;
|
|
uint8_t u_arrmul32_and29_12 = 0;
|
|
uint8_t u_arrmul32_fa29_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa29_12_or0 = 0;
|
|
uint8_t u_arrmul32_and30_12 = 0;
|
|
uint8_t u_arrmul32_fa30_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa30_12_or0 = 0;
|
|
uint8_t u_arrmul32_and31_12 = 0;
|
|
uint8_t u_arrmul32_fa31_12_xor1 = 0;
|
|
uint8_t u_arrmul32_fa31_12_or0 = 0;
|
|
uint8_t u_arrmul32_and0_13 = 0;
|
|
uint8_t u_arrmul32_ha0_13_xor0 = 0;
|
|
uint8_t u_arrmul32_ha0_13_and0 = 0;
|
|
uint8_t u_arrmul32_and1_13 = 0;
|
|
uint8_t u_arrmul32_fa1_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa1_13_or0 = 0;
|
|
uint8_t u_arrmul32_and2_13 = 0;
|
|
uint8_t u_arrmul32_fa2_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa2_13_or0 = 0;
|
|
uint8_t u_arrmul32_and3_13 = 0;
|
|
uint8_t u_arrmul32_fa3_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa3_13_or0 = 0;
|
|
uint8_t u_arrmul32_and4_13 = 0;
|
|
uint8_t u_arrmul32_fa4_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa4_13_or0 = 0;
|
|
uint8_t u_arrmul32_and5_13 = 0;
|
|
uint8_t u_arrmul32_fa5_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa5_13_or0 = 0;
|
|
uint8_t u_arrmul32_and6_13 = 0;
|
|
uint8_t u_arrmul32_fa6_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa6_13_or0 = 0;
|
|
uint8_t u_arrmul32_and7_13 = 0;
|
|
uint8_t u_arrmul32_fa7_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa7_13_or0 = 0;
|
|
uint8_t u_arrmul32_and8_13 = 0;
|
|
uint8_t u_arrmul32_fa8_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa8_13_or0 = 0;
|
|
uint8_t u_arrmul32_and9_13 = 0;
|
|
uint8_t u_arrmul32_fa9_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa9_13_or0 = 0;
|
|
uint8_t u_arrmul32_and10_13 = 0;
|
|
uint8_t u_arrmul32_fa10_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa10_13_or0 = 0;
|
|
uint8_t u_arrmul32_and11_13 = 0;
|
|
uint8_t u_arrmul32_fa11_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa11_13_or0 = 0;
|
|
uint8_t u_arrmul32_and12_13 = 0;
|
|
uint8_t u_arrmul32_fa12_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa12_13_or0 = 0;
|
|
uint8_t u_arrmul32_and13_13 = 0;
|
|
uint8_t u_arrmul32_fa13_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa13_13_or0 = 0;
|
|
uint8_t u_arrmul32_and14_13 = 0;
|
|
uint8_t u_arrmul32_fa14_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa14_13_or0 = 0;
|
|
uint8_t u_arrmul32_and15_13 = 0;
|
|
uint8_t u_arrmul32_fa15_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa15_13_or0 = 0;
|
|
uint8_t u_arrmul32_and16_13 = 0;
|
|
uint8_t u_arrmul32_fa16_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa16_13_or0 = 0;
|
|
uint8_t u_arrmul32_and17_13 = 0;
|
|
uint8_t u_arrmul32_fa17_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa17_13_or0 = 0;
|
|
uint8_t u_arrmul32_and18_13 = 0;
|
|
uint8_t u_arrmul32_fa18_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa18_13_or0 = 0;
|
|
uint8_t u_arrmul32_and19_13 = 0;
|
|
uint8_t u_arrmul32_fa19_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa19_13_or0 = 0;
|
|
uint8_t u_arrmul32_and20_13 = 0;
|
|
uint8_t u_arrmul32_fa20_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa20_13_or0 = 0;
|
|
uint8_t u_arrmul32_and21_13 = 0;
|
|
uint8_t u_arrmul32_fa21_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa21_13_or0 = 0;
|
|
uint8_t u_arrmul32_and22_13 = 0;
|
|
uint8_t u_arrmul32_fa22_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa22_13_or0 = 0;
|
|
uint8_t u_arrmul32_and23_13 = 0;
|
|
uint8_t u_arrmul32_fa23_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa23_13_or0 = 0;
|
|
uint8_t u_arrmul32_and24_13 = 0;
|
|
uint8_t u_arrmul32_fa24_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa24_13_or0 = 0;
|
|
uint8_t u_arrmul32_and25_13 = 0;
|
|
uint8_t u_arrmul32_fa25_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa25_13_or0 = 0;
|
|
uint8_t u_arrmul32_and26_13 = 0;
|
|
uint8_t u_arrmul32_fa26_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa26_13_or0 = 0;
|
|
uint8_t u_arrmul32_and27_13 = 0;
|
|
uint8_t u_arrmul32_fa27_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa27_13_or0 = 0;
|
|
uint8_t u_arrmul32_and28_13 = 0;
|
|
uint8_t u_arrmul32_fa28_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa28_13_or0 = 0;
|
|
uint8_t u_arrmul32_and29_13 = 0;
|
|
uint8_t u_arrmul32_fa29_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa29_13_or0 = 0;
|
|
uint8_t u_arrmul32_and30_13 = 0;
|
|
uint8_t u_arrmul32_fa30_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa30_13_or0 = 0;
|
|
uint8_t u_arrmul32_and31_13 = 0;
|
|
uint8_t u_arrmul32_fa31_13_xor1 = 0;
|
|
uint8_t u_arrmul32_fa31_13_or0 = 0;
|
|
uint8_t u_arrmul32_and0_14 = 0;
|
|
uint8_t u_arrmul32_ha0_14_xor0 = 0;
|
|
uint8_t u_arrmul32_ha0_14_and0 = 0;
|
|
uint8_t u_arrmul32_and1_14 = 0;
|
|
uint8_t u_arrmul32_fa1_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa1_14_or0 = 0;
|
|
uint8_t u_arrmul32_and2_14 = 0;
|
|
uint8_t u_arrmul32_fa2_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa2_14_or0 = 0;
|
|
uint8_t u_arrmul32_and3_14 = 0;
|
|
uint8_t u_arrmul32_fa3_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa3_14_or0 = 0;
|
|
uint8_t u_arrmul32_and4_14 = 0;
|
|
uint8_t u_arrmul32_fa4_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa4_14_or0 = 0;
|
|
uint8_t u_arrmul32_and5_14 = 0;
|
|
uint8_t u_arrmul32_fa5_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa5_14_or0 = 0;
|
|
uint8_t u_arrmul32_and6_14 = 0;
|
|
uint8_t u_arrmul32_fa6_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa6_14_or0 = 0;
|
|
uint8_t u_arrmul32_and7_14 = 0;
|
|
uint8_t u_arrmul32_fa7_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa7_14_or0 = 0;
|
|
uint8_t u_arrmul32_and8_14 = 0;
|
|
uint8_t u_arrmul32_fa8_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa8_14_or0 = 0;
|
|
uint8_t u_arrmul32_and9_14 = 0;
|
|
uint8_t u_arrmul32_fa9_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa9_14_or0 = 0;
|
|
uint8_t u_arrmul32_and10_14 = 0;
|
|
uint8_t u_arrmul32_fa10_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa10_14_or0 = 0;
|
|
uint8_t u_arrmul32_and11_14 = 0;
|
|
uint8_t u_arrmul32_fa11_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa11_14_or0 = 0;
|
|
uint8_t u_arrmul32_and12_14 = 0;
|
|
uint8_t u_arrmul32_fa12_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa12_14_or0 = 0;
|
|
uint8_t u_arrmul32_and13_14 = 0;
|
|
uint8_t u_arrmul32_fa13_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa13_14_or0 = 0;
|
|
uint8_t u_arrmul32_and14_14 = 0;
|
|
uint8_t u_arrmul32_fa14_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa14_14_or0 = 0;
|
|
uint8_t u_arrmul32_and15_14 = 0;
|
|
uint8_t u_arrmul32_fa15_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa15_14_or0 = 0;
|
|
uint8_t u_arrmul32_and16_14 = 0;
|
|
uint8_t u_arrmul32_fa16_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa16_14_or0 = 0;
|
|
uint8_t u_arrmul32_and17_14 = 0;
|
|
uint8_t u_arrmul32_fa17_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa17_14_or0 = 0;
|
|
uint8_t u_arrmul32_and18_14 = 0;
|
|
uint8_t u_arrmul32_fa18_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa18_14_or0 = 0;
|
|
uint8_t u_arrmul32_and19_14 = 0;
|
|
uint8_t u_arrmul32_fa19_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa19_14_or0 = 0;
|
|
uint8_t u_arrmul32_and20_14 = 0;
|
|
uint8_t u_arrmul32_fa20_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa20_14_or0 = 0;
|
|
uint8_t u_arrmul32_and21_14 = 0;
|
|
uint8_t u_arrmul32_fa21_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa21_14_or0 = 0;
|
|
uint8_t u_arrmul32_and22_14 = 0;
|
|
uint8_t u_arrmul32_fa22_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa22_14_or0 = 0;
|
|
uint8_t u_arrmul32_and23_14 = 0;
|
|
uint8_t u_arrmul32_fa23_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa23_14_or0 = 0;
|
|
uint8_t u_arrmul32_and24_14 = 0;
|
|
uint8_t u_arrmul32_fa24_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa24_14_or0 = 0;
|
|
uint8_t u_arrmul32_and25_14 = 0;
|
|
uint8_t u_arrmul32_fa25_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa25_14_or0 = 0;
|
|
uint8_t u_arrmul32_and26_14 = 0;
|
|
uint8_t u_arrmul32_fa26_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa26_14_or0 = 0;
|
|
uint8_t u_arrmul32_and27_14 = 0;
|
|
uint8_t u_arrmul32_fa27_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa27_14_or0 = 0;
|
|
uint8_t u_arrmul32_and28_14 = 0;
|
|
uint8_t u_arrmul32_fa28_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa28_14_or0 = 0;
|
|
uint8_t u_arrmul32_and29_14 = 0;
|
|
uint8_t u_arrmul32_fa29_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa29_14_or0 = 0;
|
|
uint8_t u_arrmul32_and30_14 = 0;
|
|
uint8_t u_arrmul32_fa30_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa30_14_or0 = 0;
|
|
uint8_t u_arrmul32_and31_14 = 0;
|
|
uint8_t u_arrmul32_fa31_14_xor1 = 0;
|
|
uint8_t u_arrmul32_fa31_14_or0 = 0;
|
|
uint8_t u_arrmul32_and0_15 = 0;
uint8_t u_arrmul32_ha0_15_xor0 = 0;
uint8_t u_arrmul32_ha0_15_and0 = 0;
uint8_t u_arrmul32_and1_15 = 0;
uint8_t u_arrmul32_fa1_15_xor1 = 0;
uint8_t u_arrmul32_fa1_15_or0 = 0;
uint8_t u_arrmul32_and2_15 = 0;
uint8_t u_arrmul32_fa2_15_xor1 = 0;
uint8_t u_arrmul32_fa2_15_or0 = 0;
uint8_t u_arrmul32_and3_15 = 0;
uint8_t u_arrmul32_fa3_15_xor1 = 0;
uint8_t u_arrmul32_fa3_15_or0 = 0;
uint8_t u_arrmul32_and4_15 = 0;
uint8_t u_arrmul32_fa4_15_xor1 = 0;
uint8_t u_arrmul32_fa4_15_or0 = 0;
uint8_t u_arrmul32_and5_15 = 0;
uint8_t u_arrmul32_fa5_15_xor1 = 0;
uint8_t u_arrmul32_fa5_15_or0 = 0;
uint8_t u_arrmul32_and6_15 = 0;
uint8_t u_arrmul32_fa6_15_xor1 = 0;
uint8_t u_arrmul32_fa6_15_or0 = 0;
uint8_t u_arrmul32_and7_15 = 0;
uint8_t u_arrmul32_fa7_15_xor1 = 0;
uint8_t u_arrmul32_fa7_15_or0 = 0;
uint8_t u_arrmul32_and8_15 = 0;
uint8_t u_arrmul32_fa8_15_xor1 = 0;
uint8_t u_arrmul32_fa8_15_or0 = 0;
uint8_t u_arrmul32_and9_15 = 0;
uint8_t u_arrmul32_fa9_15_xor1 = 0;
uint8_t u_arrmul32_fa9_15_or0 = 0;
uint8_t u_arrmul32_and10_15 = 0;
uint8_t u_arrmul32_fa10_15_xor1 = 0;
uint8_t u_arrmul32_fa10_15_or0 = 0;
uint8_t u_arrmul32_and11_15 = 0;
uint8_t u_arrmul32_fa11_15_xor1 = 0;
uint8_t u_arrmul32_fa11_15_or0 = 0;
uint8_t u_arrmul32_and12_15 = 0;
uint8_t u_arrmul32_fa12_15_xor1 = 0;
uint8_t u_arrmul32_fa12_15_or0 = 0;
uint8_t u_arrmul32_and13_15 = 0;
uint8_t u_arrmul32_fa13_15_xor1 = 0;
uint8_t u_arrmul32_fa13_15_or0 = 0;
uint8_t u_arrmul32_and14_15 = 0;
uint8_t u_arrmul32_fa14_15_xor1 = 0;
uint8_t u_arrmul32_fa14_15_or0 = 0;
uint8_t u_arrmul32_and15_15 = 0;
uint8_t u_arrmul32_fa15_15_xor1 = 0;
uint8_t u_arrmul32_fa15_15_or0 = 0;
uint8_t u_arrmul32_and16_15 = 0;
uint8_t u_arrmul32_fa16_15_xor1 = 0;
uint8_t u_arrmul32_fa16_15_or0 = 0;
uint8_t u_arrmul32_and17_15 = 0;
uint8_t u_arrmul32_fa17_15_xor1 = 0;
uint8_t u_arrmul32_fa17_15_or0 = 0;
uint8_t u_arrmul32_and18_15 = 0;
uint8_t u_arrmul32_fa18_15_xor1 = 0;
uint8_t u_arrmul32_fa18_15_or0 = 0;
uint8_t u_arrmul32_and19_15 = 0;
uint8_t u_arrmul32_fa19_15_xor1 = 0;
uint8_t u_arrmul32_fa19_15_or0 = 0;
uint8_t u_arrmul32_and20_15 = 0;
uint8_t u_arrmul32_fa20_15_xor1 = 0;
uint8_t u_arrmul32_fa20_15_or0 = 0;
uint8_t u_arrmul32_and21_15 = 0;
uint8_t u_arrmul32_fa21_15_xor1 = 0;
uint8_t u_arrmul32_fa21_15_or0 = 0;
uint8_t u_arrmul32_and22_15 = 0;
uint8_t u_arrmul32_fa22_15_xor1 = 0;
uint8_t u_arrmul32_fa22_15_or0 = 0;
uint8_t u_arrmul32_and23_15 = 0;
uint8_t u_arrmul32_fa23_15_xor1 = 0;
uint8_t u_arrmul32_fa23_15_or0 = 0;
uint8_t u_arrmul32_and24_15 = 0;
uint8_t u_arrmul32_fa24_15_xor1 = 0;
uint8_t u_arrmul32_fa24_15_or0 = 0;
uint8_t u_arrmul32_and25_15 = 0;
uint8_t u_arrmul32_fa25_15_xor1 = 0;
uint8_t u_arrmul32_fa25_15_or0 = 0;
uint8_t u_arrmul32_and26_15 = 0;
uint8_t u_arrmul32_fa26_15_xor1 = 0;
uint8_t u_arrmul32_fa26_15_or0 = 0;
uint8_t u_arrmul32_and27_15 = 0;
uint8_t u_arrmul32_fa27_15_xor1 = 0;
uint8_t u_arrmul32_fa27_15_or0 = 0;
uint8_t u_arrmul32_and28_15 = 0;
uint8_t u_arrmul32_fa28_15_xor1 = 0;
uint8_t u_arrmul32_fa28_15_or0 = 0;
uint8_t u_arrmul32_and29_15 = 0;
uint8_t u_arrmul32_fa29_15_xor1 = 0;
uint8_t u_arrmul32_fa29_15_or0 = 0;
uint8_t u_arrmul32_and30_15 = 0;
uint8_t u_arrmul32_fa30_15_xor1 = 0;
uint8_t u_arrmul32_fa30_15_or0 = 0;
uint8_t u_arrmul32_and31_15 = 0;
uint8_t u_arrmul32_fa31_15_xor1 = 0;
uint8_t u_arrmul32_fa31_15_or0 = 0;
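/* row 16 wire declarations */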
uint8_t u_arrmul32_and0_16 = 0;
uint8_t u_arrmul32_ha0_16_xor0 = 0;
uint8_t u_arrmul32_ha0_16_and0 = 0;
uint8_t u_arrmul32_and1_16 = 0;
uint8_t u_arrmul32_fa1_16_xor1 = 0;
uint8_t u_arrmul32_fa1_16_or0 = 0;
uint8_t u_arrmul32_and2_16 = 0;
uint8_t u_arrmul32_fa2_16_xor1 = 0;
uint8_t u_arrmul32_fa2_16_or0 = 0;
uint8_t u_arrmul32_and3_16 = 0;
uint8_t u_arrmul32_fa3_16_xor1 = 0;
uint8_t u_arrmul32_fa3_16_or0 = 0;
uint8_t u_arrmul32_and4_16 = 0;
uint8_t u_arrmul32_fa4_16_xor1 = 0;
uint8_t u_arrmul32_fa4_16_or0 = 0;
uint8_t u_arrmul32_and5_16 = 0;
uint8_t u_arrmul32_fa5_16_xor1 = 0;
uint8_t u_arrmul32_fa5_16_or0 = 0;
uint8_t u_arrmul32_and6_16 = 0;
uint8_t u_arrmul32_fa6_16_xor1 = 0;
uint8_t u_arrmul32_fa6_16_or0 = 0;
uint8_t u_arrmul32_and7_16 = 0;
uint8_t u_arrmul32_fa7_16_xor1 = 0;
uint8_t u_arrmul32_fa7_16_or0 = 0;
uint8_t u_arrmul32_and8_16 = 0;
uint8_t u_arrmul32_fa8_16_xor1 = 0;
uint8_t u_arrmul32_fa8_16_or0 = 0;
uint8_t u_arrmul32_and9_16 = 0;
uint8_t u_arrmul32_fa9_16_xor1 = 0;
uint8_t u_arrmul32_fa9_16_or0 = 0;
uint8_t u_arrmul32_and10_16 = 0;
uint8_t u_arrmul32_fa10_16_xor1 = 0;
uint8_t u_arrmul32_fa10_16_or0 = 0;
uint8_t u_arrmul32_and11_16 = 0;
uint8_t u_arrmul32_fa11_16_xor1 = 0;
uint8_t u_arrmul32_fa11_16_or0 = 0;
uint8_t u_arrmul32_and12_16 = 0;
uint8_t u_arrmul32_fa12_16_xor1 = 0;
uint8_t u_arrmul32_fa12_16_or0 = 0;
uint8_t u_arrmul32_and13_16 = 0;
uint8_t u_arrmul32_fa13_16_xor1 = 0;
uint8_t u_arrmul32_fa13_16_or0 = 0;
uint8_t u_arrmul32_and14_16 = 0;
uint8_t u_arrmul32_fa14_16_xor1 = 0;
uint8_t u_arrmul32_fa14_16_or0 = 0;
uint8_t u_arrmul32_and15_16 = 0;
uint8_t u_arrmul32_fa15_16_xor1 = 0;
uint8_t u_arrmul32_fa15_16_or0 = 0;
uint8_t u_arrmul32_and16_16 = 0;
uint8_t u_arrmul32_fa16_16_xor1 = 0;
uint8_t u_arrmul32_fa16_16_or0 = 0;
uint8_t u_arrmul32_and17_16 = 0;
uint8_t u_arrmul32_fa17_16_xor1 = 0;
uint8_t u_arrmul32_fa17_16_or0 = 0;
uint8_t u_arrmul32_and18_16 = 0;
uint8_t u_arrmul32_fa18_16_xor1 = 0;
uint8_t u_arrmul32_fa18_16_or0 = 0;
uint8_t u_arrmul32_and19_16 = 0;
uint8_t u_arrmul32_fa19_16_xor1 = 0;
uint8_t u_arrmul32_fa19_16_or0 = 0;
uint8_t u_arrmul32_and20_16 = 0;
uint8_t u_arrmul32_fa20_16_xor1 = 0;
uint8_t u_arrmul32_fa20_16_or0 = 0;
uint8_t u_arrmul32_and21_16 = 0;
uint8_t u_arrmul32_fa21_16_xor1 = 0;
uint8_t u_arrmul32_fa21_16_or0 = 0;
uint8_t u_arrmul32_and22_16 = 0;
uint8_t u_arrmul32_fa22_16_xor1 = 0;
uint8_t u_arrmul32_fa22_16_or0 = 0;
uint8_t u_arrmul32_and23_16 = 0;
uint8_t u_arrmul32_fa23_16_xor1 = 0;
uint8_t u_arrmul32_fa23_16_or0 = 0;
uint8_t u_arrmul32_and24_16 = 0;
uint8_t u_arrmul32_fa24_16_xor1 = 0;
uint8_t u_arrmul32_fa24_16_or0 = 0;
uint8_t u_arrmul32_and25_16 = 0;
uint8_t u_arrmul32_fa25_16_xor1 = 0;
uint8_t u_arrmul32_fa25_16_or0 = 0;
uint8_t u_arrmul32_and26_16 = 0;
uint8_t u_arrmul32_fa26_16_xor1 = 0;
uint8_t u_arrmul32_fa26_16_or0 = 0;
uint8_t u_arrmul32_and27_16 = 0;
uint8_t u_arrmul32_fa27_16_xor1 = 0;
uint8_t u_arrmul32_fa27_16_or0 = 0;
uint8_t u_arrmul32_and28_16 = 0;
uint8_t u_arrmul32_fa28_16_xor1 = 0;
uint8_t u_arrmul32_fa28_16_or0 = 0;
uint8_t u_arrmul32_and29_16 = 0;
uint8_t u_arrmul32_fa29_16_xor1 = 0;
uint8_t u_arrmul32_fa29_16_or0 = 0;
uint8_t u_arrmul32_and30_16 = 0;
uint8_t u_arrmul32_fa30_16_xor1 = 0;
uint8_t u_arrmul32_fa30_16_or0 = 0;
uint8_t u_arrmul32_and31_16 = 0;
uint8_t u_arrmul32_fa31_16_xor1 = 0;
uint8_t u_arrmul32_fa31_16_or0 = 0;
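/* row 17 wire declarations */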
uint8_t u_arrmul32_and0_17 = 0;
uint8_t u_arrmul32_ha0_17_xor0 = 0;
uint8_t u_arrmul32_ha0_17_and0 = 0;
uint8_t u_arrmul32_and1_17 = 0;
uint8_t u_arrmul32_fa1_17_xor1 = 0;
uint8_t u_arrmul32_fa1_17_or0 = 0;
uint8_t u_arrmul32_and2_17 = 0;
uint8_t u_arrmul32_fa2_17_xor1 = 0;
uint8_t u_arrmul32_fa2_17_or0 = 0;
uint8_t u_arrmul32_and3_17 = 0;
uint8_t u_arrmul32_fa3_17_xor1 = 0;
uint8_t u_arrmul32_fa3_17_or0 = 0;
uint8_t u_arrmul32_and4_17 = 0;
uint8_t u_arrmul32_fa4_17_xor1 = 0;
uint8_t u_arrmul32_fa4_17_or0 = 0;
uint8_t u_arrmul32_and5_17 = 0;
uint8_t u_arrmul32_fa5_17_xor1 = 0;
uint8_t u_arrmul32_fa5_17_or0 = 0;
uint8_t u_arrmul32_and6_17 = 0;
uint8_t u_arrmul32_fa6_17_xor1 = 0;
uint8_t u_arrmul32_fa6_17_or0 = 0;
uint8_t u_arrmul32_and7_17 = 0;
uint8_t u_arrmul32_fa7_17_xor1 = 0;
uint8_t u_arrmul32_fa7_17_or0 = 0;
uint8_t u_arrmul32_and8_17 = 0;
uint8_t u_arrmul32_fa8_17_xor1 = 0;
uint8_t u_arrmul32_fa8_17_or0 = 0;
uint8_t u_arrmul32_and9_17 = 0;
uint8_t u_arrmul32_fa9_17_xor1 = 0;
uint8_t u_arrmul32_fa9_17_or0 = 0;
uint8_t u_arrmul32_and10_17 = 0;
uint8_t u_arrmul32_fa10_17_xor1 = 0;
uint8_t u_arrmul32_fa10_17_or0 = 0;
uint8_t u_arrmul32_and11_17 = 0;
uint8_t u_arrmul32_fa11_17_xor1 = 0;
uint8_t u_arrmul32_fa11_17_or0 = 0;
uint8_t u_arrmul32_and12_17 = 0;
uint8_t u_arrmul32_fa12_17_xor1 = 0;
uint8_t u_arrmul32_fa12_17_or0 = 0;
uint8_t u_arrmul32_and13_17 = 0;
uint8_t u_arrmul32_fa13_17_xor1 = 0;
uint8_t u_arrmul32_fa13_17_or0 = 0;
uint8_t u_arrmul32_and14_17 = 0;
uint8_t u_arrmul32_fa14_17_xor1 = 0;
uint8_t u_arrmul32_fa14_17_or0 = 0;
uint8_t u_arrmul32_and15_17 = 0;
uint8_t u_arrmul32_fa15_17_xor1 = 0;
uint8_t u_arrmul32_fa15_17_or0 = 0;
uint8_t u_arrmul32_and16_17 = 0;
uint8_t u_arrmul32_fa16_17_xor1 = 0;
uint8_t u_arrmul32_fa16_17_or0 = 0;
uint8_t u_arrmul32_and17_17 = 0;
uint8_t u_arrmul32_fa17_17_xor1 = 0;
uint8_t u_arrmul32_fa17_17_or0 = 0;
uint8_t u_arrmul32_and18_17 = 0;
uint8_t u_arrmul32_fa18_17_xor1 = 0;
uint8_t u_arrmul32_fa18_17_or0 = 0;
uint8_t u_arrmul32_and19_17 = 0;
uint8_t u_arrmul32_fa19_17_xor1 = 0;
uint8_t u_arrmul32_fa19_17_or0 = 0;
uint8_t u_arrmul32_and20_17 = 0;
uint8_t u_arrmul32_fa20_17_xor1 = 0;
uint8_t u_arrmul32_fa20_17_or0 = 0;
uint8_t u_arrmul32_and21_17 = 0;
uint8_t u_arrmul32_fa21_17_xor1 = 0;
uint8_t u_arrmul32_fa21_17_or0 = 0;
uint8_t u_arrmul32_and22_17 = 0;
uint8_t u_arrmul32_fa22_17_xor1 = 0;
uint8_t u_arrmul32_fa22_17_or0 = 0;
uint8_t u_arrmul32_and23_17 = 0;
uint8_t u_arrmul32_fa23_17_xor1 = 0;
uint8_t u_arrmul32_fa23_17_or0 = 0;
uint8_t u_arrmul32_and24_17 = 0;
uint8_t u_arrmul32_fa24_17_xor1 = 0;
uint8_t u_arrmul32_fa24_17_or0 = 0;
uint8_t u_arrmul32_and25_17 = 0;
uint8_t u_arrmul32_fa25_17_xor1 = 0;
uint8_t u_arrmul32_fa25_17_or0 = 0;
uint8_t u_arrmul32_and26_17 = 0;
uint8_t u_arrmul32_fa26_17_xor1 = 0;
uint8_t u_arrmul32_fa26_17_or0 = 0;
uint8_t u_arrmul32_and27_17 = 0;
uint8_t u_arrmul32_fa27_17_xor1 = 0;
uint8_t u_arrmul32_fa27_17_or0 = 0;
uint8_t u_arrmul32_and28_17 = 0;
uint8_t u_arrmul32_fa28_17_xor1 = 0;
uint8_t u_arrmul32_fa28_17_or0 = 0;
uint8_t u_arrmul32_and29_17 = 0;
uint8_t u_arrmul32_fa29_17_xor1 = 0;
uint8_t u_arrmul32_fa29_17_or0 = 0;
uint8_t u_arrmul32_and30_17 = 0;
uint8_t u_arrmul32_fa30_17_xor1 = 0;
uint8_t u_arrmul32_fa30_17_or0 = 0;
uint8_t u_arrmul32_and31_17 = 0;
uint8_t u_arrmul32_fa31_17_xor1 = 0;
uint8_t u_arrmul32_fa31_17_or0 = 0;
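/* row 18 wire declarations */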
uint8_t u_arrmul32_and0_18 = 0;
uint8_t u_arrmul32_ha0_18_xor0 = 0;
uint8_t u_arrmul32_ha0_18_and0 = 0;
uint8_t u_arrmul32_and1_18 = 0;
uint8_t u_arrmul32_fa1_18_xor1 = 0;
uint8_t u_arrmul32_fa1_18_or0 = 0;
uint8_t u_arrmul32_and2_18 = 0;
uint8_t u_arrmul32_fa2_18_xor1 = 0;
uint8_t u_arrmul32_fa2_18_or0 = 0;
uint8_t u_arrmul32_and3_18 = 0;
uint8_t u_arrmul32_fa3_18_xor1 = 0;
uint8_t u_arrmul32_fa3_18_or0 = 0;
uint8_t u_arrmul32_and4_18 = 0;
uint8_t u_arrmul32_fa4_18_xor1 = 0;
uint8_t u_arrmul32_fa4_18_or0 = 0;
uint8_t u_arrmul32_and5_18 = 0;
uint8_t u_arrmul32_fa5_18_xor1 = 0;
uint8_t u_arrmul32_fa5_18_or0 = 0;
uint8_t u_arrmul32_and6_18 = 0;
uint8_t u_arrmul32_fa6_18_xor1 = 0;
uint8_t u_arrmul32_fa6_18_or0 = 0;
uint8_t u_arrmul32_and7_18 = 0;
uint8_t u_arrmul32_fa7_18_xor1 = 0;
uint8_t u_arrmul32_fa7_18_or0 = 0;
uint8_t u_arrmul32_and8_18 = 0;
uint8_t u_arrmul32_fa8_18_xor1 = 0;
uint8_t u_arrmul32_fa8_18_or0 = 0;
uint8_t u_arrmul32_and9_18 = 0;
uint8_t u_arrmul32_fa9_18_xor1 = 0;
uint8_t u_arrmul32_fa9_18_or0 = 0;
uint8_t u_arrmul32_and10_18 = 0;
uint8_t u_arrmul32_fa10_18_xor1 = 0;
uint8_t u_arrmul32_fa10_18_or0 = 0;
uint8_t u_arrmul32_and11_18 = 0;
uint8_t u_arrmul32_fa11_18_xor1 = 0;
uint8_t u_arrmul32_fa11_18_or0 = 0;
uint8_t u_arrmul32_and12_18 = 0;
uint8_t u_arrmul32_fa12_18_xor1 = 0;
uint8_t u_arrmul32_fa12_18_or0 = 0;
uint8_t u_arrmul32_and13_18 = 0;
uint8_t u_arrmul32_fa13_18_xor1 = 0;
uint8_t u_arrmul32_fa13_18_or0 = 0;
uint8_t u_arrmul32_and14_18 = 0;
uint8_t u_arrmul32_fa14_18_xor1 = 0;
uint8_t u_arrmul32_fa14_18_or0 = 0;
uint8_t u_arrmul32_and15_18 = 0;
uint8_t u_arrmul32_fa15_18_xor1 = 0;
uint8_t u_arrmul32_fa15_18_or0 = 0;
uint8_t u_arrmul32_and16_18 = 0;
uint8_t u_arrmul32_fa16_18_xor1 = 0;
uint8_t u_arrmul32_fa16_18_or0 = 0;
uint8_t u_arrmul32_and17_18 = 0;
uint8_t u_arrmul32_fa17_18_xor1 = 0;
uint8_t u_arrmul32_fa17_18_or0 = 0;
uint8_t u_arrmul32_and18_18 = 0;
uint8_t u_arrmul32_fa18_18_xor1 = 0;
uint8_t u_arrmul32_fa18_18_or0 = 0;
uint8_t u_arrmul32_and19_18 = 0;
uint8_t u_arrmul32_fa19_18_xor1 = 0;
uint8_t u_arrmul32_fa19_18_or0 = 0;
uint8_t u_arrmul32_and20_18 = 0;
uint8_t u_arrmul32_fa20_18_xor1 = 0;
uint8_t u_arrmul32_fa20_18_or0 = 0;
uint8_t u_arrmul32_and21_18 = 0;
uint8_t u_arrmul32_fa21_18_xor1 = 0;
uint8_t u_arrmul32_fa21_18_or0 = 0;
uint8_t u_arrmul32_and22_18 = 0;
uint8_t u_arrmul32_fa22_18_xor1 = 0;
uint8_t u_arrmul32_fa22_18_or0 = 0;
uint8_t u_arrmul32_and23_18 = 0;
uint8_t u_arrmul32_fa23_18_xor1 = 0;
uint8_t u_arrmul32_fa23_18_or0 = 0;
uint8_t u_arrmul32_and24_18 = 0;
uint8_t u_arrmul32_fa24_18_xor1 = 0;
uint8_t u_arrmul32_fa24_18_or0 = 0;
uint8_t u_arrmul32_and25_18 = 0;
uint8_t u_arrmul32_fa25_18_xor1 = 0;
uint8_t u_arrmul32_fa25_18_or0 = 0;
uint8_t u_arrmul32_and26_18 = 0;
uint8_t u_arrmul32_fa26_18_xor1 = 0;
uint8_t u_arrmul32_fa26_18_or0 = 0;
uint8_t u_arrmul32_and27_18 = 0;
uint8_t u_arrmul32_fa27_18_xor1 = 0;
uint8_t u_arrmul32_fa27_18_or0 = 0;
uint8_t u_arrmul32_and28_18 = 0;
uint8_t u_arrmul32_fa28_18_xor1 = 0;
uint8_t u_arrmul32_fa28_18_or0 = 0;
uint8_t u_arrmul32_and29_18 = 0;
uint8_t u_arrmul32_fa29_18_xor1 = 0;
uint8_t u_arrmul32_fa29_18_or0 = 0;
uint8_t u_arrmul32_and30_18 = 0;
uint8_t u_arrmul32_fa30_18_xor1 = 0;
uint8_t u_arrmul32_fa30_18_or0 = 0;
uint8_t u_arrmul32_and31_18 = 0;
uint8_t u_arrmul32_fa31_18_xor1 = 0;
uint8_t u_arrmul32_fa31_18_or0 = 0;
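/* row 19 wire declarations */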
uint8_t u_arrmul32_and0_19 = 0;
uint8_t u_arrmul32_ha0_19_xor0 = 0;
uint8_t u_arrmul32_ha0_19_and0 = 0;
uint8_t u_arrmul32_and1_19 = 0;
uint8_t u_arrmul32_fa1_19_xor1 = 0;
uint8_t u_arrmul32_fa1_19_or0 = 0;
uint8_t u_arrmul32_and2_19 = 0;
uint8_t u_arrmul32_fa2_19_xor1 = 0;
uint8_t u_arrmul32_fa2_19_or0 = 0;
uint8_t u_arrmul32_and3_19 = 0;
uint8_t u_arrmul32_fa3_19_xor1 = 0;
uint8_t u_arrmul32_fa3_19_or0 = 0;
uint8_t u_arrmul32_and4_19 = 0;
uint8_t u_arrmul32_fa4_19_xor1 = 0;
uint8_t u_arrmul32_fa4_19_or0 = 0;
uint8_t u_arrmul32_and5_19 = 0;
uint8_t u_arrmul32_fa5_19_xor1 = 0;
uint8_t u_arrmul32_fa5_19_or0 = 0;
uint8_t u_arrmul32_and6_19 = 0;
uint8_t u_arrmul32_fa6_19_xor1 = 0;
uint8_t u_arrmul32_fa6_19_or0 = 0;
uint8_t u_arrmul32_and7_19 = 0;
uint8_t u_arrmul32_fa7_19_xor1 = 0;
uint8_t u_arrmul32_fa7_19_or0 = 0;
uint8_t u_arrmul32_and8_19 = 0;
uint8_t u_arrmul32_fa8_19_xor1 = 0;
uint8_t u_arrmul32_fa8_19_or0 = 0;
uint8_t u_arrmul32_and9_19 = 0;
uint8_t u_arrmul32_fa9_19_xor1 = 0;
uint8_t u_arrmul32_fa9_19_or0 = 0;
uint8_t u_arrmul32_and10_19 = 0;
uint8_t u_arrmul32_fa10_19_xor1 = 0;
uint8_t u_arrmul32_fa10_19_or0 = 0;
uint8_t u_arrmul32_and11_19 = 0;
uint8_t u_arrmul32_fa11_19_xor1 = 0;
uint8_t u_arrmul32_fa11_19_or0 = 0;
uint8_t u_arrmul32_and12_19 = 0;
uint8_t u_arrmul32_fa12_19_xor1 = 0;
uint8_t u_arrmul32_fa12_19_or0 = 0;
uint8_t u_arrmul32_and13_19 = 0;
uint8_t u_arrmul32_fa13_19_xor1 = 0;
uint8_t u_arrmul32_fa13_19_or0 = 0;
uint8_t u_arrmul32_and14_19 = 0;
uint8_t u_arrmul32_fa14_19_xor1 = 0;
uint8_t u_arrmul32_fa14_19_or0 = 0;
uint8_t u_arrmul32_and15_19 = 0;
uint8_t u_arrmul32_fa15_19_xor1 = 0;
uint8_t u_arrmul32_fa15_19_or0 = 0;
uint8_t u_arrmul32_and16_19 = 0;
uint8_t u_arrmul32_fa16_19_xor1 = 0;
uint8_t u_arrmul32_fa16_19_or0 = 0;
uint8_t u_arrmul32_and17_19 = 0;
uint8_t u_arrmul32_fa17_19_xor1 = 0;
uint8_t u_arrmul32_fa17_19_or0 = 0;
uint8_t u_arrmul32_and18_19 = 0;
uint8_t u_arrmul32_fa18_19_xor1 = 0;
uint8_t u_arrmul32_fa18_19_or0 = 0;
uint8_t u_arrmul32_and19_19 = 0;
uint8_t u_arrmul32_fa19_19_xor1 = 0;
uint8_t u_arrmul32_fa19_19_or0 = 0;
uint8_t u_arrmul32_and20_19 = 0;
uint8_t u_arrmul32_fa20_19_xor1 = 0;
uint8_t u_arrmul32_fa20_19_or0 = 0;
uint8_t u_arrmul32_and21_19 = 0;
uint8_t u_arrmul32_fa21_19_xor1 = 0;
uint8_t u_arrmul32_fa21_19_or0 = 0;
uint8_t u_arrmul32_and22_19 = 0;
uint8_t u_arrmul32_fa22_19_xor1 = 0;
uint8_t u_arrmul32_fa22_19_or0 = 0;
uint8_t u_arrmul32_and23_19 = 0;
uint8_t u_arrmul32_fa23_19_xor1 = 0;
uint8_t u_arrmul32_fa23_19_or0 = 0;
uint8_t u_arrmul32_and24_19 = 0;
uint8_t u_arrmul32_fa24_19_xor1 = 0;
uint8_t u_arrmul32_fa24_19_or0 = 0;
uint8_t u_arrmul32_and25_19 = 0;
uint8_t u_arrmul32_fa25_19_xor1 = 0;
uint8_t u_arrmul32_fa25_19_or0 = 0;
uint8_t u_arrmul32_and26_19 = 0;
uint8_t u_arrmul32_fa26_19_xor1 = 0;
uint8_t u_arrmul32_fa26_19_or0 = 0;
uint8_t u_arrmul32_and27_19 = 0;
uint8_t u_arrmul32_fa27_19_xor1 = 0;
uint8_t u_arrmul32_fa27_19_or0 = 0;
uint8_t u_arrmul32_and28_19 = 0;
uint8_t u_arrmul32_fa28_19_xor1 = 0;
uint8_t u_arrmul32_fa28_19_or0 = 0;
uint8_t u_arrmul32_and29_19 = 0;
uint8_t u_arrmul32_fa29_19_xor1 = 0;
uint8_t u_arrmul32_fa29_19_or0 = 0;
uint8_t u_arrmul32_and30_19 = 0;
uint8_t u_arrmul32_fa30_19_xor1 = 0;
uint8_t u_arrmul32_fa30_19_or0 = 0;
uint8_t u_arrmul32_and31_19 = 0;
uint8_t u_arrmul32_fa31_19_xor1 = 0;
uint8_t u_arrmul32_fa31_19_or0 = 0;
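/* row 20 wire declarations */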
uint8_t u_arrmul32_and0_20 = 0;
uint8_t u_arrmul32_ha0_20_xor0 = 0;
uint8_t u_arrmul32_ha0_20_and0 = 0;
uint8_t u_arrmul32_and1_20 = 0;
uint8_t u_arrmul32_fa1_20_xor1 = 0;
uint8_t u_arrmul32_fa1_20_or0 = 0;
uint8_t u_arrmul32_and2_20 = 0;
uint8_t u_arrmul32_fa2_20_xor1 = 0;
uint8_t u_arrmul32_fa2_20_or0 = 0;
uint8_t u_arrmul32_and3_20 = 0;
uint8_t u_arrmul32_fa3_20_xor1 = 0;
uint8_t u_arrmul32_fa3_20_or0 = 0;
uint8_t u_arrmul32_and4_20 = 0;
uint8_t u_arrmul32_fa4_20_xor1 = 0;
uint8_t u_arrmul32_fa4_20_or0 = 0;
uint8_t u_arrmul32_and5_20 = 0;
uint8_t u_arrmul32_fa5_20_xor1 = 0;
uint8_t u_arrmul32_fa5_20_or0 = 0;
uint8_t u_arrmul32_and6_20 = 0;
uint8_t u_arrmul32_fa6_20_xor1 = 0;
uint8_t u_arrmul32_fa6_20_or0 = 0;
uint8_t u_arrmul32_and7_20 = 0;
uint8_t u_arrmul32_fa7_20_xor1 = 0;
uint8_t u_arrmul32_fa7_20_or0 = 0;
uint8_t u_arrmul32_and8_20 = 0;
uint8_t u_arrmul32_fa8_20_xor1 = 0;
uint8_t u_arrmul32_fa8_20_or0 = 0;
uint8_t u_arrmul32_and9_20 = 0;
uint8_t u_arrmul32_fa9_20_xor1 = 0;
uint8_t u_arrmul32_fa9_20_or0 = 0;
uint8_t u_arrmul32_and10_20 = 0;
uint8_t u_arrmul32_fa10_20_xor1 = 0;
uint8_t u_arrmul32_fa10_20_or0 = 0;
uint8_t u_arrmul32_and11_20 = 0;
uint8_t u_arrmul32_fa11_20_xor1 = 0;
uint8_t u_arrmul32_fa11_20_or0 = 0;
uint8_t u_arrmul32_and12_20 = 0;
uint8_t u_arrmul32_fa12_20_xor1 = 0;
uint8_t u_arrmul32_fa12_20_or0 = 0;
uint8_t u_arrmul32_and13_20 = 0;
uint8_t u_arrmul32_fa13_20_xor1 = 0;
uint8_t u_arrmul32_fa13_20_or0 = 0;
uint8_t u_arrmul32_and14_20 = 0;
uint8_t u_arrmul32_fa14_20_xor1 = 0;
uint8_t u_arrmul32_fa14_20_or0 = 0;
uint8_t u_arrmul32_and15_20 = 0;
uint8_t u_arrmul32_fa15_20_xor1 = 0;
uint8_t u_arrmul32_fa15_20_or0 = 0;
uint8_t u_arrmul32_and16_20 = 0;
uint8_t u_arrmul32_fa16_20_xor1 = 0;
uint8_t u_arrmul32_fa16_20_or0 = 0;
uint8_t u_arrmul32_and17_20 = 0;
uint8_t u_arrmul32_fa17_20_xor1 = 0;
uint8_t u_arrmul32_fa17_20_or0 = 0;
uint8_t u_arrmul32_and18_20 = 0;
uint8_t u_arrmul32_fa18_20_xor1 = 0;
uint8_t u_arrmul32_fa18_20_or0 = 0;
uint8_t u_arrmul32_and19_20 = 0;
uint8_t u_arrmul32_fa19_20_xor1 = 0;
uint8_t u_arrmul32_fa19_20_or0 = 0;
uint8_t u_arrmul32_and20_20 = 0;
uint8_t u_arrmul32_fa20_20_xor1 = 0;
uint8_t u_arrmul32_fa20_20_or0 = 0;
uint8_t u_arrmul32_and21_20 = 0;
uint8_t u_arrmul32_fa21_20_xor1 = 0;
uint8_t u_arrmul32_fa21_20_or0 = 0;
uint8_t u_arrmul32_and22_20 = 0;
uint8_t u_arrmul32_fa22_20_xor1 = 0;
uint8_t u_arrmul32_fa22_20_or0 = 0;
uint8_t u_arrmul32_and23_20 = 0;
uint8_t u_arrmul32_fa23_20_xor1 = 0;
uint8_t u_arrmul32_fa23_20_or0 = 0;
uint8_t u_arrmul32_and24_20 = 0;
uint8_t u_arrmul32_fa24_20_xor1 = 0;
uint8_t u_arrmul32_fa24_20_or0 = 0;
uint8_t u_arrmul32_and25_20 = 0;
uint8_t u_arrmul32_fa25_20_xor1 = 0;
uint8_t u_arrmul32_fa25_20_or0 = 0;
uint8_t u_arrmul32_and26_20 = 0;
uint8_t u_arrmul32_fa26_20_xor1 = 0;
uint8_t u_arrmul32_fa26_20_or0 = 0;
uint8_t u_arrmul32_and27_20 = 0;
uint8_t u_arrmul32_fa27_20_xor1 = 0;
uint8_t u_arrmul32_fa27_20_or0 = 0;
uint8_t u_arrmul32_and28_20 = 0;
uint8_t u_arrmul32_fa28_20_xor1 = 0;
uint8_t u_arrmul32_fa28_20_or0 = 0;
uint8_t u_arrmul32_and29_20 = 0;
uint8_t u_arrmul32_fa29_20_xor1 = 0;
uint8_t u_arrmul32_fa29_20_or0 = 0;
uint8_t u_arrmul32_and30_20 = 0;
uint8_t u_arrmul32_fa30_20_xor1 = 0;
uint8_t u_arrmul32_fa30_20_or0 = 0;
uint8_t u_arrmul32_and31_20 = 0;
uint8_t u_arrmul32_fa31_20_xor1 = 0;
uint8_t u_arrmul32_fa31_20_or0 = 0;
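/* row 21 wire declarations */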
uint8_t u_arrmul32_and0_21 = 0;
uint8_t u_arrmul32_ha0_21_xor0 = 0;
uint8_t u_arrmul32_ha0_21_and0 = 0;
uint8_t u_arrmul32_and1_21 = 0;
uint8_t u_arrmul32_fa1_21_xor1 = 0;
uint8_t u_arrmul32_fa1_21_or0 = 0;
uint8_t u_arrmul32_and2_21 = 0;
uint8_t u_arrmul32_fa2_21_xor1 = 0;
uint8_t u_arrmul32_fa2_21_or0 = 0;
uint8_t u_arrmul32_and3_21 = 0;
uint8_t u_arrmul32_fa3_21_xor1 = 0;
uint8_t u_arrmul32_fa3_21_or0 = 0;
uint8_t u_arrmul32_and4_21 = 0;
uint8_t u_arrmul32_fa4_21_xor1 = 0;
uint8_t u_arrmul32_fa4_21_or0 = 0;
uint8_t u_arrmul32_and5_21 = 0;
uint8_t u_arrmul32_fa5_21_xor1 = 0;
uint8_t u_arrmul32_fa5_21_or0 = 0;
uint8_t u_arrmul32_and6_21 = 0;
uint8_t u_arrmul32_fa6_21_xor1 = 0;
uint8_t u_arrmul32_fa6_21_or0 = 0;
uint8_t u_arrmul32_and7_21 = 0;
uint8_t u_arrmul32_fa7_21_xor1 = 0;
uint8_t u_arrmul32_fa7_21_or0 = 0;
uint8_t u_arrmul32_and8_21 = 0;
uint8_t u_arrmul32_fa8_21_xor1 = 0;
uint8_t u_arrmul32_fa8_21_or0 = 0;
uint8_t u_arrmul32_and9_21 = 0;
uint8_t u_arrmul32_fa9_21_xor1 = 0;
uint8_t u_arrmul32_fa9_21_or0 = 0;
uint8_t u_arrmul32_and10_21 = 0;
uint8_t u_arrmul32_fa10_21_xor1 = 0;
uint8_t u_arrmul32_fa10_21_or0 = 0;
uint8_t u_arrmul32_and11_21 = 0;
uint8_t u_arrmul32_fa11_21_xor1 = 0;
uint8_t u_arrmul32_fa11_21_or0 = 0;
uint8_t u_arrmul32_and12_21 = 0;
uint8_t u_arrmul32_fa12_21_xor1 = 0;
uint8_t u_arrmul32_fa12_21_or0 = 0;
uint8_t u_arrmul32_and13_21 = 0;
uint8_t u_arrmul32_fa13_21_xor1 = 0;
uint8_t u_arrmul32_fa13_21_or0 = 0;
uint8_t u_arrmul32_and14_21 = 0;
uint8_t u_arrmul32_fa14_21_xor1 = 0;
uint8_t u_arrmul32_fa14_21_or0 = 0;
uint8_t u_arrmul32_and15_21 = 0;
uint8_t u_arrmul32_fa15_21_xor1 = 0;
uint8_t u_arrmul32_fa15_21_or0 = 0;
uint8_t u_arrmul32_and16_21 = 0;
uint8_t u_arrmul32_fa16_21_xor1 = 0;
uint8_t u_arrmul32_fa16_21_or0 = 0;
uint8_t u_arrmul32_and17_21 = 0;
uint8_t u_arrmul32_fa17_21_xor1 = 0;
uint8_t u_arrmul32_fa17_21_or0 = 0;
uint8_t u_arrmul32_and18_21 = 0;
uint8_t u_arrmul32_fa18_21_xor1 = 0;
uint8_t u_arrmul32_fa18_21_or0 = 0;
uint8_t u_arrmul32_and19_21 = 0;
uint8_t u_arrmul32_fa19_21_xor1 = 0;
uint8_t u_arrmul32_fa19_21_or0 = 0;
uint8_t u_arrmul32_and20_21 = 0;
uint8_t u_arrmul32_fa20_21_xor1 = 0;
uint8_t u_arrmul32_fa20_21_or0 = 0;
uint8_t u_arrmul32_and21_21 = 0;
uint8_t u_arrmul32_fa21_21_xor1 = 0;
uint8_t u_arrmul32_fa21_21_or0 = 0;
uint8_t u_arrmul32_and22_21 = 0;
uint8_t u_arrmul32_fa22_21_xor1 = 0;
uint8_t u_arrmul32_fa22_21_or0 = 0;
uint8_t u_arrmul32_and23_21 = 0;
uint8_t u_arrmul32_fa23_21_xor1 = 0;
uint8_t u_arrmul32_fa23_21_or0 = 0;
uint8_t u_arrmul32_and24_21 = 0;
uint8_t u_arrmul32_fa24_21_xor1 = 0;
uint8_t u_arrmul32_fa24_21_or0 = 0;
uint8_t u_arrmul32_and25_21 = 0;
uint8_t u_arrmul32_fa25_21_xor1 = 0;
uint8_t u_arrmul32_fa25_21_or0 = 0;
uint8_t u_arrmul32_and26_21 = 0;
uint8_t u_arrmul32_fa26_21_xor1 = 0;
uint8_t u_arrmul32_fa26_21_or0 = 0;
uint8_t u_arrmul32_and27_21 = 0;
uint8_t u_arrmul32_fa27_21_xor1 = 0;
uint8_t u_arrmul32_fa27_21_or0 = 0;
uint8_t u_arrmul32_and28_21 = 0;
uint8_t u_arrmul32_fa28_21_xor1 = 0;
uint8_t u_arrmul32_fa28_21_or0 = 0;
uint8_t u_arrmul32_and29_21 = 0;
uint8_t u_arrmul32_fa29_21_xor1 = 0;
uint8_t u_arrmul32_fa29_21_or0 = 0;
uint8_t u_arrmul32_and30_21 = 0;
uint8_t u_arrmul32_fa30_21_xor1 = 0;
uint8_t u_arrmul32_fa30_21_or0 = 0;
uint8_t u_arrmul32_and31_21 = 0;
uint8_t u_arrmul32_fa31_21_xor1 = 0;
uint8_t u_arrmul32_fa31_21_or0 = 0;
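/* row 22 wire declarations */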
uint8_t u_arrmul32_and0_22 = 0;
uint8_t u_arrmul32_ha0_22_xor0 = 0;
uint8_t u_arrmul32_ha0_22_and0 = 0;
uint8_t u_arrmul32_and1_22 = 0;
uint8_t u_arrmul32_fa1_22_xor1 = 0;
uint8_t u_arrmul32_fa1_22_or0 = 0;
uint8_t u_arrmul32_and2_22 = 0;
uint8_t u_arrmul32_fa2_22_xor1 = 0;
uint8_t u_arrmul32_fa2_22_or0 = 0;
uint8_t u_arrmul32_and3_22 = 0;
uint8_t u_arrmul32_fa3_22_xor1 = 0;
uint8_t u_arrmul32_fa3_22_or0 = 0;
uint8_t u_arrmul32_and4_22 = 0;
uint8_t u_arrmul32_fa4_22_xor1 = 0;
uint8_t u_arrmul32_fa4_22_or0 = 0;
uint8_t u_arrmul32_and5_22 = 0;
uint8_t u_arrmul32_fa5_22_xor1 = 0;
uint8_t u_arrmul32_fa5_22_or0 = 0;
uint8_t u_arrmul32_and6_22 = 0;
uint8_t u_arrmul32_fa6_22_xor1 = 0;
uint8_t u_arrmul32_fa6_22_or0 = 0;
uint8_t u_arrmul32_and7_22 = 0;
uint8_t u_arrmul32_fa7_22_xor1 = 0;
uint8_t u_arrmul32_fa7_22_or0 = 0;
uint8_t u_arrmul32_and8_22 = 0;
uint8_t u_arrmul32_fa8_22_xor1 = 0;
uint8_t u_arrmul32_fa8_22_or0 = 0;
uint8_t u_arrmul32_and9_22 = 0;
uint8_t u_arrmul32_fa9_22_xor1 = 0;
uint8_t u_arrmul32_fa9_22_or0 = 0;
uint8_t u_arrmul32_and10_22 = 0;
uint8_t u_arrmul32_fa10_22_xor1 = 0;
uint8_t u_arrmul32_fa10_22_or0 = 0;
uint8_t u_arrmul32_and11_22 = 0;
uint8_t u_arrmul32_fa11_22_xor1 = 0;
uint8_t u_arrmul32_fa11_22_or0 = 0;
uint8_t u_arrmul32_and12_22 = 0;
uint8_t u_arrmul32_fa12_22_xor1 = 0;
uint8_t u_arrmul32_fa12_22_or0 = 0;
uint8_t u_arrmul32_and13_22 = 0;
uint8_t u_arrmul32_fa13_22_xor1 = 0;
uint8_t u_arrmul32_fa13_22_or0 = 0;
uint8_t u_arrmul32_and14_22 = 0;
uint8_t u_arrmul32_fa14_22_xor1 = 0;
uint8_t u_arrmul32_fa14_22_or0 = 0;
uint8_t u_arrmul32_and15_22 = 0;
uint8_t u_arrmul32_fa15_22_xor1 = 0;
uint8_t u_arrmul32_fa15_22_or0 = 0;
uint8_t u_arrmul32_and16_22 = 0;
uint8_t u_arrmul32_fa16_22_xor1 = 0;
uint8_t u_arrmul32_fa16_22_or0 = 0;
uint8_t u_arrmul32_and17_22 = 0;
uint8_t u_arrmul32_fa17_22_xor1 = 0;
uint8_t u_arrmul32_fa17_22_or0 = 0;
uint8_t u_arrmul32_and18_22 = 0;
uint8_t u_arrmul32_fa18_22_xor1 = 0;
uint8_t u_arrmul32_fa18_22_or0 = 0;
uint8_t u_arrmul32_and19_22 = 0;
uint8_t u_arrmul32_fa19_22_xor1 = 0;
uint8_t u_arrmul32_fa19_22_or0 = 0;
uint8_t u_arrmul32_and20_22 = 0;
uint8_t u_arrmul32_fa20_22_xor1 = 0;
uint8_t u_arrmul32_fa20_22_or0 = 0;
uint8_t u_arrmul32_and21_22 = 0;
uint8_t u_arrmul32_fa21_22_xor1 = 0;
uint8_t u_arrmul32_fa21_22_or0 = 0;
uint8_t u_arrmul32_and22_22 = 0;
uint8_t u_arrmul32_fa22_22_xor1 = 0;
uint8_t u_arrmul32_fa22_22_or0 = 0;
uint8_t u_arrmul32_and23_22 = 0;
uint8_t u_arrmul32_fa23_22_xor1 = 0;
uint8_t u_arrmul32_fa23_22_or0 = 0;
uint8_t u_arrmul32_and24_22 = 0;
uint8_t u_arrmul32_fa24_22_xor1 = 0;
uint8_t u_arrmul32_fa24_22_or0 = 0;
uint8_t u_arrmul32_and25_22 = 0;
uint8_t u_arrmul32_fa25_22_xor1 = 0;
uint8_t u_arrmul32_fa25_22_or0 = 0;
uint8_t u_arrmul32_and26_22 = 0;
uint8_t u_arrmul32_fa26_22_xor1 = 0;
uint8_t u_arrmul32_fa26_22_or0 = 0;
uint8_t u_arrmul32_and27_22 = 0;
uint8_t u_arrmul32_fa27_22_xor1 = 0;
uint8_t u_arrmul32_fa27_22_or0 = 0;
uint8_t u_arrmul32_and28_22 = 0;
uint8_t u_arrmul32_fa28_22_xor1 = 0;
uint8_t u_arrmul32_fa28_22_or0 = 0;
uint8_t u_arrmul32_and29_22 = 0;
uint8_t u_arrmul32_fa29_22_xor1 = 0;
uint8_t u_arrmul32_fa29_22_or0 = 0;
uint8_t u_arrmul32_and30_22 = 0;
uint8_t u_arrmul32_fa30_22_xor1 = 0;
uint8_t u_arrmul32_fa30_22_or0 = 0;
uint8_t u_arrmul32_and31_22 = 0;
uint8_t u_arrmul32_fa31_22_xor1 = 0;
uint8_t u_arrmul32_fa31_22_or0 = 0;
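/* row 23 wire declarations */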
uint8_t u_arrmul32_and0_23 = 0;
uint8_t u_arrmul32_ha0_23_xor0 = 0;
uint8_t u_arrmul32_ha0_23_and0 = 0;
uint8_t u_arrmul32_and1_23 = 0;
uint8_t u_arrmul32_fa1_23_xor1 = 0;
uint8_t u_arrmul32_fa1_23_or0 = 0;
uint8_t u_arrmul32_and2_23 = 0;
uint8_t u_arrmul32_fa2_23_xor1 = 0;
uint8_t u_arrmul32_fa2_23_or0 = 0;
uint8_t u_arrmul32_and3_23 = 0;
uint8_t u_arrmul32_fa3_23_xor1 = 0;
uint8_t u_arrmul32_fa3_23_or0 = 0;
uint8_t u_arrmul32_and4_23 = 0;
uint8_t u_arrmul32_fa4_23_xor1 = 0;
uint8_t u_arrmul32_fa4_23_or0 = 0;
uint8_t u_arrmul32_and5_23 = 0;
uint8_t u_arrmul32_fa5_23_xor1 = 0;
uint8_t u_arrmul32_fa5_23_or0 = 0;
uint8_t u_arrmul32_and6_23 = 0;
uint8_t u_arrmul32_fa6_23_xor1 = 0;
uint8_t u_arrmul32_fa6_23_or0 = 0;
uint8_t u_arrmul32_and7_23 = 0;
uint8_t u_arrmul32_fa7_23_xor1 = 0;
uint8_t u_arrmul32_fa7_23_or0 = 0;
uint8_t u_arrmul32_and8_23 = 0;
uint8_t u_arrmul32_fa8_23_xor1 = 0;
uint8_t u_arrmul32_fa8_23_or0 = 0;
uint8_t u_arrmul32_and9_23 = 0;
uint8_t u_arrmul32_fa9_23_xor1 = 0;
uint8_t u_arrmul32_fa9_23_or0 = 0;
uint8_t u_arrmul32_and10_23 = 0;
uint8_t u_arrmul32_fa10_23_xor1 = 0;
uint8_t u_arrmul32_fa10_23_or0 = 0;
uint8_t u_arrmul32_and11_23 = 0;
uint8_t u_arrmul32_fa11_23_xor1 = 0;
uint8_t u_arrmul32_fa11_23_or0 = 0;
uint8_t u_arrmul32_and12_23 = 0;
uint8_t u_arrmul32_fa12_23_xor1 = 0;
uint8_t u_arrmul32_fa12_23_or0 = 0;
uint8_t u_arrmul32_and13_23 = 0;
uint8_t u_arrmul32_fa13_23_xor1 = 0;
uint8_t u_arrmul32_fa13_23_or0 = 0;
uint8_t u_arrmul32_and14_23 = 0;
uint8_t u_arrmul32_fa14_23_xor1 = 0;
uint8_t u_arrmul32_fa14_23_or0 = 0;
uint8_t u_arrmul32_and15_23 = 0;
uint8_t u_arrmul32_fa15_23_xor1 = 0;
uint8_t u_arrmul32_fa15_23_or0 = 0;
uint8_t u_arrmul32_and16_23 = 0;
uint8_t u_arrmul32_fa16_23_xor1 = 0;
uint8_t u_arrmul32_fa16_23_or0 = 0;
uint8_t u_arrmul32_and17_23 = 0;
uint8_t u_arrmul32_fa17_23_xor1 = 0;
uint8_t u_arrmul32_fa17_23_or0 = 0;
uint8_t u_arrmul32_and18_23 = 0;
uint8_t u_arrmul32_fa18_23_xor1 = 0;
uint8_t u_arrmul32_fa18_23_or0 = 0;
uint8_t u_arrmul32_and19_23 = 0;
uint8_t u_arrmul32_fa19_23_xor1 = 0;
uint8_t u_arrmul32_fa19_23_or0 = 0;
uint8_t u_arrmul32_and20_23 = 0;
uint8_t u_arrmul32_fa20_23_xor1 = 0;
uint8_t u_arrmul32_fa20_23_or0 = 0;
uint8_t u_arrmul32_and21_23 = 0;
uint8_t u_arrmul32_fa21_23_xor1 = 0;
uint8_t u_arrmul32_fa21_23_or0 = 0;
uint8_t u_arrmul32_and22_23 = 0;
uint8_t u_arrmul32_fa22_23_xor1 = 0;
uint8_t u_arrmul32_fa22_23_or0 = 0;
uint8_t u_arrmul32_and23_23 = 0;
uint8_t u_arrmul32_fa23_23_xor1 = 0;
uint8_t u_arrmul32_fa23_23_or0 = 0;
uint8_t u_arrmul32_and24_23 = 0;
uint8_t u_arrmul32_fa24_23_xor1 = 0;
uint8_t u_arrmul32_fa24_23_or0 = 0;
uint8_t u_arrmul32_and25_23 = 0;
uint8_t u_arrmul32_fa25_23_xor1 = 0;
uint8_t u_arrmul32_fa25_23_or0 = 0;
uint8_t u_arrmul32_and26_23 = 0;
uint8_t u_arrmul32_fa26_23_xor1 = 0;
uint8_t u_arrmul32_fa26_23_or0 = 0;
uint8_t u_arrmul32_and27_23 = 0;
uint8_t u_arrmul32_fa27_23_xor1 = 0;
uint8_t u_arrmul32_fa27_23_or0 = 0;
uint8_t u_arrmul32_and28_23 = 0;
uint8_t u_arrmul32_fa28_23_xor1 = 0;
uint8_t u_arrmul32_fa28_23_or0 = 0;
uint8_t u_arrmul32_and29_23 = 0;
uint8_t u_arrmul32_fa29_23_xor1 = 0;
uint8_t u_arrmul32_fa29_23_or0 = 0;
uint8_t u_arrmul32_and30_23 = 0;
uint8_t u_arrmul32_fa30_23_xor1 = 0;
uint8_t u_arrmul32_fa30_23_or0 = 0;
uint8_t u_arrmul32_and31_23 = 0;
uint8_t u_arrmul32_fa31_23_xor1 = 0;
uint8_t u_arrmul32_fa31_23_or0 = 0;
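/* row 24 wire declarations */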
uint8_t u_arrmul32_and0_24 = 0;
uint8_t u_arrmul32_ha0_24_xor0 = 0;
uint8_t u_arrmul32_ha0_24_and0 = 0;
uint8_t u_arrmul32_and1_24 = 0;
uint8_t u_arrmul32_fa1_24_xor1 = 0;
uint8_t u_arrmul32_fa1_24_or0 = 0;
uint8_t u_arrmul32_and2_24 = 0;
uint8_t u_arrmul32_fa2_24_xor1 = 0;
uint8_t u_arrmul32_fa2_24_or0 = 0;
uint8_t u_arrmul32_and3_24 = 0;
uint8_t u_arrmul32_fa3_24_xor1 = 0;
uint8_t u_arrmul32_fa3_24_or0 = 0;
uint8_t u_arrmul32_and4_24 = 0;
uint8_t u_arrmul32_fa4_24_xor1 = 0;
uint8_t u_arrmul32_fa4_24_or0 = 0;
uint8_t u_arrmul32_and5_24 = 0;
uint8_t u_arrmul32_fa5_24_xor1 = 0;
uint8_t u_arrmul32_fa5_24_or0 = 0;
uint8_t u_arrmul32_and6_24 = 0;
uint8_t u_arrmul32_fa6_24_xor1 = 0;
uint8_t u_arrmul32_fa6_24_or0 = 0;
uint8_t u_arrmul32_and7_24 = 0;
uint8_t u_arrmul32_fa7_24_xor1 = 0;
uint8_t u_arrmul32_fa7_24_or0 = 0;
uint8_t u_arrmul32_and8_24 = 0;
uint8_t u_arrmul32_fa8_24_xor1 = 0;
uint8_t u_arrmul32_fa8_24_or0 = 0;
uint8_t u_arrmul32_and9_24 = 0;
uint8_t u_arrmul32_fa9_24_xor1 = 0;
uint8_t u_arrmul32_fa9_24_or0 = 0;
uint8_t u_arrmul32_and10_24 = 0;
uint8_t u_arrmul32_fa10_24_xor1 = 0;
uint8_t u_arrmul32_fa10_24_or0 = 0;
uint8_t u_arrmul32_and11_24 = 0;
uint8_t u_arrmul32_fa11_24_xor1 = 0;
uint8_t u_arrmul32_fa11_24_or0 = 0;
uint8_t u_arrmul32_and12_24 = 0;
uint8_t u_arrmul32_fa12_24_xor1 = 0;
uint8_t u_arrmul32_fa12_24_or0 = 0;
uint8_t u_arrmul32_and13_24 = 0;
uint8_t u_arrmul32_fa13_24_xor1 = 0;
uint8_t u_arrmul32_fa13_24_or0 = 0;
uint8_t u_arrmul32_and14_24 = 0;
uint8_t u_arrmul32_fa14_24_xor1 = 0;
uint8_t u_arrmul32_fa14_24_or0 = 0;
uint8_t u_arrmul32_and15_24 = 0;
uint8_t u_arrmul32_fa15_24_xor1 = 0;
uint8_t u_arrmul32_fa15_24_or0 = 0;
uint8_t u_arrmul32_and16_24 = 0;
uint8_t u_arrmul32_fa16_24_xor1 = 0;
uint8_t u_arrmul32_fa16_24_or0 = 0;
uint8_t u_arrmul32_and17_24 = 0;
uint8_t u_arrmul32_fa17_24_xor1 = 0;
uint8_t u_arrmul32_fa17_24_or0 = 0;
uint8_t u_arrmul32_and18_24 = 0;
uint8_t u_arrmul32_fa18_24_xor1 = 0;
uint8_t u_arrmul32_fa18_24_or0 = 0;
uint8_t u_arrmul32_and19_24 = 0;
uint8_t u_arrmul32_fa19_24_xor1 = 0;
uint8_t u_arrmul32_fa19_24_or0 = 0;
uint8_t u_arrmul32_and20_24 = 0;
uint8_t u_arrmul32_fa20_24_xor1 = 0;
uint8_t u_arrmul32_fa20_24_or0 = 0;
uint8_t u_arrmul32_and21_24 = 0;
uint8_t u_arrmul32_fa21_24_xor1 = 0;
uint8_t u_arrmul32_fa21_24_or0 = 0;
uint8_t u_arrmul32_and22_24 = 0;
uint8_t u_arrmul32_fa22_24_xor1 = 0;
uint8_t u_arrmul32_fa22_24_or0 = 0;
uint8_t u_arrmul32_and23_24 = 0;
uint8_t u_arrmul32_fa23_24_xor1 = 0;
uint8_t u_arrmul32_fa23_24_or0 = 0;
uint8_t u_arrmul32_and24_24 = 0;
uint8_t u_arrmul32_fa24_24_xor1 = 0;
uint8_t u_arrmul32_fa24_24_or0 = 0;
uint8_t u_arrmul32_and25_24 = 0;
uint8_t u_arrmul32_fa25_24_xor1 = 0;
uint8_t u_arrmul32_fa25_24_or0 = 0;
uint8_t u_arrmul32_and26_24 = 0;
uint8_t u_arrmul32_fa26_24_xor1 = 0;
uint8_t u_arrmul32_fa26_24_or0 = 0;
uint8_t u_arrmul32_and27_24 = 0;
uint8_t u_arrmul32_fa27_24_xor1 = 0;
uint8_t u_arrmul32_fa27_24_or0 = 0;
uint8_t u_arrmul32_and28_24 = 0;
uint8_t u_arrmul32_fa28_24_xor1 = 0;
uint8_t u_arrmul32_fa28_24_or0 = 0;
uint8_t u_arrmul32_and29_24 = 0;
uint8_t u_arrmul32_fa29_24_xor1 = 0;
uint8_t u_arrmul32_fa29_24_or0 = 0;
uint8_t u_arrmul32_and30_24 = 0;
uint8_t u_arrmul32_fa30_24_xor1 = 0;
uint8_t u_arrmul32_fa30_24_or0 = 0;
uint8_t u_arrmul32_and31_24 = 0;
uint8_t u_arrmul32_fa31_24_xor1 = 0;
uint8_t u_arrmul32_fa31_24_or0 = 0;
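/* row 25 wire declarations */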
uint8_t u_arrmul32_and0_25 = 0;
uint8_t u_arrmul32_ha0_25_xor0 = 0;
uint8_t u_arrmul32_ha0_25_and0 = 0;
uint8_t u_arrmul32_and1_25 = 0;
uint8_t u_arrmul32_fa1_25_xor1 = 0;
uint8_t u_arrmul32_fa1_25_or0 = 0;
uint8_t u_arrmul32_and2_25 = 0;
uint8_t u_arrmul32_fa2_25_xor1 = 0;
uint8_t u_arrmul32_fa2_25_or0 = 0;
uint8_t u_arrmul32_and3_25 = 0;
uint8_t u_arrmul32_fa3_25_xor1 = 0;
uint8_t u_arrmul32_fa3_25_or0 = 0;
uint8_t u_arrmul32_and4_25 = 0;
uint8_t u_arrmul32_fa4_25_xor1 = 0;
uint8_t u_arrmul32_fa4_25_or0 = 0;
uint8_t u_arrmul32_and5_25 = 0;
uint8_t u_arrmul32_fa5_25_xor1 = 0;
uint8_t u_arrmul32_fa5_25_or0 = 0;
uint8_t u_arrmul32_and6_25 = 0;
uint8_t u_arrmul32_fa6_25_xor1 = 0;
uint8_t u_arrmul32_fa6_25_or0 = 0;
uint8_t u_arrmul32_and7_25 = 0;
uint8_t u_arrmul32_fa7_25_xor1 = 0;
uint8_t u_arrmul32_fa7_25_or0 = 0;
uint8_t u_arrmul32_and8_25 = 0;
uint8_t u_arrmul32_fa8_25_xor1 = 0;
uint8_t u_arrmul32_fa8_25_or0 = 0;
uint8_t u_arrmul32_and9_25 = 0;
uint8_t u_arrmul32_fa9_25_xor1 = 0;
uint8_t u_arrmul32_fa9_25_or0 = 0;
uint8_t u_arrmul32_and10_25 = 0;
uint8_t u_arrmul32_fa10_25_xor1 = 0;
uint8_t u_arrmul32_fa10_25_or0 = 0;
uint8_t u_arrmul32_and11_25 = 0;
uint8_t u_arrmul32_fa11_25_xor1 = 0;
uint8_t u_arrmul32_fa11_25_or0 = 0;
uint8_t u_arrmul32_and12_25 = 0;
uint8_t u_arrmul32_fa12_25_xor1 = 0;
uint8_t u_arrmul32_fa12_25_or0 = 0;
uint8_t u_arrmul32_and13_25 = 0;
uint8_t u_arrmul32_fa13_25_xor1 = 0;
uint8_t u_arrmul32_fa13_25_or0 = 0;
uint8_t u_arrmul32_and14_25 = 0;
uint8_t u_arrmul32_fa14_25_xor1 = 0;
uint8_t u_arrmul32_fa14_25_or0 = 0;
uint8_t u_arrmul32_and15_25 = 0;
uint8_t u_arrmul32_fa15_25_xor1 = 0;
uint8_t u_arrmul32_fa15_25_or0 = 0;
uint8_t u_arrmul32_and16_25 = 0;
uint8_t u_arrmul32_fa16_25_xor1 = 0;
uint8_t u_arrmul32_fa16_25_or0 = 0;
uint8_t u_arrmul32_and17_25 = 0;
uint8_t u_arrmul32_fa17_25_xor1 = 0;
uint8_t u_arrmul32_fa17_25_or0 = 0;
uint8_t u_arrmul32_and18_25 = 0;
uint8_t u_arrmul32_fa18_25_xor1 = 0;
uint8_t u_arrmul32_fa18_25_or0 = 0;
uint8_t u_arrmul32_and19_25 = 0;
uint8_t u_arrmul32_fa19_25_xor1 = 0;
uint8_t u_arrmul32_fa19_25_or0 = 0;
uint8_t u_arrmul32_and20_25 = 0;
uint8_t u_arrmul32_fa20_25_xor1 = 0;
uint8_t u_arrmul32_fa20_25_or0 = 0;
uint8_t u_arrmul32_and21_25 = 0;
uint8_t u_arrmul32_fa21_25_xor1 = 0;
uint8_t u_arrmul32_fa21_25_or0 = 0;
uint8_t u_arrmul32_and22_25 = 0;
uint8_t u_arrmul32_fa22_25_xor1 = 0;
uint8_t u_arrmul32_fa22_25_or0 = 0;
uint8_t u_arrmul32_and23_25 = 0;
uint8_t u_arrmul32_fa23_25_xor1 = 0;
uint8_t u_arrmul32_fa23_25_or0 = 0;
uint8_t u_arrmul32_and24_25 = 0;
uint8_t u_arrmul32_fa24_25_xor1 = 0;
uint8_t u_arrmul32_fa24_25_or0 = 0;
uint8_t u_arrmul32_and25_25 = 0;
uint8_t u_arrmul32_fa25_25_xor1 = 0;
uint8_t u_arrmul32_fa25_25_or0 = 0;
uint8_t u_arrmul32_and26_25 = 0;
uint8_t u_arrmul32_fa26_25_xor1 = 0;
uint8_t u_arrmul32_fa26_25_or0 = 0;
uint8_t u_arrmul32_and27_25 = 0;
uint8_t u_arrmul32_fa27_25_xor1 = 0;
uint8_t u_arrmul32_fa27_25_or0 = 0;
uint8_t u_arrmul32_and28_25 = 0;
uint8_t u_arrmul32_fa28_25_xor1 = 0;
uint8_t u_arrmul32_fa28_25_or0 = 0;
uint8_t u_arrmul32_and29_25 = 0;
uint8_t u_arrmul32_fa29_25_xor1 = 0;
uint8_t u_arrmul32_fa29_25_or0 = 0;
uint8_t u_arrmul32_and30_25 = 0;
uint8_t u_arrmul32_fa30_25_xor1 = 0;
uint8_t u_arrmul32_fa30_25_or0 = 0;
uint8_t u_arrmul32_and31_25 = 0;
uint8_t u_arrmul32_fa31_25_xor1 = 0;
uint8_t u_arrmul32_fa31_25_or0 = 0;
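/* row 26 wire declarations */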
uint8_t u_arrmul32_and0_26 = 0;
uint8_t u_arrmul32_ha0_26_xor0 = 0;
uint8_t u_arrmul32_ha0_26_and0 = 0;
uint8_t u_arrmul32_and1_26 = 0;
uint8_t u_arrmul32_fa1_26_xor1 = 0;
uint8_t u_arrmul32_fa1_26_or0 = 0;
uint8_t u_arrmul32_and2_26 = 0;
uint8_t u_arrmul32_fa2_26_xor1 = 0;
uint8_t u_arrmul32_fa2_26_or0 = 0;
uint8_t u_arrmul32_and3_26 = 0;
uint8_t u_arrmul32_fa3_26_xor1 = 0;
uint8_t u_arrmul32_fa3_26_or0 = 0;
uint8_t u_arrmul32_and4_26 = 0;
uint8_t u_arrmul32_fa4_26_xor1 = 0;
uint8_t u_arrmul32_fa4_26_or0 = 0;
uint8_t u_arrmul32_and5_26 = 0;
uint8_t u_arrmul32_fa5_26_xor1 = 0;
uint8_t u_arrmul32_fa5_26_or0 = 0;
uint8_t u_arrmul32_and6_26 = 0;
uint8_t u_arrmul32_fa6_26_xor1 = 0;
uint8_t u_arrmul32_fa6_26_or0 = 0;
uint8_t u_arrmul32_and7_26 = 0;
uint8_t u_arrmul32_fa7_26_xor1 = 0;
uint8_t u_arrmul32_fa7_26_or0 = 0;
uint8_t u_arrmul32_and8_26 = 0;
uint8_t u_arrmul32_fa8_26_xor1 = 0;
uint8_t u_arrmul32_fa8_26_or0 = 0;
uint8_t u_arrmul32_and9_26 = 0;
uint8_t u_arrmul32_fa9_26_xor1 = 0;
uint8_t u_arrmul32_fa9_26_or0 = 0;
uint8_t u_arrmul32_and10_26 = 0;
uint8_t u_arrmul32_fa10_26_xor1 = 0;
uint8_t u_arrmul32_fa10_26_or0 = 0;
uint8_t u_arrmul32_and11_26 = 0;
uint8_t u_arrmul32_fa11_26_xor1 = 0;
uint8_t u_arrmul32_fa11_26_or0 = 0;
uint8_t u_arrmul32_and12_26 = 0;
uint8_t u_arrmul32_fa12_26_xor1 = 0;
uint8_t u_arrmul32_fa12_26_or0 = 0;
uint8_t u_arrmul32_and13_26 = 0;
uint8_t u_arrmul32_fa13_26_xor1 = 0;
uint8_t u_arrmul32_fa13_26_or0 = 0;
uint8_t u_arrmul32_and14_26 = 0;
uint8_t u_arrmul32_fa14_26_xor1 = 0;
uint8_t u_arrmul32_fa14_26_or0 = 0;
uint8_t u_arrmul32_and15_26 = 0;
uint8_t u_arrmul32_fa15_26_xor1 = 0;
uint8_t u_arrmul32_fa15_26_or0 = 0;
uint8_t u_arrmul32_and16_26 = 0;
uint8_t u_arrmul32_fa16_26_xor1 = 0;
uint8_t u_arrmul32_fa16_26_or0 = 0;
uint8_t u_arrmul32_and17_26 = 0;
uint8_t u_arrmul32_fa17_26_xor1 = 0;
uint8_t u_arrmul32_fa17_26_or0 = 0;
uint8_t u_arrmul32_and18_26 = 0;
uint8_t u_arrmul32_fa18_26_xor1 = 0;
uint8_t u_arrmul32_fa18_26_or0 = 0;
uint8_t u_arrmul32_and19_26 = 0;
uint8_t u_arrmul32_fa19_26_xor1 = 0;
uint8_t u_arrmul32_fa19_26_or0 = 0;
uint8_t u_arrmul32_and20_26 = 0;
uint8_t u_arrmul32_fa20_26_xor1 = 0;
uint8_t u_arrmul32_fa20_26_or0 = 0;
uint8_t u_arrmul32_and21_26 = 0;
uint8_t u_arrmul32_fa21_26_xor1 = 0;
uint8_t u_arrmul32_fa21_26_or0 = 0;
uint8_t u_arrmul32_and22_26 = 0;
uint8_t u_arrmul32_fa22_26_xor1 = 0;
uint8_t u_arrmul32_fa22_26_or0 = 0;
uint8_t u_arrmul32_and23_26 = 0;
uint8_t u_arrmul32_fa23_26_xor1 = 0;
uint8_t u_arrmul32_fa23_26_or0 = 0;
uint8_t u_arrmul32_and24_26 = 0;
uint8_t u_arrmul32_fa24_26_xor1 = 0;
uint8_t u_arrmul32_fa24_26_or0 = 0;
uint8_t u_arrmul32_and25_26 = 0;
uint8_t u_arrmul32_fa25_26_xor1 = 0;
uint8_t u_arrmul32_fa25_26_or0 = 0;
uint8_t u_arrmul32_and26_26 = 0;
uint8_t u_arrmul32_fa26_26_xor1 = 0;
uint8_t u_arrmul32_fa26_26_or0 = 0;
uint8_t u_arrmul32_and27_26 = 0;
uint8_t u_arrmul32_fa27_26_xor1 = 0;
uint8_t u_arrmul32_fa27_26_or0 = 0;
uint8_t u_arrmul32_and28_26 = 0;
uint8_t u_arrmul32_fa28_26_xor1 = 0;
uint8_t u_arrmul32_fa28_26_or0 = 0;
uint8_t u_arrmul32_and29_26 = 0;
uint8_t u_arrmul32_fa29_26_xor1 = 0;
uint8_t u_arrmul32_fa29_26_or0 = 0;
uint8_t u_arrmul32_and30_26 = 0;
uint8_t u_arrmul32_fa30_26_xor1 = 0;
uint8_t u_arrmul32_fa30_26_or0 = 0;
uint8_t u_arrmul32_and31_26 = 0;
uint8_t u_arrmul32_fa31_26_xor1 = 0;
uint8_t u_arrmul32_fa31_26_or0 = 0;
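/* row 27 wire declarations */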
uint8_t u_arrmul32_and0_27 = 0;
uint8_t u_arrmul32_ha0_27_xor0 = 0;
uint8_t u_arrmul32_ha0_27_and0 = 0;
uint8_t u_arrmul32_and1_27 = 0;
uint8_t u_arrmul32_fa1_27_xor1 = 0;
uint8_t u_arrmul32_fa1_27_or0 = 0;
uint8_t u_arrmul32_and2_27 = 0;
uint8_t u_arrmul32_fa2_27_xor1 = 0;
uint8_t u_arrmul32_fa2_27_or0 = 0;
uint8_t u_arrmul32_and3_27 = 0;
uint8_t u_arrmul32_fa3_27_xor1 = 0;
uint8_t u_arrmul32_fa3_27_or0 = 0;
uint8_t u_arrmul32_and4_27 = 0;
uint8_t u_arrmul32_fa4_27_xor1 = 0;
uint8_t u_arrmul32_fa4_27_or0 = 0;
uint8_t u_arrmul32_and5_27 = 0;
uint8_t u_arrmul32_fa5_27_xor1 = 0;
uint8_t u_arrmul32_fa5_27_or0 = 0;
uint8_t u_arrmul32_and6_27 = 0;
uint8_t u_arrmul32_fa6_27_xor1 = 0;
uint8_t u_arrmul32_fa6_27_or0 = 0;
uint8_t u_arrmul32_and7_27 = 0;
uint8_t u_arrmul32_fa7_27_xor1 = 0;
uint8_t u_arrmul32_fa7_27_or0 = 0;
uint8_t u_arrmul32_and8_27 = 0;
uint8_t u_arrmul32_fa8_27_xor1 = 0;
uint8_t u_arrmul32_fa8_27_or0 = 0;
uint8_t u_arrmul32_and9_27 = 0;
uint8_t u_arrmul32_fa9_27_xor1 = 0;
uint8_t u_arrmul32_fa9_27_or0 = 0;
uint8_t u_arrmul32_and10_27 = 0;
uint8_t u_arrmul32_fa10_27_xor1 = 0;
uint8_t u_arrmul32_fa10_27_or0 = 0;
uint8_t u_arrmul32_and11_27 = 0;
uint8_t u_arrmul32_fa11_27_xor1 = 0;
uint8_t u_arrmul32_fa11_27_or0 = 0;
uint8_t u_arrmul32_and12_27 = 0;
uint8_t u_arrmul32_fa12_27_xor1 = 0;
uint8_t u_arrmul32_fa12_27_or0 = 0;
uint8_t u_arrmul32_and13_27 = 0;
uint8_t u_arrmul32_fa13_27_xor1 = 0;
uint8_t u_arrmul32_fa13_27_or0 = 0;
uint8_t u_arrmul32_and14_27 = 0;
uint8_t u_arrmul32_fa14_27_xor1 = 0;
uint8_t u_arrmul32_fa14_27_or0 = 0;
uint8_t u_arrmul32_and15_27 = 0;
uint8_t u_arrmul32_fa15_27_xor1 = 0;
uint8_t u_arrmul32_fa15_27_or0 = 0;
uint8_t u_arrmul32_and16_27 = 0;
uint8_t u_arrmul32_fa16_27_xor1 = 0;
uint8_t u_arrmul32_fa16_27_or0 = 0;
uint8_t u_arrmul32_and17_27 = 0;
uint8_t u_arrmul32_fa17_27_xor1 = 0;
uint8_t u_arrmul32_fa17_27_or0 = 0;
uint8_t u_arrmul32_and18_27 = 0;
uint8_t u_arrmul32_fa18_27_xor1 = 0;
uint8_t u_arrmul32_fa18_27_or0 = 0;
uint8_t u_arrmul32_and19_27 = 0;
uint8_t u_arrmul32_fa19_27_xor1 = 0;
uint8_t u_arrmul32_fa19_27_or0 = 0;
uint8_t u_arrmul32_and20_27 = 0;
uint8_t u_arrmul32_fa20_27_xor1 = 0;
uint8_t u_arrmul32_fa20_27_or0 = 0;
uint8_t u_arrmul32_and21_27 = 0;
uint8_t u_arrmul32_fa21_27_xor1 = 0;
uint8_t u_arrmul32_fa21_27_or0 = 0;
uint8_t u_arrmul32_and22_27 = 0;
uint8_t u_arrmul32_fa22_27_xor1 = 0;
uint8_t u_arrmul32_fa22_27_or0 = 0;
uint8_t u_arrmul32_and23_27 = 0;
uint8_t u_arrmul32_fa23_27_xor1 = 0;
uint8_t u_arrmul32_fa23_27_or0 = 0;
uint8_t u_arrmul32_and24_27 = 0;
uint8_t u_arrmul32_fa24_27_xor1 = 0;
uint8_t u_arrmul32_fa24_27_or0 = 0;
uint8_t u_arrmul32_and25_27 = 0;
uint8_t u_arrmul32_fa25_27_xor1 = 0;
uint8_t u_arrmul32_fa25_27_or0 = 0;
uint8_t u_arrmul32_and26_27 = 0;
uint8_t u_arrmul32_fa26_27_xor1 = 0;
uint8_t u_arrmul32_fa26_27_or0 = 0;
uint8_t u_arrmul32_and27_27 = 0;
uint8_t u_arrmul32_fa27_27_xor1 = 0;
uint8_t u_arrmul32_fa27_27_or0 = 0;
uint8_t u_arrmul32_and28_27 = 0;
uint8_t u_arrmul32_fa28_27_xor1 = 0;
uint8_t u_arrmul32_fa28_27_or0 = 0;
uint8_t u_arrmul32_and29_27 = 0;
uint8_t u_arrmul32_fa29_27_xor1 = 0;
uint8_t u_arrmul32_fa29_27_or0 = 0;
uint8_t u_arrmul32_and30_27 = 0;
uint8_t u_arrmul32_fa30_27_xor1 = 0;
uint8_t u_arrmul32_fa30_27_or0 = 0;
uint8_t u_arrmul32_and31_27 = 0;
uint8_t u_arrmul32_fa31_27_xor1 = 0;
uint8_t u_arrmul32_fa31_27_or0 = 0;
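/* row 28 wire declarations */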
uint8_t u_arrmul32_and0_28 = 0;
uint8_t u_arrmul32_ha0_28_xor0 = 0;
uint8_t u_arrmul32_ha0_28_and0 = 0;
uint8_t u_arrmul32_and1_28 = 0;
uint8_t u_arrmul32_fa1_28_xor1 = 0;
uint8_t u_arrmul32_fa1_28_or0 = 0;
uint8_t u_arrmul32_and2_28 = 0;
uint8_t u_arrmul32_fa2_28_xor1 = 0;
uint8_t u_arrmul32_fa2_28_or0 = 0;
uint8_t u_arrmul32_and3_28 = 0;
uint8_t u_arrmul32_fa3_28_xor1 = 0;
uint8_t u_arrmul32_fa3_28_or0 = 0;
uint8_t u_arrmul32_and4_28 = 0;
uint8_t u_arrmul32_fa4_28_xor1 = 0;
uint8_t u_arrmul32_fa4_28_or0 = 0;
uint8_t u_arrmul32_and5_28 = 0;
uint8_t u_arrmul32_fa5_28_xor1 = 0;
uint8_t u_arrmul32_fa5_28_or0 = 0;
uint8_t u_arrmul32_and6_28 = 0;
uint8_t u_arrmul32_fa6_28_xor1 = 0;
uint8_t u_arrmul32_fa6_28_or0 = 0;
uint8_t u_arrmul32_and7_28 = 0;
uint8_t u_arrmul32_fa7_28_xor1 = 0;
uint8_t u_arrmul32_fa7_28_or0 = 0;
uint8_t u_arrmul32_and8_28 = 0;
uint8_t u_arrmul32_fa8_28_xor1 = 0;
uint8_t u_arrmul32_fa8_28_or0 = 0;
uint8_t u_arrmul32_and9_28 = 0;
uint8_t u_arrmul32_fa9_28_xor1 = 0;
uint8_t u_arrmul32_fa9_28_or0 = 0;
uint8_t u_arrmul32_and10_28 = 0;
uint8_t u_arrmul32_fa10_28_xor1 = 0;
uint8_t u_arrmul32_fa10_28_or0 = 0;
uint8_t u_arrmul32_and11_28 = 0;
uint8_t u_arrmul32_fa11_28_xor1 = 0;
uint8_t u_arrmul32_fa11_28_or0 = 0;
uint8_t u_arrmul32_and12_28 = 0;
uint8_t u_arrmul32_fa12_28_xor1 = 0;
uint8_t u_arrmul32_fa12_28_or0 = 0;
uint8_t u_arrmul32_and13_28 = 0;
uint8_t u_arrmul32_fa13_28_xor1 = 0;
uint8_t u_arrmul32_fa13_28_or0 = 0;
uint8_t u_arrmul32_and14_28 = 0;
uint8_t u_arrmul32_fa14_28_xor1 = 0;
uint8_t u_arrmul32_fa14_28_or0 = 0;
uint8_t u_arrmul32_and15_28 = 0;
uint8_t u_arrmul32_fa15_28_xor1 = 0;
uint8_t u_arrmul32_fa15_28_or0 = 0;
uint8_t u_arrmul32_and16_28 = 0;
uint8_t u_arrmul32_fa16_28_xor1 = 0;
uint8_t u_arrmul32_fa16_28_or0 = 0;
uint8_t u_arrmul32_and17_28 = 0;
uint8_t u_arrmul32_fa17_28_xor1 = 0;
uint8_t u_arrmul32_fa17_28_or0 = 0;
uint8_t u_arrmul32_and18_28 = 0;
uint8_t u_arrmul32_fa18_28_xor1 = 0;
uint8_t u_arrmul32_fa18_28_or0 = 0;
uint8_t u_arrmul32_and19_28 = 0;
uint8_t u_arrmul32_fa19_28_xor1 = 0;
uint8_t u_arrmul32_fa19_28_or0 = 0;
uint8_t u_arrmul32_and20_28 = 0;
uint8_t u_arrmul32_fa20_28_xor1 = 0;
uint8_t u_arrmul32_fa20_28_or0 = 0;
uint8_t u_arrmul32_and21_28 = 0;
uint8_t u_arrmul32_fa21_28_xor1 = 0;
uint8_t u_arrmul32_fa21_28_or0 = 0;
uint8_t u_arrmul32_and22_28 = 0;
uint8_t u_arrmul32_fa22_28_xor1 = 0;
uint8_t u_arrmul32_fa22_28_or0 = 0;
uint8_t u_arrmul32_and23_28 = 0;
uint8_t u_arrmul32_fa23_28_xor1 = 0;
uint8_t u_arrmul32_fa23_28_or0 = 0;
uint8_t u_arrmul32_and24_28 = 0;
uint8_t u_arrmul32_fa24_28_xor1 = 0;
uint8_t u_arrmul32_fa24_28_or0 = 0;
uint8_t u_arrmul32_and25_28 = 0;
uint8_t u_arrmul32_fa25_28_xor1 = 0;
uint8_t u_arrmul32_fa25_28_or0 = 0;
uint8_t u_arrmul32_and26_28 = 0;
uint8_t u_arrmul32_fa26_28_xor1 = 0;
uint8_t u_arrmul32_fa26_28_or0 = 0;
uint8_t u_arrmul32_and27_28 = 0;
uint8_t u_arrmul32_fa27_28_xor1 = 0;
uint8_t u_arrmul32_fa27_28_or0 = 0;
uint8_t u_arrmul32_and28_28 = 0;
uint8_t u_arrmul32_fa28_28_xor1 = 0;
uint8_t u_arrmul32_fa28_28_or0 = 0;
uint8_t u_arrmul32_and29_28 = 0;
uint8_t u_arrmul32_fa29_28_xor1 = 0;
uint8_t u_arrmul32_fa29_28_or0 = 0;
uint8_t u_arrmul32_and30_28 = 0;
uint8_t u_arrmul32_fa30_28_xor1 = 0;
uint8_t u_arrmul32_fa30_28_or0 = 0;
uint8_t u_arrmul32_and31_28 = 0;
uint8_t u_arrmul32_fa31_28_xor1 = 0;
uint8_t u_arrmul32_fa31_28_or0 = 0;
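/* row 29 wire declarations */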
uint8_t u_arrmul32_and0_29 = 0;
uint8_t u_arrmul32_ha0_29_xor0 = 0;
uint8_t u_arrmul32_ha0_29_and0 = 0;
uint8_t u_arrmul32_and1_29 = 0;
uint8_t u_arrmul32_fa1_29_xor1 = 0;
uint8_t u_arrmul32_fa1_29_or0 = 0;
uint8_t u_arrmul32_and2_29 = 0;
uint8_t u_arrmul32_fa2_29_xor1 = 0;
uint8_t u_arrmul32_fa2_29_or0 = 0;
uint8_t u_arrmul32_and3_29 = 0;
uint8_t u_arrmul32_fa3_29_xor1 = 0;
uint8_t u_arrmul32_fa3_29_or0 = 0;
uint8_t u_arrmul32_and4_29 = 0;
uint8_t u_arrmul32_fa4_29_xor1 = 0;
uint8_t u_arrmul32_fa4_29_or0 = 0;
uint8_t u_arrmul32_and5_29 = 0;
uint8_t u_arrmul32_fa5_29_xor1 = 0;
uint8_t u_arrmul32_fa5_29_or0 = 0;
uint8_t u_arrmul32_and6_29 = 0;
uint8_t u_arrmul32_fa6_29_xor1 = 0;
uint8_t u_arrmul32_fa6_29_or0 = 0;
uint8_t u_arrmul32_and7_29 = 0;
uint8_t u_arrmul32_fa7_29_xor1 = 0;
uint8_t u_arrmul32_fa7_29_or0 = 0;
uint8_t u_arrmul32_and8_29 = 0;
uint8_t u_arrmul32_fa8_29_xor1 = 0;
uint8_t u_arrmul32_fa8_29_or0 = 0;
uint8_t u_arrmul32_and9_29 = 0;
uint8_t u_arrmul32_fa9_29_xor1 = 0;
uint8_t u_arrmul32_fa9_29_or0 = 0;
uint8_t u_arrmul32_and10_29 = 0;
uint8_t u_arrmul32_fa10_29_xor1 = 0;
uint8_t u_arrmul32_fa10_29_or0 = 0;
uint8_t u_arrmul32_and11_29 = 0;
uint8_t u_arrmul32_fa11_29_xor1 = 0;
uint8_t u_arrmul32_fa11_29_or0 = 0;
uint8_t u_arrmul32_and12_29 = 0;
uint8_t u_arrmul32_fa12_29_xor1 = 0;
uint8_t u_arrmul32_fa12_29_or0 = 0;
uint8_t u_arrmul32_and13_29 = 0;
uint8_t u_arrmul32_fa13_29_xor1 = 0;
uint8_t u_arrmul32_fa13_29_or0 = 0;
uint8_t u_arrmul32_and14_29 = 0;
uint8_t u_arrmul32_fa14_29_xor1 = 0;
uint8_t u_arrmul32_fa14_29_or0 = 0;
uint8_t u_arrmul32_and15_29 = 0;
uint8_t u_arrmul32_fa15_29_xor1 = 0;
uint8_t u_arrmul32_fa15_29_or0 = 0;
uint8_t u_arrmul32_and16_29 = 0;
uint8_t u_arrmul32_fa16_29_xor1 = 0;
uint8_t u_arrmul32_fa16_29_or0 = 0;
uint8_t u_arrmul32_and17_29 = 0;
uint8_t u_arrmul32_fa17_29_xor1 = 0;
uint8_t u_arrmul32_fa17_29_or0 = 0;
uint8_t u_arrmul32_and18_29 = 0;
uint8_t u_arrmul32_fa18_29_xor1 = 0;
uint8_t u_arrmul32_fa18_29_or0 = 0;
uint8_t u_arrmul32_and19_29 = 0;
uint8_t u_arrmul32_fa19_29_xor1 = 0;
uint8_t u_arrmul32_fa19_29_or0 = 0;
uint8_t u_arrmul32_and20_29 = 0;
uint8_t u_arrmul32_fa20_29_xor1 = 0;
uint8_t u_arrmul32_fa20_29_or0 = 0;
uint8_t u_arrmul32_and21_29 = 0;
uint8_t u_arrmul32_fa21_29_xor1 = 0;
uint8_t u_arrmul32_fa21_29_or0 = 0;
uint8_t u_arrmul32_and22_29 = 0;
uint8_t u_arrmul32_fa22_29_xor1 = 0;
uint8_t u_arrmul32_fa22_29_or0 = 0;
uint8_t u_arrmul32_and23_29 = 0;
uint8_t u_arrmul32_fa23_29_xor1 = 0;
uint8_t u_arrmul32_fa23_29_or0 = 0;
uint8_t u_arrmul32_and24_29 = 0;
uint8_t u_arrmul32_fa24_29_xor1 = 0;
uint8_t u_arrmul32_fa24_29_or0 = 0;
uint8_t u_arrmul32_and25_29 = 0;
uint8_t u_arrmul32_fa25_29_xor1 = 0;
uint8_t u_arrmul32_fa25_29_or0 = 0;
uint8_t u_arrmul32_and26_29 = 0;
uint8_t u_arrmul32_fa26_29_xor1 = 0;
uint8_t u_arrmul32_fa26_29_or0 = 0;
uint8_t u_arrmul32_and27_29 = 0;
uint8_t u_arrmul32_fa27_29_xor1 = 0;
uint8_t u_arrmul32_fa27_29_or0 = 0;
uint8_t u_arrmul32_and28_29 = 0;
uint8_t u_arrmul32_fa28_29_xor1 = 0;
uint8_t u_arrmul32_fa28_29_or0 = 0;
uint8_t u_arrmul32_and29_29 = 0;
uint8_t u_arrmul32_fa29_29_xor1 = 0;
uint8_t u_arrmul32_fa29_29_or0 = 0;
uint8_t u_arrmul32_and30_29 = 0;
uint8_t u_arrmul32_fa30_29_xor1 = 0;
uint8_t u_arrmul32_fa30_29_or0 = 0;
uint8_t u_arrmul32_and31_29 = 0;
uint8_t u_arrmul32_fa31_29_xor1 = 0;
uint8_t u_arrmul32_fa31_29_or0 = 0;
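/* row 30 wire declarations */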
uint8_t u_arrmul32_and0_30 = 0;
|
|
uint8_t u_arrmul32_ha0_30_xor0 = 0;
|
|
uint8_t u_arrmul32_ha0_30_and0 = 0;
|
|
uint8_t u_arrmul32_and1_30 = 0;
|
|
uint8_t u_arrmul32_fa1_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa1_30_or0 = 0;
|
|
uint8_t u_arrmul32_and2_30 = 0;
|
|
uint8_t u_arrmul32_fa2_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa2_30_or0 = 0;
|
|
uint8_t u_arrmul32_and3_30 = 0;
|
|
uint8_t u_arrmul32_fa3_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa3_30_or0 = 0;
|
|
uint8_t u_arrmul32_and4_30 = 0;
|
|
uint8_t u_arrmul32_fa4_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa4_30_or0 = 0;
|
|
uint8_t u_arrmul32_and5_30 = 0;
|
|
uint8_t u_arrmul32_fa5_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa5_30_or0 = 0;
|
|
uint8_t u_arrmul32_and6_30 = 0;
|
|
uint8_t u_arrmul32_fa6_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa6_30_or0 = 0;
|
|
uint8_t u_arrmul32_and7_30 = 0;
|
|
uint8_t u_arrmul32_fa7_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa7_30_or0 = 0;
|
|
uint8_t u_arrmul32_and8_30 = 0;
|
|
uint8_t u_arrmul32_fa8_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa8_30_or0 = 0;
|
|
uint8_t u_arrmul32_and9_30 = 0;
|
|
uint8_t u_arrmul32_fa9_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa9_30_or0 = 0;
|
|
uint8_t u_arrmul32_and10_30 = 0;
|
|
uint8_t u_arrmul32_fa10_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa10_30_or0 = 0;
|
|
uint8_t u_arrmul32_and11_30 = 0;
|
|
uint8_t u_arrmul32_fa11_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa11_30_or0 = 0;
|
|
uint8_t u_arrmul32_and12_30 = 0;
|
|
uint8_t u_arrmul32_fa12_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa12_30_or0 = 0;
|
|
uint8_t u_arrmul32_and13_30 = 0;
|
|
uint8_t u_arrmul32_fa13_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa13_30_or0 = 0;
|
|
uint8_t u_arrmul32_and14_30 = 0;
|
|
uint8_t u_arrmul32_fa14_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa14_30_or0 = 0;
|
|
uint8_t u_arrmul32_and15_30 = 0;
|
|
uint8_t u_arrmul32_fa15_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa15_30_or0 = 0;
|
|
uint8_t u_arrmul32_and16_30 = 0;
|
|
uint8_t u_arrmul32_fa16_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa16_30_or0 = 0;
|
|
uint8_t u_arrmul32_and17_30 = 0;
|
|
uint8_t u_arrmul32_fa17_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa17_30_or0 = 0;
|
|
uint8_t u_arrmul32_and18_30 = 0;
|
|
uint8_t u_arrmul32_fa18_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa18_30_or0 = 0;
|
|
uint8_t u_arrmul32_and19_30 = 0;
|
|
uint8_t u_arrmul32_fa19_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa19_30_or0 = 0;
|
|
uint8_t u_arrmul32_and20_30 = 0;
|
|
uint8_t u_arrmul32_fa20_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa20_30_or0 = 0;
|
|
uint8_t u_arrmul32_and21_30 = 0;
|
|
uint8_t u_arrmul32_fa21_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa21_30_or0 = 0;
|
|
uint8_t u_arrmul32_and22_30 = 0;
|
|
uint8_t u_arrmul32_fa22_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa22_30_or0 = 0;
|
|
uint8_t u_arrmul32_and23_30 = 0;
|
|
uint8_t u_arrmul32_fa23_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa23_30_or0 = 0;
|
|
uint8_t u_arrmul32_and24_30 = 0;
|
|
uint8_t u_arrmul32_fa24_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa24_30_or0 = 0;
|
|
uint8_t u_arrmul32_and25_30 = 0;
|
|
uint8_t u_arrmul32_fa25_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa25_30_or0 = 0;
|
|
uint8_t u_arrmul32_and26_30 = 0;
|
|
uint8_t u_arrmul32_fa26_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa26_30_or0 = 0;
|
|
uint8_t u_arrmul32_and27_30 = 0;
|
|
uint8_t u_arrmul32_fa27_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa27_30_or0 = 0;
|
|
uint8_t u_arrmul32_and28_30 = 0;
|
|
uint8_t u_arrmul32_fa28_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa28_30_or0 = 0;
|
|
uint8_t u_arrmul32_and29_30 = 0;
|
|
uint8_t u_arrmul32_fa29_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa29_30_or0 = 0;
|
|
uint8_t u_arrmul32_and30_30 = 0;
|
|
uint8_t u_arrmul32_fa30_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa30_30_or0 = 0;
|
|
uint8_t u_arrmul32_and31_30 = 0;
|
|
uint8_t u_arrmul32_fa31_30_xor1 = 0;
|
|
uint8_t u_arrmul32_fa31_30_or0 = 0;
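// Wires for the final partial-product row 31 (a[i] & b[31]).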
uint8_t u_arrmul32_and0_31 = 0;
uint8_t u_arrmul32_ha0_31_xor0 = 0;
uint8_t u_arrmul32_ha0_31_and0 = 0;
uint8_t u_arrmul32_and1_31 = 0;
uint8_t u_arrmul32_fa1_31_xor1 = 0;
uint8_t u_arrmul32_fa1_31_or0 = 0;
uint8_t u_arrmul32_and2_31 = 0;
uint8_t u_arrmul32_fa2_31_xor1 = 0;
uint8_t u_arrmul32_fa2_31_or0 = 0;
uint8_t u_arrmul32_and3_31 = 0;
uint8_t u_arrmul32_fa3_31_xor1 = 0;
uint8_t u_arrmul32_fa3_31_or0 = 0;
uint8_t u_arrmul32_and4_31 = 0;
uint8_t u_arrmul32_fa4_31_xor1 = 0;
uint8_t u_arrmul32_fa4_31_or0 = 0;
uint8_t u_arrmul32_and5_31 = 0;
uint8_t u_arrmul32_fa5_31_xor1 = 0;
uint8_t u_arrmul32_fa5_31_or0 = 0;
uint8_t u_arrmul32_and6_31 = 0;
uint8_t u_arrmul32_fa6_31_xor1 = 0;
uint8_t u_arrmul32_fa6_31_or0 = 0;
uint8_t u_arrmul32_and7_31 = 0;
uint8_t u_arrmul32_fa7_31_xor1 = 0;
uint8_t u_arrmul32_fa7_31_or0 = 0;
uint8_t u_arrmul32_and8_31 = 0;
uint8_t u_arrmul32_fa8_31_xor1 = 0;
uint8_t u_arrmul32_fa8_31_or0 = 0;
uint8_t u_arrmul32_and9_31 = 0;
uint8_t u_arrmul32_fa9_31_xor1 = 0;
uint8_t u_arrmul32_fa9_31_or0 = 0;
uint8_t u_arrmul32_and10_31 = 0;
uint8_t u_arrmul32_fa10_31_xor1 = 0;
uint8_t u_arrmul32_fa10_31_or0 = 0;
uint8_t u_arrmul32_and11_31 = 0;
uint8_t u_arrmul32_fa11_31_xor1 = 0;
uint8_t u_arrmul32_fa11_31_or0 = 0;
uint8_t u_arrmul32_and12_31 = 0;
uint8_t u_arrmul32_fa12_31_xor1 = 0;
uint8_t u_arrmul32_fa12_31_or0 = 0;
uint8_t u_arrmul32_and13_31 = 0;
uint8_t u_arrmul32_fa13_31_xor1 = 0;
uint8_t u_arrmul32_fa13_31_or0 = 0;
uint8_t u_arrmul32_and14_31 = 0;
uint8_t u_arrmul32_fa14_31_xor1 = 0;
uint8_t u_arrmul32_fa14_31_or0 = 0;
uint8_t u_arrmul32_and15_31 = 0;
uint8_t u_arrmul32_fa15_31_xor1 = 0;
uint8_t u_arrmul32_fa15_31_or0 = 0;
uint8_t u_arrmul32_and16_31 = 0;
uint8_t u_arrmul32_fa16_31_xor1 = 0;
uint8_t u_arrmul32_fa16_31_or0 = 0;
uint8_t u_arrmul32_and17_31 = 0;
uint8_t u_arrmul32_fa17_31_xor1 = 0;
uint8_t u_arrmul32_fa17_31_or0 = 0;
uint8_t u_arrmul32_and18_31 = 0;
uint8_t u_arrmul32_fa18_31_xor1 = 0;
uint8_t u_arrmul32_fa18_31_or0 = 0;
uint8_t u_arrmul32_and19_31 = 0;
uint8_t u_arrmul32_fa19_31_xor1 = 0;
uint8_t u_arrmul32_fa19_31_or0 = 0;
uint8_t u_arrmul32_and20_31 = 0;
uint8_t u_arrmul32_fa20_31_xor1 = 0;
uint8_t u_arrmul32_fa20_31_or0 = 0;
uint8_t u_arrmul32_and21_31 = 0;
uint8_t u_arrmul32_fa21_31_xor1 = 0;
uint8_t u_arrmul32_fa21_31_or0 = 0;
uint8_t u_arrmul32_and22_31 = 0;
uint8_t u_arrmul32_fa22_31_xor1 = 0;
uint8_t u_arrmul32_fa22_31_or0 = 0;
uint8_t u_arrmul32_and23_31 = 0;
uint8_t u_arrmul32_fa23_31_xor1 = 0;
uint8_t u_arrmul32_fa23_31_or0 = 0;
uint8_t u_arrmul32_and24_31 = 0;
uint8_t u_arrmul32_fa24_31_xor1 = 0;
uint8_t u_arrmul32_fa24_31_or0 = 0;
uint8_t u_arrmul32_and25_31 = 0;
uint8_t u_arrmul32_fa25_31_xor1 = 0;
uint8_t u_arrmul32_fa25_31_or0 = 0;
uint8_t u_arrmul32_and26_31 = 0;
uint8_t u_arrmul32_fa26_31_xor1 = 0;
uint8_t u_arrmul32_fa26_31_or0 = 0;
uint8_t u_arrmul32_and27_31 = 0;
uint8_t u_arrmul32_fa27_31_xor1 = 0;
uint8_t u_arrmul32_fa27_31_or0 = 0;
uint8_t u_arrmul32_and28_31 = 0;
uint8_t u_arrmul32_fa28_31_xor1 = 0;
uint8_t u_arrmul32_fa28_31_or0 = 0;
uint8_t u_arrmul32_and29_31 = 0;
uint8_t u_arrmul32_fa29_31_xor1 = 0;
uint8_t u_arrmul32_fa29_31_or0 = 0;
uint8_t u_arrmul32_and30_31 = 0;
uint8_t u_arrmul32_fa30_31_xor1 = 0;
uint8_t u_arrmul32_fa30_31_or0 = 0;
uint8_t u_arrmul32_and31_31 = 0;
uint8_t u_arrmul32_fa31_31_xor1 = 0;
uint8_t u_arrmul32_fa31_31_or0 = 0;
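
// Row 0: form the partial products a[i] & b[0]; and0_0 is product bit 0 and the remaining terms feed the row-1 adders.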
u_arrmul32_and0_0 = and_gate(((a >> 0) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and1_0 = and_gate(((a >> 1) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and2_0 = and_gate(((a >> 2) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and3_0 = and_gate(((a >> 3) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and4_0 = and_gate(((a >> 4) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and5_0 = and_gate(((a >> 5) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and6_0 = and_gate(((a >> 6) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and7_0 = and_gate(((a >> 7) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and8_0 = and_gate(((a >> 8) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and9_0 = and_gate(((a >> 9) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and10_0 = and_gate(((a >> 10) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and11_0 = and_gate(((a >> 11) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and12_0 = and_gate(((a >> 12) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and13_0 = and_gate(((a >> 13) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and14_0 = and_gate(((a >> 14) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and15_0 = and_gate(((a >> 15) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and16_0 = and_gate(((a >> 16) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and17_0 = and_gate(((a >> 17) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and18_0 = and_gate(((a >> 18) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and19_0 = and_gate(((a >> 19) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and20_0 = and_gate(((a >> 20) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and21_0 = and_gate(((a >> 21) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and22_0 = and_gate(((a >> 22) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and23_0 = and_gate(((a >> 23) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and24_0 = and_gate(((a >> 24) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and25_0 = and_gate(((a >> 25) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and26_0 = and_gate(((a >> 26) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and27_0 = and_gate(((a >> 27) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and28_0 = and_gate(((a >> 28) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and29_0 = and_gate(((a >> 29) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and30_0 = and_gate(((a >> 30) & 0x01), ((b >> 0) & 0x01));
u_arrmul32_and31_0 = and_gate(((a >> 31) & 0x01), ((b >> 0) & 0x01));
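// Row 1: a half adder opens the row, full adders ripple the carry along it, and a closing half adder (ha31_1) absorbs the top carry. Bit 0 of each ha()/fa() result is the sum, bit 1 the carry-out.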
u_arrmul32_and0_1 = and_gate(((a >> 0) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_ha0_1_xor0 = (ha(((u_arrmul32_and0_1 >> 0) & 0x01), ((u_arrmul32_and1_0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_1_and0 = (ha(((u_arrmul32_and0_1 >> 0) & 0x01), ((u_arrmul32_and1_0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_1 = and_gate(((a >> 1) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa1_1_xor1 = (fa(((u_arrmul32_and1_1 >> 0) & 0x01), ((u_arrmul32_and2_0 >> 0) & 0x01), ((u_arrmul32_ha0_1_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_1_or0 = (fa(((u_arrmul32_and1_1 >> 0) & 0x01), ((u_arrmul32_and2_0 >> 0) & 0x01), ((u_arrmul32_ha0_1_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_1 = and_gate(((a >> 2) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa2_1_xor1 = (fa(((u_arrmul32_and2_1 >> 0) & 0x01), ((u_arrmul32_and3_0 >> 0) & 0x01), ((u_arrmul32_fa1_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_1_or0 = (fa(((u_arrmul32_and2_1 >> 0) & 0x01), ((u_arrmul32_and3_0 >> 0) & 0x01), ((u_arrmul32_fa1_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_1 = and_gate(((a >> 3) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa3_1_xor1 = (fa(((u_arrmul32_and3_1 >> 0) & 0x01), ((u_arrmul32_and4_0 >> 0) & 0x01), ((u_arrmul32_fa2_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_1_or0 = (fa(((u_arrmul32_and3_1 >> 0) & 0x01), ((u_arrmul32_and4_0 >> 0) & 0x01), ((u_arrmul32_fa2_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_1 = and_gate(((a >> 4) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa4_1_xor1 = (fa(((u_arrmul32_and4_1 >> 0) & 0x01), ((u_arrmul32_and5_0 >> 0) & 0x01), ((u_arrmul32_fa3_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_1_or0 = (fa(((u_arrmul32_and4_1 >> 0) & 0x01), ((u_arrmul32_and5_0 >> 0) & 0x01), ((u_arrmul32_fa3_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_1 = and_gate(((a >> 5) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa5_1_xor1 = (fa(((u_arrmul32_and5_1 >> 0) & 0x01), ((u_arrmul32_and6_0 >> 0) & 0x01), ((u_arrmul32_fa4_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_1_or0 = (fa(((u_arrmul32_and5_1 >> 0) & 0x01), ((u_arrmul32_and6_0 >> 0) & 0x01), ((u_arrmul32_fa4_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_1 = and_gate(((a >> 6) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa6_1_xor1 = (fa(((u_arrmul32_and6_1 >> 0) & 0x01), ((u_arrmul32_and7_0 >> 0) & 0x01), ((u_arrmul32_fa5_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_1_or0 = (fa(((u_arrmul32_and6_1 >> 0) & 0x01), ((u_arrmul32_and7_0 >> 0) & 0x01), ((u_arrmul32_fa5_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_1 = and_gate(((a >> 7) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa7_1_xor1 = (fa(((u_arrmul32_and7_1 >> 0) & 0x01), ((u_arrmul32_and8_0 >> 0) & 0x01), ((u_arrmul32_fa6_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_1_or0 = (fa(((u_arrmul32_and7_1 >> 0) & 0x01), ((u_arrmul32_and8_0 >> 0) & 0x01), ((u_arrmul32_fa6_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_1 = and_gate(((a >> 8) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa8_1_xor1 = (fa(((u_arrmul32_and8_1 >> 0) & 0x01), ((u_arrmul32_and9_0 >> 0) & 0x01), ((u_arrmul32_fa7_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_1_or0 = (fa(((u_arrmul32_and8_1 >> 0) & 0x01), ((u_arrmul32_and9_0 >> 0) & 0x01), ((u_arrmul32_fa7_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_1 = and_gate(((a >> 9) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa9_1_xor1 = (fa(((u_arrmul32_and9_1 >> 0) & 0x01), ((u_arrmul32_and10_0 >> 0) & 0x01), ((u_arrmul32_fa8_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_1_or0 = (fa(((u_arrmul32_and9_1 >> 0) & 0x01), ((u_arrmul32_and10_0 >> 0) & 0x01), ((u_arrmul32_fa8_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_1 = and_gate(((a >> 10) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa10_1_xor1 = (fa(((u_arrmul32_and10_1 >> 0) & 0x01), ((u_arrmul32_and11_0 >> 0) & 0x01), ((u_arrmul32_fa9_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_1_or0 = (fa(((u_arrmul32_and10_1 >> 0) & 0x01), ((u_arrmul32_and11_0 >> 0) & 0x01), ((u_arrmul32_fa9_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_1 = and_gate(((a >> 11) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa11_1_xor1 = (fa(((u_arrmul32_and11_1 >> 0) & 0x01), ((u_arrmul32_and12_0 >> 0) & 0x01), ((u_arrmul32_fa10_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_1_or0 = (fa(((u_arrmul32_and11_1 >> 0) & 0x01), ((u_arrmul32_and12_0 >> 0) & 0x01), ((u_arrmul32_fa10_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_1 = and_gate(((a >> 12) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa12_1_xor1 = (fa(((u_arrmul32_and12_1 >> 0) & 0x01), ((u_arrmul32_and13_0 >> 0) & 0x01), ((u_arrmul32_fa11_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_1_or0 = (fa(((u_arrmul32_and12_1 >> 0) & 0x01), ((u_arrmul32_and13_0 >> 0) & 0x01), ((u_arrmul32_fa11_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_1 = and_gate(((a >> 13) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa13_1_xor1 = (fa(((u_arrmul32_and13_1 >> 0) & 0x01), ((u_arrmul32_and14_0 >> 0) & 0x01), ((u_arrmul32_fa12_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_1_or0 = (fa(((u_arrmul32_and13_1 >> 0) & 0x01), ((u_arrmul32_and14_0 >> 0) & 0x01), ((u_arrmul32_fa12_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_1 = and_gate(((a >> 14) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa14_1_xor1 = (fa(((u_arrmul32_and14_1 >> 0) & 0x01), ((u_arrmul32_and15_0 >> 0) & 0x01), ((u_arrmul32_fa13_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_1_or0 = (fa(((u_arrmul32_and14_1 >> 0) & 0x01), ((u_arrmul32_and15_0 >> 0) & 0x01), ((u_arrmul32_fa13_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_1 = and_gate(((a >> 15) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa15_1_xor1 = (fa(((u_arrmul32_and15_1 >> 0) & 0x01), ((u_arrmul32_and16_0 >> 0) & 0x01), ((u_arrmul32_fa14_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_1_or0 = (fa(((u_arrmul32_and15_1 >> 0) & 0x01), ((u_arrmul32_and16_0 >> 0) & 0x01), ((u_arrmul32_fa14_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_1 = and_gate(((a >> 16) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa16_1_xor1 = (fa(((u_arrmul32_and16_1 >> 0) & 0x01), ((u_arrmul32_and17_0 >> 0) & 0x01), ((u_arrmul32_fa15_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_1_or0 = (fa(((u_arrmul32_and16_1 >> 0) & 0x01), ((u_arrmul32_and17_0 >> 0) & 0x01), ((u_arrmul32_fa15_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_1 = and_gate(((a >> 17) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa17_1_xor1 = (fa(((u_arrmul32_and17_1 >> 0) & 0x01), ((u_arrmul32_and18_0 >> 0) & 0x01), ((u_arrmul32_fa16_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_1_or0 = (fa(((u_arrmul32_and17_1 >> 0) & 0x01), ((u_arrmul32_and18_0 >> 0) & 0x01), ((u_arrmul32_fa16_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_1 = and_gate(((a >> 18) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa18_1_xor1 = (fa(((u_arrmul32_and18_1 >> 0) & 0x01), ((u_arrmul32_and19_0 >> 0) & 0x01), ((u_arrmul32_fa17_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_1_or0 = (fa(((u_arrmul32_and18_1 >> 0) & 0x01), ((u_arrmul32_and19_0 >> 0) & 0x01), ((u_arrmul32_fa17_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_1 = and_gate(((a >> 19) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa19_1_xor1 = (fa(((u_arrmul32_and19_1 >> 0) & 0x01), ((u_arrmul32_and20_0 >> 0) & 0x01), ((u_arrmul32_fa18_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_1_or0 = (fa(((u_arrmul32_and19_1 >> 0) & 0x01), ((u_arrmul32_and20_0 >> 0) & 0x01), ((u_arrmul32_fa18_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_1 = and_gate(((a >> 20) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa20_1_xor1 = (fa(((u_arrmul32_and20_1 >> 0) & 0x01), ((u_arrmul32_and21_0 >> 0) & 0x01), ((u_arrmul32_fa19_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_1_or0 = (fa(((u_arrmul32_and20_1 >> 0) & 0x01), ((u_arrmul32_and21_0 >> 0) & 0x01), ((u_arrmul32_fa19_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_1 = and_gate(((a >> 21) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa21_1_xor1 = (fa(((u_arrmul32_and21_1 >> 0) & 0x01), ((u_arrmul32_and22_0 >> 0) & 0x01), ((u_arrmul32_fa20_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_1_or0 = (fa(((u_arrmul32_and21_1 >> 0) & 0x01), ((u_arrmul32_and22_0 >> 0) & 0x01), ((u_arrmul32_fa20_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_1 = and_gate(((a >> 22) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa22_1_xor1 = (fa(((u_arrmul32_and22_1 >> 0) & 0x01), ((u_arrmul32_and23_0 >> 0) & 0x01), ((u_arrmul32_fa21_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_1_or0 = (fa(((u_arrmul32_and22_1 >> 0) & 0x01), ((u_arrmul32_and23_0 >> 0) & 0x01), ((u_arrmul32_fa21_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_1 = and_gate(((a >> 23) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa23_1_xor1 = (fa(((u_arrmul32_and23_1 >> 0) & 0x01), ((u_arrmul32_and24_0 >> 0) & 0x01), ((u_arrmul32_fa22_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_1_or0 = (fa(((u_arrmul32_and23_1 >> 0) & 0x01), ((u_arrmul32_and24_0 >> 0) & 0x01), ((u_arrmul32_fa22_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_1 = and_gate(((a >> 24) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa24_1_xor1 = (fa(((u_arrmul32_and24_1 >> 0) & 0x01), ((u_arrmul32_and25_0 >> 0) & 0x01), ((u_arrmul32_fa23_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_1_or0 = (fa(((u_arrmul32_and24_1 >> 0) & 0x01), ((u_arrmul32_and25_0 >> 0) & 0x01), ((u_arrmul32_fa23_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_1 = and_gate(((a >> 25) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa25_1_xor1 = (fa(((u_arrmul32_and25_1 >> 0) & 0x01), ((u_arrmul32_and26_0 >> 0) & 0x01), ((u_arrmul32_fa24_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_1_or0 = (fa(((u_arrmul32_and25_1 >> 0) & 0x01), ((u_arrmul32_and26_0 >> 0) & 0x01), ((u_arrmul32_fa24_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_1 = and_gate(((a >> 26) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa26_1_xor1 = (fa(((u_arrmul32_and26_1 >> 0) & 0x01), ((u_arrmul32_and27_0 >> 0) & 0x01), ((u_arrmul32_fa25_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_1_or0 = (fa(((u_arrmul32_and26_1 >> 0) & 0x01), ((u_arrmul32_and27_0 >> 0) & 0x01), ((u_arrmul32_fa25_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_1 = and_gate(((a >> 27) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa27_1_xor1 = (fa(((u_arrmul32_and27_1 >> 0) & 0x01), ((u_arrmul32_and28_0 >> 0) & 0x01), ((u_arrmul32_fa26_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_1_or0 = (fa(((u_arrmul32_and27_1 >> 0) & 0x01), ((u_arrmul32_and28_0 >> 0) & 0x01), ((u_arrmul32_fa26_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_1 = and_gate(((a >> 28) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa28_1_xor1 = (fa(((u_arrmul32_and28_1 >> 0) & 0x01), ((u_arrmul32_and29_0 >> 0) & 0x01), ((u_arrmul32_fa27_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_1_or0 = (fa(((u_arrmul32_and28_1 >> 0) & 0x01), ((u_arrmul32_and29_0 >> 0) & 0x01), ((u_arrmul32_fa27_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_1 = and_gate(((a >> 29) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa29_1_xor1 = (fa(((u_arrmul32_and29_1 >> 0) & 0x01), ((u_arrmul32_and30_0 >> 0) & 0x01), ((u_arrmul32_fa28_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_1_or0 = (fa(((u_arrmul32_and29_1 >> 0) & 0x01), ((u_arrmul32_and30_0 >> 0) & 0x01), ((u_arrmul32_fa28_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_1 = and_gate(((a >> 30) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_fa30_1_xor1 = (fa(((u_arrmul32_and30_1 >> 0) & 0x01), ((u_arrmul32_and31_0 >> 0) & 0x01), ((u_arrmul32_fa29_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_1_or0 = (fa(((u_arrmul32_and30_1 >> 0) & 0x01), ((u_arrmul32_and31_0 >> 0) & 0x01), ((u_arrmul32_fa29_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_1 = and_gate(((a >> 31) & 0x01), ((b >> 1) & 0x01));
u_arrmul32_ha31_1_xor0 = (ha(((u_arrmul32_and31_1 >> 0) & 0x01), ((u_arrmul32_fa30_1_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha31_1_and0 = (ha(((u_arrmul32_and31_1 >> 0) & 0x01), ((u_arrmul32_fa30_1_or0 >> 0) & 0x01)) >> 1) & 0x01;
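// Row 2: the row-1 sums (fa<i>_1_xor1, ha31_1_xor0) and top carry (ha31_1_and0) are added to the a[i] & b[2] partial products.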
u_arrmul32_and0_2 = and_gate(((a >> 0) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_ha0_2_xor0 = (ha(((u_arrmul32_and0_2 >> 0) & 0x01), ((u_arrmul32_fa1_1_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_2_and0 = (ha(((u_arrmul32_and0_2 >> 0) & 0x01), ((u_arrmul32_fa1_1_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_2 = and_gate(((a >> 1) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa1_2_xor1 = (fa(((u_arrmul32_and1_2 >> 0) & 0x01), ((u_arrmul32_fa2_1_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_2_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_2_or0 = (fa(((u_arrmul32_and1_2 >> 0) & 0x01), ((u_arrmul32_fa2_1_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_2_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_2 = and_gate(((a >> 2) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa2_2_xor1 = (fa(((u_arrmul32_and2_2 >> 0) & 0x01), ((u_arrmul32_fa3_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_2_or0 = (fa(((u_arrmul32_and2_2 >> 0) & 0x01), ((u_arrmul32_fa3_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_2 = and_gate(((a >> 3) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa3_2_xor1 = (fa(((u_arrmul32_and3_2 >> 0) & 0x01), ((u_arrmul32_fa4_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_2_or0 = (fa(((u_arrmul32_and3_2 >> 0) & 0x01), ((u_arrmul32_fa4_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_2 = and_gate(((a >> 4) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa4_2_xor1 = (fa(((u_arrmul32_and4_2 >> 0) & 0x01), ((u_arrmul32_fa5_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_2_or0 = (fa(((u_arrmul32_and4_2 >> 0) & 0x01), ((u_arrmul32_fa5_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_2 = and_gate(((a >> 5) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa5_2_xor1 = (fa(((u_arrmul32_and5_2 >> 0) & 0x01), ((u_arrmul32_fa6_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_2_or0 = (fa(((u_arrmul32_and5_2 >> 0) & 0x01), ((u_arrmul32_fa6_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_2 = and_gate(((a >> 6) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa6_2_xor1 = (fa(((u_arrmul32_and6_2 >> 0) & 0x01), ((u_arrmul32_fa7_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_2_or0 = (fa(((u_arrmul32_and6_2 >> 0) & 0x01), ((u_arrmul32_fa7_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_2 = and_gate(((a >> 7) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa7_2_xor1 = (fa(((u_arrmul32_and7_2 >> 0) & 0x01), ((u_arrmul32_fa8_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_2_or0 = (fa(((u_arrmul32_and7_2 >> 0) & 0x01), ((u_arrmul32_fa8_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_2 = and_gate(((a >> 8) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa8_2_xor1 = (fa(((u_arrmul32_and8_2 >> 0) & 0x01), ((u_arrmul32_fa9_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_2_or0 = (fa(((u_arrmul32_and8_2 >> 0) & 0x01), ((u_arrmul32_fa9_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_2 = and_gate(((a >> 9) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa9_2_xor1 = (fa(((u_arrmul32_and9_2 >> 0) & 0x01), ((u_arrmul32_fa10_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_2_or0 = (fa(((u_arrmul32_and9_2 >> 0) & 0x01), ((u_arrmul32_fa10_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_2 = and_gate(((a >> 10) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa10_2_xor1 = (fa(((u_arrmul32_and10_2 >> 0) & 0x01), ((u_arrmul32_fa11_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_2_or0 = (fa(((u_arrmul32_and10_2 >> 0) & 0x01), ((u_arrmul32_fa11_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_2 = and_gate(((a >> 11) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa11_2_xor1 = (fa(((u_arrmul32_and11_2 >> 0) & 0x01), ((u_arrmul32_fa12_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_2_or0 = (fa(((u_arrmul32_and11_2 >> 0) & 0x01), ((u_arrmul32_fa12_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_2 = and_gate(((a >> 12) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa12_2_xor1 = (fa(((u_arrmul32_and12_2 >> 0) & 0x01), ((u_arrmul32_fa13_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_2_or0 = (fa(((u_arrmul32_and12_2 >> 0) & 0x01), ((u_arrmul32_fa13_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_2 = and_gate(((a >> 13) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa13_2_xor1 = (fa(((u_arrmul32_and13_2 >> 0) & 0x01), ((u_arrmul32_fa14_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_2_or0 = (fa(((u_arrmul32_and13_2 >> 0) & 0x01), ((u_arrmul32_fa14_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_2 = and_gate(((a >> 14) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa14_2_xor1 = (fa(((u_arrmul32_and14_2 >> 0) & 0x01), ((u_arrmul32_fa15_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_2_or0 = (fa(((u_arrmul32_and14_2 >> 0) & 0x01), ((u_arrmul32_fa15_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_2 = and_gate(((a >> 15) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa15_2_xor1 = (fa(((u_arrmul32_and15_2 >> 0) & 0x01), ((u_arrmul32_fa16_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_2_or0 = (fa(((u_arrmul32_and15_2 >> 0) & 0x01), ((u_arrmul32_fa16_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_2 = and_gate(((a >> 16) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa16_2_xor1 = (fa(((u_arrmul32_and16_2 >> 0) & 0x01), ((u_arrmul32_fa17_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_2_or0 = (fa(((u_arrmul32_and16_2 >> 0) & 0x01), ((u_arrmul32_fa17_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_2 = and_gate(((a >> 17) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa17_2_xor1 = (fa(((u_arrmul32_and17_2 >> 0) & 0x01), ((u_arrmul32_fa18_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_2_or0 = (fa(((u_arrmul32_and17_2 >> 0) & 0x01), ((u_arrmul32_fa18_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_2 = and_gate(((a >> 18) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa18_2_xor1 = (fa(((u_arrmul32_and18_2 >> 0) & 0x01), ((u_arrmul32_fa19_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_2_or0 = (fa(((u_arrmul32_and18_2 >> 0) & 0x01), ((u_arrmul32_fa19_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_2 = and_gate(((a >> 19) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa19_2_xor1 = (fa(((u_arrmul32_and19_2 >> 0) & 0x01), ((u_arrmul32_fa20_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_2_or0 = (fa(((u_arrmul32_and19_2 >> 0) & 0x01), ((u_arrmul32_fa20_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_2 = and_gate(((a >> 20) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa20_2_xor1 = (fa(((u_arrmul32_and20_2 >> 0) & 0x01), ((u_arrmul32_fa21_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_2_or0 = (fa(((u_arrmul32_and20_2 >> 0) & 0x01), ((u_arrmul32_fa21_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_2 = and_gate(((a >> 21) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa21_2_xor1 = (fa(((u_arrmul32_and21_2 >> 0) & 0x01), ((u_arrmul32_fa22_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_2_or0 = (fa(((u_arrmul32_and21_2 >> 0) & 0x01), ((u_arrmul32_fa22_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_2 = and_gate(((a >> 22) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa22_2_xor1 = (fa(((u_arrmul32_and22_2 >> 0) & 0x01), ((u_arrmul32_fa23_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_2_or0 = (fa(((u_arrmul32_and22_2 >> 0) & 0x01), ((u_arrmul32_fa23_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_2 = and_gate(((a >> 23) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa23_2_xor1 = (fa(((u_arrmul32_and23_2 >> 0) & 0x01), ((u_arrmul32_fa24_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_2_or0 = (fa(((u_arrmul32_and23_2 >> 0) & 0x01), ((u_arrmul32_fa24_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_2 = and_gate(((a >> 24) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa24_2_xor1 = (fa(((u_arrmul32_and24_2 >> 0) & 0x01), ((u_arrmul32_fa25_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_2_or0 = (fa(((u_arrmul32_and24_2 >> 0) & 0x01), ((u_arrmul32_fa25_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_2 = and_gate(((a >> 25) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa25_2_xor1 = (fa(((u_arrmul32_and25_2 >> 0) & 0x01), ((u_arrmul32_fa26_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_2_or0 = (fa(((u_arrmul32_and25_2 >> 0) & 0x01), ((u_arrmul32_fa26_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_2 = and_gate(((a >> 26) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa26_2_xor1 = (fa(((u_arrmul32_and26_2 >> 0) & 0x01), ((u_arrmul32_fa27_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_2_or0 = (fa(((u_arrmul32_and26_2 >> 0) & 0x01), ((u_arrmul32_fa27_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_2 = and_gate(((a >> 27) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa27_2_xor1 = (fa(((u_arrmul32_and27_2 >> 0) & 0x01), ((u_arrmul32_fa28_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_2_or0 = (fa(((u_arrmul32_and27_2 >> 0) & 0x01), ((u_arrmul32_fa28_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_2 = and_gate(((a >> 28) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa28_2_xor1 = (fa(((u_arrmul32_and28_2 >> 0) & 0x01), ((u_arrmul32_fa29_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_2_or0 = (fa(((u_arrmul32_and28_2 >> 0) & 0x01), ((u_arrmul32_fa29_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_2 = and_gate(((a >> 29) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa29_2_xor1 = (fa(((u_arrmul32_and29_2 >> 0) & 0x01), ((u_arrmul32_fa30_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_2_or0 = (fa(((u_arrmul32_and29_2 >> 0) & 0x01), ((u_arrmul32_fa30_1_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_2 = and_gate(((a >> 30) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa30_2_xor1 = (fa(((u_arrmul32_and30_2 >> 0) & 0x01), ((u_arrmul32_ha31_1_xor0 >> 0) & 0x01), ((u_arrmul32_fa29_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_2_or0 = (fa(((u_arrmul32_and30_2 >> 0) & 0x01), ((u_arrmul32_ha31_1_xor0 >> 0) & 0x01), ((u_arrmul32_fa29_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_2 = and_gate(((a >> 31) & 0x01), ((b >> 2) & 0x01));
u_arrmul32_fa31_2_xor1 = (fa(((u_arrmul32_and31_2 >> 0) & 0x01), ((u_arrmul32_ha31_1_and0 >> 0) & 0x01), ((u_arrmul32_fa30_2_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_2_or0 = (fa(((u_arrmul32_and31_2 >> 0) & 0x01), ((u_arrmul32_ha31_1_and0 >> 0) & 0x01), ((u_arrmul32_fa30_2_or0 >> 0) & 0x01)) >> 1) & 0x01;
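// Row 3: same ripple structure; the row-2 top carry (fa31_2_or0) enters the final full adder.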
u_arrmul32_and0_3 = and_gate(((a >> 0) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_ha0_3_xor0 = (ha(((u_arrmul32_and0_3 >> 0) & 0x01), ((u_arrmul32_fa1_2_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_3_and0 = (ha(((u_arrmul32_and0_3 >> 0) & 0x01), ((u_arrmul32_fa1_2_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_3 = and_gate(((a >> 1) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa1_3_xor1 = (fa(((u_arrmul32_and1_3 >> 0) & 0x01), ((u_arrmul32_fa2_2_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_3_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_3_or0 = (fa(((u_arrmul32_and1_3 >> 0) & 0x01), ((u_arrmul32_fa2_2_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_3_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_3 = and_gate(((a >> 2) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa2_3_xor1 = (fa(((u_arrmul32_and2_3 >> 0) & 0x01), ((u_arrmul32_fa3_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_3_or0 = (fa(((u_arrmul32_and2_3 >> 0) & 0x01), ((u_arrmul32_fa3_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_3 = and_gate(((a >> 3) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa3_3_xor1 = (fa(((u_arrmul32_and3_3 >> 0) & 0x01), ((u_arrmul32_fa4_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_3_or0 = (fa(((u_arrmul32_and3_3 >> 0) & 0x01), ((u_arrmul32_fa4_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_3 = and_gate(((a >> 4) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa4_3_xor1 = (fa(((u_arrmul32_and4_3 >> 0) & 0x01), ((u_arrmul32_fa5_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_3_or0 = (fa(((u_arrmul32_and4_3 >> 0) & 0x01), ((u_arrmul32_fa5_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_3 = and_gate(((a >> 5) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa5_3_xor1 = (fa(((u_arrmul32_and5_3 >> 0) & 0x01), ((u_arrmul32_fa6_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_3_or0 = (fa(((u_arrmul32_and5_3 >> 0) & 0x01), ((u_arrmul32_fa6_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_3 = and_gate(((a >> 6) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa6_3_xor1 = (fa(((u_arrmul32_and6_3 >> 0) & 0x01), ((u_arrmul32_fa7_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_3_or0 = (fa(((u_arrmul32_and6_3 >> 0) & 0x01), ((u_arrmul32_fa7_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_3 = and_gate(((a >> 7) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa7_3_xor1 = (fa(((u_arrmul32_and7_3 >> 0) & 0x01), ((u_arrmul32_fa8_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_3_or0 = (fa(((u_arrmul32_and7_3 >> 0) & 0x01), ((u_arrmul32_fa8_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_3 = and_gate(((a >> 8) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa8_3_xor1 = (fa(((u_arrmul32_and8_3 >> 0) & 0x01), ((u_arrmul32_fa9_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_3_or0 = (fa(((u_arrmul32_and8_3 >> 0) & 0x01), ((u_arrmul32_fa9_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_3 = and_gate(((a >> 9) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa9_3_xor1 = (fa(((u_arrmul32_and9_3 >> 0) & 0x01), ((u_arrmul32_fa10_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_3_or0 = (fa(((u_arrmul32_and9_3 >> 0) & 0x01), ((u_arrmul32_fa10_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_3 = and_gate(((a >> 10) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa10_3_xor1 = (fa(((u_arrmul32_and10_3 >> 0) & 0x01), ((u_arrmul32_fa11_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_3_or0 = (fa(((u_arrmul32_and10_3 >> 0) & 0x01), ((u_arrmul32_fa11_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_3 = and_gate(((a >> 11) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa11_3_xor1 = (fa(((u_arrmul32_and11_3 >> 0) & 0x01), ((u_arrmul32_fa12_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_3_or0 = (fa(((u_arrmul32_and11_3 >> 0) & 0x01), ((u_arrmul32_fa12_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_3 = and_gate(((a >> 12) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa12_3_xor1 = (fa(((u_arrmul32_and12_3 >> 0) & 0x01), ((u_arrmul32_fa13_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_3_or0 = (fa(((u_arrmul32_and12_3 >> 0) & 0x01), ((u_arrmul32_fa13_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_3 = and_gate(((a >> 13) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa13_3_xor1 = (fa(((u_arrmul32_and13_3 >> 0) & 0x01), ((u_arrmul32_fa14_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_3_or0 = (fa(((u_arrmul32_and13_3 >> 0) & 0x01), ((u_arrmul32_fa14_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_3 = and_gate(((a >> 14) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa14_3_xor1 = (fa(((u_arrmul32_and14_3 >> 0) & 0x01), ((u_arrmul32_fa15_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_3_or0 = (fa(((u_arrmul32_and14_3 >> 0) & 0x01), ((u_arrmul32_fa15_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_3 = and_gate(((a >> 15) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa15_3_xor1 = (fa(((u_arrmul32_and15_3 >> 0) & 0x01), ((u_arrmul32_fa16_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_3_or0 = (fa(((u_arrmul32_and15_3 >> 0) & 0x01), ((u_arrmul32_fa16_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_3 = and_gate(((a >> 16) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa16_3_xor1 = (fa(((u_arrmul32_and16_3 >> 0) & 0x01), ((u_arrmul32_fa17_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_3_or0 = (fa(((u_arrmul32_and16_3 >> 0) & 0x01), ((u_arrmul32_fa17_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_3 = and_gate(((a >> 17) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa17_3_xor1 = (fa(((u_arrmul32_and17_3 >> 0) & 0x01), ((u_arrmul32_fa18_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_3_or0 = (fa(((u_arrmul32_and17_3 >> 0) & 0x01), ((u_arrmul32_fa18_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_3 = and_gate(((a >> 18) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa18_3_xor1 = (fa(((u_arrmul32_and18_3 >> 0) & 0x01), ((u_arrmul32_fa19_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_3_or0 = (fa(((u_arrmul32_and18_3 >> 0) & 0x01), ((u_arrmul32_fa19_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_3 = and_gate(((a >> 19) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa19_3_xor1 = (fa(((u_arrmul32_and19_3 >> 0) & 0x01), ((u_arrmul32_fa20_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_3_or0 = (fa(((u_arrmul32_and19_3 >> 0) & 0x01), ((u_arrmul32_fa20_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_3 = and_gate(((a >> 20) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa20_3_xor1 = (fa(((u_arrmul32_and20_3 >> 0) & 0x01), ((u_arrmul32_fa21_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_3_or0 = (fa(((u_arrmul32_and20_3 >> 0) & 0x01), ((u_arrmul32_fa21_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_3 = and_gate(((a >> 21) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa21_3_xor1 = (fa(((u_arrmul32_and21_3 >> 0) & 0x01), ((u_arrmul32_fa22_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_3_or0 = (fa(((u_arrmul32_and21_3 >> 0) & 0x01), ((u_arrmul32_fa22_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_3 = and_gate(((a >> 22) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa22_3_xor1 = (fa(((u_arrmul32_and22_3 >> 0) & 0x01), ((u_arrmul32_fa23_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_3_or0 = (fa(((u_arrmul32_and22_3 >> 0) & 0x01), ((u_arrmul32_fa23_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_3 = and_gate(((a >> 23) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa23_3_xor1 = (fa(((u_arrmul32_and23_3 >> 0) & 0x01), ((u_arrmul32_fa24_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_3_or0 = (fa(((u_arrmul32_and23_3 >> 0) & 0x01), ((u_arrmul32_fa24_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_3 = and_gate(((a >> 24) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa24_3_xor1 = (fa(((u_arrmul32_and24_3 >> 0) & 0x01), ((u_arrmul32_fa25_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_3_or0 = (fa(((u_arrmul32_and24_3 >> 0) & 0x01), ((u_arrmul32_fa25_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_3 = and_gate(((a >> 25) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa25_3_xor1 = (fa(((u_arrmul32_and25_3 >> 0) & 0x01), ((u_arrmul32_fa26_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_3_or0 = (fa(((u_arrmul32_and25_3 >> 0) & 0x01), ((u_arrmul32_fa26_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_3 = and_gate(((a >> 26) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa26_3_xor1 = (fa(((u_arrmul32_and26_3 >> 0) & 0x01), ((u_arrmul32_fa27_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_3_or0 = (fa(((u_arrmul32_and26_3 >> 0) & 0x01), ((u_arrmul32_fa27_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_3 = and_gate(((a >> 27) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa27_3_xor1 = (fa(((u_arrmul32_and27_3 >> 0) & 0x01), ((u_arrmul32_fa28_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_3_or0 = (fa(((u_arrmul32_and27_3 >> 0) & 0x01), ((u_arrmul32_fa28_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_3 = and_gate(((a >> 28) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa28_3_xor1 = (fa(((u_arrmul32_and28_3 >> 0) & 0x01), ((u_arrmul32_fa29_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_3_or0 = (fa(((u_arrmul32_and28_3 >> 0) & 0x01), ((u_arrmul32_fa29_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_3 = and_gate(((a >> 29) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa29_3_xor1 = (fa(((u_arrmul32_and29_3 >> 0) & 0x01), ((u_arrmul32_fa30_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_3_or0 = (fa(((u_arrmul32_and29_3 >> 0) & 0x01), ((u_arrmul32_fa30_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_3 = and_gate(((a >> 30) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa30_3_xor1 = (fa(((u_arrmul32_and30_3 >> 0) & 0x01), ((u_arrmul32_fa31_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_3_or0 = (fa(((u_arrmul32_and30_3 >> 0) & 0x01), ((u_arrmul32_fa31_2_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_3 = and_gate(((a >> 31) & 0x01), ((b >> 3) & 0x01));
u_arrmul32_fa31_3_xor1 = (fa(((u_arrmul32_and31_3 >> 0) & 0x01), ((u_arrmul32_fa31_2_or0 >> 0) & 0x01), ((u_arrmul32_fa30_3_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_3_or0 = (fa(((u_arrmul32_and31_3 >> 0) & 0x01), ((u_arrmul32_fa31_2_or0 >> 0) & 0x01), ((u_arrmul32_fa30_3_or0 >> 0) & 0x01)) >> 1) & 0x01;
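// Row 4: identical structure, consuming the row-3 sums and carries.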
u_arrmul32_and0_4 = and_gate(((a >> 0) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_ha0_4_xor0 = (ha(((u_arrmul32_and0_4 >> 0) & 0x01), ((u_arrmul32_fa1_3_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_4_and0 = (ha(((u_arrmul32_and0_4 >> 0) & 0x01), ((u_arrmul32_fa1_3_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_4 = and_gate(((a >> 1) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa1_4_xor1 = (fa(((u_arrmul32_and1_4 >> 0) & 0x01), ((u_arrmul32_fa2_3_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_4_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_4_or0 = (fa(((u_arrmul32_and1_4 >> 0) & 0x01), ((u_arrmul32_fa2_3_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_4_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_4 = and_gate(((a >> 2) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa2_4_xor1 = (fa(((u_arrmul32_and2_4 >> 0) & 0x01), ((u_arrmul32_fa3_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_4_or0 = (fa(((u_arrmul32_and2_4 >> 0) & 0x01), ((u_arrmul32_fa3_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_4 = and_gate(((a >> 3) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa3_4_xor1 = (fa(((u_arrmul32_and3_4 >> 0) & 0x01), ((u_arrmul32_fa4_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_4_or0 = (fa(((u_arrmul32_and3_4 >> 0) & 0x01), ((u_arrmul32_fa4_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_4 = and_gate(((a >> 4) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa4_4_xor1 = (fa(((u_arrmul32_and4_4 >> 0) & 0x01), ((u_arrmul32_fa5_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_4_or0 = (fa(((u_arrmul32_and4_4 >> 0) & 0x01), ((u_arrmul32_fa5_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_4 = and_gate(((a >> 5) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa5_4_xor1 = (fa(((u_arrmul32_and5_4 >> 0) & 0x01), ((u_arrmul32_fa6_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_4_or0 = (fa(((u_arrmul32_and5_4 >> 0) & 0x01), ((u_arrmul32_fa6_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_4 = and_gate(((a >> 6) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa6_4_xor1 = (fa(((u_arrmul32_and6_4 >> 0) & 0x01), ((u_arrmul32_fa7_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_4_or0 = (fa(((u_arrmul32_and6_4 >> 0) & 0x01), ((u_arrmul32_fa7_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_4 = and_gate(((a >> 7) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa7_4_xor1 = (fa(((u_arrmul32_and7_4 >> 0) & 0x01), ((u_arrmul32_fa8_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_4_or0 = (fa(((u_arrmul32_and7_4 >> 0) & 0x01), ((u_arrmul32_fa8_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_4 = and_gate(((a >> 8) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa8_4_xor1 = (fa(((u_arrmul32_and8_4 >> 0) & 0x01), ((u_arrmul32_fa9_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_4_or0 = (fa(((u_arrmul32_and8_4 >> 0) & 0x01), ((u_arrmul32_fa9_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_4 = and_gate(((a >> 9) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa9_4_xor1 = (fa(((u_arrmul32_and9_4 >> 0) & 0x01), ((u_arrmul32_fa10_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_4_or0 = (fa(((u_arrmul32_and9_4 >> 0) & 0x01), ((u_arrmul32_fa10_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_4 = and_gate(((a >> 10) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa10_4_xor1 = (fa(((u_arrmul32_and10_4 >> 0) & 0x01), ((u_arrmul32_fa11_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_4_or0 = (fa(((u_arrmul32_and10_4 >> 0) & 0x01), ((u_arrmul32_fa11_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_4 = and_gate(((a >> 11) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa11_4_xor1 = (fa(((u_arrmul32_and11_4 >> 0) & 0x01), ((u_arrmul32_fa12_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_4_or0 = (fa(((u_arrmul32_and11_4 >> 0) & 0x01), ((u_arrmul32_fa12_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_4 = and_gate(((a >> 12) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa12_4_xor1 = (fa(((u_arrmul32_and12_4 >> 0) & 0x01), ((u_arrmul32_fa13_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_4_or0 = (fa(((u_arrmul32_and12_4 >> 0) & 0x01), ((u_arrmul32_fa13_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_4 = and_gate(((a >> 13) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa13_4_xor1 = (fa(((u_arrmul32_and13_4 >> 0) & 0x01), ((u_arrmul32_fa14_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_4_or0 = (fa(((u_arrmul32_and13_4 >> 0) & 0x01), ((u_arrmul32_fa14_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_4 = and_gate(((a >> 14) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa14_4_xor1 = (fa(((u_arrmul32_and14_4 >> 0) & 0x01), ((u_arrmul32_fa15_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_4_or0 = (fa(((u_arrmul32_and14_4 >> 0) & 0x01), ((u_arrmul32_fa15_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_4 = and_gate(((a >> 15) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa15_4_xor1 = (fa(((u_arrmul32_and15_4 >> 0) & 0x01), ((u_arrmul32_fa16_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_4_or0 = (fa(((u_arrmul32_and15_4 >> 0) & 0x01), ((u_arrmul32_fa16_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_4 = and_gate(((a >> 16) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa16_4_xor1 = (fa(((u_arrmul32_and16_4 >> 0) & 0x01), ((u_arrmul32_fa17_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_4_or0 = (fa(((u_arrmul32_and16_4 >> 0) & 0x01), ((u_arrmul32_fa17_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_4 = and_gate(((a >> 17) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa17_4_xor1 = (fa(((u_arrmul32_and17_4 >> 0) & 0x01), ((u_arrmul32_fa18_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_4_or0 = (fa(((u_arrmul32_and17_4 >> 0) & 0x01), ((u_arrmul32_fa18_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_4 = and_gate(((a >> 18) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa18_4_xor1 = (fa(((u_arrmul32_and18_4 >> 0) & 0x01), ((u_arrmul32_fa19_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_4_or0 = (fa(((u_arrmul32_and18_4 >> 0) & 0x01), ((u_arrmul32_fa19_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_4 = and_gate(((a >> 19) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa19_4_xor1 = (fa(((u_arrmul32_and19_4 >> 0) & 0x01), ((u_arrmul32_fa20_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_4_or0 = (fa(((u_arrmul32_and19_4 >> 0) & 0x01), ((u_arrmul32_fa20_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_4 = and_gate(((a >> 20) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa20_4_xor1 = (fa(((u_arrmul32_and20_4 >> 0) & 0x01), ((u_arrmul32_fa21_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_4_or0 = (fa(((u_arrmul32_and20_4 >> 0) & 0x01), ((u_arrmul32_fa21_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_4 = and_gate(((a >> 21) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa21_4_xor1 = (fa(((u_arrmul32_and21_4 >> 0) & 0x01), ((u_arrmul32_fa22_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_4_or0 = (fa(((u_arrmul32_and21_4 >> 0) & 0x01), ((u_arrmul32_fa22_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_4 = and_gate(((a >> 22) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa22_4_xor1 = (fa(((u_arrmul32_and22_4 >> 0) & 0x01), ((u_arrmul32_fa23_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_4_or0 = (fa(((u_arrmul32_and22_4 >> 0) & 0x01), ((u_arrmul32_fa23_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_4 = and_gate(((a >> 23) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa23_4_xor1 = (fa(((u_arrmul32_and23_4 >> 0) & 0x01), ((u_arrmul32_fa24_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_4_or0 = (fa(((u_arrmul32_and23_4 >> 0) & 0x01), ((u_arrmul32_fa24_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_4 = and_gate(((a >> 24) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa24_4_xor1 = (fa(((u_arrmul32_and24_4 >> 0) & 0x01), ((u_arrmul32_fa25_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_4_or0 = (fa(((u_arrmul32_and24_4 >> 0) & 0x01), ((u_arrmul32_fa25_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_4 = and_gate(((a >> 25) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa25_4_xor1 = (fa(((u_arrmul32_and25_4 >> 0) & 0x01), ((u_arrmul32_fa26_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_4_or0 = (fa(((u_arrmul32_and25_4 >> 0) & 0x01), ((u_arrmul32_fa26_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_4 = and_gate(((a >> 26) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa26_4_xor1 = (fa(((u_arrmul32_and26_4 >> 0) & 0x01), ((u_arrmul32_fa27_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_4_or0 = (fa(((u_arrmul32_and26_4 >> 0) & 0x01), ((u_arrmul32_fa27_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_4 = and_gate(((a >> 27) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa27_4_xor1 = (fa(((u_arrmul32_and27_4 >> 0) & 0x01), ((u_arrmul32_fa28_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_4_or0 = (fa(((u_arrmul32_and27_4 >> 0) & 0x01), ((u_arrmul32_fa28_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_4 = and_gate(((a >> 28) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa28_4_xor1 = (fa(((u_arrmul32_and28_4 >> 0) & 0x01), ((u_arrmul32_fa29_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_4_or0 = (fa(((u_arrmul32_and28_4 >> 0) & 0x01), ((u_arrmul32_fa29_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_4 = and_gate(((a >> 29) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa29_4_xor1 = (fa(((u_arrmul32_and29_4 >> 0) & 0x01), ((u_arrmul32_fa30_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_4_or0 = (fa(((u_arrmul32_and29_4 >> 0) & 0x01), ((u_arrmul32_fa30_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_4 = and_gate(((a >> 30) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa30_4_xor1 = (fa(((u_arrmul32_and30_4 >> 0) & 0x01), ((u_arrmul32_fa31_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_4_or0 = (fa(((u_arrmul32_and30_4 >> 0) & 0x01), ((u_arrmul32_fa31_3_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_4 = and_gate(((a >> 31) & 0x01), ((b >> 4) & 0x01));
u_arrmul32_fa31_4_xor1 = (fa(((u_arrmul32_and31_4 >> 0) & 0x01), ((u_arrmul32_fa31_3_or0 >> 0) & 0x01), ((u_arrmul32_fa30_4_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_4_or0 = (fa(((u_arrmul32_and31_4 >> 0) & 0x01), ((u_arrmul32_fa31_3_or0 >> 0) & 0x01), ((u_arrmul32_fa30_4_or0 >> 0) & 0x01)) >> 1) & 0x01;
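// Row 5: the same ripple pattern repeats for this and the remaining rows.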
u_arrmul32_and0_5 = and_gate(((a >> 0) & 0x01), ((b >> 5) & 0x01));
u_arrmul32_ha0_5_xor0 = (ha(((u_arrmul32_and0_5 >> 0) & 0x01), ((u_arrmul32_fa1_4_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_5_and0 = (ha(((u_arrmul32_and0_5 >> 0) & 0x01), ((u_arrmul32_fa1_4_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_5 = and_gate(((a >> 1) & 0x01), ((b >> 5) & 0x01));
u_arrmul32_fa1_5_xor1 = (fa(((u_arrmul32_and1_5 >> 0) & 0x01), ((u_arrmul32_fa2_4_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_5_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_5_or0 = (fa(((u_arrmul32_and1_5 >> 0) & 0x01), ((u_arrmul32_fa2_4_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_5_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_5 = and_gate(((a >> 2) & 0x01), ((b >> 5) & 0x01));
u_arrmul32_fa2_5_xor1 = (fa(((u_arrmul32_and2_5 >> 0) & 0x01), ((u_arrmul32_fa3_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_5_or0 = (fa(((u_arrmul32_and2_5 >> 0) & 0x01), ((u_arrmul32_fa3_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_5 = and_gate(((a >> 3) & 0x01), ((b >> 5) & 0x01));
u_arrmul32_fa3_5_xor1 = (fa(((u_arrmul32_and3_5 >> 0) & 0x01), ((u_arrmul32_fa4_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_5_or0 = (fa(((u_arrmul32_and3_5 >> 0) & 0x01), ((u_arrmul32_fa4_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_5 = and_gate(((a >> 4) & 0x01), ((b >> 5) & 0x01));
u_arrmul32_fa4_5_xor1 = (fa(((u_arrmul32_and4_5 >> 0) & 0x01), ((u_arrmul32_fa5_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_5_or0 = (fa(((u_arrmul32_and4_5 >> 0) & 0x01), ((u_arrmul32_fa5_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_5 = and_gate(((a >> 5) & 0x01), ((b >> 5) & 0x01));
u_arrmul32_fa5_5_xor1 = (fa(((u_arrmul32_and5_5 >> 0) & 0x01), ((u_arrmul32_fa6_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_5_or0 = (fa(((u_arrmul32_and5_5 >> 0) & 0x01), ((u_arrmul32_fa6_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_5 = and_gate(((a >> 6) & 0x01), ((b >> 5) & 0x01));
u_arrmul32_fa6_5_xor1 = (fa(((u_arrmul32_and6_5 >> 0) & 0x01), ((u_arrmul32_fa7_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_5_or0 = (fa(((u_arrmul32_and6_5 >> 0) & 0x01), ((u_arrmul32_fa7_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_5 = and_gate(((a >> 7) & 0x01), ((b >> 5) & 0x01));
u_arrmul32_fa7_5_xor1 = (fa(((u_arrmul32_and7_5 >> 0) & 0x01), ((u_arrmul32_fa8_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_5_or0 = (fa(((u_arrmul32_and7_5 >> 0) & 0x01), ((u_arrmul32_fa8_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_5 = and_gate(((a >> 8) & 0x01), ((b >> 5) & 0x01));
u_arrmul32_fa8_5_xor1 = (fa(((u_arrmul32_and8_5 >> 0) & 0x01), ((u_arrmul32_fa9_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_5_or0 = (fa(((u_arrmul32_and8_5 >> 0) & 0x01), ((u_arrmul32_fa9_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_5 = and_gate(((a >> 9) & 0x01), ((b >> 5) & 0x01));
u_arrmul32_fa9_5_xor1 = (fa(((u_arrmul32_and9_5 >> 0) & 0x01), ((u_arrmul32_fa10_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_5_or0 = (fa(((u_arrmul32_and9_5 >> 0) & 0x01), ((u_arrmul32_fa10_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_5 = and_gate(((a >> 10) & 0x01), ((b >> 5) & 0x01));
u_arrmul32_fa10_5_xor1 = (fa(((u_arrmul32_and10_5 >> 0) & 0x01), ((u_arrmul32_fa11_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_5_or0 = (fa(((u_arrmul32_and10_5 >> 0) & 0x01), ((u_arrmul32_fa11_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and11_5 = and_gate(((a >> 11) & 0x01), ((b >> 5) & 0x01));
|
|
u_arrmul32_fa11_5_xor1 = (fa(((u_arrmul32_and11_5 >> 0) & 0x01), ((u_arrmul32_fa12_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa11_5_or0 = (fa(((u_arrmul32_and11_5 >> 0) & 0x01), ((u_arrmul32_fa12_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and12_5 = and_gate(((a >> 12) & 0x01), ((b >> 5) & 0x01));
|
|
u_arrmul32_fa12_5_xor1 = (fa(((u_arrmul32_and12_5 >> 0) & 0x01), ((u_arrmul32_fa13_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa12_5_or0 = (fa(((u_arrmul32_and12_5 >> 0) & 0x01), ((u_arrmul32_fa13_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and13_5 = and_gate(((a >> 13) & 0x01), ((b >> 5) & 0x01));
|
|
u_arrmul32_fa13_5_xor1 = (fa(((u_arrmul32_and13_5 >> 0) & 0x01), ((u_arrmul32_fa14_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa13_5_or0 = (fa(((u_arrmul32_and13_5 >> 0) & 0x01), ((u_arrmul32_fa14_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and14_5 = and_gate(((a >> 14) & 0x01), ((b >> 5) & 0x01));
|
|
u_arrmul32_fa14_5_xor1 = (fa(((u_arrmul32_and14_5 >> 0) & 0x01), ((u_arrmul32_fa15_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa14_5_or0 = (fa(((u_arrmul32_and14_5 >> 0) & 0x01), ((u_arrmul32_fa15_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and15_5 = and_gate(((a >> 15) & 0x01), ((b >> 5) & 0x01));
|
|
u_arrmul32_fa15_5_xor1 = (fa(((u_arrmul32_and15_5 >> 0) & 0x01), ((u_arrmul32_fa16_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa15_5_or0 = (fa(((u_arrmul32_and15_5 >> 0) & 0x01), ((u_arrmul32_fa16_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and16_5 = and_gate(((a >> 16) & 0x01), ((b >> 5) & 0x01));
|
|
u_arrmul32_fa16_5_xor1 = (fa(((u_arrmul32_and16_5 >> 0) & 0x01), ((u_arrmul32_fa17_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa16_5_or0 = (fa(((u_arrmul32_and16_5 >> 0) & 0x01), ((u_arrmul32_fa17_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and17_5 = and_gate(((a >> 17) & 0x01), ((b >> 5) & 0x01));
|
|
u_arrmul32_fa17_5_xor1 = (fa(((u_arrmul32_and17_5 >> 0) & 0x01), ((u_arrmul32_fa18_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa17_5_or0 = (fa(((u_arrmul32_and17_5 >> 0) & 0x01), ((u_arrmul32_fa18_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and18_5 = and_gate(((a >> 18) & 0x01), ((b >> 5) & 0x01));
|
|
u_arrmul32_fa18_5_xor1 = (fa(((u_arrmul32_and18_5 >> 0) & 0x01), ((u_arrmul32_fa19_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa18_5_or0 = (fa(((u_arrmul32_and18_5 >> 0) & 0x01), ((u_arrmul32_fa19_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and19_5 = and_gate(((a >> 19) & 0x01), ((b >> 5) & 0x01));
|
|
u_arrmul32_fa19_5_xor1 = (fa(((u_arrmul32_and19_5 >> 0) & 0x01), ((u_arrmul32_fa20_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa19_5_or0 = (fa(((u_arrmul32_and19_5 >> 0) & 0x01), ((u_arrmul32_fa20_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and20_5 = and_gate(((a >> 20) & 0x01), ((b >> 5) & 0x01));
|
|
u_arrmul32_fa20_5_xor1 = (fa(((u_arrmul32_and20_5 >> 0) & 0x01), ((u_arrmul32_fa21_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa20_5_or0 = (fa(((u_arrmul32_and20_5 >> 0) & 0x01), ((u_arrmul32_fa21_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and21_5 = and_gate(((a >> 21) & 0x01), ((b >> 5) & 0x01));
|
|
u_arrmul32_fa21_5_xor1 = (fa(((u_arrmul32_and21_5 >> 0) & 0x01), ((u_arrmul32_fa22_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa21_5_or0 = (fa(((u_arrmul32_and21_5 >> 0) & 0x01), ((u_arrmul32_fa22_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and22_5 = and_gate(((a >> 22) & 0x01), ((b >> 5) & 0x01));
|
|
u_arrmul32_fa22_5_xor1 = (fa(((u_arrmul32_and22_5 >> 0) & 0x01), ((u_arrmul32_fa23_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa22_5_or0 = (fa(((u_arrmul32_and22_5 >> 0) & 0x01), ((u_arrmul32_fa23_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and23_5 = and_gate(((a >> 23) & 0x01), ((b >> 5) & 0x01));
|
|
u_arrmul32_fa23_5_xor1 = (fa(((u_arrmul32_and23_5 >> 0) & 0x01), ((u_arrmul32_fa24_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa23_5_or0 = (fa(((u_arrmul32_and23_5 >> 0) & 0x01), ((u_arrmul32_fa24_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and24_5 = and_gate(((a >> 24) & 0x01), ((b >> 5) & 0x01));
|
|
u_arrmul32_fa24_5_xor1 = (fa(((u_arrmul32_and24_5 >> 0) & 0x01), ((u_arrmul32_fa25_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa24_5_or0 = (fa(((u_arrmul32_and24_5 >> 0) & 0x01), ((u_arrmul32_fa25_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and25_5 = and_gate(((a >> 25) & 0x01), ((b >> 5) & 0x01));
|
|
u_arrmul32_fa25_5_xor1 = (fa(((u_arrmul32_and25_5 >> 0) & 0x01), ((u_arrmul32_fa26_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa25_5_or0 = (fa(((u_arrmul32_and25_5 >> 0) & 0x01), ((u_arrmul32_fa26_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and26_5 = and_gate(((a >> 26) & 0x01), ((b >> 5) & 0x01));
|
|
u_arrmul32_fa26_5_xor1 = (fa(((u_arrmul32_and26_5 >> 0) & 0x01), ((u_arrmul32_fa27_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa26_5_or0 = (fa(((u_arrmul32_and26_5 >> 0) & 0x01), ((u_arrmul32_fa27_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and27_5 = and_gate(((a >> 27) & 0x01), ((b >> 5) & 0x01));
|
|
u_arrmul32_fa27_5_xor1 = (fa(((u_arrmul32_and27_5 >> 0) & 0x01), ((u_arrmul32_fa28_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa27_5_or0 = (fa(((u_arrmul32_and27_5 >> 0) & 0x01), ((u_arrmul32_fa28_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and28_5 = and_gate(((a >> 28) & 0x01), ((b >> 5) & 0x01));
|
|
u_arrmul32_fa28_5_xor1 = (fa(((u_arrmul32_and28_5 >> 0) & 0x01), ((u_arrmul32_fa29_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa28_5_or0 = (fa(((u_arrmul32_and28_5 >> 0) & 0x01), ((u_arrmul32_fa29_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and29_5 = and_gate(((a >> 29) & 0x01), ((b >> 5) & 0x01));
|
|
u_arrmul32_fa29_5_xor1 = (fa(((u_arrmul32_and29_5 >> 0) & 0x01), ((u_arrmul32_fa30_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa29_5_or0 = (fa(((u_arrmul32_and29_5 >> 0) & 0x01), ((u_arrmul32_fa30_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and30_5 = and_gate(((a >> 30) & 0x01), ((b >> 5) & 0x01));
|
|
u_arrmul32_fa30_5_xor1 = (fa(((u_arrmul32_and30_5 >> 0) & 0x01), ((u_arrmul32_fa31_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa30_5_or0 = (fa(((u_arrmul32_and30_5 >> 0) & 0x01), ((u_arrmul32_fa31_4_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and31_5 = and_gate(((a >> 31) & 0x01), ((b >> 5) & 0x01));
|
|
u_arrmul32_fa31_5_xor1 = (fa(((u_arrmul32_and31_5 >> 0) & 0x01), ((u_arrmul32_fa31_4_or0 >> 0) & 0x01), ((u_arrmul32_fa30_5_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa31_5_or0 = (fa(((u_arrmul32_and31_5 >> 0) & 0x01), ((u_arrmul32_fa31_4_or0 >> 0) & 0x01), ((u_arrmul32_fa30_5_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
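/* Row 6: identical carry-save structure, accumulating the a[i] AND b[6] terms onto the row-5 sums. */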
u_arrmul32_and0_6 = and_gate(((a >> 0) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_ha0_6_xor0 = (ha(((u_arrmul32_and0_6 >> 0) & 0x01), ((u_arrmul32_fa1_5_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_6_and0 = (ha(((u_arrmul32_and0_6 >> 0) & 0x01), ((u_arrmul32_fa1_5_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_6 = and_gate(((a >> 1) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa1_6_xor1 = (fa(((u_arrmul32_and1_6 >> 0) & 0x01), ((u_arrmul32_fa2_5_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_6_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_6_or0 = (fa(((u_arrmul32_and1_6 >> 0) & 0x01), ((u_arrmul32_fa2_5_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_6_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_6 = and_gate(((a >> 2) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa2_6_xor1 = (fa(((u_arrmul32_and2_6 >> 0) & 0x01), ((u_arrmul32_fa3_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_6_or0 = (fa(((u_arrmul32_and2_6 >> 0) & 0x01), ((u_arrmul32_fa3_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_6 = and_gate(((a >> 3) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa3_6_xor1 = (fa(((u_arrmul32_and3_6 >> 0) & 0x01), ((u_arrmul32_fa4_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_6_or0 = (fa(((u_arrmul32_and3_6 >> 0) & 0x01), ((u_arrmul32_fa4_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_6 = and_gate(((a >> 4) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa4_6_xor1 = (fa(((u_arrmul32_and4_6 >> 0) & 0x01), ((u_arrmul32_fa5_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_6_or0 = (fa(((u_arrmul32_and4_6 >> 0) & 0x01), ((u_arrmul32_fa5_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_6 = and_gate(((a >> 5) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa5_6_xor1 = (fa(((u_arrmul32_and5_6 >> 0) & 0x01), ((u_arrmul32_fa6_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_6_or0 = (fa(((u_arrmul32_and5_6 >> 0) & 0x01), ((u_arrmul32_fa6_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_6 = and_gate(((a >> 6) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa6_6_xor1 = (fa(((u_arrmul32_and6_6 >> 0) & 0x01), ((u_arrmul32_fa7_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_6_or0 = (fa(((u_arrmul32_and6_6 >> 0) & 0x01), ((u_arrmul32_fa7_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_6 = and_gate(((a >> 7) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa7_6_xor1 = (fa(((u_arrmul32_and7_6 >> 0) & 0x01), ((u_arrmul32_fa8_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_6_or0 = (fa(((u_arrmul32_and7_6 >> 0) & 0x01), ((u_arrmul32_fa8_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_6 = and_gate(((a >> 8) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa8_6_xor1 = (fa(((u_arrmul32_and8_6 >> 0) & 0x01), ((u_arrmul32_fa9_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_6_or0 = (fa(((u_arrmul32_and8_6 >> 0) & 0x01), ((u_arrmul32_fa9_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_6 = and_gate(((a >> 9) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa9_6_xor1 = (fa(((u_arrmul32_and9_6 >> 0) & 0x01), ((u_arrmul32_fa10_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_6_or0 = (fa(((u_arrmul32_and9_6 >> 0) & 0x01), ((u_arrmul32_fa10_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_6 = and_gate(((a >> 10) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa10_6_xor1 = (fa(((u_arrmul32_and10_6 >> 0) & 0x01), ((u_arrmul32_fa11_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_6_or0 = (fa(((u_arrmul32_and10_6 >> 0) & 0x01), ((u_arrmul32_fa11_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_6 = and_gate(((a >> 11) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa11_6_xor1 = (fa(((u_arrmul32_and11_6 >> 0) & 0x01), ((u_arrmul32_fa12_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_6_or0 = (fa(((u_arrmul32_and11_6 >> 0) & 0x01), ((u_arrmul32_fa12_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_6 = and_gate(((a >> 12) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa12_6_xor1 = (fa(((u_arrmul32_and12_6 >> 0) & 0x01), ((u_arrmul32_fa13_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_6_or0 = (fa(((u_arrmul32_and12_6 >> 0) & 0x01), ((u_arrmul32_fa13_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_6 = and_gate(((a >> 13) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa13_6_xor1 = (fa(((u_arrmul32_and13_6 >> 0) & 0x01), ((u_arrmul32_fa14_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_6_or0 = (fa(((u_arrmul32_and13_6 >> 0) & 0x01), ((u_arrmul32_fa14_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_6 = and_gate(((a >> 14) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa14_6_xor1 = (fa(((u_arrmul32_and14_6 >> 0) & 0x01), ((u_arrmul32_fa15_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_6_or0 = (fa(((u_arrmul32_and14_6 >> 0) & 0x01), ((u_arrmul32_fa15_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_6 = and_gate(((a >> 15) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa15_6_xor1 = (fa(((u_arrmul32_and15_6 >> 0) & 0x01), ((u_arrmul32_fa16_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_6_or0 = (fa(((u_arrmul32_and15_6 >> 0) & 0x01), ((u_arrmul32_fa16_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_6 = and_gate(((a >> 16) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa16_6_xor1 = (fa(((u_arrmul32_and16_6 >> 0) & 0x01), ((u_arrmul32_fa17_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_6_or0 = (fa(((u_arrmul32_and16_6 >> 0) & 0x01), ((u_arrmul32_fa17_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_6 = and_gate(((a >> 17) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa17_6_xor1 = (fa(((u_arrmul32_and17_6 >> 0) & 0x01), ((u_arrmul32_fa18_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_6_or0 = (fa(((u_arrmul32_and17_6 >> 0) & 0x01), ((u_arrmul32_fa18_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_6 = and_gate(((a >> 18) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa18_6_xor1 = (fa(((u_arrmul32_and18_6 >> 0) & 0x01), ((u_arrmul32_fa19_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_6_or0 = (fa(((u_arrmul32_and18_6 >> 0) & 0x01), ((u_arrmul32_fa19_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_6 = and_gate(((a >> 19) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa19_6_xor1 = (fa(((u_arrmul32_and19_6 >> 0) & 0x01), ((u_arrmul32_fa20_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_6_or0 = (fa(((u_arrmul32_and19_6 >> 0) & 0x01), ((u_arrmul32_fa20_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_6 = and_gate(((a >> 20) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa20_6_xor1 = (fa(((u_arrmul32_and20_6 >> 0) & 0x01), ((u_arrmul32_fa21_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_6_or0 = (fa(((u_arrmul32_and20_6 >> 0) & 0x01), ((u_arrmul32_fa21_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_6 = and_gate(((a >> 21) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa21_6_xor1 = (fa(((u_arrmul32_and21_6 >> 0) & 0x01), ((u_arrmul32_fa22_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_6_or0 = (fa(((u_arrmul32_and21_6 >> 0) & 0x01), ((u_arrmul32_fa22_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_6 = and_gate(((a >> 22) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa22_6_xor1 = (fa(((u_arrmul32_and22_6 >> 0) & 0x01), ((u_arrmul32_fa23_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_6_or0 = (fa(((u_arrmul32_and22_6 >> 0) & 0x01), ((u_arrmul32_fa23_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_6 = and_gate(((a >> 23) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa23_6_xor1 = (fa(((u_arrmul32_and23_6 >> 0) & 0x01), ((u_arrmul32_fa24_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_6_or0 = (fa(((u_arrmul32_and23_6 >> 0) & 0x01), ((u_arrmul32_fa24_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_6 = and_gate(((a >> 24) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa24_6_xor1 = (fa(((u_arrmul32_and24_6 >> 0) & 0x01), ((u_arrmul32_fa25_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_6_or0 = (fa(((u_arrmul32_and24_6 >> 0) & 0x01), ((u_arrmul32_fa25_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_6 = and_gate(((a >> 25) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa25_6_xor1 = (fa(((u_arrmul32_and25_6 >> 0) & 0x01), ((u_arrmul32_fa26_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_6_or0 = (fa(((u_arrmul32_and25_6 >> 0) & 0x01), ((u_arrmul32_fa26_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_6 = and_gate(((a >> 26) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa26_6_xor1 = (fa(((u_arrmul32_and26_6 >> 0) & 0x01), ((u_arrmul32_fa27_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_6_or0 = (fa(((u_arrmul32_and26_6 >> 0) & 0x01), ((u_arrmul32_fa27_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_6 = and_gate(((a >> 27) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa27_6_xor1 = (fa(((u_arrmul32_and27_6 >> 0) & 0x01), ((u_arrmul32_fa28_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_6_or0 = (fa(((u_arrmul32_and27_6 >> 0) & 0x01), ((u_arrmul32_fa28_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_6 = and_gate(((a >> 28) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa28_6_xor1 = (fa(((u_arrmul32_and28_6 >> 0) & 0x01), ((u_arrmul32_fa29_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_6_or0 = (fa(((u_arrmul32_and28_6 >> 0) & 0x01), ((u_arrmul32_fa29_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_6 = and_gate(((a >> 29) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa29_6_xor1 = (fa(((u_arrmul32_and29_6 >> 0) & 0x01), ((u_arrmul32_fa30_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_6_or0 = (fa(((u_arrmul32_and29_6 >> 0) & 0x01), ((u_arrmul32_fa30_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_6 = and_gate(((a >> 30) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa30_6_xor1 = (fa(((u_arrmul32_and30_6 >> 0) & 0x01), ((u_arrmul32_fa31_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_6_or0 = (fa(((u_arrmul32_and30_6 >> 0) & 0x01), ((u_arrmul32_fa31_5_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_6 = and_gate(((a >> 31) & 0x01), ((b >> 6) & 0x01));
u_arrmul32_fa31_6_xor1 = (fa(((u_arrmul32_and31_6 >> 0) & 0x01), ((u_arrmul32_fa31_5_or0 >> 0) & 0x01), ((u_arrmul32_fa30_6_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_6_or0 = (fa(((u_arrmul32_and31_6 >> 0) & 0x01), ((u_arrmul32_fa31_5_or0 >> 0) & 0x01), ((u_arrmul32_fa30_6_or0 >> 0) & 0x01)) >> 1) & 0x01;
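/* Row 7: a[i] AND b[7] terms; note the last full adder (fa31_7) also absorbs row 6's final carry, u_arrmul32_fa31_6_or0. */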
u_arrmul32_and0_7 = and_gate(((a >> 0) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_ha0_7_xor0 = (ha(((u_arrmul32_and0_7 >> 0) & 0x01), ((u_arrmul32_fa1_6_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_7_and0 = (ha(((u_arrmul32_and0_7 >> 0) & 0x01), ((u_arrmul32_fa1_6_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_7 = and_gate(((a >> 1) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa1_7_xor1 = (fa(((u_arrmul32_and1_7 >> 0) & 0x01), ((u_arrmul32_fa2_6_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_7_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_7_or0 = (fa(((u_arrmul32_and1_7 >> 0) & 0x01), ((u_arrmul32_fa2_6_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_7_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_7 = and_gate(((a >> 2) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa2_7_xor1 = (fa(((u_arrmul32_and2_7 >> 0) & 0x01), ((u_arrmul32_fa3_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_7_or0 = (fa(((u_arrmul32_and2_7 >> 0) & 0x01), ((u_arrmul32_fa3_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_7 = and_gate(((a >> 3) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa3_7_xor1 = (fa(((u_arrmul32_and3_7 >> 0) & 0x01), ((u_arrmul32_fa4_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_7_or0 = (fa(((u_arrmul32_and3_7 >> 0) & 0x01), ((u_arrmul32_fa4_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_7 = and_gate(((a >> 4) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa4_7_xor1 = (fa(((u_arrmul32_and4_7 >> 0) & 0x01), ((u_arrmul32_fa5_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_7_or0 = (fa(((u_arrmul32_and4_7 >> 0) & 0x01), ((u_arrmul32_fa5_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_7 = and_gate(((a >> 5) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa5_7_xor1 = (fa(((u_arrmul32_and5_7 >> 0) & 0x01), ((u_arrmul32_fa6_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_7_or0 = (fa(((u_arrmul32_and5_7 >> 0) & 0x01), ((u_arrmul32_fa6_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_7 = and_gate(((a >> 6) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa6_7_xor1 = (fa(((u_arrmul32_and6_7 >> 0) & 0x01), ((u_arrmul32_fa7_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_7_or0 = (fa(((u_arrmul32_and6_7 >> 0) & 0x01), ((u_arrmul32_fa7_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_7 = and_gate(((a >> 7) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa7_7_xor1 = (fa(((u_arrmul32_and7_7 >> 0) & 0x01), ((u_arrmul32_fa8_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_7_or0 = (fa(((u_arrmul32_and7_7 >> 0) & 0x01), ((u_arrmul32_fa8_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_7 = and_gate(((a >> 8) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa8_7_xor1 = (fa(((u_arrmul32_and8_7 >> 0) & 0x01), ((u_arrmul32_fa9_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_7_or0 = (fa(((u_arrmul32_and8_7 >> 0) & 0x01), ((u_arrmul32_fa9_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_7 = and_gate(((a >> 9) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa9_7_xor1 = (fa(((u_arrmul32_and9_7 >> 0) & 0x01), ((u_arrmul32_fa10_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_7_or0 = (fa(((u_arrmul32_and9_7 >> 0) & 0x01), ((u_arrmul32_fa10_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_7 = and_gate(((a >> 10) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa10_7_xor1 = (fa(((u_arrmul32_and10_7 >> 0) & 0x01), ((u_arrmul32_fa11_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_7_or0 = (fa(((u_arrmul32_and10_7 >> 0) & 0x01), ((u_arrmul32_fa11_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_7 = and_gate(((a >> 11) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa11_7_xor1 = (fa(((u_arrmul32_and11_7 >> 0) & 0x01), ((u_arrmul32_fa12_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_7_or0 = (fa(((u_arrmul32_and11_7 >> 0) & 0x01), ((u_arrmul32_fa12_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_7 = and_gate(((a >> 12) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa12_7_xor1 = (fa(((u_arrmul32_and12_7 >> 0) & 0x01), ((u_arrmul32_fa13_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_7_or0 = (fa(((u_arrmul32_and12_7 >> 0) & 0x01), ((u_arrmul32_fa13_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_7 = and_gate(((a >> 13) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa13_7_xor1 = (fa(((u_arrmul32_and13_7 >> 0) & 0x01), ((u_arrmul32_fa14_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_7_or0 = (fa(((u_arrmul32_and13_7 >> 0) & 0x01), ((u_arrmul32_fa14_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_7 = and_gate(((a >> 14) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa14_7_xor1 = (fa(((u_arrmul32_and14_7 >> 0) & 0x01), ((u_arrmul32_fa15_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_7_or0 = (fa(((u_arrmul32_and14_7 >> 0) & 0x01), ((u_arrmul32_fa15_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_7 = and_gate(((a >> 15) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa15_7_xor1 = (fa(((u_arrmul32_and15_7 >> 0) & 0x01), ((u_arrmul32_fa16_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_7_or0 = (fa(((u_arrmul32_and15_7 >> 0) & 0x01), ((u_arrmul32_fa16_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_7 = and_gate(((a >> 16) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa16_7_xor1 = (fa(((u_arrmul32_and16_7 >> 0) & 0x01), ((u_arrmul32_fa17_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_7_or0 = (fa(((u_arrmul32_and16_7 >> 0) & 0x01), ((u_arrmul32_fa17_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_7 = and_gate(((a >> 17) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa17_7_xor1 = (fa(((u_arrmul32_and17_7 >> 0) & 0x01), ((u_arrmul32_fa18_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_7_or0 = (fa(((u_arrmul32_and17_7 >> 0) & 0x01), ((u_arrmul32_fa18_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_7 = and_gate(((a >> 18) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa18_7_xor1 = (fa(((u_arrmul32_and18_7 >> 0) & 0x01), ((u_arrmul32_fa19_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_7_or0 = (fa(((u_arrmul32_and18_7 >> 0) & 0x01), ((u_arrmul32_fa19_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_7 = and_gate(((a >> 19) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa19_7_xor1 = (fa(((u_arrmul32_and19_7 >> 0) & 0x01), ((u_arrmul32_fa20_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_7_or0 = (fa(((u_arrmul32_and19_7 >> 0) & 0x01), ((u_arrmul32_fa20_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_7 = and_gate(((a >> 20) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa20_7_xor1 = (fa(((u_arrmul32_and20_7 >> 0) & 0x01), ((u_arrmul32_fa21_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_7_or0 = (fa(((u_arrmul32_and20_7 >> 0) & 0x01), ((u_arrmul32_fa21_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_7 = and_gate(((a >> 21) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa21_7_xor1 = (fa(((u_arrmul32_and21_7 >> 0) & 0x01), ((u_arrmul32_fa22_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_7_or0 = (fa(((u_arrmul32_and21_7 >> 0) & 0x01), ((u_arrmul32_fa22_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_7 = and_gate(((a >> 22) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa22_7_xor1 = (fa(((u_arrmul32_and22_7 >> 0) & 0x01), ((u_arrmul32_fa23_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_7_or0 = (fa(((u_arrmul32_and22_7 >> 0) & 0x01), ((u_arrmul32_fa23_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_7 = and_gate(((a >> 23) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa23_7_xor1 = (fa(((u_arrmul32_and23_7 >> 0) & 0x01), ((u_arrmul32_fa24_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_7_or0 = (fa(((u_arrmul32_and23_7 >> 0) & 0x01), ((u_arrmul32_fa24_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_7 = and_gate(((a >> 24) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa24_7_xor1 = (fa(((u_arrmul32_and24_7 >> 0) & 0x01), ((u_arrmul32_fa25_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_7_or0 = (fa(((u_arrmul32_and24_7 >> 0) & 0x01), ((u_arrmul32_fa25_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_7 = and_gate(((a >> 25) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa25_7_xor1 = (fa(((u_arrmul32_and25_7 >> 0) & 0x01), ((u_arrmul32_fa26_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_7_or0 = (fa(((u_arrmul32_and25_7 >> 0) & 0x01), ((u_arrmul32_fa26_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_7 = and_gate(((a >> 26) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa26_7_xor1 = (fa(((u_arrmul32_and26_7 >> 0) & 0x01), ((u_arrmul32_fa27_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_7_or0 = (fa(((u_arrmul32_and26_7 >> 0) & 0x01), ((u_arrmul32_fa27_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_7 = and_gate(((a >> 27) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa27_7_xor1 = (fa(((u_arrmul32_and27_7 >> 0) & 0x01), ((u_arrmul32_fa28_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_7_or0 = (fa(((u_arrmul32_and27_7 >> 0) & 0x01), ((u_arrmul32_fa28_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_7 = and_gate(((a >> 28) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa28_7_xor1 = (fa(((u_arrmul32_and28_7 >> 0) & 0x01), ((u_arrmul32_fa29_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_7_or0 = (fa(((u_arrmul32_and28_7 >> 0) & 0x01), ((u_arrmul32_fa29_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_7 = and_gate(((a >> 29) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa29_7_xor1 = (fa(((u_arrmul32_and29_7 >> 0) & 0x01), ((u_arrmul32_fa30_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_7_or0 = (fa(((u_arrmul32_and29_7 >> 0) & 0x01), ((u_arrmul32_fa30_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_7 = and_gate(((a >> 30) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa30_7_xor1 = (fa(((u_arrmul32_and30_7 >> 0) & 0x01), ((u_arrmul32_fa31_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_7_or0 = (fa(((u_arrmul32_and30_7 >> 0) & 0x01), ((u_arrmul32_fa31_6_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_7 = and_gate(((a >> 31) & 0x01), ((b >> 7) & 0x01));
u_arrmul32_fa31_7_xor1 = (fa(((u_arrmul32_and31_7 >> 0) & 0x01), ((u_arrmul32_fa31_6_or0 >> 0) & 0x01), ((u_arrmul32_fa30_7_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_7_or0 = (fa(((u_arrmul32_and31_7 >> 0) & 0x01), ((u_arrmul32_fa31_6_or0 >> 0) & 0x01), ((u_arrmul32_fa30_7_or0 >> 0) & 0x01)) >> 1) & 0x01;
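/* Row 8: a[i] AND b[8] terms, the same ripple of one half adder followed by 31 full adders. */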
u_arrmul32_and0_8 = and_gate(((a >> 0) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_ha0_8_xor0 = (ha(((u_arrmul32_and0_8 >> 0) & 0x01), ((u_arrmul32_fa1_7_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_8_and0 = (ha(((u_arrmul32_and0_8 >> 0) & 0x01), ((u_arrmul32_fa1_7_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_8 = and_gate(((a >> 1) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa1_8_xor1 = (fa(((u_arrmul32_and1_8 >> 0) & 0x01), ((u_arrmul32_fa2_7_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_8_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_8_or0 = (fa(((u_arrmul32_and1_8 >> 0) & 0x01), ((u_arrmul32_fa2_7_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_8_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_8 = and_gate(((a >> 2) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa2_8_xor1 = (fa(((u_arrmul32_and2_8 >> 0) & 0x01), ((u_arrmul32_fa3_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_8_or0 = (fa(((u_arrmul32_and2_8 >> 0) & 0x01), ((u_arrmul32_fa3_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_8 = and_gate(((a >> 3) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa3_8_xor1 = (fa(((u_arrmul32_and3_8 >> 0) & 0x01), ((u_arrmul32_fa4_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_8_or0 = (fa(((u_arrmul32_and3_8 >> 0) & 0x01), ((u_arrmul32_fa4_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_8 = and_gate(((a >> 4) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa4_8_xor1 = (fa(((u_arrmul32_and4_8 >> 0) & 0x01), ((u_arrmul32_fa5_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_8_or0 = (fa(((u_arrmul32_and4_8 >> 0) & 0x01), ((u_arrmul32_fa5_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_8 = and_gate(((a >> 5) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa5_8_xor1 = (fa(((u_arrmul32_and5_8 >> 0) & 0x01), ((u_arrmul32_fa6_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_8_or0 = (fa(((u_arrmul32_and5_8 >> 0) & 0x01), ((u_arrmul32_fa6_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_8 = and_gate(((a >> 6) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa6_8_xor1 = (fa(((u_arrmul32_and6_8 >> 0) & 0x01), ((u_arrmul32_fa7_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_8_or0 = (fa(((u_arrmul32_and6_8 >> 0) & 0x01), ((u_arrmul32_fa7_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_8 = and_gate(((a >> 7) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa7_8_xor1 = (fa(((u_arrmul32_and7_8 >> 0) & 0x01), ((u_arrmul32_fa8_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_8_or0 = (fa(((u_arrmul32_and7_8 >> 0) & 0x01), ((u_arrmul32_fa8_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_8 = and_gate(((a >> 8) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa8_8_xor1 = (fa(((u_arrmul32_and8_8 >> 0) & 0x01), ((u_arrmul32_fa9_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_8_or0 = (fa(((u_arrmul32_and8_8 >> 0) & 0x01), ((u_arrmul32_fa9_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_8 = and_gate(((a >> 9) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa9_8_xor1 = (fa(((u_arrmul32_and9_8 >> 0) & 0x01), ((u_arrmul32_fa10_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_8_or0 = (fa(((u_arrmul32_and9_8 >> 0) & 0x01), ((u_arrmul32_fa10_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_8 = and_gate(((a >> 10) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa10_8_xor1 = (fa(((u_arrmul32_and10_8 >> 0) & 0x01), ((u_arrmul32_fa11_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_8_or0 = (fa(((u_arrmul32_and10_8 >> 0) & 0x01), ((u_arrmul32_fa11_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_8 = and_gate(((a >> 11) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa11_8_xor1 = (fa(((u_arrmul32_and11_8 >> 0) & 0x01), ((u_arrmul32_fa12_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_8_or0 = (fa(((u_arrmul32_and11_8 >> 0) & 0x01), ((u_arrmul32_fa12_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_8 = and_gate(((a >> 12) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa12_8_xor1 = (fa(((u_arrmul32_and12_8 >> 0) & 0x01), ((u_arrmul32_fa13_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_8_or0 = (fa(((u_arrmul32_and12_8 >> 0) & 0x01), ((u_arrmul32_fa13_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_8 = and_gate(((a >> 13) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa13_8_xor1 = (fa(((u_arrmul32_and13_8 >> 0) & 0x01), ((u_arrmul32_fa14_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_8_or0 = (fa(((u_arrmul32_and13_8 >> 0) & 0x01), ((u_arrmul32_fa14_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_8 = and_gate(((a >> 14) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa14_8_xor1 = (fa(((u_arrmul32_and14_8 >> 0) & 0x01), ((u_arrmul32_fa15_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_8_or0 = (fa(((u_arrmul32_and14_8 >> 0) & 0x01), ((u_arrmul32_fa15_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_8 = and_gate(((a >> 15) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa15_8_xor1 = (fa(((u_arrmul32_and15_8 >> 0) & 0x01), ((u_arrmul32_fa16_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_8_or0 = (fa(((u_arrmul32_and15_8 >> 0) & 0x01), ((u_arrmul32_fa16_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_8 = and_gate(((a >> 16) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa16_8_xor1 = (fa(((u_arrmul32_and16_8 >> 0) & 0x01), ((u_arrmul32_fa17_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_8_or0 = (fa(((u_arrmul32_and16_8 >> 0) & 0x01), ((u_arrmul32_fa17_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_8 = and_gate(((a >> 17) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa17_8_xor1 = (fa(((u_arrmul32_and17_8 >> 0) & 0x01), ((u_arrmul32_fa18_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_8_or0 = (fa(((u_arrmul32_and17_8 >> 0) & 0x01), ((u_arrmul32_fa18_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_8 = and_gate(((a >> 18) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa18_8_xor1 = (fa(((u_arrmul32_and18_8 >> 0) & 0x01), ((u_arrmul32_fa19_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_8_or0 = (fa(((u_arrmul32_and18_8 >> 0) & 0x01), ((u_arrmul32_fa19_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_8 = and_gate(((a >> 19) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa19_8_xor1 = (fa(((u_arrmul32_and19_8 >> 0) & 0x01), ((u_arrmul32_fa20_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_8_or0 = (fa(((u_arrmul32_and19_8 >> 0) & 0x01), ((u_arrmul32_fa20_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_8 = and_gate(((a >> 20) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa20_8_xor1 = (fa(((u_arrmul32_and20_8 >> 0) & 0x01), ((u_arrmul32_fa21_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_8_or0 = (fa(((u_arrmul32_and20_8 >> 0) & 0x01), ((u_arrmul32_fa21_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_8 = and_gate(((a >> 21) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa21_8_xor1 = (fa(((u_arrmul32_and21_8 >> 0) & 0x01), ((u_arrmul32_fa22_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_8_or0 = (fa(((u_arrmul32_and21_8 >> 0) & 0x01), ((u_arrmul32_fa22_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_8 = and_gate(((a >> 22) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa22_8_xor1 = (fa(((u_arrmul32_and22_8 >> 0) & 0x01), ((u_arrmul32_fa23_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_8_or0 = (fa(((u_arrmul32_and22_8 >> 0) & 0x01), ((u_arrmul32_fa23_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_8 = and_gate(((a >> 23) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa23_8_xor1 = (fa(((u_arrmul32_and23_8 >> 0) & 0x01), ((u_arrmul32_fa24_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_8_or0 = (fa(((u_arrmul32_and23_8 >> 0) & 0x01), ((u_arrmul32_fa24_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_8 = and_gate(((a >> 24) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa24_8_xor1 = (fa(((u_arrmul32_and24_8 >> 0) & 0x01), ((u_arrmul32_fa25_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_8_or0 = (fa(((u_arrmul32_and24_8 >> 0) & 0x01), ((u_arrmul32_fa25_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_8 = and_gate(((a >> 25) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa25_8_xor1 = (fa(((u_arrmul32_and25_8 >> 0) & 0x01), ((u_arrmul32_fa26_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_8_or0 = (fa(((u_arrmul32_and25_8 >> 0) & 0x01), ((u_arrmul32_fa26_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_8 = and_gate(((a >> 26) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa26_8_xor1 = (fa(((u_arrmul32_and26_8 >> 0) & 0x01), ((u_arrmul32_fa27_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_8_or0 = (fa(((u_arrmul32_and26_8 >> 0) & 0x01), ((u_arrmul32_fa27_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_8 = and_gate(((a >> 27) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa27_8_xor1 = (fa(((u_arrmul32_and27_8 >> 0) & 0x01), ((u_arrmul32_fa28_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_8_or0 = (fa(((u_arrmul32_and27_8 >> 0) & 0x01), ((u_arrmul32_fa28_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_8 = and_gate(((a >> 28) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa28_8_xor1 = (fa(((u_arrmul32_and28_8 >> 0) & 0x01), ((u_arrmul32_fa29_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_8_or0 = (fa(((u_arrmul32_and28_8 >> 0) & 0x01), ((u_arrmul32_fa29_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_8 = and_gate(((a >> 29) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa29_8_xor1 = (fa(((u_arrmul32_and29_8 >> 0) & 0x01), ((u_arrmul32_fa30_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_8_or0 = (fa(((u_arrmul32_and29_8 >> 0) & 0x01), ((u_arrmul32_fa30_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_8 = and_gate(((a >> 30) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa30_8_xor1 = (fa(((u_arrmul32_and30_8 >> 0) & 0x01), ((u_arrmul32_fa31_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_8_or0 = (fa(((u_arrmul32_and30_8 >> 0) & 0x01), ((u_arrmul32_fa31_7_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_8 = and_gate(((a >> 31) & 0x01), ((b >> 8) & 0x01));
u_arrmul32_fa31_8_xor1 = (fa(((u_arrmul32_and31_8 >> 0) & 0x01), ((u_arrmul32_fa31_7_or0 >> 0) & 0x01), ((u_arrmul32_fa30_8_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_8_or0 = (fa(((u_arrmul32_and31_8 >> 0) & 0x01), ((u_arrmul32_fa31_7_or0 >> 0) & 0x01), ((u_arrmul32_fa30_8_or0 >> 0) & 0x01)) >> 1) & 0x01;
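/* Row 9: a[i] AND b[9] terms; the remaining rows, up to b[31], presumably continue this same pattern. */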
u_arrmul32_and0_9 = and_gate(((a >> 0) & 0x01), ((b >> 9) & 0x01));
|
|
u_arrmul32_ha0_9_xor0 = (ha(((u_arrmul32_and0_9 >> 0) & 0x01), ((u_arrmul32_fa1_8_xor1 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_ha0_9_and0 = (ha(((u_arrmul32_and0_9 >> 0) & 0x01), ((u_arrmul32_fa1_8_xor1 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and1_9 = and_gate(((a >> 1) & 0x01), ((b >> 9) & 0x01));
|
|
u_arrmul32_fa1_9_xor1 = (fa(((u_arrmul32_and1_9 >> 0) & 0x01), ((u_arrmul32_fa2_8_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_9_and0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa1_9_or0 = (fa(((u_arrmul32_and1_9 >> 0) & 0x01), ((u_arrmul32_fa2_8_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_9_and0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and2_9 = and_gate(((a >> 2) & 0x01), ((b >> 9) & 0x01));
|
|
u_arrmul32_fa2_9_xor1 = (fa(((u_arrmul32_and2_9 >> 0) & 0x01), ((u_arrmul32_fa3_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa2_9_or0 = (fa(((u_arrmul32_and2_9 >> 0) & 0x01), ((u_arrmul32_fa3_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and3_9 = and_gate(((a >> 3) & 0x01), ((b >> 9) & 0x01));
|
|
u_arrmul32_fa3_9_xor1 = (fa(((u_arrmul32_and3_9 >> 0) & 0x01), ((u_arrmul32_fa4_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa3_9_or0 = (fa(((u_arrmul32_and3_9 >> 0) & 0x01), ((u_arrmul32_fa4_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and4_9 = and_gate(((a >> 4) & 0x01), ((b >> 9) & 0x01));
|
|
u_arrmul32_fa4_9_xor1 = (fa(((u_arrmul32_and4_9 >> 0) & 0x01), ((u_arrmul32_fa5_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa4_9_or0 = (fa(((u_arrmul32_and4_9 >> 0) & 0x01), ((u_arrmul32_fa5_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and5_9 = and_gate(((a >> 5) & 0x01), ((b >> 9) & 0x01));
|
|
u_arrmul32_fa5_9_xor1 = (fa(((u_arrmul32_and5_9 >> 0) & 0x01), ((u_arrmul32_fa6_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa5_9_or0 = (fa(((u_arrmul32_and5_9 >> 0) & 0x01), ((u_arrmul32_fa6_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and6_9 = and_gate(((a >> 6) & 0x01), ((b >> 9) & 0x01));
|
|
u_arrmul32_fa6_9_xor1 = (fa(((u_arrmul32_and6_9 >> 0) & 0x01), ((u_arrmul32_fa7_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa6_9_or0 = (fa(((u_arrmul32_and6_9 >> 0) & 0x01), ((u_arrmul32_fa7_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and7_9 = and_gate(((a >> 7) & 0x01), ((b >> 9) & 0x01));
|
|
u_arrmul32_fa7_9_xor1 = (fa(((u_arrmul32_and7_9 >> 0) & 0x01), ((u_arrmul32_fa8_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa7_9_or0 = (fa(((u_arrmul32_and7_9 >> 0) & 0x01), ((u_arrmul32_fa8_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and8_9 = and_gate(((a >> 8) & 0x01), ((b >> 9) & 0x01));
|
|
u_arrmul32_fa8_9_xor1 = (fa(((u_arrmul32_and8_9 >> 0) & 0x01), ((u_arrmul32_fa9_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa8_9_or0 = (fa(((u_arrmul32_and8_9 >> 0) & 0x01), ((u_arrmul32_fa9_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and9_9 = and_gate(((a >> 9) & 0x01), ((b >> 9) & 0x01));
|
|
u_arrmul32_fa9_9_xor1 = (fa(((u_arrmul32_and9_9 >> 0) & 0x01), ((u_arrmul32_fa10_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa9_9_or0 = (fa(((u_arrmul32_and9_9 >> 0) & 0x01), ((u_arrmul32_fa10_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and10_9 = and_gate(((a >> 10) & 0x01), ((b >> 9) & 0x01));
|
|
u_arrmul32_fa10_9_xor1 = (fa(((u_arrmul32_and10_9 >> 0) & 0x01), ((u_arrmul32_fa11_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa10_9_or0 = (fa(((u_arrmul32_and10_9 >> 0) & 0x01), ((u_arrmul32_fa11_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and11_9 = and_gate(((a >> 11) & 0x01), ((b >> 9) & 0x01));
|
|
u_arrmul32_fa11_9_xor1 = (fa(((u_arrmul32_and11_9 >> 0) & 0x01), ((u_arrmul32_fa12_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa11_9_or0 = (fa(((u_arrmul32_and11_9 >> 0) & 0x01), ((u_arrmul32_fa12_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and12_9 = and_gate(((a >> 12) & 0x01), ((b >> 9) & 0x01));
|
|
u_arrmul32_fa12_9_xor1 = (fa(((u_arrmul32_and12_9 >> 0) & 0x01), ((u_arrmul32_fa13_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa12_9_or0 = (fa(((u_arrmul32_and12_9 >> 0) & 0x01), ((u_arrmul32_fa13_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and13_9 = and_gate(((a >> 13) & 0x01), ((b >> 9) & 0x01));
|
|
u_arrmul32_fa13_9_xor1 = (fa(((u_arrmul32_and13_9 >> 0) & 0x01), ((u_arrmul32_fa14_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa13_9_or0 = (fa(((u_arrmul32_and13_9 >> 0) & 0x01), ((u_arrmul32_fa14_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and14_9 = and_gate(((a >> 14) & 0x01), ((b >> 9) & 0x01));
|
|
u_arrmul32_fa14_9_xor1 = (fa(((u_arrmul32_and14_9 >> 0) & 0x01), ((u_arrmul32_fa15_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa14_9_or0 = (fa(((u_arrmul32_and14_9 >> 0) & 0x01), ((u_arrmul32_fa15_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and15_9 = and_gate(((a >> 15) & 0x01), ((b >> 9) & 0x01));
|
|
u_arrmul32_fa15_9_xor1 = (fa(((u_arrmul32_and15_9 >> 0) & 0x01), ((u_arrmul32_fa16_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa15_9_or0 = (fa(((u_arrmul32_and15_9 >> 0) & 0x01), ((u_arrmul32_fa16_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and16_9 = and_gate(((a >> 16) & 0x01), ((b >> 9) & 0x01));
|
|
u_arrmul32_fa16_9_xor1 = (fa(((u_arrmul32_and16_9 >> 0) & 0x01), ((u_arrmul32_fa17_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa16_9_or0 = (fa(((u_arrmul32_and16_9 >> 0) & 0x01), ((u_arrmul32_fa17_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and17_9 = and_gate(((a >> 17) & 0x01), ((b >> 9) & 0x01));
|
|
u_arrmul32_fa17_9_xor1 = (fa(((u_arrmul32_and17_9 >> 0) & 0x01), ((u_arrmul32_fa18_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa17_9_or0 = (fa(((u_arrmul32_and17_9 >> 0) & 0x01), ((u_arrmul32_fa18_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and18_9 = and_gate(((a >> 18) & 0x01), ((b >> 9) & 0x01));
|
|
u_arrmul32_fa18_9_xor1 = (fa(((u_arrmul32_and18_9 >> 0) & 0x01), ((u_arrmul32_fa19_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa18_9_or0 = (fa(((u_arrmul32_and18_9 >> 0) & 0x01), ((u_arrmul32_fa19_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_9 = and_gate(((a >> 19) & 0x01), ((b >> 9) & 0x01));
u_arrmul32_fa19_9_xor1 = (fa(((u_arrmul32_and19_9 >> 0) & 0x01), ((u_arrmul32_fa20_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_9_or0 = (fa(((u_arrmul32_and19_9 >> 0) & 0x01), ((u_arrmul32_fa20_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_9 = and_gate(((a >> 20) & 0x01), ((b >> 9) & 0x01));
u_arrmul32_fa20_9_xor1 = (fa(((u_arrmul32_and20_9 >> 0) & 0x01), ((u_arrmul32_fa21_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_9_or0 = (fa(((u_arrmul32_and20_9 >> 0) & 0x01), ((u_arrmul32_fa21_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_9 = and_gate(((a >> 21) & 0x01), ((b >> 9) & 0x01));
u_arrmul32_fa21_9_xor1 = (fa(((u_arrmul32_and21_9 >> 0) & 0x01), ((u_arrmul32_fa22_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_9_or0 = (fa(((u_arrmul32_and21_9 >> 0) & 0x01), ((u_arrmul32_fa22_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_9 = and_gate(((a >> 22) & 0x01), ((b >> 9) & 0x01));
u_arrmul32_fa22_9_xor1 = (fa(((u_arrmul32_and22_9 >> 0) & 0x01), ((u_arrmul32_fa23_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_9_or0 = (fa(((u_arrmul32_and22_9 >> 0) & 0x01), ((u_arrmul32_fa23_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_9 = and_gate(((a >> 23) & 0x01), ((b >> 9) & 0x01));
u_arrmul32_fa23_9_xor1 = (fa(((u_arrmul32_and23_9 >> 0) & 0x01), ((u_arrmul32_fa24_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_9_or0 = (fa(((u_arrmul32_and23_9 >> 0) & 0x01), ((u_arrmul32_fa24_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_9 = and_gate(((a >> 24) & 0x01), ((b >> 9) & 0x01));
u_arrmul32_fa24_9_xor1 = (fa(((u_arrmul32_and24_9 >> 0) & 0x01), ((u_arrmul32_fa25_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_9_or0 = (fa(((u_arrmul32_and24_9 >> 0) & 0x01), ((u_arrmul32_fa25_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_9 = and_gate(((a >> 25) & 0x01), ((b >> 9) & 0x01));
u_arrmul32_fa25_9_xor1 = (fa(((u_arrmul32_and25_9 >> 0) & 0x01), ((u_arrmul32_fa26_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_9_or0 = (fa(((u_arrmul32_and25_9 >> 0) & 0x01), ((u_arrmul32_fa26_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_9 = and_gate(((a >> 26) & 0x01), ((b >> 9) & 0x01));
u_arrmul32_fa26_9_xor1 = (fa(((u_arrmul32_and26_9 >> 0) & 0x01), ((u_arrmul32_fa27_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_9_or0 = (fa(((u_arrmul32_and26_9 >> 0) & 0x01), ((u_arrmul32_fa27_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_9 = and_gate(((a >> 27) & 0x01), ((b >> 9) & 0x01));
u_arrmul32_fa27_9_xor1 = (fa(((u_arrmul32_and27_9 >> 0) & 0x01), ((u_arrmul32_fa28_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_9_or0 = (fa(((u_arrmul32_and27_9 >> 0) & 0x01), ((u_arrmul32_fa28_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_9 = and_gate(((a >> 28) & 0x01), ((b >> 9) & 0x01));
u_arrmul32_fa28_9_xor1 = (fa(((u_arrmul32_and28_9 >> 0) & 0x01), ((u_arrmul32_fa29_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_9_or0 = (fa(((u_arrmul32_and28_9 >> 0) & 0x01), ((u_arrmul32_fa29_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_9 = and_gate(((a >> 29) & 0x01), ((b >> 9) & 0x01));
u_arrmul32_fa29_9_xor1 = (fa(((u_arrmul32_and29_9 >> 0) & 0x01), ((u_arrmul32_fa30_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_9_or0 = (fa(((u_arrmul32_and29_9 >> 0) & 0x01), ((u_arrmul32_fa30_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_9 = and_gate(((a >> 30) & 0x01), ((b >> 9) & 0x01));
u_arrmul32_fa30_9_xor1 = (fa(((u_arrmul32_and30_9 >> 0) & 0x01), ((u_arrmul32_fa31_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_9_or0 = (fa(((u_arrmul32_and30_9 >> 0) & 0x01), ((u_arrmul32_fa31_8_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_9 = and_gate(((a >> 31) & 0x01), ((b >> 9) & 0x01));
u_arrmul32_fa31_9_xor1 = (fa(((u_arrmul32_and31_9 >> 0) & 0x01), ((u_arrmul32_fa31_8_or0 >> 0) & 0x01), ((u_arrmul32_fa30_9_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_9_or0 = (fa(((u_arrmul32_and31_9 >> 0) & 0x01), ((u_arrmul32_fa31_8_or0 >> 0) & 0x01), ((u_arrmul32_fa30_9_or0 >> 0) & 0x01)) >> 1) & 0x01;
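
/* Row 10: each column ANDs a bit of a with b[10] and adds it to the result
   of row 9. Column 0 uses a half adder; columns 1..30 use full adders whose
   inputs are the column's AND term, the sum bit of the next-higher column of
   row 9, and the carry rippling in from the right; column 31 consumes row 9's
   final carry (u_arrmul32_fa31_9_or0) in place of a sum bit. */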
u_arrmul32_and0_10 = and_gate(((a >> 0) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_ha0_10_xor0 = (ha(((u_arrmul32_and0_10 >> 0) & 0x01), ((u_arrmul32_fa1_9_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_10_and0 = (ha(((u_arrmul32_and0_10 >> 0) & 0x01), ((u_arrmul32_fa1_9_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_10 = and_gate(((a >> 1) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa1_10_xor1 = (fa(((u_arrmul32_and1_10 >> 0) & 0x01), ((u_arrmul32_fa2_9_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_10_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_10_or0 = (fa(((u_arrmul32_and1_10 >> 0) & 0x01), ((u_arrmul32_fa2_9_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_10_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_10 = and_gate(((a >> 2) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa2_10_xor1 = (fa(((u_arrmul32_and2_10 >> 0) & 0x01), ((u_arrmul32_fa3_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_10_or0 = (fa(((u_arrmul32_and2_10 >> 0) & 0x01), ((u_arrmul32_fa3_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_10 = and_gate(((a >> 3) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa3_10_xor1 = (fa(((u_arrmul32_and3_10 >> 0) & 0x01), ((u_arrmul32_fa4_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_10_or0 = (fa(((u_arrmul32_and3_10 >> 0) & 0x01), ((u_arrmul32_fa4_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_10 = and_gate(((a >> 4) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa4_10_xor1 = (fa(((u_arrmul32_and4_10 >> 0) & 0x01), ((u_arrmul32_fa5_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_10_or0 = (fa(((u_arrmul32_and4_10 >> 0) & 0x01), ((u_arrmul32_fa5_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_10 = and_gate(((a >> 5) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa5_10_xor1 = (fa(((u_arrmul32_and5_10 >> 0) & 0x01), ((u_arrmul32_fa6_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_10_or0 = (fa(((u_arrmul32_and5_10 >> 0) & 0x01), ((u_arrmul32_fa6_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_10 = and_gate(((a >> 6) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa6_10_xor1 = (fa(((u_arrmul32_and6_10 >> 0) & 0x01), ((u_arrmul32_fa7_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_10_or0 = (fa(((u_arrmul32_and6_10 >> 0) & 0x01), ((u_arrmul32_fa7_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_10 = and_gate(((a >> 7) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa7_10_xor1 = (fa(((u_arrmul32_and7_10 >> 0) & 0x01), ((u_arrmul32_fa8_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_10_or0 = (fa(((u_arrmul32_and7_10 >> 0) & 0x01), ((u_arrmul32_fa8_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_10 = and_gate(((a >> 8) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa8_10_xor1 = (fa(((u_arrmul32_and8_10 >> 0) & 0x01), ((u_arrmul32_fa9_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_10_or0 = (fa(((u_arrmul32_and8_10 >> 0) & 0x01), ((u_arrmul32_fa9_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_10 = and_gate(((a >> 9) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa9_10_xor1 = (fa(((u_arrmul32_and9_10 >> 0) & 0x01), ((u_arrmul32_fa10_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_10_or0 = (fa(((u_arrmul32_and9_10 >> 0) & 0x01), ((u_arrmul32_fa10_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_10 = and_gate(((a >> 10) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa10_10_xor1 = (fa(((u_arrmul32_and10_10 >> 0) & 0x01), ((u_arrmul32_fa11_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_10_or0 = (fa(((u_arrmul32_and10_10 >> 0) & 0x01), ((u_arrmul32_fa11_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_10 = and_gate(((a >> 11) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa11_10_xor1 = (fa(((u_arrmul32_and11_10 >> 0) & 0x01), ((u_arrmul32_fa12_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_10_or0 = (fa(((u_arrmul32_and11_10 >> 0) & 0x01), ((u_arrmul32_fa12_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_10 = and_gate(((a >> 12) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa12_10_xor1 = (fa(((u_arrmul32_and12_10 >> 0) & 0x01), ((u_arrmul32_fa13_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_10_or0 = (fa(((u_arrmul32_and12_10 >> 0) & 0x01), ((u_arrmul32_fa13_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_10 = and_gate(((a >> 13) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa13_10_xor1 = (fa(((u_arrmul32_and13_10 >> 0) & 0x01), ((u_arrmul32_fa14_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_10_or0 = (fa(((u_arrmul32_and13_10 >> 0) & 0x01), ((u_arrmul32_fa14_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_10 = and_gate(((a >> 14) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa14_10_xor1 = (fa(((u_arrmul32_and14_10 >> 0) & 0x01), ((u_arrmul32_fa15_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_10_or0 = (fa(((u_arrmul32_and14_10 >> 0) & 0x01), ((u_arrmul32_fa15_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_10 = and_gate(((a >> 15) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa15_10_xor1 = (fa(((u_arrmul32_and15_10 >> 0) & 0x01), ((u_arrmul32_fa16_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_10_or0 = (fa(((u_arrmul32_and15_10 >> 0) & 0x01), ((u_arrmul32_fa16_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_10 = and_gate(((a >> 16) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa16_10_xor1 = (fa(((u_arrmul32_and16_10 >> 0) & 0x01), ((u_arrmul32_fa17_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_10_or0 = (fa(((u_arrmul32_and16_10 >> 0) & 0x01), ((u_arrmul32_fa17_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_10 = and_gate(((a >> 17) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa17_10_xor1 = (fa(((u_arrmul32_and17_10 >> 0) & 0x01), ((u_arrmul32_fa18_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_10_or0 = (fa(((u_arrmul32_and17_10 >> 0) & 0x01), ((u_arrmul32_fa18_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_10 = and_gate(((a >> 18) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa18_10_xor1 = (fa(((u_arrmul32_and18_10 >> 0) & 0x01), ((u_arrmul32_fa19_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_10_or0 = (fa(((u_arrmul32_and18_10 >> 0) & 0x01), ((u_arrmul32_fa19_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_10 = and_gate(((a >> 19) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa19_10_xor1 = (fa(((u_arrmul32_and19_10 >> 0) & 0x01), ((u_arrmul32_fa20_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_10_or0 = (fa(((u_arrmul32_and19_10 >> 0) & 0x01), ((u_arrmul32_fa20_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_10 = and_gate(((a >> 20) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa20_10_xor1 = (fa(((u_arrmul32_and20_10 >> 0) & 0x01), ((u_arrmul32_fa21_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_10_or0 = (fa(((u_arrmul32_and20_10 >> 0) & 0x01), ((u_arrmul32_fa21_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_10 = and_gate(((a >> 21) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa21_10_xor1 = (fa(((u_arrmul32_and21_10 >> 0) & 0x01), ((u_arrmul32_fa22_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_10_or0 = (fa(((u_arrmul32_and21_10 >> 0) & 0x01), ((u_arrmul32_fa22_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_10 = and_gate(((a >> 22) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa22_10_xor1 = (fa(((u_arrmul32_and22_10 >> 0) & 0x01), ((u_arrmul32_fa23_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_10_or0 = (fa(((u_arrmul32_and22_10 >> 0) & 0x01), ((u_arrmul32_fa23_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_10 = and_gate(((a >> 23) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa23_10_xor1 = (fa(((u_arrmul32_and23_10 >> 0) & 0x01), ((u_arrmul32_fa24_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_10_or0 = (fa(((u_arrmul32_and23_10 >> 0) & 0x01), ((u_arrmul32_fa24_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_10 = and_gate(((a >> 24) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa24_10_xor1 = (fa(((u_arrmul32_and24_10 >> 0) & 0x01), ((u_arrmul32_fa25_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_10_or0 = (fa(((u_arrmul32_and24_10 >> 0) & 0x01), ((u_arrmul32_fa25_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_10 = and_gate(((a >> 25) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa25_10_xor1 = (fa(((u_arrmul32_and25_10 >> 0) & 0x01), ((u_arrmul32_fa26_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_10_or0 = (fa(((u_arrmul32_and25_10 >> 0) & 0x01), ((u_arrmul32_fa26_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_10 = and_gate(((a >> 26) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa26_10_xor1 = (fa(((u_arrmul32_and26_10 >> 0) & 0x01), ((u_arrmul32_fa27_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_10_or0 = (fa(((u_arrmul32_and26_10 >> 0) & 0x01), ((u_arrmul32_fa27_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_10 = and_gate(((a >> 27) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa27_10_xor1 = (fa(((u_arrmul32_and27_10 >> 0) & 0x01), ((u_arrmul32_fa28_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_10_or0 = (fa(((u_arrmul32_and27_10 >> 0) & 0x01), ((u_arrmul32_fa28_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_10 = and_gate(((a >> 28) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa28_10_xor1 = (fa(((u_arrmul32_and28_10 >> 0) & 0x01), ((u_arrmul32_fa29_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_10_or0 = (fa(((u_arrmul32_and28_10 >> 0) & 0x01), ((u_arrmul32_fa29_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_10 = and_gate(((a >> 29) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa29_10_xor1 = (fa(((u_arrmul32_and29_10 >> 0) & 0x01), ((u_arrmul32_fa30_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_10_or0 = (fa(((u_arrmul32_and29_10 >> 0) & 0x01), ((u_arrmul32_fa30_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_10 = and_gate(((a >> 30) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa30_10_xor1 = (fa(((u_arrmul32_and30_10 >> 0) & 0x01), ((u_arrmul32_fa31_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_10_or0 = (fa(((u_arrmul32_and30_10 >> 0) & 0x01), ((u_arrmul32_fa31_9_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_10 = and_gate(((a >> 31) & 0x01), ((b >> 10) & 0x01));
u_arrmul32_fa31_10_xor1 = (fa(((u_arrmul32_and31_10 >> 0) & 0x01), ((u_arrmul32_fa31_9_or0 >> 0) & 0x01), ((u_arrmul32_fa30_10_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_10_or0 = (fa(((u_arrmul32_and31_10 >> 0) & 0x01), ((u_arrmul32_fa31_9_or0 >> 0) & 0x01), ((u_arrmul32_fa30_10_or0 >> 0) & 0x01)) >> 1) & 0x01;
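
/* Row 11: same ripple-carry structure for the a[i] & b[11] partial products;
   the half-adder sum of column 0 is a finished bit of the product, since no
   later row reads it back. */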
u_arrmul32_and0_11 = and_gate(((a >> 0) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_ha0_11_xor0 = (ha(((u_arrmul32_and0_11 >> 0) & 0x01), ((u_arrmul32_fa1_10_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_11_and0 = (ha(((u_arrmul32_and0_11 >> 0) & 0x01), ((u_arrmul32_fa1_10_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_11 = and_gate(((a >> 1) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa1_11_xor1 = (fa(((u_arrmul32_and1_11 >> 0) & 0x01), ((u_arrmul32_fa2_10_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_11_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_11_or0 = (fa(((u_arrmul32_and1_11 >> 0) & 0x01), ((u_arrmul32_fa2_10_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_11_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_11 = and_gate(((a >> 2) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa2_11_xor1 = (fa(((u_arrmul32_and2_11 >> 0) & 0x01), ((u_arrmul32_fa3_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_11_or0 = (fa(((u_arrmul32_and2_11 >> 0) & 0x01), ((u_arrmul32_fa3_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_11 = and_gate(((a >> 3) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa3_11_xor1 = (fa(((u_arrmul32_and3_11 >> 0) & 0x01), ((u_arrmul32_fa4_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_11_or0 = (fa(((u_arrmul32_and3_11 >> 0) & 0x01), ((u_arrmul32_fa4_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_11 = and_gate(((a >> 4) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa4_11_xor1 = (fa(((u_arrmul32_and4_11 >> 0) & 0x01), ((u_arrmul32_fa5_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_11_or0 = (fa(((u_arrmul32_and4_11 >> 0) & 0x01), ((u_arrmul32_fa5_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_11 = and_gate(((a >> 5) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa5_11_xor1 = (fa(((u_arrmul32_and5_11 >> 0) & 0x01), ((u_arrmul32_fa6_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_11_or0 = (fa(((u_arrmul32_and5_11 >> 0) & 0x01), ((u_arrmul32_fa6_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_11 = and_gate(((a >> 6) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa6_11_xor1 = (fa(((u_arrmul32_and6_11 >> 0) & 0x01), ((u_arrmul32_fa7_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_11_or0 = (fa(((u_arrmul32_and6_11 >> 0) & 0x01), ((u_arrmul32_fa7_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_11 = and_gate(((a >> 7) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa7_11_xor1 = (fa(((u_arrmul32_and7_11 >> 0) & 0x01), ((u_arrmul32_fa8_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_11_or0 = (fa(((u_arrmul32_and7_11 >> 0) & 0x01), ((u_arrmul32_fa8_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_11 = and_gate(((a >> 8) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa8_11_xor1 = (fa(((u_arrmul32_and8_11 >> 0) & 0x01), ((u_arrmul32_fa9_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_11_or0 = (fa(((u_arrmul32_and8_11 >> 0) & 0x01), ((u_arrmul32_fa9_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_11 = and_gate(((a >> 9) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa9_11_xor1 = (fa(((u_arrmul32_and9_11 >> 0) & 0x01), ((u_arrmul32_fa10_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_11_or0 = (fa(((u_arrmul32_and9_11 >> 0) & 0x01), ((u_arrmul32_fa10_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_11 = and_gate(((a >> 10) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa10_11_xor1 = (fa(((u_arrmul32_and10_11 >> 0) & 0x01), ((u_arrmul32_fa11_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_11_or0 = (fa(((u_arrmul32_and10_11 >> 0) & 0x01), ((u_arrmul32_fa11_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_11 = and_gate(((a >> 11) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa11_11_xor1 = (fa(((u_arrmul32_and11_11 >> 0) & 0x01), ((u_arrmul32_fa12_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_11_or0 = (fa(((u_arrmul32_and11_11 >> 0) & 0x01), ((u_arrmul32_fa12_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_11 = and_gate(((a >> 12) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa12_11_xor1 = (fa(((u_arrmul32_and12_11 >> 0) & 0x01), ((u_arrmul32_fa13_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_11_or0 = (fa(((u_arrmul32_and12_11 >> 0) & 0x01), ((u_arrmul32_fa13_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_11 = and_gate(((a >> 13) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa13_11_xor1 = (fa(((u_arrmul32_and13_11 >> 0) & 0x01), ((u_arrmul32_fa14_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_11_or0 = (fa(((u_arrmul32_and13_11 >> 0) & 0x01), ((u_arrmul32_fa14_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_11 = and_gate(((a >> 14) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa14_11_xor1 = (fa(((u_arrmul32_and14_11 >> 0) & 0x01), ((u_arrmul32_fa15_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_11_or0 = (fa(((u_arrmul32_and14_11 >> 0) & 0x01), ((u_arrmul32_fa15_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_11 = and_gate(((a >> 15) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa15_11_xor1 = (fa(((u_arrmul32_and15_11 >> 0) & 0x01), ((u_arrmul32_fa16_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_11_or0 = (fa(((u_arrmul32_and15_11 >> 0) & 0x01), ((u_arrmul32_fa16_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_11 = and_gate(((a >> 16) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa16_11_xor1 = (fa(((u_arrmul32_and16_11 >> 0) & 0x01), ((u_arrmul32_fa17_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_11_or0 = (fa(((u_arrmul32_and16_11 >> 0) & 0x01), ((u_arrmul32_fa17_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_11 = and_gate(((a >> 17) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa17_11_xor1 = (fa(((u_arrmul32_and17_11 >> 0) & 0x01), ((u_arrmul32_fa18_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_11_or0 = (fa(((u_arrmul32_and17_11 >> 0) & 0x01), ((u_arrmul32_fa18_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_11 = and_gate(((a >> 18) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa18_11_xor1 = (fa(((u_arrmul32_and18_11 >> 0) & 0x01), ((u_arrmul32_fa19_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_11_or0 = (fa(((u_arrmul32_and18_11 >> 0) & 0x01), ((u_arrmul32_fa19_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_11 = and_gate(((a >> 19) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa19_11_xor1 = (fa(((u_arrmul32_and19_11 >> 0) & 0x01), ((u_arrmul32_fa20_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_11_or0 = (fa(((u_arrmul32_and19_11 >> 0) & 0x01), ((u_arrmul32_fa20_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_11 = and_gate(((a >> 20) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa20_11_xor1 = (fa(((u_arrmul32_and20_11 >> 0) & 0x01), ((u_arrmul32_fa21_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_11_or0 = (fa(((u_arrmul32_and20_11 >> 0) & 0x01), ((u_arrmul32_fa21_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_11 = and_gate(((a >> 21) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa21_11_xor1 = (fa(((u_arrmul32_and21_11 >> 0) & 0x01), ((u_arrmul32_fa22_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_11_or0 = (fa(((u_arrmul32_and21_11 >> 0) & 0x01), ((u_arrmul32_fa22_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_11 = and_gate(((a >> 22) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa22_11_xor1 = (fa(((u_arrmul32_and22_11 >> 0) & 0x01), ((u_arrmul32_fa23_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_11_or0 = (fa(((u_arrmul32_and22_11 >> 0) & 0x01), ((u_arrmul32_fa23_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_11 = and_gate(((a >> 23) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa23_11_xor1 = (fa(((u_arrmul32_and23_11 >> 0) & 0x01), ((u_arrmul32_fa24_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_11_or0 = (fa(((u_arrmul32_and23_11 >> 0) & 0x01), ((u_arrmul32_fa24_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_11 = and_gate(((a >> 24) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa24_11_xor1 = (fa(((u_arrmul32_and24_11 >> 0) & 0x01), ((u_arrmul32_fa25_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_11_or0 = (fa(((u_arrmul32_and24_11 >> 0) & 0x01), ((u_arrmul32_fa25_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_11 = and_gate(((a >> 25) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa25_11_xor1 = (fa(((u_arrmul32_and25_11 >> 0) & 0x01), ((u_arrmul32_fa26_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_11_or0 = (fa(((u_arrmul32_and25_11 >> 0) & 0x01), ((u_arrmul32_fa26_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_11 = and_gate(((a >> 26) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa26_11_xor1 = (fa(((u_arrmul32_and26_11 >> 0) & 0x01), ((u_arrmul32_fa27_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_11_or0 = (fa(((u_arrmul32_and26_11 >> 0) & 0x01), ((u_arrmul32_fa27_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_11 = and_gate(((a >> 27) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa27_11_xor1 = (fa(((u_arrmul32_and27_11 >> 0) & 0x01), ((u_arrmul32_fa28_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_11_or0 = (fa(((u_arrmul32_and27_11 >> 0) & 0x01), ((u_arrmul32_fa28_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_11 = and_gate(((a >> 28) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa28_11_xor1 = (fa(((u_arrmul32_and28_11 >> 0) & 0x01), ((u_arrmul32_fa29_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_11_or0 = (fa(((u_arrmul32_and28_11 >> 0) & 0x01), ((u_arrmul32_fa29_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_11 = and_gate(((a >> 29) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa29_11_xor1 = (fa(((u_arrmul32_and29_11 >> 0) & 0x01), ((u_arrmul32_fa30_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_11_or0 = (fa(((u_arrmul32_and29_11 >> 0) & 0x01), ((u_arrmul32_fa30_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_11 = and_gate(((a >> 30) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa30_11_xor1 = (fa(((u_arrmul32_and30_11 >> 0) & 0x01), ((u_arrmul32_fa31_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_11_or0 = (fa(((u_arrmul32_and30_11 >> 0) & 0x01), ((u_arrmul32_fa31_10_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_11 = and_gate(((a >> 31) & 0x01), ((b >> 11) & 0x01));
u_arrmul32_fa31_11_xor1 = (fa(((u_arrmul32_and31_11 >> 0) & 0x01), ((u_arrmul32_fa31_10_or0 >> 0) & 0x01), ((u_arrmul32_fa30_11_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_11_or0 = (fa(((u_arrmul32_and31_11 >> 0) & 0x01), ((u_arrmul32_fa31_10_or0 >> 0) & 0x01), ((u_arrmul32_fa30_11_or0 >> 0) & 0x01)) >> 1) & 0x01;
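
/* Row 12: a[i] & b[12] folded into the running sum; row 11's top carry
   (u_arrmul32_fa31_11_or0) enters at column 31. */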
u_arrmul32_and0_12 = and_gate(((a >> 0) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_ha0_12_xor0 = (ha(((u_arrmul32_and0_12 >> 0) & 0x01), ((u_arrmul32_fa1_11_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_12_and0 = (ha(((u_arrmul32_and0_12 >> 0) & 0x01), ((u_arrmul32_fa1_11_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_12 = and_gate(((a >> 1) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa1_12_xor1 = (fa(((u_arrmul32_and1_12 >> 0) & 0x01), ((u_arrmul32_fa2_11_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_12_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_12_or0 = (fa(((u_arrmul32_and1_12 >> 0) & 0x01), ((u_arrmul32_fa2_11_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_12_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_12 = and_gate(((a >> 2) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa2_12_xor1 = (fa(((u_arrmul32_and2_12 >> 0) & 0x01), ((u_arrmul32_fa3_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_12_or0 = (fa(((u_arrmul32_and2_12 >> 0) & 0x01), ((u_arrmul32_fa3_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_12 = and_gate(((a >> 3) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa3_12_xor1 = (fa(((u_arrmul32_and3_12 >> 0) & 0x01), ((u_arrmul32_fa4_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_12_or0 = (fa(((u_arrmul32_and3_12 >> 0) & 0x01), ((u_arrmul32_fa4_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_12 = and_gate(((a >> 4) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa4_12_xor1 = (fa(((u_arrmul32_and4_12 >> 0) & 0x01), ((u_arrmul32_fa5_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_12_or0 = (fa(((u_arrmul32_and4_12 >> 0) & 0x01), ((u_arrmul32_fa5_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_12 = and_gate(((a >> 5) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa5_12_xor1 = (fa(((u_arrmul32_and5_12 >> 0) & 0x01), ((u_arrmul32_fa6_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_12_or0 = (fa(((u_arrmul32_and5_12 >> 0) & 0x01), ((u_arrmul32_fa6_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_12 = and_gate(((a >> 6) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa6_12_xor1 = (fa(((u_arrmul32_and6_12 >> 0) & 0x01), ((u_arrmul32_fa7_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_12_or0 = (fa(((u_arrmul32_and6_12 >> 0) & 0x01), ((u_arrmul32_fa7_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_12 = and_gate(((a >> 7) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa7_12_xor1 = (fa(((u_arrmul32_and7_12 >> 0) & 0x01), ((u_arrmul32_fa8_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_12_or0 = (fa(((u_arrmul32_and7_12 >> 0) & 0x01), ((u_arrmul32_fa8_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_12 = and_gate(((a >> 8) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa8_12_xor1 = (fa(((u_arrmul32_and8_12 >> 0) & 0x01), ((u_arrmul32_fa9_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_12_or0 = (fa(((u_arrmul32_and8_12 >> 0) & 0x01), ((u_arrmul32_fa9_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_12 = and_gate(((a >> 9) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa9_12_xor1 = (fa(((u_arrmul32_and9_12 >> 0) & 0x01), ((u_arrmul32_fa10_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_12_or0 = (fa(((u_arrmul32_and9_12 >> 0) & 0x01), ((u_arrmul32_fa10_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_12 = and_gate(((a >> 10) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa10_12_xor1 = (fa(((u_arrmul32_and10_12 >> 0) & 0x01), ((u_arrmul32_fa11_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_12_or0 = (fa(((u_arrmul32_and10_12 >> 0) & 0x01), ((u_arrmul32_fa11_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_12 = and_gate(((a >> 11) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa11_12_xor1 = (fa(((u_arrmul32_and11_12 >> 0) & 0x01), ((u_arrmul32_fa12_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_12_or0 = (fa(((u_arrmul32_and11_12 >> 0) & 0x01), ((u_arrmul32_fa12_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_12 = and_gate(((a >> 12) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa12_12_xor1 = (fa(((u_arrmul32_and12_12 >> 0) & 0x01), ((u_arrmul32_fa13_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_12_or0 = (fa(((u_arrmul32_and12_12 >> 0) & 0x01), ((u_arrmul32_fa13_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_12 = and_gate(((a >> 13) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa13_12_xor1 = (fa(((u_arrmul32_and13_12 >> 0) & 0x01), ((u_arrmul32_fa14_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_12_or0 = (fa(((u_arrmul32_and13_12 >> 0) & 0x01), ((u_arrmul32_fa14_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_12 = and_gate(((a >> 14) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa14_12_xor1 = (fa(((u_arrmul32_and14_12 >> 0) & 0x01), ((u_arrmul32_fa15_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_12_or0 = (fa(((u_arrmul32_and14_12 >> 0) & 0x01), ((u_arrmul32_fa15_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_12 = and_gate(((a >> 15) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa15_12_xor1 = (fa(((u_arrmul32_and15_12 >> 0) & 0x01), ((u_arrmul32_fa16_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_12_or0 = (fa(((u_arrmul32_and15_12 >> 0) & 0x01), ((u_arrmul32_fa16_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_12 = and_gate(((a >> 16) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa16_12_xor1 = (fa(((u_arrmul32_and16_12 >> 0) & 0x01), ((u_arrmul32_fa17_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_12_or0 = (fa(((u_arrmul32_and16_12 >> 0) & 0x01), ((u_arrmul32_fa17_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_12 = and_gate(((a >> 17) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa17_12_xor1 = (fa(((u_arrmul32_and17_12 >> 0) & 0x01), ((u_arrmul32_fa18_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_12_or0 = (fa(((u_arrmul32_and17_12 >> 0) & 0x01), ((u_arrmul32_fa18_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_12 = and_gate(((a >> 18) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa18_12_xor1 = (fa(((u_arrmul32_and18_12 >> 0) & 0x01), ((u_arrmul32_fa19_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_12_or0 = (fa(((u_arrmul32_and18_12 >> 0) & 0x01), ((u_arrmul32_fa19_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_12 = and_gate(((a >> 19) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa19_12_xor1 = (fa(((u_arrmul32_and19_12 >> 0) & 0x01), ((u_arrmul32_fa20_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_12_or0 = (fa(((u_arrmul32_and19_12 >> 0) & 0x01), ((u_arrmul32_fa20_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_12 = and_gate(((a >> 20) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa20_12_xor1 = (fa(((u_arrmul32_and20_12 >> 0) & 0x01), ((u_arrmul32_fa21_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_12_or0 = (fa(((u_arrmul32_and20_12 >> 0) & 0x01), ((u_arrmul32_fa21_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_12 = and_gate(((a >> 21) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa21_12_xor1 = (fa(((u_arrmul32_and21_12 >> 0) & 0x01), ((u_arrmul32_fa22_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_12_or0 = (fa(((u_arrmul32_and21_12 >> 0) & 0x01), ((u_arrmul32_fa22_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_12 = and_gate(((a >> 22) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa22_12_xor1 = (fa(((u_arrmul32_and22_12 >> 0) & 0x01), ((u_arrmul32_fa23_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_12_or0 = (fa(((u_arrmul32_and22_12 >> 0) & 0x01), ((u_arrmul32_fa23_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_12 = and_gate(((a >> 23) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa23_12_xor1 = (fa(((u_arrmul32_and23_12 >> 0) & 0x01), ((u_arrmul32_fa24_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_12_or0 = (fa(((u_arrmul32_and23_12 >> 0) & 0x01), ((u_arrmul32_fa24_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_12 = and_gate(((a >> 24) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa24_12_xor1 = (fa(((u_arrmul32_and24_12 >> 0) & 0x01), ((u_arrmul32_fa25_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_12_or0 = (fa(((u_arrmul32_and24_12 >> 0) & 0x01), ((u_arrmul32_fa25_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_12 = and_gate(((a >> 25) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa25_12_xor1 = (fa(((u_arrmul32_and25_12 >> 0) & 0x01), ((u_arrmul32_fa26_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_12_or0 = (fa(((u_arrmul32_and25_12 >> 0) & 0x01), ((u_arrmul32_fa26_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_12 = and_gate(((a >> 26) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa26_12_xor1 = (fa(((u_arrmul32_and26_12 >> 0) & 0x01), ((u_arrmul32_fa27_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_12_or0 = (fa(((u_arrmul32_and26_12 >> 0) & 0x01), ((u_arrmul32_fa27_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_12 = and_gate(((a >> 27) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa27_12_xor1 = (fa(((u_arrmul32_and27_12 >> 0) & 0x01), ((u_arrmul32_fa28_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_12_or0 = (fa(((u_arrmul32_and27_12 >> 0) & 0x01), ((u_arrmul32_fa28_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_12 = and_gate(((a >> 28) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa28_12_xor1 = (fa(((u_arrmul32_and28_12 >> 0) & 0x01), ((u_arrmul32_fa29_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_12_or0 = (fa(((u_arrmul32_and28_12 >> 0) & 0x01), ((u_arrmul32_fa29_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_12 = and_gate(((a >> 29) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa29_12_xor1 = (fa(((u_arrmul32_and29_12 >> 0) & 0x01), ((u_arrmul32_fa30_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_12_or0 = (fa(((u_arrmul32_and29_12 >> 0) & 0x01), ((u_arrmul32_fa30_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_12 = and_gate(((a >> 30) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa30_12_xor1 = (fa(((u_arrmul32_and30_12 >> 0) & 0x01), ((u_arrmul32_fa31_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_12_or0 = (fa(((u_arrmul32_and30_12 >> 0) & 0x01), ((u_arrmul32_fa31_11_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_12 = and_gate(((a >> 31) & 0x01), ((b >> 12) & 0x01));
u_arrmul32_fa31_12_xor1 = (fa(((u_arrmul32_and31_12 >> 0) & 0x01), ((u_arrmul32_fa31_11_or0 >> 0) & 0x01), ((u_arrmul32_fa30_12_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_12_or0 = (fa(((u_arrmul32_and31_12 >> 0) & 0x01), ((u_arrmul32_fa31_11_or0 >> 0) & 0x01), ((u_arrmul32_fa30_12_or0 >> 0) & 0x01)) >> 1) & 0x01;
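
/* Row 13: identical wiring, one bit position higher (b bit 13). */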
u_arrmul32_and0_13 = and_gate(((a >> 0) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_ha0_13_xor0 = (ha(((u_arrmul32_and0_13 >> 0) & 0x01), ((u_arrmul32_fa1_12_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_13_and0 = (ha(((u_arrmul32_and0_13 >> 0) & 0x01), ((u_arrmul32_fa1_12_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_13 = and_gate(((a >> 1) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa1_13_xor1 = (fa(((u_arrmul32_and1_13 >> 0) & 0x01), ((u_arrmul32_fa2_12_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_13_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_13_or0 = (fa(((u_arrmul32_and1_13 >> 0) & 0x01), ((u_arrmul32_fa2_12_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_13_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_13 = and_gate(((a >> 2) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa2_13_xor1 = (fa(((u_arrmul32_and2_13 >> 0) & 0x01), ((u_arrmul32_fa3_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_13_or0 = (fa(((u_arrmul32_and2_13 >> 0) & 0x01), ((u_arrmul32_fa3_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_13 = and_gate(((a >> 3) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa3_13_xor1 = (fa(((u_arrmul32_and3_13 >> 0) & 0x01), ((u_arrmul32_fa4_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_13_or0 = (fa(((u_arrmul32_and3_13 >> 0) & 0x01), ((u_arrmul32_fa4_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_13 = and_gate(((a >> 4) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa4_13_xor1 = (fa(((u_arrmul32_and4_13 >> 0) & 0x01), ((u_arrmul32_fa5_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_13_or0 = (fa(((u_arrmul32_and4_13 >> 0) & 0x01), ((u_arrmul32_fa5_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_13 = and_gate(((a >> 5) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa5_13_xor1 = (fa(((u_arrmul32_and5_13 >> 0) & 0x01), ((u_arrmul32_fa6_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_13_or0 = (fa(((u_arrmul32_and5_13 >> 0) & 0x01), ((u_arrmul32_fa6_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_13 = and_gate(((a >> 6) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa6_13_xor1 = (fa(((u_arrmul32_and6_13 >> 0) & 0x01), ((u_arrmul32_fa7_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_13_or0 = (fa(((u_arrmul32_and6_13 >> 0) & 0x01), ((u_arrmul32_fa7_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_13 = and_gate(((a >> 7) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa7_13_xor1 = (fa(((u_arrmul32_and7_13 >> 0) & 0x01), ((u_arrmul32_fa8_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_13_or0 = (fa(((u_arrmul32_and7_13 >> 0) & 0x01), ((u_arrmul32_fa8_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_13 = and_gate(((a >> 8) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa8_13_xor1 = (fa(((u_arrmul32_and8_13 >> 0) & 0x01), ((u_arrmul32_fa9_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_13_or0 = (fa(((u_arrmul32_and8_13 >> 0) & 0x01), ((u_arrmul32_fa9_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_13 = and_gate(((a >> 9) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa9_13_xor1 = (fa(((u_arrmul32_and9_13 >> 0) & 0x01), ((u_arrmul32_fa10_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_13_or0 = (fa(((u_arrmul32_and9_13 >> 0) & 0x01), ((u_arrmul32_fa10_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_13 = and_gate(((a >> 10) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa10_13_xor1 = (fa(((u_arrmul32_and10_13 >> 0) & 0x01), ((u_arrmul32_fa11_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_13_or0 = (fa(((u_arrmul32_and10_13 >> 0) & 0x01), ((u_arrmul32_fa11_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_13 = and_gate(((a >> 11) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa11_13_xor1 = (fa(((u_arrmul32_and11_13 >> 0) & 0x01), ((u_arrmul32_fa12_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_13_or0 = (fa(((u_arrmul32_and11_13 >> 0) & 0x01), ((u_arrmul32_fa12_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_13 = and_gate(((a >> 12) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa12_13_xor1 = (fa(((u_arrmul32_and12_13 >> 0) & 0x01), ((u_arrmul32_fa13_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_13_or0 = (fa(((u_arrmul32_and12_13 >> 0) & 0x01), ((u_arrmul32_fa13_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_13 = and_gate(((a >> 13) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa13_13_xor1 = (fa(((u_arrmul32_and13_13 >> 0) & 0x01), ((u_arrmul32_fa14_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_13_or0 = (fa(((u_arrmul32_and13_13 >> 0) & 0x01), ((u_arrmul32_fa14_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_13 = and_gate(((a >> 14) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa14_13_xor1 = (fa(((u_arrmul32_and14_13 >> 0) & 0x01), ((u_arrmul32_fa15_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_13_or0 = (fa(((u_arrmul32_and14_13 >> 0) & 0x01), ((u_arrmul32_fa15_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_13 = and_gate(((a >> 15) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa15_13_xor1 = (fa(((u_arrmul32_and15_13 >> 0) & 0x01), ((u_arrmul32_fa16_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_13_or0 = (fa(((u_arrmul32_and15_13 >> 0) & 0x01), ((u_arrmul32_fa16_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_13 = and_gate(((a >> 16) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa16_13_xor1 = (fa(((u_arrmul32_and16_13 >> 0) & 0x01), ((u_arrmul32_fa17_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_13_or0 = (fa(((u_arrmul32_and16_13 >> 0) & 0x01), ((u_arrmul32_fa17_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_13 = and_gate(((a >> 17) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa17_13_xor1 = (fa(((u_arrmul32_and17_13 >> 0) & 0x01), ((u_arrmul32_fa18_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_13_or0 = (fa(((u_arrmul32_and17_13 >> 0) & 0x01), ((u_arrmul32_fa18_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_13 = and_gate(((a >> 18) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa18_13_xor1 = (fa(((u_arrmul32_and18_13 >> 0) & 0x01), ((u_arrmul32_fa19_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_13_or0 = (fa(((u_arrmul32_and18_13 >> 0) & 0x01), ((u_arrmul32_fa19_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_13 = and_gate(((a >> 19) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa19_13_xor1 = (fa(((u_arrmul32_and19_13 >> 0) & 0x01), ((u_arrmul32_fa20_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_13_or0 = (fa(((u_arrmul32_and19_13 >> 0) & 0x01), ((u_arrmul32_fa20_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_13 = and_gate(((a >> 20) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa20_13_xor1 = (fa(((u_arrmul32_and20_13 >> 0) & 0x01), ((u_arrmul32_fa21_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_13_or0 = (fa(((u_arrmul32_and20_13 >> 0) & 0x01), ((u_arrmul32_fa21_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_13 = and_gate(((a >> 21) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa21_13_xor1 = (fa(((u_arrmul32_and21_13 >> 0) & 0x01), ((u_arrmul32_fa22_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_13_or0 = (fa(((u_arrmul32_and21_13 >> 0) & 0x01), ((u_arrmul32_fa22_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_13 = and_gate(((a >> 22) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa22_13_xor1 = (fa(((u_arrmul32_and22_13 >> 0) & 0x01), ((u_arrmul32_fa23_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_13_or0 = (fa(((u_arrmul32_and22_13 >> 0) & 0x01), ((u_arrmul32_fa23_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_13 = and_gate(((a >> 23) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa23_13_xor1 = (fa(((u_arrmul32_and23_13 >> 0) & 0x01), ((u_arrmul32_fa24_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_13_or0 = (fa(((u_arrmul32_and23_13 >> 0) & 0x01), ((u_arrmul32_fa24_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_13 = and_gate(((a >> 24) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa24_13_xor1 = (fa(((u_arrmul32_and24_13 >> 0) & 0x01), ((u_arrmul32_fa25_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_13_or0 = (fa(((u_arrmul32_and24_13 >> 0) & 0x01), ((u_arrmul32_fa25_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_13 = and_gate(((a >> 25) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa25_13_xor1 = (fa(((u_arrmul32_and25_13 >> 0) & 0x01), ((u_arrmul32_fa26_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_13_or0 = (fa(((u_arrmul32_and25_13 >> 0) & 0x01), ((u_arrmul32_fa26_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_13 = and_gate(((a >> 26) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa26_13_xor1 = (fa(((u_arrmul32_and26_13 >> 0) & 0x01), ((u_arrmul32_fa27_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_13_or0 = (fa(((u_arrmul32_and26_13 >> 0) & 0x01), ((u_arrmul32_fa27_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_13 = and_gate(((a >> 27) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa27_13_xor1 = (fa(((u_arrmul32_and27_13 >> 0) & 0x01), ((u_arrmul32_fa28_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_13_or0 = (fa(((u_arrmul32_and27_13 >> 0) & 0x01), ((u_arrmul32_fa28_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_13 = and_gate(((a >> 28) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa28_13_xor1 = (fa(((u_arrmul32_and28_13 >> 0) & 0x01), ((u_arrmul32_fa29_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_13_or0 = (fa(((u_arrmul32_and28_13 >> 0) & 0x01), ((u_arrmul32_fa29_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_13 = and_gate(((a >> 29) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa29_13_xor1 = (fa(((u_arrmul32_and29_13 >> 0) & 0x01), ((u_arrmul32_fa30_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_13_or0 = (fa(((u_arrmul32_and29_13 >> 0) & 0x01), ((u_arrmul32_fa30_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_13 = and_gate(((a >> 30) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa30_13_xor1 = (fa(((u_arrmul32_and30_13 >> 0) & 0x01), ((u_arrmul32_fa31_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_13_or0 = (fa(((u_arrmul32_and30_13 >> 0) & 0x01), ((u_arrmul32_fa31_12_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_13 = and_gate(((a >> 31) & 0x01), ((b >> 13) & 0x01));
u_arrmul32_fa31_13_xor1 = (fa(((u_arrmul32_and31_13 >> 0) & 0x01), ((u_arrmul32_fa31_12_or0 >> 0) & 0x01), ((u_arrmul32_fa30_13_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_13_or0 = (fa(((u_arrmul32_and31_13 >> 0) & 0x01), ((u_arrmul32_fa31_12_or0 >> 0) & 0x01), ((u_arrmul32_fa30_13_or0 >> 0) & 0x01)) >> 1) & 0x01;
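
/* Row 14: start of the a[i] & b[14] partial products, accumulated as in the
   rows above. */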
u_arrmul32_and0_14 = and_gate(((a >> 0) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_ha0_14_xor0 = (ha(((u_arrmul32_and0_14 >> 0) & 0x01), ((u_arrmul32_fa1_13_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_14_and0 = (ha(((u_arrmul32_and0_14 >> 0) & 0x01), ((u_arrmul32_fa1_13_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_14 = and_gate(((a >> 1) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa1_14_xor1 = (fa(((u_arrmul32_and1_14 >> 0) & 0x01), ((u_arrmul32_fa2_13_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_14_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_14_or0 = (fa(((u_arrmul32_and1_14 >> 0) & 0x01), ((u_arrmul32_fa2_13_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_14_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_14 = and_gate(((a >> 2) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa2_14_xor1 = (fa(((u_arrmul32_and2_14 >> 0) & 0x01), ((u_arrmul32_fa3_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_14_or0 = (fa(((u_arrmul32_and2_14 >> 0) & 0x01), ((u_arrmul32_fa3_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_14 = and_gate(((a >> 3) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa3_14_xor1 = (fa(((u_arrmul32_and3_14 >> 0) & 0x01), ((u_arrmul32_fa4_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_14_or0 = (fa(((u_arrmul32_and3_14 >> 0) & 0x01), ((u_arrmul32_fa4_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_14 = and_gate(((a >> 4) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa4_14_xor1 = (fa(((u_arrmul32_and4_14 >> 0) & 0x01), ((u_arrmul32_fa5_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_14_or0 = (fa(((u_arrmul32_and4_14 >> 0) & 0x01), ((u_arrmul32_fa5_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_14 = and_gate(((a >> 5) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa5_14_xor1 = (fa(((u_arrmul32_and5_14 >> 0) & 0x01), ((u_arrmul32_fa6_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_14_or0 = (fa(((u_arrmul32_and5_14 >> 0) & 0x01), ((u_arrmul32_fa6_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_14 = and_gate(((a >> 6) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa6_14_xor1 = (fa(((u_arrmul32_and6_14 >> 0) & 0x01), ((u_arrmul32_fa7_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_14_or0 = (fa(((u_arrmul32_and6_14 >> 0) & 0x01), ((u_arrmul32_fa7_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_14 = and_gate(((a >> 7) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa7_14_xor1 = (fa(((u_arrmul32_and7_14 >> 0) & 0x01), ((u_arrmul32_fa8_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_14_or0 = (fa(((u_arrmul32_and7_14 >> 0) & 0x01), ((u_arrmul32_fa8_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_14 = and_gate(((a >> 8) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa8_14_xor1 = (fa(((u_arrmul32_and8_14 >> 0) & 0x01), ((u_arrmul32_fa9_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_14_or0 = (fa(((u_arrmul32_and8_14 >> 0) & 0x01), ((u_arrmul32_fa9_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_14 = and_gate(((a >> 9) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa9_14_xor1 = (fa(((u_arrmul32_and9_14 >> 0) & 0x01), ((u_arrmul32_fa10_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_14_or0 = (fa(((u_arrmul32_and9_14 >> 0) & 0x01), ((u_arrmul32_fa10_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_14 = and_gate(((a >> 10) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa10_14_xor1 = (fa(((u_arrmul32_and10_14 >> 0) & 0x01), ((u_arrmul32_fa11_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_14_or0 = (fa(((u_arrmul32_and10_14 >> 0) & 0x01), ((u_arrmul32_fa11_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_14 = and_gate(((a >> 11) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa11_14_xor1 = (fa(((u_arrmul32_and11_14 >> 0) & 0x01), ((u_arrmul32_fa12_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_14_or0 = (fa(((u_arrmul32_and11_14 >> 0) & 0x01), ((u_arrmul32_fa12_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_14 = and_gate(((a >> 12) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa12_14_xor1 = (fa(((u_arrmul32_and12_14 >> 0) & 0x01), ((u_arrmul32_fa13_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_14_or0 = (fa(((u_arrmul32_and12_14 >> 0) & 0x01), ((u_arrmul32_fa13_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_14 = and_gate(((a >> 13) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa13_14_xor1 = (fa(((u_arrmul32_and13_14 >> 0) & 0x01), ((u_arrmul32_fa14_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_14_or0 = (fa(((u_arrmul32_and13_14 >> 0) & 0x01), ((u_arrmul32_fa14_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_14 = and_gate(((a >> 14) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa14_14_xor1 = (fa(((u_arrmul32_and14_14 >> 0) & 0x01), ((u_arrmul32_fa15_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_14_or0 = (fa(((u_arrmul32_and14_14 >> 0) & 0x01), ((u_arrmul32_fa15_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_14 = and_gate(((a >> 15) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa15_14_xor1 = (fa(((u_arrmul32_and15_14 >> 0) & 0x01), ((u_arrmul32_fa16_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_14_or0 = (fa(((u_arrmul32_and15_14 >> 0) & 0x01), ((u_arrmul32_fa16_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_14 = and_gate(((a >> 16) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa16_14_xor1 = (fa(((u_arrmul32_and16_14 >> 0) & 0x01), ((u_arrmul32_fa17_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_14_or0 = (fa(((u_arrmul32_and16_14 >> 0) & 0x01), ((u_arrmul32_fa17_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_14 = and_gate(((a >> 17) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa17_14_xor1 = (fa(((u_arrmul32_and17_14 >> 0) & 0x01), ((u_arrmul32_fa18_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_14_or0 = (fa(((u_arrmul32_and17_14 >> 0) & 0x01), ((u_arrmul32_fa18_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_14 = and_gate(((a >> 18) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa18_14_xor1 = (fa(((u_arrmul32_and18_14 >> 0) & 0x01), ((u_arrmul32_fa19_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_14_or0 = (fa(((u_arrmul32_and18_14 >> 0) & 0x01), ((u_arrmul32_fa19_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_14 = and_gate(((a >> 19) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa19_14_xor1 = (fa(((u_arrmul32_and19_14 >> 0) & 0x01), ((u_arrmul32_fa20_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_14_or0 = (fa(((u_arrmul32_and19_14 >> 0) & 0x01), ((u_arrmul32_fa20_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_14 = and_gate(((a >> 20) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa20_14_xor1 = (fa(((u_arrmul32_and20_14 >> 0) & 0x01), ((u_arrmul32_fa21_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_14_or0 = (fa(((u_arrmul32_and20_14 >> 0) & 0x01), ((u_arrmul32_fa21_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_14 = and_gate(((a >> 21) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa21_14_xor1 = (fa(((u_arrmul32_and21_14 >> 0) & 0x01), ((u_arrmul32_fa22_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_14_or0 = (fa(((u_arrmul32_and21_14 >> 0) & 0x01), ((u_arrmul32_fa22_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_14 = and_gate(((a >> 22) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa22_14_xor1 = (fa(((u_arrmul32_and22_14 >> 0) & 0x01), ((u_arrmul32_fa23_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_14_or0 = (fa(((u_arrmul32_and22_14 >> 0) & 0x01), ((u_arrmul32_fa23_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_14 = and_gate(((a >> 23) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa23_14_xor1 = (fa(((u_arrmul32_and23_14 >> 0) & 0x01), ((u_arrmul32_fa24_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_14_or0 = (fa(((u_arrmul32_and23_14 >> 0) & 0x01), ((u_arrmul32_fa24_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_14 = and_gate(((a >> 24) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa24_14_xor1 = (fa(((u_arrmul32_and24_14 >> 0) & 0x01), ((u_arrmul32_fa25_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_14_or0 = (fa(((u_arrmul32_and24_14 >> 0) & 0x01), ((u_arrmul32_fa25_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_14 = and_gate(((a >> 25) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa25_14_xor1 = (fa(((u_arrmul32_and25_14 >> 0) & 0x01), ((u_arrmul32_fa26_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_14_or0 = (fa(((u_arrmul32_and25_14 >> 0) & 0x01), ((u_arrmul32_fa26_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_14 = and_gate(((a >> 26) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa26_14_xor1 = (fa(((u_arrmul32_and26_14 >> 0) & 0x01), ((u_arrmul32_fa27_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_14_or0 = (fa(((u_arrmul32_and26_14 >> 0) & 0x01), ((u_arrmul32_fa27_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_14 = and_gate(((a >> 27) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa27_14_xor1 = (fa(((u_arrmul32_and27_14 >> 0) & 0x01), ((u_arrmul32_fa28_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_14_or0 = (fa(((u_arrmul32_and27_14 >> 0) & 0x01), ((u_arrmul32_fa28_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_14 = and_gate(((a >> 28) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa28_14_xor1 = (fa(((u_arrmul32_and28_14 >> 0) & 0x01), ((u_arrmul32_fa29_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_14_or0 = (fa(((u_arrmul32_and28_14 >> 0) & 0x01), ((u_arrmul32_fa29_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_14 = and_gate(((a >> 29) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa29_14_xor1 = (fa(((u_arrmul32_and29_14 >> 0) & 0x01), ((u_arrmul32_fa30_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_14_or0 = (fa(((u_arrmul32_and29_14 >> 0) & 0x01), ((u_arrmul32_fa30_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_14 = and_gate(((a >> 30) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa30_14_xor1 = (fa(((u_arrmul32_and30_14 >> 0) & 0x01), ((u_arrmul32_fa31_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_14_or0 = (fa(((u_arrmul32_and30_14 >> 0) & 0x01), ((u_arrmul32_fa31_13_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_14 = and_gate(((a >> 31) & 0x01), ((b >> 14) & 0x01));
u_arrmul32_fa31_14_xor1 = (fa(((u_arrmul32_and31_14 >> 0) & 0x01), ((u_arrmul32_fa31_13_or0 >> 0) & 0x01), ((u_arrmul32_fa30_14_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_14_or0 = (fa(((u_arrmul32_and31_14 >> 0) & 0x01), ((u_arrmul32_fa31_13_or0 >> 0) & 0x01), ((u_arrmul32_fa30_14_or0 >> 0) & 0x01)) >> 1) & 0x01;
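// Row 15: partial products a[i] & b[15] folded into the row-14 sums and carries.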
u_arrmul32_and0_15 = and_gate(((a >> 0) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_ha0_15_xor0 = (ha(((u_arrmul32_and0_15 >> 0) & 0x01), ((u_arrmul32_fa1_14_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_15_and0 = (ha(((u_arrmul32_and0_15 >> 0) & 0x01), ((u_arrmul32_fa1_14_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_15 = and_gate(((a >> 1) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa1_15_xor1 = (fa(((u_arrmul32_and1_15 >> 0) & 0x01), ((u_arrmul32_fa2_14_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_15_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_15_or0 = (fa(((u_arrmul32_and1_15 >> 0) & 0x01), ((u_arrmul32_fa2_14_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_15_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_15 = and_gate(((a >> 2) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa2_15_xor1 = (fa(((u_arrmul32_and2_15 >> 0) & 0x01), ((u_arrmul32_fa3_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_15_or0 = (fa(((u_arrmul32_and2_15 >> 0) & 0x01), ((u_arrmul32_fa3_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_15 = and_gate(((a >> 3) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa3_15_xor1 = (fa(((u_arrmul32_and3_15 >> 0) & 0x01), ((u_arrmul32_fa4_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_15_or0 = (fa(((u_arrmul32_and3_15 >> 0) & 0x01), ((u_arrmul32_fa4_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_15 = and_gate(((a >> 4) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa4_15_xor1 = (fa(((u_arrmul32_and4_15 >> 0) & 0x01), ((u_arrmul32_fa5_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_15_or0 = (fa(((u_arrmul32_and4_15 >> 0) & 0x01), ((u_arrmul32_fa5_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_15 = and_gate(((a >> 5) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa5_15_xor1 = (fa(((u_arrmul32_and5_15 >> 0) & 0x01), ((u_arrmul32_fa6_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_15_or0 = (fa(((u_arrmul32_and5_15 >> 0) & 0x01), ((u_arrmul32_fa6_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_15 = and_gate(((a >> 6) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa6_15_xor1 = (fa(((u_arrmul32_and6_15 >> 0) & 0x01), ((u_arrmul32_fa7_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_15_or0 = (fa(((u_arrmul32_and6_15 >> 0) & 0x01), ((u_arrmul32_fa7_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_15 = and_gate(((a >> 7) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa7_15_xor1 = (fa(((u_arrmul32_and7_15 >> 0) & 0x01), ((u_arrmul32_fa8_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_15_or0 = (fa(((u_arrmul32_and7_15 >> 0) & 0x01), ((u_arrmul32_fa8_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_15 = and_gate(((a >> 8) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa8_15_xor1 = (fa(((u_arrmul32_and8_15 >> 0) & 0x01), ((u_arrmul32_fa9_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_15_or0 = (fa(((u_arrmul32_and8_15 >> 0) & 0x01), ((u_arrmul32_fa9_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_15 = and_gate(((a >> 9) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa9_15_xor1 = (fa(((u_arrmul32_and9_15 >> 0) & 0x01), ((u_arrmul32_fa10_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_15_or0 = (fa(((u_arrmul32_and9_15 >> 0) & 0x01), ((u_arrmul32_fa10_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_15 = and_gate(((a >> 10) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa10_15_xor1 = (fa(((u_arrmul32_and10_15 >> 0) & 0x01), ((u_arrmul32_fa11_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_15_or0 = (fa(((u_arrmul32_and10_15 >> 0) & 0x01), ((u_arrmul32_fa11_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_15 = and_gate(((a >> 11) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa11_15_xor1 = (fa(((u_arrmul32_and11_15 >> 0) & 0x01), ((u_arrmul32_fa12_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_15_or0 = (fa(((u_arrmul32_and11_15 >> 0) & 0x01), ((u_arrmul32_fa12_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_15 = and_gate(((a >> 12) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa12_15_xor1 = (fa(((u_arrmul32_and12_15 >> 0) & 0x01), ((u_arrmul32_fa13_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_15_or0 = (fa(((u_arrmul32_and12_15 >> 0) & 0x01), ((u_arrmul32_fa13_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_15 = and_gate(((a >> 13) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa13_15_xor1 = (fa(((u_arrmul32_and13_15 >> 0) & 0x01), ((u_arrmul32_fa14_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_15_or0 = (fa(((u_arrmul32_and13_15 >> 0) & 0x01), ((u_arrmul32_fa14_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_15 = and_gate(((a >> 14) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa14_15_xor1 = (fa(((u_arrmul32_and14_15 >> 0) & 0x01), ((u_arrmul32_fa15_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_15_or0 = (fa(((u_arrmul32_and14_15 >> 0) & 0x01), ((u_arrmul32_fa15_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_15 = and_gate(((a >> 15) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa15_15_xor1 = (fa(((u_arrmul32_and15_15 >> 0) & 0x01), ((u_arrmul32_fa16_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_15_or0 = (fa(((u_arrmul32_and15_15 >> 0) & 0x01), ((u_arrmul32_fa16_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_15 = and_gate(((a >> 16) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa16_15_xor1 = (fa(((u_arrmul32_and16_15 >> 0) & 0x01), ((u_arrmul32_fa17_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_15_or0 = (fa(((u_arrmul32_and16_15 >> 0) & 0x01), ((u_arrmul32_fa17_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_15 = and_gate(((a >> 17) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa17_15_xor1 = (fa(((u_arrmul32_and17_15 >> 0) & 0x01), ((u_arrmul32_fa18_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_15_or0 = (fa(((u_arrmul32_and17_15 >> 0) & 0x01), ((u_arrmul32_fa18_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_15 = and_gate(((a >> 18) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa18_15_xor1 = (fa(((u_arrmul32_and18_15 >> 0) & 0x01), ((u_arrmul32_fa19_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_15_or0 = (fa(((u_arrmul32_and18_15 >> 0) & 0x01), ((u_arrmul32_fa19_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_15 = and_gate(((a >> 19) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa19_15_xor1 = (fa(((u_arrmul32_and19_15 >> 0) & 0x01), ((u_arrmul32_fa20_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_15_or0 = (fa(((u_arrmul32_and19_15 >> 0) & 0x01), ((u_arrmul32_fa20_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_15 = and_gate(((a >> 20) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa20_15_xor1 = (fa(((u_arrmul32_and20_15 >> 0) & 0x01), ((u_arrmul32_fa21_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_15_or0 = (fa(((u_arrmul32_and20_15 >> 0) & 0x01), ((u_arrmul32_fa21_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_15 = and_gate(((a >> 21) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa21_15_xor1 = (fa(((u_arrmul32_and21_15 >> 0) & 0x01), ((u_arrmul32_fa22_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_15_or0 = (fa(((u_arrmul32_and21_15 >> 0) & 0x01), ((u_arrmul32_fa22_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_15 = and_gate(((a >> 22) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa22_15_xor1 = (fa(((u_arrmul32_and22_15 >> 0) & 0x01), ((u_arrmul32_fa23_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_15_or0 = (fa(((u_arrmul32_and22_15 >> 0) & 0x01), ((u_arrmul32_fa23_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_15 = and_gate(((a >> 23) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa23_15_xor1 = (fa(((u_arrmul32_and23_15 >> 0) & 0x01), ((u_arrmul32_fa24_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_15_or0 = (fa(((u_arrmul32_and23_15 >> 0) & 0x01), ((u_arrmul32_fa24_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_15 = and_gate(((a >> 24) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa24_15_xor1 = (fa(((u_arrmul32_and24_15 >> 0) & 0x01), ((u_arrmul32_fa25_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_15_or0 = (fa(((u_arrmul32_and24_15 >> 0) & 0x01), ((u_arrmul32_fa25_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_15 = and_gate(((a >> 25) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa25_15_xor1 = (fa(((u_arrmul32_and25_15 >> 0) & 0x01), ((u_arrmul32_fa26_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_15_or0 = (fa(((u_arrmul32_and25_15 >> 0) & 0x01), ((u_arrmul32_fa26_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_15 = and_gate(((a >> 26) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa26_15_xor1 = (fa(((u_arrmul32_and26_15 >> 0) & 0x01), ((u_arrmul32_fa27_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_15_or0 = (fa(((u_arrmul32_and26_15 >> 0) & 0x01), ((u_arrmul32_fa27_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_15 = and_gate(((a >> 27) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa27_15_xor1 = (fa(((u_arrmul32_and27_15 >> 0) & 0x01), ((u_arrmul32_fa28_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_15_or0 = (fa(((u_arrmul32_and27_15 >> 0) & 0x01), ((u_arrmul32_fa28_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_15 = and_gate(((a >> 28) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa28_15_xor1 = (fa(((u_arrmul32_and28_15 >> 0) & 0x01), ((u_arrmul32_fa29_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_15_or0 = (fa(((u_arrmul32_and28_15 >> 0) & 0x01), ((u_arrmul32_fa29_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_15 = and_gate(((a >> 29) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa29_15_xor1 = (fa(((u_arrmul32_and29_15 >> 0) & 0x01), ((u_arrmul32_fa30_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_15_or0 = (fa(((u_arrmul32_and29_15 >> 0) & 0x01), ((u_arrmul32_fa30_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_15 = and_gate(((a >> 30) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa30_15_xor1 = (fa(((u_arrmul32_and30_15 >> 0) & 0x01), ((u_arrmul32_fa31_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_15_or0 = (fa(((u_arrmul32_and30_15 >> 0) & 0x01), ((u_arrmul32_fa31_14_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_15 = and_gate(((a >> 31) & 0x01), ((b >> 15) & 0x01));
u_arrmul32_fa31_15_xor1 = (fa(((u_arrmul32_and31_15 >> 0) & 0x01), ((u_arrmul32_fa31_14_or0 >> 0) & 0x01), ((u_arrmul32_fa30_15_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_15_or0 = (fa(((u_arrmul32_and31_15 >> 0) & 0x01), ((u_arrmul32_fa31_14_or0 >> 0) & 0x01), ((u_arrmul32_fa30_15_or0 >> 0) & 0x01)) >> 1) & 0x01;
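// Row 16: partial products a[i] & b[16] folded into the row-15 sums and carries.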
u_arrmul32_and0_16 = and_gate(((a >> 0) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_ha0_16_xor0 = (ha(((u_arrmul32_and0_16 >> 0) & 0x01), ((u_arrmul32_fa1_15_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_16_and0 = (ha(((u_arrmul32_and0_16 >> 0) & 0x01), ((u_arrmul32_fa1_15_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_16 = and_gate(((a >> 1) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa1_16_xor1 = (fa(((u_arrmul32_and1_16 >> 0) & 0x01), ((u_arrmul32_fa2_15_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_16_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_16_or0 = (fa(((u_arrmul32_and1_16 >> 0) & 0x01), ((u_arrmul32_fa2_15_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_16_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_16 = and_gate(((a >> 2) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa2_16_xor1 = (fa(((u_arrmul32_and2_16 >> 0) & 0x01), ((u_arrmul32_fa3_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_16_or0 = (fa(((u_arrmul32_and2_16 >> 0) & 0x01), ((u_arrmul32_fa3_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_16 = and_gate(((a >> 3) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa3_16_xor1 = (fa(((u_arrmul32_and3_16 >> 0) & 0x01), ((u_arrmul32_fa4_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_16_or0 = (fa(((u_arrmul32_and3_16 >> 0) & 0x01), ((u_arrmul32_fa4_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_16 = and_gate(((a >> 4) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa4_16_xor1 = (fa(((u_arrmul32_and4_16 >> 0) & 0x01), ((u_arrmul32_fa5_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_16_or0 = (fa(((u_arrmul32_and4_16 >> 0) & 0x01), ((u_arrmul32_fa5_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_16 = and_gate(((a >> 5) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa5_16_xor1 = (fa(((u_arrmul32_and5_16 >> 0) & 0x01), ((u_arrmul32_fa6_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_16_or0 = (fa(((u_arrmul32_and5_16 >> 0) & 0x01), ((u_arrmul32_fa6_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_16 = and_gate(((a >> 6) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa6_16_xor1 = (fa(((u_arrmul32_and6_16 >> 0) & 0x01), ((u_arrmul32_fa7_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_16_or0 = (fa(((u_arrmul32_and6_16 >> 0) & 0x01), ((u_arrmul32_fa7_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_16 = and_gate(((a >> 7) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa7_16_xor1 = (fa(((u_arrmul32_and7_16 >> 0) & 0x01), ((u_arrmul32_fa8_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_16_or0 = (fa(((u_arrmul32_and7_16 >> 0) & 0x01), ((u_arrmul32_fa8_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_16 = and_gate(((a >> 8) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa8_16_xor1 = (fa(((u_arrmul32_and8_16 >> 0) & 0x01), ((u_arrmul32_fa9_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_16_or0 = (fa(((u_arrmul32_and8_16 >> 0) & 0x01), ((u_arrmul32_fa9_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_16 = and_gate(((a >> 9) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa9_16_xor1 = (fa(((u_arrmul32_and9_16 >> 0) & 0x01), ((u_arrmul32_fa10_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_16_or0 = (fa(((u_arrmul32_and9_16 >> 0) & 0x01), ((u_arrmul32_fa10_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_16 = and_gate(((a >> 10) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa10_16_xor1 = (fa(((u_arrmul32_and10_16 >> 0) & 0x01), ((u_arrmul32_fa11_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_16_or0 = (fa(((u_arrmul32_and10_16 >> 0) & 0x01), ((u_arrmul32_fa11_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_16 = and_gate(((a >> 11) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa11_16_xor1 = (fa(((u_arrmul32_and11_16 >> 0) & 0x01), ((u_arrmul32_fa12_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_16_or0 = (fa(((u_arrmul32_and11_16 >> 0) & 0x01), ((u_arrmul32_fa12_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_16 = and_gate(((a >> 12) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa12_16_xor1 = (fa(((u_arrmul32_and12_16 >> 0) & 0x01), ((u_arrmul32_fa13_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_16_or0 = (fa(((u_arrmul32_and12_16 >> 0) & 0x01), ((u_arrmul32_fa13_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_16 = and_gate(((a >> 13) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa13_16_xor1 = (fa(((u_arrmul32_and13_16 >> 0) & 0x01), ((u_arrmul32_fa14_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_16_or0 = (fa(((u_arrmul32_and13_16 >> 0) & 0x01), ((u_arrmul32_fa14_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_16 = and_gate(((a >> 14) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa14_16_xor1 = (fa(((u_arrmul32_and14_16 >> 0) & 0x01), ((u_arrmul32_fa15_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_16_or0 = (fa(((u_arrmul32_and14_16 >> 0) & 0x01), ((u_arrmul32_fa15_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_16 = and_gate(((a >> 15) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa15_16_xor1 = (fa(((u_arrmul32_and15_16 >> 0) & 0x01), ((u_arrmul32_fa16_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_16_or0 = (fa(((u_arrmul32_and15_16 >> 0) & 0x01), ((u_arrmul32_fa16_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_16 = and_gate(((a >> 16) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa16_16_xor1 = (fa(((u_arrmul32_and16_16 >> 0) & 0x01), ((u_arrmul32_fa17_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_16_or0 = (fa(((u_arrmul32_and16_16 >> 0) & 0x01), ((u_arrmul32_fa17_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_16 = and_gate(((a >> 17) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa17_16_xor1 = (fa(((u_arrmul32_and17_16 >> 0) & 0x01), ((u_arrmul32_fa18_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_16_or0 = (fa(((u_arrmul32_and17_16 >> 0) & 0x01), ((u_arrmul32_fa18_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_16 = and_gate(((a >> 18) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa18_16_xor1 = (fa(((u_arrmul32_and18_16 >> 0) & 0x01), ((u_arrmul32_fa19_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_16_or0 = (fa(((u_arrmul32_and18_16 >> 0) & 0x01), ((u_arrmul32_fa19_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_16 = and_gate(((a >> 19) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa19_16_xor1 = (fa(((u_arrmul32_and19_16 >> 0) & 0x01), ((u_arrmul32_fa20_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_16_or0 = (fa(((u_arrmul32_and19_16 >> 0) & 0x01), ((u_arrmul32_fa20_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_16 = and_gate(((a >> 20) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa20_16_xor1 = (fa(((u_arrmul32_and20_16 >> 0) & 0x01), ((u_arrmul32_fa21_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_16_or0 = (fa(((u_arrmul32_and20_16 >> 0) & 0x01), ((u_arrmul32_fa21_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_16 = and_gate(((a >> 21) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa21_16_xor1 = (fa(((u_arrmul32_and21_16 >> 0) & 0x01), ((u_arrmul32_fa22_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_16_or0 = (fa(((u_arrmul32_and21_16 >> 0) & 0x01), ((u_arrmul32_fa22_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_16 = and_gate(((a >> 22) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa22_16_xor1 = (fa(((u_arrmul32_and22_16 >> 0) & 0x01), ((u_arrmul32_fa23_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_16_or0 = (fa(((u_arrmul32_and22_16 >> 0) & 0x01), ((u_arrmul32_fa23_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_16 = and_gate(((a >> 23) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa23_16_xor1 = (fa(((u_arrmul32_and23_16 >> 0) & 0x01), ((u_arrmul32_fa24_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_16_or0 = (fa(((u_arrmul32_and23_16 >> 0) & 0x01), ((u_arrmul32_fa24_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_16 = and_gate(((a >> 24) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa24_16_xor1 = (fa(((u_arrmul32_and24_16 >> 0) & 0x01), ((u_arrmul32_fa25_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_16_or0 = (fa(((u_arrmul32_and24_16 >> 0) & 0x01), ((u_arrmul32_fa25_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_16 = and_gate(((a >> 25) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa25_16_xor1 = (fa(((u_arrmul32_and25_16 >> 0) & 0x01), ((u_arrmul32_fa26_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_16_or0 = (fa(((u_arrmul32_and25_16 >> 0) & 0x01), ((u_arrmul32_fa26_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_16 = and_gate(((a >> 26) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa26_16_xor1 = (fa(((u_arrmul32_and26_16 >> 0) & 0x01), ((u_arrmul32_fa27_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_16_or0 = (fa(((u_arrmul32_and26_16 >> 0) & 0x01), ((u_arrmul32_fa27_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_16 = and_gate(((a >> 27) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa27_16_xor1 = (fa(((u_arrmul32_and27_16 >> 0) & 0x01), ((u_arrmul32_fa28_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_16_or0 = (fa(((u_arrmul32_and27_16 >> 0) & 0x01), ((u_arrmul32_fa28_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_16 = and_gate(((a >> 28) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa28_16_xor1 = (fa(((u_arrmul32_and28_16 >> 0) & 0x01), ((u_arrmul32_fa29_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_16_or0 = (fa(((u_arrmul32_and28_16 >> 0) & 0x01), ((u_arrmul32_fa29_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_16 = and_gate(((a >> 29) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa29_16_xor1 = (fa(((u_arrmul32_and29_16 >> 0) & 0x01), ((u_arrmul32_fa30_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_16_or0 = (fa(((u_arrmul32_and29_16 >> 0) & 0x01), ((u_arrmul32_fa30_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_16 = and_gate(((a >> 30) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa30_16_xor1 = (fa(((u_arrmul32_and30_16 >> 0) & 0x01), ((u_arrmul32_fa31_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_16_or0 = (fa(((u_arrmul32_and30_16 >> 0) & 0x01), ((u_arrmul32_fa31_15_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_16 = and_gate(((a >> 31) & 0x01), ((b >> 16) & 0x01));
u_arrmul32_fa31_16_xor1 = (fa(((u_arrmul32_and31_16 >> 0) & 0x01), ((u_arrmul32_fa31_15_or0 >> 0) & 0x01), ((u_arrmul32_fa30_16_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_16_or0 = (fa(((u_arrmul32_and31_16 >> 0) & 0x01), ((u_arrmul32_fa31_15_or0 >> 0) & 0x01), ((u_arrmul32_fa30_16_or0 >> 0) & 0x01)) >> 1) & 0x01;
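// Row 17: partial products a[i] & b[17] folded into the row-16 sums and carries.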
u_arrmul32_and0_17 = and_gate(((a >> 0) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_ha0_17_xor0 = (ha(((u_arrmul32_and0_17 >> 0) & 0x01), ((u_arrmul32_fa1_16_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_17_and0 = (ha(((u_arrmul32_and0_17 >> 0) & 0x01), ((u_arrmul32_fa1_16_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_17 = and_gate(((a >> 1) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa1_17_xor1 = (fa(((u_arrmul32_and1_17 >> 0) & 0x01), ((u_arrmul32_fa2_16_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_17_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_17_or0 = (fa(((u_arrmul32_and1_17 >> 0) & 0x01), ((u_arrmul32_fa2_16_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_17_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_17 = and_gate(((a >> 2) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa2_17_xor1 = (fa(((u_arrmul32_and2_17 >> 0) & 0x01), ((u_arrmul32_fa3_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_17_or0 = (fa(((u_arrmul32_and2_17 >> 0) & 0x01), ((u_arrmul32_fa3_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_17 = and_gate(((a >> 3) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa3_17_xor1 = (fa(((u_arrmul32_and3_17 >> 0) & 0x01), ((u_arrmul32_fa4_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_17_or0 = (fa(((u_arrmul32_and3_17 >> 0) & 0x01), ((u_arrmul32_fa4_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_17 = and_gate(((a >> 4) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa4_17_xor1 = (fa(((u_arrmul32_and4_17 >> 0) & 0x01), ((u_arrmul32_fa5_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_17_or0 = (fa(((u_arrmul32_and4_17 >> 0) & 0x01), ((u_arrmul32_fa5_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_17 = and_gate(((a >> 5) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa5_17_xor1 = (fa(((u_arrmul32_and5_17 >> 0) & 0x01), ((u_arrmul32_fa6_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_17_or0 = (fa(((u_arrmul32_and5_17 >> 0) & 0x01), ((u_arrmul32_fa6_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_17 = and_gate(((a >> 6) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa6_17_xor1 = (fa(((u_arrmul32_and6_17 >> 0) & 0x01), ((u_arrmul32_fa7_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_17_or0 = (fa(((u_arrmul32_and6_17 >> 0) & 0x01), ((u_arrmul32_fa7_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_17 = and_gate(((a >> 7) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa7_17_xor1 = (fa(((u_arrmul32_and7_17 >> 0) & 0x01), ((u_arrmul32_fa8_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_17_or0 = (fa(((u_arrmul32_and7_17 >> 0) & 0x01), ((u_arrmul32_fa8_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_17 = and_gate(((a >> 8) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa8_17_xor1 = (fa(((u_arrmul32_and8_17 >> 0) & 0x01), ((u_arrmul32_fa9_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_17_or0 = (fa(((u_arrmul32_and8_17 >> 0) & 0x01), ((u_arrmul32_fa9_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_17 = and_gate(((a >> 9) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa9_17_xor1 = (fa(((u_arrmul32_and9_17 >> 0) & 0x01), ((u_arrmul32_fa10_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_17_or0 = (fa(((u_arrmul32_and9_17 >> 0) & 0x01), ((u_arrmul32_fa10_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_17 = and_gate(((a >> 10) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa10_17_xor1 = (fa(((u_arrmul32_and10_17 >> 0) & 0x01), ((u_arrmul32_fa11_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_17_or0 = (fa(((u_arrmul32_and10_17 >> 0) & 0x01), ((u_arrmul32_fa11_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_17 = and_gate(((a >> 11) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa11_17_xor1 = (fa(((u_arrmul32_and11_17 >> 0) & 0x01), ((u_arrmul32_fa12_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_17_or0 = (fa(((u_arrmul32_and11_17 >> 0) & 0x01), ((u_arrmul32_fa12_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_17 = and_gate(((a >> 12) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa12_17_xor1 = (fa(((u_arrmul32_and12_17 >> 0) & 0x01), ((u_arrmul32_fa13_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_17_or0 = (fa(((u_arrmul32_and12_17 >> 0) & 0x01), ((u_arrmul32_fa13_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_17 = and_gate(((a >> 13) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa13_17_xor1 = (fa(((u_arrmul32_and13_17 >> 0) & 0x01), ((u_arrmul32_fa14_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_17_or0 = (fa(((u_arrmul32_and13_17 >> 0) & 0x01), ((u_arrmul32_fa14_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_17 = and_gate(((a >> 14) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa14_17_xor1 = (fa(((u_arrmul32_and14_17 >> 0) & 0x01), ((u_arrmul32_fa15_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_17_or0 = (fa(((u_arrmul32_and14_17 >> 0) & 0x01), ((u_arrmul32_fa15_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_17 = and_gate(((a >> 15) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa15_17_xor1 = (fa(((u_arrmul32_and15_17 >> 0) & 0x01), ((u_arrmul32_fa16_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_17_or0 = (fa(((u_arrmul32_and15_17 >> 0) & 0x01), ((u_arrmul32_fa16_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_17 = and_gate(((a >> 16) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa16_17_xor1 = (fa(((u_arrmul32_and16_17 >> 0) & 0x01), ((u_arrmul32_fa17_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_17_or0 = (fa(((u_arrmul32_and16_17 >> 0) & 0x01), ((u_arrmul32_fa17_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_17 = and_gate(((a >> 17) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa17_17_xor1 = (fa(((u_arrmul32_and17_17 >> 0) & 0x01), ((u_arrmul32_fa18_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_17_or0 = (fa(((u_arrmul32_and17_17 >> 0) & 0x01), ((u_arrmul32_fa18_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_17 = and_gate(((a >> 18) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa18_17_xor1 = (fa(((u_arrmul32_and18_17 >> 0) & 0x01), ((u_arrmul32_fa19_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_17_or0 = (fa(((u_arrmul32_and18_17 >> 0) & 0x01), ((u_arrmul32_fa19_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_17 = and_gate(((a >> 19) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa19_17_xor1 = (fa(((u_arrmul32_and19_17 >> 0) & 0x01), ((u_arrmul32_fa20_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_17_or0 = (fa(((u_arrmul32_and19_17 >> 0) & 0x01), ((u_arrmul32_fa20_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_17 = and_gate(((a >> 20) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa20_17_xor1 = (fa(((u_arrmul32_and20_17 >> 0) & 0x01), ((u_arrmul32_fa21_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_17_or0 = (fa(((u_arrmul32_and20_17 >> 0) & 0x01), ((u_arrmul32_fa21_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_17 = and_gate(((a >> 21) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa21_17_xor1 = (fa(((u_arrmul32_and21_17 >> 0) & 0x01), ((u_arrmul32_fa22_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_17_or0 = (fa(((u_arrmul32_and21_17 >> 0) & 0x01), ((u_arrmul32_fa22_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_17 = and_gate(((a >> 22) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa22_17_xor1 = (fa(((u_arrmul32_and22_17 >> 0) & 0x01), ((u_arrmul32_fa23_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_17_or0 = (fa(((u_arrmul32_and22_17 >> 0) & 0x01), ((u_arrmul32_fa23_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_17 = and_gate(((a >> 23) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa23_17_xor1 = (fa(((u_arrmul32_and23_17 >> 0) & 0x01), ((u_arrmul32_fa24_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_17_or0 = (fa(((u_arrmul32_and23_17 >> 0) & 0x01), ((u_arrmul32_fa24_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_17 = and_gate(((a >> 24) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa24_17_xor1 = (fa(((u_arrmul32_and24_17 >> 0) & 0x01), ((u_arrmul32_fa25_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_17_or0 = (fa(((u_arrmul32_and24_17 >> 0) & 0x01), ((u_arrmul32_fa25_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_17 = and_gate(((a >> 25) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa25_17_xor1 = (fa(((u_arrmul32_and25_17 >> 0) & 0x01), ((u_arrmul32_fa26_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_17_or0 = (fa(((u_arrmul32_and25_17 >> 0) & 0x01), ((u_arrmul32_fa26_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_17 = and_gate(((a >> 26) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa26_17_xor1 = (fa(((u_arrmul32_and26_17 >> 0) & 0x01), ((u_arrmul32_fa27_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_17_or0 = (fa(((u_arrmul32_and26_17 >> 0) & 0x01), ((u_arrmul32_fa27_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_17 = and_gate(((a >> 27) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa27_17_xor1 = (fa(((u_arrmul32_and27_17 >> 0) & 0x01), ((u_arrmul32_fa28_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_17_or0 = (fa(((u_arrmul32_and27_17 >> 0) & 0x01), ((u_arrmul32_fa28_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_17 = and_gate(((a >> 28) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa28_17_xor1 = (fa(((u_arrmul32_and28_17 >> 0) & 0x01), ((u_arrmul32_fa29_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_17_or0 = (fa(((u_arrmul32_and28_17 >> 0) & 0x01), ((u_arrmul32_fa29_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_17 = and_gate(((a >> 29) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa29_17_xor1 = (fa(((u_arrmul32_and29_17 >> 0) & 0x01), ((u_arrmul32_fa30_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_17_or0 = (fa(((u_arrmul32_and29_17 >> 0) & 0x01), ((u_arrmul32_fa30_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_17 = and_gate(((a >> 30) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa30_17_xor1 = (fa(((u_arrmul32_and30_17 >> 0) & 0x01), ((u_arrmul32_fa31_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_17_or0 = (fa(((u_arrmul32_and30_17 >> 0) & 0x01), ((u_arrmul32_fa31_16_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_17 = and_gate(((a >> 31) & 0x01), ((b >> 17) & 0x01));
u_arrmul32_fa31_17_xor1 = (fa(((u_arrmul32_and31_17 >> 0) & 0x01), ((u_arrmul32_fa31_16_or0 >> 0) & 0x01), ((u_arrmul32_fa30_17_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_17_or0 = (fa(((u_arrmul32_and31_17 >> 0) & 0x01), ((u_arrmul32_fa31_16_or0 >> 0) & 0x01), ((u_arrmul32_fa30_17_or0 >> 0) & 0x01)) >> 1) & 0x01;
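// Row 18: partial products a[i] & b[18] folded into the row-17 sums and carries.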
u_arrmul32_and0_18 = and_gate(((a >> 0) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_ha0_18_xor0 = (ha(((u_arrmul32_and0_18 >> 0) & 0x01), ((u_arrmul32_fa1_17_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_18_and0 = (ha(((u_arrmul32_and0_18 >> 0) & 0x01), ((u_arrmul32_fa1_17_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_18 = and_gate(((a >> 1) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa1_18_xor1 = (fa(((u_arrmul32_and1_18 >> 0) & 0x01), ((u_arrmul32_fa2_17_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_18_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_18_or0 = (fa(((u_arrmul32_and1_18 >> 0) & 0x01), ((u_arrmul32_fa2_17_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_18_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_18 = and_gate(((a >> 2) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa2_18_xor1 = (fa(((u_arrmul32_and2_18 >> 0) & 0x01), ((u_arrmul32_fa3_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_18_or0 = (fa(((u_arrmul32_and2_18 >> 0) & 0x01), ((u_arrmul32_fa3_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_18 = and_gate(((a >> 3) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa3_18_xor1 = (fa(((u_arrmul32_and3_18 >> 0) & 0x01), ((u_arrmul32_fa4_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_18_or0 = (fa(((u_arrmul32_and3_18 >> 0) & 0x01), ((u_arrmul32_fa4_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_18 = and_gate(((a >> 4) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa4_18_xor1 = (fa(((u_arrmul32_and4_18 >> 0) & 0x01), ((u_arrmul32_fa5_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_18_or0 = (fa(((u_arrmul32_and4_18 >> 0) & 0x01), ((u_arrmul32_fa5_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_18 = and_gate(((a >> 5) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa5_18_xor1 = (fa(((u_arrmul32_and5_18 >> 0) & 0x01), ((u_arrmul32_fa6_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_18_or0 = (fa(((u_arrmul32_and5_18 >> 0) & 0x01), ((u_arrmul32_fa6_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_18 = and_gate(((a >> 6) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa6_18_xor1 = (fa(((u_arrmul32_and6_18 >> 0) & 0x01), ((u_arrmul32_fa7_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_18_or0 = (fa(((u_arrmul32_and6_18 >> 0) & 0x01), ((u_arrmul32_fa7_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_18 = and_gate(((a >> 7) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa7_18_xor1 = (fa(((u_arrmul32_and7_18 >> 0) & 0x01), ((u_arrmul32_fa8_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_18_or0 = (fa(((u_arrmul32_and7_18 >> 0) & 0x01), ((u_arrmul32_fa8_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_18 = and_gate(((a >> 8) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa8_18_xor1 = (fa(((u_arrmul32_and8_18 >> 0) & 0x01), ((u_arrmul32_fa9_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_18_or0 = (fa(((u_arrmul32_and8_18 >> 0) & 0x01), ((u_arrmul32_fa9_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_18 = and_gate(((a >> 9) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa9_18_xor1 = (fa(((u_arrmul32_and9_18 >> 0) & 0x01), ((u_arrmul32_fa10_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_18_or0 = (fa(((u_arrmul32_and9_18 >> 0) & 0x01), ((u_arrmul32_fa10_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_18 = and_gate(((a >> 10) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa10_18_xor1 = (fa(((u_arrmul32_and10_18 >> 0) & 0x01), ((u_arrmul32_fa11_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_18_or0 = (fa(((u_arrmul32_and10_18 >> 0) & 0x01), ((u_arrmul32_fa11_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_18 = and_gate(((a >> 11) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa11_18_xor1 = (fa(((u_arrmul32_and11_18 >> 0) & 0x01), ((u_arrmul32_fa12_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_18_or0 = (fa(((u_arrmul32_and11_18 >> 0) & 0x01), ((u_arrmul32_fa12_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_18 = and_gate(((a >> 12) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa12_18_xor1 = (fa(((u_arrmul32_and12_18 >> 0) & 0x01), ((u_arrmul32_fa13_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_18_or0 = (fa(((u_arrmul32_and12_18 >> 0) & 0x01), ((u_arrmul32_fa13_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_18 = and_gate(((a >> 13) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa13_18_xor1 = (fa(((u_arrmul32_and13_18 >> 0) & 0x01), ((u_arrmul32_fa14_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_18_or0 = (fa(((u_arrmul32_and13_18 >> 0) & 0x01), ((u_arrmul32_fa14_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_18 = and_gate(((a >> 14) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa14_18_xor1 = (fa(((u_arrmul32_and14_18 >> 0) & 0x01), ((u_arrmul32_fa15_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_18_or0 = (fa(((u_arrmul32_and14_18 >> 0) & 0x01), ((u_arrmul32_fa15_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_18 = and_gate(((a >> 15) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa15_18_xor1 = (fa(((u_arrmul32_and15_18 >> 0) & 0x01), ((u_arrmul32_fa16_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_18_or0 = (fa(((u_arrmul32_and15_18 >> 0) & 0x01), ((u_arrmul32_fa16_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_18 = and_gate(((a >> 16) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa16_18_xor1 = (fa(((u_arrmul32_and16_18 >> 0) & 0x01), ((u_arrmul32_fa17_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_18_or0 = (fa(((u_arrmul32_and16_18 >> 0) & 0x01), ((u_arrmul32_fa17_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_18 = and_gate(((a >> 17) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa17_18_xor1 = (fa(((u_arrmul32_and17_18 >> 0) & 0x01), ((u_arrmul32_fa18_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_18_or0 = (fa(((u_arrmul32_and17_18 >> 0) & 0x01), ((u_arrmul32_fa18_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_18 = and_gate(((a >> 18) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa18_18_xor1 = (fa(((u_arrmul32_and18_18 >> 0) & 0x01), ((u_arrmul32_fa19_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_18_or0 = (fa(((u_arrmul32_and18_18 >> 0) & 0x01), ((u_arrmul32_fa19_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_18 = and_gate(((a >> 19) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa19_18_xor1 = (fa(((u_arrmul32_and19_18 >> 0) & 0x01), ((u_arrmul32_fa20_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_18_or0 = (fa(((u_arrmul32_and19_18 >> 0) & 0x01), ((u_arrmul32_fa20_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_18 = and_gate(((a >> 20) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa20_18_xor1 = (fa(((u_arrmul32_and20_18 >> 0) & 0x01), ((u_arrmul32_fa21_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_18_or0 = (fa(((u_arrmul32_and20_18 >> 0) & 0x01), ((u_arrmul32_fa21_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_18 = and_gate(((a >> 21) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa21_18_xor1 = (fa(((u_arrmul32_and21_18 >> 0) & 0x01), ((u_arrmul32_fa22_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_18_or0 = (fa(((u_arrmul32_and21_18 >> 0) & 0x01), ((u_arrmul32_fa22_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_18 = and_gate(((a >> 22) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa22_18_xor1 = (fa(((u_arrmul32_and22_18 >> 0) & 0x01), ((u_arrmul32_fa23_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_18_or0 = (fa(((u_arrmul32_and22_18 >> 0) & 0x01), ((u_arrmul32_fa23_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_18 = and_gate(((a >> 23) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa23_18_xor1 = (fa(((u_arrmul32_and23_18 >> 0) & 0x01), ((u_arrmul32_fa24_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_18_or0 = (fa(((u_arrmul32_and23_18 >> 0) & 0x01), ((u_arrmul32_fa24_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_18 = and_gate(((a >> 24) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa24_18_xor1 = (fa(((u_arrmul32_and24_18 >> 0) & 0x01), ((u_arrmul32_fa25_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_18_or0 = (fa(((u_arrmul32_and24_18 >> 0) & 0x01), ((u_arrmul32_fa25_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_18 = and_gate(((a >> 25) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa25_18_xor1 = (fa(((u_arrmul32_and25_18 >> 0) & 0x01), ((u_arrmul32_fa26_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_18_or0 = (fa(((u_arrmul32_and25_18 >> 0) & 0x01), ((u_arrmul32_fa26_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_18 = and_gate(((a >> 26) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa26_18_xor1 = (fa(((u_arrmul32_and26_18 >> 0) & 0x01), ((u_arrmul32_fa27_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_18_or0 = (fa(((u_arrmul32_and26_18 >> 0) & 0x01), ((u_arrmul32_fa27_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_18 = and_gate(((a >> 27) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa27_18_xor1 = (fa(((u_arrmul32_and27_18 >> 0) & 0x01), ((u_arrmul32_fa28_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_18_or0 = (fa(((u_arrmul32_and27_18 >> 0) & 0x01), ((u_arrmul32_fa28_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_18 = and_gate(((a >> 28) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa28_18_xor1 = (fa(((u_arrmul32_and28_18 >> 0) & 0x01), ((u_arrmul32_fa29_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_18_or0 = (fa(((u_arrmul32_and28_18 >> 0) & 0x01), ((u_arrmul32_fa29_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_18 = and_gate(((a >> 29) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa29_18_xor1 = (fa(((u_arrmul32_and29_18 >> 0) & 0x01), ((u_arrmul32_fa30_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_18_or0 = (fa(((u_arrmul32_and29_18 >> 0) & 0x01), ((u_arrmul32_fa30_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_18 = and_gate(((a >> 30) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa30_18_xor1 = (fa(((u_arrmul32_and30_18 >> 0) & 0x01), ((u_arrmul32_fa31_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_18_or0 = (fa(((u_arrmul32_and30_18 >> 0) & 0x01), ((u_arrmul32_fa31_17_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_18 = and_gate(((a >> 31) & 0x01), ((b >> 18) & 0x01));
u_arrmul32_fa31_18_xor1 = (fa(((u_arrmul32_and31_18 >> 0) & 0x01), ((u_arrmul32_fa31_17_or0 >> 0) & 0x01), ((u_arrmul32_fa30_18_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_18_or0 = (fa(((u_arrmul32_and31_18 >> 0) & 0x01), ((u_arrmul32_fa31_17_or0 >> 0) & 0x01), ((u_arrmul32_fa30_18_or0 >> 0) & 0x01)) >> 1) & 0x01;
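/* Partial-product row for b[19]. Column i computes and{i}_19 = a[i] & b[19]
   and adds it to the sum bit arriving diagonally from the row above
   (fa{i+1}_18_xor1) and to the carry rippling in from column i-1 of this
   row (fa{i-1}_19_or0); fa() packs the sum in bit 0 and the carry in bit 1,
   hence the ">> 0" and ">> 1" selections. Column 0 needs only a half adder,
   and column 31 takes the previous row's final carry-out fa31_18_or0 in
   place of a diagonal sum bit. */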
u_arrmul32_and0_19 = and_gate(((a >> 0) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_ha0_19_xor0 = (ha(((u_arrmul32_and0_19 >> 0) & 0x01), ((u_arrmul32_fa1_18_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_19_and0 = (ha(((u_arrmul32_and0_19 >> 0) & 0x01), ((u_arrmul32_fa1_18_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_19 = and_gate(((a >> 1) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa1_19_xor1 = (fa(((u_arrmul32_and1_19 >> 0) & 0x01), ((u_arrmul32_fa2_18_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_19_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_19_or0 = (fa(((u_arrmul32_and1_19 >> 0) & 0x01), ((u_arrmul32_fa2_18_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_19_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_19 = and_gate(((a >> 2) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa2_19_xor1 = (fa(((u_arrmul32_and2_19 >> 0) & 0x01), ((u_arrmul32_fa3_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_19_or0 = (fa(((u_arrmul32_and2_19 >> 0) & 0x01), ((u_arrmul32_fa3_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_19 = and_gate(((a >> 3) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa3_19_xor1 = (fa(((u_arrmul32_and3_19 >> 0) & 0x01), ((u_arrmul32_fa4_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_19_or0 = (fa(((u_arrmul32_and3_19 >> 0) & 0x01), ((u_arrmul32_fa4_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_19 = and_gate(((a >> 4) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa4_19_xor1 = (fa(((u_arrmul32_and4_19 >> 0) & 0x01), ((u_arrmul32_fa5_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_19_or0 = (fa(((u_arrmul32_and4_19 >> 0) & 0x01), ((u_arrmul32_fa5_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_19 = and_gate(((a >> 5) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa5_19_xor1 = (fa(((u_arrmul32_and5_19 >> 0) & 0x01), ((u_arrmul32_fa6_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_19_or0 = (fa(((u_arrmul32_and5_19 >> 0) & 0x01), ((u_arrmul32_fa6_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_19 = and_gate(((a >> 6) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa6_19_xor1 = (fa(((u_arrmul32_and6_19 >> 0) & 0x01), ((u_arrmul32_fa7_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_19_or0 = (fa(((u_arrmul32_and6_19 >> 0) & 0x01), ((u_arrmul32_fa7_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_19 = and_gate(((a >> 7) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa7_19_xor1 = (fa(((u_arrmul32_and7_19 >> 0) & 0x01), ((u_arrmul32_fa8_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_19_or0 = (fa(((u_arrmul32_and7_19 >> 0) & 0x01), ((u_arrmul32_fa8_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_19 = and_gate(((a >> 8) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa8_19_xor1 = (fa(((u_arrmul32_and8_19 >> 0) & 0x01), ((u_arrmul32_fa9_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_19_or0 = (fa(((u_arrmul32_and8_19 >> 0) & 0x01), ((u_arrmul32_fa9_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_19 = and_gate(((a >> 9) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa9_19_xor1 = (fa(((u_arrmul32_and9_19 >> 0) & 0x01), ((u_arrmul32_fa10_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_19_or0 = (fa(((u_arrmul32_and9_19 >> 0) & 0x01), ((u_arrmul32_fa10_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_19 = and_gate(((a >> 10) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa10_19_xor1 = (fa(((u_arrmul32_and10_19 >> 0) & 0x01), ((u_arrmul32_fa11_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_19_or0 = (fa(((u_arrmul32_and10_19 >> 0) & 0x01), ((u_arrmul32_fa11_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_19 = and_gate(((a >> 11) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa11_19_xor1 = (fa(((u_arrmul32_and11_19 >> 0) & 0x01), ((u_arrmul32_fa12_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_19_or0 = (fa(((u_arrmul32_and11_19 >> 0) & 0x01), ((u_arrmul32_fa12_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_19 = and_gate(((a >> 12) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa12_19_xor1 = (fa(((u_arrmul32_and12_19 >> 0) & 0x01), ((u_arrmul32_fa13_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_19_or0 = (fa(((u_arrmul32_and12_19 >> 0) & 0x01), ((u_arrmul32_fa13_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_19 = and_gate(((a >> 13) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa13_19_xor1 = (fa(((u_arrmul32_and13_19 >> 0) & 0x01), ((u_arrmul32_fa14_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_19_or0 = (fa(((u_arrmul32_and13_19 >> 0) & 0x01), ((u_arrmul32_fa14_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_19 = and_gate(((a >> 14) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa14_19_xor1 = (fa(((u_arrmul32_and14_19 >> 0) & 0x01), ((u_arrmul32_fa15_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_19_or0 = (fa(((u_arrmul32_and14_19 >> 0) & 0x01), ((u_arrmul32_fa15_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_19 = and_gate(((a >> 15) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa15_19_xor1 = (fa(((u_arrmul32_and15_19 >> 0) & 0x01), ((u_arrmul32_fa16_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_19_or0 = (fa(((u_arrmul32_and15_19 >> 0) & 0x01), ((u_arrmul32_fa16_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_19 = and_gate(((a >> 16) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa16_19_xor1 = (fa(((u_arrmul32_and16_19 >> 0) & 0x01), ((u_arrmul32_fa17_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_19_or0 = (fa(((u_arrmul32_and16_19 >> 0) & 0x01), ((u_arrmul32_fa17_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_19 = and_gate(((a >> 17) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa17_19_xor1 = (fa(((u_arrmul32_and17_19 >> 0) & 0x01), ((u_arrmul32_fa18_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_19_or0 = (fa(((u_arrmul32_and17_19 >> 0) & 0x01), ((u_arrmul32_fa18_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_19 = and_gate(((a >> 18) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa18_19_xor1 = (fa(((u_arrmul32_and18_19 >> 0) & 0x01), ((u_arrmul32_fa19_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_19_or0 = (fa(((u_arrmul32_and18_19 >> 0) & 0x01), ((u_arrmul32_fa19_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_19 = and_gate(((a >> 19) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa19_19_xor1 = (fa(((u_arrmul32_and19_19 >> 0) & 0x01), ((u_arrmul32_fa20_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_19_or0 = (fa(((u_arrmul32_and19_19 >> 0) & 0x01), ((u_arrmul32_fa20_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_19 = and_gate(((a >> 20) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa20_19_xor1 = (fa(((u_arrmul32_and20_19 >> 0) & 0x01), ((u_arrmul32_fa21_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_19_or0 = (fa(((u_arrmul32_and20_19 >> 0) & 0x01), ((u_arrmul32_fa21_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_19 = and_gate(((a >> 21) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa21_19_xor1 = (fa(((u_arrmul32_and21_19 >> 0) & 0x01), ((u_arrmul32_fa22_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_19_or0 = (fa(((u_arrmul32_and21_19 >> 0) & 0x01), ((u_arrmul32_fa22_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_19 = and_gate(((a >> 22) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa22_19_xor1 = (fa(((u_arrmul32_and22_19 >> 0) & 0x01), ((u_arrmul32_fa23_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_19_or0 = (fa(((u_arrmul32_and22_19 >> 0) & 0x01), ((u_arrmul32_fa23_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_19 = and_gate(((a >> 23) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa23_19_xor1 = (fa(((u_arrmul32_and23_19 >> 0) & 0x01), ((u_arrmul32_fa24_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_19_or0 = (fa(((u_arrmul32_and23_19 >> 0) & 0x01), ((u_arrmul32_fa24_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_19 = and_gate(((a >> 24) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa24_19_xor1 = (fa(((u_arrmul32_and24_19 >> 0) & 0x01), ((u_arrmul32_fa25_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_19_or0 = (fa(((u_arrmul32_and24_19 >> 0) & 0x01), ((u_arrmul32_fa25_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_19 = and_gate(((a >> 25) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa25_19_xor1 = (fa(((u_arrmul32_and25_19 >> 0) & 0x01), ((u_arrmul32_fa26_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_19_or0 = (fa(((u_arrmul32_and25_19 >> 0) & 0x01), ((u_arrmul32_fa26_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_19 = and_gate(((a >> 26) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa26_19_xor1 = (fa(((u_arrmul32_and26_19 >> 0) & 0x01), ((u_arrmul32_fa27_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_19_or0 = (fa(((u_arrmul32_and26_19 >> 0) & 0x01), ((u_arrmul32_fa27_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_19 = and_gate(((a >> 27) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa27_19_xor1 = (fa(((u_arrmul32_and27_19 >> 0) & 0x01), ((u_arrmul32_fa28_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_19_or0 = (fa(((u_arrmul32_and27_19 >> 0) & 0x01), ((u_arrmul32_fa28_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_19 = and_gate(((a >> 28) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa28_19_xor1 = (fa(((u_arrmul32_and28_19 >> 0) & 0x01), ((u_arrmul32_fa29_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_19_or0 = (fa(((u_arrmul32_and28_19 >> 0) & 0x01), ((u_arrmul32_fa29_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_19 = and_gate(((a >> 29) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa29_19_xor1 = (fa(((u_arrmul32_and29_19 >> 0) & 0x01), ((u_arrmul32_fa30_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_19_or0 = (fa(((u_arrmul32_and29_19 >> 0) & 0x01), ((u_arrmul32_fa30_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_19 = and_gate(((a >> 30) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa30_19_xor1 = (fa(((u_arrmul32_and30_19 >> 0) & 0x01), ((u_arrmul32_fa31_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_19_or0 = (fa(((u_arrmul32_and30_19 >> 0) & 0x01), ((u_arrmul32_fa31_18_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_19 = and_gate(((a >> 31) & 0x01), ((b >> 19) & 0x01));
u_arrmul32_fa31_19_xor1 = (fa(((u_arrmul32_and31_19 >> 0) & 0x01), ((u_arrmul32_fa31_18_or0 >> 0) & 0x01), ((u_arrmul32_fa30_19_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_19_or0 = (fa(((u_arrmul32_and31_19 >> 0) & 0x01), ((u_arrmul32_fa31_18_or0 >> 0) & 0x01), ((u_arrmul32_fa30_19_or0 >> 0) & 0x01)) >> 1) & 0x01;
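/* Partial-product row for b[20]: identical cell pattern, consuming the
   row-19 sum bits (fa{i+1}_19_xor1) and final carry-out (fa31_19_or0). */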
u_arrmul32_and0_20 = and_gate(((a >> 0) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_ha0_20_xor0 = (ha(((u_arrmul32_and0_20 >> 0) & 0x01), ((u_arrmul32_fa1_19_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_20_and0 = (ha(((u_arrmul32_and0_20 >> 0) & 0x01), ((u_arrmul32_fa1_19_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_20 = and_gate(((a >> 1) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa1_20_xor1 = (fa(((u_arrmul32_and1_20 >> 0) & 0x01), ((u_arrmul32_fa2_19_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_20_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_20_or0 = (fa(((u_arrmul32_and1_20 >> 0) & 0x01), ((u_arrmul32_fa2_19_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_20_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_20 = and_gate(((a >> 2) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa2_20_xor1 = (fa(((u_arrmul32_and2_20 >> 0) & 0x01), ((u_arrmul32_fa3_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_20_or0 = (fa(((u_arrmul32_and2_20 >> 0) & 0x01), ((u_arrmul32_fa3_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_20 = and_gate(((a >> 3) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa3_20_xor1 = (fa(((u_arrmul32_and3_20 >> 0) & 0x01), ((u_arrmul32_fa4_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_20_or0 = (fa(((u_arrmul32_and3_20 >> 0) & 0x01), ((u_arrmul32_fa4_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_20 = and_gate(((a >> 4) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa4_20_xor1 = (fa(((u_arrmul32_and4_20 >> 0) & 0x01), ((u_arrmul32_fa5_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_20_or0 = (fa(((u_arrmul32_and4_20 >> 0) & 0x01), ((u_arrmul32_fa5_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_20 = and_gate(((a >> 5) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa5_20_xor1 = (fa(((u_arrmul32_and5_20 >> 0) & 0x01), ((u_arrmul32_fa6_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_20_or0 = (fa(((u_arrmul32_and5_20 >> 0) & 0x01), ((u_arrmul32_fa6_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_20 = and_gate(((a >> 6) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa6_20_xor1 = (fa(((u_arrmul32_and6_20 >> 0) & 0x01), ((u_arrmul32_fa7_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_20_or0 = (fa(((u_arrmul32_and6_20 >> 0) & 0x01), ((u_arrmul32_fa7_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_20 = and_gate(((a >> 7) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa7_20_xor1 = (fa(((u_arrmul32_and7_20 >> 0) & 0x01), ((u_arrmul32_fa8_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_20_or0 = (fa(((u_arrmul32_and7_20 >> 0) & 0x01), ((u_arrmul32_fa8_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_20 = and_gate(((a >> 8) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa8_20_xor1 = (fa(((u_arrmul32_and8_20 >> 0) & 0x01), ((u_arrmul32_fa9_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_20_or0 = (fa(((u_arrmul32_and8_20 >> 0) & 0x01), ((u_arrmul32_fa9_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_20 = and_gate(((a >> 9) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa9_20_xor1 = (fa(((u_arrmul32_and9_20 >> 0) & 0x01), ((u_arrmul32_fa10_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_20_or0 = (fa(((u_arrmul32_and9_20 >> 0) & 0x01), ((u_arrmul32_fa10_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_20 = and_gate(((a >> 10) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa10_20_xor1 = (fa(((u_arrmul32_and10_20 >> 0) & 0x01), ((u_arrmul32_fa11_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_20_or0 = (fa(((u_arrmul32_and10_20 >> 0) & 0x01), ((u_arrmul32_fa11_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_20 = and_gate(((a >> 11) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa11_20_xor1 = (fa(((u_arrmul32_and11_20 >> 0) & 0x01), ((u_arrmul32_fa12_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_20_or0 = (fa(((u_arrmul32_and11_20 >> 0) & 0x01), ((u_arrmul32_fa12_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_20 = and_gate(((a >> 12) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa12_20_xor1 = (fa(((u_arrmul32_and12_20 >> 0) & 0x01), ((u_arrmul32_fa13_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_20_or0 = (fa(((u_arrmul32_and12_20 >> 0) & 0x01), ((u_arrmul32_fa13_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_20 = and_gate(((a >> 13) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa13_20_xor1 = (fa(((u_arrmul32_and13_20 >> 0) & 0x01), ((u_arrmul32_fa14_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_20_or0 = (fa(((u_arrmul32_and13_20 >> 0) & 0x01), ((u_arrmul32_fa14_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_20 = and_gate(((a >> 14) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa14_20_xor1 = (fa(((u_arrmul32_and14_20 >> 0) & 0x01), ((u_arrmul32_fa15_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_20_or0 = (fa(((u_arrmul32_and14_20 >> 0) & 0x01), ((u_arrmul32_fa15_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_20 = and_gate(((a >> 15) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa15_20_xor1 = (fa(((u_arrmul32_and15_20 >> 0) & 0x01), ((u_arrmul32_fa16_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_20_or0 = (fa(((u_arrmul32_and15_20 >> 0) & 0x01), ((u_arrmul32_fa16_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_20 = and_gate(((a >> 16) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa16_20_xor1 = (fa(((u_arrmul32_and16_20 >> 0) & 0x01), ((u_arrmul32_fa17_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_20_or0 = (fa(((u_arrmul32_and16_20 >> 0) & 0x01), ((u_arrmul32_fa17_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_20 = and_gate(((a >> 17) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa17_20_xor1 = (fa(((u_arrmul32_and17_20 >> 0) & 0x01), ((u_arrmul32_fa18_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_20_or0 = (fa(((u_arrmul32_and17_20 >> 0) & 0x01), ((u_arrmul32_fa18_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_20 = and_gate(((a >> 18) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa18_20_xor1 = (fa(((u_arrmul32_and18_20 >> 0) & 0x01), ((u_arrmul32_fa19_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_20_or0 = (fa(((u_arrmul32_and18_20 >> 0) & 0x01), ((u_arrmul32_fa19_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_20 = and_gate(((a >> 19) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa19_20_xor1 = (fa(((u_arrmul32_and19_20 >> 0) & 0x01), ((u_arrmul32_fa20_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_20_or0 = (fa(((u_arrmul32_and19_20 >> 0) & 0x01), ((u_arrmul32_fa20_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_20 = and_gate(((a >> 20) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa20_20_xor1 = (fa(((u_arrmul32_and20_20 >> 0) & 0x01), ((u_arrmul32_fa21_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_20_or0 = (fa(((u_arrmul32_and20_20 >> 0) & 0x01), ((u_arrmul32_fa21_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_20 = and_gate(((a >> 21) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa21_20_xor1 = (fa(((u_arrmul32_and21_20 >> 0) & 0x01), ((u_arrmul32_fa22_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_20_or0 = (fa(((u_arrmul32_and21_20 >> 0) & 0x01), ((u_arrmul32_fa22_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_20 = and_gate(((a >> 22) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa22_20_xor1 = (fa(((u_arrmul32_and22_20 >> 0) & 0x01), ((u_arrmul32_fa23_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_20_or0 = (fa(((u_arrmul32_and22_20 >> 0) & 0x01), ((u_arrmul32_fa23_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_20 = and_gate(((a >> 23) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa23_20_xor1 = (fa(((u_arrmul32_and23_20 >> 0) & 0x01), ((u_arrmul32_fa24_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_20_or0 = (fa(((u_arrmul32_and23_20 >> 0) & 0x01), ((u_arrmul32_fa24_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_20 = and_gate(((a >> 24) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa24_20_xor1 = (fa(((u_arrmul32_and24_20 >> 0) & 0x01), ((u_arrmul32_fa25_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_20_or0 = (fa(((u_arrmul32_and24_20 >> 0) & 0x01), ((u_arrmul32_fa25_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_20 = and_gate(((a >> 25) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa25_20_xor1 = (fa(((u_arrmul32_and25_20 >> 0) & 0x01), ((u_arrmul32_fa26_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_20_or0 = (fa(((u_arrmul32_and25_20 >> 0) & 0x01), ((u_arrmul32_fa26_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_20 = and_gate(((a >> 26) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa26_20_xor1 = (fa(((u_arrmul32_and26_20 >> 0) & 0x01), ((u_arrmul32_fa27_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_20_or0 = (fa(((u_arrmul32_and26_20 >> 0) & 0x01), ((u_arrmul32_fa27_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_20 = and_gate(((a >> 27) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa27_20_xor1 = (fa(((u_arrmul32_and27_20 >> 0) & 0x01), ((u_arrmul32_fa28_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_20_or0 = (fa(((u_arrmul32_and27_20 >> 0) & 0x01), ((u_arrmul32_fa28_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_20 = and_gate(((a >> 28) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa28_20_xor1 = (fa(((u_arrmul32_and28_20 >> 0) & 0x01), ((u_arrmul32_fa29_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_20_or0 = (fa(((u_arrmul32_and28_20 >> 0) & 0x01), ((u_arrmul32_fa29_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_20 = and_gate(((a >> 29) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa29_20_xor1 = (fa(((u_arrmul32_and29_20 >> 0) & 0x01), ((u_arrmul32_fa30_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_20_or0 = (fa(((u_arrmul32_and29_20 >> 0) & 0x01), ((u_arrmul32_fa30_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_20 = and_gate(((a >> 30) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa30_20_xor1 = (fa(((u_arrmul32_and30_20 >> 0) & 0x01), ((u_arrmul32_fa31_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_20_or0 = (fa(((u_arrmul32_and30_20 >> 0) & 0x01), ((u_arrmul32_fa31_19_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_20 = and_gate(((a >> 31) & 0x01), ((b >> 20) & 0x01));
u_arrmul32_fa31_20_xor1 = (fa(((u_arrmul32_and31_20 >> 0) & 0x01), ((u_arrmul32_fa31_19_or0 >> 0) & 0x01), ((u_arrmul32_fa30_20_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_20_or0 = (fa(((u_arrmul32_and31_20 >> 0) & 0x01), ((u_arrmul32_fa31_19_or0 >> 0) & 0x01), ((u_arrmul32_fa30_20_or0 >> 0) & 0x01)) >> 1) & 0x01;
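/* Partial-product row for b[21], fed by the row-20 sums and carry-out. */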
u_arrmul32_and0_21 = and_gate(((a >> 0) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_ha0_21_xor0 = (ha(((u_arrmul32_and0_21 >> 0) & 0x01), ((u_arrmul32_fa1_20_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_21_and0 = (ha(((u_arrmul32_and0_21 >> 0) & 0x01), ((u_arrmul32_fa1_20_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_21 = and_gate(((a >> 1) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa1_21_xor1 = (fa(((u_arrmul32_and1_21 >> 0) & 0x01), ((u_arrmul32_fa2_20_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_21_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_21_or0 = (fa(((u_arrmul32_and1_21 >> 0) & 0x01), ((u_arrmul32_fa2_20_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_21_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_21 = and_gate(((a >> 2) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa2_21_xor1 = (fa(((u_arrmul32_and2_21 >> 0) & 0x01), ((u_arrmul32_fa3_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_21_or0 = (fa(((u_arrmul32_and2_21 >> 0) & 0x01), ((u_arrmul32_fa3_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_21 = and_gate(((a >> 3) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa3_21_xor1 = (fa(((u_arrmul32_and3_21 >> 0) & 0x01), ((u_arrmul32_fa4_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_21_or0 = (fa(((u_arrmul32_and3_21 >> 0) & 0x01), ((u_arrmul32_fa4_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_21 = and_gate(((a >> 4) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa4_21_xor1 = (fa(((u_arrmul32_and4_21 >> 0) & 0x01), ((u_arrmul32_fa5_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_21_or0 = (fa(((u_arrmul32_and4_21 >> 0) & 0x01), ((u_arrmul32_fa5_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_21 = and_gate(((a >> 5) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa5_21_xor1 = (fa(((u_arrmul32_and5_21 >> 0) & 0x01), ((u_arrmul32_fa6_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_21_or0 = (fa(((u_arrmul32_and5_21 >> 0) & 0x01), ((u_arrmul32_fa6_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_21 = and_gate(((a >> 6) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa6_21_xor1 = (fa(((u_arrmul32_and6_21 >> 0) & 0x01), ((u_arrmul32_fa7_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_21_or0 = (fa(((u_arrmul32_and6_21 >> 0) & 0x01), ((u_arrmul32_fa7_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_21 = and_gate(((a >> 7) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa7_21_xor1 = (fa(((u_arrmul32_and7_21 >> 0) & 0x01), ((u_arrmul32_fa8_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_21_or0 = (fa(((u_arrmul32_and7_21 >> 0) & 0x01), ((u_arrmul32_fa8_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_21 = and_gate(((a >> 8) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa8_21_xor1 = (fa(((u_arrmul32_and8_21 >> 0) & 0x01), ((u_arrmul32_fa9_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_21_or0 = (fa(((u_arrmul32_and8_21 >> 0) & 0x01), ((u_arrmul32_fa9_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_21 = and_gate(((a >> 9) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa9_21_xor1 = (fa(((u_arrmul32_and9_21 >> 0) & 0x01), ((u_arrmul32_fa10_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_21_or0 = (fa(((u_arrmul32_and9_21 >> 0) & 0x01), ((u_arrmul32_fa10_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_21 = and_gate(((a >> 10) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa10_21_xor1 = (fa(((u_arrmul32_and10_21 >> 0) & 0x01), ((u_arrmul32_fa11_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_21_or0 = (fa(((u_arrmul32_and10_21 >> 0) & 0x01), ((u_arrmul32_fa11_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_21 = and_gate(((a >> 11) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa11_21_xor1 = (fa(((u_arrmul32_and11_21 >> 0) & 0x01), ((u_arrmul32_fa12_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_21_or0 = (fa(((u_arrmul32_and11_21 >> 0) & 0x01), ((u_arrmul32_fa12_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_21 = and_gate(((a >> 12) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa12_21_xor1 = (fa(((u_arrmul32_and12_21 >> 0) & 0x01), ((u_arrmul32_fa13_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_21_or0 = (fa(((u_arrmul32_and12_21 >> 0) & 0x01), ((u_arrmul32_fa13_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_21 = and_gate(((a >> 13) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa13_21_xor1 = (fa(((u_arrmul32_and13_21 >> 0) & 0x01), ((u_arrmul32_fa14_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_21_or0 = (fa(((u_arrmul32_and13_21 >> 0) & 0x01), ((u_arrmul32_fa14_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_21 = and_gate(((a >> 14) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa14_21_xor1 = (fa(((u_arrmul32_and14_21 >> 0) & 0x01), ((u_arrmul32_fa15_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_21_or0 = (fa(((u_arrmul32_and14_21 >> 0) & 0x01), ((u_arrmul32_fa15_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_21 = and_gate(((a >> 15) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa15_21_xor1 = (fa(((u_arrmul32_and15_21 >> 0) & 0x01), ((u_arrmul32_fa16_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_21_or0 = (fa(((u_arrmul32_and15_21 >> 0) & 0x01), ((u_arrmul32_fa16_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_21 = and_gate(((a >> 16) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa16_21_xor1 = (fa(((u_arrmul32_and16_21 >> 0) & 0x01), ((u_arrmul32_fa17_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_21_or0 = (fa(((u_arrmul32_and16_21 >> 0) & 0x01), ((u_arrmul32_fa17_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_21 = and_gate(((a >> 17) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa17_21_xor1 = (fa(((u_arrmul32_and17_21 >> 0) & 0x01), ((u_arrmul32_fa18_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_21_or0 = (fa(((u_arrmul32_and17_21 >> 0) & 0x01), ((u_arrmul32_fa18_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_21 = and_gate(((a >> 18) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa18_21_xor1 = (fa(((u_arrmul32_and18_21 >> 0) & 0x01), ((u_arrmul32_fa19_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_21_or0 = (fa(((u_arrmul32_and18_21 >> 0) & 0x01), ((u_arrmul32_fa19_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_21 = and_gate(((a >> 19) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa19_21_xor1 = (fa(((u_arrmul32_and19_21 >> 0) & 0x01), ((u_arrmul32_fa20_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_21_or0 = (fa(((u_arrmul32_and19_21 >> 0) & 0x01), ((u_arrmul32_fa20_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_21 = and_gate(((a >> 20) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa20_21_xor1 = (fa(((u_arrmul32_and20_21 >> 0) & 0x01), ((u_arrmul32_fa21_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_21_or0 = (fa(((u_arrmul32_and20_21 >> 0) & 0x01), ((u_arrmul32_fa21_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_21 = and_gate(((a >> 21) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa21_21_xor1 = (fa(((u_arrmul32_and21_21 >> 0) & 0x01), ((u_arrmul32_fa22_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_21_or0 = (fa(((u_arrmul32_and21_21 >> 0) & 0x01), ((u_arrmul32_fa22_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_21 = and_gate(((a >> 22) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa22_21_xor1 = (fa(((u_arrmul32_and22_21 >> 0) & 0x01), ((u_arrmul32_fa23_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_21_or0 = (fa(((u_arrmul32_and22_21 >> 0) & 0x01), ((u_arrmul32_fa23_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_21 = and_gate(((a >> 23) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa23_21_xor1 = (fa(((u_arrmul32_and23_21 >> 0) & 0x01), ((u_arrmul32_fa24_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_21_or0 = (fa(((u_arrmul32_and23_21 >> 0) & 0x01), ((u_arrmul32_fa24_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_21 = and_gate(((a >> 24) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa24_21_xor1 = (fa(((u_arrmul32_and24_21 >> 0) & 0x01), ((u_arrmul32_fa25_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_21_or0 = (fa(((u_arrmul32_and24_21 >> 0) & 0x01), ((u_arrmul32_fa25_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_21 = and_gate(((a >> 25) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa25_21_xor1 = (fa(((u_arrmul32_and25_21 >> 0) & 0x01), ((u_arrmul32_fa26_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_21_or0 = (fa(((u_arrmul32_and25_21 >> 0) & 0x01), ((u_arrmul32_fa26_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_21 = and_gate(((a >> 26) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa26_21_xor1 = (fa(((u_arrmul32_and26_21 >> 0) & 0x01), ((u_arrmul32_fa27_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_21_or0 = (fa(((u_arrmul32_and26_21 >> 0) & 0x01), ((u_arrmul32_fa27_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_21 = and_gate(((a >> 27) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa27_21_xor1 = (fa(((u_arrmul32_and27_21 >> 0) & 0x01), ((u_arrmul32_fa28_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_21_or0 = (fa(((u_arrmul32_and27_21 >> 0) & 0x01), ((u_arrmul32_fa28_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_21 = and_gate(((a >> 28) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa28_21_xor1 = (fa(((u_arrmul32_and28_21 >> 0) & 0x01), ((u_arrmul32_fa29_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_21_or0 = (fa(((u_arrmul32_and28_21 >> 0) & 0x01), ((u_arrmul32_fa29_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_21 = and_gate(((a >> 29) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa29_21_xor1 = (fa(((u_arrmul32_and29_21 >> 0) & 0x01), ((u_arrmul32_fa30_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_21_or0 = (fa(((u_arrmul32_and29_21 >> 0) & 0x01), ((u_arrmul32_fa30_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_21 = and_gate(((a >> 30) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa30_21_xor1 = (fa(((u_arrmul32_and30_21 >> 0) & 0x01), ((u_arrmul32_fa31_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_21_or0 = (fa(((u_arrmul32_and30_21 >> 0) & 0x01), ((u_arrmul32_fa31_20_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_21 = and_gate(((a >> 31) & 0x01), ((b >> 21) & 0x01));
u_arrmul32_fa31_21_xor1 = (fa(((u_arrmul32_and31_21 >> 0) & 0x01), ((u_arrmul32_fa31_20_or0 >> 0) & 0x01), ((u_arrmul32_fa30_21_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_21_or0 = (fa(((u_arrmul32_and31_21 >> 0) & 0x01), ((u_arrmul32_fa31_20_or0 >> 0) & 0x01), ((u_arrmul32_fa30_21_or0 >> 0) & 0x01)) >> 1) & 0x01;
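/* Partial-product row for b[22], fed by the row-21 sums and carry-out. */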
u_arrmul32_and0_22 = and_gate(((a >> 0) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_ha0_22_xor0 = (ha(((u_arrmul32_and0_22 >> 0) & 0x01), ((u_arrmul32_fa1_21_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_22_and0 = (ha(((u_arrmul32_and0_22 >> 0) & 0x01), ((u_arrmul32_fa1_21_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_22 = and_gate(((a >> 1) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa1_22_xor1 = (fa(((u_arrmul32_and1_22 >> 0) & 0x01), ((u_arrmul32_fa2_21_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_22_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_22_or0 = (fa(((u_arrmul32_and1_22 >> 0) & 0x01), ((u_arrmul32_fa2_21_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_22_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_22 = and_gate(((a >> 2) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa2_22_xor1 = (fa(((u_arrmul32_and2_22 >> 0) & 0x01), ((u_arrmul32_fa3_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_22_or0 = (fa(((u_arrmul32_and2_22 >> 0) & 0x01), ((u_arrmul32_fa3_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_22 = and_gate(((a >> 3) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa3_22_xor1 = (fa(((u_arrmul32_and3_22 >> 0) & 0x01), ((u_arrmul32_fa4_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_22_or0 = (fa(((u_arrmul32_and3_22 >> 0) & 0x01), ((u_arrmul32_fa4_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_22 = and_gate(((a >> 4) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa4_22_xor1 = (fa(((u_arrmul32_and4_22 >> 0) & 0x01), ((u_arrmul32_fa5_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_22_or0 = (fa(((u_arrmul32_and4_22 >> 0) & 0x01), ((u_arrmul32_fa5_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_22 = and_gate(((a >> 5) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa5_22_xor1 = (fa(((u_arrmul32_and5_22 >> 0) & 0x01), ((u_arrmul32_fa6_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_22_or0 = (fa(((u_arrmul32_and5_22 >> 0) & 0x01), ((u_arrmul32_fa6_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_22 = and_gate(((a >> 6) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa6_22_xor1 = (fa(((u_arrmul32_and6_22 >> 0) & 0x01), ((u_arrmul32_fa7_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_22_or0 = (fa(((u_arrmul32_and6_22 >> 0) & 0x01), ((u_arrmul32_fa7_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_22 = and_gate(((a >> 7) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa7_22_xor1 = (fa(((u_arrmul32_and7_22 >> 0) & 0x01), ((u_arrmul32_fa8_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_22_or0 = (fa(((u_arrmul32_and7_22 >> 0) & 0x01), ((u_arrmul32_fa8_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_22 = and_gate(((a >> 8) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa8_22_xor1 = (fa(((u_arrmul32_and8_22 >> 0) & 0x01), ((u_arrmul32_fa9_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_22_or0 = (fa(((u_arrmul32_and8_22 >> 0) & 0x01), ((u_arrmul32_fa9_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_22 = and_gate(((a >> 9) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa9_22_xor1 = (fa(((u_arrmul32_and9_22 >> 0) & 0x01), ((u_arrmul32_fa10_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_22_or0 = (fa(((u_arrmul32_and9_22 >> 0) & 0x01), ((u_arrmul32_fa10_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_22 = and_gate(((a >> 10) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa10_22_xor1 = (fa(((u_arrmul32_and10_22 >> 0) & 0x01), ((u_arrmul32_fa11_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_22_or0 = (fa(((u_arrmul32_and10_22 >> 0) & 0x01), ((u_arrmul32_fa11_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_22 = and_gate(((a >> 11) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa11_22_xor1 = (fa(((u_arrmul32_and11_22 >> 0) & 0x01), ((u_arrmul32_fa12_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_22_or0 = (fa(((u_arrmul32_and11_22 >> 0) & 0x01), ((u_arrmul32_fa12_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_22 = and_gate(((a >> 12) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa12_22_xor1 = (fa(((u_arrmul32_and12_22 >> 0) & 0x01), ((u_arrmul32_fa13_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_22_or0 = (fa(((u_arrmul32_and12_22 >> 0) & 0x01), ((u_arrmul32_fa13_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_22 = and_gate(((a >> 13) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa13_22_xor1 = (fa(((u_arrmul32_and13_22 >> 0) & 0x01), ((u_arrmul32_fa14_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_22_or0 = (fa(((u_arrmul32_and13_22 >> 0) & 0x01), ((u_arrmul32_fa14_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_22 = and_gate(((a >> 14) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa14_22_xor1 = (fa(((u_arrmul32_and14_22 >> 0) & 0x01), ((u_arrmul32_fa15_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_22_or0 = (fa(((u_arrmul32_and14_22 >> 0) & 0x01), ((u_arrmul32_fa15_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_22 = and_gate(((a >> 15) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa15_22_xor1 = (fa(((u_arrmul32_and15_22 >> 0) & 0x01), ((u_arrmul32_fa16_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_22_or0 = (fa(((u_arrmul32_and15_22 >> 0) & 0x01), ((u_arrmul32_fa16_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_22 = and_gate(((a >> 16) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa16_22_xor1 = (fa(((u_arrmul32_and16_22 >> 0) & 0x01), ((u_arrmul32_fa17_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_22_or0 = (fa(((u_arrmul32_and16_22 >> 0) & 0x01), ((u_arrmul32_fa17_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_22 = and_gate(((a >> 17) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa17_22_xor1 = (fa(((u_arrmul32_and17_22 >> 0) & 0x01), ((u_arrmul32_fa18_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_22_or0 = (fa(((u_arrmul32_and17_22 >> 0) & 0x01), ((u_arrmul32_fa18_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_22 = and_gate(((a >> 18) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa18_22_xor1 = (fa(((u_arrmul32_and18_22 >> 0) & 0x01), ((u_arrmul32_fa19_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_22_or0 = (fa(((u_arrmul32_and18_22 >> 0) & 0x01), ((u_arrmul32_fa19_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_22 = and_gate(((a >> 19) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa19_22_xor1 = (fa(((u_arrmul32_and19_22 >> 0) & 0x01), ((u_arrmul32_fa20_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_22_or0 = (fa(((u_arrmul32_and19_22 >> 0) & 0x01), ((u_arrmul32_fa20_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_22 = and_gate(((a >> 20) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa20_22_xor1 = (fa(((u_arrmul32_and20_22 >> 0) & 0x01), ((u_arrmul32_fa21_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_22_or0 = (fa(((u_arrmul32_and20_22 >> 0) & 0x01), ((u_arrmul32_fa21_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_22 = and_gate(((a >> 21) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa21_22_xor1 = (fa(((u_arrmul32_and21_22 >> 0) & 0x01), ((u_arrmul32_fa22_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_22_or0 = (fa(((u_arrmul32_and21_22 >> 0) & 0x01), ((u_arrmul32_fa22_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_22 = and_gate(((a >> 22) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa22_22_xor1 = (fa(((u_arrmul32_and22_22 >> 0) & 0x01), ((u_arrmul32_fa23_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_22_or0 = (fa(((u_arrmul32_and22_22 >> 0) & 0x01), ((u_arrmul32_fa23_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_22 = and_gate(((a >> 23) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa23_22_xor1 = (fa(((u_arrmul32_and23_22 >> 0) & 0x01), ((u_arrmul32_fa24_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_22_or0 = (fa(((u_arrmul32_and23_22 >> 0) & 0x01), ((u_arrmul32_fa24_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_22 = and_gate(((a >> 24) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa24_22_xor1 = (fa(((u_arrmul32_and24_22 >> 0) & 0x01), ((u_arrmul32_fa25_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_22_or0 = (fa(((u_arrmul32_and24_22 >> 0) & 0x01), ((u_arrmul32_fa25_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_22 = and_gate(((a >> 25) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa25_22_xor1 = (fa(((u_arrmul32_and25_22 >> 0) & 0x01), ((u_arrmul32_fa26_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_22_or0 = (fa(((u_arrmul32_and25_22 >> 0) & 0x01), ((u_arrmul32_fa26_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_22 = and_gate(((a >> 26) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa26_22_xor1 = (fa(((u_arrmul32_and26_22 >> 0) & 0x01), ((u_arrmul32_fa27_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_22_or0 = (fa(((u_arrmul32_and26_22 >> 0) & 0x01), ((u_arrmul32_fa27_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_22 = and_gate(((a >> 27) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa27_22_xor1 = (fa(((u_arrmul32_and27_22 >> 0) & 0x01), ((u_arrmul32_fa28_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_22_or0 = (fa(((u_arrmul32_and27_22 >> 0) & 0x01), ((u_arrmul32_fa28_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_22 = and_gate(((a >> 28) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa28_22_xor1 = (fa(((u_arrmul32_and28_22 >> 0) & 0x01), ((u_arrmul32_fa29_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_22_or0 = (fa(((u_arrmul32_and28_22 >> 0) & 0x01), ((u_arrmul32_fa29_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_22 = and_gate(((a >> 29) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa29_22_xor1 = (fa(((u_arrmul32_and29_22 >> 0) & 0x01), ((u_arrmul32_fa30_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_22_or0 = (fa(((u_arrmul32_and29_22 >> 0) & 0x01), ((u_arrmul32_fa30_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_22 = and_gate(((a >> 30) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa30_22_xor1 = (fa(((u_arrmul32_and30_22 >> 0) & 0x01), ((u_arrmul32_fa31_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_22_or0 = (fa(((u_arrmul32_and30_22 >> 0) & 0x01), ((u_arrmul32_fa31_21_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_22 = and_gate(((a >> 31) & 0x01), ((b >> 22) & 0x01));
u_arrmul32_fa31_22_xor1 = (fa(((u_arrmul32_and31_22 >> 0) & 0x01), ((u_arrmul32_fa31_21_or0 >> 0) & 0x01), ((u_arrmul32_fa30_22_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_22_or0 = (fa(((u_arrmul32_and31_22 >> 0) & 0x01), ((u_arrmul32_fa31_21_or0 >> 0) & 0x01), ((u_arrmul32_fa30_22_or0 >> 0) & 0x01)) >> 1) & 0x01;
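/* Row for multiplier bit b[23]: each AND gate below forms the partial
   product a[i] & b[23]; the half adder adds the first of these to the
   incoming sum from the b[22] row, each full adder combines a partial
   product with the next-higher sum from the b[22] row and the carry of
   its left neighbour, and the last cell (i = 31) takes the b[22] row's
   final carry-out in place of a sum. */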
u_arrmul32_and0_23 = and_gate(((a >> 0) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_ha0_23_xor0 = (ha(((u_arrmul32_and0_23 >> 0) & 0x01), ((u_arrmul32_fa1_22_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_23_and0 = (ha(((u_arrmul32_and0_23 >> 0) & 0x01), ((u_arrmul32_fa1_22_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_23 = and_gate(((a >> 1) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa1_23_xor1 = (fa(((u_arrmul32_and1_23 >> 0) & 0x01), ((u_arrmul32_fa2_22_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_23_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_23_or0 = (fa(((u_arrmul32_and1_23 >> 0) & 0x01), ((u_arrmul32_fa2_22_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_23_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_23 = and_gate(((a >> 2) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa2_23_xor1 = (fa(((u_arrmul32_and2_23 >> 0) & 0x01), ((u_arrmul32_fa3_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_23_or0 = (fa(((u_arrmul32_and2_23 >> 0) & 0x01), ((u_arrmul32_fa3_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_23 = and_gate(((a >> 3) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa3_23_xor1 = (fa(((u_arrmul32_and3_23 >> 0) & 0x01), ((u_arrmul32_fa4_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_23_or0 = (fa(((u_arrmul32_and3_23 >> 0) & 0x01), ((u_arrmul32_fa4_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_23 = and_gate(((a >> 4) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa4_23_xor1 = (fa(((u_arrmul32_and4_23 >> 0) & 0x01), ((u_arrmul32_fa5_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_23_or0 = (fa(((u_arrmul32_and4_23 >> 0) & 0x01), ((u_arrmul32_fa5_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_23 = and_gate(((a >> 5) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa5_23_xor1 = (fa(((u_arrmul32_and5_23 >> 0) & 0x01), ((u_arrmul32_fa6_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_23_or0 = (fa(((u_arrmul32_and5_23 >> 0) & 0x01), ((u_arrmul32_fa6_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_23 = and_gate(((a >> 6) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa6_23_xor1 = (fa(((u_arrmul32_and6_23 >> 0) & 0x01), ((u_arrmul32_fa7_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_23_or0 = (fa(((u_arrmul32_and6_23 >> 0) & 0x01), ((u_arrmul32_fa7_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_23 = and_gate(((a >> 7) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa7_23_xor1 = (fa(((u_arrmul32_and7_23 >> 0) & 0x01), ((u_arrmul32_fa8_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_23_or0 = (fa(((u_arrmul32_and7_23 >> 0) & 0x01), ((u_arrmul32_fa8_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_23 = and_gate(((a >> 8) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa8_23_xor1 = (fa(((u_arrmul32_and8_23 >> 0) & 0x01), ((u_arrmul32_fa9_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_23_or0 = (fa(((u_arrmul32_and8_23 >> 0) & 0x01), ((u_arrmul32_fa9_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_23 = and_gate(((a >> 9) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa9_23_xor1 = (fa(((u_arrmul32_and9_23 >> 0) & 0x01), ((u_arrmul32_fa10_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_23_or0 = (fa(((u_arrmul32_and9_23 >> 0) & 0x01), ((u_arrmul32_fa10_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_23 = and_gate(((a >> 10) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa10_23_xor1 = (fa(((u_arrmul32_and10_23 >> 0) & 0x01), ((u_arrmul32_fa11_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_23_or0 = (fa(((u_arrmul32_and10_23 >> 0) & 0x01), ((u_arrmul32_fa11_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_23 = and_gate(((a >> 11) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa11_23_xor1 = (fa(((u_arrmul32_and11_23 >> 0) & 0x01), ((u_arrmul32_fa12_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_23_or0 = (fa(((u_arrmul32_and11_23 >> 0) & 0x01), ((u_arrmul32_fa12_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_23 = and_gate(((a >> 12) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa12_23_xor1 = (fa(((u_arrmul32_and12_23 >> 0) & 0x01), ((u_arrmul32_fa13_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_23_or0 = (fa(((u_arrmul32_and12_23 >> 0) & 0x01), ((u_arrmul32_fa13_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_23 = and_gate(((a >> 13) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa13_23_xor1 = (fa(((u_arrmul32_and13_23 >> 0) & 0x01), ((u_arrmul32_fa14_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_23_or0 = (fa(((u_arrmul32_and13_23 >> 0) & 0x01), ((u_arrmul32_fa14_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_23 = and_gate(((a >> 14) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa14_23_xor1 = (fa(((u_arrmul32_and14_23 >> 0) & 0x01), ((u_arrmul32_fa15_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_23_or0 = (fa(((u_arrmul32_and14_23 >> 0) & 0x01), ((u_arrmul32_fa15_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_23 = and_gate(((a >> 15) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa15_23_xor1 = (fa(((u_arrmul32_and15_23 >> 0) & 0x01), ((u_arrmul32_fa16_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_23_or0 = (fa(((u_arrmul32_and15_23 >> 0) & 0x01), ((u_arrmul32_fa16_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_23 = and_gate(((a >> 16) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa16_23_xor1 = (fa(((u_arrmul32_and16_23 >> 0) & 0x01), ((u_arrmul32_fa17_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_23_or0 = (fa(((u_arrmul32_and16_23 >> 0) & 0x01), ((u_arrmul32_fa17_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_23 = and_gate(((a >> 17) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa17_23_xor1 = (fa(((u_arrmul32_and17_23 >> 0) & 0x01), ((u_arrmul32_fa18_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_23_or0 = (fa(((u_arrmul32_and17_23 >> 0) & 0x01), ((u_arrmul32_fa18_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_23 = and_gate(((a >> 18) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa18_23_xor1 = (fa(((u_arrmul32_and18_23 >> 0) & 0x01), ((u_arrmul32_fa19_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_23_or0 = (fa(((u_arrmul32_and18_23 >> 0) & 0x01), ((u_arrmul32_fa19_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_23 = and_gate(((a >> 19) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa19_23_xor1 = (fa(((u_arrmul32_and19_23 >> 0) & 0x01), ((u_arrmul32_fa20_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_23_or0 = (fa(((u_arrmul32_and19_23 >> 0) & 0x01), ((u_arrmul32_fa20_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_23 = and_gate(((a >> 20) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa20_23_xor1 = (fa(((u_arrmul32_and20_23 >> 0) & 0x01), ((u_arrmul32_fa21_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_23_or0 = (fa(((u_arrmul32_and20_23 >> 0) & 0x01), ((u_arrmul32_fa21_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_23 = and_gate(((a >> 21) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa21_23_xor1 = (fa(((u_arrmul32_and21_23 >> 0) & 0x01), ((u_arrmul32_fa22_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_23_or0 = (fa(((u_arrmul32_and21_23 >> 0) & 0x01), ((u_arrmul32_fa22_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_23 = and_gate(((a >> 22) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa22_23_xor1 = (fa(((u_arrmul32_and22_23 >> 0) & 0x01), ((u_arrmul32_fa23_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_23_or0 = (fa(((u_arrmul32_and22_23 >> 0) & 0x01), ((u_arrmul32_fa23_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_23 = and_gate(((a >> 23) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa23_23_xor1 = (fa(((u_arrmul32_and23_23 >> 0) & 0x01), ((u_arrmul32_fa24_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_23_or0 = (fa(((u_arrmul32_and23_23 >> 0) & 0x01), ((u_arrmul32_fa24_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_23 = and_gate(((a >> 24) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa24_23_xor1 = (fa(((u_arrmul32_and24_23 >> 0) & 0x01), ((u_arrmul32_fa25_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_23_or0 = (fa(((u_arrmul32_and24_23 >> 0) & 0x01), ((u_arrmul32_fa25_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_23 = and_gate(((a >> 25) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa25_23_xor1 = (fa(((u_arrmul32_and25_23 >> 0) & 0x01), ((u_arrmul32_fa26_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_23_or0 = (fa(((u_arrmul32_and25_23 >> 0) & 0x01), ((u_arrmul32_fa26_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_23 = and_gate(((a >> 26) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa26_23_xor1 = (fa(((u_arrmul32_and26_23 >> 0) & 0x01), ((u_arrmul32_fa27_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_23_or0 = (fa(((u_arrmul32_and26_23 >> 0) & 0x01), ((u_arrmul32_fa27_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_23 = and_gate(((a >> 27) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa27_23_xor1 = (fa(((u_arrmul32_and27_23 >> 0) & 0x01), ((u_arrmul32_fa28_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_23_or0 = (fa(((u_arrmul32_and27_23 >> 0) & 0x01), ((u_arrmul32_fa28_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_23 = and_gate(((a >> 28) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa28_23_xor1 = (fa(((u_arrmul32_and28_23 >> 0) & 0x01), ((u_arrmul32_fa29_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_23_or0 = (fa(((u_arrmul32_and28_23 >> 0) & 0x01), ((u_arrmul32_fa29_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_23 = and_gate(((a >> 29) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa29_23_xor1 = (fa(((u_arrmul32_and29_23 >> 0) & 0x01), ((u_arrmul32_fa30_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_23_or0 = (fa(((u_arrmul32_and29_23 >> 0) & 0x01), ((u_arrmul32_fa30_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_23 = and_gate(((a >> 30) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa30_23_xor1 = (fa(((u_arrmul32_and30_23 >> 0) & 0x01), ((u_arrmul32_fa31_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_23_or0 = (fa(((u_arrmul32_and30_23 >> 0) & 0x01), ((u_arrmul32_fa31_22_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_23 = and_gate(((a >> 31) & 0x01), ((b >> 23) & 0x01));
u_arrmul32_fa31_23_xor1 = (fa(((u_arrmul32_and31_23 >> 0) & 0x01), ((u_arrmul32_fa31_22_or0 >> 0) & 0x01), ((u_arrmul32_fa30_23_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_23_or0 = (fa(((u_arrmul32_and31_23 >> 0) & 0x01), ((u_arrmul32_fa31_22_or0 >> 0) & 0x01), ((u_arrmul32_fa30_23_or0 >> 0) & 0x01)) >> 1) & 0x01;
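/* Row for multiplier bit b[24]; wired the same way as the b[23] row,
   consuming that row's sums and carries. */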
u_arrmul32_and0_24 = and_gate(((a >> 0) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_ha0_24_xor0 = (ha(((u_arrmul32_and0_24 >> 0) & 0x01), ((u_arrmul32_fa1_23_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_24_and0 = (ha(((u_arrmul32_and0_24 >> 0) & 0x01), ((u_arrmul32_fa1_23_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_24 = and_gate(((a >> 1) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa1_24_xor1 = (fa(((u_arrmul32_and1_24 >> 0) & 0x01), ((u_arrmul32_fa2_23_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_24_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_24_or0 = (fa(((u_arrmul32_and1_24 >> 0) & 0x01), ((u_arrmul32_fa2_23_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_24_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_24 = and_gate(((a >> 2) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa2_24_xor1 = (fa(((u_arrmul32_and2_24 >> 0) & 0x01), ((u_arrmul32_fa3_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_24_or0 = (fa(((u_arrmul32_and2_24 >> 0) & 0x01), ((u_arrmul32_fa3_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_24 = and_gate(((a >> 3) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa3_24_xor1 = (fa(((u_arrmul32_and3_24 >> 0) & 0x01), ((u_arrmul32_fa4_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_24_or0 = (fa(((u_arrmul32_and3_24 >> 0) & 0x01), ((u_arrmul32_fa4_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_24 = and_gate(((a >> 4) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa4_24_xor1 = (fa(((u_arrmul32_and4_24 >> 0) & 0x01), ((u_arrmul32_fa5_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_24_or0 = (fa(((u_arrmul32_and4_24 >> 0) & 0x01), ((u_arrmul32_fa5_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_24 = and_gate(((a >> 5) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa5_24_xor1 = (fa(((u_arrmul32_and5_24 >> 0) & 0x01), ((u_arrmul32_fa6_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_24_or0 = (fa(((u_arrmul32_and5_24 >> 0) & 0x01), ((u_arrmul32_fa6_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_24 = and_gate(((a >> 6) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa6_24_xor1 = (fa(((u_arrmul32_and6_24 >> 0) & 0x01), ((u_arrmul32_fa7_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_24_or0 = (fa(((u_arrmul32_and6_24 >> 0) & 0x01), ((u_arrmul32_fa7_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_24 = and_gate(((a >> 7) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa7_24_xor1 = (fa(((u_arrmul32_and7_24 >> 0) & 0x01), ((u_arrmul32_fa8_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_24_or0 = (fa(((u_arrmul32_and7_24 >> 0) & 0x01), ((u_arrmul32_fa8_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_24 = and_gate(((a >> 8) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa8_24_xor1 = (fa(((u_arrmul32_and8_24 >> 0) & 0x01), ((u_arrmul32_fa9_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_24_or0 = (fa(((u_arrmul32_and8_24 >> 0) & 0x01), ((u_arrmul32_fa9_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_24 = and_gate(((a >> 9) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa9_24_xor1 = (fa(((u_arrmul32_and9_24 >> 0) & 0x01), ((u_arrmul32_fa10_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_24_or0 = (fa(((u_arrmul32_and9_24 >> 0) & 0x01), ((u_arrmul32_fa10_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_24 = and_gate(((a >> 10) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa10_24_xor1 = (fa(((u_arrmul32_and10_24 >> 0) & 0x01), ((u_arrmul32_fa11_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_24_or0 = (fa(((u_arrmul32_and10_24 >> 0) & 0x01), ((u_arrmul32_fa11_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_24 = and_gate(((a >> 11) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa11_24_xor1 = (fa(((u_arrmul32_and11_24 >> 0) & 0x01), ((u_arrmul32_fa12_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_24_or0 = (fa(((u_arrmul32_and11_24 >> 0) & 0x01), ((u_arrmul32_fa12_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_24 = and_gate(((a >> 12) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa12_24_xor1 = (fa(((u_arrmul32_and12_24 >> 0) & 0x01), ((u_arrmul32_fa13_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_24_or0 = (fa(((u_arrmul32_and12_24 >> 0) & 0x01), ((u_arrmul32_fa13_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_24 = and_gate(((a >> 13) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa13_24_xor1 = (fa(((u_arrmul32_and13_24 >> 0) & 0x01), ((u_arrmul32_fa14_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_24_or0 = (fa(((u_arrmul32_and13_24 >> 0) & 0x01), ((u_arrmul32_fa14_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_24 = and_gate(((a >> 14) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa14_24_xor1 = (fa(((u_arrmul32_and14_24 >> 0) & 0x01), ((u_arrmul32_fa15_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_24_or0 = (fa(((u_arrmul32_and14_24 >> 0) & 0x01), ((u_arrmul32_fa15_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_24 = and_gate(((a >> 15) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa15_24_xor1 = (fa(((u_arrmul32_and15_24 >> 0) & 0x01), ((u_arrmul32_fa16_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_24_or0 = (fa(((u_arrmul32_and15_24 >> 0) & 0x01), ((u_arrmul32_fa16_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_24 = and_gate(((a >> 16) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa16_24_xor1 = (fa(((u_arrmul32_and16_24 >> 0) & 0x01), ((u_arrmul32_fa17_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_24_or0 = (fa(((u_arrmul32_and16_24 >> 0) & 0x01), ((u_arrmul32_fa17_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_24 = and_gate(((a >> 17) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa17_24_xor1 = (fa(((u_arrmul32_and17_24 >> 0) & 0x01), ((u_arrmul32_fa18_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_24_or0 = (fa(((u_arrmul32_and17_24 >> 0) & 0x01), ((u_arrmul32_fa18_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_24 = and_gate(((a >> 18) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa18_24_xor1 = (fa(((u_arrmul32_and18_24 >> 0) & 0x01), ((u_arrmul32_fa19_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_24_or0 = (fa(((u_arrmul32_and18_24 >> 0) & 0x01), ((u_arrmul32_fa19_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_24 = and_gate(((a >> 19) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa19_24_xor1 = (fa(((u_arrmul32_and19_24 >> 0) & 0x01), ((u_arrmul32_fa20_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_24_or0 = (fa(((u_arrmul32_and19_24 >> 0) & 0x01), ((u_arrmul32_fa20_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_24 = and_gate(((a >> 20) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa20_24_xor1 = (fa(((u_arrmul32_and20_24 >> 0) & 0x01), ((u_arrmul32_fa21_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_24_or0 = (fa(((u_arrmul32_and20_24 >> 0) & 0x01), ((u_arrmul32_fa21_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_24 = and_gate(((a >> 21) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa21_24_xor1 = (fa(((u_arrmul32_and21_24 >> 0) & 0x01), ((u_arrmul32_fa22_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_24_or0 = (fa(((u_arrmul32_and21_24 >> 0) & 0x01), ((u_arrmul32_fa22_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_24 = and_gate(((a >> 22) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa22_24_xor1 = (fa(((u_arrmul32_and22_24 >> 0) & 0x01), ((u_arrmul32_fa23_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_24_or0 = (fa(((u_arrmul32_and22_24 >> 0) & 0x01), ((u_arrmul32_fa23_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_24 = and_gate(((a >> 23) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa23_24_xor1 = (fa(((u_arrmul32_and23_24 >> 0) & 0x01), ((u_arrmul32_fa24_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_24_or0 = (fa(((u_arrmul32_and23_24 >> 0) & 0x01), ((u_arrmul32_fa24_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_24 = and_gate(((a >> 24) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa24_24_xor1 = (fa(((u_arrmul32_and24_24 >> 0) & 0x01), ((u_arrmul32_fa25_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_24_or0 = (fa(((u_arrmul32_and24_24 >> 0) & 0x01), ((u_arrmul32_fa25_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_24 = and_gate(((a >> 25) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa25_24_xor1 = (fa(((u_arrmul32_and25_24 >> 0) & 0x01), ((u_arrmul32_fa26_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_24_or0 = (fa(((u_arrmul32_and25_24 >> 0) & 0x01), ((u_arrmul32_fa26_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_24 = and_gate(((a >> 26) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa26_24_xor1 = (fa(((u_arrmul32_and26_24 >> 0) & 0x01), ((u_arrmul32_fa27_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_24_or0 = (fa(((u_arrmul32_and26_24 >> 0) & 0x01), ((u_arrmul32_fa27_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_24 = and_gate(((a >> 27) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa27_24_xor1 = (fa(((u_arrmul32_and27_24 >> 0) & 0x01), ((u_arrmul32_fa28_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_24_or0 = (fa(((u_arrmul32_and27_24 >> 0) & 0x01), ((u_arrmul32_fa28_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_24 = and_gate(((a >> 28) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa28_24_xor1 = (fa(((u_arrmul32_and28_24 >> 0) & 0x01), ((u_arrmul32_fa29_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_24_or0 = (fa(((u_arrmul32_and28_24 >> 0) & 0x01), ((u_arrmul32_fa29_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_24 = and_gate(((a >> 29) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa29_24_xor1 = (fa(((u_arrmul32_and29_24 >> 0) & 0x01), ((u_arrmul32_fa30_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_24_or0 = (fa(((u_arrmul32_and29_24 >> 0) & 0x01), ((u_arrmul32_fa30_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_24 = and_gate(((a >> 30) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa30_24_xor1 = (fa(((u_arrmul32_and30_24 >> 0) & 0x01), ((u_arrmul32_fa31_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_24_or0 = (fa(((u_arrmul32_and30_24 >> 0) & 0x01), ((u_arrmul32_fa31_23_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_24 = and_gate(((a >> 31) & 0x01), ((b >> 24) & 0x01));
u_arrmul32_fa31_24_xor1 = (fa(((u_arrmul32_and31_24 >> 0) & 0x01), ((u_arrmul32_fa31_23_or0 >> 0) & 0x01), ((u_arrmul32_fa30_24_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_24_or0 = (fa(((u_arrmul32_and31_24 >> 0) & 0x01), ((u_arrmul32_fa31_23_or0 >> 0) & 0x01), ((u_arrmul32_fa30_24_or0 >> 0) & 0x01)) >> 1) & 0x01;
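/* Row for multiplier bit b[25]. */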
u_arrmul32_and0_25 = and_gate(((a >> 0) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_ha0_25_xor0 = (ha(((u_arrmul32_and0_25 >> 0) & 0x01), ((u_arrmul32_fa1_24_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_25_and0 = (ha(((u_arrmul32_and0_25 >> 0) & 0x01), ((u_arrmul32_fa1_24_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_25 = and_gate(((a >> 1) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa1_25_xor1 = (fa(((u_arrmul32_and1_25 >> 0) & 0x01), ((u_arrmul32_fa2_24_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_25_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_25_or0 = (fa(((u_arrmul32_and1_25 >> 0) & 0x01), ((u_arrmul32_fa2_24_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_25_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_25 = and_gate(((a >> 2) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa2_25_xor1 = (fa(((u_arrmul32_and2_25 >> 0) & 0x01), ((u_arrmul32_fa3_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_25_or0 = (fa(((u_arrmul32_and2_25 >> 0) & 0x01), ((u_arrmul32_fa3_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_25 = and_gate(((a >> 3) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa3_25_xor1 = (fa(((u_arrmul32_and3_25 >> 0) & 0x01), ((u_arrmul32_fa4_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_25_or0 = (fa(((u_arrmul32_and3_25 >> 0) & 0x01), ((u_arrmul32_fa4_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_25 = and_gate(((a >> 4) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa4_25_xor1 = (fa(((u_arrmul32_and4_25 >> 0) & 0x01), ((u_arrmul32_fa5_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_25_or0 = (fa(((u_arrmul32_and4_25 >> 0) & 0x01), ((u_arrmul32_fa5_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_25 = and_gate(((a >> 5) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa5_25_xor1 = (fa(((u_arrmul32_and5_25 >> 0) & 0x01), ((u_arrmul32_fa6_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_25_or0 = (fa(((u_arrmul32_and5_25 >> 0) & 0x01), ((u_arrmul32_fa6_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_25 = and_gate(((a >> 6) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa6_25_xor1 = (fa(((u_arrmul32_and6_25 >> 0) & 0x01), ((u_arrmul32_fa7_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_25_or0 = (fa(((u_arrmul32_and6_25 >> 0) & 0x01), ((u_arrmul32_fa7_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_25 = and_gate(((a >> 7) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa7_25_xor1 = (fa(((u_arrmul32_and7_25 >> 0) & 0x01), ((u_arrmul32_fa8_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_25_or0 = (fa(((u_arrmul32_and7_25 >> 0) & 0x01), ((u_arrmul32_fa8_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_25 = and_gate(((a >> 8) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa8_25_xor1 = (fa(((u_arrmul32_and8_25 >> 0) & 0x01), ((u_arrmul32_fa9_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_25_or0 = (fa(((u_arrmul32_and8_25 >> 0) & 0x01), ((u_arrmul32_fa9_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_25 = and_gate(((a >> 9) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa9_25_xor1 = (fa(((u_arrmul32_and9_25 >> 0) & 0x01), ((u_arrmul32_fa10_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_25_or0 = (fa(((u_arrmul32_and9_25 >> 0) & 0x01), ((u_arrmul32_fa10_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_25 = and_gate(((a >> 10) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa10_25_xor1 = (fa(((u_arrmul32_and10_25 >> 0) & 0x01), ((u_arrmul32_fa11_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_25_or0 = (fa(((u_arrmul32_and10_25 >> 0) & 0x01), ((u_arrmul32_fa11_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_25 = and_gate(((a >> 11) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa11_25_xor1 = (fa(((u_arrmul32_and11_25 >> 0) & 0x01), ((u_arrmul32_fa12_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_25_or0 = (fa(((u_arrmul32_and11_25 >> 0) & 0x01), ((u_arrmul32_fa12_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_25 = and_gate(((a >> 12) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa12_25_xor1 = (fa(((u_arrmul32_and12_25 >> 0) & 0x01), ((u_arrmul32_fa13_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_25_or0 = (fa(((u_arrmul32_and12_25 >> 0) & 0x01), ((u_arrmul32_fa13_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_25 = and_gate(((a >> 13) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa13_25_xor1 = (fa(((u_arrmul32_and13_25 >> 0) & 0x01), ((u_arrmul32_fa14_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_25_or0 = (fa(((u_arrmul32_and13_25 >> 0) & 0x01), ((u_arrmul32_fa14_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_25 = and_gate(((a >> 14) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa14_25_xor1 = (fa(((u_arrmul32_and14_25 >> 0) & 0x01), ((u_arrmul32_fa15_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_25_or0 = (fa(((u_arrmul32_and14_25 >> 0) & 0x01), ((u_arrmul32_fa15_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_25 = and_gate(((a >> 15) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa15_25_xor1 = (fa(((u_arrmul32_and15_25 >> 0) & 0x01), ((u_arrmul32_fa16_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_25_or0 = (fa(((u_arrmul32_and15_25 >> 0) & 0x01), ((u_arrmul32_fa16_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_25 = and_gate(((a >> 16) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa16_25_xor1 = (fa(((u_arrmul32_and16_25 >> 0) & 0x01), ((u_arrmul32_fa17_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_25_or0 = (fa(((u_arrmul32_and16_25 >> 0) & 0x01), ((u_arrmul32_fa17_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_25 = and_gate(((a >> 17) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa17_25_xor1 = (fa(((u_arrmul32_and17_25 >> 0) & 0x01), ((u_arrmul32_fa18_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_25_or0 = (fa(((u_arrmul32_and17_25 >> 0) & 0x01), ((u_arrmul32_fa18_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_25 = and_gate(((a >> 18) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa18_25_xor1 = (fa(((u_arrmul32_and18_25 >> 0) & 0x01), ((u_arrmul32_fa19_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_25_or0 = (fa(((u_arrmul32_and18_25 >> 0) & 0x01), ((u_arrmul32_fa19_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_25 = and_gate(((a >> 19) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa19_25_xor1 = (fa(((u_arrmul32_and19_25 >> 0) & 0x01), ((u_arrmul32_fa20_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_25_or0 = (fa(((u_arrmul32_and19_25 >> 0) & 0x01), ((u_arrmul32_fa20_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_25 = and_gate(((a >> 20) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa20_25_xor1 = (fa(((u_arrmul32_and20_25 >> 0) & 0x01), ((u_arrmul32_fa21_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_25_or0 = (fa(((u_arrmul32_and20_25 >> 0) & 0x01), ((u_arrmul32_fa21_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_25 = and_gate(((a >> 21) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa21_25_xor1 = (fa(((u_arrmul32_and21_25 >> 0) & 0x01), ((u_arrmul32_fa22_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_25_or0 = (fa(((u_arrmul32_and21_25 >> 0) & 0x01), ((u_arrmul32_fa22_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_25 = and_gate(((a >> 22) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa22_25_xor1 = (fa(((u_arrmul32_and22_25 >> 0) & 0x01), ((u_arrmul32_fa23_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_25_or0 = (fa(((u_arrmul32_and22_25 >> 0) & 0x01), ((u_arrmul32_fa23_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_25 = and_gate(((a >> 23) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa23_25_xor1 = (fa(((u_arrmul32_and23_25 >> 0) & 0x01), ((u_arrmul32_fa24_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_25_or0 = (fa(((u_arrmul32_and23_25 >> 0) & 0x01), ((u_arrmul32_fa24_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_25 = and_gate(((a >> 24) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa24_25_xor1 = (fa(((u_arrmul32_and24_25 >> 0) & 0x01), ((u_arrmul32_fa25_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_25_or0 = (fa(((u_arrmul32_and24_25 >> 0) & 0x01), ((u_arrmul32_fa25_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_25 = and_gate(((a >> 25) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa25_25_xor1 = (fa(((u_arrmul32_and25_25 >> 0) & 0x01), ((u_arrmul32_fa26_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_25_or0 = (fa(((u_arrmul32_and25_25 >> 0) & 0x01), ((u_arrmul32_fa26_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_25 = and_gate(((a >> 26) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa26_25_xor1 = (fa(((u_arrmul32_and26_25 >> 0) & 0x01), ((u_arrmul32_fa27_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_25_or0 = (fa(((u_arrmul32_and26_25 >> 0) & 0x01), ((u_arrmul32_fa27_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_25 = and_gate(((a >> 27) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa27_25_xor1 = (fa(((u_arrmul32_and27_25 >> 0) & 0x01), ((u_arrmul32_fa28_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_25_or0 = (fa(((u_arrmul32_and27_25 >> 0) & 0x01), ((u_arrmul32_fa28_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_25 = and_gate(((a >> 28) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa28_25_xor1 = (fa(((u_arrmul32_and28_25 >> 0) & 0x01), ((u_arrmul32_fa29_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_25_or0 = (fa(((u_arrmul32_and28_25 >> 0) & 0x01), ((u_arrmul32_fa29_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_25 = and_gate(((a >> 29) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa29_25_xor1 = (fa(((u_arrmul32_and29_25 >> 0) & 0x01), ((u_arrmul32_fa30_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_25_or0 = (fa(((u_arrmul32_and29_25 >> 0) & 0x01), ((u_arrmul32_fa30_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_25 = and_gate(((a >> 30) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa30_25_xor1 = (fa(((u_arrmul32_and30_25 >> 0) & 0x01), ((u_arrmul32_fa31_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_25_or0 = (fa(((u_arrmul32_and30_25 >> 0) & 0x01), ((u_arrmul32_fa31_24_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_25 = and_gate(((a >> 31) & 0x01), ((b >> 25) & 0x01));
u_arrmul32_fa31_25_xor1 = (fa(((u_arrmul32_and31_25 >> 0) & 0x01), ((u_arrmul32_fa31_24_or0 >> 0) & 0x01), ((u_arrmul32_fa30_25_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_25_or0 = (fa(((u_arrmul32_and31_25 >> 0) & 0x01), ((u_arrmul32_fa31_24_or0 >> 0) & 0x01), ((u_arrmul32_fa30_25_or0 >> 0) & 0x01)) >> 1) & 0x01;
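/* Row for multiplier bit b[26]. */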
u_arrmul32_and0_26 = and_gate(((a >> 0) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_ha0_26_xor0 = (ha(((u_arrmul32_and0_26 >> 0) & 0x01), ((u_arrmul32_fa1_25_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_26_and0 = (ha(((u_arrmul32_and0_26 >> 0) & 0x01), ((u_arrmul32_fa1_25_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_26 = and_gate(((a >> 1) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa1_26_xor1 = (fa(((u_arrmul32_and1_26 >> 0) & 0x01), ((u_arrmul32_fa2_25_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_26_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_26_or0 = (fa(((u_arrmul32_and1_26 >> 0) & 0x01), ((u_arrmul32_fa2_25_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_26_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_26 = and_gate(((a >> 2) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa2_26_xor1 = (fa(((u_arrmul32_and2_26 >> 0) & 0x01), ((u_arrmul32_fa3_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_26_or0 = (fa(((u_arrmul32_and2_26 >> 0) & 0x01), ((u_arrmul32_fa3_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_26 = and_gate(((a >> 3) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa3_26_xor1 = (fa(((u_arrmul32_and3_26 >> 0) & 0x01), ((u_arrmul32_fa4_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_26_or0 = (fa(((u_arrmul32_and3_26 >> 0) & 0x01), ((u_arrmul32_fa4_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_26 = and_gate(((a >> 4) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa4_26_xor1 = (fa(((u_arrmul32_and4_26 >> 0) & 0x01), ((u_arrmul32_fa5_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_26_or0 = (fa(((u_arrmul32_and4_26 >> 0) & 0x01), ((u_arrmul32_fa5_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_26 = and_gate(((a >> 5) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa5_26_xor1 = (fa(((u_arrmul32_and5_26 >> 0) & 0x01), ((u_arrmul32_fa6_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_26_or0 = (fa(((u_arrmul32_and5_26 >> 0) & 0x01), ((u_arrmul32_fa6_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_26 = and_gate(((a >> 6) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa6_26_xor1 = (fa(((u_arrmul32_and6_26 >> 0) & 0x01), ((u_arrmul32_fa7_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_26_or0 = (fa(((u_arrmul32_and6_26 >> 0) & 0x01), ((u_arrmul32_fa7_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_26 = and_gate(((a >> 7) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa7_26_xor1 = (fa(((u_arrmul32_and7_26 >> 0) & 0x01), ((u_arrmul32_fa8_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_26_or0 = (fa(((u_arrmul32_and7_26 >> 0) & 0x01), ((u_arrmul32_fa8_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_26 = and_gate(((a >> 8) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa8_26_xor1 = (fa(((u_arrmul32_and8_26 >> 0) & 0x01), ((u_arrmul32_fa9_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_26_or0 = (fa(((u_arrmul32_and8_26 >> 0) & 0x01), ((u_arrmul32_fa9_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_26 = and_gate(((a >> 9) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa9_26_xor1 = (fa(((u_arrmul32_and9_26 >> 0) & 0x01), ((u_arrmul32_fa10_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_26_or0 = (fa(((u_arrmul32_and9_26 >> 0) & 0x01), ((u_arrmul32_fa10_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_26 = and_gate(((a >> 10) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa10_26_xor1 = (fa(((u_arrmul32_and10_26 >> 0) & 0x01), ((u_arrmul32_fa11_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_26_or0 = (fa(((u_arrmul32_and10_26 >> 0) & 0x01), ((u_arrmul32_fa11_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_26 = and_gate(((a >> 11) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa11_26_xor1 = (fa(((u_arrmul32_and11_26 >> 0) & 0x01), ((u_arrmul32_fa12_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_26_or0 = (fa(((u_arrmul32_and11_26 >> 0) & 0x01), ((u_arrmul32_fa12_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_26 = and_gate(((a >> 12) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa12_26_xor1 = (fa(((u_arrmul32_and12_26 >> 0) & 0x01), ((u_arrmul32_fa13_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_26_or0 = (fa(((u_arrmul32_and12_26 >> 0) & 0x01), ((u_arrmul32_fa13_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_26 = and_gate(((a >> 13) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa13_26_xor1 = (fa(((u_arrmul32_and13_26 >> 0) & 0x01), ((u_arrmul32_fa14_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_26_or0 = (fa(((u_arrmul32_and13_26 >> 0) & 0x01), ((u_arrmul32_fa14_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_26 = and_gate(((a >> 14) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa14_26_xor1 = (fa(((u_arrmul32_and14_26 >> 0) & 0x01), ((u_arrmul32_fa15_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_26_or0 = (fa(((u_arrmul32_and14_26 >> 0) & 0x01), ((u_arrmul32_fa15_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_26 = and_gate(((a >> 15) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa15_26_xor1 = (fa(((u_arrmul32_and15_26 >> 0) & 0x01), ((u_arrmul32_fa16_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_26_or0 = (fa(((u_arrmul32_and15_26 >> 0) & 0x01), ((u_arrmul32_fa16_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_26 = and_gate(((a >> 16) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa16_26_xor1 = (fa(((u_arrmul32_and16_26 >> 0) & 0x01), ((u_arrmul32_fa17_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_26_or0 = (fa(((u_arrmul32_and16_26 >> 0) & 0x01), ((u_arrmul32_fa17_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_26 = and_gate(((a >> 17) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa17_26_xor1 = (fa(((u_arrmul32_and17_26 >> 0) & 0x01), ((u_arrmul32_fa18_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_26_or0 = (fa(((u_arrmul32_and17_26 >> 0) & 0x01), ((u_arrmul32_fa18_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_26 = and_gate(((a >> 18) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa18_26_xor1 = (fa(((u_arrmul32_and18_26 >> 0) & 0x01), ((u_arrmul32_fa19_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_26_or0 = (fa(((u_arrmul32_and18_26 >> 0) & 0x01), ((u_arrmul32_fa19_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_26 = and_gate(((a >> 19) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa19_26_xor1 = (fa(((u_arrmul32_and19_26 >> 0) & 0x01), ((u_arrmul32_fa20_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_26_or0 = (fa(((u_arrmul32_and19_26 >> 0) & 0x01), ((u_arrmul32_fa20_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_26 = and_gate(((a >> 20) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa20_26_xor1 = (fa(((u_arrmul32_and20_26 >> 0) & 0x01), ((u_arrmul32_fa21_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_26_or0 = (fa(((u_arrmul32_and20_26 >> 0) & 0x01), ((u_arrmul32_fa21_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_26 = and_gate(((a >> 21) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa21_26_xor1 = (fa(((u_arrmul32_and21_26 >> 0) & 0x01), ((u_arrmul32_fa22_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_26_or0 = (fa(((u_arrmul32_and21_26 >> 0) & 0x01), ((u_arrmul32_fa22_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_26 = and_gate(((a >> 22) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa22_26_xor1 = (fa(((u_arrmul32_and22_26 >> 0) & 0x01), ((u_arrmul32_fa23_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_26_or0 = (fa(((u_arrmul32_and22_26 >> 0) & 0x01), ((u_arrmul32_fa23_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_26 = and_gate(((a >> 23) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa23_26_xor1 = (fa(((u_arrmul32_and23_26 >> 0) & 0x01), ((u_arrmul32_fa24_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_26_or0 = (fa(((u_arrmul32_and23_26 >> 0) & 0x01), ((u_arrmul32_fa24_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_26 = and_gate(((a >> 24) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa24_26_xor1 = (fa(((u_arrmul32_and24_26 >> 0) & 0x01), ((u_arrmul32_fa25_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_26_or0 = (fa(((u_arrmul32_and24_26 >> 0) & 0x01), ((u_arrmul32_fa25_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_26 = and_gate(((a >> 25) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa25_26_xor1 = (fa(((u_arrmul32_and25_26 >> 0) & 0x01), ((u_arrmul32_fa26_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_26_or0 = (fa(((u_arrmul32_and25_26 >> 0) & 0x01), ((u_arrmul32_fa26_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_26 = and_gate(((a >> 26) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa26_26_xor1 = (fa(((u_arrmul32_and26_26 >> 0) & 0x01), ((u_arrmul32_fa27_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_26_or0 = (fa(((u_arrmul32_and26_26 >> 0) & 0x01), ((u_arrmul32_fa27_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_26 = and_gate(((a >> 27) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa27_26_xor1 = (fa(((u_arrmul32_and27_26 >> 0) & 0x01), ((u_arrmul32_fa28_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_26_or0 = (fa(((u_arrmul32_and27_26 >> 0) & 0x01), ((u_arrmul32_fa28_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_26 = and_gate(((a >> 28) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa28_26_xor1 = (fa(((u_arrmul32_and28_26 >> 0) & 0x01), ((u_arrmul32_fa29_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_26_or0 = (fa(((u_arrmul32_and28_26 >> 0) & 0x01), ((u_arrmul32_fa29_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_26 = and_gate(((a >> 29) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa29_26_xor1 = (fa(((u_arrmul32_and29_26 >> 0) & 0x01), ((u_arrmul32_fa30_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_26_or0 = (fa(((u_arrmul32_and29_26 >> 0) & 0x01), ((u_arrmul32_fa30_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_26 = and_gate(((a >> 30) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa30_26_xor1 = (fa(((u_arrmul32_and30_26 >> 0) & 0x01), ((u_arrmul32_fa31_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_26_or0 = (fa(((u_arrmul32_and30_26 >> 0) & 0x01), ((u_arrmul32_fa31_25_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_26 = and_gate(((a >> 31) & 0x01), ((b >> 26) & 0x01));
u_arrmul32_fa31_26_xor1 = (fa(((u_arrmul32_and31_26 >> 0) & 0x01), ((u_arrmul32_fa31_25_or0 >> 0) & 0x01), ((u_arrmul32_fa30_26_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_26_or0 = (fa(((u_arrmul32_and31_26 >> 0) & 0x01), ((u_arrmul32_fa31_25_or0 >> 0) & 0x01), ((u_arrmul32_fa30_26_or0 >> 0) & 0x01)) >> 1) & 0x01;
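/* Row for multiplier bit b[27]. */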
u_arrmul32_and0_27 = and_gate(((a >> 0) & 0x01), ((b >> 27) & 0x01));
u_arrmul32_ha0_27_xor0 = (ha(((u_arrmul32_and0_27 >> 0) & 0x01), ((u_arrmul32_fa1_26_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_27_and0 = (ha(((u_arrmul32_and0_27 >> 0) & 0x01), ((u_arrmul32_fa1_26_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_27 = and_gate(((a >> 1) & 0x01), ((b >> 27) & 0x01));
u_arrmul32_fa1_27_xor1 = (fa(((u_arrmul32_and1_27 >> 0) & 0x01), ((u_arrmul32_fa2_26_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_27_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_27_or0 = (fa(((u_arrmul32_and1_27 >> 0) & 0x01), ((u_arrmul32_fa2_26_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_27_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_27 = and_gate(((a >> 2) & 0x01), ((b >> 27) & 0x01));
u_arrmul32_fa2_27_xor1 = (fa(((u_arrmul32_and2_27 >> 0) & 0x01), ((u_arrmul32_fa3_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_27_or0 = (fa(((u_arrmul32_and2_27 >> 0) & 0x01), ((u_arrmul32_fa3_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_27 = and_gate(((a >> 3) & 0x01), ((b >> 27) & 0x01));
u_arrmul32_fa3_27_xor1 = (fa(((u_arrmul32_and3_27 >> 0) & 0x01), ((u_arrmul32_fa4_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_27_or0 = (fa(((u_arrmul32_and3_27 >> 0) & 0x01), ((u_arrmul32_fa4_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_27 = and_gate(((a >> 4) & 0x01), ((b >> 27) & 0x01));
u_arrmul32_fa4_27_xor1 = (fa(((u_arrmul32_and4_27 >> 0) & 0x01), ((u_arrmul32_fa5_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_27_or0 = (fa(((u_arrmul32_and4_27 >> 0) & 0x01), ((u_arrmul32_fa5_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_27 = and_gate(((a >> 5) & 0x01), ((b >> 27) & 0x01));
u_arrmul32_fa5_27_xor1 = (fa(((u_arrmul32_and5_27 >> 0) & 0x01), ((u_arrmul32_fa6_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_27_or0 = (fa(((u_arrmul32_and5_27 >> 0) & 0x01), ((u_arrmul32_fa6_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_27 = and_gate(((a >> 6) & 0x01), ((b >> 27) & 0x01));
u_arrmul32_fa6_27_xor1 = (fa(((u_arrmul32_and6_27 >> 0) & 0x01), ((u_arrmul32_fa7_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_27_or0 = (fa(((u_arrmul32_and6_27 >> 0) & 0x01), ((u_arrmul32_fa7_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_27 = and_gate(((a >> 7) & 0x01), ((b >> 27) & 0x01));
u_arrmul32_fa7_27_xor1 = (fa(((u_arrmul32_and7_27 >> 0) & 0x01), ((u_arrmul32_fa8_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_27_or0 = (fa(((u_arrmul32_and7_27 >> 0) & 0x01), ((u_arrmul32_fa8_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_27 = and_gate(((a >> 8) & 0x01), ((b >> 27) & 0x01));
u_arrmul32_fa8_27_xor1 = (fa(((u_arrmul32_and8_27 >> 0) & 0x01), ((u_arrmul32_fa9_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_27_or0 = (fa(((u_arrmul32_and8_27 >> 0) & 0x01), ((u_arrmul32_fa9_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_27 = and_gate(((a >> 9) & 0x01), ((b >> 27) & 0x01));
u_arrmul32_fa9_27_xor1 = (fa(((u_arrmul32_and9_27 >> 0) & 0x01), ((u_arrmul32_fa10_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_27_or0 = (fa(((u_arrmul32_and9_27 >> 0) & 0x01), ((u_arrmul32_fa10_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_27 = and_gate(((a >> 10) & 0x01), ((b >> 27) & 0x01));
u_arrmul32_fa10_27_xor1 = (fa(((u_arrmul32_and10_27 >> 0) & 0x01), ((u_arrmul32_fa11_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_27_or0 = (fa(((u_arrmul32_and10_27 >> 0) & 0x01), ((u_arrmul32_fa11_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_27 = and_gate(((a >> 11) & 0x01), ((b >> 27) & 0x01));
|
|
u_arrmul32_fa11_27_xor1 = (fa(((u_arrmul32_and11_27 >> 0) & 0x01), ((u_arrmul32_fa12_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa11_27_or0 = (fa(((u_arrmul32_and11_27 >> 0) & 0x01), ((u_arrmul32_fa12_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and12_27 = and_gate(((a >> 12) & 0x01), ((b >> 27) & 0x01));
|
|
u_arrmul32_fa12_27_xor1 = (fa(((u_arrmul32_and12_27 >> 0) & 0x01), ((u_arrmul32_fa13_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa12_27_or0 = (fa(((u_arrmul32_and12_27 >> 0) & 0x01), ((u_arrmul32_fa13_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and13_27 = and_gate(((a >> 13) & 0x01), ((b >> 27) & 0x01));
|
|
u_arrmul32_fa13_27_xor1 = (fa(((u_arrmul32_and13_27 >> 0) & 0x01), ((u_arrmul32_fa14_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa13_27_or0 = (fa(((u_arrmul32_and13_27 >> 0) & 0x01), ((u_arrmul32_fa14_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and14_27 = and_gate(((a >> 14) & 0x01), ((b >> 27) & 0x01));
|
|
u_arrmul32_fa14_27_xor1 = (fa(((u_arrmul32_and14_27 >> 0) & 0x01), ((u_arrmul32_fa15_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa14_27_or0 = (fa(((u_arrmul32_and14_27 >> 0) & 0x01), ((u_arrmul32_fa15_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and15_27 = and_gate(((a >> 15) & 0x01), ((b >> 27) & 0x01));
|
|
u_arrmul32_fa15_27_xor1 = (fa(((u_arrmul32_and15_27 >> 0) & 0x01), ((u_arrmul32_fa16_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa15_27_or0 = (fa(((u_arrmul32_and15_27 >> 0) & 0x01), ((u_arrmul32_fa16_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and16_27 = and_gate(((a >> 16) & 0x01), ((b >> 27) & 0x01));
|
|
u_arrmul32_fa16_27_xor1 = (fa(((u_arrmul32_and16_27 >> 0) & 0x01), ((u_arrmul32_fa17_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa16_27_or0 = (fa(((u_arrmul32_and16_27 >> 0) & 0x01), ((u_arrmul32_fa17_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and17_27 = and_gate(((a >> 17) & 0x01), ((b >> 27) & 0x01));
|
|
u_arrmul32_fa17_27_xor1 = (fa(((u_arrmul32_and17_27 >> 0) & 0x01), ((u_arrmul32_fa18_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa17_27_or0 = (fa(((u_arrmul32_and17_27 >> 0) & 0x01), ((u_arrmul32_fa18_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and18_27 = and_gate(((a >> 18) & 0x01), ((b >> 27) & 0x01));
|
|
u_arrmul32_fa18_27_xor1 = (fa(((u_arrmul32_and18_27 >> 0) & 0x01), ((u_arrmul32_fa19_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa18_27_or0 = (fa(((u_arrmul32_and18_27 >> 0) & 0x01), ((u_arrmul32_fa19_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and19_27 = and_gate(((a >> 19) & 0x01), ((b >> 27) & 0x01));
|
|
u_arrmul32_fa19_27_xor1 = (fa(((u_arrmul32_and19_27 >> 0) & 0x01), ((u_arrmul32_fa20_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa19_27_or0 = (fa(((u_arrmul32_and19_27 >> 0) & 0x01), ((u_arrmul32_fa20_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and20_27 = and_gate(((a >> 20) & 0x01), ((b >> 27) & 0x01));
|
|
u_arrmul32_fa20_27_xor1 = (fa(((u_arrmul32_and20_27 >> 0) & 0x01), ((u_arrmul32_fa21_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa20_27_or0 = (fa(((u_arrmul32_and20_27 >> 0) & 0x01), ((u_arrmul32_fa21_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and21_27 = and_gate(((a >> 21) & 0x01), ((b >> 27) & 0x01));
|
|
u_arrmul32_fa21_27_xor1 = (fa(((u_arrmul32_and21_27 >> 0) & 0x01), ((u_arrmul32_fa22_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa21_27_or0 = (fa(((u_arrmul32_and21_27 >> 0) & 0x01), ((u_arrmul32_fa22_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and22_27 = and_gate(((a >> 22) & 0x01), ((b >> 27) & 0x01));
|
|
u_arrmul32_fa22_27_xor1 = (fa(((u_arrmul32_and22_27 >> 0) & 0x01), ((u_arrmul32_fa23_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa22_27_or0 = (fa(((u_arrmul32_and22_27 >> 0) & 0x01), ((u_arrmul32_fa23_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and23_27 = and_gate(((a >> 23) & 0x01), ((b >> 27) & 0x01));
|
|
u_arrmul32_fa23_27_xor1 = (fa(((u_arrmul32_and23_27 >> 0) & 0x01), ((u_arrmul32_fa24_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa23_27_or0 = (fa(((u_arrmul32_and23_27 >> 0) & 0x01), ((u_arrmul32_fa24_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and24_27 = and_gate(((a >> 24) & 0x01), ((b >> 27) & 0x01));
|
|
u_arrmul32_fa24_27_xor1 = (fa(((u_arrmul32_and24_27 >> 0) & 0x01), ((u_arrmul32_fa25_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa24_27_or0 = (fa(((u_arrmul32_and24_27 >> 0) & 0x01), ((u_arrmul32_fa25_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and25_27 = and_gate(((a >> 25) & 0x01), ((b >> 27) & 0x01));
|
|
u_arrmul32_fa25_27_xor1 = (fa(((u_arrmul32_and25_27 >> 0) & 0x01), ((u_arrmul32_fa26_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa25_27_or0 = (fa(((u_arrmul32_and25_27 >> 0) & 0x01), ((u_arrmul32_fa26_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and26_27 = and_gate(((a >> 26) & 0x01), ((b >> 27) & 0x01));
|
|
u_arrmul32_fa26_27_xor1 = (fa(((u_arrmul32_and26_27 >> 0) & 0x01), ((u_arrmul32_fa27_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa26_27_or0 = (fa(((u_arrmul32_and26_27 >> 0) & 0x01), ((u_arrmul32_fa27_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and27_27 = and_gate(((a >> 27) & 0x01), ((b >> 27) & 0x01));
|
|
u_arrmul32_fa27_27_xor1 = (fa(((u_arrmul32_and27_27 >> 0) & 0x01), ((u_arrmul32_fa28_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa27_27_or0 = (fa(((u_arrmul32_and27_27 >> 0) & 0x01), ((u_arrmul32_fa28_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and28_27 = and_gate(((a >> 28) & 0x01), ((b >> 27) & 0x01));
|
|
u_arrmul32_fa28_27_xor1 = (fa(((u_arrmul32_and28_27 >> 0) & 0x01), ((u_arrmul32_fa29_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa28_27_or0 = (fa(((u_arrmul32_and28_27 >> 0) & 0x01), ((u_arrmul32_fa29_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and29_27 = and_gate(((a >> 29) & 0x01), ((b >> 27) & 0x01));
|
|
u_arrmul32_fa29_27_xor1 = (fa(((u_arrmul32_and29_27 >> 0) & 0x01), ((u_arrmul32_fa30_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa29_27_or0 = (fa(((u_arrmul32_and29_27 >> 0) & 0x01), ((u_arrmul32_fa30_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and30_27 = and_gate(((a >> 30) & 0x01), ((b >> 27) & 0x01));
|
|
u_arrmul32_fa30_27_xor1 = (fa(((u_arrmul32_and30_27 >> 0) & 0x01), ((u_arrmul32_fa31_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa30_27_or0 = (fa(((u_arrmul32_and30_27 >> 0) & 0x01), ((u_arrmul32_fa31_26_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and31_27 = and_gate(((a >> 31) & 0x01), ((b >> 27) & 0x01));
|
|
u_arrmul32_fa31_27_xor1 = (fa(((u_arrmul32_and31_27 >> 0) & 0x01), ((u_arrmul32_fa31_26_or0 >> 0) & 0x01), ((u_arrmul32_fa30_27_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa31_27_or0 = (fa(((u_arrmul32_and31_27 >> 0) & 0x01), ((u_arrmul32_fa31_26_or0 >> 0) & 0x01), ((u_arrmul32_fa30_27_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
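/* Row of partial products for multiplier bit b[28]: and_gate() forms a[i] & b[28],
 * ha() adds column 0 of the row to the incoming sum, and fa() ripples the carry
 * through the remaining columns; the top full adder (fa31) also consumes the
 * carry-out (fa31_27_or0) of the previous row. */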
u_arrmul32_and0_28 = and_gate(((a >> 0) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_ha0_28_xor0 = (ha(((u_arrmul32_and0_28 >> 0) & 0x01), ((u_arrmul32_fa1_27_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_28_and0 = (ha(((u_arrmul32_and0_28 >> 0) & 0x01), ((u_arrmul32_fa1_27_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_28 = and_gate(((a >> 1) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa1_28_xor1 = (fa(((u_arrmul32_and1_28 >> 0) & 0x01), ((u_arrmul32_fa2_27_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_28_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_28_or0 = (fa(((u_arrmul32_and1_28 >> 0) & 0x01), ((u_arrmul32_fa2_27_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_28_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_28 = and_gate(((a >> 2) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa2_28_xor1 = (fa(((u_arrmul32_and2_28 >> 0) & 0x01), ((u_arrmul32_fa3_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_28_or0 = (fa(((u_arrmul32_and2_28 >> 0) & 0x01), ((u_arrmul32_fa3_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_28 = and_gate(((a >> 3) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa3_28_xor1 = (fa(((u_arrmul32_and3_28 >> 0) & 0x01), ((u_arrmul32_fa4_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_28_or0 = (fa(((u_arrmul32_and3_28 >> 0) & 0x01), ((u_arrmul32_fa4_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_28 = and_gate(((a >> 4) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa4_28_xor1 = (fa(((u_arrmul32_and4_28 >> 0) & 0x01), ((u_arrmul32_fa5_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_28_or0 = (fa(((u_arrmul32_and4_28 >> 0) & 0x01), ((u_arrmul32_fa5_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_28 = and_gate(((a >> 5) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa5_28_xor1 = (fa(((u_arrmul32_and5_28 >> 0) & 0x01), ((u_arrmul32_fa6_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_28_or0 = (fa(((u_arrmul32_and5_28 >> 0) & 0x01), ((u_arrmul32_fa6_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_28 = and_gate(((a >> 6) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa6_28_xor1 = (fa(((u_arrmul32_and6_28 >> 0) & 0x01), ((u_arrmul32_fa7_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_28_or0 = (fa(((u_arrmul32_and6_28 >> 0) & 0x01), ((u_arrmul32_fa7_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_28 = and_gate(((a >> 7) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa7_28_xor1 = (fa(((u_arrmul32_and7_28 >> 0) & 0x01), ((u_arrmul32_fa8_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_28_or0 = (fa(((u_arrmul32_and7_28 >> 0) & 0x01), ((u_arrmul32_fa8_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_28 = and_gate(((a >> 8) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa8_28_xor1 = (fa(((u_arrmul32_and8_28 >> 0) & 0x01), ((u_arrmul32_fa9_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_28_or0 = (fa(((u_arrmul32_and8_28 >> 0) & 0x01), ((u_arrmul32_fa9_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_28 = and_gate(((a >> 9) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa9_28_xor1 = (fa(((u_arrmul32_and9_28 >> 0) & 0x01), ((u_arrmul32_fa10_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_28_or0 = (fa(((u_arrmul32_and9_28 >> 0) & 0x01), ((u_arrmul32_fa10_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_28 = and_gate(((a >> 10) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa10_28_xor1 = (fa(((u_arrmul32_and10_28 >> 0) & 0x01), ((u_arrmul32_fa11_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_28_or0 = (fa(((u_arrmul32_and10_28 >> 0) & 0x01), ((u_arrmul32_fa11_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_28 = and_gate(((a >> 11) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa11_28_xor1 = (fa(((u_arrmul32_and11_28 >> 0) & 0x01), ((u_arrmul32_fa12_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_28_or0 = (fa(((u_arrmul32_and11_28 >> 0) & 0x01), ((u_arrmul32_fa12_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_28 = and_gate(((a >> 12) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa12_28_xor1 = (fa(((u_arrmul32_and12_28 >> 0) & 0x01), ((u_arrmul32_fa13_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_28_or0 = (fa(((u_arrmul32_and12_28 >> 0) & 0x01), ((u_arrmul32_fa13_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_28 = and_gate(((a >> 13) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa13_28_xor1 = (fa(((u_arrmul32_and13_28 >> 0) & 0x01), ((u_arrmul32_fa14_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_28_or0 = (fa(((u_arrmul32_and13_28 >> 0) & 0x01), ((u_arrmul32_fa14_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_28 = and_gate(((a >> 14) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa14_28_xor1 = (fa(((u_arrmul32_and14_28 >> 0) & 0x01), ((u_arrmul32_fa15_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_28_or0 = (fa(((u_arrmul32_and14_28 >> 0) & 0x01), ((u_arrmul32_fa15_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_28 = and_gate(((a >> 15) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa15_28_xor1 = (fa(((u_arrmul32_and15_28 >> 0) & 0x01), ((u_arrmul32_fa16_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_28_or0 = (fa(((u_arrmul32_and15_28 >> 0) & 0x01), ((u_arrmul32_fa16_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_28 = and_gate(((a >> 16) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa16_28_xor1 = (fa(((u_arrmul32_and16_28 >> 0) & 0x01), ((u_arrmul32_fa17_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_28_or0 = (fa(((u_arrmul32_and16_28 >> 0) & 0x01), ((u_arrmul32_fa17_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_28 = and_gate(((a >> 17) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa17_28_xor1 = (fa(((u_arrmul32_and17_28 >> 0) & 0x01), ((u_arrmul32_fa18_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_28_or0 = (fa(((u_arrmul32_and17_28 >> 0) & 0x01), ((u_arrmul32_fa18_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_28 = and_gate(((a >> 18) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa18_28_xor1 = (fa(((u_arrmul32_and18_28 >> 0) & 0x01), ((u_arrmul32_fa19_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_28_or0 = (fa(((u_arrmul32_and18_28 >> 0) & 0x01), ((u_arrmul32_fa19_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_28 = and_gate(((a >> 19) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa19_28_xor1 = (fa(((u_arrmul32_and19_28 >> 0) & 0x01), ((u_arrmul32_fa20_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_28_or0 = (fa(((u_arrmul32_and19_28 >> 0) & 0x01), ((u_arrmul32_fa20_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_28 = and_gate(((a >> 20) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa20_28_xor1 = (fa(((u_arrmul32_and20_28 >> 0) & 0x01), ((u_arrmul32_fa21_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_28_or0 = (fa(((u_arrmul32_and20_28 >> 0) & 0x01), ((u_arrmul32_fa21_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_28 = and_gate(((a >> 21) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa21_28_xor1 = (fa(((u_arrmul32_and21_28 >> 0) & 0x01), ((u_arrmul32_fa22_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_28_or0 = (fa(((u_arrmul32_and21_28 >> 0) & 0x01), ((u_arrmul32_fa22_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_28 = and_gate(((a >> 22) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa22_28_xor1 = (fa(((u_arrmul32_and22_28 >> 0) & 0x01), ((u_arrmul32_fa23_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_28_or0 = (fa(((u_arrmul32_and22_28 >> 0) & 0x01), ((u_arrmul32_fa23_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_28 = and_gate(((a >> 23) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa23_28_xor1 = (fa(((u_arrmul32_and23_28 >> 0) & 0x01), ((u_arrmul32_fa24_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_28_or0 = (fa(((u_arrmul32_and23_28 >> 0) & 0x01), ((u_arrmul32_fa24_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_28 = and_gate(((a >> 24) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa24_28_xor1 = (fa(((u_arrmul32_and24_28 >> 0) & 0x01), ((u_arrmul32_fa25_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_28_or0 = (fa(((u_arrmul32_and24_28 >> 0) & 0x01), ((u_arrmul32_fa25_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_28 = and_gate(((a >> 25) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa25_28_xor1 = (fa(((u_arrmul32_and25_28 >> 0) & 0x01), ((u_arrmul32_fa26_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_28_or0 = (fa(((u_arrmul32_and25_28 >> 0) & 0x01), ((u_arrmul32_fa26_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_28 = and_gate(((a >> 26) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa26_28_xor1 = (fa(((u_arrmul32_and26_28 >> 0) & 0x01), ((u_arrmul32_fa27_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_28_or0 = (fa(((u_arrmul32_and26_28 >> 0) & 0x01), ((u_arrmul32_fa27_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_28 = and_gate(((a >> 27) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa27_28_xor1 = (fa(((u_arrmul32_and27_28 >> 0) & 0x01), ((u_arrmul32_fa28_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_28_or0 = (fa(((u_arrmul32_and27_28 >> 0) & 0x01), ((u_arrmul32_fa28_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_28 = and_gate(((a >> 28) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa28_28_xor1 = (fa(((u_arrmul32_and28_28 >> 0) & 0x01), ((u_arrmul32_fa29_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_28_or0 = (fa(((u_arrmul32_and28_28 >> 0) & 0x01), ((u_arrmul32_fa29_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_28 = and_gate(((a >> 29) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa29_28_xor1 = (fa(((u_arrmul32_and29_28 >> 0) & 0x01), ((u_arrmul32_fa30_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_28_or0 = (fa(((u_arrmul32_and29_28 >> 0) & 0x01), ((u_arrmul32_fa30_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_28 = and_gate(((a >> 30) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa30_28_xor1 = (fa(((u_arrmul32_and30_28 >> 0) & 0x01), ((u_arrmul32_fa31_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_28_or0 = (fa(((u_arrmul32_and30_28 >> 0) & 0x01), ((u_arrmul32_fa31_27_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_28 = and_gate(((a >> 31) & 0x01), ((b >> 28) & 0x01));
u_arrmul32_fa31_28_xor1 = (fa(((u_arrmul32_and31_28 >> 0) & 0x01), ((u_arrmul32_fa31_27_or0 >> 0) & 0x01), ((u_arrmul32_fa30_28_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_28_or0 = (fa(((u_arrmul32_and31_28 >> 0) & 0x01), ((u_arrmul32_fa31_27_or0 >> 0) & 0x01), ((u_arrmul32_fa30_28_or0 >> 0) & 0x01)) >> 1) & 0x01;
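/* Row of partial products for multiplier bit b[29]; same ripple-carry
 * structure as the b[28] row above. */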
u_arrmul32_and0_29 = and_gate(((a >> 0) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_ha0_29_xor0 = (ha(((u_arrmul32_and0_29 >> 0) & 0x01), ((u_arrmul32_fa1_28_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_29_and0 = (ha(((u_arrmul32_and0_29 >> 0) & 0x01), ((u_arrmul32_fa1_28_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_29 = and_gate(((a >> 1) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa1_29_xor1 = (fa(((u_arrmul32_and1_29 >> 0) & 0x01), ((u_arrmul32_fa2_28_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_29_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_29_or0 = (fa(((u_arrmul32_and1_29 >> 0) & 0x01), ((u_arrmul32_fa2_28_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_29_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_29 = and_gate(((a >> 2) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa2_29_xor1 = (fa(((u_arrmul32_and2_29 >> 0) & 0x01), ((u_arrmul32_fa3_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_29_or0 = (fa(((u_arrmul32_and2_29 >> 0) & 0x01), ((u_arrmul32_fa3_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_29 = and_gate(((a >> 3) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa3_29_xor1 = (fa(((u_arrmul32_and3_29 >> 0) & 0x01), ((u_arrmul32_fa4_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_29_or0 = (fa(((u_arrmul32_and3_29 >> 0) & 0x01), ((u_arrmul32_fa4_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_29 = and_gate(((a >> 4) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa4_29_xor1 = (fa(((u_arrmul32_and4_29 >> 0) & 0x01), ((u_arrmul32_fa5_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_29_or0 = (fa(((u_arrmul32_and4_29 >> 0) & 0x01), ((u_arrmul32_fa5_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_29 = and_gate(((a >> 5) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa5_29_xor1 = (fa(((u_arrmul32_and5_29 >> 0) & 0x01), ((u_arrmul32_fa6_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_29_or0 = (fa(((u_arrmul32_and5_29 >> 0) & 0x01), ((u_arrmul32_fa6_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_29 = and_gate(((a >> 6) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa6_29_xor1 = (fa(((u_arrmul32_and6_29 >> 0) & 0x01), ((u_arrmul32_fa7_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_29_or0 = (fa(((u_arrmul32_and6_29 >> 0) & 0x01), ((u_arrmul32_fa7_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_29 = and_gate(((a >> 7) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa7_29_xor1 = (fa(((u_arrmul32_and7_29 >> 0) & 0x01), ((u_arrmul32_fa8_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_29_or0 = (fa(((u_arrmul32_and7_29 >> 0) & 0x01), ((u_arrmul32_fa8_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_29 = and_gate(((a >> 8) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa8_29_xor1 = (fa(((u_arrmul32_and8_29 >> 0) & 0x01), ((u_arrmul32_fa9_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_29_or0 = (fa(((u_arrmul32_and8_29 >> 0) & 0x01), ((u_arrmul32_fa9_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_29 = and_gate(((a >> 9) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa9_29_xor1 = (fa(((u_arrmul32_and9_29 >> 0) & 0x01), ((u_arrmul32_fa10_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_29_or0 = (fa(((u_arrmul32_and9_29 >> 0) & 0x01), ((u_arrmul32_fa10_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_29 = and_gate(((a >> 10) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa10_29_xor1 = (fa(((u_arrmul32_and10_29 >> 0) & 0x01), ((u_arrmul32_fa11_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_29_or0 = (fa(((u_arrmul32_and10_29 >> 0) & 0x01), ((u_arrmul32_fa11_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_29 = and_gate(((a >> 11) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa11_29_xor1 = (fa(((u_arrmul32_and11_29 >> 0) & 0x01), ((u_arrmul32_fa12_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_29_or0 = (fa(((u_arrmul32_and11_29 >> 0) & 0x01), ((u_arrmul32_fa12_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_29 = and_gate(((a >> 12) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa12_29_xor1 = (fa(((u_arrmul32_and12_29 >> 0) & 0x01), ((u_arrmul32_fa13_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_29_or0 = (fa(((u_arrmul32_and12_29 >> 0) & 0x01), ((u_arrmul32_fa13_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_29 = and_gate(((a >> 13) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa13_29_xor1 = (fa(((u_arrmul32_and13_29 >> 0) & 0x01), ((u_arrmul32_fa14_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_29_or0 = (fa(((u_arrmul32_and13_29 >> 0) & 0x01), ((u_arrmul32_fa14_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_29 = and_gate(((a >> 14) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa14_29_xor1 = (fa(((u_arrmul32_and14_29 >> 0) & 0x01), ((u_arrmul32_fa15_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_29_or0 = (fa(((u_arrmul32_and14_29 >> 0) & 0x01), ((u_arrmul32_fa15_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_29 = and_gate(((a >> 15) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa15_29_xor1 = (fa(((u_arrmul32_and15_29 >> 0) & 0x01), ((u_arrmul32_fa16_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_29_or0 = (fa(((u_arrmul32_and15_29 >> 0) & 0x01), ((u_arrmul32_fa16_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_29 = and_gate(((a >> 16) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa16_29_xor1 = (fa(((u_arrmul32_and16_29 >> 0) & 0x01), ((u_arrmul32_fa17_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_29_or0 = (fa(((u_arrmul32_and16_29 >> 0) & 0x01), ((u_arrmul32_fa17_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_29 = and_gate(((a >> 17) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa17_29_xor1 = (fa(((u_arrmul32_and17_29 >> 0) & 0x01), ((u_arrmul32_fa18_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_29_or0 = (fa(((u_arrmul32_and17_29 >> 0) & 0x01), ((u_arrmul32_fa18_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_29 = and_gate(((a >> 18) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa18_29_xor1 = (fa(((u_arrmul32_and18_29 >> 0) & 0x01), ((u_arrmul32_fa19_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_29_or0 = (fa(((u_arrmul32_and18_29 >> 0) & 0x01), ((u_arrmul32_fa19_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_29 = and_gate(((a >> 19) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa19_29_xor1 = (fa(((u_arrmul32_and19_29 >> 0) & 0x01), ((u_arrmul32_fa20_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_29_or0 = (fa(((u_arrmul32_and19_29 >> 0) & 0x01), ((u_arrmul32_fa20_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_29 = and_gate(((a >> 20) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa20_29_xor1 = (fa(((u_arrmul32_and20_29 >> 0) & 0x01), ((u_arrmul32_fa21_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_29_or0 = (fa(((u_arrmul32_and20_29 >> 0) & 0x01), ((u_arrmul32_fa21_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_29 = and_gate(((a >> 21) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa21_29_xor1 = (fa(((u_arrmul32_and21_29 >> 0) & 0x01), ((u_arrmul32_fa22_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_29_or0 = (fa(((u_arrmul32_and21_29 >> 0) & 0x01), ((u_arrmul32_fa22_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_29 = and_gate(((a >> 22) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa22_29_xor1 = (fa(((u_arrmul32_and22_29 >> 0) & 0x01), ((u_arrmul32_fa23_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_29_or0 = (fa(((u_arrmul32_and22_29 >> 0) & 0x01), ((u_arrmul32_fa23_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_29 = and_gate(((a >> 23) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa23_29_xor1 = (fa(((u_arrmul32_and23_29 >> 0) & 0x01), ((u_arrmul32_fa24_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_29_or0 = (fa(((u_arrmul32_and23_29 >> 0) & 0x01), ((u_arrmul32_fa24_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_29 = and_gate(((a >> 24) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa24_29_xor1 = (fa(((u_arrmul32_and24_29 >> 0) & 0x01), ((u_arrmul32_fa25_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_29_or0 = (fa(((u_arrmul32_and24_29 >> 0) & 0x01), ((u_arrmul32_fa25_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_29 = and_gate(((a >> 25) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa25_29_xor1 = (fa(((u_arrmul32_and25_29 >> 0) & 0x01), ((u_arrmul32_fa26_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_29_or0 = (fa(((u_arrmul32_and25_29 >> 0) & 0x01), ((u_arrmul32_fa26_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_29 = and_gate(((a >> 26) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa26_29_xor1 = (fa(((u_arrmul32_and26_29 >> 0) & 0x01), ((u_arrmul32_fa27_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_29_or0 = (fa(((u_arrmul32_and26_29 >> 0) & 0x01), ((u_arrmul32_fa27_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_29 = and_gate(((a >> 27) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa27_29_xor1 = (fa(((u_arrmul32_and27_29 >> 0) & 0x01), ((u_arrmul32_fa28_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_29_or0 = (fa(((u_arrmul32_and27_29 >> 0) & 0x01), ((u_arrmul32_fa28_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_29 = and_gate(((a >> 28) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa28_29_xor1 = (fa(((u_arrmul32_and28_29 >> 0) & 0x01), ((u_arrmul32_fa29_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_29_or0 = (fa(((u_arrmul32_and28_29 >> 0) & 0x01), ((u_arrmul32_fa29_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_29 = and_gate(((a >> 29) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa29_29_xor1 = (fa(((u_arrmul32_and29_29 >> 0) & 0x01), ((u_arrmul32_fa30_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_29_or0 = (fa(((u_arrmul32_and29_29 >> 0) & 0x01), ((u_arrmul32_fa30_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_29 = and_gate(((a >> 30) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa30_29_xor1 = (fa(((u_arrmul32_and30_29 >> 0) & 0x01), ((u_arrmul32_fa31_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_29_or0 = (fa(((u_arrmul32_and30_29 >> 0) & 0x01), ((u_arrmul32_fa31_28_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_29 = and_gate(((a >> 31) & 0x01), ((b >> 29) & 0x01));
u_arrmul32_fa31_29_xor1 = (fa(((u_arrmul32_and31_29 >> 0) & 0x01), ((u_arrmul32_fa31_28_or0 >> 0) & 0x01), ((u_arrmul32_fa30_29_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_29_or0 = (fa(((u_arrmul32_and31_29 >> 0) & 0x01), ((u_arrmul32_fa31_28_or0 >> 0) & 0x01), ((u_arrmul32_fa30_29_or0 >> 0) & 0x01)) >> 1) & 0x01;
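/* Row of partial products for multiplier bit b[30]. */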
u_arrmul32_and0_30 = and_gate(((a >> 0) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_ha0_30_xor0 = (ha(((u_arrmul32_and0_30 >> 0) & 0x01), ((u_arrmul32_fa1_29_xor1 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_ha0_30_and0 = (ha(((u_arrmul32_and0_30 >> 0) & 0x01), ((u_arrmul32_fa1_29_xor1 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and1_30 = and_gate(((a >> 1) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa1_30_xor1 = (fa(((u_arrmul32_and1_30 >> 0) & 0x01), ((u_arrmul32_fa2_29_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_30_and0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa1_30_or0 = (fa(((u_arrmul32_and1_30 >> 0) & 0x01), ((u_arrmul32_fa2_29_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_30_and0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and2_30 = and_gate(((a >> 2) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa2_30_xor1 = (fa(((u_arrmul32_and2_30 >> 0) & 0x01), ((u_arrmul32_fa3_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa2_30_or0 = (fa(((u_arrmul32_and2_30 >> 0) & 0x01), ((u_arrmul32_fa3_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and3_30 = and_gate(((a >> 3) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa3_30_xor1 = (fa(((u_arrmul32_and3_30 >> 0) & 0x01), ((u_arrmul32_fa4_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa3_30_or0 = (fa(((u_arrmul32_and3_30 >> 0) & 0x01), ((u_arrmul32_fa4_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and4_30 = and_gate(((a >> 4) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa4_30_xor1 = (fa(((u_arrmul32_and4_30 >> 0) & 0x01), ((u_arrmul32_fa5_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa4_30_or0 = (fa(((u_arrmul32_and4_30 >> 0) & 0x01), ((u_arrmul32_fa5_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and5_30 = and_gate(((a >> 5) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa5_30_xor1 = (fa(((u_arrmul32_and5_30 >> 0) & 0x01), ((u_arrmul32_fa6_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa5_30_or0 = (fa(((u_arrmul32_and5_30 >> 0) & 0x01), ((u_arrmul32_fa6_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and6_30 = and_gate(((a >> 6) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa6_30_xor1 = (fa(((u_arrmul32_and6_30 >> 0) & 0x01), ((u_arrmul32_fa7_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa6_30_or0 = (fa(((u_arrmul32_and6_30 >> 0) & 0x01), ((u_arrmul32_fa7_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and7_30 = and_gate(((a >> 7) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa7_30_xor1 = (fa(((u_arrmul32_and7_30 >> 0) & 0x01), ((u_arrmul32_fa8_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa7_30_or0 = (fa(((u_arrmul32_and7_30 >> 0) & 0x01), ((u_arrmul32_fa8_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and8_30 = and_gate(((a >> 8) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa8_30_xor1 = (fa(((u_arrmul32_and8_30 >> 0) & 0x01), ((u_arrmul32_fa9_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa8_30_or0 = (fa(((u_arrmul32_and8_30 >> 0) & 0x01), ((u_arrmul32_fa9_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and9_30 = and_gate(((a >> 9) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa9_30_xor1 = (fa(((u_arrmul32_and9_30 >> 0) & 0x01), ((u_arrmul32_fa10_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa9_30_or0 = (fa(((u_arrmul32_and9_30 >> 0) & 0x01), ((u_arrmul32_fa10_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and10_30 = and_gate(((a >> 10) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa10_30_xor1 = (fa(((u_arrmul32_and10_30 >> 0) & 0x01), ((u_arrmul32_fa11_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa10_30_or0 = (fa(((u_arrmul32_and10_30 >> 0) & 0x01), ((u_arrmul32_fa11_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and11_30 = and_gate(((a >> 11) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa11_30_xor1 = (fa(((u_arrmul32_and11_30 >> 0) & 0x01), ((u_arrmul32_fa12_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa11_30_or0 = (fa(((u_arrmul32_and11_30 >> 0) & 0x01), ((u_arrmul32_fa12_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and12_30 = and_gate(((a >> 12) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa12_30_xor1 = (fa(((u_arrmul32_and12_30 >> 0) & 0x01), ((u_arrmul32_fa13_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa12_30_or0 = (fa(((u_arrmul32_and12_30 >> 0) & 0x01), ((u_arrmul32_fa13_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and13_30 = and_gate(((a >> 13) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa13_30_xor1 = (fa(((u_arrmul32_and13_30 >> 0) & 0x01), ((u_arrmul32_fa14_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa13_30_or0 = (fa(((u_arrmul32_and13_30 >> 0) & 0x01), ((u_arrmul32_fa14_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and14_30 = and_gate(((a >> 14) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa14_30_xor1 = (fa(((u_arrmul32_and14_30 >> 0) & 0x01), ((u_arrmul32_fa15_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa14_30_or0 = (fa(((u_arrmul32_and14_30 >> 0) & 0x01), ((u_arrmul32_fa15_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and15_30 = and_gate(((a >> 15) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa15_30_xor1 = (fa(((u_arrmul32_and15_30 >> 0) & 0x01), ((u_arrmul32_fa16_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa15_30_or0 = (fa(((u_arrmul32_and15_30 >> 0) & 0x01), ((u_arrmul32_fa16_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and16_30 = and_gate(((a >> 16) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa16_30_xor1 = (fa(((u_arrmul32_and16_30 >> 0) & 0x01), ((u_arrmul32_fa17_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_30_or0 = (fa(((u_arrmul32_and16_30 >> 0) & 0x01), ((u_arrmul32_fa17_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_30 = and_gate(((a >> 17) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa17_30_xor1 = (fa(((u_arrmul32_and17_30 >> 0) & 0x01), ((u_arrmul32_fa18_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_30_or0 = (fa(((u_arrmul32_and17_30 >> 0) & 0x01), ((u_arrmul32_fa18_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_30 = and_gate(((a >> 18) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa18_30_xor1 = (fa(((u_arrmul32_and18_30 >> 0) & 0x01), ((u_arrmul32_fa19_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_30_or0 = (fa(((u_arrmul32_and18_30 >> 0) & 0x01), ((u_arrmul32_fa19_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_30 = and_gate(((a >> 19) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa19_30_xor1 = (fa(((u_arrmul32_and19_30 >> 0) & 0x01), ((u_arrmul32_fa20_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_30_or0 = (fa(((u_arrmul32_and19_30 >> 0) & 0x01), ((u_arrmul32_fa20_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_30 = and_gate(((a >> 20) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa20_30_xor1 = (fa(((u_arrmul32_and20_30 >> 0) & 0x01), ((u_arrmul32_fa21_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_30_or0 = (fa(((u_arrmul32_and20_30 >> 0) & 0x01), ((u_arrmul32_fa21_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_30 = and_gate(((a >> 21) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa21_30_xor1 = (fa(((u_arrmul32_and21_30 >> 0) & 0x01), ((u_arrmul32_fa22_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_30_or0 = (fa(((u_arrmul32_and21_30 >> 0) & 0x01), ((u_arrmul32_fa22_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_30 = and_gate(((a >> 22) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa22_30_xor1 = (fa(((u_arrmul32_and22_30 >> 0) & 0x01), ((u_arrmul32_fa23_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_30_or0 = (fa(((u_arrmul32_and22_30 >> 0) & 0x01), ((u_arrmul32_fa23_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_30 = and_gate(((a >> 23) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa23_30_xor1 = (fa(((u_arrmul32_and23_30 >> 0) & 0x01), ((u_arrmul32_fa24_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_30_or0 = (fa(((u_arrmul32_and23_30 >> 0) & 0x01), ((u_arrmul32_fa24_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_30 = and_gate(((a >> 24) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa24_30_xor1 = (fa(((u_arrmul32_and24_30 >> 0) & 0x01), ((u_arrmul32_fa25_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_30_or0 = (fa(((u_arrmul32_and24_30 >> 0) & 0x01), ((u_arrmul32_fa25_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_30 = and_gate(((a >> 25) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa25_30_xor1 = (fa(((u_arrmul32_and25_30 >> 0) & 0x01), ((u_arrmul32_fa26_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_30_or0 = (fa(((u_arrmul32_and25_30 >> 0) & 0x01), ((u_arrmul32_fa26_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_30 = and_gate(((a >> 26) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa26_30_xor1 = (fa(((u_arrmul32_and26_30 >> 0) & 0x01), ((u_arrmul32_fa27_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_30_or0 = (fa(((u_arrmul32_and26_30 >> 0) & 0x01), ((u_arrmul32_fa27_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_30 = and_gate(((a >> 27) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa27_30_xor1 = (fa(((u_arrmul32_and27_30 >> 0) & 0x01), ((u_arrmul32_fa28_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_30_or0 = (fa(((u_arrmul32_and27_30 >> 0) & 0x01), ((u_arrmul32_fa28_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_30 = and_gate(((a >> 28) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa28_30_xor1 = (fa(((u_arrmul32_and28_30 >> 0) & 0x01), ((u_arrmul32_fa29_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_30_or0 = (fa(((u_arrmul32_and28_30 >> 0) & 0x01), ((u_arrmul32_fa29_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_30 = and_gate(((a >> 29) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa29_30_xor1 = (fa(((u_arrmul32_and29_30 >> 0) & 0x01), ((u_arrmul32_fa30_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_30_or0 = (fa(((u_arrmul32_and29_30 >> 0) & 0x01), ((u_arrmul32_fa30_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_30 = and_gate(((a >> 30) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa30_30_xor1 = (fa(((u_arrmul32_and30_30 >> 0) & 0x01), ((u_arrmul32_fa31_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_30_or0 = (fa(((u_arrmul32_and30_30 >> 0) & 0x01), ((u_arrmul32_fa31_29_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_30 = and_gate(((a >> 31) & 0x01), ((b >> 30) & 0x01));
u_arrmul32_fa31_30_xor1 = (fa(((u_arrmul32_and31_30 >> 0) & 0x01), ((u_arrmul32_fa31_29_or0 >> 0) & 0x01), ((u_arrmul32_fa30_30_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_30_or0 = (fa(((u_arrmul32_and31_30 >> 0) & 0x01), ((u_arrmul32_fa31_29_or0 >> 0) & 0x01), ((u_arrmul32_fa30_30_or0 >> 0) & 0x01)) >> 1) & 0x01;
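/* Final row of partial products, for multiplier bit b[31]; its sum and
 * carry outputs supply the high-order bits of the product. */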
u_arrmul32_and0_31 = and_gate(((a >> 0) & 0x01), ((b >> 31) & 0x01));
|
|
u_arrmul32_ha0_31_xor0 = (ha(((u_arrmul32_and0_31 >> 0) & 0x01), ((u_arrmul32_fa1_30_xor1 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_ha0_31_and0 = (ha(((u_arrmul32_and0_31 >> 0) & 0x01), ((u_arrmul32_fa1_30_xor1 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and1_31 = and_gate(((a >> 1) & 0x01), ((b >> 31) & 0x01));
|
|
u_arrmul32_fa1_31_xor1 = (fa(((u_arrmul32_and1_31 >> 0) & 0x01), ((u_arrmul32_fa2_30_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_31_and0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa1_31_or0 = (fa(((u_arrmul32_and1_31 >> 0) & 0x01), ((u_arrmul32_fa2_30_xor1 >> 0) & 0x01), ((u_arrmul32_ha0_31_and0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and2_31 = and_gate(((a >> 2) & 0x01), ((b >> 31) & 0x01));
|
|
u_arrmul32_fa2_31_xor1 = (fa(((u_arrmul32_and2_31 >> 0) & 0x01), ((u_arrmul32_fa3_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa2_31_or0 = (fa(((u_arrmul32_and2_31 >> 0) & 0x01), ((u_arrmul32_fa3_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa1_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and3_31 = and_gate(((a >> 3) & 0x01), ((b >> 31) & 0x01));
|
|
u_arrmul32_fa3_31_xor1 = (fa(((u_arrmul32_and3_31 >> 0) & 0x01), ((u_arrmul32_fa4_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa3_31_or0 = (fa(((u_arrmul32_and3_31 >> 0) & 0x01), ((u_arrmul32_fa4_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa2_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and4_31 = and_gate(((a >> 4) & 0x01), ((b >> 31) & 0x01));
|
|
u_arrmul32_fa4_31_xor1 = (fa(((u_arrmul32_and4_31 >> 0) & 0x01), ((u_arrmul32_fa5_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa4_31_or0 = (fa(((u_arrmul32_and4_31 >> 0) & 0x01), ((u_arrmul32_fa5_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa3_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and5_31 = and_gate(((a >> 5) & 0x01), ((b >> 31) & 0x01));
|
|
u_arrmul32_fa5_31_xor1 = (fa(((u_arrmul32_and5_31 >> 0) & 0x01), ((u_arrmul32_fa6_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa5_31_or0 = (fa(((u_arrmul32_and5_31 >> 0) & 0x01), ((u_arrmul32_fa6_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa4_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and6_31 = and_gate(((a >> 6) & 0x01), ((b >> 31) & 0x01));
|
|
u_arrmul32_fa6_31_xor1 = (fa(((u_arrmul32_and6_31 >> 0) & 0x01), ((u_arrmul32_fa7_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa6_31_or0 = (fa(((u_arrmul32_and6_31 >> 0) & 0x01), ((u_arrmul32_fa7_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa5_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and7_31 = and_gate(((a >> 7) & 0x01), ((b >> 31) & 0x01));
|
|
u_arrmul32_fa7_31_xor1 = (fa(((u_arrmul32_and7_31 >> 0) & 0x01), ((u_arrmul32_fa8_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa7_31_or0 = (fa(((u_arrmul32_and7_31 >> 0) & 0x01), ((u_arrmul32_fa8_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa6_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and8_31 = and_gate(((a >> 8) & 0x01), ((b >> 31) & 0x01));
|
|
u_arrmul32_fa8_31_xor1 = (fa(((u_arrmul32_and8_31 >> 0) & 0x01), ((u_arrmul32_fa9_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa8_31_or0 = (fa(((u_arrmul32_and8_31 >> 0) & 0x01), ((u_arrmul32_fa9_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa7_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and9_31 = and_gate(((a >> 9) & 0x01), ((b >> 31) & 0x01));
|
|
u_arrmul32_fa9_31_xor1 = (fa(((u_arrmul32_and9_31 >> 0) & 0x01), ((u_arrmul32_fa10_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa9_31_or0 = (fa(((u_arrmul32_and9_31 >> 0) & 0x01), ((u_arrmul32_fa10_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa8_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and10_31 = and_gate(((a >> 10) & 0x01), ((b >> 31) & 0x01));
|
|
u_arrmul32_fa10_31_xor1 = (fa(((u_arrmul32_and10_31 >> 0) & 0x01), ((u_arrmul32_fa11_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa10_31_or0 = (fa(((u_arrmul32_and10_31 >> 0) & 0x01), ((u_arrmul32_fa11_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa9_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and11_31 = and_gate(((a >> 11) & 0x01), ((b >> 31) & 0x01));
|
|
u_arrmul32_fa11_31_xor1 = (fa(((u_arrmul32_and11_31 >> 0) & 0x01), ((u_arrmul32_fa12_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa11_31_or0 = (fa(((u_arrmul32_and11_31 >> 0) & 0x01), ((u_arrmul32_fa12_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa10_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and12_31 = and_gate(((a >> 12) & 0x01), ((b >> 31) & 0x01));
|
|
u_arrmul32_fa12_31_xor1 = (fa(((u_arrmul32_and12_31 >> 0) & 0x01), ((u_arrmul32_fa13_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa12_31_or0 = (fa(((u_arrmul32_and12_31 >> 0) & 0x01), ((u_arrmul32_fa13_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa11_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and13_31 = and_gate(((a >> 13) & 0x01), ((b >> 31) & 0x01));
|
|
u_arrmul32_fa13_31_xor1 = (fa(((u_arrmul32_and13_31 >> 0) & 0x01), ((u_arrmul32_fa14_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa13_31_or0 = (fa(((u_arrmul32_and13_31 >> 0) & 0x01), ((u_arrmul32_fa14_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa12_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and14_31 = and_gate(((a >> 14) & 0x01), ((b >> 31) & 0x01));
|
|
u_arrmul32_fa14_31_xor1 = (fa(((u_arrmul32_and14_31 >> 0) & 0x01), ((u_arrmul32_fa15_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa14_31_or0 = (fa(((u_arrmul32_and14_31 >> 0) & 0x01), ((u_arrmul32_fa15_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa13_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and15_31 = and_gate(((a >> 15) & 0x01), ((b >> 31) & 0x01));
|
|
u_arrmul32_fa15_31_xor1 = (fa(((u_arrmul32_and15_31 >> 0) & 0x01), ((u_arrmul32_fa16_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
|
|
u_arrmul32_fa15_31_or0 = (fa(((u_arrmul32_and15_31 >> 0) & 0x01), ((u_arrmul32_fa16_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa14_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
|
|
u_arrmul32_and16_31 = and_gate(((a >> 16) & 0x01), ((b >> 31) & 0x01));
|
|
u_arrmul32_fa16_31_xor1 = (fa(((u_arrmul32_and16_31 >> 0) & 0x01), ((u_arrmul32_fa17_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa16_31_or0 = (fa(((u_arrmul32_and16_31 >> 0) & 0x01), ((u_arrmul32_fa17_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa15_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and17_31 = and_gate(((a >> 17) & 0x01), ((b >> 31) & 0x01));
u_arrmul32_fa17_31_xor1 = (fa(((u_arrmul32_and17_31 >> 0) & 0x01), ((u_arrmul32_fa18_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa17_31_or0 = (fa(((u_arrmul32_and17_31 >> 0) & 0x01), ((u_arrmul32_fa18_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa16_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and18_31 = and_gate(((a >> 18) & 0x01), ((b >> 31) & 0x01));
u_arrmul32_fa18_31_xor1 = (fa(((u_arrmul32_and18_31 >> 0) & 0x01), ((u_arrmul32_fa19_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa18_31_or0 = (fa(((u_arrmul32_and18_31 >> 0) & 0x01), ((u_arrmul32_fa19_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa17_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and19_31 = and_gate(((a >> 19) & 0x01), ((b >> 31) & 0x01));
u_arrmul32_fa19_31_xor1 = (fa(((u_arrmul32_and19_31 >> 0) & 0x01), ((u_arrmul32_fa20_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa19_31_or0 = (fa(((u_arrmul32_and19_31 >> 0) & 0x01), ((u_arrmul32_fa20_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa18_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and20_31 = and_gate(((a >> 20) & 0x01), ((b >> 31) & 0x01));
u_arrmul32_fa20_31_xor1 = (fa(((u_arrmul32_and20_31 >> 0) & 0x01), ((u_arrmul32_fa21_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa20_31_or0 = (fa(((u_arrmul32_and20_31 >> 0) & 0x01), ((u_arrmul32_fa21_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa19_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and21_31 = and_gate(((a >> 21) & 0x01), ((b >> 31) & 0x01));
u_arrmul32_fa21_31_xor1 = (fa(((u_arrmul32_and21_31 >> 0) & 0x01), ((u_arrmul32_fa22_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa21_31_or0 = (fa(((u_arrmul32_and21_31 >> 0) & 0x01), ((u_arrmul32_fa22_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa20_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and22_31 = and_gate(((a >> 22) & 0x01), ((b >> 31) & 0x01));
u_arrmul32_fa22_31_xor1 = (fa(((u_arrmul32_and22_31 >> 0) & 0x01), ((u_arrmul32_fa23_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa22_31_or0 = (fa(((u_arrmul32_and22_31 >> 0) & 0x01), ((u_arrmul32_fa23_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa21_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and23_31 = and_gate(((a >> 23) & 0x01), ((b >> 31) & 0x01));
u_arrmul32_fa23_31_xor1 = (fa(((u_arrmul32_and23_31 >> 0) & 0x01), ((u_arrmul32_fa24_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa23_31_or0 = (fa(((u_arrmul32_and23_31 >> 0) & 0x01), ((u_arrmul32_fa24_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa22_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and24_31 = and_gate(((a >> 24) & 0x01), ((b >> 31) & 0x01));
u_arrmul32_fa24_31_xor1 = (fa(((u_arrmul32_and24_31 >> 0) & 0x01), ((u_arrmul32_fa25_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa24_31_or0 = (fa(((u_arrmul32_and24_31 >> 0) & 0x01), ((u_arrmul32_fa25_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa23_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and25_31 = and_gate(((a >> 25) & 0x01), ((b >> 31) & 0x01));
u_arrmul32_fa25_31_xor1 = (fa(((u_arrmul32_and25_31 >> 0) & 0x01), ((u_arrmul32_fa26_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa25_31_or0 = (fa(((u_arrmul32_and25_31 >> 0) & 0x01), ((u_arrmul32_fa26_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa24_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and26_31 = and_gate(((a >> 26) & 0x01), ((b >> 31) & 0x01));
u_arrmul32_fa26_31_xor1 = (fa(((u_arrmul32_and26_31 >> 0) & 0x01), ((u_arrmul32_fa27_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa26_31_or0 = (fa(((u_arrmul32_and26_31 >> 0) & 0x01), ((u_arrmul32_fa27_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa25_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and27_31 = and_gate(((a >> 27) & 0x01), ((b >> 31) & 0x01));
u_arrmul32_fa27_31_xor1 = (fa(((u_arrmul32_and27_31 >> 0) & 0x01), ((u_arrmul32_fa28_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa27_31_or0 = (fa(((u_arrmul32_and27_31 >> 0) & 0x01), ((u_arrmul32_fa28_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa26_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and28_31 = and_gate(((a >> 28) & 0x01), ((b >> 31) & 0x01));
u_arrmul32_fa28_31_xor1 = (fa(((u_arrmul32_and28_31 >> 0) & 0x01), ((u_arrmul32_fa29_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa28_31_or0 = (fa(((u_arrmul32_and28_31 >> 0) & 0x01), ((u_arrmul32_fa29_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa27_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and29_31 = and_gate(((a >> 29) & 0x01), ((b >> 31) & 0x01));
u_arrmul32_fa29_31_xor1 = (fa(((u_arrmul32_and29_31 >> 0) & 0x01), ((u_arrmul32_fa30_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa29_31_or0 = (fa(((u_arrmul32_and29_31 >> 0) & 0x01), ((u_arrmul32_fa30_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa28_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and30_31 = and_gate(((a >> 30) & 0x01), ((b >> 31) & 0x01));
u_arrmul32_fa30_31_xor1 = (fa(((u_arrmul32_and30_31 >> 0) & 0x01), ((u_arrmul32_fa31_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa30_31_or0 = (fa(((u_arrmul32_and30_31 >> 0) & 0x01), ((u_arrmul32_fa31_30_xor1 >> 0) & 0x01), ((u_arrmul32_fa29_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
u_arrmul32_and31_31 = and_gate(((a >> 31) & 0x01), ((b >> 31) & 0x01));
u_arrmul32_fa31_31_xor1 = (fa(((u_arrmul32_and31_31 >> 0) & 0x01), ((u_arrmul32_fa31_30_or0 >> 0) & 0x01), ((u_arrmul32_fa30_31_or0 >> 0) & 0x01)) >> 0) & 0x01;
u_arrmul32_fa31_31_or0 = (fa(((u_arrmul32_and31_31 >> 0) & 0x01), ((u_arrmul32_fa31_30_or0 >> 0) & 0x01), ((u_arrmul32_fa30_31_or0 >> 0) & 0x01)) >> 1) & 0x01;
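/* End of the bit-31 partial-product row: each cell ANDs a[i] with b[31] and
   full-adds it with the sum bit from the row above (fa{i+1}_30_xor1) and the
   carry rippling in from the previous cell (fa{i-1}_31_or0); the last cell
   instead consumes the previous row's final carry, fa31_30_or0. */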
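/* Assemble the 64-bit product: bit 0 is the raw partial product a[0]&b[0],
   bits 1-31 are the sum outputs of each row's first (half-adder) cell,
   bits 32-62 are the sums of the final bit-31 row, and bit 63 is the
   multiplier's final carry-out. */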
u_arrmul32_out |= ((u_arrmul32_and0_0 >> 0) & 0x01ull) << 0;
u_arrmul32_out |= ((u_arrmul32_ha0_1_xor0 >> 0) & 0x01ull) << 1;
u_arrmul32_out |= ((u_arrmul32_ha0_2_xor0 >> 0) & 0x01ull) << 2;
u_arrmul32_out |= ((u_arrmul32_ha0_3_xor0 >> 0) & 0x01ull) << 3;
u_arrmul32_out |= ((u_arrmul32_ha0_4_xor0 >> 0) & 0x01ull) << 4;
u_arrmul32_out |= ((u_arrmul32_ha0_5_xor0 >> 0) & 0x01ull) << 5;
u_arrmul32_out |= ((u_arrmul32_ha0_6_xor0 >> 0) & 0x01ull) << 6;
u_arrmul32_out |= ((u_arrmul32_ha0_7_xor0 >> 0) & 0x01ull) << 7;
u_arrmul32_out |= ((u_arrmul32_ha0_8_xor0 >> 0) & 0x01ull) << 8;
u_arrmul32_out |= ((u_arrmul32_ha0_9_xor0 >> 0) & 0x01ull) << 9;
u_arrmul32_out |= ((u_arrmul32_ha0_10_xor0 >> 0) & 0x01ull) << 10;
u_arrmul32_out |= ((u_arrmul32_ha0_11_xor0 >> 0) & 0x01ull) << 11;
u_arrmul32_out |= ((u_arrmul32_ha0_12_xor0 >> 0) & 0x01ull) << 12;
u_arrmul32_out |= ((u_arrmul32_ha0_13_xor0 >> 0) & 0x01ull) << 13;
u_arrmul32_out |= ((u_arrmul32_ha0_14_xor0 >> 0) & 0x01ull) << 14;
u_arrmul32_out |= ((u_arrmul32_ha0_15_xor0 >> 0) & 0x01ull) << 15;
u_arrmul32_out |= ((u_arrmul32_ha0_16_xor0 >> 0) & 0x01ull) << 16;
u_arrmul32_out |= ((u_arrmul32_ha0_17_xor0 >> 0) & 0x01ull) << 17;
u_arrmul32_out |= ((u_arrmul32_ha0_18_xor0 >> 0) & 0x01ull) << 18;
u_arrmul32_out |= ((u_arrmul32_ha0_19_xor0 >> 0) & 0x01ull) << 19;
u_arrmul32_out |= ((u_arrmul32_ha0_20_xor0 >> 0) & 0x01ull) << 20;
u_arrmul32_out |= ((u_arrmul32_ha0_21_xor0 >> 0) & 0x01ull) << 21;
u_arrmul32_out |= ((u_arrmul32_ha0_22_xor0 >> 0) & 0x01ull) << 22;
u_arrmul32_out |= ((u_arrmul32_ha0_23_xor0 >> 0) & 0x01ull) << 23;
u_arrmul32_out |= ((u_arrmul32_ha0_24_xor0 >> 0) & 0x01ull) << 24;
u_arrmul32_out |= ((u_arrmul32_ha0_25_xor0 >> 0) & 0x01ull) << 25;
u_arrmul32_out |= ((u_arrmul32_ha0_26_xor0 >> 0) & 0x01ull) << 26;
u_arrmul32_out |= ((u_arrmul32_ha0_27_xor0 >> 0) & 0x01ull) << 27;
u_arrmul32_out |= ((u_arrmul32_ha0_28_xor0 >> 0) & 0x01ull) << 28;
u_arrmul32_out |= ((u_arrmul32_ha0_29_xor0 >> 0) & 0x01ull) << 29;
u_arrmul32_out |= ((u_arrmul32_ha0_30_xor0 >> 0) & 0x01ull) << 30;
u_arrmul32_out |= ((u_arrmul32_ha0_31_xor0 >> 0) & 0x01ull) << 31;
u_arrmul32_out |= ((u_arrmul32_fa1_31_xor1 >> 0) & 0x01ull) << 32;
u_arrmul32_out |= ((u_arrmul32_fa2_31_xor1 >> 0) & 0x01ull) << 33;
u_arrmul32_out |= ((u_arrmul32_fa3_31_xor1 >> 0) & 0x01ull) << 34;
u_arrmul32_out |= ((u_arrmul32_fa4_31_xor1 >> 0) & 0x01ull) << 35;
u_arrmul32_out |= ((u_arrmul32_fa5_31_xor1 >> 0) & 0x01ull) << 36;
u_arrmul32_out |= ((u_arrmul32_fa6_31_xor1 >> 0) & 0x01ull) << 37;
u_arrmul32_out |= ((u_arrmul32_fa7_31_xor1 >> 0) & 0x01ull) << 38;
u_arrmul32_out |= ((u_arrmul32_fa8_31_xor1 >> 0) & 0x01ull) << 39;
u_arrmul32_out |= ((u_arrmul32_fa9_31_xor1 >> 0) & 0x01ull) << 40;
u_arrmul32_out |= ((u_arrmul32_fa10_31_xor1 >> 0) & 0x01ull) << 41;
u_arrmul32_out |= ((u_arrmul32_fa11_31_xor1 >> 0) & 0x01ull) << 42;
u_arrmul32_out |= ((u_arrmul32_fa12_31_xor1 >> 0) & 0x01ull) << 43;
u_arrmul32_out |= ((u_arrmul32_fa13_31_xor1 >> 0) & 0x01ull) << 44;
u_arrmul32_out |= ((u_arrmul32_fa14_31_xor1 >> 0) & 0x01ull) << 45;
u_arrmul32_out |= ((u_arrmul32_fa15_31_xor1 >> 0) & 0x01ull) << 46;
u_arrmul32_out |= ((u_arrmul32_fa16_31_xor1 >> 0) & 0x01ull) << 47;
u_arrmul32_out |= ((u_arrmul32_fa17_31_xor1 >> 0) & 0x01ull) << 48;
u_arrmul32_out |= ((u_arrmul32_fa18_31_xor1 >> 0) & 0x01ull) << 49;
u_arrmul32_out |= ((u_arrmul32_fa19_31_xor1 >> 0) & 0x01ull) << 50;
u_arrmul32_out |= ((u_arrmul32_fa20_31_xor1 >> 0) & 0x01ull) << 51;
u_arrmul32_out |= ((u_arrmul32_fa21_31_xor1 >> 0) & 0x01ull) << 52;
u_arrmul32_out |= ((u_arrmul32_fa22_31_xor1 >> 0) & 0x01ull) << 53;
u_arrmul32_out |= ((u_arrmul32_fa23_31_xor1 >> 0) & 0x01ull) << 54;
u_arrmul32_out |= ((u_arrmul32_fa24_31_xor1 >> 0) & 0x01ull) << 55;
u_arrmul32_out |= ((u_arrmul32_fa25_31_xor1 >> 0) & 0x01ull) << 56;
u_arrmul32_out |= ((u_arrmul32_fa26_31_xor1 >> 0) & 0x01ull) << 57;
u_arrmul32_out |= ((u_arrmul32_fa27_31_xor1 >> 0) & 0x01ull) << 58;
u_arrmul32_out |= ((u_arrmul32_fa28_31_xor1 >> 0) & 0x01ull) << 59;
u_arrmul32_out |= ((u_arrmul32_fa29_31_xor1 >> 0) & 0x01ull) << 60;
u_arrmul32_out |= ((u_arrmul32_fa30_31_xor1 >> 0) & 0x01ull) << 61;
u_arrmul32_out |= ((u_arrmul32_fa31_31_xor1 >> 0) & 0x01ull) << 62;
u_arrmul32_out |= ((u_arrmul32_fa31_31_or0 >> 0) & 0x01ull) << 63;

return u_arrmul32_out;
}
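/* A minimal self-test sketch, not part of the generator's output: checks the
   gate-level multiplier above against the native 64-bit product on a few
   vectors. It relies on the <stdio.h> and <stdint.h> includes at the top of
   this file; the U_ARRMUL32_SELFTEST guard is a hypothetical name used only
   to keep this main() out of builds that provide their own. */
#ifdef U_ARRMUL32_SELFTEST
int main(void){
    const uint32_t vecs[][2] = {
        {0u, 0u},
        {1u, 0xFFFFFFFFu},
        {0xDEADBEEFu, 0xCAFEBABEu},
        {0xFFFFFFFFu, 0xFFFFFFFFu},
    };
    int ok = 1;
    for (unsigned i = 0; i < sizeof(vecs)/sizeof(vecs[0]); i++){
        /* The circuit only reads bits 0..31 of each operand. */
        uint64_t got  = u_arrmul32(vecs[i][0], vecs[i][1]);
        uint64_t want = (uint64_t)vecs[i][0] * (uint64_t)vecs[i][1];
        if (got != want) ok = 0;
        printf("%08lx * %08lx = %016llx (expected %016llx)\n",
               (unsigned long)vecs[i][0], (unsigned long)vecs[i][1],
               (unsigned long long)got, (unsigned long long)want);
    }
    return ok ? 0 : 1;
}
#endif /* U_ARRMUL32_SELFTEST */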