Jan Klhůfek 56c86c13ca
New multipliers (#13)
* #10 CGP Circuits as inputs (#11)

* CGP Circuits as inputs

* #10 support of signed output in general circuit

* input as output works

* output connected to input (c)

* automated Verilog testing

* output rename

* Implemented CSA and a Wallace tree multiplier composed of CSAs. Also did some code cleanup.

* Typo fixes and code cleanup.

* Added new (approximate) multiplier architectures and made some minor changes regarding sign extension for C output formats.

* Updated automated testing scripts.

* Small bugfix in Python code generation (I initially thought this line was useless).

* Updated generated circuits folder.

Co-authored-by: Vojta Mrazek <mrazek@fit.vutbr.cz>
2022-04-17 16:00:00 +02:00


module s_arrmul4(input [3:0] a, input [3:0] b, output [7:0] s_arrmul4_out);
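// 4x4 signed (two's-complement) array multiplier, generated as a flat gate-level netlist.
// Sign handling follows a Baugh-Wooley-style scheme: partial products that involve exactly
// one operand sign bit (a[3] or b[3]) are generated with NAND gates, a constant-1 correction
// is folded into the column-4 adder (fa3_1), and the final carry is complemented to form
// the product MSB.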
wire s_arrmul4_and0_0;
wire s_arrmul4_and1_0;
wire s_arrmul4_and2_0;
wire s_arrmul4_nand3_0;
wire s_arrmul4_and0_1;
wire s_arrmul4_ha0_1_xor0;
wire s_arrmul4_ha0_1_and0;
wire s_arrmul4_and1_1;
wire s_arrmul4_fa1_1_xor0;
wire s_arrmul4_fa1_1_and0;
wire s_arrmul4_fa1_1_xor1;
wire s_arrmul4_fa1_1_and1;
wire s_arrmul4_fa1_1_or0;
wire s_arrmul4_and2_1;
wire s_arrmul4_fa2_1_xor0;
wire s_arrmul4_fa2_1_and0;
wire s_arrmul4_fa2_1_xor1;
wire s_arrmul4_fa2_1_and1;
wire s_arrmul4_fa2_1_or0;
wire s_arrmul4_nand3_1;
wire s_arrmul4_fa3_1_xor0;
wire s_arrmul4_fa3_1_xor1;
wire s_arrmul4_fa3_1_and1;
wire s_arrmul4_fa3_1_or0;
wire s_arrmul4_and0_2;
wire s_arrmul4_ha0_2_xor0;
wire s_arrmul4_ha0_2_and0;
wire s_arrmul4_and1_2;
wire s_arrmul4_fa1_2_xor0;
wire s_arrmul4_fa1_2_and0;
wire s_arrmul4_fa1_2_xor1;
wire s_arrmul4_fa1_2_and1;
wire s_arrmul4_fa1_2_or0;
wire s_arrmul4_and2_2;
wire s_arrmul4_fa2_2_xor0;
wire s_arrmul4_fa2_2_and0;
wire s_arrmul4_fa2_2_xor1;
wire s_arrmul4_fa2_2_and1;
wire s_arrmul4_fa2_2_or0;
wire s_arrmul4_nand3_2;
wire s_arrmul4_fa3_2_xor0;
wire s_arrmul4_fa3_2_and0;
wire s_arrmul4_fa3_2_xor1;
wire s_arrmul4_fa3_2_and1;
wire s_arrmul4_fa3_2_or0;
wire s_arrmul4_nand0_3;
wire s_arrmul4_ha0_3_xor0;
wire s_arrmul4_ha0_3_and0;
wire s_arrmul4_nand1_3;
wire s_arrmul4_fa1_3_xor0;
wire s_arrmul4_fa1_3_and0;
wire s_arrmul4_fa1_3_xor1;
wire s_arrmul4_fa1_3_and1;
wire s_arrmul4_fa1_3_or0;
wire s_arrmul4_nand2_3;
wire s_arrmul4_fa2_3_xor0;
wire s_arrmul4_fa2_3_and0;
wire s_arrmul4_fa2_3_xor1;
wire s_arrmul4_fa2_3_and1;
wire s_arrmul4_fa2_3_or0;
wire s_arrmul4_and3_3;
wire s_arrmul4_fa3_3_xor0;
wire s_arrmul4_fa3_3_and0;
wire s_arrmul4_fa3_3_xor1;
wire s_arrmul4_fa3_3_and1;
wire s_arrmul4_fa3_3_or0;
wire s_arrmul4_xor4_3;
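// Row 0: partial products a[i] & b[0]; the term a[3] & b[0] is inverted (NAND).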
assign s_arrmul4_and0_0 = a[0] & b[0];
assign s_arrmul4_and1_0 = a[1] & b[0];
assign s_arrmul4_and2_0 = a[2] & b[0];
assign s_arrmul4_nand3_0 = ~(a[3] & b[0]);
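// Row 1: partial products with b[1], reduced by a half adder (ha0_1) and full adders
// (fa1_1..fa3_1); fa3_1 effectively has one input tied high, absorbing the constant-1
// correction term at column 4.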
assign s_arrmul4_and0_1 = a[0] & b[1];
assign s_arrmul4_ha0_1_xor0 = s_arrmul4_and0_1 ^ s_arrmul4_and1_0;
assign s_arrmul4_ha0_1_and0 = s_arrmul4_and0_1 & s_arrmul4_and1_0;
assign s_arrmul4_and1_1 = a[1] & b[1];
assign s_arrmul4_fa1_1_xor0 = s_arrmul4_and1_1 ^ s_arrmul4_and2_0;
assign s_arrmul4_fa1_1_and0 = s_arrmul4_and1_1 & s_arrmul4_and2_0;
assign s_arrmul4_fa1_1_xor1 = s_arrmul4_fa1_1_xor0 ^ s_arrmul4_ha0_1_and0;
assign s_arrmul4_fa1_1_and1 = s_arrmul4_fa1_1_xor0 & s_arrmul4_ha0_1_and0;
assign s_arrmul4_fa1_1_or0 = s_arrmul4_fa1_1_and0 | s_arrmul4_fa1_1_and1;
assign s_arrmul4_and2_1 = a[2] & b[1];
assign s_arrmul4_fa2_1_xor0 = s_arrmul4_and2_1 ^ s_arrmul4_nand3_0;
assign s_arrmul4_fa2_1_and0 = s_arrmul4_and2_1 & s_arrmul4_nand3_0;
assign s_arrmul4_fa2_1_xor1 = s_arrmul4_fa2_1_xor0 ^ s_arrmul4_fa1_1_or0;
assign s_arrmul4_fa2_1_and1 = s_arrmul4_fa2_1_xor0 & s_arrmul4_fa1_1_or0;
assign s_arrmul4_fa2_1_or0 = s_arrmul4_fa2_1_and0 | s_arrmul4_fa2_1_and1;
assign s_arrmul4_nand3_1 = ~(a[3] & b[1]);
assign s_arrmul4_fa3_1_xor0 = ~s_arrmul4_nand3_1;
assign s_arrmul4_fa3_1_xor1 = s_arrmul4_fa3_1_xor0 ^ s_arrmul4_fa2_1_or0;
assign s_arrmul4_fa3_1_and1 = s_arrmul4_fa3_1_xor0 & s_arrmul4_fa2_1_or0;
assign s_arrmul4_fa3_1_or0 = s_arrmul4_nand3_1 | s_arrmul4_fa3_1_and1;
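// Row 2: partial products with b[2], added to the row-1 column sums and carries.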
assign s_arrmul4_and0_2 = a[0] & b[2];
assign s_arrmul4_ha0_2_xor0 = s_arrmul4_and0_2 ^ s_arrmul4_fa1_1_xor1;
assign s_arrmul4_ha0_2_and0 = s_arrmul4_and0_2 & s_arrmul4_fa1_1_xor1;
assign s_arrmul4_and1_2 = a[1] & b[2];
assign s_arrmul4_fa1_2_xor0 = s_arrmul4_and1_2 ^ s_arrmul4_fa2_1_xor1;
assign s_arrmul4_fa1_2_and0 = s_arrmul4_and1_2 & s_arrmul4_fa2_1_xor1;
assign s_arrmul4_fa1_2_xor1 = s_arrmul4_fa1_2_xor0 ^ s_arrmul4_ha0_2_and0;
assign s_arrmul4_fa1_2_and1 = s_arrmul4_fa1_2_xor0 & s_arrmul4_ha0_2_and0;
assign s_arrmul4_fa1_2_or0 = s_arrmul4_fa1_2_and0 | s_arrmul4_fa1_2_and1;
assign s_arrmul4_and2_2 = a[2] & b[2];
assign s_arrmul4_fa2_2_xor0 = s_arrmul4_and2_2 ^ s_arrmul4_fa3_1_xor1;
assign s_arrmul4_fa2_2_and0 = s_arrmul4_and2_2 & s_arrmul4_fa3_1_xor1;
assign s_arrmul4_fa2_2_xor1 = s_arrmul4_fa2_2_xor0 ^ s_arrmul4_fa1_2_or0;
assign s_arrmul4_fa2_2_and1 = s_arrmul4_fa2_2_xor0 & s_arrmul4_fa1_2_or0;
assign s_arrmul4_fa2_2_or0 = s_arrmul4_fa2_2_and0 | s_arrmul4_fa2_2_and1;
assign s_arrmul4_nand3_2 = ~(a[3] & b[2]);
assign s_arrmul4_fa3_2_xor0 = s_arrmul4_nand3_2 ^ s_arrmul4_fa3_1_or0;
assign s_arrmul4_fa3_2_and0 = s_arrmul4_nand3_2 & s_arrmul4_fa3_1_or0;
assign s_arrmul4_fa3_2_xor1 = s_arrmul4_fa3_2_xor0 ^ s_arrmul4_fa2_2_or0;
assign s_arrmul4_fa3_2_and1 = s_arrmul4_fa3_2_xor0 & s_arrmul4_fa2_2_or0;
assign s_arrmul4_fa3_2_or0 = s_arrmul4_fa3_2_and0 | s_arrmul4_fa3_2_and1;
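// Row 3: partial products with b[3]; a[0..2] & b[3] are inverted (NAND), a[3] & b[3] is a plain AND.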
assign s_arrmul4_nand0_3 = ~(a[0] & b[3]);
assign s_arrmul4_ha0_3_xor0 = s_arrmul4_nand0_3 ^ s_arrmul4_fa1_2_xor1;
assign s_arrmul4_ha0_3_and0 = s_arrmul4_nand0_3 & s_arrmul4_fa1_2_xor1;
assign s_arrmul4_nand1_3 = ~(a[1] & b[3]);
assign s_arrmul4_fa1_3_xor0 = s_arrmul4_nand1_3 ^ s_arrmul4_fa2_2_xor1;
assign s_arrmul4_fa1_3_and0 = s_arrmul4_nand1_3 & s_arrmul4_fa2_2_xor1;
assign s_arrmul4_fa1_3_xor1 = s_arrmul4_fa1_3_xor0 ^ s_arrmul4_ha0_3_and0;
assign s_arrmul4_fa1_3_and1 = s_arrmul4_fa1_3_xor0 & s_arrmul4_ha0_3_and0;
assign s_arrmul4_fa1_3_or0 = s_arrmul4_fa1_3_and0 | s_arrmul4_fa1_3_and1;
assign s_arrmul4_nand2_3 = ~(a[2] & b[3]);
assign s_arrmul4_fa2_3_xor0 = s_arrmul4_nand2_3 ^ s_arrmul4_fa3_2_xor1;
assign s_arrmul4_fa2_3_and0 = s_arrmul4_nand2_3 & s_arrmul4_fa3_2_xor1;
assign s_arrmul4_fa2_3_xor1 = s_arrmul4_fa2_3_xor0 ^ s_arrmul4_fa1_3_or0;
assign s_arrmul4_fa2_3_and1 = s_arrmul4_fa2_3_xor0 & s_arrmul4_fa1_3_or0;
assign s_arrmul4_fa2_3_or0 = s_arrmul4_fa2_3_and0 | s_arrmul4_fa2_3_and1;
assign s_arrmul4_and3_3 = a[3] & b[3];
assign s_arrmul4_fa3_3_xor0 = s_arrmul4_and3_3 ^ s_arrmul4_fa3_2_or0;
assign s_arrmul4_fa3_3_and0 = s_arrmul4_and3_3 & s_arrmul4_fa3_2_or0;
assign s_arrmul4_fa3_3_xor1 = s_arrmul4_fa3_3_xor0 ^ s_arrmul4_fa2_3_or0;
assign s_arrmul4_fa3_3_and1 = s_arrmul4_fa3_3_xor0 & s_arrmul4_fa2_3_or0;
assign s_arrmul4_fa3_3_or0 = s_arrmul4_fa3_3_and0 | s_arrmul4_fa3_3_and1;
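// The final carry is complemented (equivalent to adding 1 at bit 7) to produce the sign bit.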
assign s_arrmul4_xor4_3 = ~s_arrmul4_fa3_3_or0;
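// Map the column sums to the 8-bit signed product.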
assign s_arrmul4_out[0] = s_arrmul4_and0_0;
assign s_arrmul4_out[1] = s_arrmul4_ha0_1_xor0;
assign s_arrmul4_out[2] = s_arrmul4_ha0_2_xor0;
assign s_arrmul4_out[3] = s_arrmul4_ha0_3_xor0;
assign s_arrmul4_out[4] = s_arrmul4_fa1_3_xor1;
assign s_arrmul4_out[5] = s_arrmul4_fa2_3_xor1;
assign s_arrmul4_out[6] = s_arrmul4_fa3_3_xor1;
assign s_arrmul4_out[7] = s_arrmul4_xor4_3;
endmodule
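
The commit log above mentions automated Verilog testing. As an illustration only (a minimal sketch, not the repository's actual test script; the testbench name tb_s_arrmul4 and its structure are assumptions), a self-checking testbench can exhaustively compare s_arrmul4 against Verilog's built-in signed multiplication:

module tb_s_arrmul4;
  reg  [3:0] a, b;
  wire [7:0] p;
  integer i, j, errors;

  // Device under test: the signed 4x4 array multiplier above.
  s_arrmul4 dut(.a(a), .b(b), .s_arrmul4_out(p));

  initial begin
    errors = 0;
    // Exhaustive check of all 16 x 16 signed operand pairs.
    for (i = 0; i < 16; i = i + 1) begin
      for (j = 0; j < 16; j = j + 1) begin
        a = i; b = j;
        #1;
        if ($signed(p) !== $signed(a) * $signed(b)) begin
          errors = errors + 1;
          $display("MISMATCH a=%0d b=%0d got=%0d expected=%0d",
                   $signed(a), $signed(b), $signed(p), $signed(a) * $signed(b));
        end
      end
    end
    if (errors == 0)
      $display("s_arrmul4: all 256 cases passed");
    else
      $display("s_arrmul4: %0d mismatches", errors);
    $finish;
  end
endmodule

Because the operands are only 4 bits wide, all 256 input combinations can be checked in a single simulation run.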