// ------------------------------------------------------------
//
// File Name: hdlsrc\qpskhdltest\FIR_Decimation
// Created: 2014-04-21 15:30:25
// Generated by MATLAB 8.2 and HDL Coder 3.3
//
// ------------------------------------------------------------
//
//
// ------------------------------------------------------------
//
// Module: FIR_Decimation
// Source Path: /FIR_Decimation
//
// ------------------------------------------------------------
//
// HDL Implementation : Fully parallel
// Multipliers        : 39
// Folding Factor     : 1

`timescale 1 ns / 1 ns

module FIR_Decimation
          (
           clk,
           enb_1_1_1,
           reset,
           FIR_Decimation_in_re,
           FIR_Decimation_in_im,
           FIR_Decimation_out_re,
           FIR_Decimation_out_im
          );

  input   clk;
  input   enb_1_1_1;
  input   reset;
  input   signed [15:0] FIR_Decimation_in_re;  //sfix16_En15
  input   signed [15:0] FIR_Decimation_in_im;  //sfix16_En15
  output  signed [15:0] FIR_Decimation_out_re;  //sfix16_En15
  output  signed [15:0] FIR_Decimation_out_im;  //sfix16_En15

////////////////////////////////////////////////////////////////
//Module Architecture: FIR_Decimation
////////////////////////////////////////////////////////////////
  // Local Functions
  // Type Definitions
  // Constants
  parameter signed [15:0] coeffphase1_1 = 16'b1111111100110010; //sfix16_En16
  parameter signed [15:0] coeffphase1_2 = 16'b1111111000101100; //sfix16_En16
  parameter signed [15:0] coeffphase1_3 = 16'b1111100001010001; //sfix16_En16
  parameter signed [15:0] coeffphase1_4 = 16'b0111001100111111; //sfix16_En16
  parameter signed [15:0] coeffphase1_5 = 16'b1111100001010001; //sfix16_En16
  parameter signed [15:0] coeffphase1_6 = 16'b1111111000101100; //sfix16_En16
  parameter signed [15:0] coeffphase1_7 = 16'b1111111100110010; //sfix16_En16
  parameter signed [15:0] coeffphase2_1 = 16'b1111111101100001; //sfix16_En16
  parameter signed [15:0] coeffphase2_2 = 16'b1111111010000110; //sfix16_En16
  parameter signed [15:0] coeffphase2_3 = 16'b1111100011000010; //sfix16_En16
  parameter signed [15:0] coeffphase2_4 = 16'b0110110010100111; //sfix16_En16
  parameter signed [15:0] coeffphase2_5 = 16'b1111101111000100; //sfix16_En16
  parameter signed [15:0] coeffphase2_6 = 16'b1111111011011011; //sfix16_En16
  parameter signed [15:0] coeffphase2_7 = 16'b0000000000000000; //sfix16_En16
  parameter signed [15:0] coeffphase3_1 = 16'b0000000000000000; //sfix16_En16
  parameter signed [15:0] coeffphase3_2 = 16'b0000000000000000; //sfix16_En16
  parameter signed [15:0] coeffphase3_3 = 16'b0000000000000000; //sfix16_En16
  parameter signed [15:0] coeffphase3_4 = 16'b0101101010000011; //sfix16_En16
  parameter signed [15:0] coeffphase3_5 = 16'b0000000000000000; //sfix16_En16
  parameter signed [15:0] coeffphase3_6 = 16'b0000000000000000; //sfix16_En16
  parameter signed [15:0] coeffphase3_7 = 16'b0000000000000000; //sfix16_En16
  parameter signed [15:0] coeffphase4_1 = 16'b0000000010111111; //sfix16_En16
  parameter signed [15:0] coeffphase4_2 = 16'b0000000111111010; //sfix16_En16
  parameter signed [15:0] coeffphase4_3 = 16'b0000111110000110; //sfix16_En16
  parameter signed [15:0] coeffphase4_4 = 16'b0100000100110001; //sfix16_En16
  parameter signed [15:0] coeffphase4_5 = 16'b0000001011001001; //sfix16_En16
  parameter signed [15:0] coeffphase4_6 = 16'b0000000011101010; //sfix16_En16
  parameter signed [15:0] coeffphase4_7 = 16'b0000000000000000; //sfix16_En16
  parameter signed [15:0] coeffphase5_1 = 16'b0000000100101010; //sfix16_En16
  parameter signed [15:0] coeffphase5_2 = 16'b0000001101001011; //sfix16_En16
  parameter signed [15:0] coeffphase5_3 = 16'b0010011001101010; //sfix16_En16
  parameter signed [15:0] coeffphase5_4 = 16'b0010011001101010; //sfix16_En16
  parameter signed [15:0] coeffphase5_5 = 16'b0000001101001011; //sfix16_En16
  parameter signed [15:0] coeffphase5_6 = 16'b0000000100101010; //sfix16_En16
  parameter signed [15:0] coeffphase5_7 = 16'b0000000000000000; //sfix16_En16
  parameter signed [15:0] coeffphase6_1 = 16'b0000000011101010; //sfix16_En16
  parameter signed [15:0] coeffphase6_2 = 16'b0000001011001001; //sfix16_En16
  parameter signed [15:0] coeffphase6_3 = 16'b0100000100110001; //sfix16_En16
  parameter signed [15:0] coeffphase6_4 = 16'b0000111110000110; //sfix16_En16
  parameter signed [15:0] coeffphase6_5 = 16'b0000000111111010; //sfix16_En16
  parameter signed [15:0] coeffphase6_6 = 16'b0000000010111111; //sfix16_En16
  parameter signed [15:0] coeffphase6_7 = 16'b0000000000000000; //sfix16_En16
  parameter signed [15:0] coeffphase7_1 = 16'b0000000000000000; //sfix16_En16
  parameter signed [15:0] coeffphase7_2 = 16'b0000000000000000; //sfix16_En16
  parameter signed [15:0] coeffphase7_3 = 16'b0101101010000011; //sfix16_En16
  parameter signed [15:0] coeffphase7_4 = 16'b0000000000000000; //sfix16_En16
  parameter signed [15:0] coeffphase7_5 = 16'b0000000000000000; //sfix16_En16
  parameter signed [15:0] coeffphase7_6 = 16'b0000000000000000; //sfix16_En16
  parameter signed [15:0] coeffphase7_7 = 16'b0000000000000000; //sfix16_En16
  parameter signed [15:0] coeffphase8_1 = 16'b1111111011011011; //sfix16_En16
  parameter signed [15:0] coeffphase8_2 = 16'b1111101111000100; //sfix16_En16
  parameter signed [15:0] coeffphase8_3 = 16'b0110110010100111; //sfix16_En16
  parameter signed [15:0] coeffphase8_4 = 16'b1111100011000010; //sfix16_En16
  parameter signed [15:0] coeffphase8_5 = 16'b1111111010000110; //sfix16_En16
  parameter signed [15:0] coeffphase8_6 = 16'b1111111101100001; //sfix16_En16
  parameter signed [15:0] coeffphase8_7 = 16'b0000000000000000; //sfix16_En16

  // Signals
  reg  [7:0] ring_count; // ufix8
  wire phase_0; // boolean
  wire phase_1; // boolean
  wire phase_2; // boolean
  wire phase_3; // boolean
  wire phase_4; // boolean
  wire phase_5; // boolean
  wire phase_6; // boolean
  wire phase_7; // boolean
  wire signed [15:0] input_typeconvert_re; // sfix16_En15
  wire signed [15:0] input_typeconvert_im; // sfix16_En15
  reg  signed [15:0] input_pipeline_phase0_re [0:5] ; // sfix16_En15
  reg  signed [15:0] input_pipeline_phase0_im [0:5] ; // sfix16_En15
  reg  signed [15:0] input_pipeline_phase1_re [0:5] ; // sfix16_En15
  reg  signed [15:0] input_pipeline_phase1_im [0:5] ; // sfix16_En15
  reg  signed [15:0] input_pipeline_phase2_re [0:3] ; // sfix16_En15
  reg  signed [15:0] input_pipeline_phase2_im [0:3] ; // sfix16_En15
  reg  signed [15:0] input_pipeline_phase3_re [0:5] ; // sfix16_En15
  reg  signed [15:0] input_pipeline_phase3_im [0:5] ; // sfix16_En15
  reg  signed [15:0] input_pipeline_phase4_re [0:5] ; // sfix16_En15
  reg  signed [15:0] input_pipeline_phase4_im [0:5] ; // sfix16_En15
  reg  signed [15:0] input_pipeline_phase5_re [0:5] ; // sfix16_En15
  reg  signed [15:0] input_pipeline_phase5_im [0:5] ; // sfix16_En15
  reg  signed [15:0] input_pipeline_phase6_re [0:2] ; // sfix16_En15
  reg  signed [15:0] input_pipeline_phase6_im [0:2] ; // sfix16_En15
  reg  signed [15:0] input_pipeline_phase7_re [0:5] ; // sfix16_En15
  reg  signed [15:0] input_pipeline_phase7_im [0:5] ; // sfix16_En15
  wire signed [15:0] product_phase0_1_re; // sfix16_En15
  wire signed [15:0] product_phase0_1_im; // sfix16_En15
  wire signed [31:0] mul_temp; // sfix32_En31
  wire signed [31:0] mul_temp_1; // sfix32_En31
  wire signed [15:0] product_phase0_2_re; //
sfix16_En15 wire signed [15:0] product_phase0_2_im; // sfix16_En15 wire signed [31:0] mul_temp_2; // sfix32_En31 wire signed [31:0] mul_temp_3; // sfix32_En31 wire signed [15:0] product_phase0_3_re; // sfix16_En15 wire signed [15:0] product_phase0_3_im; // sfix16_En15 wire signed [31:0] mul_temp_4; // sfix32_En31 wire signed [31:0] mul_temp_5; // sfix32_En31 wire signed [15:0] product_phase0_4_re; // sfix16_En15 wire signed [15:0] product_phase0_4_im; // sfix16_En15 wire signed [31:0] mul_temp_6; // sfix32_En31 wire signed [31:0] mul_temp_7; // sfix32_En31 wire signed [15:0] product_phase0_5_re; // sfix16_En15 wire signed [15:0] product_phase0_5_im; // sfix16_En15 wire signed [31:0] mul_temp_8; // sfix32_En31 wire signed [31:0] mul_temp_9; // sfix32_En31 wire signed [15:0] product_phase0_6_re; // sfix16_En15 wire signed [15:0] product_phase0_6_im; // sfix16_En15 wire signed [31:0] mul_temp_10; // sfix32_En31 wire signed [31:0] mul_temp_11; // sfix32_En31 wire signed [15:0] product_phase0_7_re; // sfix16_En15 wire signed [15:0] product_phase0_7_im; // sfix16_En15 wire signed [31:0] mul_temp_12; // sfix32_En31 wire signed [31:0] mul_temp_13; // sfix32_En31 wire signed [15:0] product_phase1_1_re; // sfix16_En15 wire signed [15:0] product_phase1_1_im; // sfix16_En15 wire signed [31:0] mul_temp_14; // sfix32_En31 wire signed [31:0] mul_temp_15; // sfix32_En31 wire signed [15:0] product_phase1_2_re; // sfix16_En15 wire signed [15:0] product_phase1_2_im; // sfix16_En15 wire signed [31:0] mul_temp_16; // sfix32_En31 wire signed [31:0] mul_temp_17; // sfix32_En31 wire signed [15:0] product_phase1_3_re; // sfix16_En15 wire signed [15:0] product_phase1_3_im; // sfix16_En15 wire signed [31:0] mul_temp_18; // sfix32_En31 wire signed [31:0] mul_temp_19; // sfix32_En31 wire signed [15:0] product_phase1_4_re; // sfix16_En15 wire signed [15:0] product_phase1_4_im; // sfix16_En15 wire signed [31:0] mul_temp_20; // sfix32_En31 wire signed [31:0] mul_temp_21; // sfix32_En31 wire signed [15:0] product_phase1_5_re; // sfix16_En15 wire signed [15:0] product_phase1_5_im; // sfix16_En15 wire signed [31:0] mul_temp_22; // sfix32_En31 wire signed [31:0] mul_temp_23; // sfix32_En31 wire signed [15:0] product_phase1_6_re; // sfix16_En15 wire signed [15:0] product_phase1_6_im; // sfix16_En15 wire signed [31:0] mul_temp_24; // sfix32_En31 wire signed [31:0] mul_temp_25; // sfix32_En31 wire signed [15:0] product_phase2_4_re; // sfix16_En15 wire signed [15:0] product_phase2_4_im; // sfix16_En15 wire signed [31:0] mul_temp_26; // sfix32_En31 wire signed [31:0] mul_temp_27; // sfix32_En31 wire signed [15:0] product_phase3_1_re; // sfix16_En15 wire signed [15:0] product_phase3_1_im; // sfix16_En15 wire signed [31:0] mul_temp_28; // sfix32_En31 wire signed [31:0] mul_temp_29; // sfix32_En31 wire signed [15:0] product_phase3_2_re; // sfix16_En15 wire signed [15:0] product_phase3_2_im; // sfix16_En15 wire signed [31:0] mul_temp_30; // sfix32_En31 wire signed [31:0] mul_temp_31; // sfix32_En31 wire signed [15:0] product_phase3_3_re; // sfix16_En15 wire signed [15:0] product_phase3_3_im; // sfix16_En15 wire signed [31:0] mul_temp_32; // sfix32_En31 wire signed [31:0] mul_temp_33; // sfix32_En31 wire signed [15:0] product_phase3_4_re; // sfix16_En15 wire signed [15:0] product_phase3_4_im; // sfix16_En15 wire signed [31:0] mul_temp_34; // sfix32_En31 wire signed [31:0] mul_temp_35; // sfix32_En31 wire signed [15:0] product_phase3_5_re; // sfix16_En15 wire signed [15:0] product_phase3_5_im; // sfix16_En15 wire signed [31:0] 
mul_temp_36; // sfix32_En31 wire signed [31:0] mul_temp_37; // sfix32_En31 wire signed [15:0] product_phase3_6_re; // sfix16_En15 wire signed [15:0] product_phase3_6_im; // sfix16_En15 wire signed [31:0] mul_temp_38; // sfix32_En31 wire signed [31:0] mul_temp_39; // sfix32_En31 wire signed [15:0] product_phase4_1_re; // sfix16_En15 wire signed [15:0] product_phase4_1_im; // sfix16_En15 wire signed [31:0] mul_temp_40; // sfix32_En31 wire signed [31:0] mul_temp_41; // sfix32_En31 wire signed [15:0] product_phase4_2_re; // sfix16_En15 wire signed [15:0] product_phase4_2_im; // sfix16_En15 wire signed [31:0] mul_temp_42; // sfix32_En31 wire signed [31:0] mul_temp_43; // sfix32_En31 wire signed [15:0] product_phase4_3_re; // sfix16_En15 wire signed [15:0] product_phase4_3_im; // sfix16_En15 wire signed [31:0] mul_temp_44; // sfix32_En31 wire signed [31:0] mul_temp_45; // sfix32_En31 wire signed [15:0] product_phase4_4_re; // sfix16_En15 wire signed [15:0] product_phase4_4_im; // sfix16_En15 wire signed [31:0] mul_temp_46; // sfix32_En31 wire signed [31:0] mul_temp_47; // sfix32_En31 wire signed [15:0] product_phase4_5_re; // sfix16_En15 wire signed [15:0] product_phase4_5_im; // sfix16_En15 wire signed [31:0] mul_temp_48; // sfix32_En31 wire signed [31:0] mul_temp_49; // sfix32_En31 wire signed [15:0] product_phase4_6_re; // sfix16_En15 wire signed [15:0] product_phase4_6_im; // sfix16_En15 wire signed [31:0] mul_temp_50; // sfix32_En31 wire signed [31:0] mul_temp_51; // sfix32_En31 wire signed [15:0] product_phase5_1_re; // sfix16_En15 wire signed [15:0] product_phase5_1_im; // sfix16_En15 wire signed [31:0] mul_temp_52; // sfix32_En31 wire signed [31:0] mul_temp_53; // sfix32_En31 wire signed [15:0] product_phase5_2_re; // sfix16_En15 wire signed [15:0] product_phase5_2_im; // sfix16_En15 wire signed [31:0] mul_temp_54; // sfix32_En31 wire signed [31:0] mul_temp_55; // sfix32_En31 wire signed [15:0] product_phase5_3_re; // sfix16_En15 wire signed [15:0] product_phase5_3_im; // sfix16_En15 wire signed [31:0] mul_temp_56; // sfix32_En31 wire signed [31:0] mul_temp_57; // sfix32_En31 wire signed [15:0] product_phase5_4_re; // sfix16_En15 wire signed [15:0] product_phase5_4_im; // sfix16_En15 wire signed [31:0] mul_temp_58; // sfix32_En31 wire signed [31:0] mul_temp_59; // sfix32_En31 wire signed [15:0] product_phase5_5_re; // sfix16_En15 wire signed [15:0] product_phase5_5_im; // sfix16_En15 wire signed [31:0] mul_temp_60; // sfix32_En31 wire signed [31:0] mul_temp_61; // sfix32_En31 wire signed [15:0] product_phase5_6_re; // sfix16_En15 wire signed [15:0] product_phase5_6_im; // sfix16_En15 wire signed [31:0] mul_temp_62; // sfix32_En31 wire signed [31:0] mul_temp_63; // sfix32_En31 wire signed [15:0] product_phase6_3_re; // sfix16_En15 wire signed [15:0] product_phase6_3_im; // sfix16_En15 wire signed [31:0] mul_temp_64; // sfix32_En31 wire signed [31:0] mul_temp_65; // sfix32_En31 wire signed [15:0] product_phase7_1_re; // sfix16_En15 wire signed [15:0] product_phase7_1_im; // sfix16_En15 wire signed [31:0] mul_temp_66; // sfix32_En31 wire signed [31:0] mul_temp_67; // sfix32_En31 wire signed [15:0] product_phase7_2_re; // sfix16_En15 wire signed [15:0] product_phase7_2_im; // sfix16_En15 wire signed [31:0] mul_temp_68; // sfix32_En31 wire signed [31:0] mul_temp_69; // sfix32_En31 wire signed [15:0] product_phase7_3_re; // sfix16_En15 wire signed [15:0] product_phase7_3_im; // sfix16_En15 wire signed [31:0] mul_temp_70; // sfix32_En31 wire signed [31:0] mul_temp_71; // sfix32_En31 wire signed 
[15:0] product_phase7_4_re; // sfix16_En15 wire signed [15:0] product_phase7_4_im; // sfix16_En15 wire signed [31:0] mul_temp_72; // sfix32_En31 wire signed [31:0] mul_temp_73; // sfix32_En31 wire signed [15:0] product_phase7_5_re; // sfix16_En15 wire signed [15:0] product_phase7_5_im; // sfix16_En15 wire signed [31:0] mul_temp_74; // sfix32_En31 wire signed [31:0] mul_temp_75; // sfix32_En31 wire signed [15:0] product_phase7_6_re; // sfix16_En15 wire signed [15:0] product_phase7_6_im; // sfix16_En15 wire signed [31:0] mul_temp_76; // sfix32_En31 wire signed [31:0] mul_temp_77; // sfix32_En31 wire signed [15:0] quantized_sum_re; // sfix16_En15 wire signed [15:0] quantized_sum_im; // sfix16_En15 wire signed [15:0] sum1_re; // sfix16_En15 wire signed [15:0] sum1_im; // sfix16_En15 wire signed [15:0] add_cast; // sfix16_En15 wire signed [15:0] add_cast_1; // sfix16_En15 wire signed [16:0] add_temp; // sfix17_En15 wire signed [15:0] add_cast_2; // sfix16_En15 wire signed [15:0] add_cast_3; // sfix16_En15 wire signed [16:0] add_temp_1; // sfix17_En15 wire signed [15:0] sum2_re; // sfix16_En15 wire signed [15:0] sum2_im; // sfix16_En15 wire signed [15:0] add_cast_4; // sfix16_En15 wire signed [15:0] add_cast_5; // sfix16_En15 wire signed [16:0] add_temp_2; // sfix17_En15 wire signed [15:0] add_cast_6; // sfix16_En15 wire signed [15:0] add_cast_7; // sfix16_En15 wire signed [16:0] add_temp_3; // sfix17_En15 wire signed [15:0] sum3_re; // sfix16_En15 wire signed [15:0] sum3_im; // sfix16_En15 wire signed [15:0] add_cast_8; // sfix16_En15 wire signed [15:0] add_cast_9; // sfix16_En15 wire signed [16:0] add_temp_4; // sfix17_En15 wire signed [15:0] add_cast_10; // sfix16_En15 wire signed [15:0] add_cast_11; // sfix16_En15 wire signed [16:0] add_temp_5; // sfix17_En15 wire signed [15:0] sum4_re; // sfix16_En15 wire signed [15:0] sum4_im; // sfix16_En15 wire signed [15:0] add_cast_12; // sfix16_En15 wire signed [15:0] add_cast_13; // sfix16_En15 wire signed [16:0] add_temp_6; // sfix17_En15 wire signed [15:0] add_cast_14; // sfix16_En15 wire signed [15:0] add_cast_15; // sfix16_En15 wire signed [16:0] add_temp_7; // sfix17_En15 wire signed [15:0] sum5_re; // sfix16_En15 wire signed [15:0] sum5_im; // sfix16_En15 wire signed [15:0] add_cast_16; // sfix16_En15 wire signed [15:0] add_cast_17; // sfix16_En15 wire signed [16:0] add_temp_8; // sfix17_En15 wire signed [15:0] add_cast_18; // sfix16_En15 wire signed [15:0] add_cast_19; // sfix16_En15 wire signed [16:0] add_temp_9; // sfix17_En15 wire signed [15:0] sum6_re; // sfix16_En15 wire signed [15:0] sum6_im; // sfix16_En15 wire signed [15:0] add_cast_20; // sfix16_En15 wire signed [15:0] add_cast_21; // sfix16_En15 wire signed [16:0] add_temp_10; // sfix17_En15 wire signed [15:0] add_cast_22; // sfix16_En15 wire signed [15:0] add_cast_23; // sfix16_En15 wire signed [16:0] add_temp_11; // sfix17_En15 wire signed [15:0] sum7_re; // sfix16_En15 wire signed [15:0] sum7_im; // sfix16_En15 wire signed [15:0] add_cast_24; // sfix16_En15 wire signed [15:0] add_cast_25; // sfix16_En15 wire signed [16:0] add_temp_12; // sfix17_En15 wire signed [15:0] add_cast_26; // sfix16_En15 wire signed [15:0] add_cast_27; // sfix16_En15 wire signed [16:0] add_temp_13; // sfix17_En15 wire signed [15:0] sum8_re; // sfix16_En15 wire signed [15:0] sum8_im; // sfix16_En15 wire signed [15:0] add_cast_28; // sfix16_En15 wire signed [15:0] add_cast_29; // sfix16_En15 wire signed [16:0] add_temp_14; // sfix17_En15 wire signed [15:0] add_cast_30; // sfix16_En15 wire signed [15:0] 
add_cast_31; // sfix16_En15 wire signed [16:0] add_temp_15; // sfix17_En15 wire signed [15:0] sum9_re; // sfix16_En15 wire signed [15:0] sum9_im; // sfix16_En15 wire signed [15:0] add_cast_32; // sfix16_En15 wire signed [15:0] add_cast_33; // sfix16_En15 wire signed [16:0] add_temp_16; // sfix17_En15 wire signed [15:0] add_cast_34; // sfix16_En15 wire signed [15:0] add_cast_35; // sfix16_En15 wire signed [16:0] add_temp_17; // sfix17_En15 wire signed [15:0] sum10_re; // sfix16_En15 wire signed [15:0] sum10_im; // sfix16_En15 wire signed [15:0] add_cast_36; // sfix16_En15 wire signed [15:0] add_cast_37; // sfix16_En15 wire signed [16:0] add_temp_18; // sfix17_En15 wire signed [15:0] add_cast_38; // sfix16_En15 wire signed [15:0] add_cast_39; // sfix16_En15 wire signed [16:0] add_temp_19; // sfix17_En15 wire signed [15:0] sum11_re; // sfix16_En15 wire signed [15:0] sum11_im; // sfix16_En15 wire signed [15:0] add_cast_40; // sfix16_En15 wire signed [15:0] add_cast_41; // sfix16_En15 wire signed [16:0] add_temp_20; // sfix17_En15 wire signed [15:0] add_cast_42; // sfix16_En15 wire signed [15:0] add_cast_43; // sfix16_En15 wire signed [16:0] add_temp_21; // sfix17_En15 wire signed [15:0] sum12_re; // sfix16_En15 wire signed [15:0] sum12_im; // sfix16_En15 wire signed [15:0] add_cast_44; // sfix16_En15 wire signed [15:0] add_cast_45; // sfix16_En15 wire signed [16:0] add_temp_22; // sfix17_En15 wire signed [15:0] add_cast_46; // sfix16_En15 wire signed [15:0] add_cast_47; // sfix16_En15 wire signed [16:0] add_temp_23; // sfix17_En15 wire signed [15:0] sum13_re; // sfix16_En15 wire signed [15:0] sum13_im; // sfix16_En15 wire signed [15:0] add_cast_48; // sfix16_En15 wire signed [15:0] add_cast_49; // sfix16_En15 wire signed [16:0] add_temp_24; // sfix17_En15 wire signed [15:0] add_cast_50; // sfix16_En15 wire signed [15:0] add_cast_51; // sfix16_En15 wire signed [16:0] add_temp_25; // sfix17_En15 wire signed [15:0] sum14_re; // sfix16_En15 wire signed [15:0] sum14_im; // sfix16_En15 wire signed [15:0] add_cast_52; // sfix16_En15 wire signed [15:0] add_cast_53; // sfix16_En15 wire signed [16:0] add_temp_26; // sfix17_En15 wire signed [15:0] add_cast_54; // sfix16_En15 wire signed [15:0] add_cast_55; // sfix16_En15 wire signed [16:0] add_temp_27; // sfix17_En15 wire signed [15:0] sum15_re; // sfix16_En15 wire signed [15:0] sum15_im; // sfix16_En15 wire signed [15:0] add_cast_56; // sfix16_En15 wire signed [15:0] add_cast_57; // sfix16_En15 wire signed [16:0] add_temp_28; // sfix17_En15 wire signed [15:0] add_cast_58; // sfix16_En15 wire signed [15:0] add_cast_59; // sfix16_En15 wire signed [16:0] add_temp_29; // sfix17_En15 wire signed [15:0] sum16_re; // sfix16_En15 wire signed [15:0] sum16_im; // sfix16_En15 wire signed [15:0] add_cast_60; // sfix16_En15 wire signed [15:0] add_cast_61; // sfix16_En15 wire signed [16:0] add_temp_30; // sfix17_En15 wire signed [15:0] add_cast_62; // sfix16_En15 wire signed [15:0] add_cast_63; // sfix16_En15 wire signed [16:0] add_temp_31; // sfix17_En15 wire signed [15:0] sum17_re; // sfix16_En15 wire signed [15:0] sum17_im; // sfix16_En15 wire signed [15:0] add_cast_64; // sfix16_En15 wire signed [15:0] add_cast_65; // sfix16_En15 wire signed [16:0] add_temp_32; // sfix17_En15 wire signed [15:0] add_cast_66; // sfix16_En15 wire signed [15:0] add_cast_67; // sfix16_En15 wire signed [16:0] add_temp_33; // sfix17_En15 wire signed [15:0] sum18_re; // sfix16_En15 wire signed [15:0] sum18_im; // sfix16_En15 wire signed [15:0] add_cast_68; // sfix16_En15 wire signed 
[15:0] add_cast_69; // sfix16_En15 wire signed [16:0] add_temp_34; // sfix17_En15 wire signed [15:0] add_cast_70; // sfix16_En15 wire signed [15:0] add_cast_71; // sfix16_En15 wire signed [16:0] add_temp_35; // sfix17_En15 wire signed [15:0] sum19_re; // sfix16_En15 wire signed [15:0] sum19_im; // sfix16_En15 wire signed [15:0] add_cast_72; // sfix16_En15 wire signed [15:0] add_cast_73; // sfix16_En15 wire signed [16:0] add_temp_36; // sfix17_En15 wire signed [15:0] add_cast_74; // sfix16_En15 wire signed [15:0] add_cast_75; // sfix16_En15 wire signed [16:0] add_temp_37; // sfix17_En15 wire signed [15:0] sum20_re; // sfix16_En15 wire signed [15:0] sum20_im; // sfix16_En15 wire signed [15:0] add_cast_76; // sfix16_En15 wire signed [15:0] add_cast_77; // sfix16_En15 wire signed [16:0] add_temp_38; // sfix17_En15 wire signed [15:0] add_cast_78; // sfix16_En15 wire signed [15:0] add_cast_79; // sfix16_En15 wire signed [16:0] add_temp_39; // sfix17_En15 wire signed [15:0] sum21_re; // sfix16_En15 wire signed [15:0] sum21_im; // sfix16_En15 wire signed [15:0] add_cast_80; // sfix16_En15 wire signed [15:0] add_cast_81; // sfix16_En15 wire signed [16:0] add_temp_40; // sfix17_En15 wire signed [15:0] add_cast_82; // sfix16_En15 wire signed [15:0] add_cast_83; // sfix16_En15 wire signed [16:0] add_temp_41; // sfix17_En15 wire signed [15:0] sum22_re; // sfix16_En15 wire signed [15:0] sum22_im; // sfix16_En15 wire signed [15:0] add_cast_84; // sfix16_En15 wire signed [15:0] add_cast_85; // sfix16_En15 wire signed [16:0] add_temp_42; // sfix17_En15 wire signed [15:0] add_cast_86; // sfix16_En15 wire signed [15:0] add_cast_87; // sfix16_En15 wire signed [16:0] add_temp_43; // sfix17_En15 wire signed [15:0] sum23_re; // sfix16_En15 wire signed [15:0] sum23_im; // sfix16_En15 wire signed [15:0] add_cast_88; // sfix16_En15 wire signed [15:0] add_cast_89; // sfix16_En15 wire signed [16:0] add_temp_44; // sfix17_En15 wire signed [15:0] add_cast_90; // sfix16_En15 wire signed [15:0] add_cast_91; // sfix16_En15 wire signed [16:0] add_temp_45; // sfix17_En15 wire signed [15:0] sum24_re; // sfix16_En15 wire signed [15:0] sum24_im; // sfix16_En15 wire signed [15:0] add_cast_92; // sfix16_En15 wire signed [15:0] add_cast_93; // sfix16_En15 wire signed [16:0] add_temp_46; // sfix17_En15 wire signed [15:0] add_cast_94; // sfix16_En15 wire signed [15:0] add_cast_95; // sfix16_En15 wire signed [16:0] add_temp_47; // sfix17_En15 wire signed [15:0] sum25_re; // sfix16_En15 wire signed [15:0] sum25_im; // sfix16_En15 wire signed [15:0] add_cast_96; // sfix16_En15 wire signed [15:0] add_cast_97; // sfix16_En15 wire signed [16:0] add_temp_48; // sfix17_En15 wire signed [15:0] add_cast_98; // sfix16_En15 wire signed [15:0] add_cast_99; // sfix16_En15 wire signed [16:0] add_temp_49; // sfix17_En15 wire signed [15:0] sum26_re; // sfix16_En15 wire signed [15:0] sum26_im; // sfix16_En15 wire signed [15:0] add_cast_100; // sfix16_En15 wire signed [15:0] add_cast_101; // sfix16_En15 wire signed [16:0] add_temp_50; // sfix17_En15 wire signed [15:0] add_cast_102; // sfix16_En15 wire signed [15:0] add_cast_103; // sfix16_En15 wire signed [16:0] add_temp_51; // sfix17_En15 wire signed [15:0] sum27_re; // sfix16_En15 wire signed [15:0] sum27_im; // sfix16_En15 wire signed [15:0] add_cast_104; // sfix16_En15 wire signed [15:0] add_cast_105; // sfix16_En15 wire signed [16:0] add_temp_52; // sfix17_En15 wire signed [15:0] add_cast_106; // sfix16_En15 wire signed [15:0] add_cast_107; // sfix16_En15 wire signed [16:0] add_temp_53; // 
sfix17_En15 wire signed [15:0] sum28_re; // sfix16_En15 wire signed [15:0] sum28_im; // sfix16_En15 wire signed [15:0] add_cast_108; // sfix16_En15 wire signed [15:0] add_cast_109; // sfix16_En15 wire signed [16:0] add_temp_54; // sfix17_En15 wire signed [15:0] add_cast_110; // sfix16_En15 wire signed [15:0] add_cast_111; // sfix16_En15 wire signed [16:0] add_temp_55; // sfix17_En15 wire signed [15:0] sum29_re; // sfix16_En15 wire signed [15:0] sum29_im; // sfix16_En15 wire signed [15:0] add_cast_112; // sfix16_En15 wire signed [15:0] add_cast_113; // sfix16_En15 wire signed [16:0] add_temp_56; // sfix17_En15 wire signed [15:0] add_cast_114; // sfix16_En15 wire signed [15:0] add_cast_115; // sfix16_En15 wire signed [16:0] add_temp_57; // sfix17_En15 wire signed [15:0] sum30_re; // sfix16_En15 wire signed [15:0] sum30_im; // sfix16_En15 wire signed [15:0] add_cast_116; // sfix16_En15 wire signed [15:0] add_cast_117; // sfix16_En15 wire signed [16:0] add_temp_58; // sfix17_En15 wire signed [15:0] add_cast_118; // sfix16_En15 wire signed [15:0] add_cast_119; // sfix16_En15 wire signed [16:0] add_temp_59; // sfix17_En15 wire signed [15:0] sum31_re; // sfix16_En15 wire signed [15:0] sum31_im; // sfix16_En15 wire signed [15:0] add_cast_120; // sfix16_En15 wire signed [15:0] add_cast_121; // sfix16_En15 wire signed [16:0] add_temp_60; // sfix17_En15 wire signed [15:0] add_cast_122; // sfix16_En15 wire signed [15:0] add_cast_123; // sfix16_En15 wire signed [16:0] add_temp_61; // sfix17_En15 wire signed [15:0] sum32_re; // sfix16_En15 wire signed [15:0] sum32_im; // sfix16_En15 wire signed [15:0] add_cast_124; // sfix16_En15 wire signed [15:0] add_cast_125; // sfix16_En15 wire signed [16:0] add_temp_62; // sfix17_En15 wire signed [15:0] add_cast_126; // sfix16_En15 wire signed [15:0] add_cast_127; // sfix16_En15 wire signed [16:0] add_temp_63; // sfix17_En15 wire signed [15:0] sum33_re; // sfix16_En15 wire signed [15:0] sum33_im; // sfix16_En15 wire signed [15:0] add_cast_128; // sfix16_En15 wire signed [15:0] add_cast_129; // sfix16_En15 wire signed [16:0] add_temp_64; // sfix17_En15 wire signed [15:0] add_cast_130; // sfix16_En15 wire signed [15:0] add_cast_131; // sfix16_En15 wire signed [16:0] add_temp_65; // sfix17_En15 wire signed [15:0] sum34_re; // sfix16_En15 wire signed [15:0] sum34_im; // sfix16_En15 wire signed [15:0] add_cast_132; // sfix16_En15 wire signed [15:0] add_cast_133; // sfix16_En15 wire signed [16:0] add_temp_66; // sfix17_En15 wire signed [15:0] add_cast_134; // sfix16_En15 wire signed [15:0] add_cast_135; // sfix16_En15 wire signed [16:0] add_temp_67; // sfix17_En15 wire signed [15:0] sum35_re; // sfix16_En15 wire signed [15:0] sum35_im; // sfix16_En15 wire signed [15:0] add_cast_136; // sfix16_En15 wire signed [15:0] add_cast_137; // sfix16_En15 wire signed [16:0] add_temp_68; // sfix17_En15 wire signed [15:0] add_cast_138; // sfix16_En15 wire signed [15:0] add_cast_139; // sfix16_En15 wire signed [16:0] add_temp_69; // sfix17_En15 wire signed [15:0] sum36_re; // sfix16_En15 wire signed [15:0] sum36_im; // sfix16_En15 wire signed [15:0] add_cast_140; // sfix16_En15 wire signed [15:0] add_cast_141; // sfix16_En15 wire signed [16:0] add_temp_70; // sfix17_En15 wire signed [15:0] add_cast_142; // sfix16_En15 wire signed [15:0] add_cast_143; // sfix16_En15 wire signed [16:0] add_temp_71; // sfix17_En15 wire signed [15:0] sum37_re; // sfix16_En15 wire signed [15:0] sum37_im; // sfix16_En15 wire signed [15:0] add_cast_144; // sfix16_En15 wire signed [15:0] add_cast_145; // 
  // sfix16_En15
  wire signed [16:0] add_temp_72; // sfix17_En15
  wire signed [15:0] add_cast_146; // sfix16_En15
  wire signed [15:0] add_cast_147; // sfix16_En15
  wire signed [16:0] add_temp_73; // sfix17_En15
  wire signed [15:0] sum38_re; // sfix16_En15
  wire signed [15:0] sum38_im; // sfix16_En15
  wire signed [15:0] add_cast_148; // sfix16_En15
  wire signed [15:0] add_cast_149; // sfix16_En15
  wire signed [16:0] add_temp_74; // sfix17_En15
  wire signed [15:0] add_cast_150; // sfix16_En15
  wire signed [15:0] add_cast_151; // sfix16_En15
  wire signed [16:0] add_temp_75; // sfix17_En15
  reg  signed [15:0] regout_re; // sfix16_En15
  reg  signed [15:0] regout_im; // sfix16_En15
  wire signed [15:0] muxout_re; // sfix16_En15
  wire signed [15:0] muxout_im; // sfix16_En15

  // Block Statements
  always @ (posedge clk or posedge reset)
    begin: ce_output
      if (reset == 1'b1) begin
        ring_count <= 1;
      end
      else begin
        if (enb_1_1_1 == 1'b1) begin
          ring_count <= {ring_count[0], ring_count[7 : 1]};
        end
      end
    end // ce_output

  assign phase_0 = ring_count[0] && enb_1_1_1;
  assign phase_1 = ring_count[1] && enb_1_1_1;
  assign phase_2 = ring_count[2] && enb_1_1_1;
  assign phase_3 = ring_count[3] && enb_1_1_1;
  assign phase_4 = ring_count[4] && enb_1_1_1;
  assign phase_5 = ring_count[5] && enb_1_1_1;
  assign phase_6 = ring_count[6] && enb_1_1_1;
  assign phase_7 = ring_count[7] && enb_1_1_1;

  assign input_typeconvert_re = FIR_Decimation_in_re;
  assign input_typeconvert_im = FIR_Decimation_in_im;

  always @( posedge clk or posedge reset)
    begin: Delay_Pipeline_Phase0_process
      if (reset == 1'b1) begin
        input_pipeline_phase0_re[0] <= 0;
        input_pipeline_phase0_re[1] <= 0;
        input_pipeline_phase0_re[2] <= 0;
        input_pipeline_phase0_re[3] <= 0;
        input_pipeline_phase0_re[4] <= 0;
        input_pipeline_phase0_re[5] <= 0;
        input_pipeline_phase0_im[0] <= 0;
        input_pipeline_phase0_im[1] <= 0;
        input_pipeline_phase0_im[2] <= 0;
        input_pipeline_phase0_im[3] <= 0;
        input_pipeline_phase0_im[4] <= 0;
        input_pipeline_phase0_im[5] <= 0;
      end
      else begin
        if (phase_0 == 1'b1) begin
          input_pipeline_phase0_re[0] <= input_typeconvert_re;
          input_pipeline_phase0_re[1] <= input_pipeline_phase0_re[0];
          input_pipeline_phase0_re[2] <= input_pipeline_phase0_re[1];
          input_pipeline_phase0_re[3] <= input_pipeline_phase0_re[2];
          input_pipeline_phase0_re[4] <= input_pipeline_phase0_re[3];
          input_pipeline_phase0_re[5] <= input_pipeline_phase0_re[4];
          input_pipeline_phase0_im[0] <= input_typeconvert_im;
          input_pipeline_phase0_im[1] <= input_pipeline_phase0_im[0];
          input_pipeline_phase0_im[2] <= input_pipeline_phase0_im[1];
          input_pipeline_phase0_im[3] <= input_pipeline_phase0_im[2];
          input_pipeline_phase0_im[4] <= input_pipeline_phase0_im[3];
          input_pipeline_phase0_im[5] <= input_pipeline_phase0_im[4];
        end
      end
    end // Delay_Pipeline_Phase0_process

  always @( posedge clk or posedge reset)
    begin: Delay_Pipeline_Phase1_process
      if (reset == 1'b1) begin
        input_pipeline_phase1_re[0] <= 0;
        input_pipeline_phase1_re[1] <= 0;
        input_pipeline_phase1_re[2] <= 0;
        input_pipeline_phase1_re[3] <= 0;
        input_pipeline_phase1_re[4] <= 0;
        input_pipeline_phase1_re[5] <= 0;
        input_pipeline_phase1_im[0] <= 0;
        input_pipeline_phase1_im[1] <= 0;
        input_pipeline_phase1_im[2] <= 0;
        input_pipeline_phase1_im[3] <= 0;
        input_pipeline_phase1_im[4] <= 0;
        input_pipeline_phase1_im[5] <= 0;
      end
      else begin
        if (phase_1 == 1'b1) begin
          input_pipeline_phase1_re[0] <= input_typeconvert_re;
          input_pipeline_phase1_re[1] <= input_pipeline_phase1_re[0];
          input_pipeline_phase1_re[2] <= input_pipeline_phase1_re[1];
          input_pipeline_phase1_re[3] <= input_pipeline_phase1_re[2];
          input_pipeline_phase1_re[4] <= input_pipeline_phase1_re[3];
          input_pipeline_phase1_re[5] <= input_pipeline_phase1_re[4];
          input_pipeline_phase1_im[0] <= input_typeconvert_im;
          input_pipeline_phase1_im[1] <= input_pipeline_phase1_im[0];
          input_pipeline_phase1_im[2] <= input_pipeline_phase1_im[1];
          input_pipeline_phase1_im[3] <= input_pipeline_phase1_im[2];
          input_pipeline_phase1_im[4] <= input_pipeline_phase1_im[3];
          input_pipeline_phase1_im[5] <= input_pipeline_phase1_im[4];
        end
      end
    end // Delay_Pipeline_Phase1_process

  always @( posedge clk or posedge reset)
    begin: Delay_Pipeline_Phase2_process
      if (reset == 1'b1) begin
        input_pipeline_phase2_re[0] <= 0;
        input_pipeline_phase2_re[1] <= 0;
        input_pipeline_phase2_re[2] <= 0;
        input_pipeline_phase2_re[3] <= 0;
        input_pipeline_phase2_im[0] <= 0;
        input_pipeline_phase2_im[1] <= 0;
        input_pipeline_phase2_im[2] <= 0;
        input_pipeline_phase2_im[3] <= 0;
      end
      else begin
        if (phase_2 == 1'b1) begin
          input_pipeline_phase2_re[0] <= input_typeconvert_re;
          input_pipeline_phase2_re[1] <= input_pipeline_phase2_re[0];
          input_pipeline_phase2_re[2] <= input_pipeline_phase2_re[1];
          input_pipeline_phase2_re[3] <= input_pipeline_phase2_re[2];
          input_pipeline_phase2_im[0] <= input_typeconvert_im;
          input_pipeline_phase2_im[1] <= input_pipeline_phase2_im[0];
          input_pipeline_phase2_im[2] <= input_pipeline_phase2_im[1];
          input_pipeline_phase2_im[3] <= input_pipeline_phase2_im[2];
        end
      end
    end // Delay_Pipeline_Phase2_process

  always @( posedge clk or posedge reset)
    begin: Delay_Pipeline_Phase3_process
      if (reset == 1'b1) begin
        input_pipeline_phase3_re[0] <= 0;
        input_pipeline_phase3_re[1] <= 0;
        input_pipeline_phase3_re[2] <= 0;
        input_pipeline_phase3_re[3] <= 0;
        input_pipeline_phase3_re[4] <= 0;
        input_pipeline_phase3_re[5] <= 0;
        input_pipeline_phase3_im[0] <= 0;
        input_pipeline_phase3_im[1] <= 0;
        input_pipeline_phase3_im[2] <= 0;
        input_pipeline_phase3_im[3] <= 0;
        input_pipeline_phase3_im[4] <= 0;
        input_pipeline_phase3_im[5] <= 0;
      end
      else begin
        if (phase_3 == 1'b1) begin
          input_pipeline_phase3_re[0] <= input_typeconvert_re;
          input_pipeline_phase3_re[1] <= input_pipeline_phase3_re[0];
          input_pipeline_phase3_re[2] <= input_pipeline_phase3_re[1];
          input_pipeline_phase3_re[3] <= input_pipeline_phase3_re[2];
          input_pipeline_phase3_re[4] <= input_pipeline_phase3_re[3];
          input_pipeline_phase3_re[5] <= input_pipeline_phase3_re[4];
          input_pipeline_phase3_im[0] <= input_typeconvert_im;
          input_pipeline_phase3_im[1] <= input_pipeline_phase3_im[0];
          input_pipeline_phase3_im[2] <= input_pipeline_phase3_im[1];
          input_pipeline_phase3_im[3] <= input_pipeline_phase3_im[2];
          input_pipeline_phase3_im[4] <= input_pipeline_phase3_im[3];
          input_pipeline_phase3_im[5] <= input_pipeline_phase3_im[4];
        end
      end
    end // Delay_Pipeline_Phase3_process

  always @( posedge clk or posedge reset)
    begin: Delay_Pipeline_Phase4_process
      if (reset == 1'b1) begin
        input_pipeline_phase4_re[0] <= 0;
        input_pipeline_phase4_re[1] <= 0;
        input_pipeline_phase4_re[2] <= 0;
        input_pipeline_phase4_re[3] <= 0;
        input_pipeline_phase4_re[4] <= 0;
        input_pipeline_phase4_re[5] <= 0;
        input_pipeline_phase4_im[0] <= 0;
        input_pipeline_phase4_im[1] <= 0;
        input_pipeline_phase4_im[2] <= 0;
        input_pipeline_phase4_im[3] <= 0;
        input_pipeline_phase4_im[4] <= 0;
        input_pipeline_phase4_im[5] <= 0;
      end
      else begin
        if (phase_4 == 1'b1) begin
          input_pipeline_phase4_re[0] <= input_typeconvert_re;
          input_pipeline_phase4_re[1] <= input_pipeline_phase4_re[0];
          input_pipeline_phase4_re[2] <= input_pipeline_phase4_re[1];
          input_pipeline_phase4_re[3] <= input_pipeline_phase4_re[2];
          input_pipeline_phase4_re[4] <= input_pipeline_phase4_re[3];
          input_pipeline_phase4_re[5] <= input_pipeline_phase4_re[4];
          input_pipeline_phase4_im[0] <= input_typeconvert_im;
          input_pipeline_phase4_im[1] <= input_pipeline_phase4_im[0];
          input_pipeline_phase4_im[2] <= input_pipeline_phase4_im[1];
          input_pipeline_phase4_im[3] <= input_pipeline_phase4_im[2];
          input_pipeline_phase4_im[4] <= input_pipeline_phase4_im[3];
          input_pipeline_phase4_im[5] <= input_pipeline_phase4_im[4];
        end
      end
    end // Delay_Pipeline_Phase4_process

  always @( posedge clk or posedge reset)
    begin: Delay_Pipeline_Phase5_process
      if (reset == 1'b1) begin
        input_pipeline_phase5_re[0] <= 0;
        input_pipeline_phase5_re[1] <= 0;
        input_pipeline_phase5_re[2] <= 0;
        input_pipeline_phase5_re[3] <= 0;
        input_pipeline_phase5_re[4] <= 0;
        input_pipeline_phase5_re[5] <= 0;
        input_pipeline_phase5_im[0] <= 0;
        input_pipeline_phase5_im[1] <= 0;
        input_pipeline_phase5_im[2] <= 0;
        input_pipeline_phase5_im[3] <= 0;
        input_pipeline_phase5_im[4] <= 0;
        input_pipeline_phase5_im[5] <= 0;
      end
      else begin
        if (phase_5 == 1'b1) begin
          input_pipeline_phase5_re[0] <= input_typeconvert_re;
          input_pipeline_phase5_re[1] <= input_pipeline_phase5_re[0];
          input_pipeline_phase5_re[2] <= input_pipeline_phase5_re[1];
          input_pipeline_phase5_re[3] <= input_pipeline_phase5_re[2];
          input_pipeline_phase5_re[4] <= input_pipeline_phase5_re[3];
          input_pipeline_phase5_re[5] <= input_pipeline_phase5_re[4];
          input_pipeline_phase5_im[0] <= input_typeconvert_im;
          input_pipeline_phase5_im[1] <= input_pipeline_phase5_im[0];
          input_pipeline_phase5_im[2] <= input_pipeline_phase5_im[1];
          input_pipeline_phase5_im[3] <= input_pipeline_phase5_im[2];
          input_pipeline_phase5_im[4] <= input_pipeline_phase5_im[3];
          input_pipeline_phase5_im[5] <= input_pipeline_phase5_im[4];
        end
      end
    end // Delay_Pipeline_Phase5_process

  always @( posedge clk or posedge reset)
    begin: Delay_Pipeline_Phase6_process
      if (reset == 1'b1) begin
        input_pipeline_phase6_re[0] <= 0;
        input_pipeline_phase6_re[1] <= 0;
        input_pipeline_phase6_re[2] <= 0;
        input_pipeline_phase6_im[0] <= 0;
        input_pipeline_phase6_im[1] <= 0;
        input_pipeline_phase6_im[2] <= 0;
      end
      else begin
        if (phase_6 == 1'b1) begin
          input_pipeline_phase6_re[0] <= input_typeconvert_re;
          input_pipeline_phase6_re[1] <= input_pipeline_phase6_re[0];
          input_pipeline_phase6_re[2] <= input_pipeline_phase6_re[1];
          input_pipeline_phase6_im[0] <= input_typeconvert_im;
          input_pipeline_phase6_im[1] <= input_pipeline_phase6_im[0];
          input_pipeline_phase6_im[2] <= input_pipeline_phase6_im[1];
        end
      end
    end // Delay_Pipeline_Phase6_process

  always @( posedge clk or posedge reset)
    begin: Delay_Pipeline_Phase7_process
      if (reset == 1'b1) begin
        input_pipeline_phase7_re[0] <= 0;
        input_pipeline_phase7_re[1] <= 0;
        input_pipeline_phase7_re[2] <= 0;
        input_pipeline_phase7_re[3] <= 0;
        input_pipeline_phase7_re[4] <= 0;
        input_pipeline_phase7_re[5] <= 0;
        input_pipeline_phase7_im[0] <= 0;
        input_pipeline_phase7_im[1] <= 0;
        input_pipeline_phase7_im[2] <= 0;
        input_pipeline_phase7_im[3] <= 0;
        input_pipeline_phase7_im[4] <= 0;
        input_pipeline_phase7_im[5] <= 0;
      end
      else begin
        if (phase_7 == 1'b1) begin
          input_pipeline_phase7_re[0] <= input_typeconvert_re;
          input_pipeline_phase7_re[1] <= input_pipeline_phase7_re[0];
          input_pipeline_phase7_re[2] <= input_pipeline_phase7_re[1];
          input_pipeline_phase7_re[3] <= input_pipeline_phase7_re[2];
          input_pipeline_phase7_re[4] <= input_pipeline_phase7_re[3];
          input_pipeline_phase7_re[5] <= input_pipeline_phase7_re[4];
          input_pipeline_phase7_im[0] <= input_typeconvert_im;
          input_pipeline_phase7_im[1] <= input_pipeline_phase7_im[0];
          input_pipeline_phase7_im[2] <= input_pipeline_phase7_im[1];
          input_pipeline_phase7_im[3] <= input_pipeline_phase7_im[2];
          input_pipeline_phase7_im[4] <= input_pipeline_phase7_im[3];
          input_pipeline_phase7_im[5] <= input_pipeline_phase7_im[4];
        end
      end
    end // Delay_Pipeline_Phase7_process

  assign mul_temp = input_typeconvert_re * coeffphase1_1;
  assign product_phase0_1_re = mul_temp[31:16];
  assign mul_temp_1 = input_typeconvert_im * coeffphase1_1;
  assign product_phase0_1_im = mul_temp_1[31:16];
  assign mul_temp_2 = input_pipeline_phase0_re[0] * coeffphase1_2;
  assign product_phase0_2_re = mul_temp_2[31:16];
  assign mul_temp_3 = input_pipeline_phase0_im[0] * coeffphase1_2;
  assign product_phase0_2_im = mul_temp_3[31:16];
  assign mul_temp_4 = input_pipeline_phase0_re[1] * coeffphase1_3;
  assign product_phase0_3_re = mul_temp_4[31:16];
  assign mul_temp_5 = input_pipeline_phase0_im[1] * coeffphase1_3;
  assign product_phase0_3_im = mul_temp_5[31:16];
  assign mul_temp_6 = input_pipeline_phase0_re[2] * coeffphase1_4;
  assign product_phase0_4_re = mul_temp_6[31:16];
  assign mul_temp_7 = input_pipeline_phase0_im[2] * coeffphase1_4;
  assign product_phase0_4_im = mul_temp_7[31:16];
  assign mul_temp_8 = input_pipeline_phase0_re[3] * coeffphase1_5;
  assign product_phase0_5_re = mul_temp_8[31:16];
  assign mul_temp_9 = input_pipeline_phase0_im[3] * coeffphase1_5;
  assign product_phase0_5_im = mul_temp_9[31:16];
  assign mul_temp_10 = input_pipeline_phase0_re[4] * coeffphase1_6;
  assign product_phase0_6_re = mul_temp_10[31:16];
  assign mul_temp_11 = input_pipeline_phase0_im[4] * coeffphase1_6;
  assign product_phase0_6_im = mul_temp_11[31:16];
  assign mul_temp_12 = input_pipeline_phase0_re[5] * coeffphase1_7;
  assign product_phase0_7_re = mul_temp_12[31:16];
  assign mul_temp_13 = input_pipeline_phase0_im[5] * coeffphase1_7;
  assign product_phase0_7_im = mul_temp_13[31:16];
  assign mul_temp_14 = input_pipeline_phase1_re[0] * coeffphase2_1;
  assign product_phase1_1_re = mul_temp_14[31:16];
  assign mul_temp_15 = input_pipeline_phase1_im[0] * coeffphase2_1;
  assign product_phase1_1_im = mul_temp_15[31:16];
  assign mul_temp_16 = input_pipeline_phase1_re[1] * coeffphase2_2;
  assign product_phase1_2_re = mul_temp_16[31:16];
  assign mul_temp_17 = input_pipeline_phase1_im[1] * coeffphase2_2;
  assign product_phase1_2_im = mul_temp_17[31:16];
  assign mul_temp_18 = input_pipeline_phase1_re[2] * coeffphase2_3;
  assign product_phase1_3_re = mul_temp_18[31:16];
  assign mul_temp_19 = input_pipeline_phase1_im[2] * coeffphase2_3;
  assign product_phase1_3_im = mul_temp_19[31:16];
  assign mul_temp_20 = input_pipeline_phase1_re[3] * coeffphase2_4;
  assign product_phase1_4_re = mul_temp_20[31:16];
  assign mul_temp_21 = input_pipeline_phase1_im[3] * coeffphase2_4;
  assign product_phase1_4_im = mul_temp_21[31:16];
  assign mul_temp_22 = input_pipeline_phase1_re[4] * coeffphase2_5;
  assign product_phase1_5_re = mul_temp_22[31:16];
  assign mul_temp_23 = input_pipeline_phase1_im[4] * coeffphase2_5;
  assign product_phase1_5_im = mul_temp_23[31:16];
  assign mul_temp_24 = input_pipeline_phase1_re[5] * coeffphase2_6;
  assign product_phase1_6_re = mul_temp_24[31:16];
  assign mul_temp_25 = input_pipeline_phase1_im[5] * coeffphase2_6;
  assign product_phase1_6_im = mul_temp_25[31:16];
  assign mul_temp_26 = input_pipeline_phase2_re[3] * coeffphase3_4;
  assign product_phase2_4_re = mul_temp_26[31:16];
  assign mul_temp_27 = input_pipeline_phase2_im[3] * coeffphase3_4;
  assign product_phase2_4_im = mul_temp_27[31:16];
  assign mul_temp_28 = input_pipeline_phase3_re[0] *
coeffphase4_1; assign product_phase3_1_re = mul_temp_28[31:16]; assign mul_temp_29 = input_pipeline_phase3_im[0] * coeffphase4_1; assign product_phase3_1_im = mul_temp_29[31:16]; assign mul_temp_30 = input_pipeline_phase3_re[1] * coeffphase4_2; assign product_phase3_2_re = mul_temp_30[31:16]; assign mul_temp_31 = input_pipeline_phase3_im[1] * coeffphase4_2; assign product_phase3_2_im = mul_temp_31[31:16]; assign mul_temp_32 = input_pipeline_phase3_re[2] * coeffphase4_3; assign product_phase3_3_re = mul_temp_32[31:16]; assign mul_temp_33 = input_pipeline_phase3_im[2] * coeffphase4_3; assign product_phase3_3_im = mul_temp_33[31:16]; assign mul_temp_34 = input_pipeline_phase3_re[3] * coeffphase4_4; assign product_phase3_4_re = mul_temp_34[31:16]; assign mul_temp_35 = input_pipeline_phase3_im[3] * coeffphase4_4; assign product_phase3_4_im = mul_temp_35[31:16]; assign mul_temp_36 = input_pipeline_phase3_re[4] * coeffphase4_5; assign product_phase3_5_re = mul_temp_36[31:16]; assign mul_temp_37 = input_pipeline_phase3_im[4] * coeffphase4_5; assign product_phase3_5_im = mul_temp_37[31:16]; assign mul_temp_38 = input_pipeline_phase3_re[5] * coeffphase4_6; assign product_phase3_6_re = mul_temp_38[31:16]; assign mul_temp_39 = input_pipeline_phase3_im[5] * coeffphase4_6; assign product_phase3_6_im = mul_temp_39[31:16]; assign mul_temp_40 = input_pipeline_phase4_re[0] * coeffphase5_1; assign product_phase4_1_re = mul_temp_40[31:16]; assign mul_temp_41 = input_pipeline_phase4_im[0] * coeffphase5_1; assign product_phase4_1_im = mul_temp_41[31:16]; assign mul_temp_42 = input_pipeline_phase4_re[1] * coeffphase5_2; assign product_phase4_2_re = mul_temp_42[31:16]; assign mul_temp_43 = input_pipeline_phase4_im[1] * coeffphase5_2; assign product_phase4_2_im = mul_temp_43[31:16]; assign mul_temp_44 = input_pipeline_phase4_re[2] * coeffphase5_3; assign product_phase4_3_re = mul_temp_44[31:16]; assign mul_temp_45 = input_pipeline_phase4_im[2] * coeffphase5_3; assign product_phase4_3_im = mul_temp_45[31:16]; assign mul_temp_46 = input_pipeline_phase4_re[3] * coeffphase5_4; assign product_phase4_4_re = mul_temp_46[31:16]; assign mul_temp_47 = input_pipeline_phase4_im[3] * coeffphase5_4; assign product_phase4_4_im = mul_temp_47[31:16]; assign mul_temp_48 = input_pipeline_phase4_re[4] * coeffphase5_5; assign product_phase4_5_re = mul_temp_48[31:16]; assign mul_temp_49 = input_pipeline_phase4_im[4] * coeffphase5_5; assign product_phase4_5_im = mul_temp_49[31:16]; assign mul_temp_50 = input_pipeline_phase4_re[5] * coeffphase5_6; assign product_phase4_6_re = mul_temp_50[31:16]; assign mul_temp_51 = input_pipeline_phase4_im[5] * coeffphase5_6; assign product_phase4_6_im = mul_temp_51[31:16]; assign mul_temp_52 = input_pipeline_phase5_re[0] * coeffphase6_1; assign product_phase5_1_re = mul_temp_52[31:16]; assign mul_temp_53 = input_pipeline_phase5_im[0] * coeffphase6_1; assign product_phase5_1_im = mul_temp_53[31:16]; assign mul_temp_54 = input_pipeline_phase5_re[1] * coeffphase6_2; assign product_phase5_2_re = mul_temp_54[31:16]; assign mul_temp_55 = input_pipeline_phase5_im[1] * coeffphase6_2; assign product_phase5_2_im = mul_temp_55[31:16]; assign mul_temp_56 = input_pipeline_phase5_re[2] * coeffphase6_3; assign product_phase5_3_re = mul_temp_56[31:16]; assign mul_temp_57 = input_pipeline_phase5_im[2] * coeffphase6_3; assign product_phase5_3_im = mul_temp_57[31:16]; assign mul_temp_58 = input_pipeline_phase5_re[3] * coeffphase6_4; assign product_phase5_4_re = mul_temp_58[31:16]; assign mul_temp_59 = 
input_pipeline_phase5_im[3] * coeffphase6_4; assign product_phase5_4_im = mul_temp_59[31:16]; assign mul_temp_60 = input_pipeline_phase5_re[4] * coeffphase6_5; assign product_phase5_5_re = mul_temp_60[31:16]; assign mul_temp_61 = input_pipeline_phase5_im[4] * coeffphase6_5; assign product_phase5_5_im = mul_temp_61[31:16]; assign mul_temp_62 = input_pipeline_phase5_re[5] * coeffphase6_6; assign product_phase5_6_re = mul_temp_62[31:16]; assign mul_temp_63 = input_pipeline_phase5_im[5] * coeffphase6_6; assign product_phase5_6_im = mul_temp_63[31:16]; assign mul_temp_64 = input_pipeline_phase6_re[2] * coeffphase7_3; assign product_phase6_3_re = mul_temp_64[31:16]; assign mul_temp_65 = input_pipeline_phase6_im[2] * coeffphase7_3; assign product_phase6_3_im = mul_temp_65[31:16]; assign mul_temp_66 = input_pipeline_phase7_re[0] * coeffphase8_1; assign product_phase7_1_re = mul_temp_66[31:16]; assign mul_temp_67 = input_pipeline_phase7_im[0] * coeffphase8_1; assign product_phase7_1_im = mul_temp_67[31:16]; assign mul_temp_68 = input_pipeline_phase7_re[1] * coeffphase8_2; assign product_phase7_2_re = mul_temp_68[31:16]; assign mul_temp_69 = input_pipeline_phase7_im[1] * coeffphase8_2; assign product_phase7_2_im = mul_temp_69[31:16]; assign mul_temp_70 = input_pipeline_phase7_re[2] * coeffphase8_3; assign product_phase7_3_re = mul_temp_70[31:16]; assign mul_temp_71 = input_pipeline_phase7_im[2] * coeffphase8_3; assign product_phase7_3_im = mul_temp_71[31:16]; assign mul_temp_72 = input_pipeline_phase7_re[3] * coeffphase8_4; assign product_phase7_4_re = mul_temp_72[31:16]; assign mul_temp_73 = input_pipeline_phase7_im[3] * coeffphase8_4; assign product_phase7_4_im = mul_temp_73[31:16]; assign mul_temp_74 = input_pipeline_phase7_re[4] * coeffphase8_5; assign product_phase7_5_re = mul_temp_74[31:16]; assign mul_temp_75 = input_pipeline_phase7_im[4] * coeffphase8_5; assign product_phase7_5_im = mul_temp_75[31:16]; assign mul_temp_76 = input_pipeline_phase7_re[5] * coeffphase8_6; assign product_phase7_6_re = mul_temp_76[31:16]; assign mul_temp_77 = input_pipeline_phase7_im[5] * coeffphase8_6; assign product_phase7_6_im = mul_temp_77[31:16]; assign quantized_sum_re = product_phase7_1_re; assign quantized_sum_im = product_phase7_1_im; assign add_cast = quantized_sum_re; assign add_cast_1 = product_phase7_2_re; assign add_temp = add_cast + add_cast_1; assign sum1_re = add_temp[15:0]; assign add_cast_2 = quantized_sum_im; assign add_cast_3 = product_phase7_2_im; assign add_temp_1 = add_cast_2 + add_cast_3; assign sum1_im = add_temp_1[15:0]; assign add_cast_4 = sum1_re; assign add_cast_5 = product_phase7_3_re; assign add_temp_2 = add_cast_4 + add_cast_5; assign sum2_re = add_temp_2[15:0]; assign add_cast_6 = sum1_im; assign add_cast_7 = product_phase7_3_im; assign add_temp_3 = add_cast_6 + add_cast_7; assign sum2_im = add_temp_3[15:0]; assign add_cast_8 = sum2_re; assign add_cast_9 = product_phase7_4_re; assign add_temp_4 = add_cast_8 + add_cast_9; assign sum3_re = add_temp_4[15:0]; assign add_cast_10 = sum2_im; assign add_cast_11 = product_phase7_4_im; assign add_temp_5 = add_cast_10 + add_cast_11; assign sum3_im = add_temp_5[15:0]; assign add_cast_12 = sum3_re; assign add_cast_13 = product_phase7_5_re; assign add_temp_6 = add_cast_12 + add_cast_13; assign sum4_re = add_temp_6[15:0]; assign add_cast_14 = sum3_im; assign add_cast_15 = product_phase7_5_im; assign add_temp_7 = add_cast_14 + add_cast_15; assign sum4_im = add_temp_7[15:0]; assign add_cast_16 = sum4_re; assign add_cast_17 = product_phase7_6_re; 
assign add_temp_8 = add_cast_16 + add_cast_17; assign sum5_re = add_temp_8[15:0]; assign add_cast_18 = sum4_im; assign add_cast_19 = product_phase7_6_im; assign add_temp_9 = add_cast_18 + add_cast_19; assign sum5_im = add_temp_9[15:0]; assign add_cast_20 = sum5_re; assign add_cast_21 = product_phase6_3_re; assign add_temp_10 = add_cast_20 + add_cast_21; assign sum6_re = add_temp_10[15:0]; assign add_cast_22 = sum5_im; assign add_cast_23 = product_phase6_3_im; assign add_temp_11 = add_cast_22 + add_cast_23; assign sum6_im = add_temp_11[15:0]; assign add_cast_24 = sum6_re; assign add_cast_25 = product_phase5_1_re; assign add_temp_12 = add_cast_24 + add_cast_25; assign sum7_re = add_temp_12[15:0]; assign add_cast_26 = sum6_im; assign add_cast_27 = product_phase5_1_im; assign add_temp_13 = add_cast_26 + add_cast_27; assign sum7_im = add_temp_13[15:0]; assign add_cast_28 = sum7_re; assign add_cast_29 = product_phase5_2_re; assign add_temp_14 = add_cast_28 + add_cast_29; assign sum8_re = add_temp_14[15:0]; assign add_cast_30 = sum7_im; assign add_cast_31 = product_phase5_2_im; assign add_temp_15 = add_cast_30 + add_cast_31; assign sum8_im = add_temp_15[15:0]; assign add_cast_32 = sum8_re; assign add_cast_33 = product_phase5_3_re; assign add_temp_16 = add_cast_32 + add_cast_33; assign sum9_re = add_temp_16[15:0]; assign add_cast_34 = sum8_im; assign add_cast_35 = product_phase5_3_im; assign add_temp_17 = add_cast_34 + add_cast_35; assign sum9_im = add_temp_17[15:0]; assign add_cast_36 = sum9_re; assign add_cast_37 = product_phase5_4_re; assign add_temp_18 = add_cast_36 + add_cast_37; assign sum10_re = add_temp_18[15:0]; assign add_cast_38 = sum9_im; assign add_cast_39 = product_phase5_4_im; assign add_temp_19 = add_cast_38 + add_cast_39; assign sum10_im = add_temp_19[15:0]; assign add_cast_40 = sum10_re; assign add_cast_41 = product_phase5_5_re; assign add_temp_20 = add_cast_40 + add_cast_41; assign sum11_re = add_temp_20[15:0]; assign add_cast_42 = sum10_im; assign add_cast_43 = product_phase5_5_im; assign add_temp_21 = add_cast_42 + add_cast_43; assign sum11_im = add_temp_21[15:0]; assign add_cast_44 = sum11_re; assign add_cast_45 = product_phase5_6_re; assign add_temp_22 = add_cast_44 + add_cast_45; assign sum12_re = add_temp_22[15:0]; assign add_cast_46 = sum11_im; assign add_cast_47 = product_phase5_6_im; assign add_temp_23 = add_cast_46 + add_cast_47; assign sum12_im = add_temp_23[15:0]; assign add_cast_48 = sum12_re; assign add_cast_49 = product_phase4_1_re; assign add_temp_24 = add_cast_48 + add_cast_49; assign sum13_re = add_temp_24[15:0]; assign add_cast_50 = sum12_im; assign add_cast_51 = product_phase4_1_im; assign add_temp_25 = add_cast_50 + add_cast_51; assign sum13_im = add_temp_25[15:0]; assign add_cast_52 = sum13_re; assign add_cast_53 = product_phase4_2_re; assign add_temp_26 = add_cast_52 + add_cast_53; assign sum14_re = add_temp_26[15:0]; assign add_cast_54 = sum13_im; assign add_cast_55 = product_phase4_2_im; assign add_temp_27 = add_cast_54 + add_cast_55; assign sum14_im = add_temp_27[15:0]; assign add_cast_56 = sum14_re; assign add_cast_57 = product_phase4_3_re; assign add_temp_28 = add_cast_56 + add_cast_57; assign sum15_re = add_temp_28[15:0]; assign add_cast_58 = sum14_im; assign add_cast_59 = product_phase4_3_im; assign add_temp_29 = add_cast_58 + add_cast_59; assign sum15_im = add_temp_29[15:0]; assign add_cast_60 = sum15_re; assign add_cast_61 = product_phase4_4_re; assign add_temp_30 = add_cast_60 + add_cast_61; assign sum16_re = add_temp_30[15:0]; assign add_cast_62 
= sum15_im; assign add_cast_63 = product_phase4_4_im; assign add_temp_31 = add_cast_62 + add_cast_63; assign sum16_im = add_temp_31[15:0]; assign add_cast_64 = sum16_re; assign add_cast_65 = product_phase4_5_re; assign add_temp_32 = add_cast_64 + add_cast_65; assign sum17_re = add_temp_32[15:0]; assign add_cast_66 = sum16_im; assign add_cast_67 = product_phase4_5_im; assign add_temp_33 = add_cast_66 + add_cast_67; assign sum17_im = add_temp_33[15:0]; assign add_cast_68 = sum17_re; assign add_cast_69 = product_phase4_6_re; assign add_temp_34 = add_cast_68 + add_cast_69; assign sum18_re = add_temp_34[15:0]; assign add_cast_70 = sum17_im; assign add_cast_71 = product_phase4_6_im; assign add_temp_35 = add_cast_70 + add_cast_71; assign sum18_im = add_temp_35[15:0]; assign add_cast_72 = sum18_re; assign add_cast_73 = product_phase3_1_re; assign add_temp_36 = add_cast_72 + add_cast_73; assign sum19_re = add_temp_36[15:0]; assign add_cast_74 = sum18_im; assign add_cast_75 = product_phase3_1_im; assign add_temp_37 = add_cast_74 + add_cast_75; assign sum19_im = add_temp_37[15:0]; assign add_cast_76 = sum19_re; assign add_cast_77 = product_phase3_2_re; assign add_temp_38 = add_cast_76 + add_cast_77; assign sum20_re = add_temp_38[15:0]; assign add_cast_78 = sum19_im; assign add_cast_79 = product_phase3_2_im; assign add_temp_39 = add_cast_78 + add_cast_79; assign sum20_im = add_temp_39[15:0]; assign add_cast_80 = sum20_re; assign add_cast_81 = product_phase3_3_re; assign add_temp_40 = add_cast_80 + add_cast_81; assign sum21_re = add_temp_40[15:0]; assign add_cast_82 = sum20_im; assign add_cast_83 = product_phase3_3_im; assign add_temp_41 = add_cast_82 + add_cast_83; assign sum21_im = add_temp_41[15:0]; assign add_cast_84 = sum21_re; assign add_cast_85 = product_phase3_4_re; assign add_temp_42 = add_cast_84 + add_cast_85; assign sum22_re = add_temp_42[15:0]; assign add_cast_86 = sum21_im; assign add_cast_87 = product_phase3_4_im; assign add_temp_43 = add_cast_86 + add_cast_87; assign sum22_im = add_temp_43[15:0]; assign add_cast_88 = sum22_re; assign add_cast_89 = product_phase3_5_re; assign add_temp_44 = add_cast_88 + add_cast_89; assign sum23_re = add_temp_44[15:0]; assign add_cast_90 = sum22_im; assign add_cast_91 = product_phase3_5_im; assign add_temp_45 = add_cast_90 + add_cast_91; assign sum23_im = add_temp_45[15:0]; assign add_cast_92 = sum23_re; assign add_cast_93 = product_phase3_6_re; assign add_temp_46 = add_cast_92 + add_cast_93; assign sum24_re = add_temp_46[15:0]; assign add_cast_94 = sum23_im; assign add_cast_95 = product_phase3_6_im; assign add_temp_47 = add_cast_94 + add_cast_95; assign sum24_im = add_temp_47[15:0]; assign add_cast_96 = sum24_re; assign add_cast_97 = product_phase2_4_re; assign add_temp_48 = add_cast_96 + add_cast_97; assign sum25_re = add_temp_48[15:0]; assign add_cast_98 = sum24_im; assign add_cast_99 = product_phase2_4_im; assign add_temp_49 = add_cast_98 + add_cast_99; assign sum25_im = add_temp_49[15:0]; assign add_cast_100 = sum25_re; assign add_cast_101 = product_phase1_1_re; assign add_temp_50 = add_cast_100 + add_cast_101; assign sum26_re = add_temp_50[15:0]; assign add_cast_102 = sum25_im; assign add_cast_103 = product_phase1_1_im; assign add_temp_51 = add_cast_102 + add_cast_103; assign sum26_im = add_temp_51[15:0]; assign add_cast_104 = sum26_re; assign add_cast_105 = product_phase1_2_re; assign add_temp_52 = add_cast_104 + add_cast_105; assign sum27_re = add_temp_52[15:0]; assign add_cast_106 = sum26_im; assign add_cast_107 = product_phase1_2_im; assign 
add_temp_53 = add_cast_106 + add_cast_107; assign sum27_im = add_temp_53[15:0]; assign add_cast_108 = sum27_re; assign add_cast_109 = product_phase1_3_re; assign add_temp_54 = add_cast_108 + add_cast_109; assign sum28_re = add_temp_54[15:0]; assign add_cast_110 = sum27_im; assign add_cast_111 = product_phase1_3_im; assign add_temp_55 = add_cast_110 + add_cast_111; assign sum28_im = add_temp_55[15:0]; assign add_cast_112 = sum28_re; assign add_cast_113 = product_phase1_4_re; assign add_temp_56 = add_cast_112 + add_cast_113; assign sum29_re = add_temp_56[15:0]; assign add_cast_114 = sum28_im; assign add_cast_115 = product_phase1_4_im; assign add_temp_57 = add_cast_114 + add_cast_115; assign sum29_im = add_temp_57[15:0]; assign add_cast_116 = sum29_re; assign add_cast_117 = product_phase1_5_re; assign add_temp_58 = add_cast_116 + add_cast_117; assign sum30_re = add_temp_58[15:0]; assign add_cast_118 = sum29_im; assign add_cast_119 = product_phase1_5_im; assign add_temp_59 = add_cast_118 + add_cast_119; assign sum30_im = add_temp_59[15:0]; assign add_cast_120 = sum30_re; assign add_cast_121 = product_phase1_6_re; assign add_temp_60 = add_cast_120 + add_cast_121; assign sum31_re = add_temp_60[15:0]; assign add_cast_122 = sum30_im; assign add_cast_123 = product_phase1_6_im; assign add_temp_61 = add_cast_122 + add_cast_123; assign sum31_im = add_temp_61[15:0]; assign add_cast_124 = sum31_re; assign add_cast_125 = product_phase0_1_re; assign add_temp_62 = add_cast_124 + add_cast_125; assign sum32_re = add_temp_62[15:0]; assign add_cast_126 = sum31_im; assign add_cast_127 = product_phase0_1_im; assign add_temp_63 = add_cast_126 + add_cast_127; assign sum32_im = add_temp_63[15:0]; assign add_cast_128 = sum32_re; assign add_cast_129 = product_phase0_2_re; assign add_temp_64 = add_cast_128 + add_cast_129; assign sum33_re = add_temp_64[15:0]; assign add_cast_130 = sum32_im; assign add_cast_131 = product_phase0_2_im; assign add_temp_65 = add_cast_130 + add_cast_131; assign sum33_im = add_temp_65[15:0]; assign add_cast_132 = sum33_re; assign add_cast_133 = product_phase0_3_re; assign add_temp_66 = add_cast_132 + add_cast_133; assign sum34_re = add_temp_66[15:0]; assign add_cast_134 = sum33_im; assign add_cast_135 = product_phase0_3_im; assign add_temp_67 = add_cast_134 + add_cast_135; assign sum34_im = add_temp_67[15:0]; assign add_cast_136 = sum34_re; assign add_cast_137 = product_phase0_4_re; assign add_temp_68 = add_cast_136 + add_cast_137; assign sum35_re = add_temp_68[15:0]; assign add_cast_138 = sum34_im; assign add_cast_139 = product_phase0_4_im; assign add_temp_69 = add_cast_138 + add_cast_139; assign sum35_im = add_temp_69[15:0]; assign add_cast_140 = sum35_re; assign add_cast_141 = product_phase0_5_re; assign add_temp_70 = add_cast_140 + add_cast_141; assign sum36_re = add_temp_70[15:0]; assign add_cast_142 = sum35_im; assign add_cast_143 = product_phase0_5_im; assign add_temp_71 = add_cast_142 + add_cast_143; assign sum36_im = add_temp_71[15:0]; assign add_cast_144 = sum36_re; assign add_cast_145 = product_phase0_6_re; assign add_temp_72 = add_cast_144 + add_cast_145; assign sum37_re = add_temp_72[15:0]; assign add_cast_146 = sum36_im; assign add_cast_147 = product_phase0_6_im; assign add_temp_73 = add_cast_146 + add_cast_147; assign sum37_im = add_temp_73[15:0]; assign add_cast_148 = sum37_re; assign add_cast_149 = product_phase0_7_re; assign add_temp_74 = add_cast_148 + add_cast_149; assign sum38_re = add_temp_74[15:0]; assign add_cast_150 = sum37_im; assign add_cast_151 = 
                       product_phase0_7_im;
  assign add_temp_75 = add_cast_150 + add_cast_151;
  assign sum38_im = add_temp_75[15:0];

  always @ (posedge clk or posedge reset)
    begin: DataHoldRegister_process
      if (reset == 1'b1) begin
        regout_re <= 0;
        regout_im <= 0;
      end
      else begin
        if (phase_0 == 1'b1) begin
          regout_re <= sum38_re;
          regout_im <= sum38_im;
        end
      end
    end // DataHoldRegister_process

  assign muxout_re = (phase_0 == 1'b1) ? sum38_re : regout_re;
  assign muxout_im = (phase_0 == 1'b1) ? sum38_im : regout_im;

  // Assignment Statements
  assign FIR_Decimation_out_re = muxout_re;
  assign FIR_Decimation_out_im = muxout_im;

endmodule  // FIR_Decimation
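
// ------------------------------------------------------------
//
// Module: FIR_Decimation_tb (illustrative sketch only)
//
// This testbench is NOT part of the HDL Coder generated output.
// It is a minimal, hand-written sketch of how the decimator above
// could be exercised in simulation: drive clk and the asynchronous
// reset, hold enb_1_1_1 high, and apply one complex sample per
// clock. The eight-state ring counter walks through phase_0..7, so
// the held output updates once per eight input samples. The clock
// period, stimulus values, and all local signal names below are
// assumptions made for this example.
//
// ------------------------------------------------------------
`timescale 1 ns / 1 ns

module FIR_Decimation_tb;

  reg clk;
  reg reset;
  reg enb_1_1_1;
  reg  signed [15:0] in_re;   // sfix16_En15 stimulus (assumed)
  reg  signed [15:0] in_im;
  wire signed [15:0] out_re;
  wire signed [15:0] out_im;

  // Device under test: the generated polyphase decimator above.
  FIR_Decimation dut (
    .clk                   (clk),
    .enb_1_1_1             (enb_1_1_1),
    .reset                 (reset),
    .FIR_Decimation_in_re  (in_re),
    .FIR_Decimation_in_im  (in_im),
    .FIR_Decimation_out_re (out_re),
    .FIR_Decimation_out_im (out_im)
  );

  // Free-running clock with an assumed 10 ns period.
  initial clk = 1'b0;
  always #5 clk = ~clk;

  initial begin
    // Hold the asynchronous reset for a couple of clock edges.
    reset     = 1'b1;
    enb_1_1_1 = 1'b0;
    in_re     = 16'sd0;
    in_im     = 16'sd0;
    #20;
    reset     = 1'b0;
    enb_1_1_1 = 1'b1;

    // Apply an impulse-like sample (0.5 in sfix16_En15) on the real
    // channel, then zeros; the output should trace the filter's
    // response at the decimated rate.
    @(posedge clk);
    in_re = 16'sd16384;
    @(posedge clk);
    in_re = 16'sd0;

    // Let the ring counter cycle through all eight phases a few times.
    repeat (64) @(posedge clk);
    $finish;
  end

endmodule // FIR_Decimation_tb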