[x265] [PATCH] partialButterfy32 code cleanup

Steve Borho steve at borho.org
Sat Jul 6 21:33:39 CEST 2013


On Fri, Jul 5, 2013 at 5:13 AM, <praveen at multicorewareinc.com> wrote:

> # HG changeset patch
> # User praveentiwari
> # Date 1373019211 -19800
> # Node ID 3fb2694d307d8f236b9d62f1c99eed41c27a10a1
> # Parent  1d651532cb8a4e02f3a48f08171817074f22f767
> partialButterfy32 code cleanup
>
> diff -r 1d651532cb8a -r 3fb2694d307d source/common/vec/dct.inc
> --- a/source/common/vec/dct.inc Fri Jul 05 15:29:50 2013 +0530
> +++ b/source/common/vec/dct.inc Fri Jul 05 15:43:31 2013 +0530
> @@ -1270,112 +1270,112 @@
>      int j;
>      int add = 1 << (shift - 1);
>
> -    Vec4i g_aiT_zero_row_first_two(64, 64, 0, 0);
>


So here we had g_aiT, where "g_ai" is short for "global array of
integers".  These are local variables now, not globals, so I think you
want to simply throw away the first 6 chars (the "g_aiT_" prefix) and
rename these to "zero_row_first_two", etc.

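To make that concrete, here is a rough, untested sketch of what the first
pair of constants and their use would look like with the prefix dropped
(the names are just the originals minus "g_aiT_"):

    Vec4i zero_row_first_two(64, 64, 0, 0);     // was g_aiT_zero_row_first_two
    Vec4i eight_row_first_two(83, 36, 0, 0);    // was g_aiT_eight_row_first_two

    int dst0_hresult = (horizontal_add(zero_row_first_two * EEEE) + add) >> shift;
    int dst8_hresult = (horizontal_add(eight_row_first_two * EEEO) + add) >> shift;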


> -    Vec4i g_aiT_eight_row_first_two(83, 36, 0, 0);
> -    Vec4i g_aiT_sixten_row_first_two(64, -64, 0, 0);
> -    Vec4i g_aiT_twentyfour_row_first_two(36, -83, 0, 0);
> -
> -    Vec4i g_aiT_four_row_first_four(89, 75, 50, 18);
> -    Vec4i g_aiT_twelve_row_first_four(75, -18, -89, -50);
> -    Vec4i g_aiT_twenty_row_first_four(50, -89, 18, 75);
> -    Vec4i g_aiT_twentyeight_row_first_four(18, -50, 75, -89);
> -
> -    Vec4i g_aiT_two_row_first_four(90, 87, 80, 70);
> -    Vec4i g_aiT_two_row_second_four(57, 43, 25,  9);
> -    Vec4i g_aiT_six_row_first_four(87, 57,  9, -43);
> -    Vec4i g_aiT_six_row_second_four(-80, -90, -70, -25);
> -    Vec4i g_aiT_ten_row_first_four(80,  9, -70, -87);
> -    Vec4i g_aiT_ten_row_second_four(-25, 57, 90, 43);
> -    Vec4i g_aiT_fourteen_row_first_four(70, -43, -87,  9);
> -    Vec4i g_aiT_fourteen_row_second_four(90, 25, -80, -57);
> -    Vec4i g_aiT_eighteen_row_first_four(57, -80, -25, 90);
> -    Vec4i g_aiT_eighteen_row_second_four(-9, -87, 43, 70);
> -    Vec4i g_aiT_twentytwo_row_first_four(43, -90, 57, 25);
> -    Vec4i g_aiT_twentytwo_row_second_four(-87, 70,  9, -80);
> -    Vec4i g_aiT_twentysix_row_first_four(25, -70, 90, -80);
> -    Vec4i g_aiT_twentysix_row_second_four(43,  9, -57, 87);
> -    Vec4i g_aiT_thirty_row_first_four(9, -25, 43, -57);
> -    Vec4i g_aiT_thirty_row_second_four(70, -80, 87, -90);
> -
> -    Vec4i g_aiT_one_row_first_four(90, 90, 88, 85);
> -    Vec4i g_aiT_one_row_second_four(82, 78, 73, 67);
> -    Vec4i g_aiT_one_row_third_four(61, 54, 46, 38);
> -    Vec4i g_aiT_one_row_fourth_four(31, 22, 13,  4);
> -
> -    Vec4i g_aiT_three_row_first_four(90, 82, 67, 46);
> -    Vec4i g_aiT_three_row_second_four(22, -4, -31, -54);
> -    Vec4i g_aiT_three_row_third_four(-73, -85, -90, -88);
> -    Vec4i g_aiT_three_row_fourth_four(-78, -61, -38, -13);
> -
> -    Vec4i g_aiT_five_row_first_four(88, 67, 31, -13);
> -    Vec4i g_aiT_five_row_second_four(-54, -82, -90, -78);
> -    Vec4i g_aiT_five_row_third_four(-46, -4, 38, 73);
> -    Vec4i g_aiT_five_row_fourth_four(90, 85, 61, 22);
> -
> -    Vec4i g_aiT_seven_row_first_four(85, 46, -13, -67);
> -    Vec4i g_aiT_seven_row_second_four(-90, -73, -22, 38);
> -    Vec4i g_aiT_seven_row_third_four(82, 88, 54, -4);
> -    Vec4i g_aiT_seven_row_fourth_four(-61, -90, -78, -31);
> -
> -    Vec4i g_aiT_nine_row_first_four(82, 22, -54, -90);
> -    Vec4i g_aiT_nine_row_second_four(-61, 13, 78, 85);
> -    Vec4i g_aiT_nine_row_third_four(31, -46, -90, -67);
> -    Vec4i g_aiT_nine_row_fourth_four(4, 73, 88, 38);
> -
> -    Vec4i g_aiT_eleven_row_first_four(78, -4, -82, -73);
> -    Vec4i g_aiT_eleven_row_second_four(13, 85, 67, -22);
> -    Vec4i g_aiT_eleven_row_third_four(-88, -61, 31, 90);
> -    Vec4i g_aiT_eleven_row_fourth_four(54, -38, -90, -46);
> -
> -    Vec4i g_aiT_thirteen_row_first_four(73, -31, -90, -22);
> -    Vec4i g_aiT_thirteen_row_second_four(78, 67, -38, -90);
> -    Vec4i g_aiT_thirteen_row_third_four(-13, 82, 61, -46);
> -    Vec4i g_aiT_thirteen_row_fourth_four(-88, -4, 85, 54);
> -
> -    Vec4i g_aiT_fifteen_row_first_four(67, -54, -78, 38);
> -    Vec4i g_aiT_fifteen_row_second_four(85, -22, -90,  4);
> -    Vec4i g_aiT_fifteen_row_third_four(90, 13, -88, -31);
> -    Vec4i g_aiT_fifteen_row_fourth_four(82, 46, -73, -61);
> -
> -    Vec4i g_aiT_seventeen_row_first_four(61, -73, -46, 82);
> -    Vec4i g_aiT_seventeen_row_second_four(31, -88, -13, 90);
> -    Vec4i g_aiT_seventeen_row_third_four(-4, -90, 22, 85);
> -    Vec4i g_aiT_seventeen_row_fourth_four(-38, -78, 54, 67);
> -
> -    Vec4i g_aiT_nineteen_row_first_four(54, -85, -4, 88);
> -    Vec4i g_aiT_nineteen_row_second_four(-46, -61, 82, 13);
> -    Vec4i g_aiT_nineteen_row_third_four(-90, 38, 67, -78);
> -    Vec4i g_aiT_nineteen_row_fourth_four(-22, 90, -31, -73);
> -
> -    Vec4i g_aiT_twentyone_row_first_four(46, -90, 38, 54);
> -    Vec4i g_aiT_twentyone_row_second_four(-90, 31, 61, -88);
> -    Vec4i g_aiT_twentyone_row_third_four(22, 67, -85, 13);
> -    Vec4i g_aiT_twentyone_row_fourth_four(73, -82,  4, 78);
> -
> -    Vec4i g_aiT_twentythree_row_first_four(38, -88, 73, -4);
> -    Vec4i g_aiT_twentythree_row_second_four(-67, 90, -46, -31);
> -    Vec4i g_aiT_twentythree_row_third_four(85, -78, 13, 61);
> -    Vec4i g_aiT_twentythree_row_fourth_four(-90, 54, 22, -82);
> -
> -    Vec4i g_aiT_twentyfive_row_first_four(31, -78, 90, -61);
> -    Vec4i g_aiT_twentyfive_row_second_four(4, 54, -88, 82);
> -    Vec4i g_aiT_twentyfive_row_third_four(-38, -22, 73, -90);
> -    Vec4i g_aiT_twentyfive_row_fourth_four(67, -13, -46, 85);
> -
> -    Vec4i g_aiT_twentyseven_row_first_four(22, -61, 85, -90);
> -    Vec4i g_aiT_twentyseven_row_second_four(73, -38, -4, 46);
> -    Vec4i g_aiT_twentyseven_row_third_four(-78, 90, -82, 54);
> -    Vec4i g_aiT_twentyseven_row_fourth_four(-13, -31, 67, -88);
> -
> -    Vec4i g_aiT_twentynine_row_first_four(13, -38, 61, -78);
> -    Vec4i g_aiT_twentynine_row_second_four(88, -90, 85, -73);
> -    Vec4i g_aiT_twentynine_row_third_four(54, -31,  4, 22);
> -    Vec4i g_aiT_twentynine_row_fourth_four(-46, 67, -82, 90);
> -
> -    Vec4i g_aiT_thirtyone_row_first_four(4, -13, 22, -31);
> -    Vec4i g_aiT_thirtyone_row_second_four(38, -46, 54, -61);
> -    Vec4i g_aiT_thirtyone_row_third_four(67, -73, 78, -82);
> -    Vec4i g_aiT_thirtyone_row_fourth_four(85, -88, 90, -90);
> +    Vec4i gaiTRow0_n0_n3(64, 64, 0, 0);
> +    Vec4i gaiTRow8_n0_n3(83, 36, 0, 0);
> +    Vec4i gaiTRow16_n0_n3(64, -64, 0, 0);
> +    Vec4i gaiTRow24_n0_n3(36, -83, 0, 0);
> +
> +    Vec4i gaiTRow4_n0_n3(89, 75, 50, 18);
> +    Vec4i gaiTRow12_n0_n3(75, -18, -89, -50);
> +    Vec4i gaiTRow20_n0_n3(50, -89, 18, 75);
> +    Vec4i gaiTRow28_n0_n3(18, -50, 75, -89);
> +
> +    Vec4i gaiTRow2_n0_n3(90, 87, 80, 70);
> +    Vec4i gaiTRow2_n4_n7(57, 43, 25,  9);
> +    Vec4i gaiTRow6_n0_n3(87, 57,  9, -43);
> +    Vec4i gaiTRow6_n4_n7(-80, -90, -70, -25);
> +    Vec4i gaiTRow10_n0_n3(80,  9, -70, -87);
> +    Vec4i gaiTRow10_n4_n7(-25, 57, 90, 43);
> +    Vec4i gaiTRow14_n0_n3(70, -43, -87,  9);
> +    Vec4i gaiTRow14_n4_n7(90, 25, -80, -57);
> +    Vec4i gaiTRow18_n0_n3(57, -80, -25, 90);
> +    Vec4i gaiTRow18_n4_n7(-9, -87, 43, 70);
> +    Vec4i gaiTRow22_n0_n3(43, -90, 57, 25);
> +    Vec4i gaiTRow22_n4_n7(-87, 70,  9, -80);
> +    Vec4i gaiTRow26_n0_n3(25, -70, 90, -80);
> +    Vec4i gaiTRow26_n4_n7(43,  9, -57, 87);
> +    Vec4i gaiTRow30_n0_n3(9, -25, 43, -57);
> +    Vec4i gaiTRow30_n4_n7(70, -80, 87, -90);
> +
> +    Vec4i gaiTRow1_n0_n3(90, 90, 88, 85);
> +    Vec4i gaiTRow1_n4_n7(82, 78, 73, 67);
> +    Vec4i gaiTRow1_n8_n11(61, 54, 46, 38);
> +    Vec4i gaiTRow1_n12_n15(31, 22, 13,  4);
> +
> +    Vec4i gaiTRow3_n0_n3(90, 82, 67, 46);
> +    Vec4i gaiTRow3_n4_n7(22, -4, -31, -54);
> +    Vec4i gaiTRow3_n8_n11(-73, -85, -90, -88);
> +    Vec4i gaiTRow3_n12_n15(-78, -61, -38, -13);
> +
> +    Vec4i gaiTRow5_n0_n3(88, 67, 31, -13);
> +    Vec4i gaiTRow5_n4_n7(-54, -82, -90, -78);
> +    Vec4i gaiTRow5_n8_n11(-46, -4, 38, 73);
> +    Vec4i gaiTRow5_n12_n15(90, 85, 61, 22);
> +
> +    Vec4i gaiTRow7_n0_n3(85, 46, -13, -67);
> +    Vec4i gaiTRow7_n4_n7(-90, -73, -22, 38);
> +    Vec4i gaiTRow7_n8_n11(82, 88, 54, -4);
> +    Vec4i gaiTRow7_n12_n15(-61, -90, -78, -31);
> +
> +    Vec4i gaiT9_n0_n3(82, 22, -54, -90);
> +    Vec4i gaiT9_n4_n7(-61, 13, 78, 85);
> +    Vec4i gaiT9_n8_n11(31, -46, -90, -67);
> +    Vec4i gaiT9_n12_n15(4, 73, 88, 38);
> +
> +    Vec4i gaiT11_n0_n3(78, -4, -82, -73);
> +    Vec4i gaiT11_n4_n7(13, 85, 67, -22);
> +    Vec4i gaiT11_n8_n11(-88, -61, 31, 90);
> +    Vec4i gaiT11_n12_n15(54, -38, -90, -46);
> +
> +    Vec4i gaiT13_n0_n3(73, -31, -90, -22);
> +    Vec4i gaiT13_n4_n7(78, 67, -38, -90);
> +    Vec4i gaiT13_n8_n11(-13, 82, 61, -46);
> +    Vec4i gaiT13_n12_n15(-88, -4, 85, 54);
> +
> +    Vec4i gaiT15_n0_n3(67, -54, -78, 38);
> +    Vec4i gaiT15_n4_n7(85, -22, -90,  4);
> +    Vec4i gaiT15_n8_n11(90, 13, -88, -31);
> +    Vec4i gaiT15_n12_n15(82, 46, -73, -61);
> +
> +    Vec4i gaiT17_n0_n3(61, -73, -46, 82);
> +    Vec4i gaiT17_n4_n7(31, -88, -13, 90);
> +    Vec4i gaiT17_n8_n11(-4, -90, 22, 85);
> +    Vec4i gaiT17_n12_n15(-38, -78, 54, 67);
> +
> +    Vec4i gaiT19_n0_n3(54, -85, -4, 88);
> +    Vec4i gaiT19_n4_n7(-46, -61, 82, 13);
> +    Vec4i gaiT19_n8_n11(-90, 38, 67, -78);
> +    Vec4i gaiT19_n12_n15(-22, 90, -31, -73);
> +
> +    Vec4i gaiT21_n0_n3(46, -90, 38, 54);
> +    Vec4i gaiT21_n4_n7(-90, 31, 61, -88);
> +    Vec4i gaiT21_n8_n11(22, 67, -85, 13);
> +    Vec4i gaiT21_n12_n15(73, -82,  4, 78);
> +
> +    Vec4i gaiT23_n0_n3(38, -88, 73, -4);
> +    Vec4i gaiT23_n4_n7(-67, 90, -46, -31);
> +    Vec4i gaiT23_n8_n11(85, -78, 13, 61);
> +    Vec4i gaiT23_n12_n15(-90, 54, 22, -82);
> +
> +    Vec4i gaiT25_n0_n3(31, -78, 90, -61);
> +    Vec4i gaiT25_n4_n7(4, 54, -88, 82);
> +    Vec4i gaiT25_n8_n11(-38, -22, 73, -90);
> +    Vec4i gaiT25_n12_n15(67, -13, -46, 85);
> +
> +    Vec4i gaiT27_n0_n3(22, -61, 85, -90);
> +    Vec4i gaiT27_n4_n7(73, -38, -4, 46);
> +    Vec4i gaiT27_n8_n11(-78, 90, -82, 54);
> +    Vec4i gaiT27_n12_n15(-13, -31, 67, -88);
> +
> +    Vec4i gaiT29_n0_n3(13, -38, 61, -78);
> +    Vec4i gaiT29_n4_n7(88, -90, 85, -73);
> +    Vec4i gaiT29_n8_n11(54, -31,  4, 22);
> +    Vec4i gaiT29_n12_n15(-46, 67, -82, 90);
> +
> +    Vec4i gaiT31_n0_n3(4, -13, 22, -31);
> +    Vec4i gaiT31_n4_n7(38, -46, 54, -61);
> +    Vec4i gaiT31_n8_n11(67, -73, 78, -82);
> +    Vec4i gaiT31_n12_n15(85, -88, 90, -90);
>
>      for (j = 0; j < line; j++)
>      {
> @@ -1429,20 +1429,20 @@
>          Vec4i EEEE = EEEE_first_half + EEEE_second_half;
>          Vec4i EEEO = EEEE_first_half - EEEE_second_half;
>
> -        int dst0_hresult = (horizontal_add(g_aiT_zero_row_first_two * EEEE) + add) >> shift;
> -        int dst8_hresult = (horizontal_add(g_aiT_eight_row_first_two * EEEO) + add) >> shift;
> -        int dst16_hresult = (horizontal_add(g_aiT_sixten_row_first_two * EEEE) + add) >> shift;
> -        int dst24_hresult = (horizontal_add(g_aiT_twentyfour_row_first_two * EEEO) + add) >> shift;
> +        int dst0_hresult = (horizontal_add(gaiTRow0_n0_n3 * EEEE) + add) >> shift;
> +        int dst8_hresult = (horizontal_add(gaiTRow8_n0_n3 * EEEO) + add) >> shift;
> +        int dst16_hresult = (horizontal_add(gaiTRow16_n0_n3 * EEEE) + add) >> shift;
> +        int dst24_hresult = (horizontal_add(gaiTRow24_n0_n3 * EEEO) + add) >> shift;
>
>          dst[0] = dst0_hresult;
>          dst[8 * line] = dst8_hresult;
>          dst[16 * line] = dst16_hresult;
>          dst[24 * line] = dst24_hresult;
>
> -        int dst4_hresult = (horizontal_add(g_aiT_four_row_first_four * EEO) + add) >> shift;
> -        int dst12_hresult = (horizontal_add(g_aiT_twelve_row_first_four * EEO) + add) >> shift;
> -        int dst20_hresult = (horizontal_add(g_aiT_twenty_row_first_four * EEO) + add) >> shift;
> -        int dst28_hresult = (horizontal_add(g_aiT_twentyeight_row_first_four * EEO) + add) >> shift;
> +        int dst4_hresult = (horizontal_add(gaiTRow4_n0_n3 * EEO) + add) >> shift;
> +        int dst12_hresult = (horizontal_add(gaiTRow12_n0_n3 * EEO) + add) >> shift;
> +        int dst20_hresult = (horizontal_add(gaiTRow20_n0_n3 * EEO) + add) >> shift;
> +        int dst28_hresult = (horizontal_add(gaiTRow28_n0_n3 * EEO) + add) >> shift;
>
>          dst[4 * line] = dst4_hresult;
>          dst[12 * line] = dst12_hresult;
> @@ -1450,29 +1450,29 @@
>          dst[28 * line] = dst28_hresult;
>
>          int dst2_hresult =
> -            (horizontal_add((g_aiT_two_row_first_four *
> -                             EO_first_four) + (g_aiT_two_row_second_four * EO_last_four)) + add) >> shift;
> +            (horizontal_add((gaiTRow2_n0_n3 *
> +                             EO_first_four) + (gaiTRow2_n4_n7 * EO_last_four)) + add) >> shift;
>          int dst6_hresult =
> -            (horizontal_add((g_aiT_six_row_first_four *
> -                             EO_first_four) + (g_aiT_six_row_second_four * EO_last_four)) + add) >> shift;
> +            (horizontal_add((gaiTRow6_n0_n3 *
> +                             EO_first_four) + (gaiTRow6_n4_n7 * EO_last_four)) + add) >> shift;
>          int dst10_hresult =
> -            (horizontal_add((g_aiT_ten_row_first_four *
> -                             EO_first_four) + (g_aiT_ten_row_second_four * EO_last_four)) + add) >> shift;
> +            (horizontal_add((gaiTRow10_n0_n3 *
> +                             EO_first_four) + (gaiTRow10_n4_n7 * EO_last_four)) + add) >> shift;
>          int dst14_hresult =
> -            (horizontal_add((g_aiT_fourteen_row_first_four *
> -                             EO_first_four) + (g_aiT_fourteen_row_second_four * EO_last_four)) + add) >> shift;
> +            (horizontal_add((gaiTRow14_n0_n3 *
> +                             EO_first_four) + (gaiTRow14_n4_n7 * EO_last_four)) + add) >> shift;
>          int dst18_hresult =
> -            (horizontal_add((g_aiT_eighteen_row_first_four *
> -                             EO_first_four) + (g_aiT_eighteen_row_second_four * EO_last_four)) + add) >> shift;
> +            (horizontal_add((gaiTRow18_n0_n3 *
> +                             EO_first_four) + (gaiTRow18_n4_n7 * EO_last_four)) + add) >> shift;
>          int dst22_hresult =
> -            (horizontal_add((g_aiT_twentytwo_row_first_four *
> -                             EO_first_four) + (g_aiT_twentytwo_row_second_four * EO_last_four)) + add) >> shift;
> +            (horizontal_add((gaiTRow22_n0_n3 *
> +                             EO_first_four) + (gaiTRow22_n4_n7 * EO_last_four)) + add) >> shift;
>          int dst26_hresult =
> -            (horizontal_add((g_aiT_twentysix_row_first_four *
> -                             EO_first_four) + (g_aiT_twentysix_row_second_four * EO_last_four)) + add) >> shift;
> +            (horizontal_add((gaiTRow26_n0_n3 *
> +                             EO_first_four) + (gaiTRow26_n4_n7 * EO_last_four)) + add) >> shift;
>          int dst30_hresult =
> -            (horizontal_add((g_aiT_thirty_row_first_four *
> -                             EO_first_four) + (g_aiT_thirty_row_second_four * EO_last_four)) + add) >> shift;
> +            (horizontal_add((gaiTRow30_n0_n3 *
> +                             EO_first_four) + (gaiTRow30_n4_n7 * EO_last_four)) + add) >> shift;
>
>          dst[2 * line] = dst2_hresult;
>          dst[6 * line] = dst6_hresult;
> @@ -1483,42 +1483,42 @@
>          dst[26 * line] = dst26_hresult;
>          dst[30 * line] = dst30_hresult;
>
> -        Vec4i dst1_temp = (g_aiT_one_row_first_four * O_first_four) + (g_aiT_one_row_second_four * O_second_four) +
> -            (g_aiT_one_row_third_four * O_third_four) + (g_aiT_one_row_fourth_four * O_last_four);
> -        Vec4i dst3_temp = (g_aiT_three_row_first_four * O_first_four) + (g_aiT_three_row_second_four * O_second_four) +
> -            (g_aiT_three_row_third_four * O_third_four) + (g_aiT_three_row_fourth_four * O_last_four);
> -        Vec4i dst5_temp = (g_aiT_five_row_first_four * O_first_four) + (g_aiT_five_row_second_four * O_second_four) +
> -            (g_aiT_five_row_third_four * O_third_four) + (g_aiT_five_row_fourth_four * O_last_four);
> -        Vec4i dst7_temp = (g_aiT_seven_row_first_four * O_first_four) + (g_aiT_seven_row_second_four * O_second_four) +
> -            (g_aiT_seven_row_third_four * O_third_four) + (g_aiT_seven_row_fourth_four * O_last_four);
> -        Vec4i dst9_temp = (g_aiT_nine_row_first_four * O_first_four) + (g_aiT_nine_row_second_four * O_second_four) +
> -            (g_aiT_nine_row_third_four * O_third_four) + (g_aiT_nine_row_fourth_four * O_last_four);
> -        Vec4i dst11_temp = (g_aiT_eleven_row_first_four * O_first_four) + (g_aiT_eleven_row_second_four * O_second_four) +
> -            (g_aiT_eleven_row_third_four * O_third_four) + (g_aiT_eleven_row_fourth_four * O_last_four);
> -        Vec4i dst13_temp = (g_aiT_thirteen_row_first_four * O_first_four) + (g_aiT_thirteen_row_second_four * O_second_four) +
> -            (g_aiT_thirteen_row_third_four * O_third_four) + (g_aiT_thirteen_row_fourth_four * O_last_four);
> -        Vec4i dst15_temp = (g_aiT_fifteen_row_first_four * O_first_four) + (g_aiT_fifteen_row_second_four * O_second_four) +
> -            (g_aiT_fifteen_row_third_four * O_third_four) + (g_aiT_fifteen_row_fourth_four * O_last_four);
> -        Vec4i dst17_temp = (g_aiT_seventeen_row_first_four * O_first_four) + (g_aiT_seventeen_row_second_four * O_second_four) +
> -            (g_aiT_seventeen_row_third_four * O_third_four) + (g_aiT_seventeen_row_fourth_four * O_last_four);
> -        Vec4i dst19_temp = (g_aiT_nineteen_row_first_four * O_first_four) + (g_aiT_nineteen_row_second_four * O_second_four) +
> -            (g_aiT_nineteen_row_third_four * O_third_four) + (g_aiT_nineteen_row_fourth_four * O_last_four);
> -        Vec4i dst21_temp = (g_aiT_twentyone_row_first_four * O_first_four) + (g_aiT_twentyone_row_second_four * O_second_four) +
> -            (g_aiT_twentyone_row_third_four * O_third_four) + (g_aiT_twentyone_row_fourth_four * O_last_four);
> +        Vec4i dst1_temp = (gaiTRow1_n0_n3 * O_first_four) + (gaiTRow1_n4_n7 * O_second_four) +
> +            (gaiTRow1_n8_n11 * O_third_four) + (gaiTRow1_n12_n15 * O_last_four);
> +        Vec4i dst3_temp = (gaiTRow3_n0_n3 * O_first_four) + (gaiTRow3_n4_n7 * O_second_four) +
> +            (gaiTRow3_n8_n11 * O_third_four) + (gaiTRow3_n12_n15 * O_last_four);
> +        Vec4i dst5_temp = (gaiTRow5_n0_n3 * O_first_four) + (gaiTRow5_n4_n7 * O_second_four) +
> +            (gaiTRow5_n8_n11 * O_third_four) + (gaiTRow5_n12_n15 * O_last_four);
> +        Vec4i dst7_temp = (gaiTRow7_n0_n3 * O_first_four) + (gaiTRow7_n4_n7 * O_second_four) +
> +            (gaiTRow7_n8_n11 * O_third_four) + (gaiTRow7_n12_n15 * O_last_four);
> +        Vec4i dst9_temp = (gaiT9_n0_n3 * O_first_four) + (gaiT9_n4_n7 * O_second_four) +
> +            (gaiT9_n8_n11 * O_third_four) + (gaiT9_n12_n15 * O_last_four);
> +        Vec4i dst11_temp = (gaiT11_n0_n3 * O_first_four) + (gaiT11_n4_n7 * O_second_four) +
> +            (gaiT11_n8_n11 * O_third_four) + (gaiT11_n12_n15 * O_last_four);
> +        Vec4i dst13_temp = (gaiT13_n0_n3 * O_first_four) + (gaiT13_n4_n7 * O_second_four) +
> +            (gaiT13_n8_n11 * O_third_four) + (gaiT13_n12_n15 * O_last_four);
> +        Vec4i dst15_temp = (gaiT15_n0_n3 * O_first_four) + (gaiT15_n4_n7 * O_second_four) +
> +            (gaiT15_n8_n11 * O_third_four) + (gaiT15_n12_n15 * O_last_four);
> +        Vec4i dst17_temp = (gaiT17_n0_n3 * O_first_four) + (gaiT17_n4_n7 * O_second_four) +
> +            (gaiT17_n8_n11 * O_third_four) + (gaiT17_n12_n15 * O_last_four);
> +        Vec4i dst19_temp = (gaiT19_n0_n3 * O_first_four) + (gaiT19_n4_n7 * O_second_four) +
> +            (gaiT19_n8_n11 * O_third_four) + (gaiT19_n12_n15 * O_last_four);
> +        Vec4i dst21_temp = (gaiT21_n0_n3 * O_first_four) + (gaiT21_n4_n7 * O_second_four) +
> +            (gaiT21_n8_n11 * O_third_four) + (gaiT21_n12_n15 * O_last_four);
>          Vec4i dst23_temp =
> -            (g_aiT_twentythree_row_first_four * O_first_four) + (g_aiT_twentythree_row_second_four * O_second_four) +
> -            (g_aiT_twentythree_row_third_four * O_third_four) + (g_aiT_twentythree_row_fourth_four * O_last_four);
> +            (gaiT23_n0_n3 * O_first_four) + (gaiT23_n4_n7 * O_second_four) +
> +            (gaiT23_n8_n11 * O_third_four) + (gaiT23_n12_n15 * O_last_four);
>          Vec4i dst25_temp =
> -            (g_aiT_twentyfive_row_first_four * O_first_four) + (g_aiT_twentyfive_row_second_four * O_second_four) +
> -            (g_aiT_twentyfive_row_third_four * O_third_four) + (g_aiT_twentyfive_row_fourth_four * O_last_four);
> +            (gaiT25_n0_n3 * O_first_four) + (gaiT25_n4_n7 * O_second_four) +
> +            (gaiT25_n8_n11 * O_third_four) + (gaiT25_n12_n15 * O_last_four);
>          Vec4i dst27_temp =
> -            (g_aiT_twentyseven_row_first_four * O_first_four) + (g_aiT_twentyseven_row_second_four * O_second_four) +
> -            (g_aiT_twentyseven_row_third_four * O_third_four) + (g_aiT_twentyseven_row_fourth_four * O_last_four);
> +            (gaiT27_n0_n3 * O_first_four) + (gaiT27_n4_n7 * O_second_four) +
> +            (gaiT27_n8_n11 * O_third_four) + (gaiT27_n12_n15 * O_last_four);
>          Vec4i dst29_temp =
> -            (g_aiT_twentynine_row_first_four * O_first_four) + (g_aiT_twentynine_row_second_four * O_second_four) +
> -            (g_aiT_twentynine_row_third_four * O_third_four) + (g_aiT_twentynine_row_fourth_four * O_last_four);
> -        Vec4i dst31_temp = (g_aiT_thirtyone_row_first_four * O_first_four) + (g_aiT_thirtyone_row_second_four * O_second_four) +
> -            (g_aiT_thirtyone_row_third_four * O_third_four) + (g_aiT_thirtyone_row_fourth_four * O_last_four);
> +            (gaiT29_n0_n3 * O_first_four) + (gaiT29_n4_n7 * O_second_four) +
> +            (gaiT29_n8_n11 * O_third_four) + (gaiT29_n12_n15 * O_last_four);
> +        Vec4i dst31_temp = (gaiT31_n0_n3 * O_first_four) + (gaiT31_n4_n7 * O_second_four) +
> +            (gaiT31_n8_n11 * O_third_four) + (gaiT31_n12_n15 * O_last_four);
>
>          dst[1 * line] = (horizontal_add(dst1_temp) + add) >> shift;
>          dst[3 * line] = (horizontal_add(dst3_temp) + add) >> shift;



-- 
Steve Borho