[x265] [PATCH] asm: luma_vpp[16x32, 16x64] in avx2: improve 3875c->2480c, 7499c->4915c
Divya Manivannan
divya at multicorewareinc.com
Fri Nov 21 08:46:23 CET 2014
Please ignore this patch. I will further optimize the lea instructions and
resend it.
Regards,
Divya
On Fri, Nov 21, 2014 at 10:26 AM, Divya Manivannan <divya at multicorewareinc.com> wrote:
> # HG changeset patch
> # User Divya Manivannan <divya at multicorewareinc.com>
> # Date 1416545650 -19800
> # Fri Nov 21 10:24:10 2014 +0530
> # Node ID d8cf16a4f4385b07f9aae668480e9a3dad0d9bae
> # Parent 2abf89f5c4f2b797705f3b6e8d6670962daa38b9
> asm: luma_vpp[16x32, 16x64] in avx2: improve 3875c->2480c, 7499c->4915c
>
> diff -r 2abf89f5c4f2 -r d8cf16a4f438 source/common/x86/asm-primitives.cpp
> --- a/source/common/x86/asm-primitives.cpp Thu Nov 20 14:31:04 2014 -0800
> +++ b/source/common/x86/asm-primitives.cpp Fri Nov 21 10:24:10 2014 +0530
> @@ -1799,6 +1799,8 @@
> p.transpose[BLOCK_32x32] = x265_transpose32_avx2;
> p.transpose[BLOCK_64x64] = x265_transpose64_avx2;
> p.luma_vpp[LUMA_16x16] = x265_interp_8tap_vert_pp_16x16_avx2;
> + p.luma_vpp[LUMA_16x32] = x265_interp_8tap_vert_pp_16x32_avx2;
> + p.luma_vpp[LUMA_16x64] = x265_interp_8tap_vert_pp_16x64_avx2;
> #endif
> p.luma_hpp[LUMA_4x4] = x265_interp_8tap_horiz_pp_4x4_avx2;
> p.luma_vpp[LUMA_4x4] = x265_interp_8tap_vert_pp_4x4_avx2;
> diff -r 2abf89f5c4f2 -r d8cf16a4f438 source/common/x86/ipfilter8.asm
> --- a/source/common/x86/ipfilter8.asm Thu Nov 20 14:31:04 2014 -0800
> +++ b/source/common/x86/ipfilter8.asm Fri Nov 21 10:24:10 2014 +0530
> @@ -4246,6 +4246,309 @@
> RET
> %endif
>
> +%macro FILTER_VER_LUMA_AVX2_16xN 2
> +INIT_YMM avx2
> +%if ARCH_X86_64 == 1
> +cglobal interp_8tap_vert_pp_%1x%2, 4, 7, 15
> + mov r4d, r4m
> + shl r4d, 7
> +
> +%ifdef PIC
> + lea r5, [tab_LumaCoeffVer_32]
> + add r5, r4
> +%else
> + lea r5, [tab_LumaCoeffVer_32 + r4]
> +%endif
> +
> + lea r4, [r1 * 3]
> + sub r0, r4
> + mova m14, [pw_512]
> + mov word [rsp], %2 / 16
> +
> +.loop:
> + movu xm0, [r0] ; m0 = row 0
> + movu xm1, [r0 + r1] ; m1 = row 1
> + punpckhbw xm2, xm0, xm1
> + punpcklbw xm0, xm1
> + vinserti128 m0, m0, xm2, 1
> + pmaddubsw m0, [r5]
> + movu xm2, [r0 + r1 * 2] ; m2 = row 2
> + punpckhbw xm3, xm1, xm2
> + punpcklbw xm1, xm2
> + vinserti128 m1, m1, xm3, 1
> + pmaddubsw m1, [r5]
> + movu xm3, [r0 + r4] ; m3 = row 3
> + punpckhbw xm4, xm2, xm3
> + punpcklbw xm2, xm3
> + vinserti128 m2, m2, xm4, 1
> + pmaddubsw m4, m2, [r5 + 1 * mmsize]
> + paddw m0, m4
> + pmaddubsw m2, [r5]
> + lea r0, [r0 + r1 * 4]
> + movu xm4, [r0] ; m4 = row 4
> + punpckhbw xm5, xm3, xm4
> + punpcklbw xm3, xm4
> + vinserti128 m3, m3, xm5, 1
> + pmaddubsw m5, m3, [r5 + 1 * mmsize]
> + paddw m1, m5
> + pmaddubsw m3, [r5]
> + movu xm5, [r0 + r1] ; m5 = row 5
> + punpckhbw xm6, xm4, xm5
> + punpcklbw xm4, xm5
> + vinserti128 m4, m4, xm6, 1
> + pmaddubsw m6, m4, [r5 + 2 * mmsize]
> + paddw m0, m6
> + pmaddubsw m6, m4, [r5 + 1 * mmsize]
> + paddw m2, m6
> + pmaddubsw m4, [r5]
> + movu xm6, [r0 + r1 * 2] ; m6 = row 6
> + punpckhbw xm7, xm5, xm6
> + punpcklbw xm5, xm6
> + vinserti128 m5, m5, xm7, 1
> + pmaddubsw m7, m5, [r5 + 2 * mmsize]
> + paddw m1, m7
> + pmaddubsw m7, m5, [r5 + 1 * mmsize]
> + paddw m3, m7
> + pmaddubsw m5, [r5]
> + movu xm7, [r0 + r4] ; m7 = row 7
> + punpckhbw xm8, xm6, xm7
> + punpcklbw xm6, xm7
> + vinserti128 m6, m6, xm8, 1
> + pmaddubsw m8, m6, [r5 + 3 * mmsize]
> + paddw m0, m8
> + pmaddubsw m8, m6, [r5 + 2 * mmsize]
> + paddw m2, m8
> + pmaddubsw m8, m6, [r5 + 1 * mmsize]
> + paddw m4, m8
> + pmaddubsw m6, [r5]
> + lea r0, [r0 + r1 * 4]
> + movu xm8, [r0] ; m8 = row 8
> + punpckhbw xm9, xm7, xm8
> + punpcklbw xm7, xm8
> + vinserti128 m7, m7, xm9, 1
> + pmaddubsw m9, m7, [r5 + 3 * mmsize]
> + paddw m1, m9
> + pmaddubsw m9, m7, [r5 + 2 * mmsize]
> + paddw m3, m9
> + pmaddubsw m9, m7, [r5 + 1 * mmsize]
> + paddw m5, m9
> + pmaddubsw m7, [r5]
> + movu xm9, [r0 + r1] ; m9 = row 9
> + punpckhbw xm10, xm8, xm9
> + punpcklbw xm8, xm9
> + vinserti128 m8, m8, xm10, 1
> + pmaddubsw m10, m8, [r5 + 3 * mmsize]
> + paddw m2, m10
> + pmaddubsw m10, m8, [r5 + 2 * mmsize]
> + paddw m4, m10
> + pmaddubsw m10, m8, [r5 + 1 * mmsize]
> + paddw m6, m10
> + pmaddubsw m8, [r5]
> + movu xm10, [r0 + r1 * 2] ; m10 = row 10
> + punpckhbw xm11, xm9, xm10
> + punpcklbw xm9, xm10
> + vinserti128 m9, m9, xm11, 1
> + pmaddubsw m11, m9, [r5 + 3 * mmsize]
> + paddw m3, m11
> + pmaddubsw m11, m9, [r5 + 2 * mmsize]
> + paddw m5, m11
> + pmaddubsw m11, m9, [r5 + 1 * mmsize]
> + paddw m7, m11
> + pmaddubsw m9, [r5]
> + movu xm11, [r0 + r4] ; m11 = row 11
> + punpckhbw xm12, xm10, xm11
> + punpcklbw xm10, xm11
> + vinserti128 m10, m10, xm12, 1
> + pmaddubsw m12, m10, [r5 + 3 * mmsize]
> + paddw m4, m12
> + pmaddubsw m12, m10, [r5 + 2 * mmsize]
> + paddw m6, m12
> + pmaddubsw m12, m10, [r5 + 1 * mmsize]
> + paddw m8, m12
> + pmaddubsw m10, [r5]
> + lea r0, [r0 + r1 * 4]
> + movu xm12, [r0] ; m12 = row 12
> + punpckhbw xm13, xm11, xm12
> + punpcklbw xm11, xm12
> + vinserti128 m11, m11, xm13, 1
> + pmaddubsw m13, m11, [r5 + 3 * mmsize]
> + paddw m5, m13
> + pmaddubsw m13, m11, [r5 + 2 * mmsize]
> + paddw m7, m13
> + pmaddubsw m13, m11, [r5 + 1 * mmsize]
> + paddw m9, m13
> + pmaddubsw m11, [r5]
> +
> + pmulhrsw m0, m14 ; m0 = word: row 0
> + pmulhrsw m1, m14 ; m1 = word: row 1
> + pmulhrsw m2, m14 ; m2 = word: row 2
> + pmulhrsw m3, m14 ; m3 = word: row 3
> + pmulhrsw m4, m14 ; m4 = word: row 4
> + pmulhrsw m5, m14 ; m5 = word: row 5
> + packuswb m0, m1
> + packuswb m2, m3
> + packuswb m4, m5
> + vpermq m0, m0, 11011000b
> + vpermq m2, m2, 11011000b
> + vpermq m4, m4, 11011000b
> + vextracti128 xm1, m0, 1
> + vextracti128 xm3, m2, 1
> + vextracti128 xm5, m4, 1
> + lea r6, [r3 * 3]
> + movu [r2], xm0
> + movu [r2 + r3], xm1
> + movu [r2 + r3 * 2], xm2
> + movu [r2 + r6], xm3
> + lea r2, [r2 + r3 * 4]
> + movu [r2], xm4
> + movu [r2 + r3], xm5
> +
> + movu xm13, [r0 + r1] ; m13 = row 13
> + punpckhbw xm0, xm12, xm13
> + punpcklbw xm12, xm13
> + vinserti128 m12, m12, xm0, 1
> + pmaddubsw m0, m12, [r5 + 3 * mmsize]
> + paddw m6, m0
> + pmaddubsw m0, m12, [r5 + 2 * mmsize]
> + paddw m8, m0
> + pmaddubsw m0, m12, [r5 + 1 * mmsize]
> + paddw m10, m0
> + pmaddubsw m12, [r5]
> + movu xm0, [r0 + r1 * 2] ; m0 = row 14
> + punpckhbw xm1, xm13, xm0
> + punpcklbw xm13, xm0
> + vinserti128 m13, m13, xm1, 1
> + pmaddubsw m1, m13, [r5 + 3 * mmsize]
> + paddw m7, m1
> + pmaddubsw m1, m13, [r5 + 2 * mmsize]
> + paddw m9, m1
> + pmaddubsw m1, m13, [r5 + 1 * mmsize]
> + paddw m11, m1
> + pmaddubsw m13, [r5]
> +
> + pmulhrsw m6, m14 ; m6 = word: row 6
> + pmulhrsw m7, m14 ; m7 = word: row 7
> + packuswb m6, m7
> + vpermq m6, m6, 11011000b
> + vextracti128 xm7, m6, 1
> + movu [r2 + r3 * 2], xm6
> + movu [r2 + r6], xm7
> + lea r2, [r2 + r3 * 4]
> +
> + movu xm1, [r0 + r4] ; m1 = row 15
> + punpckhbw xm2, xm0, xm1
> + punpcklbw xm0, xm1
> + vinserti128 m0, m0, xm2, 1
> + pmaddubsw m2, m0, [r5 + 3 * mmsize]
> + paddw m8, m2
> + pmaddubsw m2, m0, [r5 + 2 * mmsize]
> + paddw m10, m2
> + pmaddubsw m2, m0, [r5 + 1 * mmsize]
> + paddw m12, m2
> + pmaddubsw m0, [r5]
> + lea r0, [r0 + r1 * 4]
> + movu xm2, [r0] ; m2 = row 16
> + punpckhbw xm3, xm1, xm2
> + punpcklbw xm1, xm2
> + vinserti128 m1, m1, xm3, 1
> + pmaddubsw m3, m1, [r5 + 3 * mmsize]
> + paddw m9, m3
> + pmaddubsw m3, m1, [r5 + 2 * mmsize]
> + paddw m11, m3
> + pmaddubsw m3, m1, [r5 + 1 * mmsize]
> + paddw m13, m3
> + pmaddubsw m1, [r5]
> + movu xm3, [r0 + r1] ; m3 = row 17
> + punpckhbw xm4, xm2, xm3
> + punpcklbw xm2, xm3
> + vinserti128 m2, m2, xm4, 1
> + pmaddubsw m4, m2, [r5 + 3 * mmsize]
> + paddw m10, m4
> + pmaddubsw m4, m2, [r5 + 2 * mmsize]
> + paddw m12, m4
> + pmaddubsw m2, [r5 + 1 * mmsize]
> + paddw m0, m2
> + movu xm4, [r0 + r1 * 2] ; m4 = row 18
> + punpckhbw xm5, xm3, xm4
> + punpcklbw xm3, xm4
> + vinserti128 m3, m3, xm5, 1
> + pmaddubsw m5, m3, [r5 + 3 * mmsize]
> + paddw m11, m5
> + pmaddubsw m5, m3, [r5 + 2 * mmsize]
> + paddw m13, m5
> + pmaddubsw m3, [r5 + 1 * mmsize]
> + paddw m1, m3
> + movu xm5, [r0 + r4] ; m5 = row 19
> + punpckhbw xm6, xm4, xm5
> + punpcklbw xm4, xm5
> + vinserti128 m4, m4, xm6, 1
> + pmaddubsw m6, m4, [r5 + 3 * mmsize]
> + paddw m12, m6
> + pmaddubsw m4, [r5 + 2 * mmsize]
> + paddw m0, m4
> + lea r0, [r0 + r1 * 4]
> + movu xm6, [r0] ; m6 = row 20
> + punpckhbw xm7, xm5, xm6
> + punpcklbw xm5, xm6
> + vinserti128 m5, m5, xm7, 1
> + pmaddubsw m7, m5, [r5 + 3 * mmsize]
> + paddw m13, m7
> + pmaddubsw m5, [r5 + 2 * mmsize]
> + paddw m1, m5
> + movu xm7, [r0 + r1] ; m7 = row 21
> + punpckhbw xm2, xm6, xm7
> + punpcklbw xm6, xm7
> + vinserti128 m6, m6, xm2, 1
> + pmaddubsw m6, [r5 + 3 * mmsize]
> + paddw m0, m6
> + movu xm2, [r0 + r1 * 2] ; m2 = row 22
> + punpckhbw xm3, xm7, xm2
> + punpcklbw xm7, xm2
> + vinserti128 m7, m7, xm3, 1
> + pmaddubsw m7, [r5 + 3 * mmsize]
> + paddw m1, m7
> +
> + pmulhrsw m8, m14 ; m8 = word: row 8
> + pmulhrsw m9, m14 ; m9 = word: row 9
> + pmulhrsw m10, m14 ; m10 = word: row 10
> + pmulhrsw m11, m14 ; m11 = word: row 11
> + pmulhrsw m12, m14 ; m12 = word: row 12
> + pmulhrsw m13, m14 ; m13 = word: row 13
> + pmulhrsw m0, m14 ; m0 = word: row 14
> + pmulhrsw m1, m14 ; m1 = word: row 15
> + packuswb m8, m9
> + packuswb m10, m11
> + packuswb m12, m13
> + packuswb m0, m1
> + vpermq m8, m8, 11011000b
> + vpermq m10, m10, 11011000b
> + vpermq m12, m12, 11011000b
> + vpermq m0, m0, 11011000b
> + vextracti128 xm9, m8, 1
> + vextracti128 xm11, m10, 1
> + vextracti128 xm13, m12, 1
> + vextracti128 xm1, m0, 1
> + movu [r2], xm8
> + movu [r2 + r3], xm9
> + movu [r2 + r3 * 2], xm10
> + movu [r2 + r6], xm11
> + lea r2, [r2 + r3 * 4]
> + movu [r2], xm12
> + movu [r2 + r3], xm13
> + movu [r2 + r3 * 2], xm0
> + movu [r2 + r6], xm1
> + lea r2, [r2 + r3 * 4]
> + lea r6, [r1 * 4]
> + sub r0, r6
> + dec word [rsp]
> + jnz .loop
> + RET
> +%endif
> +%endmacro
> +
> +FILTER_VER_LUMA_AVX2_16xN 16, 32
> +FILTER_VER_LUMA_AVX2_16xN 16, 64
> +
>
> ;-------------------------------------------------------------------------------------------------------------
> ; void interp_8tap_vert_%3_%1x%2(pixel *src, intptr_t srcStride, pixel *dst, intptr_t dstStride, int coeffIdx)
> ;-------------------------------------------------------------------------------------------------------------
>
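
For context, the primitive this patch accelerates is a vertical 8-tap luma interpolation on 8-bit pixels: each output pixel is a weighted sum of eight vertically adjacent source pixels, rounded with (sum + 32) >> 6 (the pmulhrsw against pw_512 in the loop above) and clipped to [0, 255] by packuswb. The scalar C sketch below is illustrative only and is not x265's reference code; the function name, the coefficient table layout, and the 8-bit pixel type are my assumptions, with the taps taken from the standard HEVC luma filter.

#include <stdint.h>
#include <stddef.h>

/* HEVC 8-tap luma filter, one row per quarter-pel offset (coeffIdx 0..3) */
static const int8_t lumaFilter[4][8] =
{
    {  0, 0,   0, 64,  0,   0, 0,  0 },
    { -1, 4, -10, 58, 17,  -5, 1,  0 },
    { -1, 4, -11, 40, 40, -11, 4, -1 },
    {  0, 1,  -5, 17, 58, -10, 4, -1 }
};

void interp_8tap_vert_pp_ref(const uint8_t* src, intptr_t srcStride,
                             uint8_t* dst, intptr_t dstStride,
                             int width, int height, int coeffIdx)
{
    const int8_t* c = lumaFilter[coeffIdx];

    src -= 3 * srcStride;                /* same centering as "sub r0, r4" above */

    for (int y = 0; y < height; y++)
    {
        for (int x = 0; x < width; x++)
        {
            int sum = 0;
            for (int i = 0; i < 8; i++)
                sum += c[i] * src[x + i * srcStride];

            sum = (sum + 32) >> 6;       /* rounding done by pmulhrsw with pw_512 */
            if (sum < 0)   sum = 0;      /* clipping done by packuswb */
            if (sum > 255) sum = 255;
            dst[x] = (uint8_t)sum;
        }
        src += srcStride;
        dst += dstStride;
    }
}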