[x265] [PATCH 207 of 307] x86: AVX512 interp_8tap_vert_pp_48x64

mythreyi at multicorewareinc.com
Sat Apr 7 04:33:25 CEST 2018


# HG changeset patch
# User Vignesh Vijayakumar<vignesh at multicorewareinc.com>
# Date 1511850857 -19800
#      Tue Nov 28 12:04:17 2017 +0530
# Node ID e7c3e842a8862391a23c10a978b22ceaa878fbcd
# Parent  6c434e27f2657e97c5ee16d71f6e0e8218ea94ad
x86: AVX512 interp_8tap_vert_pp_48x64

AVX2 performance   : 24.63x
AVX512 performance : 34.90x
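For context, a minimal scalar sketch of what the vertical "pp" (pixel-to-pixel) 8-tap luma
filter computes, assuming the usual 8-bit path with a rounding offset of 32 and a 6-bit shift
(matching the pw_512 pmulhrsw rounding in the asm); the table and function names below are
illustrative, not the identifiers used in ipfilter8.asm:

#include <stdint.h>

/* Illustrative 8-tap luma filter sets (coeffIdx 1..3); index 0 is the
 * full-pel (copy) case.  Values follow the HEVC luma interpolation filter. */
static const int8_t lumaFilter[4][8] =
{
    {  0, 0,   0, 64,  0,   0, 0,  0 },
    { -1, 4, -10, 58, 17,  -5, 1,  0 },
    { -1, 4, -11, 40, 40, -11, 4, -1 },
    {  0, 1,  -5, 17, 58, -10, 4, -1 }
};

static inline uint8_t clipPixel(int v)
{
    return (uint8_t)(v < 0 ? 0 : (v > 255 ? 255 : v));
}

/* Scalar reference: vertical 8-tap filter, 8-bit in / 8-bit out (the "pp" case).
 * 48x64 is the block size handled by the new AVX-512 kernel below. */
static void interp_8tap_vert_pp_c(const uint8_t* src, intptr_t srcStride,
                                  uint8_t* dst, intptr_t dstStride,
                                  int width, int height, int coeffIdx)
{
    const int8_t* c = lumaFilter[coeffIdx];
    src -= 3 * srcStride;                        /* taps reach 3 rows above the sample */
    for (int y = 0; y < height; y++)
    {
        for (int x = 0; x < width; x++)
        {
            int sum = 0;
            for (int i = 0; i < 8; i++)
                sum += c[i] * src[x + i * srcStride];
            dst[x] = clipPixel((sum + 32) >> 6); /* round and shift back to 8 bits */
        }
        src += srcStride;
        dst += dstStride;
    }
}

The AVX-512 routine added below covers the 48-column width by filtering columns 0-31 with the
existing 32-wide ZMM path and the remaining 16 columns via 128-bit lane loads at an mmsize/2
(32-byte) offset.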

diff -r 6c434e27f265 -r e7c3e842a886 source/common/x86/asm-primitives.cpp
--- a/source/common/x86/asm-primitives.cpp	Tue Nov 28 11:08:43 2017 +0530
+++ b/source/common/x86/asm-primitives.cpp	Tue Nov 28 12:04:17 2017 +0530
@@ -4992,6 +4992,7 @@
         p.pu[LUMA_32x24].luma_vpp = PFX(interp_8tap_vert_pp_32x24_avx512);
         p.pu[LUMA_32x16].luma_vpp = PFX(interp_8tap_vert_pp_32x16_avx512);
         p.pu[LUMA_32x8].luma_vpp = PFX(interp_8tap_vert_pp_32x8_avx512);
+        p.pu[LUMA_48x64].luma_vpp = PFX(interp_8tap_vert_pp_48x64_avx512);
         p.pu[LUMA_64x64].luma_vpp = PFX(interp_8tap_vert_pp_64x64_avx512);
         p.pu[LUMA_64x48].luma_vpp = PFX(interp_8tap_vert_pp_64x48_avx512);
         p.pu[LUMA_64x32].luma_vpp = PFX(interp_8tap_vert_pp_64x32_avx512);
diff -r 6c434e27f265 -r e7c3e842a886 source/common/x86/ipfilter8.asm
--- a/source/common/x86/ipfilter8.asm	Tue Nov 28 11:08:43 2017 +0530
+++ b/source/common/x86/ipfilter8.asm	Tue Nov 28 12:04:17 2017 +0530
@@ -14174,6 +14174,246 @@
     FILTER_VER_PP_LUMA_32xN_AVX512 64
 %endif
 
+%macro PROCESS_LUMA_VERT_PP_48x8_AVX512 0
+    PROCESS_LUMA_VERT_PP_32x4_AVX512
+    lea                   r8,                 [r4 + 4 * r1]
+    movu                  ym1,                [r5]
+    vinserti32x8          m1,                 [r5 + 2 * r1],       1
+    movu                  ym3,                [r5 + r1]
+    vinserti32x8          m3,                 [r5 + r6],           1
+    punpcklbw             m0,                 m1,                  m3
+    pmaddubsw             m0,                 m8
+    punpckhbw             m1,                 m3
+    pmaddubsw             m1,                 m8
+
+    movu                  ym4,                [r5 + 2 * r1]
+    vinserti32x8          m4,                 [r5 + 4 * r1],       1
+    punpcklbw             m2,                 m3,                  m4
+    pmaddubsw             m2,                 m8
+    punpckhbw             m3,                 m4
+    pmaddubsw             m3,                 m8
+
+    movu                  ym5,                [r5 + r6]
+    vinserti32x8          m5,                 [r4 + r1],           1
+    punpcklbw             m6,                 m4,                  m5
+    pmaddubsw             m6,                 m9
+    punpckhbw             m4,                 m5
+    pmaddubsw             m4,                 m9
+
+    paddw                 m0,                 m6
+    paddw                 m1,                 m4
+
+    movu                  ym4,                [r5 + 4 * r1]
+    vinserti32x8          m4,                 [r4 + 2 * r1],       1
+    punpcklbw             m6,                 m5,                  m4
+    pmaddubsw             m6,                 m9
+    punpckhbw             m5,                 m4
+    pmaddubsw             m5,                 m9
+
+    paddw                 m2,                 m6
+    paddw                 m3,                 m5
+
+    movu                  ym15,               [r4 + r1]
+    vinserti32x8          m15,                [r4 + r6],           1
+    punpcklbw             m12,                m4,                 m15
+    pmaddubsw             m12,                m10
+    punpckhbw             m13,                m4,                 m15
+    pmaddubsw             m13,                m10
+
+    movu                  ym4,                [r4 + 2 * r1]
+    vinserti32x8          m4,                 [r4 + 4 * r1],       1
+    punpcklbw             m14,                m15,                 m4
+    pmaddubsw             m14,                m10
+    punpckhbw             m15,                m4
+    pmaddubsw             m15,                m10
+
+    movu                  ym5,                [r4 + r6]
+    vinserti32x8          m5,                 [r8 + r1],           1
+    punpcklbw             m6,                 m4,                  m5
+    pmaddubsw             m6,                 m11
+    punpckhbw             m4,                 m5
+    pmaddubsw             m4,                 m11
+
+    paddw                 m12,                m6
+    paddw                 m13,                m4
+
+    movu                  ym4,                [r4 + 4 * r1]
+    vinserti32x8          m4,                 [r8 + 2 * r1],       1
+    punpcklbw             m6,                 m5,                  m4
+    pmaddubsw             m6,                 m11
+    punpckhbw             m5,                 m4
+    pmaddubsw             m5,                 m11
+
+    paddw                 m14,                m6
+    paddw                 m15,                m5
+
+    paddw                 m0,                 m12
+    paddw                 m1,                 m13
+    paddw                 m2,                 m14
+    paddw                 m3,                 m15
+
+    pmulhrsw              m0,                 m7
+    pmulhrsw              m1,                 m7
+    pmulhrsw              m2,                 m7
+    pmulhrsw              m3,                 m7
+
+    packuswb              m0,                 m1
+    packuswb              m2,                 m3
+    lea                   r9,                 [r2 + 4 * r3]
+    movu                  [r9],               ym0
+    movu                  [r9 + r3],          ym2
+    vextracti32x8         [r9 + 2 * r3],      m0,                  1
+    vextracti32x8         [r9 + r7],          m2,                  1
+
+    movu                  xm1,                [r0 + mmsize/2]
+    vinserti32x4          m1,                 [r0 + 2 * r1 + mmsize/2],       1
+    vinserti32x4          m1,                 [r5 + mmsize/2],                2
+    vinserti32x4          m1,                 [r5 + 2 * r1 + mmsize/2],       3
+    movu                  xm3,                [r0 + r1 + mmsize/2]
+    vinserti32x4          m3,                 [r0 + r6 + mmsize/2],           1
+    vinserti32x4          m3,                 [r5 + r1 + mmsize/2],           2
+    vinserti32x4          m3,                 [r5 + r6 + mmsize/2],           3
+    punpcklbw             m0,                 m1,                  m3
+    pmaddubsw             m0,                 m8
+    punpckhbw             m1,                 m3
+    pmaddubsw             m1,                 m8
+
+    movu                  xm4,                [r0 + 2 * r1 + mmsize/2]
+    vinserti32x4          m4,                 [r0 + 4 * r1 + mmsize/2],       1
+    vinserti32x4          m4,                 [r5 + 2 * r1 + mmsize/2],       2
+    vinserti32x4          m4,                 [r5 + 4 * r1 + mmsize/2],       3
+    punpcklbw             m2,                 m3,                  m4
+    pmaddubsw             m2,                 m8
+    punpckhbw             m3,                 m4
+    pmaddubsw             m3,                 m8
+
+    movu                  xm5,                [r0 + r6 + mmsize/2]
+    vinserti32x4          m5,                 [r5 + r1 + mmsize/2],           1
+    vinserti32x4          m5,                 [r5 + r6 + mmsize/2],           2
+    vinserti32x4          m5,                 [r4 + r1 + mmsize/2],           3
+    punpcklbw             m6,                 m4,                  m5
+    pmaddubsw             m6,                 m9
+    punpckhbw             m4,                 m5
+    pmaddubsw             m4,                 m9
+
+    paddw                 m0,                 m6
+    paddw                 m1,                 m4
+
+    movu                  xm4,                [r0 + 4 * r1 + mmsize/2]
+    vinserti32x4          m4,                 [r5 + 2 * r1 + mmsize/2],       1
+    vinserti32x4          m4,                 [r5 + 4 * r1 + mmsize/2],       2
+    vinserti32x4          m4,                 [r4 + 2 * r1 + mmsize/2],       3
+    punpcklbw             m6,                 m5,                  m4
+    pmaddubsw             m6,                 m9
+    punpckhbw             m5,                 m4
+    pmaddubsw             m5,                 m9
+
+    paddw                 m2,                 m6
+    paddw                 m3,                 m5
+
+    movu                  xm15,               [r5 + r1 + mmsize/2]
+    vinserti32x4          m15,                [r5 + r6 + mmsize/2],           1
+    vinserti32x4          m15,                [r4 + r1 + mmsize/2],           2
+    vinserti32x4          m15,                [r4 + r6 + mmsize/2],           3
+    punpcklbw             m12,                m4,                 m15
+    pmaddubsw             m12,                m10
+    punpckhbw             m13,                m4,                 m15
+    pmaddubsw             m13,                m10
+
+    movu                  xm4,                [r5 + 2 * r1 + mmsize/2]
+    vinserti32x4          m4,                 [r5 + 4 * r1 + mmsize/2],       1
+    vinserti32x4          m4,                 [r4 + 2 * r1 + mmsize/2],       2
+    vinserti32x4          m4,                 [r4 + 4 * r1 + mmsize/2],       3
+    punpcklbw             m14,                m15,                 m4
+    pmaddubsw             m14,                m10
+    punpckhbw             m15,                m4
+    pmaddubsw             m15,                m10
+
+    movu                  xm5,                [r5 + r6 + mmsize/2]
+    vinserti32x4          m5,                 [r4 + r1 + mmsize/2],           1
+    vinserti32x4          m5,                 [r4 + r6 + mmsize/2],           2
+    vinserti32x4          m5,                 [r8 + r1 + mmsize/2],           3
+    punpcklbw             m6,                 m4,                  m5
+    pmaddubsw             m6,                 m11
+    punpckhbw             m4,                 m5
+    pmaddubsw             m4,                 m11
+
+    paddw                 m12,                m6
+    paddw                 m13,                m4
+
+    movu                  xm4,                [r5 + 4 * r1 + mmsize/2]
+    vinserti32x4          m4,                 [r4 + 2 * r1 + mmsize/2],       1
+    vinserti32x4          m4,                 [r4 + 4 * r1 + mmsize/2],       2
+    vinserti32x4          m4,                 [r8 + 2 * r1 + mmsize/2],       3
+    punpcklbw             m6,                 m5,                  m4
+    pmaddubsw             m6,                 m11
+    punpckhbw             m5,                 m4
+    pmaddubsw             m5,                 m11
+
+    paddw                 m14,                m6
+    paddw                 m15,                m5
+
+    paddw                 m0,                 m12
+    paddw                 m1,                 m13
+    paddw                 m2,                 m14
+    paddw                 m3,                 m15
+
+    pmulhrsw              m0,                 m7
+    pmulhrsw              m1,                 m7
+    pmulhrsw              m2,                 m7
+    pmulhrsw              m3,                 m7
+
+    packuswb              m0,                 m1
+    packuswb              m2,                 m3
+    movu                  [r2 + mmsize/2],               xm0
+    movu                  [r2 + r3 + mmsize/2],          xm2
+    vextracti32x4         [r2 + 2 * r3 + mmsize/2],      m0,                  1
+    vextracti32x4         [r2 + r7 + mmsize/2],          m2,                  1
+    lea                   r2,                 [r2 + 4 * r3]
+    vextracti32x4         [r2 + mmsize/2],               m0,                  2
+    vextracti32x4         [r2 + r3 + mmsize/2],          m2,                  2
+    vextracti32x4         [r2 + 2 * r3 + mmsize/2],      m0,                  3
+    vextracti32x4         [r2 + r7 + mmsize/2],          m2,                  3
+%endmacro
+;-----------------------------------------------------------------------------------------------------------------
+; void interp_8tap_vert_pp_48x64(pixel *src, intptr_t srcStride, pixel *dst, intptr_t dstStride, int coeffIdx)
+;-----------------------------------------------------------------------------------------------------------------
+%macro FILTER_VER_PP_LUMA_48x64_AVX512 0
+INIT_ZMM avx512
+cglobal interp_8tap_vert_pp_48x64, 5, 10, 16
+    mov                   r4d,                r4m
+    shl                   r4d,                8
+    lea                   r6,                 [3 * r1]
+    lea                   r7,                 [3 * r3]
+    sub                   r0,                 r6
+
+%ifdef PIC
+    lea                   r5,                 [tab_LumaCoeffVer_32_avx512]
+    mova                  m8,                 [r5 + r4]
+    mova                  m9,                 [r5 + r4 + 1 * mmsize]
+    mova                  m10,                [r5 + r4 + 2 * mmsize]
+    mova                  m11,                [r5 + r4 + 3 * mmsize]
+%else
+    mova                  m8,                 [tab_LumaCoeffVer_32_avx512 + r4]
+    mova                  m9,                 [tab_LumaCoeffVer_32_avx512 + r4 + 1 * mmsize]
+    mova                  m10,                [tab_LumaCoeffVer_32_avx512 + r4 + 2 * mmsize]
+    mova                  m11,                [tab_LumaCoeffVer_32_avx512 + r4 + 3 * mmsize]
+%endif
+
+    vbroadcasti32x8       m7,                 [pw_512]
+%rep 7
+    PROCESS_LUMA_VERT_PP_48x8_AVX512
+    lea                   r0,                 [r4]
+    lea                   r2,                 [r2 + 4 * r3]
+%endrep
+    PROCESS_LUMA_VERT_PP_48x8_AVX512
+    RET
+%endmacro
+
+%if ARCH_X86_64
+    FILTER_VER_PP_LUMA_48x64_AVX512
+%endif
+
 %macro PROCESS_LUMA_VERT_PP_64x2_AVX512 0
     lea                   r5,                 [r0 + 4 * r1]
     movu                  m1,                 [r0]

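Not part of the patch: a hypothetical spot check of such a kernel against the scalar reference
sketched above, for all three fractional coefficient indices. The function-pointer type, buffer
sizes, and padding here are assumptions for illustration; x265's own test bench under source/test
is what validates these primitives in-tree.

#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <stdio.h>

typedef void (*vert_pp_fn)(const uint8_t* src, intptr_t srcStride,
                           uint8_t* dst, intptr_t dstStride, int coeffIdx);

/* Hypothetical check: compare an optimized 48x64 vertical pp filter against
 * interp_8tap_vert_pp_c() (defined earlier) on random pixel data. */
static int check_vert_pp_48x64(vert_pp_fn opt)
{
    enum { W = 48, H = 64, STRIDE = 64, PAD = 8 * STRIDE };  /* padding covers the 3/4 row overread */
    static uint8_t src[2 * PAD + H * STRIDE];
    static uint8_t refDst[H * STRIDE], optDst[H * STRIDE];

    for (size_t i = 0; i < sizeof(src); i++)
        src[i] = (uint8_t)rand();

    for (int idx = 1; idx <= 3; idx++)
    {
        interp_8tap_vert_pp_c(src + PAD, STRIDE, refDst, STRIDE, W, H, idx);
        opt(src + PAD, STRIDE, optDst, STRIDE, idx);
        for (int y = 0; y < H; y++)
        {
            if (memcmp(refDst + y * STRIDE, optDst + y * STRIDE, W))
            {
                printf("mismatch: coeffIdx %d row %d\n", idx, y);
                return 0;
            }
        }
    }
    return 1;
}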
