[x265] [PATCH 3 of 3] asm: avx2 code for sad_x3_64xN, over 40% faster than SSE
dnyaneshwar at multicorewareinc.com
Thu Sep 10 14:03:55 CEST 2015
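
[Editor's note, not part of the patch below: a sad_x3 primitive compares one encoder-side block against three reference candidates at once and returns the three sums of absolute differences. A minimal scalar sketch of that semantic follows, assuming the usual x265 conventions (the fenc block is packed at FENC_STRIDE, the three references share one stride); the real primitives fix the block height per PU rather than taking a height parameter, so the names and signature here are illustrative only.]

#include <cstdint>
#include <cstdlib>

// Reference-style SAD x3 for a 64-wide block; illustrative, not the patch's code.
static void sad_x3_64xH_ref(const uint8_t* fenc, const uint8_t* fref0,
                            const uint8_t* fref1, const uint8_t* fref2,
                            intptr_t frefstride, int height, int32_t res[3])
{
    const intptr_t FENC_STRIDE = 64;   // x265 packs the source block at this stride
    res[0] = res[1] = res[2] = 0;
    for (int y = 0; y < height; y++)
    {
        for (int x = 0; x < 64; x++)
        {
            res[0] += abs(fenc[x] - fref0[x]);
            res[1] += abs(fenc[x] - fref1[x]);
            res[2] += abs(fenc[x] - fref2[x]);
        }
        fenc  += FENC_STRIDE;
        fref0 += frefstride;
        fref1 += frefstride;
        fref2 += frefstride;
    }
}
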
# HG changeset patch
# User Dnyaneshwar G <dnyaneshwar at multicorewareinc.com>
# Date 1441886472 -19800
# Thu Sep 10 17:31:12 2015 +0530
# Node ID d31b9e8bdcf4f5fac2e3f0c567f1c90c1d19a382
# Parent 5b5d7438e90196d7974b9ceec2130b6c924e2342
asm: avx2 code for sad_x3_64xN, over 40% faster than SSE
diff -r 5b5d7438e901 -r d31b9e8bdcf4 source/common/x86/asm-primitives.cpp
--- a/source/common/x86/asm-primitives.cpp Thu Sep 10 17:18:03 2015 +0530
+++ b/source/common/x86/asm-primitives.cpp Thu Sep 10 17:31:12 2015 +0530
@@ -3576,6 +3576,11 @@
p.pu[LUMA_32x24].sad_x3 = PFX(pixel_sad_x3_32x24_avx2);
p.pu[LUMA_32x32].sad_x3 = PFX(pixel_sad_x3_32x32_avx2);
p.pu[LUMA_32x64].sad_x3 = PFX(pixel_sad_x3_32x64_avx2);
+ p.pu[LUMA_64x16].sad_x3 = PFX(pixel_sad_x3_64x16_avx2);
+ p.pu[LUMA_64x32].sad_x3 = PFX(pixel_sad_x3_64x32_avx2);
+ p.pu[LUMA_64x48].sad_x3 = PFX(pixel_sad_x3_64x48_avx2);
+ p.pu[LUMA_64x64].sad_x3 = PFX(pixel_sad_x3_64x64_avx2);
+ p.pu[LUMA_48x64].sad_x3 = PFX(pixel_sad_x3_48x64_avx2);
/* The following primitives have been disabled since performance compared to SSE is negligible/negative */
#if 0
diff -r 5b5d7438e901 -r d31b9e8bdcf4 source/common/x86/sad-a.asm
--- a/source/common/x86/sad-a.asm Thu Sep 10 17:18:03 2015 +0530
+++ b/source/common/x86/sad-a.asm Thu Sep 10 17:31:12 2015 +0530
@@ -4054,6 +4054,372 @@
paddd m2, m3
%endmacro
+%macro SAD_X3_64x8_AVX2 0
+ movu m3, [r0]
+ movu m4, [r1]
+ movu m5, [r2]
+ movu m6, [r3]
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ movu m3, [r0 + mmsize]
+ movu m4, [r1 + mmsize]
+ movu m5, [r2 + mmsize]
+ movu m6, [r3 + mmsize]
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ movu m3, [r0 + FENC_STRIDE]
+ movu m4, [r1 + r4]
+ movu m5, [r2 + r4]
+ movu m6, [r3 + r4]
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ movu m3, [r0 + FENC_STRIDE + mmsize]
+ movu m4, [r1 + r4 + mmsize]
+ movu m5, [r2 + r4 + mmsize]
+ movu m6, [r3 + r4 + mmsize]
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ movu m3, [r0 + FENC_STRIDE * 2]
+ movu m4, [r1 + r4 * 2]
+ movu m5, [r2 + r4 * 2]
+ movu m6, [r3 + r4 * 2]
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ movu m3, [r0 + FENC_STRIDE * 2 + mmsize]
+ movu m4, [r1 + r4 * 2 + mmsize]
+ movu m5, [r2 + r4 * 2 + mmsize]
+ movu m6, [r3 + r4 * 2 + mmsize]
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ movu m3, [r0 + FENC_STRIDE * 3]
+ movu m4, [r1 + r6]
+ movu m5, [r2 + r6]
+ movu m6, [r3 + r6]
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ movu m3, [r0 + FENC_STRIDE * 3 + mmsize]
+ movu m4, [r1 + r6 + mmsize]
+ movu m5, [r2 + r6 + mmsize]
+ movu m6, [r3 + r6 + mmsize]
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ lea r0, [r0 + FENC_STRIDE * 4]
+ lea r1, [r1 + r4 * 4]
+ lea r2, [r2 + r4 * 4]
+ lea r3, [r3 + r4 * 4]
+
+ movu m3, [r0]
+ movu m4, [r1]
+ movu m5, [r2]
+ movu m6, [r3]
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ movu m3, [r0 + mmsize]
+ movu m4, [r1 + mmsize]
+ movu m5, [r2 + mmsize]
+ movu m6, [r3 + mmsize]
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ movu m3, [r0 + FENC_STRIDE]
+ movu m4, [r1 + r4]
+ movu m5, [r2 + r4]
+ movu m6, [r3 + r4]
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ movu m3, [r0 + FENC_STRIDE + mmsize]
+ movu m4, [r1 + r4 + mmsize]
+ movu m5, [r2 + r4 + mmsize]
+ movu m6, [r3 + r4 + mmsize]
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ movu m3, [r0 + FENC_STRIDE * 2]
+ movu m4, [r1 + r4 * 2]
+ movu m5, [r2 + r4 * 2]
+ movu m6, [r3 + r4 * 2]
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ movu m3, [r0 + FENC_STRIDE * 2 + mmsize]
+ movu m4, [r1 + r4 * 2 + mmsize]
+ movu m5, [r2 + r4 * 2 + mmsize]
+ movu m6, [r3 + r4 * 2 + mmsize]
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ movu m3, [r0 + FENC_STRIDE * 3]
+ movu m4, [r1 + r6]
+ movu m5, [r2 + r6]
+ movu m6, [r3 + r6]
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ movu m3, [r0 + FENC_STRIDE * 3 + mmsize]
+ movu m4, [r1 + r6 + mmsize]
+ movu m5, [r2 + r6 + mmsize]
+ movu m6, [r3 + r6 + mmsize]
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+%endmacro
+
+%macro SAD_X3_48x8_AVX2 0
+ movu m3, [r0]
+ movu m4, [r1]
+ movu m5, [r2]
+ movu m6, [r3]
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ movu xm3, [r0 + mmsize]
+ movu xm4, [r1 + mmsize]
+ movu xm5, [r2 + mmsize]
+ movu xm6, [r3 + mmsize]
+ vinserti128 m3, m3, [r0 + FENC_STRIDE], 1
+ vinserti128 m4, m4, [r1 + r4], 1
+ vinserti128 m5, m5, [r2 + r4], 1
+ vinserti128 m6, m6, [r3 + r4], 1
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ movu m3, [r0 + FENC_STRIDE + mmsize/2]
+ movu m4, [r1 + r4 + mmsize/2]
+ movu m5, [r2 + r4 + mmsize/2]
+ movu m6, [r3 + r4 + mmsize/2]
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ movu m3, [r0 + FENC_STRIDE * 2]
+ movu m4, [r1 + r4 * 2]
+ movu m5, [r2 + r4 * 2]
+ movu m6, [r3 + r4 * 2]
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ movu xm3, [r0 + FENC_STRIDE * 2 + mmsize]
+ movu xm4, [r1 + r4 * 2 + mmsize]
+ movu xm5, [r2 + r4 * 2 + mmsize]
+ movu xm6, [r3 + r4 * 2 + mmsize]
+ vinserti128 m3, m3, [r0 + FENC_STRIDE * 3], 1
+ vinserti128 m4, m4, [r1 + r6], 1
+ vinserti128 m5, m5, [r2 + r6], 1
+ vinserti128 m6, m6, [r3 + r6], 1
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ movu m3, [r0 + FENC_STRIDE * 3 + mmsize/2]
+ movu m4, [r1 + r6 + mmsize/2]
+ movu m5, [r2 + r6 + mmsize/2]
+ movu m6, [r3 + r6 + mmsize/2]
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ lea r0, [r0 + FENC_STRIDE * 4]
+ lea r1, [r1 + r4 * 4]
+ lea r2, [r2 + r4 * 4]
+ lea r3, [r3 + r4 * 4]
+
+ movu m3, [r0]
+ movu m4, [r1]
+ movu m5, [r2]
+ movu m6, [r3]
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ movu xm3, [r0 + mmsize]
+ movu xm4, [r1 + mmsize]
+ movu xm5, [r2 + mmsize]
+ movu xm6, [r3 + mmsize]
+ vinserti128 m3, m3, [r0 + FENC_STRIDE], 1
+ vinserti128 m4, m4, [r1 + r4], 1
+ vinserti128 m5, m5, [r2 + r4], 1
+ vinserti128 m6, m6, [r3 + r4], 1
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ movu m3, [r0 + FENC_STRIDE + mmsize/2]
+ movu m4, [r1 + r4 + mmsize/2]
+ movu m5, [r2 + r4 + mmsize/2]
+ movu m6, [r3 + r4 + mmsize/2]
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ movu m3, [r0 + FENC_STRIDE * 2]
+ movu m4, [r1 + r4 * 2]
+ movu m5, [r2 + r4 * 2]
+ movu m6, [r3 + r4 * 2]
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ movu xm3, [r0 + FENC_STRIDE * 2 + mmsize]
+ movu xm4, [r1 + r4 * 2 + mmsize]
+ movu xm5, [r2 + r4 * 2 + mmsize]
+ movu xm6, [r3 + r4 * 2 + mmsize]
+ vinserti128 m3, m3, [r0 + FENC_STRIDE * 3], 1
+ vinserti128 m4, m4, [r1 + r6], 1
+ vinserti128 m5, m5, [r2 + r6], 1
+ vinserti128 m6, m6, [r3 + r6], 1
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+
+ movu m3, [r0 + FENC_STRIDE * 3 + mmsize/2]
+ movu m4, [r1 + r6 + mmsize/2]
+ movu m5, [r2 + r6 + mmsize/2]
+ movu m6, [r3 + r6 + mmsize/2]
+
+ psadbw m7, m3, m4
+ paddd m0, m7
+ psadbw m4, m3, m5
+ paddd m1, m4
+ psadbw m3, m6
+ paddd m2, m3
+%endmacro
+
%macro PIXEL_SAD_X3_END_AVX2 0
vextracti128 xm3, m0, 1
vextracti128 xm4, m1, 1
@@ -4213,6 +4579,217 @@
SAD_X3_32x8_AVX2
PIXEL_SAD_X3_END_AVX2
RET
+
+cglobal pixel_sad_x3_64x16, 6,7,8
+ pxor m0, m0
+ pxor m1, m1
+ pxor m2, m2
+ lea r6, [r4 * 3]
+
+ SAD_X3_64x8_AVX2
+
+ lea r0, [r0 + FENC_STRIDE * 4]
+ lea r1, [r1 + r4 * 4]
+ lea r2, [r2 + r4 * 4]
+ lea r3, [r3 + r4 * 4]
+
+ SAD_X3_64x8_AVX2
+ PIXEL_SAD_X3_END_AVX2
+ RET
+
+cglobal pixel_sad_x3_64x32, 6,7,8
+ pxor m0, m0
+ pxor m1, m1
+ pxor m2, m2
+ lea r6, [r4 * 3]
+
+ SAD_X3_64x8_AVX2
+
+ lea r0, [r0 + FENC_STRIDE * 4]
+ lea r1, [r1 + r4 * 4]
+ lea r2, [r2 + r4 * 4]
+ lea r3, [r3 + r4 * 4]
+
+ SAD_X3_64x8_AVX2
+
+ lea r0, [r0 + FENC_STRIDE * 4]
+ lea r1, [r1 + r4 * 4]
+ lea r2, [r2 + r4 * 4]
+ lea r3, [r3 + r4 * 4]
+
+ SAD_X3_64x8_AVX2
+
+ lea r0, [r0 + FENC_STRIDE * 4]
+ lea r1, [r1 + r4 * 4]
+ lea r2, [r2 + r4 * 4]
+ lea r3, [r3 + r4 * 4]
+
+ SAD_X3_64x8_AVX2
+ PIXEL_SAD_X3_END_AVX2
+ RET
+
+cglobal pixel_sad_x3_64x48, 6,7,8
+ pxor m0, m0
+ pxor m1, m1
+ pxor m2, m2
+ lea r6, [r4 * 3]
+
+ SAD_X3_64x8_AVX2
+
+ lea r0, [r0 + FENC_STRIDE * 4]
+ lea r1, [r1 + r4 * 4]
+ lea r2, [r2 + r4 * 4]
+ lea r3, [r3 + r4 * 4]
+
+ SAD_X3_64x8_AVX2
+
+ lea r0, [r0 + FENC_STRIDE * 4]
+ lea r1, [r1 + r4 * 4]
+ lea r2, [r2 + r4 * 4]
+ lea r3, [r3 + r4 * 4]
+
+ SAD_X3_64x8_AVX2
+
+ lea r0, [r0 + FENC_STRIDE * 4]
+ lea r1, [r1 + r4 * 4]
+ lea r2, [r2 + r4 * 4]
+ lea r3, [r3 + r4 * 4]
+
+ SAD_X3_64x8_AVX2
+
+ lea r0, [r0 + FENC_STRIDE * 4]
+ lea r1, [r1 + r4 * 4]
+ lea r2, [r2 + r4 * 4]
+ lea r3, [r3 + r4 * 4]
+
+ SAD_X3_64x8_AVX2
+
+ lea r0, [r0 + FENC_STRIDE * 4]
+ lea r1, [r1 + r4 * 4]
+ lea r2, [r2 + r4 * 4]
+ lea r3, [r3 + r4 * 4]
+
+ SAD_X3_64x8_AVX2
+ PIXEL_SAD_X3_END_AVX2
+ RET
+
+cglobal pixel_sad_x3_64x64, 6,7,8
+ pxor m0, m0
+ pxor m1, m1
+ pxor m2, m2
+ lea r6, [r4 * 3]
+
+ SAD_X3_64x8_AVX2
+
+ lea r0, [r0 + FENC_STRIDE * 4]
+ lea r1, [r1 + r4 * 4]
+ lea r2, [r2 + r4 * 4]
+ lea r3, [r3 + r4 * 4]
+
+ SAD_X3_64x8_AVX2
+
+ lea r0, [r0 + FENC_STRIDE * 4]
+ lea r1, [r1 + r4 * 4]
+ lea r2, [r2 + r4 * 4]
+ lea r3, [r3 + r4 * 4]
+
+ SAD_X3_64x8_AVX2
+
+ lea r0, [r0 + FENC_STRIDE * 4]
+ lea r1, [r1 + r4 * 4]
+ lea r2, [r2 + r4 * 4]
+ lea r3, [r3 + r4 * 4]
+
+ SAD_X3_64x8_AVX2
+
+ lea r0, [r0 + FENC_STRIDE * 4]
+ lea r1, [r1 + r4 * 4]
+ lea r2, [r2 + r4 * 4]
+ lea r3, [r3 + r4 * 4]
+
+ SAD_X3_64x8_AVX2
+
+ lea r0, [r0 + FENC_STRIDE * 4]
+ lea r1, [r1 + r4 * 4]
+ lea r2, [r2 + r4 * 4]
+ lea r3, [r3 + r4 * 4]
+
+ SAD_X3_64x8_AVX2
+
+ lea r0, [r0 + FENC_STRIDE * 4]
+ lea r1, [r1 + r4 * 4]
+ lea r2, [r2 + r4 * 4]
+ lea r3, [r3 + r4 * 4]
+
+ SAD_X3_64x8_AVX2
+
+ lea r0, [r0 + FENC_STRIDE * 4]
+ lea r1, [r1 + r4 * 4]
+ lea r2, [r2 + r4 * 4]
+ lea r3, [r3 + r4 * 4]
+
+ SAD_X3_64x8_AVX2
+ PIXEL_SAD_X3_END_AVX2
+ RET
+
+cglobal pixel_sad_x3_48x64, 6,7,8
+ pxor m0, m0
+ pxor m1, m1
+ pxor m2, m2
+ lea r6, [r4 * 3]
+
+ SAD_X3_48x8_AVX2
+
+ lea r0, [r0 + FENC_STRIDE * 4]
+ lea r1, [r1 + r4 * 4]
+ lea r2, [r2 + r4 * 4]
+ lea r3, [r3 + r4 * 4]
+
+ SAD_X3_48x8_AVX2
+
+ lea r0, [r0 + FENC_STRIDE * 4]
+ lea r1, [r1 + r4 * 4]
+ lea r2, [r2 + r4 * 4]
+ lea r3, [r3 + r4 * 4]
+
+ SAD_X3_48x8_AVX2
+
+ lea r0, [r0 + FENC_STRIDE * 4]
+ lea r1, [r1 + r4 * 4]
+ lea r2, [r2 + r4 * 4]
+ lea r3, [r3 + r4 * 4]
+
+ SAD_X3_48x8_AVX2
+
+ lea r0, [r0 + FENC_STRIDE * 4]
+ lea r1, [r1 + r4 * 4]
+ lea r2, [r2 + r4 * 4]
+ lea r3, [r3 + r4 * 4]
+
+ SAD_X3_48x8_AVX2
+
+ lea r0, [r0 + FENC_STRIDE * 4]
+ lea r1, [r1 + r4 * 4]
+ lea r2, [r2 + r4 * 4]
+ lea r3, [r3 + r4 * 4]
+
+ SAD_X3_48x8_AVX2
+
+ lea r0, [r0 + FENC_STRIDE * 4]
+ lea r1, [r1 + r4 * 4]
+ lea r2, [r2 + r4 * 4]
+ lea r3, [r3 + r4 * 4]
+
+ SAD_X3_48x8_AVX2
+
+ lea r0, [r0 + FENC_STRIDE * 4]
+ lea r1, [r1 + r4 * 4]
+ lea r2, [r2 + r4 * 4]
+ lea r3, [r3 + r4 * 4]
+
+ SAD_X3_48x8_AVX2
+ PIXEL_SAD_X3_END_AVX2
+ RET
%endif
INIT_YMM avx2
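
[Editor's note on the structure of the new kernels, illustrative and not part of the patch: each 64-pixel row is read as two 32-byte ymm loads, VPSADBW against each of the three references yields partial sums per 8-byte group, and three accumulators (m0/m1/m2) carry them until PIXEL_SAD_X3_END_AVX2 reduces them. A hypothetical C++ intrinsics sketch of the same idea, with names of my own choosing:]

#include <immintrin.h>
#include <cstdint>

static void sad_x3_64xH_avx2_sketch(const uint8_t* fenc, const uint8_t* ref0,
                                    const uint8_t* ref1, const uint8_t* ref2,
                                    intptr_t refStride, int height, int32_t res[3])
{
    const intptr_t FENC_STRIDE = 64;          // packed source block, as in the asm
    __m256i s0 = _mm256_setzero_si256();
    __m256i s1 = _mm256_setzero_si256();
    __m256i s2 = _mm256_setzero_si256();
    for (int y = 0; y < height; y++)
    {
        for (int x = 0; x < 64; x += 32)      // two 32-byte halves per 64-pixel row
        {
            __m256i e  = _mm256_loadu_si256((const __m256i*)(fenc + x));
            __m256i r0 = _mm256_loadu_si256((const __m256i*)(ref0 + x));
            __m256i r1 = _mm256_loadu_si256((const __m256i*)(ref1 + x));
            __m256i r2 = _mm256_loadu_si256((const __m256i*)(ref2 + x));
            // VPSADBW leaves the upper bits of each 64-bit lane zero, so the
            // assembly's paddd and this add_epi64 accumulate the same values.
            s0 = _mm256_add_epi64(s0, _mm256_sad_epu8(e, r0));
            s1 = _mm256_add_epi64(s1, _mm256_sad_epu8(e, r1));
            s2 = _mm256_add_epi64(s2, _mm256_sad_epu8(e, r2));
        }
        fenc += FENC_STRIDE;
        ref0 += refStride;
        ref1 += refStride;
        ref2 += refStride;
    }
    // Horizontal reduction, the role PIXEL_SAD_X3_END_AVX2 plays in the asm.
    auto reduce = [](__m256i v) -> int32_t {
        __m128i s = _mm_add_epi64(_mm256_castsi256_si128(v),
                                  _mm256_extracti128_si256(v, 1));
        s = _mm_add_epi64(s, _mm_unpackhi_epi64(s, s));
        return _mm_cvtsi128_si32(s);          // sums fit in 32 bits for 8-bit pixels
    };
    res[0] = reduce(s0);
    res[1] = reduce(s1);
    res[2] = reduce(s2);
}

[The 48x64 variant in the patch follows the same pattern but splits each 48-pixel row into one 32-byte load plus a 16-byte load, pairing the 16-byte tails of adjacent rows into a single ymm register via vinserti128 so that VPSADBW stays fully utilized.]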