[x265] [PATCH] asm: 10bpp avx2 code for intra_pred_ang32x32 mode 9, 10, 26 & 27
dnyaneshwar at multicorewareinc.com
Wed Jun 10 14:05:55 CEST 2015
# HG changeset patch
# User Dnyaneshwar G <dnyaneshwar at multicorewareinc.com>
# Date 1433937427 -19800
# Wed Jun 10 17:27:07 2015 +0530
# Node ID ba355a7921b4a6adfe1bea6b57699489655b41f8
# Parent 6fad2ccf68ce2419290b238fd57c49f64fc62d3e
asm: 10bpp avx2 code for intra_pred_ang32x32 mode 9, 10, 26 & 27
Performance improvement over SSE:

intra_ang_32x32[ 9]    8282c -> 4071c, 51%
intra_ang_32x32[27]    5678c -> 2777c, 51%
intra_ang_32x32[10]    1290c ->  643c, 50%
intra_ang_32x32[26]    1268c ->  640c, 50%
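
Modes 10 and 26 are the pure horizontal and vertical angular modes, so the
new AVX2 bodies reduce to a vpbroadcastw of one left-neighbour pixel per row
(mode 10) and repeated movu copies of the above row (mode 26); the HEVC edge
filter controlled by bFilter does not apply at 32x32, which is why the flag
is ignored. As a rough scalar sketch of the behaviour (illustrative only,
not x265's C reference code; ang32_mode10_c/ang32_mode26_c are hypothetical
names, with neighbour offsets inferred from the asm below: above row at
srcPix + 1, left column at srcPix + 65 for a 32x32 block):

    #include <stdint.h>

    typedef uint16_t pixel;   /* 10bpp build */

    /* mode 10: pure horizontal - each row replicates one left-neighbour pixel */
    static void ang32_mode10_c(pixel* dst, intptr_t dstStride, const pixel* srcPix)
    {
        const pixel* left = srcPix + 65;   /* matches "add r2, 128" (bytes) then [r2 + 2] */
        for (int y = 0; y < 32; y++)
            for (int x = 0; x < 32; x++)
                dst[y * dstStride + x] = left[y];
    }

    /* mode 26: pure vertical - every row is a copy of the above row */
    static void ang32_mode26_c(pixel* dst, intptr_t dstStride, const pixel* srcPix)
    {
        const pixel* above = srcPix + 1;   /* matches "movu m0, [r2 + 2]" */
        for (int y = 0; y < 32; y++)
            for (int x = 0; x < 32; x++)
                dst[y * dstStride + x] = above[x];
    }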
diff -r 6fad2ccf68ce -r ba355a7921b4 source/common/x86/asm-primitives.cpp
--- a/source/common/x86/asm-primitives.cpp Wed Jun 10 16:25:46 2015 +0530
+++ b/source/common/x86/asm-primitives.cpp Wed Jun 10 17:27:07 2015 +0530
@@ -1263,6 +1263,10 @@
p.cu[BLOCK_32x32].intra_pred[6] = x265_intra_pred_ang32_6_avx2;
p.cu[BLOCK_32x32].intra_pred[7] = x265_intra_pred_ang32_7_avx2;
p.cu[BLOCK_32x32].intra_pred[8] = x265_intra_pred_ang32_8_avx2;
+ p.cu[BLOCK_32x32].intra_pred[9] = x265_intra_pred_ang32_9_avx2;
+ p.cu[BLOCK_32x32].intra_pred[10] = x265_intra_pred_ang32_10_avx2;
+ p.cu[BLOCK_32x32].intra_pred[26] = x265_intra_pred_ang32_26_avx2;
+ p.cu[BLOCK_32x32].intra_pred[27] = x265_intra_pred_ang32_27_avx2;
p.cu[BLOCK_32x32].intra_pred[28] = x265_intra_pred_ang32_28_avx2;
p.cu[BLOCK_32x32].intra_pred[29] = x265_intra_pred_ang32_29_avx2;
p.cu[BLOCK_32x32].intra_pred[30] = x265_intra_pred_ang32_30_avx2;
diff -r 6fad2ccf68ce -r ba355a7921b4 source/common/x86/intrapred.h
--- a/source/common/x86/intrapred.h Wed Jun 10 16:25:46 2015 +0530
+++ b/source/common/x86/intrapred.h Wed Jun 10 17:27:07 2015 +0530
@@ -282,6 +282,8 @@
void x265_intra_pred_ang32_6_avx2(pixel* dst, intptr_t dstStride, const pixel* srcPix, int dirMode, int bFilter);
void x265_intra_pred_ang32_7_avx2(pixel* dst, intptr_t dstStride, const pixel* srcPix, int dirMode, int bFilter);
void x265_intra_pred_ang32_8_avx2(pixel* dst, intptr_t dstStride, const pixel* srcPix, int dirMode, int bFilter);
+void x265_intra_pred_ang32_9_avx2(pixel* dst, intptr_t dstStride, const pixel* srcPix, int dirMode, int bFilter);
+void x265_intra_pred_ang32_10_avx2(pixel* dst, intptr_t dstStride, const pixel* srcPix, int dirMode, int bFilter);
void x265_intra_pred_ang32_26_avx2(pixel* dst, intptr_t dstStride, const pixel* srcPix, int dirMode, int bFilter);
void x265_intra_pred_ang32_27_avx2(pixel* dst, intptr_t dstStride, const pixel* srcPix, int dirMode, int bFilter);
void x265_intra_pred_ang32_28_avx2(pixel* dst, intptr_t dstStride, const pixel* srcPix, int dirMode, int bFilter);
diff -r 6fad2ccf68ce -r ba355a7921b4 source/common/x86/intrapred16.asm
--- a/source/common/x86/intrapred16.asm Wed Jun 10 16:25:46 2015 +0530
+++ b/source/common/x86/intrapred16.asm Wed Jun 10 17:27:07 2015 +0530
@@ -14390,6 +14390,259 @@
call ang32_mode_8_28
RET
+
+cglobal intra_pred_ang32_9, 3,8,13
+ add r2, 128
+ xor r6d, r6d
+ lea r3, [ang_table_avx2 + 16 * 32]
+ add r1d, r1d
+ lea r4, [r1 * 3]
+ lea r7, [r0 + 8 * r1]
+
+ call ang16_mode_9_27
+
+ add r2, 2
+ lea r0, [r0 + 32]
+
+ call ang16_mode_9_27
+
+ add r2, 30
+ lea r0, [r7 + 8 * r1]
+
+ call ang16_mode_9_27
+
+ add r2, 2
+ lea r0, [r0 + 32]
+
+ call ang16_mode_9_27
+ RET
+
+cglobal intra_pred_ang32_27, 3,7,13
+ xor r6d, r6d
+ inc r6d
+ lea r3, [ang_table_avx2 + 16 * 32]
+ add r1d, r1d
+ lea r4, [r1 * 3]
+ lea r5, [r0 + 32]
+
+ call ang16_mode_9_27
+
+ add r2, 2
+
+ call ang16_mode_9_27
+
+ add r2, 30
+ mov r0, r5
+
+ call ang16_mode_9_27
+
+ add r2, 2
+
+ call ang16_mode_9_27
+ RET
+
+cglobal intra_pred_ang32_10, 3,4,2
+ add r2, mmsize*4
+ add r1d, r1d
+ lea r3, [r1 * 3]
+
+ vpbroadcastw m0, [r2 + 2] ; [1...]
+ movu [r0], m0
+ movu [r0 + 32], m0
+ vpbroadcastw m1, [r2 + 2 + 2] ; [2...]
+ movu [r0 + r1], m1
+ movu [r0 + r1 + 32], m1
+ vpbroadcastw m0, [r2 + 2 + 4] ; [3...]
+ movu [r0 + r1 * 2], m0
+ movu [r0 + r1 * 2 + 32], m0
+ vpbroadcastw m1, [r2 + 2 + 6] ; [4...]
+ movu [r0 + r3], m1
+ movu [r0 + r3 + 32], m1
+
+ lea r0, [r0 + r1 * 4]
+ vpbroadcastw m0, [r2 + 2 + 8] ; [5...]
+ movu [r0], m0
+ movu [r0 + 32], m0
+ vpbroadcastw m1, [r2 + 2 + 10] ; [6...]
+ movu [r0 + r1], m1
+ movu [r0 + r1 + 32], m1
+ vpbroadcastw m0, [r2 + 2 + 12] ; [7...]
+ movu [r0 + r1 * 2], m0
+ movu [r0 + r1 * 2 + 32], m0
+ vpbroadcastw m1, [r2 + 2 + 14] ; [8...]
+ movu [r0 + r3], m1
+ movu [r0 + r3 + 32], m1
+
+ lea r0, [r0 + r1 * 4]
+ vpbroadcastw m0, [r2 + 2 + 16] ; [9...]
+ movu [r0], m0
+ movu [r0 + 32], m0
+ vpbroadcastw m1, [r2 + 2 + 18] ; [10...]
+ movu [r0 + r1], m1
+ movu [r0 + r1 + 32], m1
+ vpbroadcastw m0, [r2 + 2 + 20] ; [11...]
+ movu [r0 + r1 * 2], m0
+ movu [r0 + r1 * 2 + 32], m0
+ vpbroadcastw m1, [r2 + 2 + 22] ; [12...]
+ movu [r0 + r3], m1
+ movu [r0 + r3 + 32], m1
+
+ lea r0, [r0 + r1 * 4]
+ vpbroadcastw m0, [r2 + 2 + 24] ; [13...]
+ movu [r0], m0
+ movu [r0 + 32], m0
+ vpbroadcastw m1, [r2 + 2 + 26] ; [14...]
+ movu [r0 + r1], m1
+ movu [r0 + r1 + 32], m1
+ vpbroadcastw m0, [r2 + 2 + 28] ; [15...]
+ movu [r0 + r1 * 2], m0
+ movu [r0 + r1 * 2 + 32], m0
+ vpbroadcastw m1, [r2 + 2 + 30] ; [16...]
+ movu [r0 + r3], m1
+ movu [r0 + r3 + 32], m1
+
+ lea r0, [r0 + r1 * 4]
+ vpbroadcastw m0, [r2 + 2 + 32] ; [17...]
+ movu [r0], m0
+ movu [r0 + 32], m0
+ vpbroadcastw m1, [r2 + 2 + 34] ; [18...]
+ movu [r0 + r1], m1
+ movu [r0 + r1 + 32], m1
+ vpbroadcastw m0, [r2 + 2 + 36] ; [19...]
+ movu [r0 + r1 * 2], m0
+ movu [r0 + r1 * 2 + 32], m0
+ vpbroadcastw m1, [r2 + 2 + 38] ; [20...]
+ movu [r0 + r3], m1
+ movu [r0 + r3 + 32], m1
+
+ lea r0, [r0 + r1 * 4]
+ vpbroadcastw m0, [r2 + 2 + 40] ; [21...]
+ movu [r0], m0
+ movu [r0 + 32], m0
+ vpbroadcastw m1, [r2 + 2 + 42] ; [22...]
+ movu [r0 + r1], m1
+ movu [r0 + r1 + 32], m1
+ vpbroadcastw m0, [r2 + 2 + 44] ; [23...]
+ movu [r0 + r1 * 2], m0
+ movu [r0 + r1 * 2 + 32], m0
+ vpbroadcastw m1, [r2 + 2 + 46] ; [24...]
+ movu [r0 + r3], m1
+ movu [r0 + r3 + 32], m1
+
+ lea r0, [r0 + r1 * 4]
+ vpbroadcastw m0, [r2 + 2 + 48] ; [25...]
+ movu [r0], m0
+ movu [r0 + 32], m0
+ vpbroadcastw m1, [r2 + 2 + 50] ; [26...]
+ movu [r0 + r1], m1
+ movu [r0 + r1 + 32], m1
+ vpbroadcastw m0, [r2 + 2 + 52] ; [27...]
+ movu [r0 + r1 * 2], m0
+ movu [r0 + r1 * 2 + 32], m0
+ vpbroadcastw m1, [r2 + 2 + 54] ; [28...]
+ movu [r0 + r3], m1
+ movu [r0 + r3 + 32], m1
+
+ lea r0, [r0 + r1 * 4]
+ vpbroadcastw m0, [r2 + 2 + 56] ; [29...]
+ movu [r0], m0
+ movu [r0 + 32], m0
+ vpbroadcastw m1, [r2 + 2 + 58] ; [30...]
+ movu [r0 + r1], m1
+ movu [r0 + r1 + 32], m1
+ vpbroadcastw m0, [r2 + 2 + 60] ; [31...]
+ movu [r0 + r1 * 2], m0
+ movu [r0 + r1 * 2 + 32], m0
+ vpbroadcastw m1, [r2 + 2 + 62] ; [32...]
+ movu [r0 + r3], m1
+ movu [r0 + r3 + 32], m1
+ RET
+
+cglobal intra_pred_ang32_26, 3,3,2
+ movu m0, [r2 + 2]
+ movu m1, [r2 + 34]
+ add r1d, r1d
+ lea r2, [r1 * 3]
+
+ movu [r0], m0
+ movu [r0 + 32], m1
+ movu [r0 + r1], m0
+ movu [r0 + r1 + 32], m1
+ movu [r0 + r1 * 2], m0
+ movu [r0 + r1 * 2 + 32], m1
+ movu [r0 + r2], m0
+ movu [r0 + r2 + 32], m1
+
+ lea r0, [r0 + r1 * 4]
+ movu [r0], m0
+ movu [r0 + 32], m1
+ movu [r0 + r1], m0
+ movu [r0 + r1 + 32], m1
+ movu [r0 + r1 * 2], m0
+ movu [r0 + r1 * 2 + 32], m1
+ movu [r0 + r2], m0
+ movu [r0 + r2 + 32], m1
+
+ lea r0, [r0 + r1 * 4]
+ movu [r0], m0
+ movu [r0 + 32], m1
+ movu [r0 + r1], m0
+ movu [r0 + r1 + 32], m1
+ movu [r0 + r1 * 2], m0
+ movu [r0 + r1 * 2 + 32], m1
+ movu [r0 + r2], m0
+ movu [r0 + r2 + 32], m1
+
+ lea r0, [r0 + r1 * 4]
+ movu [r0], m0
+ movu [r0 + 32], m1
+ movu [r0 + r1], m0
+ movu [r0 + r1 + 32], m1
+ movu [r0 + r1 * 2], m0
+ movu [r0 + r1 * 2 + 32], m1
+ movu [r0 + r2], m0
+ movu [r0 + r2 + 32], m1
+
+ lea r0, [r0 + r1 * 4]
+ movu [r0], m0
+ movu [r0 + 32], m1
+ movu [r0 + r1], m0
+ movu [r0 + r1 + 32], m1
+ movu [r0 + r1 * 2], m0
+ movu [r0 + r1 * 2 + 32], m1
+ movu [r0 + r2], m0
+ movu [r0 + r2 + 32], m1
+
+ lea r0, [r0 + r1 * 4]
+ movu [r0], m0
+ movu [r0 + 32], m1
+ movu [r0 + r1], m0
+ movu [r0 + r1 + 32], m1
+ movu [r0 + r1 * 2], m0
+ movu [r0 + r1 * 2 + 32], m1
+ movu [r0 + r2], m0
+ movu [r0 + r2 + 32], m1
+
+ lea r0, [r0 + r1 * 4]
+ movu [r0], m0
+ movu [r0 + 32], m1
+ movu [r0 + r1], m0
+ movu [r0 + r1 + 32], m1
+ movu [r0 + r1 * 2], m0
+ movu [r0 + r1 * 2 + 32], m1
+ movu [r0 + r2], m0
+ movu [r0 + r2 + 32], m1
+
+ lea r0, [r0 + r1 * 4]
+ movu [r0], m0
+ movu [r0 + 32], m1
+ movu [r0 + r1], m0
+ movu [r0 + r1 + 32], m1
+ movu [r0 + r1 * 2], m0
+ movu [r0 + r1 * 2 + 32], m1
+ movu [r0 + r2], m0
+ movu [r0 + r2 + 32], m1
+ RET
;-------------------------------------------------------------------------------------------------------
; end of avx2 code for intra_pred_ang32 mode 2 to 34
;-------------------------------------------------------------------------------------------------------
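
A side note on structure: the 32x32 mode 9/27 kernels above are built from
four calls to the shared 16x16 core ang16_mode_9_27, one per quadrant. A
hypothetical C outline of the pointer arithmetic on the mode-9 path
(pred16/pred32 are illustrative stand-ins, not x265 API; offsets read from
the asm, where byte offsets halve to pixel offsets at 10bpp):

    #include <stdint.h>

    typedef uint16_t pixel;

    /* placeholder for the real 16x16 angular core (ang16_mode_9_27 in asm) */
    static void pred16(pixel* dst, intptr_t stride, const pixel* refLeft)
    {
        for (int y = 0; y < 16; y++)
            for (int x = 0; x < 16; x++)
                dst[y * stride + x] = refLeft[y];   /* stand-in, not the real filter */
    }

    static void pred32(pixel* dst, intptr_t stride, const pixel* refLeft)
    {
        pred16(dst,                    stride, refLeft);       /* top-left                 */
        pred16(dst + 16,               stride, refLeft + 1);   /* top-right:    add r2, 2  */
        pred16(dst + 16 * stride,      stride, refLeft + 16);  /* bottom-left:  add r2, 30 */
        pred16(dst + 16 * stride + 16, stride, refLeft + 17);  /* bottom-right: add r2, 2  */
    }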