[x265-commits] [x265] regression: overhaul test coverage, many more features covered in less test time
Steve Borho
steve at borho.org
Fri Mar 27 20:03:41 CET 2015
details: http://hg.videolan.org/x265/rev/01f17bab8e4d
branches: stable
changeset: 9924:01f17bab8e4d
user: Steve Borho <steve at borho.org>
date: Fri Mar 27 00:25:02 2015 -0500
description:
regression: overhaul test coverage, many more features covered in less test time
Subject: [x265] regression: test-file format can't handle commas in arguments
details: http://hg.videolan.org/x265/rev/22c59a45a7e7
branches: stable
changeset: 9925:22c59a45a7e7
user: Steve Borho <steve at borho.org>
date: Fri Mar 27 10:41:24 2015 -0500
description:
regression: test-file format can't handle commas in arguments
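For context on the limitation this commit addresses: regression-tests.txt stores one test per line as "input,options", so any option whose value itself contains a comma gets split apart. A minimal sketch of the failure mode (hypothetical C++, not the actual harness code; the clip name and --pools value are illustrative):

#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// split a test-file line on every comma, the way a naive parser would
static std::vector<std::string> splitOnComma(const std::string& line)
{
    std::vector<std::string> fields;
    std::stringstream ss(line);
    std::string field;
    while (std::getline(ss, field, ','))
        fields.push_back(field);
    return fields;
}

int main()
{
    // "--pools 1,2" is a single argument, but the comma splits it in two
    std::string line = "BasketballDrive.y4m,--preset medium --pools 1,2";
    for (const std::string& f : splitOnComma(line))
        std::cout << '[' << f << "]\n";
    // prints three fields instead of the intended two
}

A naive getline() split yields three fields here instead of the intended two, which is why the test-file format had to change.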
Subject: [x265] search: comment nits
details: http://hg.videolan.org/x265/rev/83872c8f50e4
branches: stable
changeset: 9926:83872c8f50e4
user: Steve Borho <steve at borho.org>
date: Fri Mar 27 11:02:32 2015 -0500
description:
search: comment nits
Subject: [x265] asm: intra_pred_ang32_33 improved by ~35% over SSE4
details: http://hg.videolan.org/x265/rev/6d6a736efd39
branches:
changeset: 9927:6d6a736efd39
user: Praveen Tiwari <praveen at multicorewareinc.com>
date: Thu Mar 26 13:20:04 2015 +0530
description:
asm: intra_pred_ang32_33 improved by ~35% over SSE4
AVX2 (speedup vs C / asm cycles / C reference cycles):
intra_ang_32x32[33]  11.11x  2618.69  29084.27
SSE4:
intra_ang_32x32[33]   7.59x  4055.42  30792.64
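For readers unfamiliar with these kernels: HEVC angular intra prediction blends each predicted pixel from two reference samples using 5-bit fractional weights, and the (32-f, f) byte pairs in the c_ang32_mode_33/25/24 tables in the diff below are exactly those weights, interleaved for pmaddubsw. A scalar sketch of the per-row computation the AVX2 code vectorizes (illustrative names, not the x265 C reference verbatim):

#include <cstdint>

// For mode 33 the HEVC intra angle is 26, so row y uses fraction
// f = ((y + 1) * 26) & 31 and weights (32 - f, f) -- the same byte
// pairs stored in c_ang32_mode_33.  Negative-angle modes such as 24
// and 25 step backward through an extended reference array, but the
// per-pixel blend is identical.
static void predAngRow(uint8_t* dst, const uint8_t* ref, int width, int angle, int y)
{
    int deltaPos = (y + 1) * angle;
    int off = deltaPos >> 5;   // integer offset into the reference row
    int f   = deltaPos & 31;   // 5-bit fractional position
    for (int x = 0; x < width; x++)
        dst[x] = (uint8_t)(((32 - f) * ref[off + x] + f * ref[off + x + 1] + 16) >> 5);
}

For mode 33, row 0 gives f = 26 and weights (6, 26), matching the first row of the constant table below.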
Subject: [x265] asm: intra_pred_ang32_25 improved by ~53% over SSE4
details: http://hg.videolan.org/x265/rev/277625e699fb
branches:
changeset: 9928:277625e699fb
user: Praveen Tiwari <praveen at multicorewareinc.com>
date: Thu Mar 26 14:23:20 2015 +0530
description:
asm: intra_pred_ang32_25 improved by ~53% over SSE4
AVX2 (speedup vs C / asm cycles / C reference cycles):
intra_ang_32x32[25]  23.11x  1293.83  29904.12
SSE4:
intra_ang_32x32[25]  10.31x  2759.33  28451.26
Subject: [x265] asm: intra_pred_ang32_24 improved by ~53% over SSE4
details: http://hg.videolan.org/x265/rev/72e664803190
branches:
changeset: 9929:72e664803190
user: Praveen Tiwari <praveen at multicorewareinc.com>
date: Thu Mar 26 16:50:20 2015 +0530
description:
asm: intra_pred_ang32_24 improved by ~53% over SSE4
Subject: [x265] asm: scale2D_64to32 avx2 code
details: http://hg.videolan.org/x265/rev/50ca30ea3826
branches:
changeset: 9930:50ca30ea3826
user: Dnyaneshwar G <dnyaneshwar at multicorewareinc.com>
date: Fri Mar 27 11:27:24 2015 +0530
description:
asm: scale2D_64to32 avx2 code
AVX2 (speedup vs C / asm cycles / C reference cycles):
scale2D_64to32  10.41x  3861.30  40192.99
scale2D_64to32  10.35x  3880.97  40175.66
SSSE3:
scale2D_64to32   5.44x  7454.44  40576.51
scale2D_64to32   5.45x  7445.73  40613.14
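For reference, scale2D_64to32 downsamples a 64x64 pixel block to 32x32. A scalar sketch, assuming the primitive is the simple 2x2 box average with rounding used by the C reference (8bpp pixel type assumed):

#include <cstdint>

typedef uint8_t pixel;   // 8bpp build assumed

static void scale2D_64to32_c(pixel* dst, const pixel* src, intptr_t stride)
{
    for (int y = 0; y < 64; y += 2)
        for (int x = 0; x < 64; x += 2)
        {
            // average each 2x2 neighborhood with rounding
            int sum = src[y * stride + x] + src[y * stride + x + 1]
                    + src[(y + 1) * stride + x] + src[(y + 1) * stride + x + 1];
            dst[(y / 2) * 32 + (x / 2)] = (pixel)((sum + 2) >> 2);
        }
}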
Subject: [x265] param: add missing rdoq-level and deblock offsets to param2string()
details: http://hg.videolan.org/x265/rev/2da2b9dd7eb3
branches: stable
changeset: 9931:2da2b9dd7eb3
user: Steve Borho <steve at borho.org>
date: Fri Mar 27 13:08:47 2015 -0500
description:
param: add missing rdoq-level and deblock offsets to param2string()
replace the deprecated --[no-]lft with --[no-]deblock[=tC:B]
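The resulting option string mirrors the new CLI form: "deblock" alone when both offsets are zero, "deblock=tC:B" otherwise. A standalone sketch of the fragment's behavior (assumed variable names; see the param.cpp hunk in the merge diff below):

#include <cstdio>

int main()
{
    char buf[64];
    char* s = buf;
    int bEnableLoopFilter = 1;
    int tcOffset = 2, betaOffset = -1;   // illustrative values

    s += sprintf(s, " %s", bEnableLoopFilter ? "deblock" : "no-deblock");
    if (bEnableLoopFilter && (betaOffset || tcOffset))
        s += sprintf(s, "=%d:%d", tcOffset, betaOffset);

    printf("%s\n", buf);   // prints: " deblock=2:-1"
    return 0;
}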
Subject: [x265] Merge with stable
details: http://hg.videolan.org/x265/rev/36d70728acc2
branches:
changeset: 9932:36d70728acc2
user: Steve Borho <steve at borho.org>
date: Fri Mar 27 13:16:28 2015 -0500
description:
Merge with stable
diffstat:
source/common/param.cpp | 5 +-
source/common/x86/asm-primitives.cpp | 5 +
source/common/x86/intrapred.h | 3 +
source/common/x86/intrapred8.asm | 990 +++++++++++++++++++++++++++++++++++
source/common/x86/pixel-util.h | 1 +
source/common/x86/pixel-util8.asm | 120 +++-
source/encoder/search.cpp | 8 +-
source/test/regression-tests.txt | 226 ++-----
8 files changed, 1198 insertions(+), 160 deletions(-)
diffs (truncated from 1511 to 300 lines):
diff -r 3d0f23cb0e58 -r 36d70728acc2 source/common/param.cpp
--- a/source/common/param.cpp Thu Mar 26 15:09:51 2015 -0500
+++ b/source/common/param.cpp Fri Mar 27 13:16:28 2015 -0500
@@ -1383,9 +1383,12 @@ char *x265_param2string(x265_param* p)
s += sprintf(s, " crqpoffs=%d", p->crQpOffset);
s += sprintf(s, " rd=%d", p->rdLevel);
s += sprintf(s, " psy-rd=%.2f", p->psyRd);
+ s += sprintf(s, " rdoq-level=%d", p->rdoqLevel);
s += sprintf(s, " psy-rdoq=%.2f", p->psyRdoq);
BOOL(p->bEnableSignHiding, "signhide");
- BOOL(p->bEnableLoopFilter, "lft");
+ BOOL(p->bEnableLoopFilter, "deblock");
+ if (p->bEnableLoopFilter && (p->deblockingFilterBetaOffset || p->deblockingFilterTCOffset))
+ s += sprintf(s, "=%d:%d", p->deblockingFilterTCOffset, p->deblockingFilterBetaOffset);
BOOL(p->bEnableSAO, "sao");
BOOL(p->bSaoNonDeblocked, "sao-non-deblock");
BOOL(p->bBPyramid, "b-pyramid");
diff -r 3d0f23cb0e58 -r 36d70728acc2 source/common/x86/asm-primitives.cpp
--- a/source/common/x86/asm-primitives.cpp Thu Mar 26 15:09:51 2015 -0500
+++ b/source/common/x86/asm-primitives.cpp Fri Mar 27 13:16:28 2015 -0500
@@ -1447,6 +1447,8 @@ void setupAssemblyPrimitives(EncoderPrim
#if X86_64
if (cpuMask & X265_CPU_AVX2)
{
+ p.scale2D_64to32 = x265_scale2D_64to32_avx2;
+
p.cu[BLOCK_4x4].psy_cost_ss = x265_psyCost_ss_4x4_avx2;
p.cu[BLOCK_8x8].psy_cost_ss = x265_psyCost_ss_8x8_avx2;
p.cu[BLOCK_16x16].psy_cost_ss = x265_psyCost_ss_16x16_avx2;
@@ -1642,6 +1644,9 @@ void setupAssemblyPrimitives(EncoderPrim
p.cu[BLOCK_32x32].intra_pred[30] = x265_intra_pred_ang32_30_avx2;
p.cu[BLOCK_32x32].intra_pred[31] = x265_intra_pred_ang32_31_avx2;
p.cu[BLOCK_32x32].intra_pred[32] = x265_intra_pred_ang32_32_avx2;
+ p.cu[BLOCK_32x32].intra_pred[33] = x265_intra_pred_ang32_33_avx2;
+ p.cu[BLOCK_32x32].intra_pred[25] = x265_intra_pred_ang32_25_avx2;
+ p.cu[BLOCK_32x32].intra_pred[24] = x265_intra_pred_ang32_24_avx2;
// copy_sp primitives
p.cu[BLOCK_16x16].copy_sp = x265_blockcopy_sp_16x16_avx2;
diff -r 3d0f23cb0e58 -r 36d70728acc2 source/common/x86/intrapred.h
--- a/source/common/x86/intrapred.h Thu Mar 26 15:09:51 2015 -0500
+++ b/source/common/x86/intrapred.h Fri Mar 27 13:16:28 2015 -0500
@@ -212,6 +212,9 @@ void x265_intra_pred_ang32_29_avx2(pixel
void x265_intra_pred_ang32_30_avx2(pixel* dst, intptr_t dstStride, const pixel* srcPix, int dirMode, int bFilter);
void x265_intra_pred_ang32_31_avx2(pixel* dst, intptr_t dstStride, const pixel* srcPix, int dirMode, int bFilter);
void x265_intra_pred_ang32_32_avx2(pixel* dst, intptr_t dstStride, const pixel* srcPix, int dirMode, int bFilter);
+void x265_intra_pred_ang32_33_avx2(pixel* dst, intptr_t dstStride, const pixel* srcPix, int dirMode, int bFilter);
+void x265_intra_pred_ang32_25_avx2(pixel* dst, intptr_t dstStride, const pixel* srcPix, int dirMode, int bFilter);
+void x265_intra_pred_ang32_24_avx2(pixel* dst, intptr_t dstStride, const pixel* srcPix, int dirMode, int bFilter);
void x265_all_angs_pred_4x4_sse4(pixel *dest, pixel *refPix, pixel *filtPix, int bLuma);
void x265_all_angs_pred_8x8_sse4(pixel *dest, pixel *refPix, pixel *filtPix, int bLuma);
void x265_all_angs_pred_16x16_sse4(pixel *dest, pixel *refPix, pixel *filtPix, int bLuma);
diff -r 3d0f23cb0e58 -r 36d70728acc2 source/common/x86/intrapred8.asm
--- a/source/common/x86/intrapred8.asm Thu Mar 26 15:09:51 2015 -0500
+++ b/source/common/x86/intrapred8.asm Fri Mar 27 13:16:28 2015 -0500
@@ -376,6 +376,77 @@ c_ang32_mode_32: db 11, 21, 11, 21, 11
db 21, 11, 21, 11, 21, 11, 21, 11, 21, 11, 21, 11, 21, 11, 21, 11, 21, 11, 21, 11, 21, 11, 21, 11, 21, 11, 21, 11, 21, 11, 21, 11
db 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0
+
+ALIGN 32
+c_ang32_mode_33: db 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26
+ db 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20
+ db 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14
+ db 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8
+ db 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28
+ db 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22
+ db 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16
+ db 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10
+ db 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30
+ db 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24
+ db 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18
+ db 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12
+ db 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6
+ db 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26
+ db 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20
+ db 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14
+ db 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8
+ db 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28
+ db 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22
+ db 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16
+ db 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10
+ db 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30
+ db 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24
+ db 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18
+ db 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12
+ db 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6
+ db 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0
+
+
+
+ALIGN 32
+c_ang32_mode_25: db 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28
+ db 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24
+ db 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20
+ db 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16
+ db 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12
+ db 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8
+ db 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4
+ db 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0
+ db 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28
+ db 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24
+ db 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20
+ db 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16
+ db 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12
+ db 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8
+ db 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4
+ db 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0
+
+
+
+ALIGN 32
+c_ang32_mode_24: db 5, 27, 5, 27, 5, 27, 5, 27, 5, 27, 5, 27, 5, 27, 5, 27, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22
+ db 15, 17, 15, 17, 15, 17, 15, 17, 15, 17, 15, 17, 15, 17, 15, 17, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12
+ db 25, 7, 25, 7, 25, 7, 25, 7, 25, 7, 25, 7, 25, 7, 25, 7, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2
+ db 3, 29, 3, 29, 3, 29, 3, 29, 3, 29, 3, 29, 3, 29, 3, 29, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24
+ db 13, 19, 13, 19, 13, 19, 13, 19, 13, 19, 13, 19, 13, 19, 13, 19, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14
+ db 23, 9, 23, 9, 23, 9, 23, 9, 23, 9, 23, 9, 23, 9, 23, 9, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4
+ db 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26
+ db 11, 21, 11, 21, 11, 21, 11, 21, 11, 21, 11, 21, 11, 21, 11, 21, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16
+ db 21, 11, 21, 11, 21, 11, 21, 11, 21, 11, 21, 11, 21, 11, 21, 11, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6, 26, 6
+ db 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1
+ db 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 4, 28, 9, 23, 9, 23, 9, 23, 9, 23, 9, 23, 9, 23, 9, 23, 9, 23
+ db 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 14, 18, 19, 13, 19, 13, 19, 13, 19, 13, 19, 13, 19, 13, 19, 13, 19, 13
+ db 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 24, 8, 29, 3, 29, 3, 29, 3, 29, 3, 29, 3, 29, 3, 29, 3, 29, 3
+ db 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 2, 30, 7, 25, 7, 25, 7, 25, 7, 25, 7, 25, 7, 25, 7, 25, 7, 25
+ db 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 12, 20, 17, 15, 17, 15, 17, 15, 17, 15, 17, 15, 17, 15, 17, 15, 17, 15
+ db 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 22, 10, 27, 5, 27, 5, 27, 5, 27, 5, 27, 5, 27, 5, 27, 5, 27, 5
+ db 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0, 32, 0
+
ALIGN 32
;; (blkSize - 1 - x)
pw_planar4_0: dw 3, 2, 1, 0, 3, 2, 1, 0
@@ -13514,5 +13585,924 @@ cglobal intra_pred_ang32_32, 3, 5, 11
vpermq m6, m6, 11011000b
movu [r0 + r3], m6
RET
+
+INIT_YMM avx2
+cglobal intra_pred_ang32_33, 3, 5, 11
+ mova m0, [pw_1024]
+ mova m1, [intra_pred_shuff_0_8]
+ lea r3, [3 * r1]
+ lea r4, [c_ang32_mode_33]
+
+ ;row [0]
+ vbroadcasti128 m2, [r2 + 1]
+ pshufb m2, m1
+ vbroadcasti128 m3, [r2 + 9]
+ pshufb m3, m1
+ vbroadcasti128 m4, [r2 + 17]
+ pshufb m4, m1
+ vbroadcasti128 m5, [r2 + 25]
+ pshufb m5, m1
+
+ mova m10, [r4 + 0 * mmsize]
+ vperm2i128 m6, m2, m3, 00100000b
+ pmaddubsw m6, m10
+ pmulhrsw m6, m0
+ vperm2i128 m7, m4, m5, 00100000b
+ pmaddubsw m7, m10
+ pmulhrsw m7, m0
+ packuswb m6, m7
+ vpermq m6, m6, 11011000b
+ movu [r0], m6
+
+ ;row [1]
+ vbroadcasti128 m2, [r2 + 2]
+ pshufb m2, m1
+ vbroadcasti128 m3, [r2 + 10]
+ pshufb m3, m1
+ vbroadcasti128 m4, [r2 + 18]
+ pshufb m4, m1
+ vbroadcasti128 m5, [r2 + 26]
+ pshufb m5, m1
+
+ mova m10, [r4 + 1 * mmsize]
+ vperm2i128 m6, m2, m3, 00100000b
+ pmaddubsw m6, m10
+ pmulhrsw m6, m0
+ vperm2i128 m7, m4, m5, 00100000b
+ pmaddubsw m7, m10
+ pmulhrsw m7, m0
+ packuswb m6, m7
+ vpermq m6, m6, 11011000b
+ movu [r0 + r1], m6
+
+ ;row [2]
+ vbroadcasti128 m2, [r2 + 3]
+ pshufb m2, m1
+ vbroadcasti128 m3, [r2 + 11]
+ pshufb m3, m1
+ vbroadcasti128 m4, [r2 + 19]
+ pshufb m4, m1
+ vbroadcasti128 m5, [r2 + 27]
+ pshufb m5, m1
+
+ mova m10, [r4 + 2 * mmsize]
+ vperm2i128 m6, m2, m3, 00100000b
+ pmaddubsw m6, m10
+ pmulhrsw m6, m0
+ vperm2i128 m7, m4, m5, 00100000b
+ pmaddubsw m7, m10
+ pmulhrsw m7, m0
+ packuswb m6, m7
+ vpermq m6, m6, 11011000b
+ movu [r0 + 2 * r1], m6
+
+ ;row [3]
+ vbroadcasti128 m2, [r2 + 4]
+ pshufb m2, m1
+ vbroadcasti128 m3, [r2 + 12]
+ pshufb m3, m1
+ vbroadcasti128 m4, [r2 + 20]
+ pshufb m4, m1
+ vbroadcasti128 m5, [r2 + 28]
+ pshufb m5, m1
+
+ mova m10, [r4 + 3 * mmsize]
+ vperm2i128 m6, m2, m3, 00100000b
+ pmaddubsw m6, m10
+ pmulhrsw m6, m0
+ vperm2i128 m7, m4, m5, 00100000b
+ pmaddubsw m7, m10
+ pmulhrsw m7, m0
+ packuswb m6, m7
+ vpermq m6, m6, 11011000b
+ movu [r0 + r3], m6
+
+ ;row [4, 5]
+ vbroadcasti128 m2, [r2 + 5]
+ pshufb m2, m1
+ vbroadcasti128 m3, [r2 + 13]
+ pshufb m3, m1
+ vbroadcasti128 m4, [r2 + 21]
+ pshufb m4, m1
+ vbroadcasti128 m5, [r2 + 29]
+ pshufb m5, m1
+
+ add r4, 4 * mmsize
+ lea r0, [r0 + 4 * r1]
+ mova m10, [r4 + 0 * mmsize]
+
+ INTRA_PRED_ANG32_CAL_ROW
+ movu [r0], m7
+ movu [r0 + r1], m6
+
+ ;row [6]
+ vbroadcasti128 m2, [r2 + 6]
+ pshufb m2, m1
+ vbroadcasti128 m3, [r2 + 14]
+ pshufb m3, m1
+ vbroadcasti128 m4, [r2 + 22]
+ pshufb m4, m1
+ vbroadcasti128 m5, [r2 + 30]
+ pshufb m5, m1
+
+ mova m10, [r4 + 1 * mmsize]
+ vperm2i128 m6, m2, m3, 00100000b
+ pmaddubsw m6, m10
+ pmulhrsw m6, m0
+ vperm2i128 m7, m4, m5, 00100000b
+ pmaddubsw m7, m10
+ pmulhrsw m7, m0
+ packuswb m6, m7
+ vpermq m6, m6, 11011000b
+ movu [r0 + 2 * r1], m6
+
+ ;row [7]
+ vbroadcasti128 m2, [r2 + 7]
+ pshufb m2, m1
+ vbroadcasti128 m3, [r2 + 15]
+ pshufb m3, m1
+ vbroadcasti128 m4, [r2 + 23]
+ pshufb m4, m1
+ vbroadcasti128 m5, [r2 + 31]
+ pshufb m5, m1
+
+ mova m10, [r4 + 2 * mmsize]
+ vperm2i128 m6, m2, m3, 00100000b
+ pmaddubsw m6, m10
+ pmulhrsw m6, m0
+ vperm2i128 m7, m4, m5, 00100000b
+ pmaddubsw m7, m10
+ pmulhrsw m7, m0
+ packuswb m6, m7
+ vpermq m6, m6, 11011000b
+ movu [r0 + r3], m6
+
+ ;row [8]
+ vbroadcasti128 m2, [r2 + 8]
+ pshufb m2, m1
+ vbroadcasti128 m3, [r2 + 16]
+ pshufb m3, m1
+ vbroadcasti128 m4, [r2 + 24]
+ pshufb m4, m1
+ vbroadcasti128 m5, [r2 + 32]
+ pshufb m5, m1
+
+ lea r0, [r0 + 4 * r1]