<div dir="ltr">From 7b9bf4bc9c5bdfae6aec2d22ab2a8430f9b1603f Mon Sep 17 00:00:00 2001<br>From: AnusuyaKumarasamy <<a href="mailto:anusuya.kumarasamy@multicorewareinc.com">anusuya.kumarasamy@multicorewareinc.com</a>><br>Date: Thu, 24 Oct 2024 17:57:48 +0530<br>Subject: [PATCH 05/10] shifted full resolution functions to lookahead<br><br>---<br> source/common/frame.cpp      |  2 ++<br> source/common/frame.h        |  1 +<br> source/encoder/encoder.cpp   | 17 ++++-------------<br> source/encoder/slicetype.cpp |  6 +++---<br> 4 files changed, 10 insertions(+), 16 deletions(-)<br><br>diff --git a/source/common/frame.cpp b/source/common/frame.cpp<br>index 80f341edf..63e9b829b 100644<br>--- a/source/common/frame.cpp<br>+++ b/source/common/frame.cpp<br>@@ -91,6 +91,7 @@ bool Frame::create(x265_param *param, float* quantOffsets)<br>     if (m_param->bEnableTemporalFilter)<br>     {<br>         m_mcstf = new TemporalFilter;<br>+        m_mcstffencPic = new PicYuv;<br>         m_mcstf->m_range = param->mcstfFrameRange;<br>         m_mcstf->init(param);<br> <br>@@ -105,6 +106,7 @@ bool Frame::create(x265_param *param, float* quantOffsets)<br>         if (!m_fencPicSubsampled4->createScaledPicYUV(param, 4))<br>             return false;<br> <br>+        m_mcstffencPic->create(param, !!m_param->bCopyPicToFrame);<br>         CHECKED_MALLOC_ZERO(m_isSubSampled, int, 1);<br>     }<br> <br>diff --git a/source/common/frame.h b/source/common/frame.h<br>index e85727deb..588fa6696 100644<br>--- a/source/common/frame.h<br>+++ b/source/common/frame.h<br>@@ -148,6 +148,7 @@ public:<br>     Frame*                 m_prevMCSTF;<br>     int*                   m_isSubSampled;<br>     TemporalFilterRefPicInfo m_mcstfRefList[MAX_MCSTF_TEMPORAL_WINDOW_LENGTH];<br>+    PicYuv*                m_mcstffencPic;<br> <br>     /*Vbv-End-Flag*/<br>     int vbvEndFlag;<br>diff --git a/source/encoder/encoder.cpp b/source/encoder/encoder.cpp<br>index d75dcafd8..7745a7196 100644<br>--- a/source/encoder/encoder.cpp<br>+++ b/source/encoder/encoder.cpp<br>@@ -1409,7 +1409,7 @@ inline int enqueueRefFrame(FrameEncoder* curframeEncoder, Frame* iterFrame, Fram<br> {<br>     TemporalFilterRefPicInfo* dest = &curframeEncoder->m_mcstfRefList[curFrame->m_mcstf->m_numRef];<br>     dest->poc = iterFrame->m_poc;<br>-    dest->picBuffer = iterFrame->m_fencPic;<br>+    dest->picBuffer = iterFrame->m_mcstffencPic;<br>     dest->picBufferSubSampled2 = iterFrame->m_fencPicSubsampled2;<br>     dest->picBufferSubSampled4 = iterFrame->m_fencPicSubsampled4;<br>     dest->isFilteredFrame = isPreFiltered;<br>@@ -1418,7 +1418,7 @@ inline int enqueueRefFrame(FrameEncoder* curframeEncoder, Frame* iterFrame, Fram<br> <br>     TemporalFilterRefPicInfo* temp = &curFrame->m_mcstfRefList[curFrame->m_mcstf->m_numRef];<br>     temp->poc = iterFrame->m_poc;<br>-    temp->picBuffer = iterFrame->m_fencPic;<br>+    temp->picBuffer = iterFrame->m_mcstffencPic;<br>     temp->lowres = iterFrame->m_lowres.lowresPlane[0];<br>     temp->lowerRes = iterFrame->m_lowres.lowerResPlane[0];<br>     temp->isFilteredFrame = isPreFiltered;<br>@@ -1945,6 +1945,7 @@ int Encoder::encode(const x265_picture* pic_in, x265_picture* pic_out)<br>             extendPicBorder(orig->m_picOrg[2], orig->m_strideC, orig->m_picWidth >> orig->m_hChromaShift, orig->m_picHeight >> orig->m_vChromaShift, orig->m_chromaMarginX, orig->m_chromaMarginY);<br> <br>             //TODO: Add subsampling here if required<br>+            inFrame[0]->m_mcstffencPic->copyFromFrame(inFrame[0]->m_fencPic);<br>             
m_lookahead->m_origPicBuf->addPicture(inFrame[0]);;<br>         }<br> <br>@@ -2461,6 +2462,7 @@ int Encoder::encode(const x265_picture* pic_in, x265_picture* pic_out)<br> <br>                 Frame* dupFrame = m_lookahead->m_origPicBuf->m_mcstfOrigPicFreeList.popBackMCSTF();<br>                 dupFrame->m_fencPic->copyFromFrame(frameEnc[0]->m_fencPic);<br>+                dupFrame->m_mcstffencPic->copyFromFrame(frameEnc[0]->m_mcstffencPic);<br>                 dupFrame->m_poc = frameEnc[0]->m_poc;<br>                 dupFrame->m_encodeOrder = frameEnc[0]->m_encodeOrder;<br>                 dupFrame->m_refPicCnt[1] = 2 * dupFrame->m_mcstf->m_range + 1;<br>@@ -2517,17 +2519,6 @@ int Encoder::encode(const x265_picture* pic_in, x265_picture* pic_out)<br>                     return -1;<br>                 }<br> <br>-                for (uint8_t i = 1; i <= frameEnc[0]->m_mcstf->m_numRef; i++)<br>-                {<br>-                    TemporalFilterRefPicInfo* ref = &frameEnc[0]->m_mcstfRefList[i - 1];<br>-                    Frame* curFrame = m_lookahead->m_origPicBuf->m_mcstfPicList.getPOCMCSTF(ref->poc);<br>-<br>-                    //curFrame->m_mcstf->motionEstimationLuma(ref->mvs0, ref->mvsStride0, frameEnc[0]->m_lowres.lowerResPlane[0], (curFrame->m_lowres.lumaStride / 2), (curFrame->m_lowres.lines / 2), (curFrame->m_lowres.width / 2), ref->lowerRes, 16);<br>-                    //curFrame->m_mcstf->motionEstimationLuma(ref->mvs1, ref->mvsStride1, frameEnc[0]->m_lowres.lowresPlane[0], (curFrame->m_lowres.lumaStride), (curFrame->m_lowres.lines), (curFrame->m_lowres.width), ref->lowres, 16, ref->mvs0, ref->mvsStride0, 2);<br>-                    curFrame->m_mcstf->motionEstimationLuma(ref->mvs2, ref->mvsStride2, frameEnc[0]->m_fencPic->m_picOrg[0], curFrame->m_fencPic->m_stride, curFrame->m_fencPic->m_picHeight, curFrame->m_fencPic->m_picWidth, ref->picBuffer->m_picOrg[0], 16, ref->mvs1, ref->mvsStride1, 2);<br>-                    curFrame->m_mcstf->motionEstimationLumaDoubleRes(ref->mvs, ref->mvsStride, frameEnc[0]->m_fencPic, ref->picBuffer, 8, ref->mvs2, ref->mvsStride2, 1, ref->error);<br>-                }<br>-<br>                 for (int i = 0; i < frameEnc[0]->m_mcstf->m_numRef; i++)<br>                 {<br>                     TemporalFilterRefPicInfo* ref = &frameEnc[0]->m_mcstfRefList[i];<br>diff --git a/source/encoder/slicetype.cpp b/source/encoder/slicetype.cpp<br>index 19f6d8f53..bd9f0793a 100644<br>--- a/source/encoder/slicetype.cpp<br>+++ b/source/encoder/slicetype.cpp<br>@@ -1803,8 +1803,8 @@ void Lookahead::estimatelowresmotion(Frame* curframe)<br> <br>         curframe->m_mcstf->motionEstimationLuma(ref->mvs0, ref->mvsStride0, curframe->m_lowres.lowerResPlane[0], (curframe->m_lowres.lumaStride / 2), (curframe->m_lowres.lines / 2), (curframe->m_lowres.width / 2), ref->lowerRes, 16);<br>         curframe->m_mcstf->motionEstimationLuma(ref->mvs1, ref->mvsStride1, curframe->m_lowres.lowresPlane[0], (curframe->m_lowres.lumaStride), (curframe->m_lowres.lines), (curframe->m_lowres.width), ref->lowres, 16, ref->mvs0, ref->mvsStride0, 2);<br>-        //curframe->m_mcstf->motionEstimationLuma(ref->mvs2, ref->mvsStride2, curframe->m_fencPic->m_picOrg[0], curframe->m_fencPic->m_stride, curframe->m_fencPic->m_picHeight, curframe->m_fencPic->m_picWidth, ref->picBuffer->m_picOrg[0], 16, ref->mvs1, ref->mvsStride1, 2);<br>-        //curframe->m_mcstf->motionEstimationLumaDoubleRes(ref->mvs, ref->mvsStride, curframe->m_fencPic, ref->picBuffer, 8, ref->mvs2, ref->mvsStride2, 1, 
ref->error);<br>+        curframe->m_mcstf->motionEstimationLuma(ref->mvs2, ref->mvsStride2, curframe->m_fencPic->m_picOrg[0], curframe->m_fencPic->m_stride, curframe->m_fencPic->m_picHeight, curframe->m_fencPic->m_picWidth, ref->picBuffer->m_picOrg[0], 16, ref->mvs1, ref->mvsStride1, 2);<br>+        curframe->m_mcstf->motionEstimationLumaDoubleRes(ref->mvs, ref->mvsStride, curframe->m_fencPic, ref->picBuffer, 8, ref->mvs2, ref->mvsStride2, 1, ref->error);<br>     }<br> <br> }<br>@@ -1813,7 +1813,7 @@ inline int enqueueRefFrame(Frame* iterFrame, Frame* curFrame, bool isPreFiltered<br> {<br>     TemporalFilterRefPicInfo * temp = &curFrame->m_mcstfRefList[curFrame->m_mcstf->m_numRef];<br>     temp->poc = iterFrame->m_poc;<br>-    temp->picBuffer = iterFrame->m_fencPic;<br>+    temp->picBuffer = iterFrame->m_mcstffencPic;<br>     temp->lowres = iterFrame->m_lowres.lowresPlane[0];<br>     temp->lowerRes = iterFrame->m_lowres.lowerResPlane[0];<br>     temp->isFilteredFrame = isPreFiltered;<br>-- <br>2.36.0.windows.1<br><br></div>
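
Reviewer note (not part of the patch): the two full-resolution calls removed from Encoder::encode() (motionEstimationLuma on m_fencPic and motionEstimationLumaDoubleRes) now run next to the half- and quarter-resolution passes in Lookahead::estimatelowresmotion(), and the new m_mcstffencPic holds a copy of the extended source picture so picBuffer points at that copy rather than at m_fencPic itself. For readers new to the MCSTF code, the following is a minimal, self-contained sketch of the hierarchical search cascade those calls form; the Plane/MV types, estimateLuma() and mcstfMotionCascade() are illustrative stand-ins, not the x265 TemporalFilter API, and the actual block-matching search is elided.

// Sketch of the MCSTF hierarchical motion-estimation cascade (stub types, not x265 classes).
#include <cstddef>
#include <cstdint>
#include <vector>

struct MV { int16_t x = 0, y = 0; };

struct Plane
{
    const uint8_t* pixels = nullptr;
    int stride = 0, width = 0, height = 0;
};

// Hypothetical block-matching pass over 16x16 blocks: each block's search is seeded by the
// co-located vector from the coarser level, scaled up by 'scale'. The search itself is elided.
static void estimateLuma(std::vector<MV>& mvs, const Plane& cur, const Plane& ref,
                         const std::vector<MV>* pred = nullptr, int predBlocksX = 0, int scale = 1)
{
    const int blocksX = cur.width / 16, blocksY = cur.height / 16;
    mvs.assign((size_t)blocksX * blocksY, MV());
    for (int by = 0; by < blocksY; by++)
    {
        for (int bx = 0; bx < blocksX; bx++)
        {
            MV start;
            if (pred)
            {
                // Reuse the coarser-level vector of the co-located block as the search start.
                const MV& p = (*pred)[(size_t)(by / scale) * predBlocksX + (bx / scale)];
                start.x = (int16_t)(p.x * scale);
                start.y = (int16_t)(p.y * scale);
            }
            /* ... refinement search around 'start' against 'ref' would go here ... */
            mvs[(size_t)by * blocksX + bx] = start;
        }
    }
    (void)ref;
}

// Cascade mirroring the call order now grouped inside the lookahead:
// quarter-res (mvs0) -> half-res (mvs1) -> full-res (mvs2), each level seeded by the one below;
// x265 then runs motionEstimationLumaDoubleRes() on the full-resolution planes for the final
// refinement used by the temporal filter.
void mcstfMotionCascade(const Plane& curQuarter, const Plane& refQuarter,
                        const Plane& curHalf,    const Plane& refHalf,
                        const Plane& curFull,    const Plane& refFull)
{
    std::vector<MV> mvs0, mvs1, mvs2;
    estimateLuma(mvs0, curQuarter, refQuarter);                              // lowerRes pass
    estimateLuma(mvs1, curHalf, refHalf, &mvs0, curQuarter.width / 16, 2);   // lowres pass
    estimateLuma(mvs2, curFull, refFull, &mvs1, curHalf.width / 16, 2);      // full-resolution pass
}

Each level only refines the scaled-up vectors of the level below it, matching the mvs0 -> mvs1 -> mvs2 -> mvs chaining visible in the diff.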