/*
 * LIBOIL - Library of Optimized Inner Loops
 * Copyright (c) 2003,2004 David A. Schleef
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
 * IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
//Portions Copyright (c) 2008-2009 Nokia Corporation and/or its subsidiary(-ies). All rights reserved.

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <liboil/liboilfunction.h>

OIL_DECLARE_CLASS (err_intra8x8_u8);
OIL_DECLARE_CLASS (err_inter8x8_u8);
OIL_DECLARE_CLASS (err_inter8x8_u8_avg);

static void
err_intra8x8_u8_mmx (uint32_t *dest, uint8_t *src1, int ss1)
{
#if !defined(__WINSCW__) && !defined(__WINS__)
  uint32_t xsum;
  uint32_t xxsum;

  __asm__ __volatile__ (
    "  pxor %%mm5, %%mm5       \n\t"
    "  pxor %%mm6, %%mm6       \n\t"
    "  pxor %%mm7, %%mm7       \n\t"
    "  mov $8, %%edi           \n\t"
    "1:                        \n\t"
    "  movq (%2), %%mm0        \n\t" /* take 8 bytes */
    "  movq %%mm0, %%mm2       \n\t"

    "  punpcklbw %%mm6, %%mm0  \n\t"
    "  punpckhbw %%mm6, %%mm2  \n\t"

    "  paddw %%mm0, %%mm5      \n\t"
    "  paddw %%mm2, %%mm5      \n\t"

    "  pmaddwd %%mm0, %%mm0    \n\t"
    "  pmaddwd %%mm2, %%mm2    \n\t"

    "  paddd %%mm0, %%mm7      \n\t"
    "  paddd %%mm2, %%mm7      \n\t"

    "  add %3, %2              \n\t" /* Inc pointer into src data */

    "  dec %%edi               \n\t"
    "  jnz 1b                  \n\t"

    /* Fold the four 16-bit partial sums in mm5 into one word and
     * sign-extend it into the xsum output. */
    "  movq %%mm5, %%mm0       \n\t"
    "  psrlq $32, %%mm5        \n\t"
    "  paddw %%mm0, %%mm5      \n\t"
    "  movq %%mm5, %%mm0       \n\t"
    "  psrlq $16, %%mm5        \n\t"
    "  paddw %%mm0, %%mm5      \n\t"
    "  movd %%mm5, %%edi       \n\t"
    "  movswl %%di, %%edi      \n\t"
    "  movl %%edi, %0          \n\t"

    /* Fold the two 32-bit partial sums of squares in mm7 into xxsum. */
    "  movq %%mm7, %%mm0       \n\t"
    "  psrlq $32, %%mm7        \n\t"
    "  paddd %%mm0, %%mm7      \n\t"
    "  movd %%mm7, %1          \n\t"
    "  emms                    \n\t"

    : "=r" (xsum),
      "=r" (xxsum),
      "+r" (src1)
    : "r" (ss1)
    : "edi", "memory"
  );

  /* Compute population variance as mis-match metric:
   * 64*SUM(x^2) - (SUM(x))^2 is 64^2 times the variance of the 64 pixels. */
  *dest = ((xxsum << 6) - xsum * xsum);
#endif
}
OIL_DEFINE_IMPL_FULL (err_intra8x8_u8_mmx, err_intra8x8_u8, OIL_IMPL_FLAG_MMX);
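/*
 * Illustrative only: a straight-C sketch of what the MMX routine above
 * computes, kept under "#if 0" so it is never compiled.  The function name
 * is hypothetical and is not registered with liboil; the portable reference
 * implementation for the err_intra8x8_u8 class lives elsewhere in liboil.
 */
#if 0
static void
err_intra8x8_u8_c_sketch (uint32_t *dest, uint8_t *src1, int ss1)
{
  uint32_t xsum = 0;   /* sum of the 64 pixels */
  uint32_t xxsum = 0;  /* sum of their squares */
  int i, j;

  for (i = 0; i < 8; i++) {
    for (j = 0; j < 8; j++) {
      xsum += src1[j];
      xxsum += src1[j] * src1[j];
    }
    src1 += ss1;       /* ss1 is the row stride in bytes */
  }
  /* 64*SUM(x^2) - (SUM(x))^2 == 64^2 * population variance */
  *dest = (xxsum << 6) - xsum * xsum;
}
#endif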
static void
err_inter8x8_u8_mmx (uint32_t *dest, uint8_t *src1, int ss1, uint8_t *src2, int ss2)
{
#if !defined(__WINSCW__) && !defined(__WINS__)
  uint32_t xsum;
  uint32_t xxsum;

  __asm__ __volatile__ (
    "  pxor %%mm5, %%mm5       \n\t"
    "  pxor %%mm6, %%mm6       \n\t"
    "  pxor %%mm7, %%mm7       \n\t"
    "  mov $8, %%edi           \n\t"
    "1:                        \n\t"
    "  movq (%2), %%mm0        \n\t" /* take 8 bytes */
    "  movq (%3), %%mm1        \n\t"
    "  movq %%mm0, %%mm2       \n\t"
    "  movq %%mm1, %%mm3       \n\t"

    "  punpcklbw %%mm6, %%mm0  \n\t"
    "  punpcklbw %%mm6, %%mm1  \n\t"
    "  punpckhbw %%mm6, %%mm2  \n\t"
    "  punpckhbw %%mm6, %%mm3  \n\t"

    "  psubsw %%mm1, %%mm0     \n\t"
    "  psubsw %%mm3, %%mm2     \n\t"

    "  paddw %%mm0, %%mm5      \n\t"
    "  paddw %%mm2, %%mm5      \n\t"

    "  pmaddwd %%mm0, %%mm0    \n\t"
    "  pmaddwd %%mm2, %%mm2    \n\t"

    "  paddd %%mm0, %%mm7      \n\t"
    "  paddd %%mm2, %%mm7      \n\t"

    "  add %4, %2              \n\t" /* Inc pointer into src data */
    "  add %5, %3              \n\t" /* Inc pointer into ref data */

    "  dec %%edi               \n\t"
    "  jnz 1b                  \n\t"

    "  movq %%mm5, %%mm0       \n\t"
    "  psrlq $32, %%mm5        \n\t"
    "  paddw %%mm0, %%mm5      \n\t"
    "  movq %%mm5, %%mm0       \n\t"
    "  psrlq $16, %%mm5        \n\t"
    "  paddw %%mm0, %%mm5      \n\t"
    "  movd %%mm5, %%edi       \n\t"
    "  movswl %%di, %%edi      \n\t"
    "  movl %%edi, %0          \n\t"

    "  movq %%mm7, %%mm0       \n\t"
    "  psrlq $32, %%mm7        \n\t"
    "  paddd %%mm0, %%mm7      \n\t"
    "  movd %%mm7, %1          \n\t"
    "  emms                    \n\t"

    : "=m" (xsum),
      "=m" (xxsum),
      "+r" (src1),
      "+r" (src2)
    : "m" (ss1),
      "m" (ss2)
    : "edi", "memory"
  );

  /* Compute and return population variance as mis-match metric. */
  *dest = ((xxsum << 6) - xsum * xsum);
#endif
}
OIL_DEFINE_IMPL_FULL (err_inter8x8_u8_mmx, err_inter8x8_u8, OIL_IMPL_FLAG_MMX);
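/*
 * Illustrative only: an equivalent straight-C sketch of the inter metric,
 * under "#if 0" and with a hypothetical name.  Note that the sum of
 * differences can be negative; the assembly above keeps it as a 16-bit word
 * sum and sign-extends it with movswl, so a signed accumulator is used here.
 */
#if 0
static void
err_inter8x8_u8_c_sketch (uint32_t *dest, uint8_t *src1, int ss1,
    uint8_t *src2, int ss2)
{
  int32_t xsum = 0;    /* signed sum of differences */
  uint32_t xxsum = 0;  /* sum of squared differences */
  int i, j;

  for (i = 0; i < 8; i++) {
    for (j = 0; j < 8; j++) {
      int d = src1[j] - src2[j];
      xsum += d;
      xxsum += d * d;
    }
    src1 += ss1;
    src2 += ss2;
  }
  /* 64*SUM(d^2) - (SUM(d))^2 == 64^2 * population variance of d */
  *dest = (xxsum << 6) - xsum * xsum;
}
#endif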
static void
err_inter8x8_u8_avg_mmx (uint32_t *dest, uint8_t *src1, int ss1, uint8_t *src2, uint8_t *src3, int ss2)
{
#if !defined(__WINSCW__) && !defined(__WINS__)
  uint32_t xsum;
  uint32_t xxsum;

  __asm__ __volatile__ (
    "  pcmpeqd %%mm4, %%mm4    \n\t" /* fefefefefefefefe in mm4 */
    "  paddb %%mm4, %%mm4      \n\t"
    "  pxor %%mm5, %%mm5       \n\t"
    "  pxor %%mm6, %%mm6       \n\t"
    "  pxor %%mm7, %%mm7       \n\t"
    "  mov $8, %%edi           \n\t"
    "1:                        \n\t"
    "  movq (%2), %%mm0        \n\t" /* take 8 bytes */

    "  movq (%3), %%mm2        \n\t"
    "  movq (%4), %%mm3        \n\t" /* take average of mm2 and mm3 */
    "  movq %%mm2, %%mm1       \n\t"
    "  pand %%mm3, %%mm1       \n\t"
    "  pxor %%mm2, %%mm3       \n\t"
    "  pand %%mm4, %%mm3       \n\t"
    "  psrlq $1, %%mm3         \n\t"
    "  paddb %%mm3, %%mm1      \n\t"

    "  movq %%mm0, %%mm2       \n\t"
    "  movq %%mm1, %%mm3       \n\t"

    "  punpcklbw %%mm6, %%mm0  \n\t"
    "  punpcklbw %%mm6, %%mm1  \n\t"
    "  punpckhbw %%mm6, %%mm2  \n\t"
    "  punpckhbw %%mm6, %%mm3  \n\t"

    "  psubsw %%mm1, %%mm0     \n\t"
    "  psubsw %%mm3, %%mm2     \n\t"

    "  paddw %%mm0, %%mm5      \n\t"
    "  paddw %%mm2, %%mm5      \n\t"

    "  pmaddwd %%mm0, %%mm0    \n\t"
    "  pmaddwd %%mm2, %%mm2    \n\t"

    "  paddd %%mm0, %%mm7      \n\t"
    "  paddd %%mm2, %%mm7      \n\t"

    "  add %5, %2              \n\t" /* Inc pointer into src data */
    "  add %6, %3              \n\t" /* Inc pointer into ref data */
    "  add %6, %4              \n\t" /* Inc pointer into ref data */

    "  dec %%edi               \n\t"
    "  jnz 1b                  \n\t"

    "  movq %%mm5, %%mm0       \n\t"
    "  psrlq $32, %%mm5        \n\t"
    "  paddw %%mm0, %%mm5      \n\t"
    "  movq %%mm5, %%mm0       \n\t"
    "  psrlq $16, %%mm5        \n\t"
    "  paddw %%mm0, %%mm5      \n\t"
    "  movd %%mm5, %%edi       \n\t"
    "  movswl %%di, %%edi      \n\t"
    "  movl %%edi, %0          \n\t"

    "  movq %%mm7, %%mm0       \n\t"
    "  psrlq $32, %%mm7        \n\t"
    "  paddd %%mm0, %%mm7      \n\t"
    "  movd %%mm7, %1          \n\t"
    "  emms                    \n\t"

    : "=m" (xsum),
      "=m" (xxsum),
      "+r" (src1),
      "+r" (src2),
      "+r" (src3)
    : "m" (ss1),
      "m" (ss2)
    : "edi", "memory"
  );

  /* Compute and return population variance as mis-match metric. */
  *dest = ((xxsum << 6) - xsum * xsum);
#endif
}

OIL_DEFINE_IMPL_FULL (err_inter8x8_u8_avg_mmx, err_inter8x8_u8_avg, OIL_IMPL_FLAG_MMX);
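/*
 * Illustrative only: the MMX averaging above uses the classic overflow-free
 * byte average  (a & b) + (((a ^ b) & 0xfe) >> 1),  which equals the
 * truncating average (a + b) >> 1 without needing 9-bit intermediates.
 * A scalar sketch with a hypothetical name, under "#if 0":
 */
#if 0
static uint8_t
avg_u8_sketch (uint8_t a, uint8_t b)
{
  /* a + b == (a ^ b) + 2*(a & b), so halving gives
   * ((a ^ b) >> 1) + (a & b); masking with 0xfe first simply discards the
   * low bit of (a ^ b), matching the truncation of (a + b) >> 1. */
  return (uint8_t) ((a & b) + (((a ^ b) & 0xfe) >> 1));
}
#endif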
#ifdef ENABLE_BROKEN_IMPLS
static void
err_inter8x8_u8_avg_mmxext (uint32_t *dest, uint8_t *src1, int ss1, uint8_t *src2, uint8_t *src3, int ss2)
{
  uint32_t xsum;
  uint32_t xxsum;

  __asm__ __volatile__ (
    "  pxor %%mm4, %%mm4       \n\t"
    "  pxor %%mm5, %%mm5       \n\t"
    "  mov $0x01010101, %%edi  \n\t"
    "  movd %%edi, %%mm6       \n\t"
    "  punpcklbw %%mm6, %%mm6  \n\t"
    "  pxor %%mm7, %%mm7       \n\t"
    "  mov $8, %%edi           \n\t"
    "1:                        \n\t"
    "  movq (%2), %%mm0        \n\t" /* take 8 bytes */

    "  movq (%3), %%mm2        \n\t"
    "  movq (%4), %%mm1        \n\t" /* take average of mm2 and mm1 */
    "  movq %%mm1, %%mm3       \n\t"
    "  pavgb %%mm2, %%mm1      \n\t"
    "  pxor %%mm2, %%mm3       \n\t"
    "  pand %%mm6, %%mm3       \n\t"
    "  psubb %%mm3, %%mm1      \n\t" /* pavgb rounds up; subtract (a^b)&1 for the truncating average */

    "  movq %%mm0, %%mm2       \n\t"
    "  movq %%mm1, %%mm3       \n\t"

    "  punpcklbw %%mm4, %%mm0  \n\t"
    "  punpcklbw %%mm4, %%mm1  \n\t"
    "  punpckhbw %%mm4, %%mm2  \n\t"
    "  punpckhbw %%mm4, %%mm3  \n\t"

    "  psubsw %%mm1, %%mm0     \n\t"
    "  psubsw %%mm3, %%mm2     \n\t"

    "  paddw %%mm0, %%mm5      \n\t"
    "  paddw %%mm2, %%mm5      \n\t"

    "  pmaddwd %%mm0, %%mm0    \n\t"
    "  pmaddwd %%mm2, %%mm2    \n\t"

    "  paddd %%mm0, %%mm7      \n\t"
    "  paddd %%mm2, %%mm7      \n\t"

    "  add %5, %2              \n\t" /* Inc pointer into src data */
    "  add %6, %3              \n\t" /* Inc pointer into ref data */
    "  add %6, %4              \n\t" /* Inc pointer into ref data */

    "  dec %%edi               \n\t"
    "  jnz 1b                  \n\t"

    "  movq %%mm5, %%mm0       \n\t"
    "  psrlq $32, %%mm5        \n\t"
    "  paddw %%mm0, %%mm5      \n\t"
    "  movq %%mm5, %%mm0       \n\t"
    "  psrlq $16, %%mm5        \n\t"
    "  paddw %%mm0, %%mm5      \n\t"
    "  movd %%mm5, %%edi       \n\t"
    "  movswl %%di, %%edi      \n\t"
    "  movl %%edi, %0          \n\t"

    "  movq %%mm7, %%mm0       \n\t"
    "  psrlq $32, %%mm7        \n\t"
    "  paddd %%mm0, %%mm7      \n\t"
    "  movd %%mm7, %1          \n\t"
    "  emms                    \n\t"

    : "=m" (xsum),
      "=m" (xxsum),
      "+r" (src1),
      "+r" (src2),
      "+r" (src3)
    : "m" (ss1),
      "m" (ss2)
    : "edi", "memory"
  );

  /* Compute and return population variance as mis-match metric. */
  *dest = ((xxsum << 6) - xsum * xsum);
}

OIL_DEFINE_IMPL_FULL (err_inter8x8_u8_avg_mmxext, err_inter8x8_u8_avg, OIL_IMPL_FLAG_MMX | OIL_IMPL_FLAG_MMXEXT);
#endif



#ifdef __SYMBIAN32__
/* Symbian DLL export helpers: return the implementation records defined
 * above so they can be registered from outside this translation unit. */

OilFunctionImpl* __oil_function_impl_err_intra8x8_u8_mmx() {
  return &_oil_function_impl_err_intra8x8_u8_mmx;
}
#endif

#ifdef __SYMBIAN32__

OilFunctionImpl* __oil_function_impl_err_inter8x8_u8_mmx() {
  return &_oil_function_impl_err_inter8x8_u8_mmx;
}
#endif

#ifdef __SYMBIAN32__

OilFunctionImpl* __oil_function_impl_err_inter8x8_u8_avg_mmx() {
  return &_oil_function_impl_err_inter8x8_u8_avg_mmx;
}
#endif

#if defined(__SYMBIAN32__) && defined(ENABLE_BROKEN_IMPLS)

OilFunctionImpl* __oil_function_impl_err_inter8x8_u8_avg_mmxext() {
  return &_oil_function_impl_err_inter8x8_u8_avg_mmxext;
}
#endif
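/*
 * Illustrative only: a sketch of how one of these implementations is
 * normally reached by a client, via liboil's public entry points rather
 * than by calling the static functions above directly.  It assumes the
 * usual liboil-generated prototype oil_err_intra8x8_u8() from
 * <liboil/liboilfuncs.h>; adjust to whatever headers this port ships.
 */
#if 0
#include <liboil/liboil.h>
#include <liboil/liboilfuncs.h>

static uint32_t
block_intra_error (uint8_t *block, int stride)
{
  uint32_t err;

  oil_init ();  /* selects the fastest registered impl (e.g. the MMX one) for this CPU */
  oil_err_intra8x8_u8 (&err, block, stride);
  return err;
}
#endif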