Update contrib.
/*
 * Eric Anholt. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */
// Portions Copyright (c) 2008-2009 Nokia Corporation and/or its subsidiary(-ies). All rights reserved.

#include <liboilclasses.h>
#include <liboilfunction.h>
#include <emmintrin.h>
#include <liboilcolorspace.h>
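
/* force_align_arg_pointer makes GCC re-align the stack to a 16-byte boundary
 * on entry to these functions, so aligned SSE spills and locals are safe even
 * when the caller only guarantees a smaller stack alignment. */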
#define SSE_FUNCTION __attribute__((force_align_arg_pointer))
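
/* Additive blend of one 8-bit channel: (d + s), clamped to 255. */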
#define COMPOSITE_ADD(d,s) oil_clamp_255((d) + (s))
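
/* composite_add_argb: per-channel saturating add of src onto dest for n ARGB
 * pixels.  A scalar prologue advances dest to a 16-byte boundary, the main
 * loop then adds four pixels per iteration with _mm_adds_epu8 (unaligned
 * loads from src, aligned accesses to dest), and a scalar tail finishes the
 * remaining pixels. */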
SSE_FUNCTION static void
composite_add_argb_sse (uint32_t *dest, const uint32_t *src, int n)
{
  /* Initial operations to align the destination pointer */
  for (; ((long)dest & 15) && (n > 0); n--) {
    uint32_t d = *dest, s = *src++;
    *dest++ = oil_argb(
        COMPOSITE_ADD(oil_argb_A(d), oil_argb_A(s)),
        COMPOSITE_ADD(oil_argb_R(d), oil_argb_R(s)),
        COMPOSITE_ADD(oil_argb_G(d), oil_argb_G(s)),
        COMPOSITE_ADD(oil_argb_B(d), oil_argb_B(s)));
  }
  /* Main loop: four ARGB pixels per iteration; src may be unaligned,
   * dest is 16-byte aligned at this point. */
  for (; n >= 4; n -= 4) {
    __m128i s;
    s = _mm_loadu_si128((__m128i *)src);
    *(__m128i *)dest = _mm_adds_epu8(s, *(__m128i *)dest);
    dest += 4; src += 4;
  }
  /* Scalar tail for the remaining 0-3 pixels */
  for (; n > 0; n--) {
    uint32_t d = *dest, s = *src++;
    *dest++ = oil_argb(
        COMPOSITE_ADD(oil_argb_A(d), oil_argb_A(s)),
        COMPOSITE_ADD(oil_argb_R(d), oil_argb_R(s)),
        COMPOSITE_ADD(oil_argb_G(d), oil_argb_G(s)),
        COMPOSITE_ADD(oil_argb_B(d), oil_argb_B(s)));
  }
}
OIL_DEFINE_IMPL_FULL (composite_add_argb_sse, composite_add_argb,
    OIL_IMPL_FLAG_SSE2);
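
/* composite_add_argb_const_src: as above, but every destination pixel is
 * blended with the single constant pixel *src_1, broadcast to all four lanes
 * of an XMM register with _mm_set1_epi32. */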
SSE_FUNCTION static void
composite_add_argb_const_src_sse (uint32_t *dest, const uint32_t *src_1, int n)
{
  __m128i s;
  uint32_t val = *src_1;

  /* Initial operations to align the destination pointer */
  for (; ((long)dest & 15) && (n > 0); n--) {
    uint32_t d = *dest;
    *dest++ = oil_argb(
        COMPOSITE_ADD(oil_argb_A(d), oil_argb_A(val)),
        COMPOSITE_ADD(oil_argb_R(d), oil_argb_R(val)),
        COMPOSITE_ADD(oil_argb_G(d), oil_argb_G(val)),
        COMPOSITE_ADD(oil_argb_B(d), oil_argb_B(val)));
  }
  s = _mm_set1_epi32(val);  /* broadcast the constant pixel */
  for (; n >= 4; n -= 4) {
    __m128i xmm0;
    xmm0 = _mm_adds_epu8(s, *(__m128i *)dest);
    _mm_store_si128((__m128i *)dest, xmm0);
    dest += 4;
  }
  /* Scalar tail for the remaining 0-3 pixels */
  for (; n > 0; n--) {
    uint32_t d = *dest;
    *dest++ = oil_argb(
        COMPOSITE_ADD(oil_argb_A(d), oil_argb_A(val)),
        COMPOSITE_ADD(oil_argb_R(d), oil_argb_R(val)),
        COMPOSITE_ADD(oil_argb_G(d), oil_argb_G(val)),
        COMPOSITE_ADD(oil_argb_B(d), oil_argb_B(val)));
  }
}
OIL_DEFINE_IMPL_FULL (composite_add_argb_const_src_sse,
    composite_add_argb_const_src, OIL_IMPL_FLAG_SSE2);
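
/* composite_add_u8: saturating byte-wise add of src onto dest for n bytes,
 * 16 bytes per SSE2 iteration. */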
SSE_FUNCTION static void
composite_add_u8_sse (uint8_t *dest, const uint8_t *src, int n)
{
  /* Initial operations to align the destination pointer */
  for (; ((long)dest & 15) && (n > 0); n--) {
    int x = (int)*dest + *src++;
    *dest++ = oil_clamp_255(x);
  }
  for (; n >= 16; n -= 16) {
    __m128i s, d;
    s = _mm_loadu_si128((__m128i *)src);
    d = _mm_adds_epu8(s, *(__m128i *)dest);
    _mm_store_si128((__m128i *)dest, d);
    dest += 16; src += 16;
  }
  for (; n > 0; n--) {
    int x = (int)*dest + *src++;
    *dest++ = oil_clamp_255(x);
  }
}
OIL_DEFINE_IMPL_FULL (composite_add_u8_sse, composite_add_u8,
    OIL_IMPL_FLAG_SSE2);
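
/* composite_add_u8_const_src: saturating add of the constant byte *src_1
 * onto each of the n destination bytes. */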
SSE_FUNCTION static void
composite_add_u8_const_src_sse (uint8_t *dest, const uint8_t *src_1, int n)
{
  __m128i s;
  uint8_t val = *src_1;
  /* Initial operations to align the destination pointer */
  for (; ((long)dest & 15) && (n > 0); n--) {
    *dest = oil_clamp_255(*dest + val); dest++;
  }
  s = _mm_set1_epi8(val);  /* broadcast the constant byte */
  for (; n >= 16; n -= 16) {
    __m128i d;
    d = _mm_adds_epu8(*(__m128i *)dest, s);
    _mm_store_si128((__m128i *)dest, d);
    dest += 16;
  }
  for (; n > 0; n--) {
    *dest = oil_clamp_255(*dest + val); dest++;
  }
}
OIL_DEFINE_IMPL_FULL (composite_add_u8_const_src_sse,
    composite_add_u8_const_src, OIL_IMPL_FLAG_SSE2);
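
/* Accessor stubs: each returns a pointer to the corresponding implementation
 * descriptor defined by OIL_DEFINE_IMPL_FULL above, presumably so the
 * descriptors can be reached through plain exported functions on this
 * platform. */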
OilFunctionImpl* __oil_function_impl_composite_add_argb_sse() {
  return &_oil_function_impl_composite_add_argb_sse;
}

OilFunctionImpl* __oil_function_impl_composite_add_argb_const_src_sse() {
  return &_oil_function_impl_composite_add_argb_const_src_sse;
}

OilFunctionImpl* __oil_function_impl_composite_add_u8_sse() {
  return &_oil_function_impl_composite_add_u8_sse;
}

OilFunctionImpl* __oil_function_impl_composite_add_u8_const_src_sse() {
  return &_oil_function_impl_composite_add_u8_const_src_sse;
}