2 * Copyright (c) 2009 Mans Rullgard <mans@mansr.com>
4 * This file is part of FFmpeg.
6 * FFmpeg is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2.1 of the License, or (at your option) any later version.
11 * FFmpeg is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with FFmpeg; if not, write to the Free Software
18 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@ call_2x_pixels: emit a 16-pixel-wide wrapper around an 8-pixel routine.
@ The generated ff_<type>_pixels16<subp>_armv6 handles the left 8 columns
@ via `bl` to the 8-pixel function, then tail-calls (`b`) the same function
@ for the right half so its `ret` returns to the original caller.
@ NOTE(review): this view is a fragment — the argument save/restore and the
@ pointer adjustment between the two calls, plus the closing .endm, are not
@ visible here; do not assume register state beyond what is shown.
25 .macro call_2x_pixels type, subp
26 function ff_\type\()_pixels16\subp\()_armv6, export=1
28 bl ff_\type\()_pixels8\subp\()_armv6
32 b ff_\type\()_pixels8\subp\()_armv6
@ Instantiate the 16-pixel "put" wrappers for the four half-pel variants
@ (horizontal x2, vertical y2, and their no-rounding counterparts).
37 call_2x_pixels put, _x2
38 call_2x_pixels put, _y2
39 call_2x_pixels put, _x2_no_rnd
40 call_2x_pixels put, _y2_no_rnd
@ ff_put_pixels16_armv6: straight 16-byte-wide pixel block copy.
@ NOTE(review): fragment — only the function header and one store of the
@ upper 8 bytes are visible; presumably the standard FFmpeg signature
@ (block, pixels, line_size, h) with r0 = dst — TODO confirm upstream.
42 function ff_put_pixels16_armv6, export=1
55 strd r10, r11, [r0, #8]
@ ff_put_pixels8_armv6: straight 8-byte-wide pixel block copy.
@ NOTE(review): body not visible in this fragment.
64 function ff_put_pixels8_armv6, export=1
@ ff_put_pixels8_x2_armv6: 8-wide copy with horizontal half-pel averaging
@ (rounding variant).
@ NOTE(review): fragment — most of the body is missing from this view.
80 function ff_put_pixels8_x2_armv6, export=1
@ Replicate a byte constant across all four lanes of r12
@ (presumably the 0x01010101 rounding constant — TODO confirm).
83 orr r12, r12, r12, lsl #8
84 orr r12, r12, r12, lsl #16
@ Build the 1-byte-shifted copies of the source words for the x+1 average.
92 orr r6, r6, r5, lsl #24
97 orr r10, r10, r9, lsl #24
@ Store two averaged rows, post-incrementing dst by the line stride in r2.
113 strd r4, r5, [r0], r2
115 strd r8, r9, [r0], r2
@ ff_put_pixels8_y2_armv6: 8-wide copy with vertical half-pel averaging
@ (rounding variant).
@ NOTE(review): fragment — most of the body is missing from this view.
121 function ff_put_pixels8_y2_armv6, export=1
@ Replicate a byte constant across r12 (presumably the rounding bias).
124 orr r12, r12, r12, lsl #8
125 orr r12, r12, r12, lsl #16
@ Store two averaged rows, advancing dst by the stride in r2.
151 strd r8, r9, [r0], r2
153 strd r10, r11, [r0], r2
@ ff_put_pixels8_x2_no_rnd_armv6: horizontal half-pel average without the
@ rounding bias (no_rnd variant).
@ NOTE(review): fragment — only the shifted-copy construction is visible.
160 function ff_put_pixels8_x2_no_rnd_armv6, export=1
@ Build the 1-byte-shifted source words used for the x+1 average.
172 orr r6, r6, r5, lsl #24
174 orr r12, r12, r9, lsl #24
@ ff_put_pixels8_y2_no_rnd_armv6: vertical half-pel average, no rounding.
@ NOTE(review): body not visible in this fragment.
188 function ff_put_pixels8_y2_no_rnd_armv6, export=1
@ ff_avg_pixels8_armv6: average an 8-wide source block into an existing
@ destination block (dst = avg(dst, src)).
@ NOTE(review): fragment — the loads of src, the averaging arithmetic and
@ the loop control are not visible here.
213 function ff_avg_pixels8_armv6, export=1
@ Replicate a byte constant across lr (presumably the rounding constant
@ for the pairwise average — TODO confirm initial value of lr).
217 orr lr, lr, lr, lsl #8
218 orr lr, lr, lr, lsl #16
@ Load the next destination row (r0 = dst base, r2 = stride).
228 ldrd r6, r7, [r0, r2]
@ Store an averaged row and advance dst by the stride.
240 strd r4, r5, [r0], r2
244 ldrd r4, r5, [r0, r2]
251 strd r6, r7, [r0], r2
258 strd r6, r7, [r0], r2
@ ff_add_pixels_clamped_armv6: add signed 16-bit IDCT coefficients (r0) to
@ unsigned 8-bit pixels and store the saturated result (r1 = pixels,
@ r2 = line stride).
@ NOTE(review): fragment — the pixel loads into r6/r7 and the saturation
@ step between the visible lines are not shown in this view.
263 function ff_add_pixels_clamped_armv6, export=1
@ Load eight 16-bit coefficients (four words) and advance the block ptr.
267 ldm r0!, {r4,r5,r12,lr}
@ Repack the halfwords: gather the even-indexed coefficients into r8/r4
@ and the odd-indexed ones into r5/lr so byte lanes line up for uxtab16.
269 pkhbt r8, r4, r5, lsl #16
270 pkhtb r5, r5, r4, asr #16
271 pkhbt r4, r12, lr, lsl #16
272 pkhtb lr, lr, r12, asr #16
@ Add the odd pixel bytes (rotated into the low byte of each halfword)
@ to the odd coefficients.
275 uxtab16 r5, r5, r6, ror #8
277 uxtab16 lr, lr, r7, ror #8
@ Re-interleave even and odd result bytes into two output words.
282 orr r6, r8, r5, lsl #8
283 orr r7, r4, lr, lsl #8
@ Store one 8-pixel row and advance the pixel pointer by the stride.
285 strd r6, r7, [r1], r2
@ ff_pix_abs16_armv6: sum of absolute differences over a 16-wide block,
@ accumulated four bytes at a time with USADA8 into r12 and lr.
@ NOTE(review): fragment — the loads of the two source rows and the final
@ reduction of r12+lr into the return value are not visible here.
290 function ff_pix_abs16_armv6, export=1
@ Accumulate |r4-r8| and |r6-r8| byte SADs into r12, |r5-r9| and |r7-r9|
@ into lr (two parallel accumulators hide the USADA8 latency).
300 usada8 r12, r4, r8, r12
303 usada8 lr, r5, r9, lr
305 usada8 r12, r6, r8, r12
307 usada8 lr, r7, r9, lr
@ ff_pix_abs16_x2_armv6: SAD of a 16-wide block against the horizontal
@ half-pel interpolation of the reference (avg of ref[x] and ref[x+1]).
@ NOTE(review): fragment — the loads, the averaging itself and the loop
@ control are missing from this view; r0 doubles as the SAD accumulator.
319 function ff_pix_abs16_x2_armv6, export=1
@ Replicate a byte constant across lr (presumably the rounding constant
@ for the pairwise average — TODO confirm).
324 orr lr, lr, lr, lsl #8
325 orr lr, lr, lr, lsl #16
@ Construct the 1-byte-shifted reference words needed for the x+1 term.
332 orr r10, r10, r9, lsl #24
336 orr r6, r6, r5, lsl #24
@ Fold byte SADs of source vs. interpolated reference into r0.
340 usada8 r0, r4, r7, r0
348 orr r10, r10, r6, lsl #24
349 usada8 r0, r8, r4, r0
359 usada8 r0, r4, r8, r0
360 orr r10, r10, r7, lsl #24
368 usada8 r0, r5, r9, r0
@ usad_y2: helper macro for the vertical-half-pel SAD. p0-p3 hold the
@ current reference rows, n0-n3 the next rows; byte SADs are accumulated
@ into r0 with USADA8.
@ NOTE(review): fragment — the averaging instructions between the visible
@ usada8 lines and the closing .endm are not shown, so the exact operand
@ pairing (e.g. \p3 vs \p0 on the last line) cannot be verified from here.
374 .macro usad_y2 p0, p1, p2, p3, n0, n1, n2, n3
382 usada8 r0, \p0, \n2, r0
390 usada8 r0, \p1, \p0, r0
398 usada8 r0, \p2, \p1, r0
405 usada8 r0, \p3, \p0, r0
@ ff_pix_abs16_y2_armv6: SAD of a 16-wide block against the vertical
@ half-pel interpolation of the reference, built from two usad_y2
@ invocations with the row register sets swapped (double buffering).
@ NOTE(review): fragment — the function continues past this view; loads,
@ loop control and the return are not visible.
409 function ff_pix_abs16_y2_armv6, export=1
@ Replicate a byte constant across lr (presumably the rounding constant).
416 orr lr, lr, lr, lsl #8
417 orr lr, lr, lr, lsl #16
@ Process rows r4-r7 against next rows r8-r11, then swap the roles.
424 usad_y2 r4, r5, r6, r7, r8, r9, r10, r11
426 usad_y2 r8, r9, r10, r11, r4, r5, r6, r7