/*
 *  linux/arch/arm/lib/csumpartialcopyuser.S
 *
 *  Copyright (C) 1995-1998 Russell King
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 *  27/03/03	Ian Molton	Clean up CONFIG_CPU
 *
 */
#include <linux/linkage.h>
#include <asm/assembler.h>
#include <asm/errno.h>
#include <asm/asm-offsets.h>

		.text

		@ Build an APCS frame; r1 (dst) and r2 (len) are saved so the
		@ .fixup handler below can retrieve them after a fault.
		.macro	save_regs
		mov	ip, sp
		stmfd	sp!, {r1 - r2, r4 - r8, fp, ip, lr, pc}
		sub	fp, ip, #4
		.endm

		@ Restore the saved registers and return (sp and pc are
		@ reloaded from the frame).
		.macro	load_regs
		ldmfd	sp, {r1, r2, r4 - r8, fp, sp, pc}
		.endm

		@ Each loadXX macro performs unprivileged (user-mode) loads
		@ from [r0]; the __ex_table entries redirect a faulting
		@ access to the fixup code at 6001f.
		.macro	load1b, reg1
9999:		ldrbt	\reg1, [r0], $1
		.section __ex_table, "a"
		.align	3
		.long	9999b, 6001f
		.previous
		.endm

		.macro	load2b, reg1, reg2
9999:		ldrbt	\reg1, [r0], $1
9998:		ldrbt	\reg2, [r0], $1
		.section __ex_table, "a"
		.long	9999b, 6001f
		.long	9998b, 6001f
		.previous
		.endm

		.macro	load1l, reg1
9999:		ldrt	\reg1, [r0], $4
		.section __ex_table, "a"
		.align	3
		.long	9999b, 6001f
		.previous
		.endm

		.macro	load2l, reg1, reg2
9999:		ldrt	\reg1, [r0], $4
9998:		ldrt	\reg2, [r0], $4
		.section __ex_table, "a"
		.long	9999b, 6001f
		.long	9998b, 6001f
		.previous
		.endm

		.macro	load4l, reg1, reg2, reg3, reg4
9999:		ldrt	\reg1, [r0], $4
9998:		ldrt	\reg2, [r0], $4
9997:		ldrt	\reg3, [r0], $4
9996:		ldrt	\reg4, [r0], $4
		.section __ex_table, "a"
		.long	9999b, 6001f
		.long	9998b, 6001f
		.long	9997b, 6001f
		.long	9996b, 6001f
		.previous
		.endm

/*
 * unsigned int
 * csum_partial_copy_from_user(const char *src, char *dst, int len, int sum, int *err_ptr)
 *  r0 = src, r1 = dst, r2 = len, r3 = sum, [sp] = *err_ptr
 *  Returns : r0 = checksum, [[sp, #0], #0] = 0 or -EFAULT
 */

#define FN_ENTRY	ENTRY(csum_partial_copy_from_user)

#include "csumpartialcopygeneric.S"

/*
 * FIXME: minor buglet here
 * We don't return the checksum for the data present in the buffer.  To do
 * so properly, we would have to add in whatever registers were loaded before
 * the fault, which, with the current asm above, is not predictable.
 */
		.section .fixup,"ax"
		.align	4
6001:		mov	r4, #-EFAULT
		ldr	r5, [fp, #4]		@ *err_ptr
		str	r4, [r5]
		ldmia	sp, {r1, r2}		@ retrieve dst, len
		add	r2, r2, r1		@ r2 = end of destination buffer
		mov	r0, #0			@ zero the buffer
6002:		teq	r2, r1
		strneb	r0, [r1], #1
		bne	6002b
		load_regs
		.previous
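
@ Illustrative caller sketch (an assumption added for clarity, not part of
@ the original file): given the signature documented above, a caller is
@ expected to pass the address of an error flag as err_ptr and test it
@ after the call, roughly as below.  The names err, csum and handle_fault
@ are hypothetical.
@
@	int err = 0;
@	unsigned int csum;
@
@	csum = csum_partial_copy_from_user(src, dst, len, sum, &err);
@	if (err)		/* a load faulted: *err_ptr = -EFAULT, */
@		handle_fault();	/* and dst was zeroed by the fixup code */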