/*
 * This file contains assembly-language implementations
 * of IP-style 1's complement checksum routines.
 *
 * Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 *
 * Severely hacked about by Paul Mackerras (paulus@cs.anu.edu.au).
 */

#include <linux/sys.h>
#include <asm/processor.h>
#include <asm/errno.h>
#include <asm/ppc_asm.h>

	.text

/*
 * ip_fast_csum(buf, len) -- Optimized for IP header
 * len is in words and is always >= 5.
 */
_GLOBAL(ip_fast_csum)
	lwz	r0,0(r3)
	lwzu	r5,4(r3)
	addic.	r4,r4,-2
	addc	r0,r0,r5
	mtctr	r4
	blelr-
1:	lwzu	r4,4(r3)
	adde	r0,r0,r4
	bdnz	1b
	addze	r0,r0		/* add in final carry */
	rlwinm	r3,r0,16,0,31	/* fold two halves together */
	add	r3,r0,r3
	not	r3,r3
	srwi	r3,r3,16
	blr

/*
 * Compute checksum of TCP or UDP pseudo-header:
 *   csum_tcpudp_magic(saddr, daddr, len, proto, sum)
 */
_GLOBAL(csum_tcpudp_magic)
	rlwimi	r5,r6,16,0,15	/* put proto in upper half of len */
	addc	r0,r3,r4	/* add 4 32-bit words together */
	adde	r0,r0,r5
	adde	r0,r0,r7
	addze	r0,r0		/* add in final carry */
	rlwinm	r3,r0,16,0,31	/* fold two halves together */
	add	r3,r0,r3
	not	r3,r3
	srwi	r3,r3,16
	blr

/*
 * computes the checksum of a memory block at buff, length len,
 * and adds in "sum" (32-bit)
 *
 * csum_partial(buff, len, sum)
 */
_GLOBAL(csum_partial)
	addic	r0,r5,0
	subi	r3,r3,4
	srwi.	r6,r4,2
	beq	3f		/* if we're doing < 4 bytes */
	andi.	r5,r3,2		/* Align buffer to longword boundary */
	beq+	1f
	lhz	r5,4(r3)	/* do 2 bytes to get aligned */
	addi	r3,r3,2
	subi	r4,r4,2
	addc	r0,r0,r5
	srwi.	r6,r4,2		/* # words to do */
	beq	3f
1:	mtctr	r6
2:	lwzu	r5,4(r3)	/* the bdnz has zero overhead, so it should */
	adde	r0,r0,r5	/* be unnecessary to unroll this loop */
	bdnz	2b
	andi.	r4,r4,3
3:	cmpwi	0,r4,2
	blt+	4f
	lhz	r5,4(r3)
	addi	r3,r3,2
	subi	r4,r4,2
	adde	r0,r0,r5
4:	cmpwi	0,r4,1
	bne+	5f
	lbz	r5,4(r3)
	slwi	r5,r5,8		/* Upper byte of word */
	adde	r0,r0,r5
5:	addze	r3,r0		/* add in final carry */
	blr
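
/*
 * Note on the "fold two halves together" sequences above: ip_fast_csum
 * and csum_tcpudp_magic reduce their 32-bit running sum to a 16-bit
 * one's-complement result by adding the rotated sum to itself and
 * returning the complemented upper halfword; csum_partial instead
 * returns the unfolded 32-bit sum and leaves that fold to its caller.
 * A rough C equivalent of the fold, as an illustrative sketch only
 * (the helper name below is made up for this comment, it is not part
 * of this file's interface):
 *
 *	static inline unsigned short fold_to_16bit(unsigned int sum)
 *	{
 *		sum = (sum >> 16) + (sum & 0xffff);	-- add the two halves
 *		sum += sum >> 16;			-- fold any carry back in
 *		return (unsigned short)~sum;		-- one's complement
 *	}
 */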

/*
 * Computes the checksum of a memory block at src, length len,
 * and adds in "sum" (32-bit), while copying the block to dst.
 * If an access exception occurs on src or dst, it stores -EFAULT
 * to *src_err or *dst_err respectively, and (for an error on
 * src) zeroes the rest of dst.
 *
 * csum_partial_copy_generic(src, dst, len, sum, src_err, dst_err)
 */
_GLOBAL(csum_partial_copy_generic)
	addic	r0,r6,0
	subi	r3,r3,4
	subi	r4,r4,4
	srwi.	r6,r5,2
	beq	3f		/* if we're doing < 4 bytes */
	andi.	r9,r4,2		/* Align dst to longword boundary */
	beq+	1f
81:	lhz	r6,4(r3)	/* do 2 bytes to get aligned */
	addi	r3,r3,2
	subi	r5,r5,2
91:	sth	r6,4(r4)
	addi	r4,r4,2
	addc	r0,r0,r6
	srwi.	r6,r5,2		/* # words to do */
	beq	3f
1:	srwi.	r6,r5,4		/* # groups of 4 words to do */
	beq	10f
	mtctr	r6
71:	lwz	r6,4(r3)
72:	lwz	r9,8(r3)
73:	lwz	r10,12(r3)
74:	lwzu	r11,16(r3)
	adde	r0,r0,r6
75:	stw	r6,4(r4)
	adde	r0,r0,r9
76:	stw	r9,8(r4)
	adde	r0,r0,r10
77:	stw	r10,12(r4)
	adde	r0,r0,r11
78:	stwu	r11,16(r4)
	bdnz	71b
10:	rlwinm.	r6,r5,30,30,31	/* # words left to do */
	beq	13f
	mtctr	r6
82:	lwzu	r9,4(r3)
92:	stwu	r9,4(r4)
	adde	r0,r0,r9
	bdnz	82b
13:	andi.	r5,r5,3
3:	cmpwi	0,r5,2
	blt+	4f
83:	lhz	r6,4(r3)
	addi	r3,r3,2
	subi	r5,r5,2
93:	sth	r6,4(r4)
	addi	r4,r4,2
	adde	r0,r0,r6
4:	cmpwi	0,r5,1
	bne+	5f
84:	lbz	r6,4(r3)
94:	stb	r6,4(r4)
	slwi	r6,r6,8		/* Upper byte of word */
	adde	r0,r0,r6
5:	addze	r3,r0		/* add in final carry */
	blr

/* These shouldn't go in the fixup section, since that would
   cause the ex_table addresses to get out of order. */

src_error_4:
	mfctr	r6		/* update # bytes remaining from ctr */
	rlwimi	r5,r6,4,0,27
	b	79f
src_error_1:
	li	r6,0
	subi	r5,r5,2
95:	sth	r6,4(r4)
	addi	r4,r4,2
79:	srwi.	r6,r5,2
	beq	3f
	mtctr	r6
src_error_2:
	li	r6,0
96:	stwu	r6,4(r4)
	bdnz	96b
3:	andi.	r5,r5,3
	beq	src_error
src_error_3:
	li	r6,0
	mtctr	r5
	addi	r4,r4,3
97:	stbu	r6,1(r4)
	bdnz	97b
src_error:
	cmpwi	0,r7,0
	beq	1f
	li	r6,-EFAULT
	stw	r6,0(r7)
1:	addze	r3,r0
	blr

dst_error:
	cmpwi	0,r8,0
	beq	1f
	li	r6,-EFAULT
	stw	r6,0(r8)
1:	addze	r3,r0
	blr

.section __ex_table,"a"
	.long	81b,src_error_1
	.long	91b,dst_error
	.long	71b,src_error_4
	.long	72b,src_error_4
	.long	73b,src_error_4
	.long	74b,src_error_4
	.long	75b,dst_error
	.long	76b,dst_error
	.long	77b,dst_error
	.long	78b,dst_error
	.long	82b,src_error_2
	.long	92b,dst_error
	.long	83b,src_error_3
	.long	93b,dst_error
	.long	84b,src_error_3
	.long	94b,dst_error
	.long	95b,dst_error
	.long	96b,dst_error
	.long	97b,dst_error
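
/*
 * Each pair in the __ex_table section above names a user-memory access
 * (the numbered labels in csum_partial_copy_generic and its fixup code)
 * and the handler to branch to if that access faults: a faulting load
 * from src goes to one of the src_error_N paths, which zero the
 * remainder of dst and then report -EFAULT through *src_err, while a
 * faulting store to dst goes to dst_error, which reports -EFAULT
 * through *dst_err.  Both paths still return the checksum accumulated
 * so far.
 */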