/* SPDX-License-Identifier: GPL-2.0-only */

#include <linux/linkage.h>
#include <asm/asm.h>
#include <asm-generic/export.h>
#include <asm/alternative-macros.h>
#include <asm/errata_list.h>

/* int strncmp(const char *cs, const char *ct, size_t count) */
SYM_FUNC_START(strncmp)

	ALTERNATIVE("nop", "j strncmp_zbb", 0, RISCV_ISA_EXT_ZBB, CONFIG_RISCV_ISA_ZBB)

	/*
	 * Returns
	 *   a0 - comparison result, value like strncmp
	 *
	 * Parameters
	 *   a0 - string1
	 *   a1 - string2
	 *   a2 - number of characters to compare
	 *
	 * Clobbers
	 *   t0, t1, t2
	 */
	li	t2, 0
1:
	beq	a2, t2, 2f
	lbu	t0, 0(a0)
	lbu	t1, 0(a1)
	addi	a0, a0, 1
	addi	a1, a1, 1
	bne	t0, t1, 3f
	addi	t2, t2, 1
	bnez	t0, 1b
2:
	li	a0, 0
	ret
3:
	/*
	 * strncmp only needs to return (< 0, 0, > 0) values
	 * not necessarily -1, 0, +1
	 */
	sub	a0, t0, t1
	ret

/*
 * Variant of strncmp using the ZBB extension if available.
 */
#ifdef CONFIG_RISCV_ISA_ZBB
strncmp_zbb:

.option push
.option arch,+zbb

	/*
	 * Returns
	 *   a0 - comparison result, like strncmp
	 *
	 * Parameters
	 *   a0 - string1
	 *   a1 - string2
	 *   a2 - number of characters to compare
	 *
	 * Clobbers
	 *   t0, t1, t2, t3, t4, t5, t6
	 */

	/*
	 * t2 becomes non-zero if either pointer is not SZREG-aligned,
	 * t5 holds the all-ones pattern used with orc.b to detect null
	 * bytes, t4 is the address one past the last byte to compare.
	 */
	or	t2, a0, a1
	li	t5, -1
	and	t2, t2, SZREG-1
	add	t4, a0, a2
	bnez	t2, 4f

	/* Adjust limit for fast-path: round it down to a word boundary.  */
	andi	t6, t4, -SZREG

	/* Main loop for aligned strings.  */
	.p2align 3
1:
	bge	a0, t6, 3f
	REG_L	t0, 0(a0)
	REG_L	t1, 0(a1)
	/*
	 * orc.b writes 0xff into every non-zero byte and 0x00 into every
	 * zero byte, so t3 differs from -1 iff the word has a null byte.
	 */
	orc.b	t3, t0
	bne	t3, t5, 2f
	addi	a0, a0, SZREG
	addi	a1, a1, SZREG
	beq	t0, t1, 1b

	/*
	 * Words don't match, and no null byte in the first
	 * word. Get bytes in big-endian order and compare.
	 */
#ifndef CONFIG_CPU_BIG_ENDIAN
	rev8	t0, t0
	rev8	t1, t1
#endif

	/* Synthesize (t0 >= t1) ? 1 : -1 in a branchless sequence.  */
	sltu	a0, t0, t1
	neg	a0, a0
	ori	a0, a0, 1
	ret

2:
	/*
	 * Found a null byte.
	 * If words don't match, fall back to simple loop.
	 */
	bne	t0, t1, 3f

	/* Otherwise, strings are equal.  */
	li	a0, 0
	ret

	/* Simple loop for misaligned strings and for the tail of aligned strings.  */
3:
	/* Compare byte by byte against the full limit in t4.  */
	.p2align 3
4:
	bge	a0, t4, 6f
	lbu	t0, 0(a0)
	lbu	t1, 0(a1)
	addi	a0, a0, 1
	addi	a1, a1, 1
	bne	t0, t1, 5f
	bnez	t0, 4b

5:
	sub	a0, t0, t1
	ret

6:
	li	a0, 0
	ret

.option pop
#endif
SYM_FUNC_END(strncmp)