xref: /openbmc/linux/arch/arc/lib/strchr-700.S (revision 75bf465f0bc33e9b776a46d6a1b9b990f5fb7c37)
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 */

/* ARC700 has a relatively long pipeline and branch prediction, so we want
   to avoid branches that are hard to predict.  On the other hand, the
   presence of the norm instruction makes it easier to operate on whole
   words branch-free.  */

#include <linux/linkage.h>

;-----------------------------------------------------------------------
; char *strchr(const char *s, int c)
; In:    r0 = s, r1 = c
; Out:   r0 = address of the first occurrence of (char)c in s,
;        or 0 if the terminating NUL is reached first
;
; Scans a word at a time using the classic zero-byte test
;     (x - 0x01010101) & ~x & 0x80808080
; applied both to the word itself (NUL detection) and to the word XORed
; with the search character replicated into every byte (match detection).
; Branches suffixed ".d" have a delay slot: the following instruction
; executes whether or not the branch is taken.
;-----------------------------------------------------------------------
ENTRY_CFI(strchr)
	extb_s	r1,r1			; c = (unsigned char)c
	asl	r5,r1,8
	bmsk	r2,r0,1			; r2 = s masked to its low bits (byte offset in word)
	or	r5,r5,r1		; r5 = c in the two low bytes
	mov_s	r3,0x01010101		; per-byte 0x01 constant
	breq.d	r2,r0,.Laligned		; to aligned scan; NOTE(review): cond is r2==r0 - confirm dispatch intent
	asl	r4,r5,16		; (delay slot) r4 = c in bytes 2 and 3

	; Unaligned head: round s down to a word boundary; r7 becomes a
	; shifted copy of 0x01010101 so the stale bytes before the true
	; start of the string cannot produce a false NUL/match hit.
	sub_s	r0,r0,r2		; align r0 down to the word boundary
	asl	r7,r2,3			; misalignment in bits
	ld_s	r2,[r0]			; first aligned word
#ifdef __LITTLE_ENDIAN__
	asl	r7,r3,r7		; stale bytes are the low-order ones
#else
	lsr	r7,r3,r7		; stale bytes are the high-order ones
#endif
	or	r5,r5,r4		; r5 = c replicated in all four bytes
	ror	r4,r3			; r4 = 0x80808080 (per-byte top bit)
	sub	r12,r2,r7		; NUL test on the valid bytes:
	bic_s	r12,r12,r2		;   (w - ones) & ~w ...
	and	r12,r12,r4		;   ... & 0x80808080
	brne.d	r12,0,.Lfound0_ua	; word holds a NUL: resolve match-vs-NUL order
	xor	r6,r2,r5		; (delay slot) zero byte in r6 <=> byte == c
	ld.a	r2,[r0,4]		; prefetch next word, r0 += 4
	sub	r12,r6,r7		; match test on the valid bytes
	bic	r12,r12,r6
#ifdef __LITTLE_ENDIAN__
	and	r7,r12,r4		; r7 = per-byte match bits
	breq	r7,0,.Loop ; For speed, we want this branch to be unaligned.
	b	.Lfound_char ; Likewise this one.
#else
	and	r12,r12,r4		; r12 = per-byte match bits
	breq	r12,0,.Loop ; For speed, we want this branch to be unaligned.
	lsr_s	r12,r12,7
	bic 	r2,r7,r6
	b.d	.Lfound_char_b
	and_s	r2,r2,r12		; (delay slot)
#endif
; /* We require this code address to be unaligned for speed...  */
.Laligned:
	ld_s	r2,[r0]			; first word; s already word-aligned here
	or	r5,r5,r4		; r5 = c replicated in all four bytes
	ror	r4,r3			; r4 = 0x80808080
; /* ... so that this code address is aligned, for itself and ...  */
.Loop:
	; Loop invariants: r0 points at the word held in r2;
	; r3 = 0x01010101, r4 = 0x80808080, r5 = c replicated four times.
	sub	r12,r2,r3		; NUL test: (w - ones) & ~w & highs
	bic_s	r12,r12,r2
	and	r12,r12,r4
	brne.d	r12,0,.Lfound0		; NUL present: resolve match-vs-NUL order
	xor	r6,r2,r5		; (delay slot) zero byte in r6 <=> byte == c
	ld.a	r2,[r0,4]		; prefetch next word, r0 += 4
	sub	r12,r6,r3		; match test on r6
	bic	r12,r12,r6
	and	r7,r12,r4		; r7 = per-byte match bits
	breq	r7,0,.Loop /* ... so that this branch is unaligned.  */
	; Found searched-for character.  r0 has already advanced to next word.
#ifdef __LITTLE_ENDIAN__
/* We only need the information about the first matching byte
   (i.e. the least significant matching byte) to be exact,
   hence there is no problem with carry effects.  */
.Lfound_char:
	sub	r3,r7,1			; isolate the lowest match bit:
	bic	r3,r3,r7		;   (r7 - 1) & ~r7
	norm	r2,r3			; bit position via norm (leading-bit count, per ISA NORM)
	sub_s	r0,r0,1
	asr_s	r2,r2,3			; bit offset -> byte offset
	j.d	[blink]
	sub_s	r0,r0,r2		; (delay slot) r0 = address of the match

	.balign	4
.Lfound0_ua:
	mov	r3,r7			; unaligned head: use the masked ones constant
.Lfound0:
	; The word contains a NUL (NUL bits in r12, word^c in r6).
	; Recompute the match bits, merge with the NUL bits, and return
	; NULL if the NUL comes first.
	sub	r3,r6,r3		; match test on r6
	bic	r3,r3,r6
	and	r2,r3,r4		; r2 = match bits
	or_s	r12,r12,r2		; r12 = match bits | NUL bits
	sub_s	r3,r12,1		; isolate the lowest combined bit
	bic_s	r3,r3,r12
	norm	r3,r3			; its position via norm
	add_s	r0,r0,3			; compensate for the ld.a post-advance
	asr_s	r12,r3,3		; bit offset -> byte offset
	asl.f	0,r2,r3			; flags: was the first hit a match or the NUL?
	sub_s	r0,r0,r12
	j_s.d	[blink]
	mov.pl	r0,0			; (delay slot) NUL came first -> return NULL
#else /* BIG ENDIAN */
.Lfound_char:
	lsr	r7,r7,7			; move match bits down within each byte

	bic	r2,r7,r6		; drop bits from non-matching bytes
.Lfound_char_b:
	norm	r2,r2			; first matching byte via norm (leading-bit count)
	sub_s	r0,r0,4			; undo the ld.a advance
	asr_s	r2,r2,3			; bit offset -> byte offset
	j.d	[blink]
	add_s	r0,r0,r2		; (delay slot) r0 = address of the match

.Lfound0_ua:
	mov_s	r3,r7			; unaligned head: use the masked ones constant
.Lfound0:
	; The word contains a NUL (NUL bits in r12, word^c in r6).
	; Work out whether a match byte precedes the NUL; if the NUL
	; comes first, return NULL.
	asl_s	r2,r2,7
	or	r7,r6,r4
	bic_s	r12,r12,r2
	sub	r2,r7,r3
	or	r2,r2,r6
	bic	r12,r2,r12
	bic.f	r3,r4,r12
	norm	r3,r3			; position of the first hit via norm

	add.pl	r3,r3,1
	asr_s	r12,r3,3		; bit offset -> byte offset
	asl.f	0,r2,r3			; flags select match-vs-NUL outcome
	add_s	r0,r0,r12
	j_s.d	[blink]
	mov.mi	r0,0			; (delay slot) NUL came first -> return NULL
#endif /* ENDIAN */
END_CFI(strchr)
131