cachepc-linux

Fork of AMDESE/linux with modifications for the CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

strrchr.S (2854B)


/* SPDX-License-Identifier: GPL-2.0 */
/*
 * arch/alpha/lib/strrchr.S
 * Contributed by Richard Henderson (rth@tamu.edu)
 *
 * Return the address of the last occurrence of a given character
 * within a null-terminated string, or null if it is not found.
 */
#include <asm/export.h>
#include <asm/regdef.h>

	.set noreorder
	.set noat

	.align 3
	.ent strrchr
	.globl strrchr
strrchr:
	.frame sp, 0, ra
	.prologue 0

	zapnot	a1, 1, a1	# e0    : zero extend our test character
	mov	zero, t6	# .. e1 : t6 is last match aligned addr
	sll	a1, 8, t5	# e0    : replicate our test character
	mov	zero, t8	# .. e1 : t8 is last match byte compare mask
	or	t5, a1, a1	# e0    :
	ldq_u   t0, 0(a0)	# .. e1 : load first quadword
	sll	a1, 16, t5	# e0    :
	andnot  a0, 7, v0	# .. e1 : align source addr
	or	t5, a1, a1	# e0    :
	lda	t4, -1		# .. e1 : build garbage mask
	sll	a1, 32, t5	# e0    :
	cmpbge  zero, t0, t1	# .. e1 : bits set iff byte == zero
	mskqh	t4, a0, t4	# e0    :
	or	t5, a1, a1	# .. e1 : character replication complete
	xor	t0, a1, t2	# e0    : make bytes == c zero
	cmpbge	zero, t4, t4	# .. e1 : bits set iff byte is garbage
	cmpbge  zero, t2, t3	# e0    : bits set iff byte == c
	andnot	t1, t4, t1	# .. e1 : clear garbage from null test
	andnot	t3, t4, t3	# e0    : clear garbage from char test
	bne	t1, $eos	# .. e1 : did we already hit the terminator?

	/* Character search main loop */
$loop:
	ldq	t0, 8(v0)	# e0    : load next quadword
	cmovne	t3, v0, t6	# .. e1 : save previous comparison's match
	cmovne	t3, t3, t8	# e0    :
	addq	v0, 8, v0	# .. e1 :
	xor	t0, a1, t2	# e0    :
	cmpbge	zero, t0, t1	# .. e1 : bits set iff byte == zero
	cmpbge	zero, t2, t3	# e0    : bits set iff byte == c
	beq	t1, $loop	# .. e1 : if we haven't seen a null, loop

	/* Mask out character matches after terminator */
$eos:
	negq	t1, t4		# e0    : isolate first null byte match
	and	t1, t4, t4	# e1    :
	subq	t4, 1, t5	# e0    : build a mask of the bytes up to...
	or	t4, t5, t4	# e1    : ... and including the null

	and	t3, t4, t3	# e0    : mask out char matches after null
	cmovne	t3, t3, t8	# .. e1 : save it, if match found
	cmovne	t3, v0, t6	# e0    :

	/* Locate the address of the last matched character */

	/* Retain the early exit for the ev4 -- the ev5 mispredict penalty
	   is 5 cycles -- the same as just falling through.  */
	beq	t8, $retnull	# .. e1 :

	and	t8, 0xf0, t2	# e0    : binary search for the high bit set
	cmovne	t2, t2, t8	# .. e1 (zdb)
	cmovne	t2, 4, t2	# e0    :
	and	t8, 0xcc, t1	# .. e1 :
	cmovne	t1, t1, t8	# e0    :
	cmovne	t1, 2, t1	# .. e1 :
	and	t8, 0xaa, t0	# e0    :
	cmovne	t0, 1, t0	# .. e1 (zdb)
	addq	t2, t1, t1	# e0    :
	addq	t6, t0, v0	# .. e1 : add our aligned base ptr to the mix
	addq	v0, t1, v0	# e0    :
	ret			# .. e1 :

$retnull:
	mov	zero, v0	# e0    :
	ret			# .. e1 :

	.end strrchr
	EXPORT_SYMBOL(strrchr)
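
The core of the routine is a word-at-a-time scan: the test character is replicated into every byte lane of a 64-bit quadword (the sll/or sequence), and each aligned quadword of the string is reduced by cmpbge to two per-byte bitmasks, one marking NUL bytes and one marking bytes equal to the character (the xor first turns matching bytes into zero bytes). A rough C model of those per-quadword tests, with hypothetical helper names and no claim to match any kernel C implementation, might look like this:

#include <stdint.h>

/* Hypothetical model: bit i of the result is set iff byte i of x is
 * zero, mirroring what "cmpbge zero, x, mask" computes on Alpha. */
static unsigned byte_is_zero_mask(uint64_t x)
{
	unsigned mask = 0;
	for (int i = 0; i < 8; i++)
		if (((x >> (8 * i)) & 0xff) == 0)
			mask |= 1u << i;
	return mask;
}

/* Replicate the test character into all eight byte lanes, as the
 * sll/or sequence at the top of strrchr does. */
static uint64_t replicate_byte(unsigned char c)
{
	uint64_t r = c;
	r |= r << 8;
	r |= r << 16;
	r |= r << 32;
	return r;
}

/* For one aligned quadword of the string, derive the two masks the
 * loop works with: NUL bytes and bytes equal to the character. */
static void quadword_tests(uint64_t word, uint64_t rep_c,
			   unsigned *zeros, unsigned *matches)
{
	*zeros = byte_is_zero_mask(word);
	*matches = byte_is_zero_mask(word ^ rep_c);	/* byte == c  <=>  byte ^ c == 0 */
}

The main loop only records the aligned address and match mask of the most recent quadword that contained a match (t6 and t8), so no backtracking is needed once the terminator is found.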
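
At $eos the code discards matches that lie beyond the terminator with a standard bit trick: negq/and isolates the lowest set bit of the NUL mask (the first terminator byte), and subq/or expands it into a mask covering every byte up to and including that NUL. A C sketch of the same step, reusing the illustrative masks from above:

/* Keep only character matches at or before the first NUL byte.
 * zeros is assumed non-zero here: the loop only exits once a NUL
 * (or the initial garbage-filtered quadword's NUL) has been seen. */
static unsigned matches_before_nul(unsigned matches, unsigned zeros)
{
	unsigned first_nul = zeros & -zeros;		/* lowest set bit: first NUL byte */
	unsigned keep = first_nul | (first_nul - 1);	/* that byte and everything below it */
	return matches & keep;
}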
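
The and/cmovne ladder over the constants 0xf0, 0xcc and 0xaa in the epilogue is a three-step binary search that converts the surviving match mask in t8 into the byte index of its highest set bit, i.e. the offset of the last matching byte within its quadword; that offset is then added to the saved aligned base address in t6. The equivalent logic in C, again only as an illustration:

/* Index (0..7) of the highest set bit of a non-zero 8-bit mask,
 * following the same 0xf0/0xcc/0xaa ladder as the assembly. */
static unsigned last_match_offset(unsigned mask)
{
	unsigned idx = 0, t;

	t = mask & 0xf0;	/* is the last match in the high four bytes? */
	if (t) { mask = t; idx += 4; }
	t = mask & 0xcc;	/* ...in the high pair of that group? */
	if (t) { mask = t; idx += 2; }
	t = mask & 0xaa;	/* ...in the high byte of that pair? */
	if (t) { idx += 1; }
	return idx;
}

In these terms the returned pointer is t6 + last_match_offset(t8); when t8 is zero, meaning no match was ever recorded, the $retnull path returns NULL instead.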