/*
 * memset - fill memory with a constant byte
 *
 * Copyright (c) 2012-2020, Arm Limited.
 * SPDX-License-Identifier: MIT
 */

/* Assumptions:
 *
 * ARMv8-a, AArch64, Advanced SIMD, unaligned accesses.
 *
 */

#define dstin   x0
#define val     x1
#define valw    w1
#define count   x2
#define dst     x3
#define dstend  x4
#define zva_val x5

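/* The fill byte is broadcast into v0.  Sets of up to 96 bytes are done
   with a few (possibly overlapping) stores from the start and the end,
   larger sets use an unrolled store-pair loop, and large zero fills use
   the DC ZVA cache-line zeroing instruction when the ZVA block size is
   64 bytes.  */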
.global memset
.type memset,%function
memset:

	dup     v0.16B, valw
	add     dstend, dstin, count

	cmp     count, 96
	b.hi    .Lset_long
	cmp     count, 16
	b.hs    .Lset_medium
	mov     val, v0.D[0]

	/* Set 0..15 bytes.  */
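	/* Branch on the low count bits: an 8-byte store from each end
	   covers 8..15 bytes, a 4-byte store from each end covers 4..7,
	   and byte/halfword stores cover 1..3.  The two stores of a pair
	   may overlap.  */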
	tbz     count, 3, 1f
	str     val, [dstin]
	str     val, [dstend, -8]
	ret
	nop
1:      tbz     count, 2, 2f
	str     valw, [dstin]
	str     valw, [dstend, -4]
	ret
2:      cbz     count, 3f
	strb    valw, [dstin]
	tbz     count, 1, 3f
	strh    valw, [dstend, -2]
3:      ret

	/* Set 16..96 bytes.  */
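	/* 16..31 bytes: one 16-byte store from each end (possibly
	   overlapping).  32..63 bytes add a second pair at dstin+16 and
	   dstend-32.  64..96 bytes are handled by set96 below.  */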
.Lset_medium:
	str     q0, [dstin]
	tbnz    count, 6, .Lset96
	str     q0, [dstend, -16]
	tbz     count, 5, 1f
	str     q0, [dstin, 16]
	str     q0, [dstend, -32]
1:      ret

	.p2align 4
	/* Set 64..96 bytes.  Write 64 bytes from the start and
	   32 bytes from the end.  */
.Lset96:
	str     q0, [dstin, 16]
	stp     q0, q0, [dstin, 32]
	stp     q0, q0, [dstend, -32]
	ret

	.p2align 4
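	/* Set more than 96 bytes.  DC ZVA can only write zeros, so it is
	   used only for zero fills of at least 160 bytes; other fills use
	   the store-pair loop at no_zva.  */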
.Lset_long:
	and     valw, valw, 255
	bic     dst, dstin, 15
	str     q0, [dstin]
	cmp     count, 160
	ccmp    valw, 0, 0, hs
	b.ne    .Lno_zva

#ifndef SKIP_ZVA_CHECK
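	/* DCZID_EL0 bits [3:0] hold log2 of the ZVA block size in 4-byte
	   words and bit [4] is set if DC ZVA is prohibited, so a value of
	   4 means ZVA is permitted with a 64-byte block.  */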
	mrs     zva_val, dczid_el0
	and     zva_val, zva_val, 31
	cmp     zva_val, 4              /* ZVA size is 64 bytes.  */
	b.ne    .Lno_zva
#endif
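	/* Write the head with normal stores, align dst down to a 64-byte
	   boundary and clear whole 64-byte blocks with DC ZVA.  The last
	   64 bytes are always written by the trailing store pairs.  */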
	str     q0, [dst, 16]
	stp     q0, q0, [dst, 32]
	bic     dst, dst, 63
	sub     count, dstend, dst      /* Count is now 64 too large.  */
	sub     count, count, 128       /* Adjust count and bias for loop.  */

	.p2align 4
.Lzva_loop:
	add     dst, dst, 64
	dc      zva, dst
	subs    count, count, 64
	b.hi    .Lzva_loop
	stp     q0, q0, [dstend, -64]
	stp     q0, q0, [dstend, -32]
	ret

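	/* The first 16 bytes are already set; dst is biased so each
	   iteration stores 64 bytes at dst+32..dst+95, and the last
	   64 bytes are written from dstend.  */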
.Lno_zva:
	sub     count, dstend, dst      /* Count is 16 too large.  */
	sub     dst, dst, 16            /* Dst is biased by -32.  */
	sub     count, count, 64 + 16   /* Adjust count and bias for loop.  */
.Lno_zva_loop:
	stp     q0, q0, [dst, 32]
	stp     q0, q0, [dst, 64]!
	subs    count, count, 64
	b.hi    .Lno_zva_loop
	stp     q0, q0, [dstend, -64]
	stp     q0, q0, [dstend, -32]
	ret

.size memset,.-memset