blob: 7ce924754685b775eb0d01d8f90bc32402aabe57 [file] [log] [blame]
lh9ed821d2023-04-07 01:36:19 -07001/* Copyright (C) 1991-2015 Free Software Foundation, Inc.
2 This file is part of the GNU C Library.
3
4 The GNU C Library is free software; you can redistribute it and/or
5 modify it under the terms of the GNU Lesser General Public
6 License as published by the Free Software Foundation; either
7 version 2.1 of the License, or (at your option) any later version.
8
9 The GNU C Library is distributed in the hope that it will be useful,
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 Lesser General Public License for more details.
13
14 You should have received a copy of the GNU Lesser General Public
15 License along with the GNU C Library; if not, see
16 <http://www.gnu.org/licenses/>. */
17
18#ifdef HAVE_CONFIG_H
19# include "config.h"
20#endif
21
22#undef __ptr_t
23#define __ptr_t void *
24
25#if defined HAVE_STRING_H || defined _LIBC
26# include <string.h>
27#endif
28
29#undef memcmp
30
31#ifndef MEMCMP
32# define MEMCMP memcmp
33#endif
34
35#ifdef _LIBC
36
37# include <memcopy.h>
38# include <endian.h>
39
40# if __BYTE_ORDER == __BIG_ENDIAN
41# define WORDS_BIGENDIAN
42# endif
43
44#else /* Not in the GNU C library. */
45
46# include <sys/types.h>
47
48/* Type to use for aligned memory operations.
49 This should normally be the biggest type supported by a single load
50 and store. Must be an unsigned type. */
51# define op_t unsigned long int
52# define OPSIZ (sizeof(op_t))
53
54/* Threshold value for when to enter the unrolled loops. */
55# define OP_T_THRES 16
56
57/* Type to use for unaligned operations. */
58typedef unsigned char byte;
59
60#endif /* In the GNU C library. */
61
62/* Provide the appropriate builtins to shift two registers based on
63 the alignment of a pointer held in a third register, and to reverse
64 the bytes in a word. */
65#ifdef __tilegx__
66#define DBLALIGN __insn_dblalign
67#define REVBYTES __insn_revbytes
68#else
69#define DBLALIGN __insn_dword_align
70#define REVBYTES __insn_bytex
71#endif
72
73#ifdef WORDS_BIGENDIAN
74# define CMP_LT_OR_GT(a, b) ((a) > (b) ? 1 : -1)
75#else
76# define CMP_LT_OR_GT(a, b) (REVBYTES(a) > REVBYTES(b) ? 1 : -1)
77#endif
78
79/* BE VERY CAREFUL IF YOU CHANGE THIS CODE! */
80
81/* The strategy of this memcmp is:
82
83 1. Compare bytes until one of the block pointers is aligned.
84
85 2. Compare using memcmp_common_alignment or
86 memcmp_not_common_alignment, regarding the alignment of the other
87 block after the initial byte operations. The maximum number of
88 full words (of type op_t) are compared in this way.
89
90 3. Compare the few remaining bytes. */
91
92static int memcmp_common_alignment (long, long, size_t) __THROW;
93
94/* memcmp_common_alignment -- Compare blocks at SRCP1 and SRCP2 with LEN `op_t'
95 objects (not LEN bytes!). Both SRCP1 and SRCP2 should be aligned for
96 memory operations on `op_t's. */
static int
memcmp_common_alignment (srcp1, srcp2, len)
     long int srcp1;
     long int srcp2;
     size_t len;
{
  /* Two-deep software pipeline: the (a0,b0) and (a1,b1) pairs
     alternate as the "in flight" words, so each iteration issues the
     next loads before comparing the pair fetched one step earlier.  */
  op_t a0, a1;
  op_t b0, b1;

  /* The main loop below is unrolled 4x and is entered *in the middle*
     via goto, Duff's-device style.  This switch picks the entry point
     so the remaining count is a multiple of 4, primes the pipeline
     with the first word pair, and biases LEN so the `len -= 4' at the
     bottom of the loop comes out exact for every entry point.  */
  switch (len % 4)
    {
    default: /* Avoid warning about uninitialized local variables.  */
    case 2:
      a0 = ((op_t *) srcp1)[0];
      b0 = ((op_t *) srcp2)[0];
      srcp1 += OPSIZ;
      srcp2 += OPSIZ;
      len += 2;
      goto do1;
    case 3:
      a1 = ((op_t *) srcp1)[0];
      b1 = ((op_t *) srcp2)[0];
      srcp1 += OPSIZ;
      srcp2 += OPSIZ;
      len += 1;
      goto do2;
    case 0:
      /* When OP_T_THRES is small enough the caller may legitimately
	 pass LEN == 0; bail out before touching memory.  (Statically
	 false for the default OP_T_THRES/OPSIZ, so this folds away.)  */
      if (OP_T_THRES <= 3 * OPSIZ && len == 0)
	return 0;
      a0 = ((op_t *) srcp1)[0];
      b0 = ((op_t *) srcp2)[0];
      srcp1 += OPSIZ;
      srcp2 += OPSIZ;
      goto do3;
    case 1:
      a1 = ((op_t *) srcp1)[0];
      b1 = ((op_t *) srcp2)[0];
      srcp1 += OPSIZ;
      srcp2 += OPSIZ;
      len -= 1;
      if (OP_T_THRES <= 3 * OPSIZ && len == 0)
	goto do0;
      /* Fall through.  */
    }

  /* Each stage loads the next word pair into one register set, then
     compares the pair loaded by the *previous* stage — hiding load
     latency behind the compare.  On mismatch, CMP_LT_OR_GT orders the
     words as byte strings (byte-reversing first on little-endian).  */
  do
    {
      a0 = ((op_t *) srcp1)[0];
      b0 = ((op_t *) srcp2)[0];
      srcp1 += OPSIZ;
      srcp2 += OPSIZ;
      if (__glibc_likely (a1 != b1))
	return CMP_LT_OR_GT (a1, b1);

    do3:
      a1 = ((op_t *) srcp1)[0];
      b1 = ((op_t *) srcp2)[0];
      srcp1 += OPSIZ;
      srcp2 += OPSIZ;
      if (__glibc_likely (a0 != b0))
	return CMP_LT_OR_GT (a0, b0);

    do2:
      a0 = ((op_t *) srcp1)[0];
      b0 = ((op_t *) srcp2)[0];
      srcp1 += OPSIZ;
      srcp2 += OPSIZ;
      if (__glibc_likely (a1 != b1))
	return CMP_LT_OR_GT (a1, b1);

    do1:
      a1 = ((op_t *) srcp1)[0];
      b1 = ((op_t *) srcp2)[0];
      srcp1 += OPSIZ;
      srcp2 += OPSIZ;
      if (__glibc_likely (a0 != b0))
	return CMP_LT_OR_GT (a0, b0);

      len -= 4;
    }
  while (len != 0);

  /* Drain the pipeline: one word pair (a1,b1) is still un-compared
     when the loop exits.  This is the right position for do0.  Please
     don't move it into the loop.  */
 do0:
  if (__glibc_likely (a1 != b1))
    return CMP_LT_OR_GT (a1, b1);
  return 0;
}
186
187static int memcmp_not_common_alignment (long, long, size_t) __THROW;
188
189/* memcmp_not_common_alignment -- Compare blocks at SRCP1 and SRCP2 with LEN
190 `op_t' objects (not LEN bytes!). SRCP2 should be aligned for memory
191 operations on `op_t', but SRCP1 *should be unaligned*. */
static int
memcmp_not_common_alignment (srcp1, srcp2, len)
     long int srcp1;
     long int srcp2;
     size_t len;
{
  /* Original (unaligned) SRCP1, preserved so DBLALIGN can derive the
     shift amount from its low bits.  */
  void * srcp1i;
  /* Four-deep rotation of words read from the aligned-down SRCP1
     stream; each comparison value is assembled from two consecutive
     raw words.  */
  op_t a0, a1, a2, a3;
  op_t b0, b1, b2, b3;
  /* X holds the realigned SRCP1 word to compare against a B word.  */
  op_t x;

  /* Calculate how to shift a word read at the memory operation
     aligned srcp1 to make it aligned for comparison.  */

  srcp1i = (void *) srcp1;

  /* Make SRCP1 aligned by rounding it down to the beginning of the `op_t'
     it points in the middle of.  */
  srcp1 &= -OPSIZ;

  /* As in memcmp_common_alignment: jump into the middle of the 4x
     unrolled loop so the remaining count is a multiple of 4.  Each
     entry point pre-loads TWO raw SRCP1 words (the DBLALIGN window)
     plus one aligned SRCP2 word, and biases LEN to match the
     `len -= 4' at the loop bottom.  */
  switch (len % 4)
    {
    default: /* Avoid warning about uninitialized local variables.  */
    case 2:
      a1 = ((op_t *) srcp1)[0];
      a2 = ((op_t *) srcp1)[1];
      b2 = ((op_t *) srcp2)[0];
      srcp1 += 2 * OPSIZ;
      srcp2 += 1 * OPSIZ;
      len += 2;
      goto do1;
    case 3:
      a0 = ((op_t *) srcp1)[0];
      a1 = ((op_t *) srcp1)[1];
      b1 = ((op_t *) srcp2)[0];
      srcp1 += 2 * OPSIZ;
      srcp2 += 1 * OPSIZ;
      len += 1;
      goto do2;
    case 0:
      /* Statically false for the default OP_T_THRES/OPSIZ; guards the
	 LEN == 0 case when the threshold is configured very low.  */
      if (OP_T_THRES <= 3 * OPSIZ && len == 0)
	return 0;
      a3 = ((op_t *) srcp1)[0];
      a0 = ((op_t *) srcp1)[1];
      b0 = ((op_t *) srcp2)[0];
      srcp1 += 2 * OPSIZ;
      srcp2 += 1 * OPSIZ;
      goto do3;
    case 1:
      a2 = ((op_t *) srcp1)[0];
      a3 = ((op_t *) srcp1)[1];
      b3 = ((op_t *) srcp2)[0];
      srcp1 += 2 * OPSIZ;
      srcp2 += 1 * OPSIZ;
      len -= 1;
      if (OP_T_THRES <= 3 * OPSIZ && len == 0)
	goto do0;
      /* Fall through.  */
    }

  /* Each stage loads the next raw word pair (aN from SRCP1, bN from
     SRCP2), then compares the value assembled two stages earlier:
     DBLALIGN splices the two previously-loaded raw SRCP1 words into
     one word aligned to match SRCP2.  */
  do
    {
      a0 = ((op_t *) srcp1)[0];
      b0 = ((op_t *) srcp2)[0];
      x = DBLALIGN (a2, a3, srcp1i);
      srcp1 += OPSIZ;
      srcp2 += OPSIZ;
      if (__glibc_likely (x != b3))
	return CMP_LT_OR_GT (x, b3);

    do3:
      a1 = ((op_t *) srcp1)[0];
      b1 = ((op_t *) srcp2)[0];
      x = DBLALIGN (a3, a0, srcp1i);
      srcp1 += OPSIZ;
      srcp2 += OPSIZ;
      if (__glibc_likely (x != b0))
	return CMP_LT_OR_GT (x, b0);

    do2:
      a2 = ((op_t *) srcp1)[0];
      b2 = ((op_t *) srcp2)[0];
      x = DBLALIGN (a0, a1, srcp1i);
      srcp1 += OPSIZ;
      srcp2 += OPSIZ;
      if (__glibc_likely (x != b1))
	return CMP_LT_OR_GT (x, b1);

    do1:
      a3 = ((op_t *) srcp1)[0];
      b3 = ((op_t *) srcp2)[0];
      x = DBLALIGN (a1, a2, srcp1i);
      srcp1 += OPSIZ;
      srcp2 += OPSIZ;
      if (__glibc_likely (x != b2))
	return CMP_LT_OR_GT (x, b2);

      len -= 4;
    }
  while (len != 0);

  /* Drain the pipeline: the (a2,a3) window read last has not yet been
     spliced and compared against b3.  This is the right position for
     do0.  Please don't move it into the loop.  */
 do0:
  x = DBLALIGN (a2, a3, srcp1i);
  if (__glibc_likely (x != b3))
    return CMP_LT_OR_GT (x, b3);
  return 0;
}
301
302int
303MEMCMP (s1, s2, len)
304 const __ptr_t s1;
305 const __ptr_t s2;
306 size_t len;
307{
308 op_t a0;
309 op_t b0;
310 long int srcp1 = (long int) s1;
311 long int srcp2 = (long int) s2;
312 int res;
313
314 if (len >= OP_T_THRES)
315 {
316 /* There are at least some bytes to compare. No need to test
317 for LEN == 0 in this alignment loop. */
318 while (srcp2 % OPSIZ != 0)
319 {
320 a0 = ((byte *) srcp1)[0];
321 b0 = ((byte *) srcp2)[0];
322 srcp1 += 1;
323 srcp2 += 1;
324 res = a0 - b0;
325 if (__glibc_likely (res != 0))
326 return res;
327 len -= 1;
328 }
329
330 /* SRCP2 is now aligned for memory operations on `op_t'.
331 SRCP1 alignment determines if we can do a simple,
332 aligned compare or need to shuffle bits. */
333
334 if (srcp1 % OPSIZ == 0)
335 res = memcmp_common_alignment (srcp1, srcp2, len / OPSIZ);
336 else
337 res = memcmp_not_common_alignment (srcp1, srcp2, len / OPSIZ);
338 if (res != 0)
339 return res;
340
341 /* Number of bytes remaining in the interval [0..OPSIZ-1]. */
342 srcp1 += len & -OPSIZ;
343 srcp2 += len & -OPSIZ;
344 len %= OPSIZ;
345 }
346
347 /* There are just a few bytes to compare. Use byte memory operations. */
348 while (len != 0)
349 {
350 a0 = ((byte *) srcp1)[0];
351 b0 = ((byte *) srcp2)[0];
352 srcp1 += 1;
353 srcp2 += 1;
354 res = a0 - b0;
355 if (__glibc_likely (res != 0))
356 return res;
357 len -= 1;
358 }
359
360 return 0;
361}
362libc_hidden_builtin_def(memcmp)
363#ifdef weak_alias
364# undef bcmp
365weak_alias (memcmp, bcmp)
366#endif