#ifndef _I386_STRING_H_
#define _I386_STRING_H_

#ifdef __KERNEL__
#include <linux/config.h>
/*
 * On a 486 or Pentium, we are better off not using the
 * byte string operations. But on a 386 or a PPro the
 * byte string ops are faster than doing it by hand
 * (MUCH faster on a Pentium).
 */

/*
 * This string-include defines all string functions as inline
 * functions. Use gcc. It also assumes ds=es=data space, which should be
 * normal. Most of the string functions are rather heavily hand-optimized,
 * see especially strsep, strstr, str[c]spn. They should work, but are not
 * very easy to understand. Everything is done entirely within the register
 * set, making the functions fast and clean. String instructions have been
 * used throughout, making for "slightly" unclear code :-)
 *
 * NO Copyright (C) 1991, 1992 Linus Torvalds,
 * consider these trivial functions to be PD.
 */

/* AK: in fact I bet it would be better to move this stuff all out of line.
 */
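/*
 * All of the routines below follow the same pattern: the source pointer
 * lives in ESI, the destination in EDI, and the x86 string instructions
 * (lods/stos/scas/movs) are used with the direction flag assumed clear,
 * so pointers advance upward through memory.
 */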
#define __HAVE_ARCH_STRCPY
static inline char * strcpy(char * dest,const char *src)
{
	int d0, d1, d2;
	__asm__ __volatile__(
		"1:\tlodsb\n\t"
		"stosb\n\t"
		"testb %%al,%%al\n\t"
		"jne 1b"
		: "=&S" (d0), "=&D" (d1), "=&a" (d2)
		: "0" (src), "1" (dest) : "memory");
	return dest;
}
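/*
 * Note that strncpy() below also zero-pads: once the terminating NUL of
 * 'src' has been copied, the trailing "rep stosb" fills the remainder of
 * the 'count' bytes with zeroes, matching the ISO C strncpy() semantics.
 */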
#define __HAVE_ARCH_STRNCPY
static inline char * strncpy(char * dest,const char *src,size_t count)
{
	int d0, d1, d2, d3;
	__asm__ __volatile__(
		"1:\tdecl %2\n\t"
		"js 2f\n\t"
		"lodsb\n\t"
		"stosb\n\t"
		"testb %%al,%%al\n\t"
		"jne 1b\n\t"
		"rep\n\t"
		"stosb\n"
		"2:"
		: "=&S" (d0), "=&D" (d1), "=&c" (d2), "=&a" (d3)
		: "0" (src), "1" (dest), "2" (count) : "memory");
	return dest;
}
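/*
 * strcat() first locates the terminating NUL of 'dest' with "repne scasb"
 * (EAX = 0, ECX = 0xffffffff so the scan is effectively unbounded), backs
 * EDI up onto that NUL with "decl", and then appends 'src' with the same
 * lodsb/stosb loop used by strcpy().
 */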
#define __HAVE_ARCH_STRCAT
static inline char * strcat(char * dest,const char * src)
{
	int d0, d1, d2, d3;
	__asm__ __volatile__(
		"repne\n\t"
		"scasb\n\t"
		"decl %1\n"
		"1:\tlodsb\n\t"
		"stosb\n\t"
		"testb %%al,%%al\n\t"
		"jne 1b"
		: "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
		: "0" (src), "1" (dest), "2" (0), "3" (0xffffffffu) : "memory");
	return dest;
}
#define __HAVE_ARCH_STRNCAT
static inline char * strncat(char * dest,const char * src,size_t count)
{
	int d0, d1, d2, d3;
	__asm__ __volatile__(
		"repne\n\t"
		"scasb\n\t"
		"decl %1\n\t"
		"movl %8,%3\n"
		"1:\tdecl %3\n\t"
		"js 2f\n\t"
		"lodsb\n\t"
		"stosb\n\t"
		"testb %%al,%%al\n\t"
		"jne 1b\n"
		"2:\txorl %2,%2\n\t"
		"stosb"
		: "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
		: "0" (src), "1" (dest), "2" (0), "3" (0xffffffffu), "g" (count)
		: "memory");
	return dest;
}
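/*
 * strcmp() walks both strings with lodsb/scasb. On the first mismatch it
 * uses the "sbbl %eax,%eax; orb $1,%al" idiom: sbb turns the borrow from
 * the byte comparison into 0 or -1, and the or forces the zero case to +1,
 * so the result is -1, 0 or +1 without branching on the sign.
 */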
#define __HAVE_ARCH_STRCMP
static inline int strcmp(const char * cs,const char * ct)
{
	int d0, d1;
	register int __res;
	__asm__ __volatile__(
		"1:\tlodsb\n\t"
		"scasb\n\t"
		"jne 2f\n\t"
		"testb %%al,%%al\n\t"
		"jne 1b\n\t"
		"xorl %%eax,%%eax\n\t"
		"jmp 3f\n"
		"2:\tsbbl %%eax,%%eax\n\t"
		"orb $1,%%al\n"
		"3:"
		: "=a" (__res), "=&S" (d0), "=&D" (d1)
		: "1" (cs), "2" (ct)
		: "memory");
	return __res;
}
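/* strncmp() is the same comparison loop, with ECX bounding it to 'count' bytes. */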
#define __HAVE_ARCH_STRNCMP
static inline int strncmp(const char * cs,const char * ct,size_t count)
{
	register int __res;
	int d0, d1, d2;
	__asm__ __volatile__(
		"1:\tdecl %3\n\t"
		"js 2f\n\t"
		"lodsb\n\t"
		"scasb\n\t"
		"jne 3f\n\t"
		"testb %%al,%%al\n\t"
		"jne 1b\n"
		"2:\txorl %%eax,%%eax\n\t"
		"jmp 4f\n"
		"3:\tsbbl %%eax,%%eax\n\t"
		"orb $1,%%al\n"
		"4:"
		: "=a" (__res), "=&S" (d0), "=&D" (d1), "=&c" (d2)
		: "1" (cs), "2" (ct), "3" (count)
		: "memory");
	return __res;
}
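/*
 * strchr() keeps the character to find in AH and scans with lodsb. If the
 * terminating NUL is reached without a match, ESI is forced to 1 so that
 * the final "movl %1,%0; decl %0" yields NULL; otherwise it yields ESI-1,
 * the address of the match. Only the low byte of 'c' is compared.
 */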
#define __HAVE_ARCH_STRCHR
static inline char * strchr(const char * s, int c)
{
	int d0;
	register char * __res;
	__asm__ __volatile__(
		"movb %%al,%%ah\n"
		"1:\tlodsb\n\t"
		"cmpb %%ah,%%al\n\t"
		"je 2f\n\t"
		"testb %%al,%%al\n\t"
		"jne 1b\n\t"
		"movl $1,%1\n"
		"2:\tmovl %1,%0\n\t"
		"decl %0"
		: "=a" (__res), "=&S" (d0)
		: "1" (s), "0" (c)
		: "memory");
	return __res;
}
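/*
 * strrchr() scans forward through the whole string, recording ESI-1 in the
 * result each time the character matches; when the NUL ends the loop the
 * result holds the last occurrence, or the initial NULL if there was none.
 */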
#define __HAVE_ARCH_STRRCHR
static inline char * strrchr(const char * s, int c)
{
	int d0, d1;
	register char * __res;
	__asm__ __volatile__(
		"movb %%al,%%ah\n"
		"1:\tlodsb\n\t"
		"cmpb %%ah,%%al\n\t"
		"jne 2f\n\t"
		"leal -1(%%esi),%0\n"
		"2:\ttestb %%al,%%al\n\t"
		"jne 1b"
		: "=g" (__res), "=&S" (d0), "=&a" (d1)
		: "0" (0), "1" (s), "2" (c)
		: "memory");
	return __res;
}
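/*
 * strlen() is the classic "repne scasb" trick: with AL = 0 and ECX = -1 the
 * scan stops on the terminating NUL, leaving ECX = ~(len + 1), so
 * "notl; decl" turns it back into the string length.
 */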
#define __HAVE_ARCH_STRLEN
static inline size_t strlen(const char * s)
{
	int d0;
	register int __res;
	__asm__ __volatile__(
		"repne\n\t"
		"scasb\n\t"
		"notl %0\n\t"
		"decl %0"
		: "=c" (__res), "=&D" (d0)
		: "1" (s), "a" (0), "0" (0xffffffffu)
		: "memory");
	return __res;
}
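/*
 * __memcpy() moves n/4 dwords with "rep movsl" and then the remaining
 * 0-3 bytes with "rep movsb". The "jz 1f" under "#if 1" spends two extra
 * code bytes to skip the second, microcoded rep when there is no tail.
 */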
static __always_inline void * __memcpy(void * to, const void * from, size_t n)
{
	int d0, d1, d2;
	__asm__ __volatile__(
		"rep ; movsl\n\t"
		"movl %4,%%ecx\n\t"
		"andl $3,%%ecx\n\t"
#if 1	/* want to pay 2 byte penalty for a chance to skip microcoded rep? */
		"jz 1f\n\t"
#endif
		"rep ; movsb\n\t"
		"1:"
		: "=&c" (d0), "=&D" (d1), "=&S" (d2)
		: "0" (n/4), "g" (n), "1" ((long) to), "2" ((long) from)
		: "memory");
	return (to);
}
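/*
 * The constant-size copy below has three regimes: small constant sizes
 * (1-6 and 8 bytes) expand into one or two ordinary moves, blocks of 20
 * bytes or more use "rep movsl", and everything else gets an unrolled
 * sequence of "movsl" instructions plus a 0-3 byte tail.
 */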
/*
 * This looks ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static __always_inline void * __constant_memcpy(void * to, const void * from, size_t n)
{
	long esi, edi;
	if (!n) return to;
#if 1	/* want to do small copies with non-string ops? */
	switch (n) {
		case 1: *(char*)to = *(char*)from; return to;
		case 2: *(short*)to = *(short*)from; return to;
		case 4: *(int*)to = *(int*)from; return to;
#if 1	/* including those doable with two moves? */
		case 3: *(short*)to = *(short*)from;
			*((char*)to+2) = *((char*)from+2); return to;
		case 5: *(int*)to = *(int*)from;
			*((char*)to+4) = *((char*)from+4); return to;
		case 6: *(int*)to = *(int*)from;
			*((short*)to+2) = *((short*)from+2); return to;
		case 8: *(int*)to = *(int*)from;
			*((int*)to+1) = *((int*)from+1); return to;
#endif
	}
#endif
	esi = (long) from;
	edi = (long) to;
	if (n >= 5*4) {
		/* large block: use rep prefix */
		int ecx;
		__asm__ __volatile__(
			"rep ; movsl"
			: "=&c" (ecx), "=&D" (edi), "=&S" (esi)
			: "0" (n/4), "1" (edi), "2" (esi)
			: "memory"
		);
	} else {
		/* small block: don't clobber ecx + smaller code */
		if (n >= 4*4) __asm__ __volatile__("movsl"
			: "=&D"(edi), "=&S"(esi) : "0"(edi), "1"(esi) : "memory");
		if (n >= 3*4) __asm__ __volatile__("movsl"
			: "=&D"(edi), "=&S"(esi) : "0"(edi), "1"(esi) : "memory");
		if (n >= 2*4) __asm__ __volatile__("movsl"
			: "=&D"(edi), "=&S"(esi) : "0"(edi), "1"(esi) : "memory");
		if (n >= 1*4) __asm__ __volatile__("movsl"
			: "=&D"(edi), "=&S"(esi) : "0"(edi), "1"(esi) : "memory");
	}
	switch (n % 4) {
		/* tail */
		case 0: return to;
		case 1: __asm__ __volatile__("movsb"
			: "=&D"(edi), "=&S"(esi) : "0"(edi), "1"(esi) : "memory");
			return to;
		case 2: __asm__ __volatile__("movsw"
			: "=&D"(edi), "=&S"(esi) : "0"(edi), "1"(esi) : "memory");
			return to;
		default: __asm__ __volatile__("movsw\n\tmovsb"
			: "=&D"(edi), "=&S"(esi) : "0"(edi), "1"(esi) : "memory");
			return to;
	}
}
#define __HAVE_ARCH_MEMCPY

#ifdef CONFIG_X86_USE_3DNOW

#include <asm/mmx.h>

/*
 * This CPU favours 3DNow strongly (eg AMD Athlon)
 */

static inline void * __constant_memcpy3d(void * to, const void * from, size_t len)
{
	if (len < 512)
		return __constant_memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

static __inline__ void *__memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

#define memcpy(t, f, n) \
	(__builtin_constant_p(n) ? \
	 __constant_memcpy3d((t),(f),(n)) : \
	 __memcpy3d((t),(f),(n)))

#else

/*
 * No 3D Now!
 */
#define memcpy(t, f, n) \
	(__builtin_constant_p(n) ? \
	 __constant_memcpy((t),(f),(n)) : \
	 __memcpy((t),(f),(n)))

#endif
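/*
 * For example (hypothetical call sites, shown only to illustrate the
 * dispatch above): memcpy(dst, src, 16) has a constant size, so it expands
 * to __constant_memcpy() and ends up as four inline "movsl" instructions,
 * while memcpy(dst, src, runtime_len) goes through __memcpy() and its
 * "rep movsl" / "rep movsb" pair (or _mmx_memcpy() for large copies on
 * 3DNow-capable CPUs).
 */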
#define __HAVE_ARCH_MEMMOVE
void *memmove(void * dest,const void * src, size_t n);

#define memcmp __builtin_memcmp
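/*
 * memchr() scans 'count' bytes with "repne scasb"; if the byte is found,
 * EDI has stepped one past it and is backed up with "decl", otherwise EDI
 * is forced to 1 first so the same "decl" produces NULL.
 */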
#define __HAVE_ARCH_MEMCHR
static inline void * memchr(const void * cs,int c,size_t count)
{
	int d0;
	register void * __res;
	if (!count)
		return NULL;
	__asm__ __volatile__(
		"repne\n\t"
		"scasb\n\t"
		"je 1f\n\t"
		"movl $1,%0\n"
		"1:\tdecl %0"
		: "=D" (__res), "=&c" (d0)
		: "a" (c), "0" (cs), "1" (count)
		: "memory");
	return __res;
}
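/*
 * The memset() machinery below comes in three flavours: __memset_generic()
 * is a plain "rep stosb" for the fully variable case, __constant_c_memset()
 * handles a known fill byte (already replicated into a 32-bit pattern) with
 * an unknown count, and __constant_c_and_count_memset() is used when both
 * the pattern and the count are compile-time constants.
 */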
static inline void * __memset_generic(void * s, char c,size_t count)
{
	int d0, d1;
	__asm__ __volatile__(
		"rep\n\t"
		"stosb"
		: "=&c" (d0), "=&D" (d1)
		: "a" (c), "1" (s), "0" (count)
		: "memory");
	return s;
}

/* we might want to write optimized versions of these later */
#define __constant_count_memset(s,c,count) __memset_generic((s),(c),(count))
/*
 * memset(x,0,y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile-time..
 */
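/*
 * 'c' here is the fill byte already replicated into all four bytes of a
 * long (see the memset() macro below). The bulk is written with
 * "rep stosl"; bits 1 and 0 of the count then select an extra "stosw"
 * and/or "stosb" for the 1-3 byte tail.
 */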
static __always_inline void * __constant_c_memset(void * s, unsigned long c, size_t count)
{
	int d0, d1;
	__asm__ __volatile__(
		"rep ; stosl\n\t"
		"testb $2,%b3\n\t"
		"je 1f\n\t"
		"stosw\n"
		"1:\ttestb $1,%b3\n\t"
		"je 2f\n\t"
		"stosb\n"
		"2:"
		: "=&c" (d0), "=&D" (d1)
		: "a" (c), "q" (count), "0" (count/4), "1" ((long) s)
		: "memory");
	return (s);
}
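/*
 * strnlen() below deliberately avoids the string instructions: it walks the
 * string with the pointer in EAX while counting down a copy of 'count' in
 * EDX, then subtracts the start address, so it returns the string length
 * capped at 'count'.
 */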
/* Added by Gertjan van Wingerde to make the minix and sysv modules work */
#define __HAVE_ARCH_STRNLEN
static inline size_t strnlen(const char * s, size_t count)
{
	int d0;
	register int __res;
	__asm__ __volatile__(
		"movl %2,%0\n\t"
		"jmp 2f\n"
		"1:\tcmpb $0,(%0)\n\t"
		"je 3f\n\t"
		"incl %0\n"
		"2:\tdecl %1\n\t"
		"cmpl $-1,%1\n\t"
		"jne 1b\n"
		"3:\tsubl %2,%0"
		: "=a" (__res), "=&d" (d0)
		: "c" (s), "1" (count)
		: "memory");
	return __res;
}
/* end of additional stuff */
#define __HAVE_ARCH_STRSTR
extern char *strstr(const char *cs, const char *ct);

/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as we by now know that both pattern and count are constant..
 */
static __always_inline void * __constant_c_and_count_memset(void * s, unsigned long pattern, size_t count)
{
	switch (count) {
		case 0:
			return s;
		case 1:
			*(unsigned char *)s = pattern;
			return s;
		case 2:
			*(unsigned short *)s = pattern;
			return s;
		case 3:
			*(unsigned short *)s = pattern;
			*(2+(unsigned char *)s) = pattern;
			return s;
		case 4:
			*(unsigned long *)s = pattern;
			return s;
	}
#define COMMON(x) \
	__asm__ __volatile__( \
		"rep ; stosl" \
		x \
		: "=&c" (d0), "=&D" (d1) \
		: "a" (pattern), "0" (count/4), "1" ((long) s) \
		: "memory")
	{
		int d0, d1;
		switch (count % 4) {
			case 0: COMMON(""); return s;
			case 1: COMMON("\n\tstosb"); return s;
			case 2: COMMON("\n\tstosw"); return s;
			default: COMMON("\n\tstosw\n\tstosb"); return s;
		}
	}
#undef COMMON
}
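/*
 * Note how memset() below replicates the (constant) fill byte into every
 * byte of a long by multiplying with 0x01010101UL, so that the 32-bit-wide
 * fill routines above can be used for the bulk of the area.
 */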
#define __constant_c_x_memset(s, c, count) \
	(__builtin_constant_p(count) ? \
	 __constant_c_and_count_memset((s),(c),(count)) : \
	 __constant_c_memset((s),(c),(count)))

#define __memset(s, c, count) \
	(__builtin_constant_p(count) ? \
	 __constant_count_memset((s),(c),(count)) : \
	 __memset_generic((s),(c),(count)))

#define __HAVE_ARCH_MEMSET
#define memset(s, c, count) \
	(__builtin_constant_p(c) ? \
	 __constant_c_x_memset((s),(0x01010101UL*(unsigned char)(c)),(count)) : \
	 __memset((s),(c),(count)))
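/*
 * For instance (a hypothetical call, shown only to trace the expansion):
 * memset(buf, 0, 64) has a constant 'c' and 'count', so it becomes
 * __constant_c_and_count_memset(buf, 0x00000000UL, 64), i.e. a single
 * "rep stosl" with ECX = 16; memset(buf, val, len) with both arguments
 * variable falls back to __memset_generic() and its "rep stosb".
 */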
/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
static inline void * memscan(void * addr, int c, size_t size)
{
	if (!size)
		return addr;
	__asm__("repnz; scasb\n\t"
		"jnz 1f\n\t"
		"dec %%edi\n"
		"1:"
		: "=D" (addr), "=c" (size)
		: "0" (addr), "1" (size), "a" (c)
		: "memory");
	return addr;
}
#endif /* __KERNEL__ */

#endif /* _I386_STRING_H_ */