#ifndef _ASM_X86_STRING_32_H
#define _ASM_X86_STRING_32_H

#ifdef __KERNEL__

/* Let gcc decide whether to inline or use the out of line functions */

#define __HAVE_ARCH_STRCPY
extern char *strcpy(char *dest, const char *src);

#define __HAVE_ARCH_STRNCPY
extern char *strncpy(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCAT
extern char *strcat(char *dest, const char *src);

#define __HAVE_ARCH_STRNCAT
extern char *strncat(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCMP
extern int strcmp(const char *cs, const char *ct);

#define __HAVE_ARCH_STRNCMP
extern int strncmp(const char *cs, const char *ct, size_t count);

#define __HAVE_ARCH_STRCHR
extern char *strchr(const char *s, int c);

#define __HAVE_ARCH_STRLEN
extern size_t strlen(const char *s);
static __always_inline void *__memcpy(void *to, const void *from, size_t n)
{
        int d0, d1, d2;
        asm volatile("rep ; movsl\n\t"          /* copy n/4 longwords */
                     "movl %4,%%ecx\n\t"        /* reload the byte count */
                     "andl $3,%%ecx\n\t"
                     "jz 1f\n\t"                /* no 1..3 byte tail? done */
                     "rep ; movsb\n\t"          /* copy the trailing bytes */
                     "1:"
                     : "=&c" (d0), "=&D" (d1), "=&S" (d2)
                     : "0" (n / 4), "g" (n), "1" ((long)to), "2" ((long)from)
                     : "memory");
        return to;
}
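
/*
 * For readers less fluent in inline asm, here is a minimal C sketch of what
 * __memcpy() above does (the name __memcpy_sketch is ours, purely
 * illustrative and not part of this header): copy n/4 32-bit words, then
 * the remaining n%4 bytes.
 */
static inline void *__memcpy_sketch(void *to, const void *from, size_t n)
{
        unsigned int *d4 = to;
        const unsigned int *s4 = from;
        unsigned char *d1;
        const unsigned char *s1;
        size_t i;

        for (i = 0; i < n / 4; i++)     /* the "rep ; movsl" part */
                d4[i] = s4[i];
        d1 = (unsigned char *)(d4 + n / 4);
        s1 = (const unsigned char *)(s4 + n / 4);
        for (i = 0; i < n % 4; i++)     /* the trailing "rep ; movsb" part */
                d1[i] = s1[i];
        return to;
}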
/*
 * This looks ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static __always_inline void *__constant_memcpy(void *to, const void *from,
                                               size_t n)
{
        long esi, edi;
        if (!n)
                return to;

        switch (n) {
        case 1:
                *(char *)to = *(char *)from;
                return to;
        case 2:
                *(short *)to = *(short *)from;
                return to;
        case 4:
                *(int *)to = *(int *)from;
                return to;
        case 3:
                *(short *)to = *(short *)from;
                *((char *)to + 2) = *((char *)from + 2);
                return to;
        case 5:
                *(int *)to = *(int *)from;
                *((char *)to + 4) = *((char *)from + 4);
                return to;
        case 6:
                *(int *)to = *(int *)from;
                *((short *)to + 2) = *((short *)from + 2);
                return to;
        case 8:
                *(int *)to = *(int *)from;
                *((int *)to + 1) = *((int *)from + 1);
                return to;
        }

        esi = (long)from;
        edi = (long)to;
        if (n >= 5 * 4) {
                /* large block: use rep prefix */
                int ecx;
                asm volatile("rep ; movsl"
                             : "=&c" (ecx), "=&D" (edi), "=&S" (esi)
                             : "0" (n / 4), "1" (edi), "2" (esi)
                             : "memory"
                );
        } else {
                /* small block: don't clobber ecx + smaller code */
                if (n >= 4 * 4)
                        asm volatile("movsl"
                                     : "=&D"(edi), "=&S"(esi)
                                     : "0"(edi), "1"(esi)
                                     : "memory");
                if (n >= 3 * 4)
                        asm volatile("movsl"
                                     : "=&D"(edi), "=&S"(esi)
                                     : "0"(edi), "1"(esi)
                                     : "memory");
                if (n >= 2 * 4)
                        asm volatile("movsl"
                                     : "=&D"(edi), "=&S"(esi)
                                     : "0"(edi), "1"(esi)
                                     : "memory");
                if (n >= 1 * 4)
                        asm volatile("movsl"
                                     : "=&D"(edi), "=&S"(esi)
                                     : "0"(edi), "1"(esi)
                                     : "memory");
        }
        switch (n % 4) {
                /* tail */
        case 0:
                return to;
        case 1:
                asm volatile("movsb"
                             : "=&D"(edi), "=&S"(esi)
                             : "0"(edi), "1"(esi)
                             : "memory");
                return to;
        case 2:
                asm volatile("movsw"
                             : "=&D"(edi), "=&S"(esi)
                             : "0"(edi), "1"(esi)
                             : "memory");
                return to;
        default:
                asm volatile("movsw\n\tmovsb"
                             : "=&D"(edi), "=&S"(esi)
                             : "0"(edi), "1"(esi)
                             : "memory");
                return to;
        }
}
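
/*
 * A usage sketch (ours, not part of this header): because n is a
 * compile-time constant at every call site, the switch above collapses to
 * straight-line moves. For example, with a hypothetical 8-byte pair:
 */
struct __sketch_pair { int a, b; };

static inline void __sketch_copy_pair(struct __sketch_pair *dst,
                                      const struct __sketch_pair *src)
{
        /* sizeof(*dst) == 8, so this compiles to the "case 8" path:
         * two 32-bit loads/stores, no call and no loop. */
        __constant_memcpy(dst, src, sizeof(*dst));
}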
#define __HAVE_ARCH_MEMCPY

#ifdef CONFIG_X86_USE_3DNOW

#include <asm/mmx.h>

/*
 *	This CPU favours 3DNow strongly (e.g. AMD Athlon)
 */

static inline void *__constant_memcpy3d(void *to, const void *from, size_t len)
{
        if (len < 512)
                return __constant_memcpy(to, from, len);
        return _mmx_memcpy(to, from, len);
}

static inline void *__memcpy3d(void *to, const void *from, size_t len)
{
        if (len < 512)
                return __memcpy(to, from, len);
        return _mmx_memcpy(to, from, len);
}

#define memcpy(t, f, n)				\
	(__builtin_constant_p((n))		\
	 ? __constant_memcpy3d((t), (f), (n))	\
	 : __memcpy3d((t), (f), (n)))
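
/*
 * Sketch of how the dispatch above behaves at a call site (ours,
 * illustrative only). Copies under the header's 512-byte cutoff stay on
 * the plain rep-string path, where setting up MMX/3DNow! state would not
 * pay off; larger copies go through _mmx_memcpy().
 */
static inline void __sketch_memcpy3d_dispatch(void *dst, const void *src,
                                              size_t runtime_len)
{
        char buf[64] = { 0 };

        /* constant size: resolved at compile time via __builtin_constant_p */
        memcpy(dst, buf, sizeof(buf));          /* -> __constant_memcpy3d() */

        /* variable size: checked at run time against the 512-byte cutoff */
        memcpy(dst, src, runtime_len);          /* -> __memcpy3d() */
}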
#else

/*
 * No 3D Now!
 */

#ifndef CONFIG_KMEMCHECK

#if (__GNUC__ >= 4)
#define memcpy(t, f, n) __builtin_memcpy(t, f, n)
#else
#define memcpy(t, f, n)				\
	(__builtin_constant_p((n))		\
	 ? __constant_memcpy((t), (f), (n))	\
	 : __memcpy((t), (f), (n)))
#endif

#else
/*
 * kmemcheck becomes very happy if we use the REP instructions unconditionally,
 * because it means that we know both memory operands in advance.
 */
#define memcpy(t, f, n) __memcpy((t), (f), (n))
#endif

#endif

#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t n);

#define memcmp __builtin_memcmp

#define __HAVE_ARCH_MEMCHR
extern void *memchr(const void *cs, int c, size_t count);

static inline void *__memset_generic(void *s, char c, size_t count)
{
        int d0, d1;
        asm volatile("rep\n\t"
                     "stosb"
                     : "=&c" (d0), "=&D" (d1)
                     : "a" (c), "1" (s), "0" (count)
                     : "memory");
        return s;
}
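
/*
 * A minimal C sketch (ours, illustrative only) of what the "rep stosb" in
 * __memset_generic() does: store the byte c count times starting at s.
 */
static inline void *__memset_generic_sketch(void *s, char c, size_t count)
{
        unsigned char *p = s;
        size_t i;

        for (i = 0; i < count; i++)     /* one "stosb" per iteration */
                p[i] = (unsigned char)c;
        return s;
}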
/* we might want to write optimized versions of these later */
#define __constant_count_memset(s, c, count) __memset_generic((s), (c), (count))

/*
 * memset(x, 0, y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile-time..
 */
static __always_inline
void *__constant_c_memset(void *s, unsigned long c, size_t count)
{
        int d0, d1;
        asm volatile("rep ; stosl\n\t"
                     "testb $2,%b3\n\t"
                     "je 1f\n\t"
                     "stosw\n"
                     "1:\ttestb $1,%b3\n\t"
                     "je 2f\n\t"
                     "stosb\n"
                     "2:"
                     : "=&c" (d0), "=&D" (d1)
                     : "a" (c), "q" (count), "0" (count/4), "1" ((long)s)
                     : "memory");
        return s;
}
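
/*
 * Sketch (ours, not part of this header) of the store pattern above. Note
 * that c is expected to already hold the fill byte replicated into all four
 * byte lanes (the memset() macro below arranges that); the testb $2 /
 * testb $1 pair finishes the 0..3 byte tail.
 */
static inline void *__constant_c_memset_sketch(void *s, unsigned long c,
                                               size_t count)
{
        unsigned long *p4 = s;
        unsigned char *p1;
        size_t i;

        for (i = 0; i < count / 4; i++)         /* "rep ; stosl" */
                p4[i] = c;
        p1 = (unsigned char *)(p4 + count / 4);
        if (count & 2) {                        /* "testb $2" -> "stosw" */
                *(unsigned short *)p1 = (unsigned short)c;
                p1 += 2;
        }
        if (count & 1)                          /* "testb $1" -> "stosb" */
                *p1 = (unsigned char)c;
        return s;
}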
/* Added by Gertjan van Wingerde to make minix and sysv module work */
#define __HAVE_ARCH_STRNLEN
extern size_t strnlen(const char *s, size_t count);
/* end of additional stuff */

#define __HAVE_ARCH_STRSTR
extern char *strstr(const char *cs, const char *ct);

/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as by now we know that both pattern and count are constant.
 */
static __always_inline
void *__constant_c_and_count_memset(void *s, unsigned long pattern,
                                    size_t count)
{
        switch (count) {
        case 0:
                return s;
        case 1:
                *(unsigned char *)s = pattern & 0xff;
                return s;
        case 2:
                *(unsigned short *)s = pattern & 0xffff;
                return s;
        case 3:
                *(unsigned short *)s = pattern & 0xffff;
                *((unsigned char *)s + 2) = pattern & 0xff;
                return s;
        case 4:
                *(unsigned long *)s = pattern;
                return s;
        }

#define COMMON(x)							\
	asm volatile("rep ; stosl"					\
		     x							\
		     : "=&c" (d0), "=&D" (d1)				\
		     : "a" (eax), "0" (count/4), "1" ((long)s)		\
		     : "memory")

        {
                int d0, d1;
#if __GNUC__ == 4 && __GNUC_MINOR__ == 0
                /* Workaround for broken gcc 4.0 */
                register unsigned long eax asm("%eax") = pattern;
#else
                unsigned long eax = pattern;
#endif

                switch (count % 4) {
                case 0:
                        COMMON("");
                        return s;
                case 1:
                        COMMON("\n\tstosb");
                        return s;
                case 2:
                        COMMON("\n\tstosw");
                        return s;
                default:
                        COMMON("\n\tstosw\n\tstosb");
                        return s;
                }
        }

#undef COMMON
}
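
/*
 * Usage sketch (ours, illustrative): with a constant pattern and a constant
 * size, the switch and COMMON() expansion above flatten into a handful of
 * stores chosen by count % 4.
 */
static inline void __sketch_clear_seven(void *hdr)
{
        /* count == 7: count/4 == 1 pass of "rep ; stosl", and count%4 == 3
         * selects COMMON("\n\tstosw\n\tstosb"), i.e. one 16-bit store plus
         * one 8-bit store for the tail. */
        __constant_c_and_count_memset(hdr, 0, 7);
}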
#define __constant_c_x_memset(s, c, count)			\
	(__builtin_constant_p(count)				\
	 ? __constant_c_and_count_memset((s), (c), (count))	\
	 : __constant_c_memset((s), (c), (count)))

#define __memset(s, c, count)				\
	(__builtin_constant_p(count)			\
	 ? __constant_count_memset((s), (c), (count))	\
	 : __memset_generic((s), (c), (count)))

#define __HAVE_ARCH_MEMSET
#if (__GNUC__ >= 4)
#define memset(s, c, count) __builtin_memset(s, c, count)
#else
#define memset(s, c, count)						\
	(__builtin_constant_p(c)					\
	 ? __constant_c_x_memset((s), (0x01010101UL * (unsigned char)(c)),	\
				 (count))				\
	 : __memset((s), (c), (count)))
#endif
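
/*
 * Worked example (ours, not part of this header): multiplying the fill byte
 * by 0x01010101UL replicates it into every byte lane of a 32-bit word,
 * which is the form the "stosl"-based helpers above expect.
 */
static inline unsigned long __sketch_replicate_byte(unsigned char c)
{
        /* e.g. c == 0xAB  ->  0xAB * 0x01010101UL == 0xABABABABUL,
         * so memset(buf, 0xAB, n) stores 0xABABABAB a longword at a time */
        return 0x01010101UL * c;
}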
/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
extern void *memscan(void *addr, int c, size_t size);

#endif /* __KERNEL__ */
#endif /* _ASM_X86_STRING_32_H */