/*
 * lz4defs.h -- architecture specific defines
 *
 * Copyright (C) 2013, LG Electronics, Kyungsik Lee <kyungsik.lee@lge.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
  10. /*
  11. * Detects 64 bits mode
  12. */
  13. #if defined(CONFIG_64BIT)
  14. #define LZ4_ARCH64 1
  15. #else
  16. #define LZ4_ARCH64 0
  17. #endif
  18. /*
  19. * Architecture-specific macros
  20. */
  21. #define BYTE u8
  22. typedef struct _U16_S { u16 v; } U16_S;
  23. typedef struct _U32_S { u32 v; } U32_S;
  24. typedef struct _U64_S { u64 v; } U64_S;
  25. #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)
  26. #define A16(x) (((U16_S *)(x))->v)
  27. #define A32(x) (((U32_S *)(x))->v)
  28. #define A64(x) (((U64_S *)(x))->v)
  29. #define PUT4(s, d) (A32(d) = A32(s))
  30. #define PUT8(s, d) (A64(d) = A64(s))
  31. #define LZ4_READ_LITTLEENDIAN_16(d, s, p) \
  32. (d = s - A16(p))
  33. #define LZ4_WRITE_LITTLEENDIAN_16(p, v) \
  34. do { \
  35. A16(p) = v; \
  36. p += 2; \
  37. } while (0)
  38. #else /* CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS */
  39. #define A64(x) get_unaligned((u64 *)&(((U16_S *)(x))->v))
  40. #define A32(x) get_unaligned((u32 *)&(((U16_S *)(x))->v))
  41. #define A16(x) get_unaligned((u16 *)&(((U16_S *)(x))->v))
  42. #define PUT4(s, d) \
  43. put_unaligned(get_unaligned((const u32 *) s), (u32 *) d)
  44. #define PUT8(s, d) \
  45. put_unaligned(get_unaligned((const u64 *) s), (u64 *) d)
  46. #define LZ4_READ_LITTLEENDIAN_16(d, s, p) \
  47. (d = s - get_unaligned_le16(p))
  48. #define LZ4_WRITE_LITTLEENDIAN_16(p, v) \
  49. do { \
  50. put_unaligned_le16(v, (u16 *)(p)); \
  51. p += 2; \
  52. } while (0)
  53. #endif
  54. #define COPYLENGTH 8
  55. #define ML_BITS 4
  56. #define ML_MASK ((1U << ML_BITS) - 1)
  57. #define RUN_BITS (8 - ML_BITS)
  58. #define RUN_MASK ((1U << RUN_BITS) - 1)
  59. #define MEMORY_USAGE 14
  60. #define MINMATCH 4
  61. #define SKIPSTRENGTH 6
  62. #define LASTLITERALS 5
  63. #define MFLIMIT (COPYLENGTH + MINMATCH)
  64. #define MINLENGTH (MFLIMIT + 1)
  65. #define MAXD_LOG 16
  66. #define MAXD (1 << MAXD_LOG)
  67. #define MAXD_MASK (u32)(MAXD - 1)
  68. #define MAX_DISTANCE (MAXD - 1)
  69. #define HASH_LOG (MAXD_LOG - 1)
  70. #define HASHTABLESIZE (1 << HASH_LOG)
  71. #define MAX_NB_ATTEMPTS 256
  72. #define OPTIMAL_ML (int)((ML_MASK-1)+MINMATCH)
  73. #define LZ4_64KLIMIT ((1<<16) + (MFLIMIT - 1))
  74. #define HASHLOG64K ((MEMORY_USAGE - 2) + 1)
  75. #define HASH64KTABLESIZE (1U << HASHLOG64K)
  76. #define LZ4_HASH_VALUE(p) (((A32(p)) * 2654435761U) >> \
  77. ((MINMATCH * 8) - (MEMORY_USAGE-2)))
  78. #define LZ4_HASH64K_VALUE(p) (((A32(p)) * 2654435761U) >> \
  79. ((MINMATCH * 8) - HASHLOG64K))
  80. #define HASH_VALUE(p) (((A32(p)) * 2654435761U) >> \
  81. ((MINMATCH * 8) - HASH_LOG))
  82. #if LZ4_ARCH64/* 64-bit */
  83. #define STEPSIZE 8
  84. #define LZ4_COPYSTEP(s, d) \
  85. do { \
  86. PUT8(s, d); \
  87. d += 8; \
  88. s += 8; \
  89. } while (0)
  90. #define LZ4_COPYPACKET(s, d) LZ4_COPYSTEP(s, d)
  91. #define LZ4_SECURECOPY(s, d, e) \
  92. do { \
  93. if (d < e) { \
  94. LZ4_WILDCOPY(s, d, e); \
  95. } \
  96. } while (0)
  97. #define HTYPE u32
  98. #ifdef __BIG_ENDIAN
  99. #define LZ4_NBCOMMONBYTES(val) (__builtin_clzll(val) >> 3)
  100. #else
  101. #define LZ4_NBCOMMONBYTES(val) (__builtin_ctzll(val) >> 3)
  102. #endif
  103. #else /* 32-bit */
  104. #define STEPSIZE 4
  105. #define LZ4_COPYSTEP(s, d) \
  106. do { \
  107. PUT4(s, d); \
  108. d += 4; \
  109. s += 4; \
  110. } while (0)
  111. #define LZ4_COPYPACKET(s, d) \
  112. do { \
  113. LZ4_COPYSTEP(s, d); \
  114. LZ4_COPYSTEP(s, d); \
  115. } while (0)
  116. #define LZ4_SECURECOPY LZ4_WILDCOPY
  117. #define HTYPE const u8*
  118. #ifdef __BIG_ENDIAN
  119. #define LZ4_NBCOMMONBYTES(val) (__builtin_clz(val) >> 3)
  120. #else
  121. #define LZ4_NBCOMMONBYTES(val) (__builtin_ctz(val) >> 3)
  122. #endif
  123. #endif
  124. #define LZ4_WILDCOPY(s, d, e) \
  125. do { \
  126. LZ4_COPYPACKET(s, d); \
  127. } while (d < e)
  128. #define LZ4_BLINDCOPY(s, d, l) \
  129. do { \
  130. u8 *e = (d) + l; \
  131. LZ4_WILDCOPY(s, d, e); \
  132. d = e; \
  133. } while (0)