/*
 * Copyright 2003-2011 NetLogic Microsystems, Inc. (NetLogic). All rights
 * reserved.
 *
 * This software is available to you under a choice of one of two
 * licenses. You may choose to be licensed under the terms of the GNU
 * General Public License (GPL) Version 2, available from the file
 * COPYING in the main directory of this source tree, or the NetLogic
 * license below:
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY NETLOGIC ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL NETLOGIC OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _ASM_NLM_MIPS_EXTS_H
#define _ASM_NLM_MIPS_EXTS_H

/*
 * XLR and XLP interrupt request and interrupt mask registers
 */
#define read_c0_eirr()		__read_64bit_c0_register($9, 6)
#define read_c0_eimr()		__read_64bit_c0_register($9, 7)
#define write_c0_eirr(val)	__write_64bit_c0_register($9, 6, val)

/*
 * Writing EIMR in 32-bit mode is a special case: the lower 8 bits of
 * the EIMR are shadowed in the status register, so we cannot simply
 * save and restore the status register around the split write.
 * Instead, the saved flags are patched with the new low byte before
 * being restored.
 */
#define write_c0_eimr(val) \
do { \
	if (sizeof(unsigned long) == 4) { \
		unsigned long __flags; \
\
		local_irq_save(__flags); \
		__asm__ __volatile__( \
			".set\tmips64\n\t" \
			"dsll\t%L0, %L0, 32\n\t" \
			"dsrl\t%L0, %L0, 32\n\t" \
			"dsll\t%M0, %M0, 32\n\t" \
			"or\t%L0, %L0, %M0\n\t" \
			"dmtc0\t%L0, $9, 7\n\t" \
			".set\tmips0" \
			: : "r" (val)); \
		__flags = (__flags & 0xffff00ff) | (((val) & 0xff) << 8);\
		local_irq_restore(__flags); \
	} else \
		__write_64bit_c0_register($9, 7, (val)); \
} while (0)
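
/*
 * Illustrative sketch only (not part of this header): the low byte of
 * EIMR maps onto the IM field of the status register (bits 15..8),
 * which is why the macro above patches the saved flags rather than
 * restoring them unchanged.  For a hypothetical caller enabling only
 * interrupt lines 3 and 9:
 *
 *	write_c0_eimr((1ULL << 3) | (1ULL << 9));
 *
 * the restored status word has IM3 (bit 11) set and the other IM bits
 * cleared, while bit 9 of the mask lives only in the EIMR itself.
 */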

/*
 * Handling the 64-bit EIMR and EIRR registers in 32-bit mode with the
 * standard functions would be very inefficient, so the helpers below
 * provide optimized versions of the common operations on these
 * registers.
 *
 * Call with interrupts disabled.
 */
/* acknowledge irq: write its bit to the EIRR */
static inline void ack_c0_eirr(int irq)
{
	__asm__ __volatile__(
		".set push\n\t"
		".set mips64\n\t"
		".set noat\n\t"
		"li $1, 1\n\t"
		"dsllv $1, $1, %0\n\t"
		"dmtc0 $1, $9, 6\n\t"
		".set pop"
		: : "r" (irq));
}

/* unmask irq: set its bit in the EIMR */
static inline void set_c0_eimr(int irq)
{
	__asm__ __volatile__(
		".set push\n\t"
		".set mips64\n\t"
		".set noat\n\t"
		"li $1, 1\n\t"
		"dsllv %0, $1, %0\n\t"
		"dmfc0 $1, $9, 7\n\t"
		"or $1, %0\n\t"
		"dmtc0 $1, $9, 7\n\t"
		".set pop"
		: "+r" (irq));
}

/* mask irq: clear its bit in the EIMR */
static inline void clear_c0_eimr(int irq)
{
	__asm__ __volatile__(
		".set push\n\t"
		".set mips64\n\t"
		".set noat\n\t"
		"li $1, 1\n\t"
		"dsllv %0, $1, %0\n\t"
		"dmfc0 $1, $9, 7\n\t"
		"or $1, %0\n\t"
		"xor $1, %0\n\t"
		"dmtc0 $1, $9, 7\n\t"
		".set pop"
		: "+r" (irq));
}
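
/*
 * Illustrative sketch only (hypothetical wrappers, not part of this
 * header): a per-interrupt mask/unmask pair built on the accessors
 * above, honoring the "call with interrupts disabled" rule:
 *
 *	static void example_unmask_hwirq(int hwirq)
 *	{
 *		unsigned long flags;
 *
 *		local_irq_save(flags);
 *		set_c0_eimr(hwirq);
 *		local_irq_restore(flags);
 *	}
 *
 *	static void example_mask_hwirq(int hwirq)
 *	{
 *		unsigned long flags;
 *
 *		local_irq_save(flags);
 *		clear_c0_eimr(hwirq);
 *		local_irq_restore(flags);
 *	}
 */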

/*
 * Read c0 EIMR and c0 EIRR and AND the two values; the result is the
 * set of interrupts which are raised and not masked.
 */
static inline uint64_t read_c0_eirr_and_eimr(void)
{
	uint64_t val;

#ifdef CONFIG_64BIT
	val = read_c0_eimr() & read_c0_eirr();
#else
	__asm__ __volatile__(
		".set push\n\t"
		".set mips64\n\t"
		".set noat\n\t"
		"dmfc0 %M0, $9, 6\n\t"
		"dmfc0 %L0, $9, 7\n\t"
		"and %M0, %L0\n\t"
		"dsll %L0, %M0, 32\n\t"
		"dsra %M0, %M0, 32\n\t"
		"dsra %L0, %L0, 32\n\t"
		".set pop"
		: "=r" (val));
#endif
	return val;
}
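
/*
 * Illustrative sketch only (do_handle_hwirq is a hypothetical handler,
 * not part of this header): a typical dispatch path, running with
 * interrupts disabled, consumes the combined value by acking and
 * handling each pending, unmasked line:
 *
 *	uint64_t pending = read_c0_eirr_and_eimr();
 *
 *	while (pending) {
 *		int hwirq = __ffs64(pending);	// lowest set bit
 *
 *		ack_c0_eirr(hwirq);
 *		do_handle_hwirq(hwirq);
 *		pending &= pending - 1;		// drop that bit
 *	}
 */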

static inline int hard_smp_processor_id(void)
{
	return __read_32bit_c0_register($15, 1) & 0x3ff;
}

static inline int nlm_nodeid(void)
{
	return (__read_32bit_c0_register($15, 1) >> 5) & 0x3;
}

static inline unsigned int nlm_core_id(void)
{
	return (read_c0_ebase() & 0x1c) >> 2;
}

static inline unsigned int nlm_thread_id(void)
{
	return read_c0_ebase() & 0x3;
}
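
/*
 * Illustrative sketch only: the bit layout read above (node in bits
 * 6..5, core in bits 4..2, thread in bits 1..0 of the CPU number)
 * lets the identifiers compose into a flat hardware CPU id, assuming
 * the XLP arrangement of 4 threads per core and 8 cores per node:
 *
 *	int hwcpu = nlm_nodeid() * 32 + nlm_core_id() * 4 +
 *						nlm_thread_id();
 *
 * which matches the low bits returned by hard_smp_processor_id().
 */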

#define __read_64bit_c2_split(source, sel) \
({ \
	unsigned long long __val; \
	unsigned long __flags; \
\
	local_irq_save(__flags); \
	if (sel == 0) \
		__asm__ __volatile__( \
			".set\tmips64\n\t" \
			"dmfc2\t%M0, " #source "\n\t" \
			"dsll\t%L0, %M0, 32\n\t" \
			"dsra\t%M0, %M0, 32\n\t" \
			"dsra\t%L0, %L0, 32\n\t" \
			".set\tmips0\n\t" \
			: "=r" (__val)); \
	else \
		__asm__ __volatile__( \
			".set\tmips64\n\t" \
			"dmfc2\t%M0, " #source ", " #sel "\n\t" \
			"dsll\t%L0, %M0, 32\n\t" \
			"dsra\t%M0, %M0, 32\n\t" \
			"dsra\t%L0, %L0, 32\n\t" \
			".set\tmips0\n\t" \
			: "=r" (__val)); \
	local_irq_restore(__flags); \
\
	__val; \
})

#define __write_64bit_c2_split(source, sel, val) \
do { \
	unsigned long __flags; \
\
	local_irq_save(__flags); \
	if (sel == 0) \
		__asm__ __volatile__( \
			".set\tmips64\n\t" \
			"dsll\t%L0, %L0, 32\n\t" \
			"dsrl\t%L0, %L0, 32\n\t" \
			"dsll\t%M0, %M0, 32\n\t" \
			"or\t%L0, %L0, %M0\n\t" \
			"dmtc2\t%L0, " #source "\n\t" \
			".set\tmips0\n\t" \
			: : "r" (val)); \
	else \
		__asm__ __volatile__( \
			".set\tmips64\n\t" \
			"dsll\t%L0, %L0, 32\n\t" \
			"dsrl\t%L0, %L0, 32\n\t" \
			"dsll\t%M0, %M0, 32\n\t" \
			"or\t%L0, %L0, %M0\n\t" \
			"dmtc2\t%L0, " #source ", " #sel "\n\t" \
			".set\tmips0\n\t" \
			: : "r" (val)); \
	local_irq_restore(__flags); \
} while (0)

#define __read_32bit_c2_register(source, sel) \
({ uint32_t __res; \
	if (sel == 0) \
		__asm__ __volatile__( \
			".set\tmips32\n\t" \
			"mfc2\t%0, " #source "\n\t" \
			".set\tmips0\n\t" \
			: "=r" (__res)); \
	else \
		__asm__ __volatile__( \
			".set\tmips32\n\t" \
			"mfc2\t%0, " #source ", " #sel "\n\t" \
			".set\tmips0\n\t" \
			: "=r" (__res)); \
	__res; \
})

#define __read_64bit_c2_register(source, sel) \
({ unsigned long long __res; \
	if (sizeof(unsigned long) == 4) \
		__res = __read_64bit_c2_split(source, sel); \
	else if (sel == 0) \
		__asm__ __volatile__( \
			".set\tmips64\n\t" \
			"dmfc2\t%0, " #source "\n\t" \
			".set\tmips0\n\t" \
			: "=r" (__res)); \
	else \
		__asm__ __volatile__( \
			".set\tmips64\n\t" \
			"dmfc2\t%0, " #source ", " #sel "\n\t" \
			".set\tmips0\n\t" \
			: "=r" (__res)); \
	__res; \
})

#define __write_64bit_c2_register(register, sel, value) \
do { \
	if (sizeof(unsigned long) == 4) \
		__write_64bit_c2_split(register, sel, value); \
	else if (sel == 0) \
		__asm__ __volatile__( \
			".set\tmips64\n\t" \
			"dmtc2\t%z0, " #register "\n\t" \
			".set\tmips0\n\t" \
			: : "Jr" (value)); \
	else \
		__asm__ __volatile__( \
			".set\tmips64\n\t" \
			"dmtc2\t%z0, " #register ", " #sel "\n\t" \
			".set\tmips0\n\t" \
			: : "Jr" (value)); \
} while (0)

#define __write_32bit_c2_register(reg, sel, value) \
({ \
	if (sel == 0) \
		__asm__ __volatile__( \
			".set\tmips32\n\t" \
			"mtc2\t%z0, " #reg "\n\t" \
			".set\tmips0\n\t" \
			: : "Jr" (value)); \
	else \
		__asm__ __volatile__( \
			".set\tmips32\n\t" \
			"mtc2\t%z0, " #reg ", " #sel "\n\t" \
			".set\tmips0\n\t" \
			: : "Jr" (value)); \
})
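
/*
 * Illustrative sketch only ($4 here is an arbitrary CP2 register chosen
 * for demonstration): the accessors above take a literal register and
 * select, since both are pasted into the instruction by the
 * preprocessor.  A 64-bit read-modify-write would look like:
 *
 *	uint64_t v;
 *
 *	v = __read_64bit_c2_register($4, 0);
 *	__write_64bit_c2_register($4, 0, v | 0x1);
 *
 * On 32-bit kernels the 64-bit accessors expand to the *_split
 * variants, which wrap the access in local_irq_save()/local_irq_restore().
 */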

#endif /*_ASM_NLM_MIPS_EXTS_H */