  1. /*
  2. * include/asm-v850/system.h -- Low-level interrupt/thread ops
  3. *
  4. * Copyright (C) 2001,02,03 NEC Electronics Corporation
  5. * Copyright (C) 2001,02,03 Miles Bader <miles@gnu.org>
  6. *
  7. * This file is subject to the terms and conditions of the GNU General
  8. * Public License. See the file COPYING in the main directory of this
  9. * archive for more details.
  10. *
  11. * Written by Miles Bader <miles@gnu.org>
  12. */
  13. #ifndef __V850_SYSTEM_H__
  14. #define __V850_SYSTEM_H__
  15. #include <linux/linkage.h>
  16. #include <asm/ptrace.h>
  17. #define prepare_to_switch() do { } while (0)
  18. /*
  19. * switch_to(n) should switch tasks to task ptr, first checking that
  20. * ptr isn't the current task, in which case it does nothing.
  21. */
  22. struct thread_struct;
  23. extern void *switch_thread (struct thread_struct *last,
  24. struct thread_struct *next);
  25. #define switch_to(prev,next,last) \
  26. do { \
  27. if (prev != next) { \
  28. (last) = switch_thread (&prev->thread, &next->thread); \
  29. } \
  30. } while (0)
  31. /* Enable/disable interrupts. */
  32. #define local_irq_enable() __asm__ __volatile__ ("ei")
  33. #define local_irq_disable() __asm__ __volatile__ ("di")
  34. #define local_save_flags(flags) \
  35. __asm__ __volatile__ ("stsr %1, %0" : "=r" (flags) : "i" (SR_PSW))
  36. #define local_restore_flags(flags) \
  37. __asm__ __volatile__ ("ldsr %0, %1" :: "r" (flags), "i" (SR_PSW))
  38. /* For spinlocks etc */
  39. #define local_irq_save(flags) \
  40. do { local_save_flags (flags); local_irq_disable (); } while (0)
  41. #define local_irq_restore(flags) \
  42. local_restore_flags (flags);
/* Return non-zero iff interrupts are currently disabled on this CPU.
   Reads the PSW and tests bit 0x20 -- presumably the v850 `ID'
   (interrupt disable) bit; confirm against the PSW layout in
   asm/ptrace.h.  */
static inline int irqs_disabled (void)
{
	unsigned flags;
	local_save_flags (flags);
	return !!(flags & 0x20);
}
/*
 * Force strict CPU ordering.
 * Not really required on v850...
 */
#define nop()			__asm__ __volatile__ ("nop")
/* Compiler-only barrier: an empty asm with a "memory" clobber prevents
   the compiler from reordering memory accesses across it; no hardware
   barrier instruction is emitted.  */
#define mb()			__asm__ __volatile__ ("" ::: "memory")
#define rmb()			mb ()
#define wmb()			mb ()
#define read_barrier_depends()	((void)0)
/* Assign VALUE to VAR with barrier semantics.  The xchg-based variants
   perform the store with interrupts disabled (see __xchg below), making
   it atomic with respect to local interrupt handlers.  */
#define set_rmb(var, value)	do { xchg (&var, value); } while (0)
#define set_mb(var, value)	set_rmb (var, value)
#define set_wmb(var, value)	do { var = value; wmb (); } while (0)
/* Uniprocessor port: the SMP barriers collapse to the local ones. */
#define smp_mb()	mb ()
#define smp_rmb()	rmb ()
#define smp_wmb()	wmb ()
#define smp_read_barrier_depends()	read_barrier_depends()
/* Atomically (w.r.t. local interrupts) exchange *PTR with WITH and
   return the old value of *PTR.  Dispatches on sizeof(*PTR); only 1-,
   2-, and 4-byte objects are handled by __xchg.  */
#define xchg(ptr, with) \
  ((__typeof__ (*(ptr)))__xchg ((unsigned long)(with), (ptr), sizeof (*(ptr))))
/* Test-and-set: store 1 into *PTR, returning the previous value. */
#define tas(ptr) (xchg ((ptr), 1))
  68. extern inline unsigned long __xchg (unsigned long with,
  69. __volatile__ void *ptr, int size)
  70. {
  71. unsigned long tmp, flags;
  72. local_irq_save (flags);
  73. switch (size) {
  74. case 1:
  75. tmp = *(unsigned char *)ptr;
  76. *(unsigned char *)ptr = with;
  77. break;
  78. case 2:
  79. tmp = *(unsigned short *)ptr;
  80. *(unsigned short *)ptr = with;
  81. break;
  82. case 4:
  83. tmp = *(unsigned long *)ptr;
  84. *(unsigned long *)ptr = with;
  85. break;
  86. }
  87. local_irq_restore (flags);
  88. return tmp;
  89. }
/* No special stack alignment (or randomization) is applied on v850. */
#define arch_align_stack(x) (x)
  91. #endif /* __V850_SYSTEM_H__ */