/* cache.h — Blackfin L1 cache definitions */
  1. /*
  2. * include/asm-blackfin/cache.h
  3. */
  4. #ifndef __ARCH_BLACKFIN_CACHE_H
  5. #define __ARCH_BLACKFIN_CACHE_H
  6. /*
  7. * Bytes per L1 cache line
  8. * Blackfin loads 32 bytes for cache
  9. */
  10. #define L1_CACHE_SHIFT 5
  11. #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)
  12. #define SMP_CACHE_BYTES L1_CACHE_BYTES
  13. #ifdef CONFIG_SMP
  14. #define __cacheline_aligned
  15. #else
  16. #define ____cacheline_aligned
  17. /*
  18. * Put cacheline_aliged data to L1 data memory
  19. */
  20. #ifdef CONFIG_CACHELINE_ALIGNED_L1
  21. #define __cacheline_aligned \
  22. __attribute__((__aligned__(L1_CACHE_BYTES), \
  23. __section__(".data_l1.cacheline_aligned")))
  24. #endif
  25. #endif
  26. /*
  27. * largest L1 which this arch supports
  28. */
  29. #define L1_CACHE_SHIFT_MAX 5
  30. #if defined(CONFIG_SMP) && \
  31. !defined(CONFIG_BFIN_CACHE_COHERENT)
  32. # if defined(CONFIG_BFIN_ICACHEABLE) || defined(CONFIG_BFIN_L2_ICACHEABLE)
  33. # define __ARCH_SYNC_CORE_ICACHE
  34. # endif
  35. # if defined(CONFIG_BFIN_DCACHEABLE) || defined(CONFIG_BFIN_L2_DCACHEABLE)
  36. # define __ARCH_SYNC_CORE_DCACHE
  37. # endif
  38. #ifndef __ASSEMBLY__
  39. asmlinkage void __raw_smp_mark_barrier_asm(void);
  40. asmlinkage void __raw_smp_check_barrier_asm(void);
  41. static inline void smp_mark_barrier(void)
  42. {
  43. __raw_smp_mark_barrier_asm();
  44. }
  45. static inline void smp_check_barrier(void)
  46. {
  47. __raw_smp_check_barrier_asm();
  48. }
  49. void resync_core_dcache(void);
  50. void resync_core_icache(void);
  51. #endif
  52. #endif
  53. #endif