  1. /*
  2. * Copyright 2004-2009 Analog Devices Inc.
  3. *
  4. * Licensed under the GPL-2 or later.
  5. */
  6. #ifndef __ARCH_BLACKFIN_CACHE_H
  7. #define __ARCH_BLACKFIN_CACHE_H
  8. /*
  9. * Bytes per L1 cache line
  10. * Blackfin loads 32 bytes for cache
  11. */
  12. #define L1_CACHE_SHIFT 5
  13. #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)
  14. #define SMP_CACHE_BYTES L1_CACHE_BYTES
  15. #ifdef CONFIG_SMP
  16. #define __cacheline_aligned
  17. #else
  18. #define ____cacheline_aligned
  19. /*
  20. * Put cacheline_aliged data to L1 data memory
  21. */
  22. #ifdef CONFIG_CACHELINE_ALIGNED_L1
  23. #define __cacheline_aligned \
  24. __attribute__((__aligned__(L1_CACHE_BYTES), \
  25. __section__(".data_l1.cacheline_aligned")))
  26. #endif
  27. #endif
  28. /*
  29. * largest L1 which this arch supports
  30. */
  31. #define L1_CACHE_SHIFT_MAX 5
  32. #if defined(CONFIG_SMP) && \
  33. !defined(CONFIG_BFIN_CACHE_COHERENT)
  34. # if defined(CONFIG_BFIN_EXTMEM_ICACHEABLE) || defined(CONFIG_BFIN_L2_ICACHEABLE)
  35. # define __ARCH_SYNC_CORE_ICACHE
  36. # endif
  37. # if defined(CONFIG_BFIN_EXTMEM_DCACHEABLE) || defined(CONFIG_BFIN_L2_DCACHEABLE)
  38. # define __ARCH_SYNC_CORE_DCACHE
  39. # endif
  40. #ifndef __ASSEMBLY__
  41. asmlinkage void __raw_smp_mark_barrier_asm(void);
  42. asmlinkage void __raw_smp_check_barrier_asm(void);
  43. static inline void smp_mark_barrier(void)
  44. {
  45. __raw_smp_mark_barrier_asm();
  46. }
  47. static inline void smp_check_barrier(void)
  48. {
  49. __raw_smp_check_barrier_asm();
  50. }
  51. void resync_core_dcache(void);
  52. void resync_core_icache(void);
  53. #endif
  54. #endif
  55. #endif