cache.h (1.4 KB) — Blackfin architecture cache header
  1. /*
  2. * Copyright 2004-2009 Analog Devices Inc.
  3. *
  4. * Licensed under the GPL-2 or later.
  5. */
  6. #ifndef __ARCH_BLACKFIN_CACHE_H
  7. #define __ARCH_BLACKFIN_CACHE_H
  8. /*
  9. * Bytes per L1 cache line
  10. * Blackfin loads 32 bytes for cache
  11. */
  12. #define L1_CACHE_SHIFT 5
  13. #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)
  14. #define SMP_CACHE_BYTES L1_CACHE_BYTES
  15. #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
  16. #ifdef CONFIG_SMP
  17. #define __cacheline_aligned
  18. #else
  19. #define ____cacheline_aligned
  20. /*
  21. * Put cacheline_aliged data to L1 data memory
  22. */
  23. #ifdef CONFIG_CACHELINE_ALIGNED_L1
  24. #define __cacheline_aligned \
  25. __attribute__((__aligned__(L1_CACHE_BYTES), \
  26. __section__(".data_l1.cacheline_aligned")))
  27. #endif
  28. #endif
  29. /*
  30. * largest L1 which this arch supports
  31. */
  32. #define L1_CACHE_SHIFT_MAX 5
  33. #if defined(CONFIG_SMP) && \
  34. !defined(CONFIG_BFIN_CACHE_COHERENT)
  35. # if defined(CONFIG_BFIN_EXTMEM_ICACHEABLE) || defined(CONFIG_BFIN_L2_ICACHEABLE)
  36. # define __ARCH_SYNC_CORE_ICACHE
  37. # endif
  38. # if defined(CONFIG_BFIN_EXTMEM_DCACHEABLE) || defined(CONFIG_BFIN_L2_DCACHEABLE)
  39. # define __ARCH_SYNC_CORE_DCACHE
  40. # endif
  41. #ifndef __ASSEMBLY__
  42. asmlinkage void __raw_smp_mark_barrier_asm(void);
  43. asmlinkage void __raw_smp_check_barrier_asm(void);
  44. static inline void smp_mark_barrier(void)
  45. {
  46. __raw_smp_mark_barrier_asm();
  47. }
  48. static inline void smp_check_barrier(void)
  49. {
  50. __raw_smp_check_barrier_asm();
  51. }
  52. void resync_core_dcache(void);
  53. void resync_core_icache(void);
  54. #endif
  55. #endif
  56. #endif