0076-riscv-cpu-Add-cache-operations-for-T-HEAD-CPUs.patch 4.3 KB

From b6da98cd39612bb5660afbcad06e3a6bac43563e Mon Sep 17 00:00:00 2001
From: Samuel Holland <[email protected]>
Date: Sat, 11 Sep 2021 23:27:42 -0500
Subject: [PATCH 76/90] riscv: cpu: Add cache operations for T-HEAD CPUs

Signed-off-by: Samuel Holland <[email protected]>
---
 arch/riscv/cpu/Makefile      |   1 +
 arch/riscv/cpu/thead/cache.c | 119 +++++++++++++++++++++++++++++++++++
 arch/riscv/lib/cache.c       |   2 +-
 3 files changed, 121 insertions(+), 1 deletion(-)
 create mode 100644 arch/riscv/cpu/thead/cache.c
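
The diff below wires the new file in unconditionally (obj-y) and relies on U-Boot's
link-time __weak override: arch/riscv/lib/cache.c keeps weak no-op defaults, and the
strong definitions in arch/riscv/cpu/thead/cache.c replace them when that object is
linked, which is why the last hunk only needs to add __weak to invalidate_icache_range()
(flush_dcache_range() is already weak, as the hunk context shows). Here is a minimal,
hypothetical two-file sketch of that weak/strong pattern; the file names and printf
bodies are illustration only, not U-Boot code, and it builds with a plain C compiler.

/* cache_default.c -- stands in for the generic arch/riscv/lib/cache.c stub */
#include <stdio.h>

__attribute__((weak))
void invalidate_icache_range(unsigned long start, unsigned long end)
{
	/* weak default: used only if no strong definition is linked in */
	printf("generic no-op for %#lx-%#lx\n", start, end);
}

int main(void)
{
	invalidate_icache_range(0x40000000UL, 0x40001000UL);
	return 0;
}

/* cache_thead.c -- stands in for the strong definition in thead/cache.c */
#include <stdio.h>

void invalidate_icache_range(unsigned long start, unsigned long end)
{
	/* strong definition: the linker prefers this over the weak stub */
	printf("SoC-specific invalidate of %#lx-%#lx\n", start, end);
}

/*
 * cc cache_default.c && ./a.out                -> generic no-op
 * cc cache_default.c cache_thead.c && ./a.out  -> SoC-specific override
 */
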
--- a/arch/riscv/cpu/Makefile
+++ b/arch/riscv/cpu/Makefile
@@ -5,3 +5,4 @@
 extra-y = start.o
 
 obj-y += cpu.o mtrap.o
+obj-y += thead/cache.o
--- /dev/null
+++ b/arch/riscv/cpu/thead/cache.c
@@ -0,0 +1,119 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+#include <asm/cache.h>
+#include <asm/csr.h>
+
+#define CSR_MHCR 0x7c1
+#define CSR_MCOR 0x7c2
+#define CSR_MHINT 0x7c5
+
+#define MHCR_IE BIT(0) /* icache enable */
+#define MHCR_DE BIT(1) /* dcache enable */
+#define MHCR_WA BIT(2) /* dcache write allocate */
+#define MHCR_WB BIT(3) /* dcache write back */
+#define MHCR_RS BIT(4) /* return stack enable */
+#define MHCR_BPE BIT(5) /* branch prediction enable */
+#define MHCR_BTB BIT(6) /* branch target prediction enable */
+#define MHCR_WBR BIT(8) /* write burst enable */
+#define MHCR_L0BTB BIT(12)
+
+#define MCOR_CACHE_SEL_ICACHE (0x1 << 0)
+#define MCOR_CACHE_SEL_DCACHE (0x2 << 0)
+#define MCOR_CACHE_SEL_BOTH (0x3 << 0)
+#define MCOR_INV BIT(4)
+#define MCOR_CLR BIT(5)
+#define MCOR_BHT_INV BIT(16)
+#define MCOR_BTB_INV BIT(17)
+
+#define MHINT_DPLD BIT(2) /* dcache prefetch enable */
+#define MHINT_AMR_PAGE (0x0 << 3)
+#define MHINT_AMR_LIMIT_3 (0x1 << 3)
+#define MHINT_AMR_LIMIT_64 (0x2 << 3)
+#define MHINT_AMR_LIMIT_128 (0x3 << 3)
+#define MHINT_IPLD BIT(8) /* icache prefetch enable */
+#define MHINT_IWPE BIT(9) /* icache prediction enable */
+#define MHINT_DIS_PREFETCH_2 (0x0 << 13)
+#define MHINT_DIS_PREFETCH_4 (0x1 << 13)
+#define MHINT_DIS_PREFETCH_8 (0x2 << 13)
+#define MHINT_DIS_PREFETCH_16 (0x3 << 13)
+
+#define sync_i() asm volatile (".long 0x01a0000b" ::: "memory")
+
+void flush_dcache_all(void)
+{
+	asm volatile (".long 0x0030000b" ::: "memory"); /* dcache.ciall */
+	sync_i();
+}
+
+void flush_dcache_range(unsigned long start, unsigned long end)
+{
+	register unsigned long i asm("a0") = start & -CONFIG_SYS_CACHELINE_SIZE;
+
+	for (; i < end; i += CONFIG_SYS_CACHELINE_SIZE)
+		asm volatile (".long 0x02b5000b" ::: "memory"); /* dcache.cipa a0 */
+	sync_i();
+}
+
+void invalidate_icache_range(unsigned long start, unsigned long end)
+{
+	register unsigned long i asm("a0") = start & -CONFIG_SYS_CACHELINE_SIZE;
+
+	for (; i < end; i += CONFIG_SYS_CACHELINE_SIZE)
+		asm volatile (".long 0x0385000b" ::: "memory"); /* icache.ipa a0 */
+	sync_i();
+}
+
+void invalidate_dcache_range(unsigned long start, unsigned long end)
+{
+	register unsigned long i asm("a0") = start & -CONFIG_SYS_CACHELINE_SIZE;
+
+	for (; i < end; i += CONFIG_SYS_CACHELINE_SIZE)
+		asm volatile (".long 0x02a5000b" ::: "memory"); /* dcache.ipa a0 */
+	sync_i();
+}
+
+#if 0
+void icache_enable(void)
+{
+	asm volatile (".long 0x0100000b" ::: "memory"); /* icache.iall */
+	sync_i();
+	csr_set(CSR_MHCR, MHCR_IE | MHCR_RS | MHCR_BPE | MHCR_BTB | MHCR_L0BTB);
+	csr_set(CSR_MHINT, MHINT_IPLD | MHINT_IWPE);
+}
+
+void icache_disable(void)
+{
+	csr_clear(CSR_MHCR, MHCR_IE);
+}
+
+int icache_status(void)
+{
+	return csr_read(CSR_MHCR) & MHCR_IE;
+}
+
+void dcache_enable(void)
+{
+	asm volatile (".long 0x0020000b" ::: "memory"); /* dcache.iall */
+	sync_i();
+	csr_set(CSR_MHCR, MHCR_DE | MHCR_WA | MHCR_WB | MHCR_WBR);
+	csr_set(CSR_MHINT, MHINT_DPLD | MHINT_AMR_LIMIT_3);
+}
+
+void dcache_disable(void)
+{
+	asm volatile (".long 0x0010000b" ::: "memory"); /* dcache.call */
+	sync_i();
+	csr_clear(CSR_MHCR, MHCR_DE);
+}
+
+int dcache_status(void)
+{
+	return csr_read(CSR_MHCR) & MHCR_DE;
+}
+
+void enable_caches(void)
+{
+	icache_enable();
+	dcache_enable();
+}
+#endif
--- a/arch/riscv/lib/cache.c
+++ b/arch/riscv/lib/cache.c
@@ -20,7 +20,7 @@ __weak void flush_dcache_range(unsigned
 {
 }
 
-void invalidate_icache_range(unsigned long start, unsigned long end)
+__weak void invalidate_icache_range(unsigned long start, unsigned long end)
 {
 	/*
 	 * RISC-V does not have an instruction for invalidating parts of the