920-cache-wround.patch 3.3 KB

--- a/arch/mips/include/asm/r4kcache.h
+++ b/arch/mips/include/asm/r4kcache.h
@@ -20,10 +20,25 @@
#ifdef CONFIG_BCM47XX
#include <asm/paccess.h>
#include <linux/ssb/ssb.h>
-#define BCM4710_DUMMY_RREG() ((void) *((u8 *) KSEG1ADDR(SSB_ENUM_BASE + SSB_IMSTATE)))
+#define BCM4710_DUMMY_RREG() bcm4710_dummy_rreg()
+
+static inline unsigned long bcm4710_dummy_rreg(void) {
+	return (*(volatile unsigned long *)(KSEG1ADDR(SSB_ENUM_BASE + SSB_IMSTATE)));
+}
+
+#define BCM4710_FILL_TLB(addr) bcm4710_fill_tlb((void *)(addr))
+
+static inline unsigned long bcm4710_fill_tlb(void *addr) {
+	return (*(unsigned long *)addr);
+}
+
+#define BCM4710_PROTECTED_FILL_TLB(addr) bcm4710_protected_fill_tlb((void *)(addr))
+
+static inline void bcm4710_protected_fill_tlb(void *addr) {
+	unsigned long x;
+	get_dbe(x, (unsigned long *)addr);
+}
-#define BCM4710_FILL_TLB(addr) (*(volatile unsigned long *)(addr))
-#define BCM4710_PROTECTED_FILL_TLB(addr) ({ unsigned long x; get_dbe(x, (volatile unsigned long *)(addr)); })
#else
#define BCM4710_DUMMY_RREG()
--- a/arch/mips/mm/tlbex.c
+++ b/arch/mips/mm/tlbex.c
@@ -544,6 +544,9 @@ build_get_pgde32(u32 **p, unsigned int t
#endif
	uasm_i_addu(p, ptr, tmp, ptr);
#else
+#ifdef CONFIG_BCM47XX
+	uasm_i_nop(p);
+#endif
	UASM_i_LA_mostly(p, ptr, pgdc);
#endif
	uasm_i_mfc0(p, tmp, C0_BADVADDR); /* get faulting address */
@@ -674,12 +677,12 @@ static void __cpuinit build_r4000_tlb_re
		/* No need for uasm_i_nop */
	}
-#ifdef CONFIG_BCM47XX
-	uasm_i_nop(&p);
-#endif
#ifdef CONFIG_64BIT
	build_get_pmde64(&p, &l, &r, K0, K1); /* get pmd in K1 */
#else
+# ifdef CONFIG_BCM47XX
+	uasm_i_nop(&p);
+# endif
	build_get_pgde32(&p, K0, K1); /* get pgd in K1 */
#endif
@@ -687,6 +690,9 @@ static void __cpuinit build_r4000_tlb_re
	build_update_entries(&p, K0, K1);
	build_tlb_write_entry(&p, &l, &r, tlb_random);
	uasm_l_leave(&l, p);
+#ifdef CONFIG_BCM47XX
+	uasm_i_nop(&p);
+#endif
	uasm_i_eret(&p); /* return from trap */
#ifdef CONFIG_64BIT
@@ -1084,12 +1090,12 @@ build_r4000_tlbchange_handler_head(u32 *
				   struct uasm_reloc **r, unsigned int pte,
				   unsigned int ptr)
{
-#ifdef CONFIG_BCM47XX
-	uasm_i_nop(p);
-#endif
#ifdef CONFIG_64BIT
	build_get_pmde64(p, l, r, pte, ptr); /* get pmd in ptr */
#else
+# ifdef CONFIG_BCM47XX
+	uasm_i_nop(p);
+# endif
	build_get_pgde32(p, pte, ptr); /* get pgd in ptr */
#endif
@@ -1117,6 +1123,9 @@ build_r4000_tlbchange_handler_tail(u32 *
	build_update_entries(p, tmp, ptr);
	build_tlb_write_entry(p, l, r, tlb_indexed);
	uasm_l_leave(l, *p);
+#ifdef CONFIG_BCM47XX
+	uasm_i_nop(p);
+#endif
	uasm_i_eret(p); /* return from trap */
#ifdef CONFIG_64BIT
--- a/arch/mips/kernel/genex.S
+++ b/arch/mips/kernel/genex.S
@@ -22,6 +22,19 @@
#include <asm/page.h>
#include <asm/thread_info.h>
+#ifdef CONFIG_BCM47XX
+# ifdef eret
+#  undef eret
+# endif
+# define eret \
+	.set push; \
+	.set noreorder; \
+	nop; \
+	nop; \
+	eret; \
+	.set pop;
+#endif
+
#define PANIC_PIC(msg) \
	.set push; \
	.set reorder; \
@@ -54,7 +67,6 @@ NESTED(except_vec3_generic, 0, sp)
	.set noat
#ifdef CONFIG_BCM47XX
	nop
-	nop
#endif
#if R5432_CP0_INTERRUPT_WAR
	mfc0 k0, CP0_INDEX
@@ -79,6 +91,9 @@ NESTED(except_vec3_r4000, 0, sp)
	.set push
	.set mips3
	.set noat
+#ifdef CONFIG_BCM47XX
+	nop
+#endif
	mfc0 k1, CP0_CAUSE
	li k0, 31<<2
	andi k1, k1, 0x7c
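
Background note (not part of the patch): BCM4710_DUMMY_RREG() is the hook the BCM47xx cache workaround places in front of cache operations; it forces an uncached read of the SSB IMSTATE register so the following cache instruction is not hit by the BCM4710 prefetch bug, and the hunks above turn the macros into static inline functions and pad the TLB/eret paths with nops for the same reason. The fragment below is only an illustrative sketch, under the assumption that the companion r4kcache patches call the hook from helpers of this shape; bcm47xx_flush_dcache_line() is a hypothetical name, while cache_op() and Hit_Writeback_Inv_D come from the kernel's <asm/r4kcache.h> and <asm/cacheops.h>, and the exact in-tree helper may differ.

/*
 * Illustrative sketch only -- not part of this patch. Shows the intended
 * usage pattern of BCM4710_DUMMY_RREG(): issue the dummy uncached read
 * first, then perform the real cache operation on the target line.
 * Depends on <asm/r4kcache.h> and <asm/cacheops.h> for cache_op() and
 * Hit_Writeback_Inv_D.
 */
static inline void bcm47xx_flush_dcache_line(unsigned long addr)
{
	BCM4710_DUMMY_RREG();			/* dummy read of SSB IMSTATE (BCM4710 workaround) */
	cache_op(Hit_Writeback_Inv_D, addr);	/* writeback-invalidate the D-cache line */
}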