headsmp.S

/*
 * SMP support for R-Mobile / SH-Mobile
 *
 * Copyright (C) 2010 Magnus Damm
 * Copyright (C) 2010 Takashi Yoshii
 *
 * Based on vexpress, Copyright (c) 2003 ARM Limited, All Rights Reserved
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <linux/linkage.h>
#include <linux/init.h>
#include <asm/memory.h>

	__CPUINIT

/* Cache invalidation nicked from arch/arm/mach-imx/head-v7.S, thanks!
 *
 * The secondary kernel init calls v7_flush_dcache_all before it enables
 * the L1; however, the L1 comes out of reset in an undefined state, so
 * the clean + invalidate performed by v7_flush_dcache_all causes a bunch
 * of cache lines with uninitialized data and uninitialized tags to get
 * written out to memory, which does really unpleasant things to the main
 * processor. We fix this by performing an invalidate, rather than a
 * clean + invalidate, before jumping into the kernel.
 *
 * This function is cloned from arch/arm/mach-tegra/headsmp.S, and needs
 * to be called for both secondary cores startup and primary core resume
 * procedures. Ideally, it should be moved into arch/arm/mm/cache-v7.S.
 */
ENTRY(v7_invalidate_l1)
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0	@ invalidate I cache
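	@ Select the L1 data cache in CSSELR, then read its geometry
	@ (line size, associativity, number of sets) back from CCSIDR.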
	mcr	p15, 2, r0, c0, c0, 0
	mrc	p15, 1, r0, c0, c0, 0

	ldr	r1, =0x7fff
	and	r2, r1, r0, lsr #13

	ldr	r1, =0x3ff

	and	r3, r1, r0, lsr #3	@ NumWays - 1
	add	r2, r2, #1		@ NumSets

	and	r0, r0, #0x7
	add	r0, r0, #4		@ SetShift

	clz	r1, r3			@ WayShift
	add	r4, r3, #1		@ NumWays
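
	@ Walk every set/way combination of the selected cache and
	@ invalidate each line in turn.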
1:	sub	r2, r2, #1		@ NumSets--
	mov	r3, r4			@ Temp = NumWays
2:	subs	r3, r3, #1		@ Temp--
	mov	r5, r3, lsl r1
	mov	r6, r2, lsl r0
	orr	r5, r5, r6		@ Reg = (Temp<<WayShift)|(NumSets<<SetShift)
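	@ DCISW: invalidate the data cache line selected by set/way in r5.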
	mcr	p15, 0, r5, c7, c6, 2
	bgt	2b
	cmp	r2, #0
	bgt	1b
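	@ Ensure the invalidations have completed and are visible
	@ before returning.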
	dsb
	isb
	mov	pc, lr
ENDPROC(v7_invalidate_l1)
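
/*
 * Invalidate the L1 before entering the generic secondary_startup path;
 * this is the target of the secondary CPU reset vector below.
 */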
ENTRY(shmobile_invalidate_start)
	bl	v7_invalidate_l1
	b	secondary_startup
ENDPROC(shmobile_invalidate_start)

/*
 * Reset vector for secondary CPUs.
 * This will be mapped at address 0 by SBAR register.
 * We need _long_ jump to the physical address.
 */
	.align	12
ENTRY(shmobile_secondary_vector)
	ldr	pc, 1f
1:	.long	shmobile_invalidate_start - PAGE_OFFSET + PLAT_PHYS_OFFSET
ENDPROC(shmobile_secondary_vector)