/*
 * kexec for arm64
 *
 * Copyright (C) Linaro.
 * Copyright (C) Huawei Futurewei Technologies.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <linux/kexec.h>
#include <linux/linkage.h>

#include <asm/assembler.h>
#include <asm/kexec.h>
#include <asm/page.h>
#include <asm/sysreg.h>

/*
 * arm64_relocate_new_kernel - Put a 2nd stage image in place and boot it.
 *
 * The memory that the old kernel occupies may be overwritten when copying the
 * new image to its final location. To ensure that the
 * arm64_relocate_new_kernel routine which does that copy is not overwritten,
 * all code and data needed by arm64_relocate_new_kernel must be between the
 * symbols arm64_relocate_new_kernel and .Lcopy_end. The machine_kexec()
 * routine will copy arm64_relocate_new_kernel to the kexec control_code_page,
 * a special page which has been set up to be preserved during the copy
 * operation.
 */
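
/*
 * The kimage entry list walked below is a flat array of 64-bit words
 * (see linux/kexec.h): each entry is a page-aligned address with flag
 * bits in the low bits. IND_DESTINATION sets where the following source
 * pages are copied to, IND_INDIRECTION points at the next page of list
 * entries, IND_SOURCE names a page to copy, and IND_DONE terminates the
 * list.
 */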
ENTRY(arm64_relocate_new_kernel)
        /* Setup the list loop variables. */
        mov     x17, x1                         /* x17 = kimage_start */
        mov     x16, x0                         /* x16 = kimage_head */
        raw_dcache_line_size x15, x0            /* x15 = dcache line size */
        mov     x14, xzr                        /* x14 = entry ptr */
        mov     x13, xzr                        /* x13 = copy dest */
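
        /*
         * If executing at EL2, disable the EL2 MMU and caches as well,
         * so that the copy below runs on physical addresses and the new
         * image inherits a clean sctlr_el2, as it would expect after
         * reset.
         */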
        /* Clear the sctlr_el2 flags. */
        mrs     x0, CurrentEL
        cmp     x0, #CurrentEL_EL2
        b.ne    1f
        mrs     x0, sctlr_el2
        ldr     x1, =SCTLR_ELx_FLAGS
        bic     x0, x0, x1
        msr     sctlr_el2, x0
        isb
1:
        /* Check if the new image needs relocation. */
        tbnz    x16, IND_DONE_BIT, .Ldone
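
        /*
         * Walk the entry list. Each pass decodes one entry: record a new
         * destination, switch to a new indirection page, or copy one
         * source page to the current destination.
         */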
.Lloop:
        and     x12, x16, PAGE_MASK             /* x12 = addr */

        /* Test the entry flags. */
.Ltest_source:
        tbz     x16, IND_SOURCE_BIT, .Ltest_indirection

        /* Invalidate dest page to PoC. */
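        /*
         * Stale cache lines for the destination page are discarded up
         * front: with the MMU off the stores in copy_page below are
         * non-cacheable, so a dirty line evicted later could otherwise
         * overwrite the freshly copied data.
         */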
        mov     x0, x13
        add     x20, x0, #PAGE_SIZE
        sub     x1, x15, #1
        bic     x0, x0, x1
2:      dc      ivac, x0
        add     x0, x0, x15
        cmp     x0, x20
        b.lo    2b
        dsb     sy
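
        /*
         * copy_page (from asm/assembler.h) copies one page from x21 to
         * x20, advancing both pointers, with x0-x7 as scratch registers;
         * working copies are used so x13 and x12 survive the macro.
         */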
        mov     x20, x13
        mov     x21, x12
        copy_page x20, x21, x0, x1, x2, x3, x4, x5, x6, x7

        /* dest += PAGE_SIZE */
        add     x13, x13, PAGE_SIZE
        b       .Lnext

.Ltest_indirection:
        tbz     x16, IND_INDIRECTION_BIT, .Ltest_destination
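
        /*
         * An indirection entry carries the address of the next page of
         * list entries; repoint the walk pointer at it.
         */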
        /* ptr = addr */
        mov     x14, x12
        b       .Lnext

.Ltest_destination:
        tbz     x16, IND_DESTINATION_BIT, .Lnext

        /* dest = addr */
        mov     x13, x12

.Lnext:
        /* entry = *ptr++ */
        ldr     x16, [x14], #8

        /* while (!(entry & DONE)) */
        tbz     x16, IND_DONE_BIT, .Lloop
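
        /*
         * All pages are copied. Before branching to the new image, drain
         * the outstanding writes, then invalidate the entire I-cache so
         * no stale instructions from the old image can be fetched, and
         * synchronize the instruction stream with an isb.
         */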
.Ldone:
        /* Wait for writes from copy_page to finish. */
        dsb     nsh
        ic      iallu
        dsb     nsh
        isb

        /* Start new image. */
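        /*
         * x17 still holds kimage_start from entry; with kexec-tools this
         * is typically the purgatory, which sets up the dtb pointer and
         * enters the new kernel. x0-x3 are zeroed so the target sees a
         * well-defined register state.
         */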
        mov     x0, xzr
        mov     x1, xzr
        mov     x2, xzr
        mov     x3, xzr
        br      x17
ENDPROC(arm64_relocate_new_kernel)

.ltorg

.align 3        /* To keep the 64-bit values below naturally aligned. */

.Lcopy_end:
.org    KEXEC_CONTROL_PAGE_SIZE
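
/*
 * The .ltorg above flushes the literal pool (the SCTLR_ELx_FLAGS
 * constant loaded with ldr =) inside the copied region, and the .org
 * makes the assembler fail the build if the code ever outgrows the
 * control page.
 */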
/*
 * arm64_relocate_new_kernel_size - Number of bytes to copy to the
 * control_code_page.
 */
.globl arm64_relocate_new_kernel_size
arm64_relocate_new_kernel_size:
        .quad   .Lcopy_end - arm64_relocate_new_kernel