ram.ld

/*
 * Copyright (c) 2021-2023 HPMicro
 * SPDX-License-Identifier: BSD-3-Clause
 */
ENTRY(_start)
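
/* Stack and heap default to 16 KB (0x4000) each; override by defining _stack_size / _heap_size at link time. */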
STACK_SIZE = DEFINED(_stack_size) ? _stack_size : 0x4000;
HEAP_SIZE = DEFINED(_heap_size) ? _heap_size : 0x4000;
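
/* On-chip RAM regions. AXI SRAM is split into a general-purpose half (SRAM0) and a half set aside for noncacheable data (SRAM1). */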
MEMORY
{
    ILM (wx) : ORIGIN = 0x00000000, LENGTH = 256K
    DLM (w) : ORIGIN = 0x00080000, LENGTH = 256K
    AXI_SRAM (wx) : ORIGIN = 0x01080000, LENGTH = 512K /* AXI SRAM0 */
    NONCACHEABLE_RAM (w) : ORIGIN = 0x01100000, LENGTH = 256K /* AXI SRAM1 */
    SHARE_RAM (w) : ORIGIN = 0x0117C000, LENGTH = 16K
    AHB_SRAM (w) : ORIGIN = 0xF0300000, LENGTH = 32K
    APB_SRAM (w) : ORIGIN = 0xF40F0000, LENGTH = 8K
}
SECTIONS
{
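    /* Startup code and the vector tables are kept at the very beginning of ILM. */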
    .start : {
        . = ALIGN(8);
        KEEP(*(.start))
    } > ILM

    .vectors : {
        . = ALIGN(8);
        KEEP(*(.isr_vector))
        KEEP(*(.vector_table))
        . = ALIGN(8);
    } > ILM

    .rel : {
        KEEP(*(.rel*))
    } > ILM
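
    /* Code and read-only data execute in place from ILM. */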
    .text : {
        . = ALIGN(8);
        *(.text)
        *(.text*)
        *(.rodata)
        *(.rodata*)
        *(.srodata)
        *(.srodata*)
        *(.hash)
        *(.dyn*)
        *(.gnu*)
        *(.pl*)
        KEEP(*(.eh_frame))
        *(.eh_frame*)
        KEEP (*(.init))
        KEEP (*(.fini))
        /* section information for usbh class */
        . = ALIGN(8);
        __usbh_class_info_start__ = .;
        KEEP(*(.usbh_class_info))
        __usbh_class_info_end__ = .;
        . = ALIGN(8);
        PROVIDE (__etext = .);
        PROVIDE (_etext = .);
        PROVIDE (etext = .);
    } > ILM
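
    /*
     * .data runs from AXI_SRAM, but its load image is placed in ILM right after .text (AT(etext))
     * so that startup code can copy it to __data_start__ at boot.
     * __global_pointer$ enables gp-relative addressing of small data on RISC-V.
     */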
    .data : AT(etext) {
        . = ALIGN(8);
        __data_start__ = .;
        __global_pointer$ = . + 0x800;
        *(.data)
        *(.data*)
        *(.sdata)
        *(.sdata*)
        *(.tdata)
        *(.tdata*)
        KEEP(*(.jcr))
        KEEP(*(.dynamic))
        KEEP(*(.got*))
        KEEP(*(.got))
        KEEP(*(.gcc_except_table))
        KEEP(*(.gcc_except_table.*))
        . = ALIGN(8);
        PROVIDE(__preinit_array_start = .);
        KEEP(*(.preinit_array))
        PROVIDE(__preinit_array_end = .);
        . = ALIGN(8);
        PROVIDE(__init_array_start = .);
        KEEP(*(SORT_BY_INIT_PRIORITY(.init_array.*)))
        KEEP(*(.init_array))
        PROVIDE(__init_array_end = .);
        . = ALIGN(8);
        PROVIDE(__finit_array_start = .);
        KEEP(*(SORT_BY_INIT_PRIORITY(.finit_array.*)))
        KEEP(*(.finit_array))
        PROVIDE(__finit_array_end = .);
        . = ALIGN(8);
        KEEP(*crtbegin*.o(.ctors))
        KEEP(*(EXCLUDE_FILE (*crtend*.o) .ctors))
        KEEP(*(SORT(.ctors.*)))
        KEEP(*(.ctors))
        . = ALIGN(8);
        KEEP(*crtbegin*.o(.dtors))
        KEEP(*(EXCLUDE_FILE (*crtend*.o) .dtors))
        KEEP(*(SORT(.dtors.*)))
        KEEP(*(.dtors))
        . = ALIGN(8);
        __data_end__ = .;
        PROVIDE (__edata = .);
        PROVIDE (_edata = .);
        PROVIDE (edata = .);
    } > AXI_SRAM
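
    /* Time-critical code placed in .fast runs from ILM; its load image follows the .data image and is expected to be copied to __ramfunc_start__ at startup. */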
    .fast : AT(etext + __data_end__ - __data_start__) {
        . = ALIGN(8);
        PROVIDE(__ramfunc_start__ = .);
        *(.fast)
        . = ALIGN(8);
        PROVIDE(__ramfunc_end__ = .);
    } > ILM
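
    /* Zero-initialized data; no load image, cleared between __bss_start__ and __bss_end__ at startup. */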
    .bss (NOLOAD) : {
        . = ALIGN(8);
        __bss_start__ = .;
        *(.bss)
        *(.bss*)
        *(.tbss*)
        *(.sbss*)
        *(.scommon)
        *(.scommon*)
        *(.tcommon*)
        *(.dynsbss*)
        *(COMMON)
        . = ALIGN(8);
        _end = .;
        __bss_end__ = .;
    } > AXI_SRAM
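
    /* Display framebuffers get their own NOLOAD section in AXI_SRAM. */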
    .framebuffer (NOLOAD) : {
        . = ALIGN(8);
        KEEP(*(.framebuffer))
        . = ALIGN(8);
    } > AXI_SRAM
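
    /* Data placed in AXI SRAM1, which this script reserves for noncacheable use; its bounds are exported below as __noncacheable_start__/__noncacheable_end__. */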
    .noncacheable.init : AT(etext + __data_end__ - __data_start__ + __ramfunc_end__ - __ramfunc_start__) {
        . = ALIGN(8);
        __noncacheable_init_start__ = .;
        KEEP(*(.noncacheable.init))
        __noncacheable_init_end__ = .;
        . = ALIGN(8);
    } > NONCACHEABLE_RAM

    .noncacheable.bss (NOLOAD) : {
        . = ALIGN(8);
        KEEP(*(.noncacheable))
        __noncacheable_bss_start__ = .;
        KEEP(*(.noncacheable.bss))
        __noncacheable_bss_end__ = .;
        . = ALIGN(8);
    } > NONCACHEABLE_RAM
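
    /* Direct NOLOAD placements into the remaining RAM regions: AHB/APB SRAM, shared memory, and DLM. */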
    .ahb_sram (NOLOAD) : {
        KEEP(*(.ahb_sram))
    } > AHB_SRAM

    .apb_sram (NOLOAD) : {
        KEEP(*(.backup_sram))
    } > APB_SRAM

    .sh_mem (NOLOAD) : {
        KEEP(*(.sh_mem))
    } > SHARE_RAM

    .fast_ram (NOLOAD) : {
        KEEP(*(.fast_ram))
    } > DLM
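
    /* Heap and stack live in DLM; their sizes come from HEAP_SIZE and STACK_SIZE above, and _stack marks the initial stack top. */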
    .heap (NOLOAD) : {
        . = ALIGN(8);
        __heap_start__ = .;
        . += HEAP_SIZE;
        __heap_end__ = .;
    } > DLM

    .stack (NOLOAD) : {
        . = ALIGN(8);
        __stack_base__ = .;
        . += STACK_SIZE;
        . = ALIGN(8);
        PROVIDE (_stack = .);
        PROVIDE (_stack_safe = .);
    } > DLM
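
    /* Export region bounds so runtime code can locate the noncacheable and shared-memory areas. */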
    __noncacheable_start__ = ORIGIN(NONCACHEABLE_RAM);
    __noncacheable_end__ = ORIGIN(NONCACHEABLE_RAM) + LENGTH(NONCACHEABLE_RAM);
    __share_mem_start__ = ORIGIN(SHARE_RAM);
    __share_mem_end__ = ORIGIN(SHARE_RAM) + LENGTH(SHARE_RAM);

    ASSERT((STACK_SIZE + HEAP_SIZE) <= 256K, "stack and heap total size exceeds 256K")
}