realview_vmm.lds.S

#include <rtt_api.h>
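/* This script is run through the C preprocessor first (hence the .lds.S
 * suffix and the #include above); VMM_BEGIN, the VMM_SHARE_*_POS/_PGSZ
 * layout macros and RT_VMM_STACK_SIZE are expected to be provided by
 * rtt_api.h or by the build flags. */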
OUTPUT_FORMAT("elf32-littlearm", "elf32-littlearm", "elf32-littlearm")
OUTPUT_ARCH(arm)
SECTIONS
{
    . = VMM_BEGIN;

    __text_start = .;
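    /* Code that has to live in the shared text page, presumably mapped on
     * both sides of the VMM: the exception vectors, the ISR entry code and
     * the VMM glue. KEEP() stops --gc-sections from dropping the vector
     * table even when nothing references it directly. */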
    .text.share :
    {
        KEEP(*(.vectors))
        *(.text.isr)
        *(.vmm_glue)
    }
    ASSERT(SIZEOF(.text.share) <= VMM_SHARE_TEXT_PGSZ, ".text.share too big")

    . = VMM_BEGIN + VMM_SHARE_TEXT_PGSZ;
    /* the vector page is saved here
     * {
     * }
     */
    . = VMM_SHARE_DATA_POS;
    .data.share :
    {
        __data_share_start = .;
        *(.data.share*)
        __data_share_end = .;
    }
    ASSERT(SIZEOF(.data.share) <= (VMM_SHARE_DATA_PGSZ), ".data.share is too big")

    . = VMM_SHARE_BSS_POS;
    .bss.share :
    {
        __bss_share_start = .;
        *(.bss.share*)
        __bss_share_end = .;
    }
    ASSERT(SIZEOF(.bss.share) <= (VMM_SHARE_BSS_PGSZ), ".bss.share is too big")

    . = VMM_SHARE_CTX_POS;
    .vmm.share :
    {
        /* the vmm context goes here */
        __vmm_share_start = .;
        *(.vmm.share*)
        __vmm_share_end = .;
    }
    ASSERT(SIZEOF(.vmm.share) <= (VMM_SHARE_CTX_PGSZ), "vmm share context is too big")
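    /* Each shared section above sits at a fixed offset (VMM_SHARE_DATA_POS,
     * VMM_SHARE_BSS_POS, VMM_SHARE_CTX_POS) and is size-checked with ASSERT,
     * presumably so the layout stays in step with whatever the other side of
     * the VMM maps. The private part of the image follows the whole shared
     * area at VMM_BEGIN + VMM_SHARE_PGSZ. */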
    . = VMM_BEGIN + VMM_SHARE_PGSZ;
    .text :
    {
        *(.vmm_init)
        *(.text)
        *(.text.*)

        /* section information for finsh shell */
        . = ALIGN(4);
        __fsymtab_start = .;
        KEEP(*(FSymTab))
        __fsymtab_end = .;
        . = ALIGN(4);
        __vsymtab_start = .;
        KEEP(*(VSymTab))
        __vsymtab_end = .;
        . = ALIGN(4);

        /* section information for modules */
        . = ALIGN(4);
        __rtmsymtab_start = .;
        KEEP(*(RTMSymTab))
        __rtmsymtab_end = .;

        /* section information for initialization */
        . = ALIGN(4);
        __rt_init_start = .;
        KEEP(*(SORT(.rti_fn*)))
        __rt_init_end = .;
    }
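    /* FSymTab and VSymTab above are the finsh shell command and variable
     * tables, RTMSymTab the symbols exported to loadable modules, and
     * .rti_fn* the auto-initialization entries registered with INIT_EXPORT();
     * KEEP() preserves them under --gc-sections and SORT() keeps the init
     * functions in level order. */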
    __text_end = .;

    __rodata_start = .;
    .rodata : { *(.rodata) *(.rodata.*) }
    __rodata_end = .;

    . = ALIGN(4);
    .ctors :
    {
        PROVIDE(__ctors_start__ = .);
        KEEP(*(SORT(.ctors.*)))
        KEEP(*(.ctors))
        PROVIDE(__ctors_end__ = .);
    }

    .dtors :
    {
        PROVIDE(__dtors_start__ = .);
        KEEP(*(SORT(.dtors.*)))
        KEEP(*(.dtors))
        PROVIDE(__dtors_end__ = .);
    }
    __data_start = .;
    . = ALIGN(8);
    .data :
    {
        *(.data)
        *(.data.*)
    }
    __data_end = .;

    . = ALIGN(8);
    __bss_start = __data_end;
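    /* The first RT_VMM_STACK_SIZE bytes of .bss are reserved for the VMM
     * stack by simply advancing the location counter between vmm_stack_start
     * and vmm_stack_end before the ordinary .bss input sections are placed. */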
    .bss :
    {
        vmm_stack_start = .;
        . = vmm_stack_start + RT_VMM_STACK_SIZE;
        vmm_stack_end = .;

        *(.bss)
        *(.bss.*)
        *(COMMON)
        . = ALIGN(4);
    }
    . = ALIGN(4);
    __bss_end = .;

    /* Stabs debugging sections. */
    .stab          0 : { *(.stab) }
    .stabstr       0 : { *(.stabstr) }
    .stab.excl     0 : { *(.stab.excl) }
    .stab.exclstr  0 : { *(.stab.exclstr) }
    .stab.index    0 : { *(.stab.index) }
    .stab.indexstr 0 : { *(.stab.indexstr) }
    .comment       0 : { *(.comment) }

    _end = .;
}