/* cp15.h — ARM CP15 and BCM283x per-core register helpers (RT-Thread Raspberry Pi BSP) */
/*
 * Copyright (c) 2006-2021, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2011-09-15     Bernard      first version
 */
  10. #include "raspi.h"
  11. #ifndef __CP15_H__
  12. #define __CP15_H__
  13. #ifndef __STATIC_FORCEINLINE
  14. #define __STATIC_FORCEINLINE __attribute__((always_inline)) static inline
  15. #endif
  16. #define __WFI() __asm__ volatile ("wfi":::"memory")
  17. #define __WFE() __asm__ volatile ("wfe":::"memory")
  18. #define __SEV() __asm__ volatile ("sev")
/**
  \brief   Instruction Synchronization Barrier
  \details Flushes the processor pipeline so that instructions after this
           point are fetched only once preceding context-changing operations
           are visible. (Doc comment added to match the DSB/DMB siblings.)
 */
__STATIC_FORCEINLINE void __ISB(void)
{
    __asm__ volatile ("isb 0xF":::"memory");
}
/**
  \brief   Data Synchronization Barrier
  \details Acts as a special kind of Data Memory Barrier.
           It completes when all explicit memory accesses before this
           instruction complete.
 */
__STATIC_FORCEINLINE void __DSB(void)
{
    __asm__ volatile ("dsb 0xF":::"memory");
}
/**
  \brief   Data Memory Barrier
  \details Ensures the apparent order of the explicit memory operations
           before and after the instruction, without ensuring their
           completion.
 */
__STATIC_FORCEINLINE void __DMB(void)
{
    __asm__ volatile ("dmb 0xF":::"memory");
}
  41. #ifdef RT_USING_SMP
  42. static inline void send_ipi_msg(int cpu, int ipi_vector)
  43. {
  44. IPI_MAILBOX_SET(cpu) = 1 << ipi_vector;
  45. }
/**
 * Store a boot entry address into a secondary core's mailbox-3 "set" register.
 * NOTE(review): presumably the parked core spins on this mailbox and jumps to
 * the value when it becomes non-zero — confirm against the SMP startup code.
 * NOTE(review): 'addr' is int; assumed to be a 32-bit physical address.
 */
static inline void setup_bootstrap_addr(int cpu, int addr)
{
    CORE_MAILBOX3_SET(cpu) = addr;
}
/**
 * Enable IPI mailbox interrupt delivery for the given core by writing the
 * mailbox interrupt mask into that core's mailbox interrupt-control register.
 */
static inline void enable_cpu_ipi_intr(int cpu)
{
    COREMB_INTCTL(cpu) = IPI_MAILBOX_INT_MASK;
}
/**
 * Enable a per-core timer interrupt via the core timer interrupt-control
 * register. NOTE(review): the magic 0x8 presumably enables the virtual timer
 * IRQ (bit 3, nCNTVIRQ) — confirm against the BCM2836 local-peripherals doc.
 */
static inline void enable_cpu_timer_intr(int cpu)
{
    CORETIMER_INTCTL(cpu) = 0x8;
}
  58. static inline void enable_cntv(void)
  59. {
  60. rt_uint32_t cntv_ctl;
  61. cntv_ctl = 1;
  62. asm volatile ("mcr p15, 0, %0, c14, c3, 1" :: "r"(cntv_ctl)); // write CNTV_CTL
  63. }
  64. static inline void disable_cntv(void)
  65. {
  66. rt_uint32_t cntv_ctl;
  67. cntv_ctl = 0;
  68. asm volatile ("mcr p15, 0, %0, c14, c3, 1" :: "r"(cntv_ctl)); // write CNTV_CTL
  69. }
  70. static inline void mask_cntv(void)
  71. {
  72. rt_uint32_t cntv_ctl;
  73. cntv_ctl = 2;
  74. asm volatile ("mcr p15, 0, %0, c14, c3, 1" :: "r"(cntv_ctl)); // write CNTV_CTL
  75. }
  76. static inline void unmask_cntv(void)
  77. {
  78. rt_uint32_t cntv_ctl;
  79. cntv_ctl = 1;
  80. asm volatile ("mcr p15, 0, %0, c14, c3, 1" :: "r"(cntv_ctl)); // write CNTV_CTL
  81. }
  82. static inline rt_uint64_t read_cntvct(void)
  83. {
  84. rt_uint32_t val,val1;
  85. asm volatile ("mrrc p15, 1, %0, %1, c14" : "=r" (val),"=r" (val1));
  86. return (val);
  87. }
/**
 * Read CNTVOFF, the 64-bit virtual offset register, as a single 64-bit value
 * (%Q0/%R0 select the low/high halves of 'val' for the mrrc transfer).
 * NOTE(review): CNTVOFF is normally writable/readable only from HYP or
 * secure monitor context — confirm the mode this is called from.
 */
static inline rt_uint64_t read_cntvoff(void)
{
    rt_uint64_t val;
    asm volatile ("mrrc p15, 4, %Q0, %R0, c14" : "=r" (val));
    return (val);
}
/**
 * Read CNTV_TVAL, the virtual timer's 32-bit down-counter value
 * (counts down toward the interrupt trigger point).
 */
static inline rt_uint32_t read_cntv_tval(void)
{
    rt_uint32_t val;
    asm volatile ("mrc p15, 0, %0, c14, c3, 0" : "=r"(val));
    return val;
}
  100. static inline void write_cntv_tval(rt_uint32_t val)
  101. {
  102. asm volatile ("mcr p15, 0, %0, c14, c3, 0" :: "r"(val));
  103. return;
  104. }
/**
 * Read CNTFRQ, the generic-timer frequency register (ticks per second, set
 * by firmware/boot code).
 */
static inline rt_uint32_t read_cntfrq(void)
{
    rt_uint32_t val;
    asm volatile ("mrc p15, 0, %0, c14, c0, 0" : "=r"(val));
    return val;
}
/**
 * Read the timer control register at encoding c14, c1, 0
 * (CNTKCTL in the ARM architecture manual naming).
 */
static inline rt_uint32_t read_cntctrl(void)
{
    rt_uint32_t val;
    asm volatile ("mrc p15, 0, %0, c14, c1, 0" : "=r"(val));
    return val;
}
  117. static inline rt_uint32_t write_cntctrl(rt_uint32_t val)
  118. {
  119. asm volatile ("mcr p15, 0, %0, c14, c1, 0" : :"r"(val));
  120. return val;
  121. }
  122. #endif
/* CPU, MMU, cache and vector helpers implemented elsewhere in the BSP
 * (assembly / mmu.c); only declared here. */
unsigned long rt_cpu_get_smp_id(void);          /* id of the calling core */
void rt_cpu_mmu_disable(void);
void rt_cpu_mmu_enable(void);
void rt_cpu_tlb_set(volatile unsigned long*);   /* NOTE(review): presumably installs the translation table base — confirm in asm impl */
void rt_cpu_dcache_clean_flush(void);
void rt_cpu_icache_flush(void);
void rt_cpu_vector_set_base(unsigned int addr); /* set exception vector base address */
void rt_hw_mmu_init(void);
void rt_hw_vector_init(void);
void set_timer_counter(unsigned int counter);
void set_timer_control(unsigned int control);
  134. #endif