/**************************************************************************//**
 * @file    core_cmInstr.h
 * @brief   CMSIS Cortex-M Core Instruction Access Header File
 * @version V1.40
 * @date    16. February 2010
 *
 * @note
 * Copyright (C) 2009-2010 ARM Limited. All rights reserved.
 *
 * @par
 * ARM Limited (ARM) is supplying this software for use with Cortex-M
 * processor based microcontrollers. This file can be freely distributed
 * within development tools that are supporting such ARM based processors.
 *
 * @par
 * THIS SOFTWARE IS PROVIDED "AS IS". NO WARRANTIES, WHETHER EXPRESS, IMPLIED
 * OR STATUTORY, INCLUDING, BUT NOT LIMITED TO, IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE APPLY TO THIS SOFTWARE.
 * ARM SHALL NOT, IN ANY CIRCUMSTANCES, BE LIABLE FOR SPECIAL, INCIDENTAL, OR
 * CONSEQUENTIAL DAMAGES, FOR ANY REASON WHATSOEVER.
 *
 ******************************************************************************/

#ifndef __CORE_CMINSTR_H__
#define __CORE_CMINSTR_H__

/* ##########################  Core Instruction Access  ######################### */

#if defined ( __CC_ARM ) /*------------------ RealView Compiler ----------------*/
/* ARM armcc specific functions */

/**
 * @brief No Operation
 *
 * No Operation does nothing. This instruction can be used for code alignment
 * purposes.
 */
#define __NOP __nop

/**
 * @brief Wait For Interrupt
 *
 * Wait For Interrupt is a hint instruction that suspends execution until
 * one of a number of events occurs.
 */
#define __WFI __wfi

/**
 * @brief Wait For Event
 *
 * Wait For Event is a hint instruction that permits the processor to enter
 * a low-power state until one of a number of events occurs.
 */
#define __WFE __wfe

/**
 * @brief Send Event
 *
 * Send Event is a hint instruction. It causes an event to be signaled
 * to the CPU.
 */
#define __SEV __sev
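
/*
 * Example (illustrative sketch, not part of the CMSIS API): a typical
 * "sleep until work arrives" idle loop built on __WFI, assuming an
 * interrupt handler sets the hypothetical flag `dataReady`:
 *
 *   volatile uint32_t dataReady;
 *   while (dataReady == 0u) {
 *     __WFI();             // suspend execution until the next interrupt
 *   }
 */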

/**
 * @brief Instruction Synchronization Barrier
 *
 * Instruction Synchronization Barrier flushes the pipeline in the processor,
 * so that all instructions following the ISB are fetched from cache or
 * memory, after the instruction has been completed
 */
#define __ISB() __isb(0xF)

/**
 * @brief Data Synchronization Barrier
 *
 * The DSB instruction operation acts as a special kind of Data Memory Barrier.
 * The DSB operation completes when all explicit memory accesses before this
 * instruction complete.
 */
#define __DSB() __dsb(0xF)

/**
 * @brief Data Memory Barrier
 *
 * DMB ensures the apparent order of the explicit memory operations before
 * and after the instruction, without ensuring their completion.
 */
#define __DMB() __dmb(0xF)
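
/*
 * Example (illustrative sketch, not part of the CMSIS API): barriers are
 * typically placed after a system-level write whose effect must be visible
 * before execution continues, e.g. after relocating the vector table on a
 * Cortex-M3 (SCB->VTOR comes from the core peripheral header; the symbol
 * `newVectorTable` is hypothetical):
 *
 *   SCB->VTOR = (uint32_t)&newVectorTable;
 *   __DSB();              // wait until the write has completed
 *   __ISB();              // flush the pipeline so following fetches use the new setting
 */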

/**
 * @brief Reverse byte order (32 bit)
 *
 * @param value value to reverse
 * @return reversed value
 *
 * Reverse byte order in integer value
 */
#define __REV __rev

/**
 * @brief Reverse byte order (16 bit)
 *
 * @param value value to reverse
 * @return reversed value
 *
 * Reverse byte order in unsigned short value
 */
#if (__ARMCC_VERSION < 400677)
extern uint32_t __REV16(uint16_t value);
#else /* (__ARMCC_VERSION >= 400677) */
static __INLINE __ASM uint32_t __REV16(uint16_t value)
{
  rev16 r0, r0
  bx lr
}
#endif /* __ARMCC_VERSION */

/**
 * @brief Reverse byte order in signed short value with sign extension to integer
 *
 * @param value value to reverse
 * @return reversed value
 *
 * Reverse byte order in signed short value with sign extension to integer
 */
#if (__ARMCC_VERSION < 400677)
extern int32_t __REVSH(int16_t value);
#else /* (__ARMCC_VERSION >= 400677) */
static __INLINE __ASM int32_t __REVSH(int16_t value)
{
  revsh r0, r0
  bx lr
}
#endif /* __ARMCC_VERSION */
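
/*
 * Example (illustrative, not part of the CMSIS API): expected results of the
 * byte-reversal intrinsics:
 *
 *   __REV  (0x12345678)      == 0x78563412            // bytes of the word reversed
 *   __REV16(0x1234)          == 0x3412                // bytes of the halfword reversed
 *   __REVSH((int16_t)0x0080) == (int32_t)0xFFFF8000   // bytes reversed, then sign-extended
 */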

#if (__CORTEX_M >= 0x03)

/**
 * @brief Reverse bit order of value
 *
 * @param value value to reverse
 * @return reversed value
 *
 * Reverse bit order of value
 */
#define __RBIT __rbit
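
/*
 * Example (illustrative, not part of the CMSIS API):
 *
 *   __RBIT(0x00000001) == 0x80000000   // bit 0 moves to bit 31, and so on
 *   __RBIT(0xF0000000) == 0x0000000F
 */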

/**
 * @brief LDR Exclusive (8 bit)
 *
 * @param *addr address pointer
 * @return value of (*address)
 *
 * Exclusive LDR command for 8 bit value
 */
#define __LDREXB(ptr) ((unsigned char ) __ldrex(ptr))

/**
 * @brief LDR Exclusive (16 bit)
 *
 * @param *addr address pointer
 * @return value of (*address)
 *
 * Exclusive LDR command for 16 bit values
 */
#define __LDREXH(ptr) ((unsigned short) __ldrex(ptr))

/**
 * @brief LDR Exclusive (32 bit)
 *
 * @param *addr address pointer
 * @return value of (*address)
 *
 * Exclusive LDR command for 32 bit values
 */
#define __LDREXW(ptr) ((unsigned int  ) __ldrex(ptr))

/**
 * @brief STR Exclusive (8 bit)
 *
 * @param value value to store
 * @param *addr address pointer
 * @return successful / failed
 *
 * Exclusive STR command for 8 bit values
 */
#define __STREXB(value, ptr) __strex(value, ptr)

/**
 * @brief STR Exclusive (16 bit)
 *
 * @param value value to store
 * @param *addr address pointer
 * @return successful / failed
 *
 * Exclusive STR command for 16 bit values
 */
#define __STREXH(value, ptr) __strex(value, ptr)

/**
 * @brief STR Exclusive (32 bit)
 *
 * @param value value to store
 * @param *addr address pointer
 * @return successful / failed
 *
 * Exclusive STR command for 32 bit values
 */
#define __STREXW(value, ptr) __strex(value, ptr)

/**
 * @brief Remove the exclusive lock created by ldrex
 *
 * Removes the exclusive lock which is created by ldrex.
 */
#if (__ARMCC_VERSION < 400000)
extern void __CLREX(void);
#else /* (__ARMCC_VERSION >= 400000) */
#define __CLREX __clrex
#endif /* __ARMCC_VERSION */
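
/*
 * Example (illustrative sketch, not part of the CMSIS API): the exclusive
 * load/store pair is normally used in a retry loop to build an atomic
 * read-modify-write sequence, e.g. incrementing a shared counter (the
 * variable `counter` is hypothetical):
 *
 *   uint32_t counter;   // shared variable, hypothetical
 *   uint32_t old;
 *   do {
 *     old = __LDREXW(&counter);                    // load and mark exclusive access
 *   } while (__STREXW(old + 1u, &counter) != 0u);  // 0 = store succeeded, otherwise retry
 */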

#endif /* (__CORTEX_M >= 0x03) */


#elif (defined (__ICCARM__)) /*---------------- ICC Compiler ---------------------*/
/* IAR iccarm specific functions */

#if defined (__ICCARM__)
  #include <intrinsics.h>   /* IAR Intrinsics */
#endif

#pragma diag_suppress=Pe940
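
/*
 * Note (explanatory, not part of the original file): diagnostic Pe940
 * ("missing return statement") is suppressed here because the IAR inline
 * assembly functions below leave their result in r0 and intentionally have
 * no C return statement.
 */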

/**
 * @brief No Operation
 *
 * No Operation does nothing. This instruction can be used for code alignment
 * purposes.
 */
#define __NOP __no_operation

/**
 * @brief Wait For Interrupt
 *
 * Wait For Interrupt is a hint instruction that suspends execution until
 * one of a number of events occurs.
 */
static __INLINE void __WFI() { __ASM ("wfi"); }

/**
 * @brief Wait For Event
 *
 * Wait For Event is a hint instruction that permits the processor to enter
 * a low-power state until one of a number of events occurs.
 */
static __INLINE void __WFE() { __ASM ("wfe"); }

/**
 * @brief Send Event
 *
 * Send Event is a hint instruction. It causes an event to be signaled
 * to the CPU.
 */
static __INLINE void __SEV() { __ASM ("sev"); }

/**
 * @brief Instruction Synchronization Barrier
 *
 * Instruction Synchronization Barrier flushes the pipeline in the processor,
 * so that all instructions following the ISB are fetched from cache or
 * memory, after the instruction has been completed
 */
/* intrinsic void __ISB(void) (see intrinsics.h) */

/**
 * @brief Data Synchronization Barrier
 *
 * The DSB instruction operation acts as a special kind of Data Memory Barrier.
 * The DSB operation completes when all explicit memory accesses before this
 * instruction complete.
 */
/* intrinsic void __DSB(void) (see intrinsics.h) */

/**
 * @brief Data Memory Barrier
 *
 * DMB ensures the apparent order of the explicit memory operations before
 * and after the instruction, without ensuring their completion.
 */
/* intrinsic void __DMB(void) (see intrinsics.h) */

/**
 * @brief Reverse byte order (32 bit)
 *
 * @param value value to reverse
 * @return reversed value
 *
 * Reverse byte order in integer value
 */
/* intrinsic uint32_t __REV(uint32_t value) (see intrinsics.h) */

/**
 * @brief Reverse byte order (16 bit)
 *
 * @param value value to reverse
 * @return reversed value
 *
 * Reverse byte order in unsigned short value
 */
static uint32_t __REV16(uint16_t value)
{
  __ASM("rev16 r0, r0");
}

/**
 * @brief Reverse byte order in signed short value with sign extension to integer
 *
 * @param value value to reverse
 * @return reversed value
 *
 * Reverse byte order in signed short value with sign extension to integer
 */
/* intrinsic uint32_t __REVSH(uint32_t value) (see intrinsics.h) */

#if (__CORTEX_M >= 0x03)

/**
 * @brief Reverse bit order of value
 *
 * @param value value to reverse
 * @return reversed value
 *
 * Reverse bit order of value
 */
static uint32_t __RBIT(uint32_t value)
{
  __ASM("rbit r0, r0");
}

/**
 * @brief LDR Exclusive (8 bit)
 *
 * @param *addr address pointer
 * @return value of (*address)
 *
 * Exclusive LDR command for 8 bit value
 */
static uint8_t __LDREXB(uint8_t *addr)
{
  __ASM("ldrexb r0, [r0]");
}

/**
 * @brief LDR Exclusive (16 bit)
 *
 * @param *addr address pointer
 * @return value of (*address)
 *
 * Exclusive LDR command for 16 bit values
 */
static uint16_t __LDREXH(uint16_t *addr)
{
  __ASM("ldrexh r0, [r0]");
}

/**
 * @brief LDR Exclusive (32 bit)
 *
 * @param *addr address pointer
 * @return value of (*address)
 *
 * Exclusive LDR command for 32 bit values
 */
/* intrinsic unsigned long __LDREX(unsigned long *) (see intrinsics.h) */
static uint32_t __LDREXW(uint32_t *addr)
{
  __ASM("ldrex r0, [r0]");
}

/**
 * @brief STR Exclusive (8 bit)
 *
 * @param value value to store
 * @param *addr address pointer
 * @return successful / failed
 *
 * Exclusive STR command for 8 bit values
 */
static uint32_t __STREXB(uint8_t value, uint8_t *addr)
{
  __ASM("strexb r0, r0, [r1]");
}

/**
 * @brief STR Exclusive (16 bit)
 *
 * @param value value to store
 * @param *addr address pointer
 * @return successful / failed
 *
 * Exclusive STR command for 16 bit values
 */
static uint32_t __STREXH(uint16_t value, uint16_t *addr)
{
  __ASM("strexh r0, r0, [r1]");
}

/**
 * @brief STR Exclusive (32 bit)
 *
 * @param value value to store
 * @param *addr address pointer
 * @return successful / failed
 *
 * Exclusive STR command for 32 bit values
 */
/* intrinsic unsigned long __STREX(unsigned long, unsigned long) (see intrinsics.h) */
static uint32_t __STREXW(uint32_t value, uint32_t *addr)
{
  __ASM("strex r0, r0, [r1]");
}

/**
 * @brief Remove the exclusive lock created by ldrex
 *
 * Removes the exclusive lock which is created by ldrex.
 */
static __INLINE void __CLREX() { __ASM ("clrex"); }

#endif /* (__CORTEX_M >= 0x03) */

#pragma diag_default=Pe940


#elif (defined (__GNUC__)) /*------------------ GNU Compiler ---------------------*/
/* GNU gcc specific functions */

/**
 * @brief No Operation
 *
 * No Operation does nothing. This instruction can be used for code alignment
 * purposes.
 */
static __INLINE void __NOP() { __ASM volatile ("nop"); }

/**
 * @brief Wait For Interrupt
 *
 * Wait For Interrupt is a hint instruction that suspends execution until
 * one of a number of events occurs.
 */
static __INLINE void __WFI() { __ASM volatile ("wfi"); }

/**
 * @brief Wait For Event
 *
 * Wait For Event is a hint instruction that permits the processor to enter
 * a low-power state until one of a number of events occurs.
 */
static __INLINE void __WFE() { __ASM volatile ("wfe"); }

/**
 * @brief Send Event
 *
 * Send Event is a hint instruction. It causes an event to be signaled
 * to the CPU.
 */
static __INLINE void __SEV() { __ASM volatile ("sev"); }

/**
 * @brief Instruction Synchronization Barrier
 *
 * Instruction Synchronization Barrier flushes the pipeline in the processor,
 * so that all instructions following the ISB are fetched from cache or
 * memory, after the instruction has been completed
 */
static __INLINE void __ISB() { __ASM volatile ("isb"); }

/**
 * @brief Data Synchronization Barrier
 *
 * The DSB instruction operation acts as a special kind of Data Memory Barrier.
 * The DSB operation completes when all explicit memory accesses before this
 * instruction complete.
 */
static __INLINE void __DSB() { __ASM volatile ("dsb"); }

/**
 * @brief Data Memory Barrier
 *
 * DMB ensures the apparent order of the explicit memory operations before
 * and after the instruction, without ensuring their completion.
 */
static __INLINE void __DMB() { __ASM volatile ("dmb"); }

/**
 * @brief Reverse byte order (32 bit)
 *
 * @param value value to reverse
 * @return reversed value
 *
 * Reverse byte order in integer value
 */
static __INLINE uint32_t __REV(uint32_t value)
{
  uint32_t result=0;

  __ASM volatile ("rev %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}
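
/*
 * Note on the GNU implementations in this section (explanatory, not part of
 * the original comments): in the extended asm statements, %0 and %1 refer to
 * the operands listed after the instruction template; "=r" requests a
 * general-purpose register that receives the result, and "r" requests a
 * register holding the input value or address.
 */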

/**
 * @brief Reverse byte order (16 bit)
 *
 * @param value value to reverse
 * @return reversed value
 *
 * Reverse byte order in unsigned short value
 */
static __INLINE uint32_t __REV16(uint16_t value)
{
  uint32_t result=0;

  __ASM volatile ("rev16 %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}

/**
 * @brief Reverse byte order in signed short value with sign extension to integer
 *
 * @param value value to reverse
 * @return reversed value
 *
 * Reverse byte order in signed short value with sign extension to integer
 */
static __INLINE int32_t __REVSH(int16_t value)
{
  uint32_t result=0;

  __ASM volatile ("revsh %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}

#if (__CORTEX_M >= 0x03)

/**
 * @brief Reverse bit order of value
 *
 * @param value value to reverse
 * @return reversed value
 *
 * Reverse bit order of value
 */
static __INLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result=0;

  __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}

/**
 * @brief LDR Exclusive (8 bit)
 *
 * @param *addr address pointer
 * @return value of (*address)
 *
 * Exclusive LDR command for 8 bit value
 */
static __INLINE uint8_t __LDREXB(uint8_t *addr)
{
  uint8_t result=0;

  __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) );
  return(result);
}

/**
 * @brief LDR Exclusive (16 bit)
 *
 * @param *addr address pointer
 * @return value of (*address)
 *
 * Exclusive LDR command for 16 bit values
 */
static __INLINE uint16_t __LDREXH(uint16_t *addr)
{
  uint16_t result=0;

  __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) );
  return(result);
}

/**
 * @brief LDR Exclusive (32 bit)
 *
 * @param *addr address pointer
 * @return value of (*address)
 *
 * Exclusive LDR command for 32 bit values
 */
static __INLINE uint32_t __LDREXW(uint32_t *addr)
{
  uint32_t result=0;

  __ASM volatile ("ldrex %0, [%1]" : "=r" (result) : "r" (addr) );
  return(result);
}

/**
 * @brief STR Exclusive (8 bit)
 *
 * @param value value to store
 * @param *addr address pointer
 * @return successful / failed
 *
 * Exclusive STR command for 8 bit values
 */
static __INLINE uint32_t __STREXB(uint8_t value, uint8_t *addr)
{
  uint32_t result=0;

  __ASM volatile ("strexb %0, %2, [%1]" : "=r" (result) : "r" (addr), "r" (value) );
  return(result);
}

/**
 * @brief STR Exclusive (16 bit)
 *
 * @param value value to store
 * @param *addr address pointer
 * @return successful / failed
 *
 * Exclusive STR command for 16 bit values
 */
static __INLINE uint32_t __STREXH(uint16_t value, uint16_t *addr)
{
  uint32_t result=0;

  __ASM volatile ("strexh %0, %2, [%1]" : "=r" (result) : "r" (addr), "r" (value) );
  return(result);
}

/**
 * @brief STR Exclusive (32 bit)
 *
 * @param value value to store
 * @param *addr address pointer
 * @return successful / failed
 *
 * Exclusive STR command for 32 bit values
 */
static __INLINE uint32_t __STREXW(uint32_t value, uint32_t *addr)
{
  uint32_t result=0;

  __ASM volatile ("strex %0, %2, [%1]" : "=r" (result) : "r" (addr), "r" (value) );
  return(result);
}

/**
 * @brief Remove the exclusive lock created by ldrex
 *
 * Removes the exclusive lock which is created by ldrex.
 */
static __INLINE void __CLREX() { __ASM volatile ("clrex"); }

#endif /* (__CORTEX_M >= 0x03) */


#elif (defined (__TASKING__)) /*--------------- TASKING Compiler -----------------*/
/* TASKING carm specific functions */

/*
 * The CMSIS functions have been implemented as intrinsics in the compiler.
 * Please use "carm -?i" to get an up-to-date list of all intrinsics,
 * including the CMSIS ones.
 */

#endif

#endif /* __CORE_CMINSTR_H__ */