/**************************************************************************//**
* @file cmsis_armclang_ltm.h
* @brief CMSIS compiler armclang (Arm Compiler 6) header file
* @version V1.2.0
* @date 08. May 2019
******************************************************************************/
/*
* Copyright (c) 2018-2019 Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the License); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*lint -esym(9058, IRQn)*/ /* disable MISRA 2012 Rule 2.4 for IRQn */
#ifndef __CMSIS_ARMCLANG_H
#define __CMSIS_ARMCLANG_H
#pragma clang system_header /* treat file as system include file */
#ifndef __ARM_COMPAT_H
#include <arm_compat.h> /* Compatibility header for Arm Compiler 5 intrinsics */
#endif
/* CMSIS compiler specific defines */
#ifndef __ASM
#define __ASM __asm
#endif
#ifndef __INLINE
#define __INLINE __inline
#endif
#ifndef __STATIC_INLINE
#define __STATIC_INLINE static __inline
#endif
#ifndef __STATIC_FORCEINLINE
#define __STATIC_FORCEINLINE __attribute__((always_inline)) static __inline
#endif
#ifndef __NO_RETURN
#define __NO_RETURN __attribute__((__noreturn__))
#endif
#ifndef __USED
#define __USED __attribute__((used))
#endif
#ifndef __WEAK
#define __WEAK __attribute__((weak))
#endif
#ifndef __PACKED
#define __PACKED __attribute__((packed, aligned(1)))
#endif
#ifndef __PACKED_STRUCT
#define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
#endif
#ifndef __PACKED_UNION
#define __PACKED_UNION union __attribute__((packed, aligned(1)))
#endif
#ifndef __UNALIGNED_UINT32 /* deprecated */
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32 */
struct __attribute__((packed)) T_UINT32 { uint32_t v; };
#pragma clang diagnostic pop
#define __UNALIGNED_UINT32(x) (((struct T_UINT32 *)(x))->v)
#endif
#ifndef __UNALIGNED_UINT16_WRITE
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT16_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_WRITE */
__PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
#pragma clang diagnostic pop
#define __UNALIGNED_UINT16_WRITE(addr, val) (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT16_READ
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT16_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT16_READ */
__PACKED_STRUCT T_UINT16_READ { uint16_t v; };
#pragma clang diagnostic pop
#define __UNALIGNED_UINT16_READ(addr) (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef __UNALIGNED_UINT32_WRITE
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32_WRITE)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_WRITE */
__PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
#pragma clang diagnostic pop
#define __UNALIGNED_UINT32_WRITE(addr, val) (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef __UNALIGNED_UINT32_READ
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpacked"
/*lint -esym(9058, T_UINT32_READ)*/ /* disable MISRA 2012 Rule 2.4 for T_UINT32_READ */
__PACKED_STRUCT T_UINT32_READ { uint32_t v; };
#pragma clang diagnostic pop
#define __UNALIGNED_UINT32_READ(addr) (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
#ifndef __ALIGNED
#define __ALIGNED(x) __attribute__((aligned(x)))
#endif
#ifndef __RESTRICT
#define __RESTRICT __restrict
#endif
#ifndef __COMPILER_BARRIER
#define __COMPILER_BARRIER() __ASM volatile("":::"memory")
#endif
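/* Usage sketch (illustrative, not part of CMSIS): the __UNALIGNED_* helpers
 * access halfwords/words at addresses that need not be naturally aligned,
 * e.g. when parsing a byte-packed frame, and __COMPILER_BARRIER() keeps the
 * compiler from reordering memory accesses across it.
 *
 *   uint8_t frame[8];                                  // hypothetical buffer
 *   __UNALIGNED_UINT16_WRITE(&frame[1], 0xABCDU);      // halfword at odd offset
 *   uint32_t id = __UNALIGNED_UINT32_READ(&frame[3]);  // word at unaligned offset
 *   __COMPILER_BARRIER();                              // no compiler reordering past here
 */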
/* ######################### Startup and Lowlevel Init ######################## */
#ifndef __PROGRAM_START
#define __PROGRAM_START __main
#endif
#ifndef __INITIAL_SP
#define __INITIAL_SP Image$$ARM_LIB_STACK$$ZI$$Limit
#endif
#ifndef __STACK_LIMIT
#define __STACK_LIMIT Image$$ARM_LIB_STACK$$ZI$$Base
#endif
#ifndef __VECTOR_TABLE
#define __VECTOR_TABLE __Vectors
#endif
#ifndef __VECTOR_TABLE_ATTRIBUTE
#define __VECTOR_TABLE_ATTRIBUTE __attribute((used, section("RESET")))
#endif
/* ########################### Core Function Access ########################### */
/** \ingroup CMSIS_Core_FunctionInterface
\defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
@{
*/
/**
\brief Enable IRQ Interrupts
\details Enables IRQ interrupts by clearing the I-bit in the CPSR.
Can only be executed in Privileged modes.
*/
/* intrinsic void __enable_irq(); see arm_compat.h */
/**
\brief Disable IRQ Interrupts
\details Disables IRQ interrupts by setting the I-bit in the CPSR.
Can only be executed in Privileged modes.
*/
/* intrinsic void __disable_irq(); see arm_compat.h */
/**
\brief Get Control Register
\details Returns the content of the Control Register.
\return Control Register value
*/
__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, control" : "=r" (result) );
  return(result);
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
\brief Get Control Register (non-secure)
\details Returns the content of the non-secure Control Register when in secure state.
\return non-secure Control Register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
  return(result);
}
#endif
/**
\brief Set Control Register
\details Writes the given value to the Control Register.
\param [in] control Control Register value to set
*/
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
}
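/* Example (illustrative only): drop the current thread to unprivileged
 * execution by setting CONTROL.nPRIV; an ISB makes the change take effect
 * before subsequent instructions.
 *
 *   uint32_t ctrl = __get_CONTROL();
 *   __set_CONTROL(ctrl | 0x1U);   // CONTROL.nPRIV = 1 -> unprivileged thread mode
 *   __ISB();
 */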
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
\brief Set Control Register (non-secure)
\details Writes the given value to the non-secure Control Register when in secure state.
\param [in] control Control Register value to set
*/
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
}
#endif
/**
\brief Get IPSR Register
\details Returns the content of the IPSR Register.
\return IPSR Register value
*/
__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
  return(result);
}
/**
\brief Get APSR Register
\details Returns the content of the APSR Register.
\return APSR Register value
*/
__STATIC_FORCEINLINE uint32_t __get_APSR(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
  return(result);
}
/**
\brief Get xPSR Register
\details Returns the content of the xPSR Register.
\return xPSR Register value
*/
__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
  return(result);
}
/**
\brief Get Process Stack Pointer
\details Returns the current value of the Process Stack Pointer (PSP).
\return PSP Register value
*/
__STATIC_FORCEINLINE uint32_t __get_PSP(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, psp" : "=r" (result) );
  return(result);
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
\brief Get Process Stack Pointer (non-secure)
\details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state.
\return PSP Register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
  return(result);
}
#endif
/**
\brief Set Process Stack Pointer
\details Assigns the given value to the Process Stack Pointer (PSP).
\param [in] topOfProcStack Process Stack Pointer value to set
*/
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}
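/* Example (illustrative only): an RTOS port typically points PSP at the top
 * of a task stack and then selects the process stack for thread mode.
 *
 *   extern uint32_t task_stack[256];            // hypothetical task stack
 *   __set_PSP((uint32_t)&task_stack[256]);      // full-descending stack top
 *   __set_CONTROL(__get_CONTROL() | 0x2U);      // CONTROL.SPSEL = 1 -> use PSP
 *   __ISB();
 */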
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
\brief Set Process Stack Pointer (non-secure)
\details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state.
\param [in] topOfProcStack Process Stack Pointer value to set
*/
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
#endif
/**
\brief Get Main Stack Pointer
\details Returns the current value of the Main Stack Pointer (MSP).
\return MSP Register value
*/
__STATIC_FORCEINLINE uint32_t __get_MSP(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, msp" : "=r" (result) );
  return(result);
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
\brief Get Main Stack Pointer (non-secure)
\details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state.
\return MSP Register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
  return(result);
}
#endif
/**
\brief Set Main Stack Pointer
\details Assigns the given value to the Main Stack Pointer (MSP).
\param [in] topOfMainStack Main Stack Pointer value to set
*/
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
\brief Set Main Stack Pointer (non-secure)
\details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state.
\param [in] topOfMainStack Main Stack Pointer value to set
*/
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
#endif
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
\brief Get Stack Pointer (non-secure)
\details Returns the current value of the non-secure Stack Pointer (SP) when in secure state.
\return SP Register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
  return(result);
}
/**
\brief Set Stack Pointer (non-secure)
\details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state.
\param [in] topOfStack Stack Pointer value to set
*/
__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
#endif
/**
\brief Get Priority Mask
\details Returns the current state of the priority mask bit from the Priority Mask Register.
\return Priority Mask value
*/
__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, primask" : "=r" (result) );
  return(result);
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
\brief Get Priority Mask (non-secure)
\details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state.
\return Priority Mask value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
  return(result);
}
#endif
/**
\brief Set Priority Mask
\details Assigns the given value to the Priority Mask Register.
\param [in] priMask Priority Mask
*/
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}
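/* Example (illustrative only): a short critical section that saves and
 * restores the previous PRIMASK state instead of unconditionally re-enabling
 * interrupts on exit.
 *
 *   uint32_t primask = __get_PRIMASK();
 *   __disable_irq();
 *   // ... update data shared with interrupt handlers ...
 *   __set_PRIMASK(primask);       // restore the caller's masking state
 */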
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
\brief Set Priority Mask (non-secure)
\details Assigns the given value to the non-secure Priority Mask Register when in secure state.
\param [in] priMask Priority Mask
*/
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
#endif
#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
(defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
/**
\brief Enable FIQ
\details Enables FIQ interrupts by clearing the F-bit in the CPSR.
Can only be executed in Privileged modes.
*/
#define __enable_fault_irq __enable_fiq /* see arm_compat.h */
/**
\brief Disable FIQ
\details Disables FIQ interrupts by setting the F-bit in the CPSR.
Can only be executed in Privileged modes.
*/
#define __disable_fault_irq __disable_fiq /* see arm_compat.h */
/**
\brief Get Base Priority
\details Returns the current value of the Base Priority register.
\return Base Priority register value
*/
__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
  return(result);
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
\brief Get Base Priority (non-secure)
\details Returns the current value of the non-secure Base Priority register when in secure state.
\return Base Priority register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
  return(result);
}
#endif
/**
\brief Set Base Priority
\details Assigns the given value to the Base Priority register.
\param [in] basePri Base Priority value to set
*/
__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
{
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
\brief Set Base Priority (non-secure)
\details Assigns the given value to the non-secure Base Priority register when in secure state.
\param [in] basePri Base Priority value to set
*/
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
#endif
/**
\brief Set Base Priority with condition
\details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
or the new value increases the BASEPRI priority level.
\param [in] basePri Base Priority value to set
*/
__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
}
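/* Example (illustrative only): mask interrupts of priority 0x40 and lower
 * (numerically greater values) while leaving more urgent ones enabled; the
 * value is assumed to be already shifted for the implemented priority bits.
 *
 *   uint32_t basepri = __get_BASEPRI();
 *   __set_BASEPRI_MAX(0x40U);     // only raises the masking level, never lowers it
 *   // ... code that must not be preempted by low-priority interrupts ...
 *   __set_BASEPRI(basepri);       // restore the previous level
 */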
/**
\brief Get Fault Mask
\details Returns the current value of the Fault Mask register.
\return Fault Mask register value
*/
__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
  return(result);
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
\brief Get Fault Mask (non-secure)
\details Returns the current value of the non-secure Fault Mask register when in secure state.
\return Fault Mask register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
  return(result);
}
#endif
/**
\brief Set Fault Mask
\details Assigns the given value to the Fault Mask register.
\param [in] faultMask Fault Mask value to set
*/
__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
\brief Set Fault Mask (non-secure)
\details Assigns the given value to the non-secure Fault Mask register when in secure state.
\param [in] faultMask Fault Mask value to set
*/
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
#endif
#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
(defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
(defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
/**
\brief Get Process Stack Pointer Limit
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
Stack Pointer Limit register, hence zero is always returned in non-secure
state.
\details Returns the current value of the Process Stack Pointer Limit (PSPLIM).
\return PSPLIM Register value
*/
__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
(!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim" : "=r" (result) );
  return result;
#endif
}
#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/**
\brief Get Process Stack Pointer Limit (non-secure)
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
Stack Pointer Limit register, hence zero is always returned in non-secure
state.
\details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
\return PSPLIM Register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif
/**
\brief Set Process Stack Pointer Limit
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
Stack Pointer Limit register, hence the write is silently ignored in non-secure
state.
\details Assigns the given value to the Process Stack Pointer Limit (PSPLIM).
\param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set
*/
__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
(!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
\brief Set Process Stack Pointer Limit (non-secure)
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
Stack Pointer Limit register, hence the write is silently ignored in non-secure
state.
\details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
\param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set
*/
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
#endif
}
#endif
/**
\brief Get Main Stack Pointer Limit
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
Stack Pointer Limit register, hence zero is always returned.
\details Returns the current value of the Main Stack Pointer Limit (MSPLIM).
\return MSPLIM Register value
*/
__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
(!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
  return result;
#endif
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
\brief Get Main Stack Pointer Limit (non-secure)
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
Stack Pointer Limit register, hence zero is always returned.
\details Returns the current value of the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
\return MSPLIM Register value
*/
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif
/**
\brief Set Main Stack Pointer Limit
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
Stack Pointer Limit register, hence the write is silently ignored.
\details Assigns the given value to the Main Stack Pointer Limit (MSPLIM).
\param [in] MainStackPtrLimit Main Stack Pointer Limit value to set
*/
__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
(!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif
}
#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
\brief Set Main Stack Pointer Limit (non-secure)
Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
Stack Pointer Limit register, hence the write is silently ignored.
\details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
\param [in] MainStackPtrLimit Main Stack Pointer Limit value to set
*/
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
#endif
#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
(defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
/**
\brief Get FPSCR
\details Returns the current value of the Floating Point Status/Control register.
\return Floating Point Status/Control register value
*/
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
(defined (__FPU_USED ) && (__FPU_USED == 1U)) )
#define __get_FPSCR (uint32_t)__builtin_arm_get_fpscr
#else
#define __get_FPSCR() ((uint32_t)0U)
#endif
/**
\brief Set FPSCR
\details Assigns the given value to the Floating Point Status/Control register.
\param [in] fpscr Floating Point Status/Control value to set
*/
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
(defined (__FPU_USED ) && (__FPU_USED == 1U)) )
#define __set_FPSCR __builtin_arm_set_fpscr
#else
#define __set_FPSCR(x) ((void)(x))
#endif
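/* Example (illustrative only, FPU present and used): clear the cumulative
 * floating-point exception flags held in the low byte of FPSCR.
 *
 *   uint32_t fpscr = __get_FPSCR();
 *   __set_FPSCR(fpscr & ~0xFFU);
 */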
/*@} end of CMSIS_Core_RegAccFunctions */
/* ########################## Core Instruction Access ######################### */
/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
Access to dedicated instructions
@{
*/
/* Define macros for porting to both thumb1 and thumb2.
* For thumb1, use low register (r0-r7), specified by constraint "l"
* Otherwise, use general registers, specified by constraint "r" */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif
/**
\brief No Operation
\details No Operation does nothing. This instruction can be used for code alignment purposes.
*/
#define __NOP __builtin_arm_nop
/**
\brief Wait For Interrupt
\details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
*/
#define __WFI __builtin_arm_wfi
/**
\brief Wait For Event
\details Wait For Event is a hint instruction that permits the processor to enter
a low-power state until one of a number of events occurs.
*/
#define __WFE __builtin_arm_wfe
/**
\brief Send Event
\details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
*/
#define __SEV __builtin_arm_sev
/**
\brief Instruction Synchronization Barrier
\details Instruction Synchronization Barrier flushes the pipeline in the processor,
so that all instructions following the ISB are fetched from cache or memory,
after the instruction has been completed.
*/
#define __ISB() __builtin_arm_isb(0xF)
/**
\brief Data Synchronization Barrier
\details Acts as a special kind of Data Memory Barrier.
It completes when all explicit memory accesses before this instruction complete.
*/
#define __DSB() __builtin_arm_dsb(0xF)
/**
\brief Data Memory Barrier
\details Ensures the apparent order of the explicit memory operations before
and after the instruction, without ensuring their completion.
*/
#define __DMB() __builtin_arm_dmb(0xF)
/**
\brief Reverse byte order (32 bit)
\details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
\param [in] value Value to reverse
\return Reversed value
*/
#define __REV(value) __builtin_bswap32(value)
/**
\brief Reverse byte order (16 bit)
\details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
\param [in] value Value to reverse
\return Reversed value
*/
#define __REV16(value) __ROR(__REV(value), 16)
/**
\brief Reverse byte order (16 bit)
\details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
\param [in] value Value to reverse
\return Reversed value
*/
#define __REVSH(value) (int16_t)__builtin_bswap16(value)
/**
\brief Rotate Right in unsigned value (32 bit)
\details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
\param [in] op1 Value to rotate
\param [in] op2 Number of Bits to rotate
\return Rotated value
*/
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  op2 %= 32U;
  if (op2 == 0U)
  {
    return op1;
  }
  return (op1 >> op2) | (op1 << (32U - op2));
}
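/* Example (illustrative only): __ROR(0x12345678U, 8U) yields 0x78123456U,
 * and a rotate count of 0 or 32 returns the value unchanged. */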
/**
\brief Breakpoint
\details Causes the processor to enter Debug state.
Debug tools can use this to investigate system state when the instruction at a particular address is reached.
\param [in] value is ignored by the processor.
If required, a debugger can use it to store additional information about the breakpoint.
*/
#define __BKPT(value) __ASM volatile ("bkpt "#value)
/**
\brief Reverse bit order of value
\details Reverses the bit order of the given value.
\param [in] value Value to reverse
\return Reversed value
*/
#define __RBIT __builtin_arm_rbit
/**
\brief Count leading zeros
\details Counts the number of leading zeros of a data value.
\param [in] value Value to count the leading zeros
\return number of leading zeros in value
*/
__STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value)
{
  /* Even though __builtin_clz produces a CLZ instruction on ARM, formally
     __builtin_clz(0) is undefined behaviour, so handle this case specially.
     This guarantees ARM-compatible results if happening to compile on a non-ARM
     target, and ensures the compiler doesn't decide to activate any
     optimisations using the logic "value was passed to __builtin_clz, so it
     is non-zero".
     ARM Compiler 6.10 and possibly earlier will optimise this test away, leaving a
     single CLZ instruction.
  */
  if (value == 0U)
  {
    return 32U;
  }
  return __builtin_clz(value);
}
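/* Example (illustrative only): __CLZ(0x00010000U) yields 15 and __CLZ(0U)
 * yields 32, so (31U - __CLZ(x)) gives the index of the highest set bit for
 * non-zero x. */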
#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
(defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
(defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
/**
\brief LDR Exclusive (8 bit)
\details Executes an exclusive LDR instruction for 8 bit value.
\param [in] ptr Pointer to data
\return value of type uint8_t at (*ptr)
*/
#define __LDREXB (uint8_t)__builtin_arm_ldrex
/**
\brief LDR Exclusive (16 bit)
\details Executes an exclusive LDR instruction for 16 bit values.
\param [in] ptr Pointer to data
\return value of type uint16_t at (*ptr)
*/
#define __LDREXH (uint16_t)__builtin_arm_ldrex
/**
\brief LDR Exclusive (32 bit)
\details Executes an exclusive LDR instruction for 32 bit values.
\param [in] ptr Pointer to data
\return value of type uint32_t at (*ptr)
*/
#define __LDREXW (uint32_t)__builtin_arm_ldrex
/**
\brief STR Exclusive (8 bit)
\details Executes an exclusive STR instruction for 8 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
\return 0 Function succeeded
\return 1 Function failed
*/
#define __STREXB (uint32_t)__builtin_arm_strex
/**
\brief STR Exclusive (16 bit)
\details Executes an exclusive STR instruction for 16 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
\return 0 Function succeeded
\return 1 Function failed
*/
#define __STREXH (uint32_t)__builtin_arm_strex
/**
\brief STR Exclusive (32 bit)
\details Executes an exclusive STR instruction for 32 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
\return 0 Function succeeded
\return 1 Function failed
*/
#define __STREXW (uint32_t)__builtin_arm_strex
/**
\brief Remove the exclusive lock
\details Removes the exclusive lock which is created by LDREX.
*/
#define __CLREX __builtin_arm_clrex
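/* Example (illustrative only): an atomic increment built from the exclusive
 * access intrinsics; the store fails (returns 1) if the reservation was lost.
 *
 *   static volatile uint32_t counter;          // hypothetical shared counter
 *   uint32_t v;
 *   do {
 *     v = __LDREXW(&counter);
 *   } while (__STREXW(v + 1U, &counter) != 0U);
 */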
#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
(defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
(defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
(defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) )
/**
\brief Signed Saturate
\details Saturates a signed value.
\param [in] value Value to be saturated
\param [in] sat Bit position to saturate to (1..32)
\return Saturated value
*/
#define __SSAT __builtin_arm_ssat
/**
\brief Unsigned Saturate
\details Saturates an unsigned value.
\param [in] value Value to be saturated
\param [in] sat Bit position to saturate to (0..31)
\return Saturated value
*/
#define __USAT __builtin_arm_usat
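/* Example (illustrative only): with the saturation instructions available,
 * __SSAT(200, 8) yields 127 (range -128..127) and __USAT(-5, 8) yields 0;
 * the saturation bit position must be a compile-time constant. */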
/**
\brief Rotate Right with Extend (32 bit)
\details Moves each bit of a bitstring right by one bit.
The carry input is shifted in at the left end of the bitstring.
\param [in] value Value to rotate
\return Rotated value
*/
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;
  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}
/**
\brief LDRT Unprivileged (8 bit)
\details Executes an Unprivileged LDRT instruction for 8 bit value.
\param [in] ptr Pointer to data
\return value of type uint8_t at (*ptr)
*/
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;
  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result); /* Add explicit type cast here */
}
/**
\brief LDRT Unprivileged (16 bit)
\details Executes an Unprivileged LDRT instruction for 16 bit values.
\param [in] ptr Pointer to data
\return value of type uint16_t at (*ptr)
*/
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;
  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result); /* Add explicit type cast here */
}
/**
\brief LDRT Unprivileged (32 bit)
\details Executes an Unprivileged LDRT instruction for 32 bit values.
\param [in] ptr Pointer to data
\return value of type uint32_t at (*ptr)
*/
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;
  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}
/**
\brief STRT Unprivileged (8 bit)
\details Executes an Unprivileged STRT instruction for 8 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
*/
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
/**
\brief STRT Unprivileged (16 bit)
\details Executes an Unprivileged STRT instruction for 16 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
*/
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
/**
\brief STRT Unprivileged (32 bit)
\details Executes an Unprivileged STRT instruction for 32 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
*/
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}
#else /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
(defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
/**
\brief Signed Saturate
\details Saturates a signed value.
\param [in] value Value to be saturated
\param [in] sat Bit position to saturate to (1..32)
\return Saturated value
*/
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
  if ((sat >= 1U) && (sat <= 32U))
  {
    const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
    const int32_t min = -1 - max;
    if (val > max)
    {
      return max;
    }
    else if (val < min)
    {
      return min;
    }
  }
  return val;
}
/**
\brief Unsigned Saturate
\details Saturates an unsigned value.
\param [in] value Value to be saturated
\param [in] sat Bit position to saturate to (0..31)
\return Saturated value
*/
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
  if (sat <= 31U)
  {
    const uint32_t max = ((1U << sat) - 1U);
    if (val > (int32_t)max)
    {
      return max;
    }
    else if (val < 0)
    {
      return 0U;
    }
  }
  return (uint32_t)val;
}
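/* Example (illustrative only): the C fall-back versions above behave the same
 * way, e.g. __SSAT(300, 9) yields 255 (range -256..255) and __USAT(-1, 16)
 * yields 0. */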
#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \
(defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \
(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */
#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
(defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) )
/**
\brief Load-Acquire (8 bit)
\details Executes a LDAB instruction for 8 bit value.
\param [in] ptr Pointer to data
\return value of type uint8_t at (*ptr)
*/
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;
  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);
}
/**
\brief Load-Acquire (16 bit)
\details Executes a LDAH instruction for 16 bit values.
\param [in] ptr Pointer to data
\return value of type uint16_t at (*ptr)
*/
__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;
  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);
}
/**
\brief Load-Acquire (32 bit)
\details Executes a LDA instruction for 32 bit values.
\param [in] ptr Pointer to data
\return value of type uint32_t at (*ptr)
*/
__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;
  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}
/**
\brief Store-Release (8 bit)
\details Executes a STLB instruction for 8 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
*/
__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
/**
\brief Store-Release (16 bit)
\details Executes a STLH instruction for 16 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
*/
__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
/**
\brief Store-Release (32 bit)
\details Executes a STL instruction for 32 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
*/
__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}
/**
\brief Load-Acquire Exclusive (8 bit)
\details Executes a LDAB exclusive instruction for 8 bit value.
\param [in] ptr Pointer to data
\return value of type uint8_t at (*ptr)
*/
#define __LDAEXB (uint8_t)__builtin_arm_ldaex
/**
\brief Load-Acquire Exclusive (16 bit)
\details Executes a LDAH exclusive instruction for 16 bit values.
\param [in] ptr Pointer to data
\return value of type uint16_t at (*ptr)
*/
#define __LDAEXH (uint16_t)__builtin_arm_ldaex
/**
\brief Load-Acquire Exclusive (32 bit)
\details Executes a LDA exclusive instruction for 32 bit values.
\param [in] ptr Pointer to data
\return value of type uint32_t at (*ptr)
*/
#define __LDAEX (uint32_t)__builtin_arm_ldaex
/**
\brief Store-Release Exclusive (8 bit)
\details Executes a STLB exclusive instruction for 8 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
\return 0 Function succeeded
\return 1 Function failed
*/
#define __STLEXB (uint32_t)__builtin_arm_stlex
/**
\brief Store-Release Exclusive (16 bit)
\details Executes a STLH exclusive instruction for 16 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
\return 0 Function succeeded
\return 1 Function failed
*/
#define __STLEXH (uint32_t)__builtin_arm_stlex
/**
\brief Store-Release Exclusive (32 bit)
\details Executes a STL exclusive instruction for 32 bit values.
\param [in] value Value to store
\param [in] ptr Pointer to location
\return 0 Function succeeded
\return 1 Function failed
*/
#define __STLEX (uint32_t)__builtin_arm_stlex
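/* Example (illustrative only): a producer can publish data and then release a
 * flag with a store-release, so a consumer that reads the flag with a
 * load-acquire also observes the data.
 *
 *   shared_data = value;                 // hypothetical shared variables
 *   __STL(1U, &ready);                   // store-release: data visible before flag
 *   ...
 *   while (__LDA(&ready) == 0U) { }      // load-acquire on the consumer side
 *   consume(shared_data);
 */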
#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
(defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */
/*@}*/ /* end of group CMSIS_Core_InstructionInterface */
/* ################### Compiler specific Intrinsics ########################### */
/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
Access to dedicated SIMD instructions
@{
*/
#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))
__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
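/* Example (illustrative only): the byte-wise SIMD additions operate on four
 * lanes in parallel, e.g. __UADD8(0x01020304U, 0x01010101U) yields
 * 0x02030405U; the saturating/halving variants below differ only in how each
 * lane handles overflow. */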
  1161. __STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
  1162. {
  1163. uint32_t result;
  1164. __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  1165. return(result);
  1166. }
  1167. __STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
  1168. {
  1169. uint32_t result;
  1170. __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  1171. return(result);
  1172. }
  1173. __STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
  1174. {
  1175. uint32_t result;
  1176. __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  1177. return(result);
  1178. }
  1179. __STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
  1180. {
  1181. uint32_t result;
  1182. __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  1183. return(result);
  1184. }
  1185. __STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
  1186. {
  1187. uint32_t result;
  1188. __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  1189. return(result);
  1190. }
  1191. __STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
  1192. {
  1193. uint32_t result;
  1194. __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  1195. return(result);
  1196. }
  1197. __STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
  1198. {
  1199. uint32_t result;
  1200. __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  1201. return(result);
  1202. }
  1203. __STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
  1204. {
  1205. uint32_t result;
  1206. __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  1207. return(result);
  1208. }
__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
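
/* Parallel 16-bit add/subtract with exchange (ASX/SAX): the halfwords of the
   second operand are swapped, then one halfword pair is added and the other
   subtracted. */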
__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;
  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
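
/* __SSAT16/__USAT16 are macros rather than inline functions because the
   saturation bound (ARG2) must be an instruction immediate; the "I"
   constraint passes it to ssat16/usat16 as a constant, which a runtime
   function parameter could not guarantee. */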
#define __SSAT16(ARG1,ARG2) \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
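
/* Packed byte extend: uxtb16/sxtb16 zero- or sign-extend bytes 0 and 2 of the
   operand into two halfwords; the *AB16 forms add the extended bytes of the
   second operand to the halfwords of the first. */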
__STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;
  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;
  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
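
/* Dual 16x16 multiplies: SMUAD/SMUSD form the sum/difference of the two
   halfword products, the X variants exchange the halfwords of the second
   operand first, and the SMLAD/SMLALD (and SMLSD/SMLSLD) forms accumulate
   into a 32-bit or 64-bit accumulator. */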
__STATIC_FORCEINLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;
  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;
  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
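
/* The long (64-bit) accumulator variants split the accumulator into two
   32-bit register halves through a union; which half maps to the low
   register depends on byte order, hence the __ARMEB__ (big-endian)
   alternative in the asm below. */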
__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;
  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;
  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint32_t __SEL (uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE int32_t __QADD( int32_t op1, int32_t op2)
{
  int32_t result;
  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE int32_t __QSUB( int32_t op1, int32_t op2)
{
  int32_t result;
  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

#define __PKHBT(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) | \
                                  ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL) )

#define __PKHTB(ARG1,ARG2,ARG3) ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) | \
                                  ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL) )
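
/* Note: unlike most intrinsics in this group, __PKHBT/__PKHTB are expressed
   as plain C shift-and-mask operations rather than inline assembly, leaving
   instruction selection to the compiler. */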

__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;
  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
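
/* Usage sketch (illustrative only, not part of the CMSIS API): a typical use
   of the dual-multiply intrinsics is a packed q15 dot product.  The helper
   below is hypothetical; it assumes an even sample count and uses memcpy
   (<string.h>) to load two adjacent int16_t samples into one 32-bit word
   without aliasing or alignment concerns:

     static inline int64_t dot_q15(const int16_t *a, const int16_t *b, uint32_t len)
     {
       uint64_t acc = 0U;
       for (uint32_t i = 0U; i < len; i += 2U)
       {
         uint32_t va, vb;
         memcpy(&va, &a[i], sizeof(va));
         memcpy(&vb, &b[i], sizeof(vb));
         acc = __SMLALD(va, vb, acc);   // acc += a[i]*b[i] + a[i+1]*b[i+1]
       }
       return (int64_t)acc;
     }
*/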

#endif /* (__ARM_FEATURE_DSP == 1) */
/*@} end of group CMSIS_SIMD_intrinsics */

#endif /* __CMSIS_ARMCLANG_H */