STM32F769IDiscovery  1.00
uDANTE Audio Networking with STM32F7 DISCO board
cmsis_armcc_V6.h
/**************************************************************************//**
 * @file     cmsis_armcc_V6.h
 * @brief    CMSIS Cortex-M Core Function/Instruction Header File
 ******************************************************************************/
/* Copyright (c) 2009 - 2015 ARM LIMITED

   All rights reserved.
   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions are met:
   - Redistributions of source code must retain the above copyright
     notice, this list of conditions and the following disclaimer.
   - Redistributions in binary form must reproduce the above copyright
     notice, this list of conditions and the following disclaimer in the
     documentation and/or other materials provided with the distribution.
   - Neither the name of ARM nor the names of its contributors may be used
     to endorse or promote products derived from this software without
     specific prior written permission.
   *
   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
   AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
   IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
   ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
   LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
   CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
   SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
   CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
   ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
   POSSIBILITY OF SUCH DAMAGE.
   ---------------------------------------------------------------------------*/


#ifndef __CMSIS_ARMCC_V6_H
#define __CMSIS_ARMCC_V6_H

/* ########################### Core Function Access ########################### */

/** \brief  Enable IRQ Interrupts (clear PRIMASK) */
__attribute__((always_inline)) __STATIC_INLINE void __enable_irq(void)
{
  __ASM volatile ("cpsie i" : : : "memory");
}


/** \brief  Disable IRQ Interrupts (set PRIMASK) */
__attribute__((always_inline)) __STATIC_INLINE void __disable_irq(void)
{
  __ASM volatile ("cpsid i" : : : "memory");
}
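

/* Illustrative sketch (not part of CMSIS): a simple critical section built
   from the two intrinsics above. Assumes interrupts were enabled on entry;
   see the PRIMASK save/restore variant further below for nested use. */
#if 0
static volatile uint32_t tick_count;

void tick_increment(void)
{
  __disable_irq();                 /* cpsid i: block all maskable IRQs */
  tick_count++;                    /* read-modify-write is now atomic  */
  __enable_irq();                  /* cpsie i: re-enable interrupts    */
}
#endif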


/** \brief  Get Control Register */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_CONTROL(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control" : "=r" (result) );
  return(result);
}


#if (__ARM_FEATURE_CMSE == 3U)

/** \brief  Get Control Register (non-secure) */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_CONTROL_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
  return(result);
}
#endif


/** \brief  Set Control Register */
__attribute__((always_inline)) __STATIC_INLINE void __set_CONTROL(uint32_t control)
{
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
}


#if (__ARM_FEATURE_CMSE == 3U)

/** \brief  Set Control Register (non-secure) */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
}
#endif


/** \brief  Get IPSR Register */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_IPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
  return(result);
}


#if (__ARM_FEATURE_CMSE == 3U)

/** \brief  Get IPSR Register (non-secure) */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_IPSR_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, ipsr_ns" : "=r" (result) );
  return(result);
}
#endif


/** \brief  Get APSR Register */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_APSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
  return(result);
}


#if (__ARM_FEATURE_CMSE == 3U)

/** \brief  Get APSR Register (non-secure) */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_APSR_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, apsr_ns" : "=r" (result) );
  return(result);
}
#endif


/** \brief  Get xPSR Register */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_xPSR(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
  return(result);
}


#if (__ARM_FEATURE_CMSE == 3U)

/** \brief  Get xPSR Register (non-secure) */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_xPSR_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, xpsr_ns" : "=r" (result) );
  return(result);
}
#endif


/** \brief  Get Process Stack Pointer */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_PSP(void)
{
  register uint32_t result;

  __ASM volatile ("MRS %0, psp" : "=r" (result) );
  return(result);
}


#if (__ARM_FEATURE_CMSE == 3U)

/** \brief  Get Process Stack Pointer (non-secure) */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_PSP_NS(void)
{
  register uint32_t result;

  __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
  return(result);
}
#endif


/** \brief  Set Process Stack Pointer */
__attribute__((always_inline)) __STATIC_INLINE void __set_PSP(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : "sp");
}


#if (__ARM_FEATURE_CMSE == 3U)

/** \brief  Set Process Stack Pointer (non-secure) */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : "sp");
}
#endif


/** \brief  Get Main Stack Pointer */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_MSP(void)
{
  register uint32_t result;

  __ASM volatile ("MRS %0, msp" : "=r" (result) );
  return(result);
}


#if (__ARM_FEATURE_CMSE == 3U)

/** \brief  Get Main Stack Pointer (non-secure) */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_MSP_NS(void)
{
  register uint32_t result;

  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
  return(result);
}
#endif


/** \brief  Set Main Stack Pointer */
__attribute__((always_inline)) __STATIC_INLINE void __set_MSP(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : "sp");
}


#if (__ARM_FEATURE_CMSE == 3U)

/** \brief  Set Main Stack Pointer (non-secure) */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : "sp");
}
#endif


/** \brief  Get Priority Mask */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_PRIMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask" : "=r" (result) );
  return(result);
}


#if (__ARM_FEATURE_CMSE == 3U)

/** \brief  Get Priority Mask (non-secure) */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) );
  return(result);
}
#endif


/** \brief  Set Priority Mask */
__attribute__((always_inline)) __STATIC_INLINE void __set_PRIMASK(uint32_t priMask)
{
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}
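

/* Illustrative sketch (not part of CMSIS): a nesting-safe critical section.
   PRIMASK is saved before masking and restored afterwards, so interrupts are
   only re-enabled if they were enabled on entry. */
#if 0
static volatile uint32_t shared_counter;

uint32_t counter_add(uint32_t delta)
{
  uint32_t primask = __get_PRIMASK();   /* remember current mask state    */
  __disable_irq();                      /* mask all configurable IRQs     */
  shared_counter += delta;              /* protected read-modify-write    */
  __set_PRIMASK(primask);               /* restore: stays masked if the
                                           caller had already disabled    */
  return shared_counter;
}
#endif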


#if (__ARM_FEATURE_CMSE == 3U)

/** \brief  Set Priority Mask (non-secure) */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
#endif


#if ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U)) /* ToDo: ARMCC_V6: check if this is ok for cortex >=3 */

/** \brief  Enable FIQ (clear FAULTMASK) */
__attribute__((always_inline)) __STATIC_INLINE void __enable_fault_irq(void)
{
  __ASM volatile ("cpsie f" : : : "memory");
}


/** \brief  Disable FIQ (set FAULTMASK) */
__attribute__((always_inline)) __STATIC_INLINE void __disable_fault_irq(void)
{
  __ASM volatile ("cpsid f" : : : "memory");
}


/** \brief  Get Base Priority */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_BASEPRI(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
  return(result);
}


#if (__ARM_FEATURE_CMSE == 3U)

/** \brief  Get Base Priority (non-secure) */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
  return(result);
}
#endif


/** \brief  Set Base Priority */
__attribute__((always_inline)) __STATIC_INLINE void __set_BASEPRI(uint32_t value)
{
  __ASM volatile ("MSR basepri, %0" : : "r" (value) : "memory");
}


#if (__ARM_FEATURE_CMSE == 3U)

/** \brief  Set Base Priority (non-secure) */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_BASEPRI_NS(uint32_t value)
{
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (value) : "memory");
}
#endif


/** \brief  Set Base Priority with condition (only raises the masking level) */
__attribute__((always_inline)) __STATIC_INLINE void __set_BASEPRI_MAX(uint32_t value)
{
  __ASM volatile ("MSR basepri_max, %0" : : "r" (value) : "memory");
}
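

/* Illustrative sketch (not part of CMSIS): BASEPRI-based masking keeps
   high-priority interrupts running while lower-priority ones are held off.
   Assumes 4 implemented priority bits, as on STM32F7 (__NVIC_PRIO_BITS == 4),
   so the level must be shifted into the upper bits of the byte. */
#if 0
void audio_buffer_swap(void)
{
  uint32_t old = __get_BASEPRI();
  __set_BASEPRI_MAX(5U << (8U - 4U));   /* mask priority 5 and less urgent  */
  /* swap DMA buffer pointers here; IRQs with priority 0..4 still fire */
  __set_BASEPRI(old);                   /* restore previous masking level   */
}
#endif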


#if (__ARM_FEATURE_CMSE == 3U)

/** \brief  Set Base Priority with condition (non-secure) */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_BASEPRI_MAX_NS(uint32_t value)
{
  __ASM volatile ("MSR basepri_max_ns, %0" : : "r" (value) : "memory");
}
#endif


/** \brief  Get Fault Mask */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_FAULTMASK(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
  return(result);
}


#if (__ARM_FEATURE_CMSE == 3U)

/** \brief  Get Fault Mask (non-secure) */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t result;

  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
  return(result);
}
#endif


/** \brief  Set Fault Mask */
__attribute__((always_inline)) __STATIC_INLINE void __set_FAULTMASK(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}


#if (__ARM_FEATURE_CMSE == 3U)

/** \brief  Set Fault Mask (non-secure) */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
#endif


#endif /* ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U)) */


#if (__ARM_ARCH_8M__ == 1U)

/** \brief  Get Process Stack Pointer Limit */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_PSPLIM(void)
{
  register uint32_t result;

  __ASM volatile ("MRS %0, psplim" : "=r" (result) );
  return(result);
}


#if (__ARM_FEATURE_CMSE == 3U) && (__ARM_ARCH_PROFILE == 'M') /* ToDo: ARMCC_V6: check predefined macro for mainline */

/** \brief  Get Process Stack Pointer Limit (non-secure) */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
  register uint32_t result;

  __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) );
  return(result);
}
#endif


/** \brief  Set Process Stack Pointer Limit */
__attribute__((always_inline)) __STATIC_INLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
}


#if (__ARM_FEATURE_CMSE == 3U) && (__ARM_ARCH_PROFILE == 'M') /* ToDo: ARMCC_V6: check predefined macro for mainline */

/** \brief  Set Process Stack Pointer Limit (non-secure) */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
  __ASM volatile ("MSR psplim_ns, %0" : : "r" (ProcStackPtrLimit));
}
#endif


/** \brief  Get Main Stack Pointer Limit */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_MSPLIM(void)
{
  register uint32_t result;

  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
  return(result);
}


#if (__ARM_FEATURE_CMSE == 3U) && (__ARM_ARCH_PROFILE == 'M') /* ToDo: ARMCC_V6: check predefined macro for mainline */

/** \brief  Get Main Stack Pointer Limit (non-secure) */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
  register uint32_t result;

  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
  return(result);
}
#endif


/** \brief  Set Main Stack Pointer Limit */
__attribute__((always_inline)) __STATIC_INLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
}


#if (__ARM_FEATURE_CMSE == 3U) && (__ARM_ARCH_PROFILE == 'M') /* ToDo: ARMCC_V6: check predefined macro for mainline */

/** \brief  Set Main Stack Pointer Limit (non-secure) */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
}
#endif

#endif /* (__ARM_ARCH_8M__ == 1U) */


#if ((__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U)) /* ToDo: ARMCC_V6: check if this is ok for cortex >=4 */

/** \brief  Get FPSCR (floating-point status/control register) */
#define __get_FPSCR __builtin_arm_get_fpscr
#if 0
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_FPSCR(void)
{
#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
  uint32_t result;

  __ASM volatile (""); /* Empty asm statement works as a scheduling barrier */
  __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
  __ASM volatile ("");
  return(result);
#else
  return(0);
#endif
}
#endif

#if (__ARM_FEATURE_CMSE == 3U)

/** \brief  Get FPSCR (non-secure) */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __TZ_get_FPSCR_NS(void)
{
#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
  uint32_t result;

  __ASM volatile (""); /* Empty asm statement works as a scheduling barrier */
  __ASM volatile ("VMRS %0, fpscr_ns" : "=r" (result) );
  __ASM volatile ("");
  return(result);
#else
  return(0);
#endif
}
#endif


/** \brief  Set FPSCR (floating-point status/control register) */
#define __set_FPSCR __builtin_arm_set_fpscr
#if 0
__attribute__((always_inline)) __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
{
#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
  __ASM volatile (""); /* Empty asm statement works as a scheduling barrier */
  __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc");
  __ASM volatile ("");
#endif
}
#endif

#if (__ARM_FEATURE_CMSE == 3U)

/** \brief  Set FPSCR (non-secure) */
__attribute__((always_inline)) __STATIC_INLINE void __TZ_set_FPSCR_NS(uint32_t fpscr)
{
#if (__FPU_PRESENT == 1U) && (__FPU_USED == 1U)
  __ASM volatile (""); /* Empty asm statement works as a scheduling barrier */
  __ASM volatile ("VMSR fpscr_ns, %0" : : "r" (fpscr) : "vfpcc");
  __ASM volatile ("");
#endif
}
#endif

#endif /* ((__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U)) */


/* ########################## Core Instruction Access ######################### */

/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low registers (r0-r7), specified by constraint "l".
 * Otherwise, use general registers, specified by constraint "r". */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_USE_REG(r)  "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_USE_REG(r)  "r" (r)
#endif
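
/* Illustrative sketch (not part of CMSIS): how the constraint macros above are
   meant to be used inside a GCC-style inline asm statement. On Thumb-1 the
   operands are forced into r0-r7 ("l"); on Thumb-2 any core register ("r")
   is acceptable. The helper name my_rev is hypothetical. */
#if 0
__attribute__((always_inline)) __STATIC_INLINE uint32_t my_rev(uint32_t value)
{
  uint32_t result;

  /* 16-bit Thumb-1 REV encodes only low registers, hence the "l"/"r" split */
  __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}
#endif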

/** \brief  No Operation */
#define __NOP          __builtin_arm_nop

/** \brief  Wait For Interrupt */
#define __WFI          __builtin_arm_wfi


/** \brief  Wait For Event */
#define __WFE          __builtin_arm_wfe


/** \brief  Send Event */
#define __SEV          __builtin_arm_sev


/** \brief  Instruction Synchronization Barrier */
#define __ISB()        __builtin_arm_isb(0xF)

/** \brief  Data Synchronization Barrier */
#define __DSB()        __builtin_arm_dsb(0xF)


/** \brief  Data Memory Barrier */
#define __DMB()        __builtin_arm_dmb(0xF)


/** \brief  Reverse byte order (32 bit) */
#define __REV          __builtin_bswap32

/** \brief  Reverse byte order (16 bit) within each halfword of a word.
    Note: __builtin_bswap16 operates on a 16-bit value and so cannot implement
    REV16 semantics on a 32-bit operand; the inline-asm version is used. */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


/** \brief  Reverse byte order in signed short value */
__attribute__((always_inline)) __STATIC_INLINE int32_t __REVSH(int32_t value)
{
  int32_t result;

  __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


/** \brief  Rotate Right in unsigned value (32 bit) */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  op2 %= 32U;                    /* guard: a C shift by 32 is undefined */
  if (op2 == 0U)
  {
    return op1;
  }
  return (op1 >> op2) | (op1 << (32U - op2));
}


/** \brief  Breakpoint */
#define __BKPT(value)  __ASM volatile ("bkpt "#value)


/** \brief  Reverse bit order of value */
/* ToDo: ARMCC_V6: check if __builtin_arm_rbit is supported */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

#if ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U)) /* ToDo: ARMCC_V6: check if this is ok for cortex >=3 */
  __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
#else
  int32_t s = 4 /*sizeof(v)*/ * 8 - 1;   /* extra shift needed at end */

  result = value;                        /* r will be reversed bits of v; first get LSB of v */
  for (value >>= 1U; value; value >>= 1U)
  {
    result <<= 1U;
    result |= value & 1U;
    s--;
  }
  result <<= s;                          /* shift when v's highest bits are zero */
#endif
  return(result);
}
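
/* Worked example (illustrative, not part of CMSIS): __RBIT mirrors all 32 bits,
   e.g. __RBIT(0x00000001U) == 0x80000000U, and a palindromic pattern such as
   0x80000001U maps to itself. Combined with __CLZ below it yields a
   count-trailing-zeros idiom: ctz(x) == __CLZ(__RBIT(x)) for x != 0. */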


/** \brief  Count leading zeros */
#define __CLZ          __builtin_clz


#if ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U)) /* ToDo: ARMCC_V6: check if this is ok for cortex >=3 */

/** \brief  LDR Exclusive (8 bit) */
#define __LDREXB       (uint8_t)__builtin_arm_ldrex


/** \brief  LDR Exclusive (16 bit) */
#define __LDREXH       (uint16_t)__builtin_arm_ldrex


/** \brief  LDR Exclusive (32 bit) */
#define __LDREXW       (uint32_t)__builtin_arm_ldrex


/** \brief  STR Exclusive (8 bit); returns 0 on success, 1 otherwise */
#define __STREXB       (uint32_t)__builtin_arm_strex


/** \brief  STR Exclusive (16 bit); returns 0 on success, 1 otherwise */
#define __STREXH       (uint32_t)__builtin_arm_strex


/** \brief  STR Exclusive (32 bit); returns 0 on success, 1 otherwise */
#define __STREXW       (uint32_t)__builtin_arm_strex


/** \brief  Remove the exclusive lock */
#define __CLREX        __builtin_arm_clrex
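

/* Illustrative sketch (not part of CMSIS): the canonical load-exclusive /
   store-exclusive retry loop for a lock-free read-modify-write. __STREXW
   returns 0 only if nothing broke the exclusive monitor in between. */
#if 0
void atomic_add(volatile uint32_t *addr, uint32_t delta)
{
  uint32_t newval;

  do {
    newval = __LDREXW(addr) + delta;        /* load and mark exclusive   */
  } while (__STREXW(newval, addr) != 0U);   /* retry if monitor was lost */
}
#endif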


/** \brief  Signed Saturate */
/*#define __SSAT __builtin_arm_ssat*/
#define __SSAT(ARG1,ARG2) \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
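

/* Usage note (illustrative, not part of CMSIS): __SSAT(x, n) clamps a signed
   value to n bits, e.g. __SSAT(40000, 16) yields 32767 and __SSAT(-40000, 16)
   yields -32768 -- handy for narrowing 32-bit audio accumulators to int16_t. */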


/** \brief  Unsigned Saturate */
#define __USAT __builtin_arm_usat
#if 0
#define __USAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
#endif


/** \brief  Rotate Right with Extend (32 bit) */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


/** \brief  LDRT Unprivileged (8 bit) */
__attribute__((always_inline)) __STATIC_INLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);    /* Add explicit type cast here */
}


/** \brief  LDRT Unprivileged (16 bit) */
__attribute__((always_inline)) __STATIC_INLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);   /* Add explicit type cast here */
}


/** \brief  LDRT Unprivileged (32 bit) */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}


/** \brief  STRT Unprivileged (8 bit) */
__attribute__((always_inline)) __STATIC_INLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/** \brief  STRT Unprivileged (16 bit) */
__attribute__((always_inline)) __STATIC_INLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/** \brief  STRT Unprivileged (32 bit) */
__attribute__((always_inline)) __STATIC_INLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}

#endif /* ((__ARM_ARCH_7M__ == 1U) || (__ARM_ARCH_7EM__ == 1U) || (__ARM_ARCH_8M__ == 1U)) */


#if (__ARM_ARCH_8M__ == 1U)

/** \brief  Load-Acquire (8 bit) */
__attribute__((always_inline)) __STATIC_INLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);
}


/** \brief  Load-Acquire (16 bit) */
__attribute__((always_inline)) __STATIC_INLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;

  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);
}


/** \brief  Load-Acquire (32 bit) */
__attribute__((always_inline)) __STATIC_INLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;

  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}


/** \brief  Store-Release (8 bit) */
__attribute__((always_inline)) __STATIC_INLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/** \brief  Store-Release (16 bit) */
__attribute__((always_inline)) __STATIC_INLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/** \brief  Store-Release (32 bit) */
__attribute__((always_inline)) __STATIC_INLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}


/** \brief  Load-Acquire Exclusive (8 bit) */
#define __LDAEXB       (uint8_t)__builtin_arm_ldaex


/** \brief  Load-Acquire Exclusive (16 bit) */
#define __LDAEXH       (uint16_t)__builtin_arm_ldaex


/** \brief  Load-Acquire Exclusive (32 bit) */
#define __LDAEX        (uint32_t)__builtin_arm_ldaex


/** \brief  Store-Release Exclusive (8 bit); returns 0 on success, 1 otherwise */
#define __STLEXB       (uint32_t)__builtin_arm_stlex


/** \brief  Store-Release Exclusive (16 bit); returns 0 on success, 1 otherwise */
#define __STLEXH       (uint32_t)__builtin_arm_stlex


/** \brief  Store-Release Exclusive (32 bit); returns 0 on success, 1 otherwise */
#define __STLEX        (uint32_t)__builtin_arm_stlex

#endif /* (__ARM_ARCH_8M__ == 1U) */
/* end of group CMSIS_Core_InstructionInterface */


/* ################### Compiler specific Intrinsics ########################### */
#if (__ARM_FEATURE_DSP == 1U) /* ToDo: ARMCC_V6: This should be ARCH >= ARMv7-M + SIMD */

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
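
/* Illustrative sketch (not part of CMSIS): the SIMD intrinsics in this section
   treat a uint32_t as four packed 8-bit lanes (or two 16-bit lanes). For
   example, adding four signed bytes at once: */
#if 0
uint32_t mix_4_samples(uint32_t a, uint32_t b)
{
  /* lane-wise: r[i] = a[i] + b[i] for each of the 4 bytes, no carry between lanes */
  return __SADD8(a, b);
}
#endif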

__attribute__((always_inline)) __STATIC_INLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}


__attribute__((always_inline)) __STATIC_INLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}


__attribute__((always_inline)) __STATIC_INLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#define __SSAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMUAD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}
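
/* Note (illustrative, not part of CMSIS): SMLALD writes its 64-bit result into
   a register pair (RdLo, RdHi). The union above maps that pair onto a
   uint64_t, and the __ARMEB__ test picks which 32-bit word is the low half,
   since the word order of a 64-bit value differs between little- and
   big-endian targets. */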

__attribute__((always_inline)) __STATIC_INLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMUSD  (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__attribute__((always_inline)) __STATIC_INLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__attribute__((always_inline)) __STATIC_INLINE uint32_t __SEL (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE int32_t __QADD( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__((always_inline)) __STATIC_INLINE int32_t __QSUB( int32_t op1,  int32_t op2)
{
  int32_t result;

  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

#define __PKHBT(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2)  ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) :  "r" (__ARG1), "r" (__ARG2), "I" (ARG3)  ); \
  __RES; \
 })
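
/* Illustrative sketch (not part of CMSIS): __PKHBT packs the bottom halfword
   of one operand with the (shifted) top halfword of another -- a common step
   when interleaving two int16_t audio samples into one 32-bit word: */
#if 0
uint32_t pack_samples(uint16_t low, uint16_t high)
{
  /* result = (high << 16) | low, in a single PKHBT instruction */
  return __PKHBT(low, high, 16);
}
#endif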

__attribute__((always_inline)) __STATIC_INLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#endif /* (__ARM_FEATURE_DSP == 1U) */


#endif /* __CMSIS_ARMCC_V6_H */