/**************************************************************************//**
 * @file     core_cmInstr.h
 * @brief    CMSIS Cortex-M Core Instruction Access Header File
 * @version  V3.00
 * @date     09. December 2011
 *
 * @note
 * Copyright (C) 2009-2011 ARM Limited. All rights reserved.
 *
 * @par
 * ARM Limited (ARM) is supplying this software for use with Cortex-M
 * processor based microcontrollers.  This file can be freely distributed
 * within development tools that are supporting such ARM based processors.
 *
 * @par
 * THIS SOFTWARE IS PROVIDED "AS IS".  NO WARRANTIES, WHETHER EXPRESS, IMPLIED
 * OR STATUTORY, INCLUDING, BUT NOT LIMITED TO, IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE APPLY TO THIS SOFTWARE.
 * ARM SHALL NOT, IN ANY CIRCUMSTANCES, BE LIABLE FOR SPECIAL, INCIDENTAL, OR
 * CONSEQUENTIAL DAMAGES, FOR ANY REASON WHATSOEVER.
 *
 ******************************************************************************/

#ifndef __CORE_CMINSTR_H
#define __CORE_CMINSTR_H


/* ##########################  Core Instruction Access  ######################### */
/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
  Access to dedicated instructions
  @{
*/

#if defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
/* ARM armcc specific functions */

#if (__ARMCC_VERSION < 400677)
  #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
#endif


/** \brief  No Operation

    No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP                           __nop


/** \brief  Wait For Interrupt

    Wait For Interrupt is a hint instruction that suspends execution
    until one of a number of events occurs.
 */
#define __WFI                           __wfi


/** \brief  Wait For Event

    Wait For Event is a hint instruction that permits the processor to enter
    a low-power state until one of a number of events occurs.
 */
#define __WFE                           __wfe


/** \brief  Send Event

    Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV                           __sev


/** \brief  Instruction Synchronization Barrier

    Instruction Synchronization Barrier flushes the pipeline in the processor,
    so that all instructions following the ISB are fetched from cache or
    memory, after the instruction has been completed.
 */
#define __ISB()                         __isb(0xF)


/** \brief  Data Synchronization Barrier

    This function acts as a special kind of Data Memory Barrier.
    It completes when all explicit memory accesses before this instruction complete.
 */
#define __DSB()                         __dsb(0xF)


/** \brief  Data Memory Barrier

    This function ensures the apparent order of the explicit memory operations before
    and after the instruction, without ensuring their completion.
 */
#define __DMB()                         __dmb(0xF)


/** \brief  Reverse byte order (32 bit)

    This function reverses the byte order in an integer value.

    \param [in]  value  Value to reverse
    \return             Reversed value
 */
#define __REV                           __rev


/** \brief  Reverse byte order (16 bit)

    This function reverses the byte order in two unsigned short values.

    \param [in]  value  Value to reverse
    \return             Reversed value
 */
static __attribute__((section(".rev16_text"))) __INLINE __ASM uint32_t __REV16(uint32_t value)
{
  rev16 r0, r0
  bx lr
}


/** \brief  Reverse byte order in signed short value

    This function reverses the byte order in a signed short value with sign extension to integer.

    \param [in]  value  Value to reverse
    \return             Reversed value
 */
static __attribute__((section(".revsh_text"))) __INLINE __ASM int32_t __REVSH(int32_t value)
{
  revsh r0, r0
  bx lr
}


#if (__CORTEX_M >= 0x03)

/** \brief  Reverse bit order of value

    This function reverses the bit order of the given value.

    \param [in]  value  Value to reverse
    \return             Reversed value
 */
#define __RBIT                          __rbit


/** \brief  LDR Exclusive (8 bit)

    This function performs an exclusive LDR command for 8-bit values.

    \param [in]  ptr  Pointer to data
    \return           value of type uint8_t at (*ptr)
 */
#define __LDREXB(ptr)                   ((uint8_t ) __ldrex(ptr))


/** \brief  LDR Exclusive (16 bit)

    This function performs an exclusive LDR command for 16-bit values.

    \param [in]  ptr  Pointer to data
    \return           value of type uint16_t at (*ptr)
 */
#define __LDREXH(ptr)                   ((uint16_t) __ldrex(ptr))


/** \brief  LDR Exclusive (32 bit)

    This function performs an exclusive LDR command for 32-bit values.

    \param [in]  ptr  Pointer to data
    \return           value of type uint32_t at (*ptr)
 */
#define __LDREXW(ptr)                   ((uint32_t ) __ldrex(ptr))


/** \brief  STR Exclusive (8 bit)

    This function performs an exclusive STR command for 8-bit values.

    \param [in]  value  Value to store
    \param [in]  ptr    Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXB(value, ptr)            __strex(value, ptr)


/** \brief  STR Exclusive (16 bit)

    This function performs an exclusive STR command for 16-bit values.

    \param [in]  value  Value to store
    \param [in]  ptr    Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXH(value, ptr)            __strex(value, ptr)


/** \brief  STR Exclusive (32 bit)

    This function performs an exclusive STR command for 32-bit values.

    \param [in]  value  Value to store
    \param [in]  ptr    Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXW(value, ptr)            __strex(value, ptr)


/** \brief  Remove the exclusive lock

    This function removes the exclusive lock which is created by LDREX.

 */
#define __CLREX                         __clrex


/** \brief  Signed Saturate

    This function saturates a signed value.

    \param [in]  value  Value to be saturated
    \param [in]  sat    Bit position to saturate to (1..32)
    \return             Saturated value
 */
#define __SSAT                          __ssat


/** \brief  Unsigned Saturate

    This function saturates an unsigned value.

    \param [in]  value  Value to be saturated
    \param [in]  sat    Bit position to saturate to (0..31)
    \return             Saturated value
 */
#define __USAT                          __usat


/** \brief  Count leading zeros

    This function counts the number of leading zeros of a data value.

    \param [in]  value  Value to count the leading zeros
    \return             number of leading zeros in value
 */
#define __CLZ                           __clz

#endif /* (__CORTEX_M >= 0x03) */



#elif defined ( __ICCARM__ ) /*------------------ ICC Compiler -------------------*/
/* IAR iccarm specific functions */

#include <cmsis_iar.h>


#elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
/* GNU gcc specific functions */

/** \brief  No Operation

    No Operation does nothing. This instruction can be used for code alignment purposes.
 */
__attribute__( ( always_inline ) ) static __INLINE void __NOP(void)
{
  __ASM volatile ("nop");
}


/** \brief  Wait For Interrupt

    Wait For Interrupt is a hint instruction that suspends execution
    until one of a number of events occurs.
 */
__attribute__( ( always_inline ) ) static __INLINE void __WFI(void)
{
  __ASM volatile ("wfi");
}


/** \brief  Wait For Event

    Wait For Event is a hint instruction that permits the processor to enter
    a low-power state until one of a number of events occurs.
 */
__attribute__( ( always_inline ) ) static __INLINE void __WFE(void)
{
  __ASM volatile ("wfe");
}


/** \brief  Send Event

    Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
__attribute__( ( always_inline ) ) static __INLINE void __SEV(void)
{
  __ASM volatile ("sev");
}
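
/* Usage example (editorial sketch, not part of the original CMSIS header):
   a minimal wait loop that sleeps in __WFE() until another context, e.g. an
   interrupt handler, sets a flag and signals with __SEV().  The names
   'work_ready', 'example_wait_for_work' and 'example_signal_work' are
   hypothetical.  Real code must tolerate spurious wake-ups, which is why the
   flag is re-checked in a loop.  Guarded by '#if 0' so it is never built. */
#if 0
static volatile uint32_t work_ready;

static __INLINE void example_wait_for_work(void)
{
  while (work_ready == 0) {
    __WFE();                 /* sleep until an event or interrupt arrives */
  }
  work_ready = 0;            /* consume the notification                  */
}

static __INLINE void example_signal_work(void)
{
  work_ready = 1;            /* publish the flag first                    */
  __SEV();                   /* then wake any context waiting in __WFE()  */
}
#endif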


/** \brief  Instruction Synchronization Barrier

    Instruction Synchronization Barrier flushes the pipeline in the processor,
    so that all instructions following the ISB are fetched from cache or
    memory, after the instruction has been completed.
 */
__attribute__( ( always_inline ) ) static __INLINE void __ISB(void)
{
  __ASM volatile ("isb");
}


/** \brief  Data Synchronization Barrier

    This function acts as a special kind of Data Memory Barrier.
    It completes when all explicit memory accesses before this instruction complete.
 */
__attribute__( ( always_inline ) ) static __INLINE void __DSB(void)
{
  __ASM volatile ("dsb");
}


/** \brief  Data Memory Barrier

    This function ensures the apparent order of the explicit memory operations before
    and after the instruction, without ensuring their completion.
 */
__attribute__( ( always_inline ) ) static __INLINE void __DMB(void)
{
  __ASM volatile ("dmb");
}
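
/* Usage example (editorial sketch, not part of the original CMSIS header):
   a producer fills a buffer and only then raises a "ready" flag; the __DMB()
   between the two writes keeps the payload observable before the flag.  The
   names 'shared_buffer', 'buffer_ready' and 'example_publish_buffer' are
   hypothetical.  Guarded by '#if 0' so it is never built. */
#if 0
static volatile uint32_t buffer_ready;
static uint32_t shared_buffer[16];

static __INLINE void example_publish_buffer(uint32_t word0)
{
  shared_buffer[0] = word0;   /* 1. write the payload                      */
  __DMB();                    /* 2. order the payload before the flag      */
  buffer_ready = 1;           /* 3. then publish the ready flag            */
}
#endif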


/** \brief  Reverse byte order (32 bit)

    This function reverses the byte order in an integer value.

    \param [in]  value  Value to reverse
    \return             Reversed value
 */
__attribute__( ( always_inline ) ) static __INLINE uint32_t __REV(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rev %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}
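
/* Usage example (editorial sketch, not part of the original CMSIS header):
   converting a 32-bit big-endian wire value to the Cortex-M's little-endian
   host order with __REV.  'example_be32_to_host' is a hypothetical name.
   Guarded by '#if 0' so it is never built. */
#if 0
static __INLINE uint32_t example_be32_to_host(uint32_t big_endian_word)
{
  return __REV(big_endian_word);   /* 0x11223344 becomes 0x44332211 */
}
#endif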


/** \brief  Reverse byte order (16 bit)

    This function reverses the byte order in two unsigned short values.

    \param [in]  value  Value to reverse
    \return             Reversed value
 */
__attribute__( ( always_inline ) ) static __INLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rev16 %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}


/** \brief  Reverse byte order in signed short value

    This function reverses the byte order in a signed short value with sign extension to integer.

    \param [in]  value  Value to reverse
    \return             Reversed value
 */
__attribute__( ( always_inline ) ) static __INLINE int32_t __REVSH(int32_t value)
{
  uint32_t result;

  __ASM volatile ("revsh %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}


#if (__CORTEX_M >= 0x03)

/** \brief  Reverse bit order of value

    This function reverses the bit order of the given value.

    \param [in]  value  Value to reverse
    \return             Reversed value
 */
__attribute__( ( always_inline ) ) static __INLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}


/** \brief  LDR Exclusive (8 bit)

    This function performs an exclusive LDR command for 8-bit values.

    \param [in]  ptr  Pointer to data
    \return           value of type uint8_t at (*ptr)
 */
__attribute__( ( always_inline ) ) static __INLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
  uint8_t result;

  __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) );
  return(result);
}


/** \brief  LDR Exclusive (16 bit)

    This function performs an exclusive LDR command for 16-bit values.

    \param [in]  ptr  Pointer to data
    \return           value of type uint16_t at (*ptr)
 */
__attribute__( ( always_inline ) ) static __INLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
  uint16_t result;

  __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) );
  return(result);
}


/** \brief  LDR Exclusive (32 bit)

    This function performs an exclusive LDR command for 32-bit values.

    \param [in]  ptr  Pointer to data
    \return           value of type uint32_t at (*ptr)
 */
__attribute__( ( always_inline ) ) static __INLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrex %0, [%1]" : "=r" (result) : "r" (addr) );
  return(result);
}


/** \brief  STR Exclusive (8 bit)

    This function performs an exclusive STR command for 8-bit values.

    \param [in]  value  Value to store
    \param [in]  ptr    Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) static __INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexb %0, %2, [%1]" : "=&r" (result) : "r" (addr), "r" (value) );
  return(result);
}


/** \brief  STR Exclusive (16 bit)

    This function performs an exclusive STR command for 16-bit values.

    \param [in]  value  Value to store
    \param [in]  ptr    Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) static __INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexh %0, %2, [%1]" : "=&r" (result) : "r" (addr), "r" (value) );
  return(result);
}


/** \brief  STR Exclusive (32 bit)

    This function performs an exclusive STR command for 32-bit values.

    \param [in]  value  Value to store
    \param [in]  ptr    Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) static __INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("strex %0, %2, [%1]" : "=&r" (result) : "r" (addr), "r" (value) );
  return(result);
}


/** \brief  Remove the exclusive lock

    This function removes the exclusive lock which is created by LDREX.

 */
__attribute__( ( always_inline ) ) static __INLINE void __CLREX(void)
{
  __ASM volatile ("clrex");
}
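
/* Usage example (editorial sketch, not part of the original CMSIS header):
   a lock-free read-modify-write of a shared counter built on the exclusive
   monitor.  __LDREXW loads the value, __STREXW attempts the store and
   returns 0 on success or 1 if exclusivity was lost, in which case the loop
   retries.  'example_atomic_increment' is a hypothetical name.  Guarded by
   '#if 0' so it is never built. */
#if 0
static __INLINE uint32_t example_atomic_increment(volatile uint32_t *counter)
{
  uint32_t newValue;

  do {
    newValue = __LDREXW(counter) + 1;      /* load with exclusive access       */
  } while (__STREXW(newValue, counter));   /* 0 = stored, 1 = retry            */

  return newValue;
}
#endif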


/** \brief  Signed Saturate

    This function saturates a signed value.

    \param [in]  value  Value to be saturated
    \param [in]  sat    Bit position to saturate to (1..32)
    \return             Saturated value
 */
#define __SSAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })


/** \brief  Unsigned Saturate

    This function saturates an unsigned value.

    \param [in]  value  Value to be saturated
    \param [in]  sat    Bit position to saturate to (0..31)
    \return             Saturated value
 */
#define __USAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
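
/* Usage example (editorial sketch, not part of the original CMSIS header):
   clamping the sum of two Q15 (signed 16-bit) samples with __SSAT so the
   result cannot wrap around.  'example_mix_q15' is a hypothetical name.
   Guarded by '#if 0' so it is never built. */
#if 0
static __INLINE int16_t example_mix_q15(int16_t a, int16_t b)
{
  int32_t sum = (int32_t)a + (int32_t)b;   /* may exceed the 16-bit range      */

  return (int16_t)__SSAT(sum, 16);         /* clamp to -32768 .. 32767         */
}
#endif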


/** \brief  Count leading zeros

    This function counts the number of leading zeros of a data value.

    \param [in]  value  Value to count the leading zeros
    \return             number of leading zeros in value
 */
__attribute__( ( always_inline ) ) static __INLINE uint8_t __CLZ(uint32_t value)
{
  uint8_t result;

  __ASM volatile ("clz %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}
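
/* Usage example (editorial sketch, not part of the original CMSIS header):
   the position of the most significant set bit (floor of log2) derived from
   __CLZ.  'example_log2_floor' is a hypothetical name.  Guarded by '#if 0'
   so it is never built. */
#if 0
static __INLINE uint8_t example_log2_floor(uint32_t value)
{
  /* Caller must ensure value != 0; __CLZ(0) returns 32. */
  return (uint8_t)(31u - __CLZ(value));
}
#endif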

#endif /* (__CORTEX_M >= 0x03) */




#elif defined ( __TASKING__ ) /*------------------ TASKING Compiler --------------*/
/* TASKING carm specific functions */

/*
 * The CMSIS functions have been implemented as intrinsics in the compiler.
 * Please use "carm -?i" to get an up-to-date list of all intrinsics,
 * including the CMSIS ones.
 */

#endif

/*@}*/ /* end of group CMSIS_Core_InstructionInterface */

#endif /* __CORE_CMINSTR_H */