#ifdef __cplusplus
 extern "C" {
#endif

#ifndef __CORE_CM4_SIMD_H
#define __CORE_CM4_SIMD_H

/* ###################  Compiler specific Intrinsics  ########################### */
/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
  Access to dedicated SIMD instructions
  @{
*/

#if   defined ( __CC_ARM ) /*------------------ RealView Compiler -----------------*/
/* ARM armcc specific functions */

#define __SADD8                   __sadd8
#define __QADD8                   __qadd8
#define __SHADD8                  __shadd8
#define __UADD8                   __uadd8
#define __UQADD8                  __uqadd8
#define __UHADD8                  __uhadd8
#define __SSUB8                   __ssub8
#define __QSUB8                   __qsub8
#define __SHSUB8                  __shsub8
#define __USUB8                   __usub8
#define __UQSUB8                  __uqsub8
#define __UHSUB8                  __uhsub8
#define __SADD16                  __sadd16
#define __QADD16                  __qadd16
#define __SHADD16                 __shadd16
#define __UADD16                  __uadd16
#define __UQADD16                 __uqadd16
#define __UHADD16                 __uhadd16
#define __SSUB16                  __ssub16
#define __QSUB16                  __qsub16
#define __SHSUB16                 __shsub16
#define __USUB16                  __usub16
#define __UQSUB16                 __uqsub16
#define __UHSUB16                 __uhsub16
#define __SASX                    __sasx
#define __QASX                    __qasx
#define __SHASX                   __shasx
#define __UASX                    __uasx
#define __UQASX                   __uqasx
#define __UHASX                   __uhasx
#define __SSAX                    __ssax
#define __QSAX                    __qsax
#define __SHSAX                   __shsax
#define __USAX                    __usax
#define __UQSAX                   __uqsax
#define __UHSAX                   __uhsax
#define __USAD8                   __usad8
#define __USADA8                  __usada8
#define __SSAT16                  __ssat16
#define __USAT16                  __usat16
#define __UXTB16                  __uxtb16
#define __UXTAB16                 __uxtab16
#define __SXTB16                  __sxtb16
#define __SXTAB16                 __sxtab16
#define __SMUAD                   __smuad
#define __SMUADX                  __smuadx
#define __SMLAD                   __smlad
#define __SMLADX                  __smladx
#define __SMLALD                  __smlald
#define __SMLALDX                 __smlaldx
#define __SMUSD                   __smusd
#define __SMUSDX                  __smusdx
#define __SMLSD                   __smlsd
#define __SMLSDX                  __smlsdx
#define __SMLSLD                  __smlsld
#define __SMLSLDX                 __smlsldx
#define __SEL                     __sel
#define __QADD                    __qadd
#define __QSUB                    __qsub

#define __PKHBT(ARG1,ARG2,ARG3)   ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
                                    ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )

#define __PKHTB(ARG1,ARG2,ARG3)   ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
                                    ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )

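/* Usage note (illustrative): __PKHBT keeps the low halfword of ARG1 and packs
   the low halfword of (ARG2 << ARG3) into the top half; __PKHTB keeps the top
   halfword of ARG1 and takes the bottom halfword from (ARG2 >> ARG3). E.g.
     __PKHBT(0x1234, 0x5678, 16)            == 0x56781234
     __PKHTB(0xAAAA0000, 0x5678BBBB, 16)    == 0xAAAA5678 */
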

#elif defined ( __ICCARM__ ) /*------------------ ICC Compiler -------------------*/
/* IAR iccarm specific functions */

/* The IAR-specific CMSIS header implements the SIMD intrinsics. */
#include <cmsis_iar.h>

#elif defined ( __TMS470__ ) /*---------------- TI CCS Compiler ------------------*/
/* TI CCS specific functions */

/* The TI-specific CMSIS header implements the SIMD intrinsics. */
#include <cmsis_ccs.h>

#elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
/* GNU gcc specific functions */

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
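
/* Usage note (illustrative): __UADD8 adds the four byte lanes independently
   with modulo-256 wrap-around, e.g.
     __UADD8(0x01FF0102, 0x01010101) == 0x02000203   (0xFF + 0x01 wraps to 0x00)
   whereas the saturating variant clamps each lane, e.g.
     __UQADD8(0x01FF0102, 0x01010101) == 0x02FF0203  (0xFF + 0x01 saturates at 0xFF) */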

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
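
/* Usage note (illustrative): the "exchange" forms cross the halfwords.
   __SASX computes result[31:16] = op1[31:16] + op2[15:0] and
   result[15:0] = op1[15:0] - op2[31:16]; __SSAX is the mirror image
   (subtract in the top half, add in the bottom). E.g.
     __SASX(0x00050002, 0x00030004) == 0x0009FFFF   (5+4 = 9, 2-3 = -1) */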

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
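
/* Usage note (illustrative): __USAD8 sums the absolute differences of the four
   byte lanes, and __USADA8 adds that sum to an accumulator. E.g.
     __USAD8(0x05010203, 0x01050500)       == 4 + 4 + 3 + 3 == 14
     __USADA8(0x05010203, 0x01050500, 100) == 114 */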

#define __SSAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
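
/* Usage note (illustrative): the "I" constraint means ARG2 must be a
   compile-time constant (the saturation bit position: 1..16 for ssat16,
   0..15 for usat16). E.g. __SSAT16(0x7FFF8000, 8) clamps both halfwords to
   the signed 8-bit range, giving 0x007FFF80 (0x7FFF -> 0x007F, 0x8000 -> 0xFF80). */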

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;

  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
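
/* Usage note (illustrative): the *XTB16 forms extract bytes 0 and 2 and widen
   them to halfwords. E.g. __UXTB16(0xAABBCCDD) == 0x00BB00DD, while
   __SXTB16(0xAABBCCDD) == 0xFFBBFFDD because 0xBB and 0xDD are sign-extended. */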

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
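
/* Usage note (illustrative): __SMUAD is a dual signed 16x16 multiply with add,
   i.e. a two-element dot product of the halfwords:
     __SMUAD(0x00030002, 0x00050004)  == 3*5 + 2*4 == 23
   The X variant exchanges the halfwords of the second operand first:
     __SMUADX(0x00030002, 0x00050004) == 3*4 + 2*5 == 22 */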

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#define __SMLALD(ARG1,ARG2,ARG3) \
({ \
  uint32_t __ARG1 = (ARG1), __ARG2 = (ARG2), __ARG3_H = (uint32_t)((uint64_t)(ARG3) >> 32), __ARG3_L = (uint32_t)((uint64_t)(ARG3) & 0xFFFFFFFFUL); \
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (__ARG3_L), "=r" (__ARG3_H) : "r" (__ARG1), "r" (__ARG2), "0" (__ARG3_L), "1" (__ARG3_H) ); \
  (uint64_t)(((uint64_t)__ARG3_H << 32) | __ARG3_L); \
 })

#define __SMLALDX(ARG1,ARG2,ARG3) \
({ \
  uint32_t __ARG1 = (ARG1), __ARG2 = (ARG2), __ARG3_H = (uint32_t)((uint64_t)(ARG3) >> 32), __ARG3_L = (uint32_t)((uint64_t)(ARG3) & 0xFFFFFFFFUL); \
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (__ARG3_L), "=r" (__ARG3_H) : "r" (__ARG1), "r" (__ARG2), "0" (__ARG3_L), "1" (__ARG3_H) ); \
  (uint64_t)(((uint64_t)__ARG3_H << 32) | __ARG3_L); \
 })
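
/* Usage note (illustrative): smlald accumulates into a 64-bit value held in a
   register pair, so the macro splits the uint64_t accumulator into high/low
   words, ties them to the same registers on input and output (the "0"/"1"
   matching constraints), and reassembles the result. E.g.
     uint64_t acc = 100;
     acc = __SMLALD(0x00030002, 0x00050004, acc);   -> 100 + 23 == 123 */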

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#define __SMLSLD(ARG1,ARG2,ARG3) \
({ \
  uint32_t __ARG1 = (ARG1), __ARG2 = (ARG2), __ARG3_H = (uint32_t)((uint64_t)(ARG3) >> 32), __ARG3_L = (uint32_t)((uint64_t)(ARG3) & 0xFFFFFFFFUL); \
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (__ARG3_L), "=r" (__ARG3_H) : "r" (__ARG1), "r" (__ARG2), "0" (__ARG3_L), "1" (__ARG3_H) ); \
  (uint64_t)(((uint64_t)__ARG3_H << 32) | __ARG3_L); \
 })

#define __SMLSLDX(ARG1,ARG2,ARG3) \
({ \
  uint32_t __ARG1 = (ARG1), __ARG2 = (ARG2), __ARG3_H = (uint32_t)((uint64_t)(ARG3) >> 32), __ARG3_L = (uint32_t)((uint64_t)(ARG3) & 0xFFFFFFFFUL); \
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (__ARG3_L), "=r" (__ARG3_H) : "r" (__ARG1), "r" (__ARG2), "0" (__ARG3_L), "1" (__ARG3_H) ); \
  (uint64_t)(((uint64_t)__ARG3_H << 32) | __ARG3_L); \
 })

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __SEL (uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
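
/* Usage note (illustrative): sel picks each result byte from op1 or op2
   depending on the APSR.GE flags, which the SIMD add/subtract instructions
   above set per lane. A common pattern is a per-byte unsigned maximum:
     __USUB8(a, b);                  // sets GE[i] where byte i of a >= byte i of b
     uint32_t max4 = __SEL(a, b);    // byte-wise max of a and b */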

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QADD(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __QSUB(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}
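
/* Usage note (illustrative): __QADD/__QSUB are 32-bit saturating operations:
     __QADD(0x7FFFFFFF, 1) == 0x7FFFFFFF   (clamped at INT32_MAX, Q flag set)
     __QSUB(0x80000000, 1) == 0x80000000   (clamped at INT32_MIN, Q flag set) */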

#define __PKHBT(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })
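
/* Usage note (illustrative): as with __SSAT16/__USAT16, the "I" constraint
   requires ARG3 to be a compile-time constant. The ARG3 == 0 branch exists
   because "asr #0" is not encodable in pkhtb (a shift-immediate of 0 encodes
   a shift of 32), so a zero shift must emit the unshifted form instead. */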


#elif defined ( __TASKING__ ) /*------------------ TASKING Compiler --------------*/
/* TASKING carm specific functions */

/* not yet supported */


#endif

/*@} end of group CMSIS_SIMD_intrinsics */

#endif /* __CORE_CM4_SIMD_H */

#ifdef __cplusplus
}
#endif