#ifndef IFXCPU_INTRINSICSGNUC_H
#define IFXCPU_INTRINSICSGNUC_H

#if defined(SCTB_EMBEDDED)
#  define IFXCPU_INTRINSICSGNUC_USE_MACHINE_INTRINSICS 0
#else
#  define IFXCPU_INTRINSICSGNUC_USE_MACHINE_INTRINSICS 1
#endif

#if IFXCPU_INTRINSICSGNUC_USE_MACHINE_INTRINSICS
#include "machine/intrinsics.h"
#endif

#define STRINGIFY(x) #x

/* Jump to a function that never returns (absolute jump, no link). */
#define __non_return_call(fun) __asm__ volatile ("ja "#fun)
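/* Usage sketch (illustrative, not part of the original header): handing
 * control to a routine that never comes back. softwareReset() is a
 * hypothetical symbol. */
extern void softwareReset(void) __attribute__((noreturn));

static inline void requestReset(void)
{
    __non_return_call(softwareReset);   /* expands to: __asm__ volatile ("ja softwareReset") */
}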
/* Call a function through a register with "jump and link" (the callee can
 * return); the wrapper signature is reconstructed from the operand constraints. */
IFX_INLINE void __jump_and_link(void (*fun)(void))
{
    __asm__ volatile ("jli %0" : : "a"(fun));
}
#define __minX(X,Y)              ( ((X) < (Y)) ? (X) : (Y) )
#define __maxX(X,Y)              ( ((X) > (Y)) ? (X) : (Y) )
#define __saturateX(X,Min,Max)   ( __minX(__maxX(X, Min), Max) )
#define __checkrangeX(X,Min,Max) (((X) >= (Min)) && ((X) <= (Max)))

#define __saturate(X,Min,Max)    ( __min(__max(X, Min), Max) )
#define __saturateu(X,Min,Max)   ( __minu(__maxu(X, Min), Max) )
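/* Usage sketch (illustrative): clamping a raw 12-bit ADC reading with the
 * generic macros above; the value range is an assumption. */
static inline sint32 clampAdc12(sint32 raw)
{
    return __saturateX(raw, 0, 4095);   /* branch-based clamp */
}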
/* Return the maximum of two signed 32-bit integers (MAX). */
IFX_INLINE sint32 __max(sint32 a, sint32 b)
{
    sint32 res;
    __asm__ volatile ("max %0, %1, %2" : "=d"(res) : "d"(a), "d"(b));
    return res;
}

/* Packed maximum of two pairs of signed 16-bit halfwords (MAX.H); the
 * wrapper name is reconstructed from the mnemonic. */
IFX_INLINE sint32 __maxh(sint32 a, sint32 b)
{
    sint32 res;
    __asm__ volatile ("max.h %0, %1, %2" : "=d"(res) : "d"(a), "d"(b));
    return res;
}

/* Return the maximum of two unsigned 32-bit integers (MAX.U). */
IFX_INLINE uint32 __maxu(uint32 a, uint32 b)
{
    uint32 res;
    __asm__ volatile ("max.u %0, %1, %2" : "=d"(res) : "d"(a), "d"(b));
    return res;
}

/* Return the minimum of two signed 32-bit integers (MIN). */
IFX_INLINE sint32 __min(sint32 a, sint32 b)
{
    sint32 res;
    __asm__ volatile ("min %0, %1, %2" : "=d"(res) : "d"(a), "d"(b));
    return res;
}

/* Packed minimum of two pairs of signed 16-bit halfwords (MIN.H); the
 * wrapper name is reconstructed from the mnemonic. */
IFX_INLINE sint32 __minh(sint32 a, sint32 b)
{
    sint32 res;
    __asm__ volatile ("min.h %0, %1, %2" : "=d"(res) : "d"(a), "d"(b));
    return res;
}

/* Return the minimum of two unsigned 32-bit integers (MIN.U). */
IFX_INLINE uint32 __minu(uint32 a, uint32 b)
{
    uint32 res;
    __asm__ volatile ("min.u %0, %1, %2" : "=d"(res) : "d"(a), "d"(b));
    return res;
}
#define __sqrf(X)                ((X) * (X))
#define __sqrtf(X)               sqrtf(X)
#define __checkrange(X,Min,Max)  (((X) >= (Min)) && ((X) <= (Max)))

#define __roundf(X)              ((((X) - (sint32)(X)) > 0.5) ? (1 + (sint32)(X)) : ((sint32)(X)))
#define __absf(X)                ( ((X) < 0.0) ? -(X) : (X) )
#define __minf(X,Y)              ( ((X) < (Y)) ? (X) : (Y) )
#define __maxf(X,Y)              ( ((X) > (Y)) ? (X) : (Y) )
#define __saturatef(X,Min,Max)   ( __minf(__maxf(X, Min), Max) )
#define __checkrangef(X,Min,Max) (((X) >= (Min)) && ((X) <= (Max)))
#define __abs_stdreal(X)              ( ((X) > 0.0) ? (X) : -(X) )
#define __min_stdreal(X,Y)            ( ((X) < (Y)) ? (X) : (Y) )
#define __max_stdreal(X,Y)            ( ((X) > (Y)) ? (X) : (Y) )
#define __saturate_stdreal(X,Min,Max) ( __min_stdreal(__max_stdreal(X, Min), Max) )

/* Float comparisons built only from < and >, avoiding direct equality. */
#define __neqf(X,Y) ( ((X) > (Y)) || ((X) < (Y)) )   /* X != Y */
#define __leqf(X,Y) ( !((X) > (Y)) )                 /* X <= Y */
#define __geqf(X,Y) ( !((X) < (Y)) )                 /* X >= Y */
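/* Usage sketch (illustrative, assuming the iLLD base types float32 and
 * boolean from Ifx_Types.h): __geqf avoids a direct >= on floats. Note that
 * with a NaN operand both (X < Y) and (X > Y) are false, so __geqf and
 * __leqf evaluate to true. */
static inline boolean speedReached(float32 speed, float32 target)
{
    return __geqf(speed, target);
}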
/* CLS: count the leading sign bits of a. */
__asm__ volatile ("cls %0,%1" : "=d"(res) : "d"(a) : "memory");

/* Q31TOF: convert the Q31 fixed-point value a to float32 (shift operand 0). */
__asm__ volatile ("q31tof %0,%1,%2" : "=d"(res) : "d"(a), "d"(0) : "memory");

/* FTOQ31: convert the float32 value a to Q31 fixed-point (shift operand 0). */
__asm__ volatile ("ftoq31 %0,%1,%2" : "=d"(res) : "d"(a), "d"(0) : "memory");
/* DEXTR: extract 32 bits from the register pair a at bit offset 17 (0x11). */
__asm__ volatile ("dextr %0,%H1,%L1,0x11" : "=&d"(res) : "d"(a) : "memory");

/* MADDRS.Q: Q-format multiply-accumulate with rounding and saturation. */
__asm__ volatile ("maddrs.q %0,%1,%2U,%3U,1"
                  : "=d"(res) : "d"(a), "d"(b), "d"(c) : "memory");

/* MADDS.Q: Q-format multiply-accumulate with saturation. */
__asm__ volatile ("madds.q %0,%1,%2U,%3U,1"
                  : "=d"(res) : "d"(a), "d"(b), "d"(c) : "memory");

/* MUL.Q: Q-format multiply (two variants in the source share this body). */
__asm__ volatile ("mul.q %0,%1,%2,1" : "=d"(res) : "d"(a), "d"(b) : "memory");
__asm__ volatile ("mul.q %0,%1,%2,1" : "=d"(res) : "d"(a), "d"(b) : "memory");

/* MOV.U/INSERT: assemble a constant bit pattern starting from 0x8000. */
__asm__ volatile ("mov.u %0,0x8000 \n\
                   insert %0,%0,0,0,0x10"
                  : "=&d"(res) : "d"(a) : "memory");
/* SH 16: shift a left by 16 bits. */
__asm__ volatile ("sh %0,%1,16" : "=d"(res) : "d"(a) : "memory");

/* SH -16: shift a right by 16 bits (two variants share this body). */
__asm__ volatile ("sh %0,%1,-16" : "=d"(res) : "d"(a) : "memory");
__asm__ volatile ("sh %0,%1,-16" : "=d"(res) : "d"(a) : "memory");

/* Shift of the 64-bit register pair a by b, built from DEXTR; the JGE
 * skips the low-word extract for non-negative shift counts. */
__asm__ volatile ("jge %2,0,0f \n\
                   dextr %L0,%H1,%L1,%2 \n\
                   0:dextr %H0,%H1,%L1,%2"
                  : "=d"(res) : "d"(a), "d"(b) : "memory");

/* SHAS: arithmetic shift of a by b with saturation (two variants share
 * this body). */
__asm__ volatile ("shas %0,%1,%2" : "=d"(res) : "d"(a), "d"(b) : "memory");
__asm__ volatile ("shas %0,%1,%2" : "=d"(res) : "d"(a), "d"(b) : "memory");

/* SH 16: shift a left by 16 bits. */
__asm__ volatile ("sh %0,%1,16" : "=d"(res) : "d"(a) : "memory");
/* Variable bit-field access: position p and width w are staged in the
 * D14/D15 register pair; the remainder of each sequence is not shown.
 * Two variants in the source share this visible body. */
__asm__ volatile ("mov %%d14,%2"
                  : "=d"(res) : "d"(a), "d"(p), "d"(w) : "d14", "d15");
/* Read a single bit through a pointer. */
#define __getbit(address, bitoffset) ((*(address) & (1U << (bitoffset))) != 0)

/* Atomic read-modify-write of a bit field: IMASK builds the mask/value
 * register pair, LDMST applies it in a single bus transaction. */
#define __imaskldmst(address, value, bitoffset, bits) \
    { long long tmp; \
      __asm("imask %A0,%1,%2,%3" : "=d"((long long)tmp) : "d"(value), "d"(bitoffset), "i"(bits) : "memory"); \
      __asm("ldmst %0,%A1" : : "i"(address), "d"(tmp)); }
/* INS.T: copy bit srcbit of src into bit trgbit of trg. */
__asm__ volatile ("ins.t %0,%1,%2,%3,%4"
                  : "=d"(res) : "d"(trg), "i"(trgbit), "d"(src), "i"(srcbit));

/* Variable-position INSERT of b into a, with position/width staged in the
 * D14/D15 register pair (the D15 load of the sequence is not shown). */
__asm__ volatile ("mov %%d14,%2 \n\
                   insert %0,%1,%2,%%e14"
                  : "=d"(res) : "d"(a), "d"(b), "d"(p), "d"(w) : "d14", "d15");

/* INSN.T: copy the complement of bit srcbit of src into bit trgbit of trg. */
__asm__ volatile ("insn.t %0,%1,%2,%3,%4"
                  : "=d"(res) : "d"(trg), "i"(trgbit), "d"(src), "i"(srcbit));

/* Write a single bit atomically (via __imaskldmst). */
#define __putbit(value,address,bitoffset) __imaskldmst(address, value, bitoffset, 1)
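/* Usage sketch (illustrative): __getbit reads through a pointer, __putbit
 * routes through __imaskldmst for an atomic single-bit update. The flag
 * register address is hypothetical. */
#define EXAMPLE_FLAG_REG 0xF0000010u   /* hypothetical SFR address */

static inline void acknowledgeFlag(void)
{
    if (__getbit((volatile uint32 *)EXAMPLE_FLAG_REG, 3))
    {
        __putbit(0, EXAMPLE_FLAG_REG, 3);   /* clear bit 3 atomically */
    }
}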
#if !IFXCPU_INTRINSICSGNUC_USE_MACHINE_INTRINSICS

/* BISR: begin interrupt service routine at level intlvl (saves the lower
 * context and re-enables interrupts). */
#define __bisr(intlvl) __asm__ volatile ("bisr "#intlvl : : : "memory")

/* Globally disable interrupts. */
#define __disable() __asm__ volatile ("disable" : : : "memory")

/* Disable interrupts and capture the previous ICR.IE state in res. */
__asm__ volatile ("disable %0" : "=d"(res));

/* Globally enable interrupts. */
#define __enable() __asm__ volatile ("enable" : : : "memory")

/* RESTORE: restore the interrupt-enable state from ie. */
__asm__ volatile ("restore %0" : : "d"(ie));

#if !IFXCPU_INTRINSICSGNUC_USE_MACHINE_INTRINSICS

/* Issue a system-call trap with service number svcno. */
#define __syscall(svcno)      __tric_syscall(svcno)
#define __tric_syscall(svcno) __asm__ volatile ("syscall "STRINGIFY(svcno) : : : "memory")
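/* Usage sketch (illustrative): a short critical section that saves the
 * interrupt-enable state, disables interrupts, and restores the saved state
 * afterwards; raw asm is used because the listing does not name the
 * save/restore wrappers. */
static inline void updateSharedCounter(volatile uint32 *counter)
{
    sint32 ie;
    __asm__ volatile ("disable %0" : "=d"(ie));   /* disable, save old ICR.IE */
    *counter += 1;
    __asm__ volatile ("restore %0" : : "d"(ie));  /* restore previous state */
}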
/* CACHEA.WI: write back and invalidate the data-cache line containing [p]. */
__asm__ volatile ("cachea.wi [%0]0" : : "a"(p));

/* CACHEI.WI: write back and invalidate the cache line selected by index [p]. */
__asm__ volatile ("cachei.wi [%0]0" : : "a"(p));

/* CACHEA.WI with post-increment addressing: flush the line and advance p. */
__asm__ volatile ("cachea.wi [%0+]0" : : "a"(p));
/* MUL/DEXTR: 32 x 32 -> 64-bit multiply into E12, then extract a 32-bit
 * result at bit position offset. */
__asm__ volatile ("mul %%e12,%1,%2 \n\
                   dextr %0,%%d13,%%d12,%3"
                  : "=d"(res) : "d"(a), "d"(b), "d"(offset) : "d12", "d13");

/* DEXTR with both pair halves equal to operand: rotate left by count. */
__asm__ volatile ("dextr %0,%1,%1,%2"
                  : "=d"(res) : "d"(operand), "d"(count) : "memory");

/* Rotate right: negate the count (RSUB), then rotate left (the DEXTR of
 * this sequence is reconstructed). */
__asm__ volatile ("rsub %2,%2,0 \n\
                   dextr %0,%1,%1,%2"
                  : "=d"(res) : "d"(operand), "d"(count) : "memory");
/* ABS.B: byte-wise absolute value of the four packed bytes in a. */
__asm__ volatile ("abs.b %0,%1" : "=d"(res) : "d"(a) : "memory");

/* ABS.H: halfword-wise absolute value of the two packed halfwords in a. */
__asm__ volatile ("abs.h %0,%1" : "=d"(res) : "d"(a) : "memory");

/* ABSS.H: halfword-wise absolute value with saturation. */
__asm__ volatile ("abss.h %0,%1" : "=d"(res) : "d"(a) : "memory");

/* EXTR: extraction of byte 0..3 of a. */
__asm__ volatile ("extr %0,%1,0,8"   : "=d"(res) : "d"(a) : "memory");
__asm__ volatile ("extr %0,%1,8,8"   : "=d"(res) : "d"(a) : "memory");
__asm__ volatile ("extr %0,%1,16,8"  : "=d"(res) : "d"(a) : "memory");
__asm__ volatile ("extr %0,%1,24,8"  : "=d"(res) : "d"(a) : "memory");

/* EXTR: extraction of halfword 0 and 1 of a. */
__asm__ volatile ("extr %0,%1,0,16"  : "=d"(res) : "d"(a) : "memory");
__asm__ volatile ("extr %0,%1,16,16" : "=d"(res) : "d"(a) : "memory");

/* A second accessor family repeats the same extraction pattern. */
__asm__ volatile ("extr %0,%1,0,8"   : "=d"(res) : "d"(a) : "memory");
__asm__ volatile ("extr %0,%1,8,8"   : "=d"(res) : "d"(a) : "memory");
__asm__ volatile ("extr %0,%1,16,8"  : "=d"(res) : "d"(a) : "memory");
__asm__ volatile ("extr %0,%1,24,8"  : "=d"(res) : "d"(a) : "memory");
__asm__ volatile ("extr %0,%1,0,16"  : "=d"(res) : "d"(a) : "memory");
__asm__ volatile ("extr %0,%1,16,16" : "=d"(res) : "d"(a) : "memory");
/* Word-load accessors: every variant in this section begins with the same
 * LD.W; the accessor-specific second instruction of each sequence is not
 * shown, so the twelve variants share the body below. */
__asm__ volatile ("ld.w %0,[%1]0" : "=d"(res) : "a"(a) : "memory");
/* Pack four bytes a, b, c, d into one 32-bit word via a chain of INSERTs. */
__asm__ volatile ("insert %3,%3,%4,8,8 \n\
                   insert %4,%1,%2,8,8 \n\
                   insert %0,%4,%3,16,16"
                  : "=d"(res) : "d"(a), "d"(b), "d"(c), "d"(d) : "memory");

/* Pack two 16-bit halfwords (a low, b high) into one 32-bit word. */
__asm__ volatile ("insert %0,%1,%2,16,16"
                  : "=d"(res) : "d"(a), "d"(b) : "memory");

/* Second byte-packing variant (different intermediate register usage). */
__asm__ volatile ("insert %3,%3,%4,8,8 \n\
                   insert %1,%1,%2,8,8 \n\
                   insert %0,%1,%3,16,16"
                  : "=d"(res) : "d"(a), "d"(b), "d"(c), "d"(d) : "memory");

/* Second halfword-packing variant. */
__asm__ volatile ("insert %0,%1,%2,16,16"
                  : "=d"(res) : "d"(a), "d"(b) : "memory");
/* Replace byte 0..3 of a with b (INSERT at offsets 0/8/16/24, width 8). */
__asm__ volatile ("insert %0,%1,%2,0,8"   : "=d"(res) : "d"(a), "d"(b) : "memory");
__asm__ volatile ("insert %0,%1,%2,8,8"   : "=d"(res) : "d"(a), "d"(b) : "memory");
__asm__ volatile ("insert %0,%1,%2,16,8"  : "=d"(res) : "d"(a), "d"(b) : "memory");
__asm__ volatile ("insert %0,%1,%2,24,8"  : "=d"(res) : "d"(a), "d"(b) : "memory");

/* A second byte-replace family repeats the same pattern. */
__asm__ volatile ("insert %0,%1,%2,0,8"   : "=d"(res) : "d"(a), "d"(b) : "memory");
__asm__ volatile ("insert %0,%1,%2,8,8"   : "=d"(res) : "d"(a), "d"(b) : "memory");
__asm__ volatile ("insert %0,%1,%2,16,8"  : "=d"(res) : "d"(a), "d"(b) : "memory");
__asm__ volatile ("insert %0,%1,%2,24,8"  : "=d"(res) : "d"(a), "d"(b) : "memory");

/* Replace halfword 0 or 1 of a with b (width 16), again in two families. */
__asm__ volatile ("insert %0,%1,%2,0,16"  : "=d"(res) : "d"(a), "d"(b) : "memory");
__asm__ volatile ("insert %0,%1,%2,16,16" : "=d"(res) : "d"(a), "d"(b) : "memory");
__asm__ volatile ("insert %0,%1,%2,0,16"  : "=d"(res) : "d"(a), "d"(b) : "memory");
__asm__ volatile ("insert %0,%1,%2,16,16" : "=d"(res) : "d"(a), "d"(b) : "memory");
/* MIN.B / MIN.BU: byte-wise signed / unsigned minimum of packed bytes. */
__asm__ volatile ("min.b %0,%1,%2"  : "=d"(res) : "d"(a), "d"(b) : "memory");
__asm__ volatile ("min.bu %0,%1,%2" : "=d"(res) : "d"(a), "d"(b) : "memory");

/* MIN.H / MIN.HU: halfword-wise signed / unsigned minimum. */
__asm__ volatile ("min.h %0,%1,%2"  : "=d"(res) : "d"(a), "d"(b) : "memory");
__asm__ volatile ("min.hu %0,%1,%2" : "=d"(res) : "d"(a), "d"(b) : "memory");
/* Field setters: load the word at [a], INSERT the new field from b, and
 * store the word back (the trailing ST.W of each sequence is reconstructed).
 * The six-setter pattern appears twice in the source, once per accessor
 * family. */
__asm__ volatile ("ld.w %%d15,[%0] \n\
                   insert %%d15,%%d15,%1,0,8 \n\
                   st.w [%0],%%d15"
                  : : "a"(a), "d"(b) : "d15", "memory");   /* byte 0 */
__asm__ volatile ("ld.w %%d15,[%0] \n\
                   insert %%d15,%%d15,%1,8,8 \n\
                   st.w [%0],%%d15"
                  : : "a"(a), "d"(b) : "d15", "memory");   /* byte 1 */
__asm__ volatile ("ld.w %%d15,[%0] \n\
                   insert %%d15,%%d15,%1,16,8 \n\
                   st.w [%0],%%d15"
                  : : "a"(a), "d"(b) : "d15", "memory");   /* byte 2 */
__asm__ volatile ("ld.w %%d15,[%0] \n\
                   insert %%d15,%%d15,%1,24,8 \n\
                   st.w [%0],%%d15"
                  : : "a"(a), "d"(b) : "d15", "memory");   /* byte 3 */
__asm__ volatile ("ld.w %%d15,[%0] \n\
                   insert %%d15,%%d15,%1,0,16 \n\
                   st.w [%0],%%d15"
                  : : "a"(a), "d"(b) : "d15", "memory");   /* halfword 0 */
__asm__ volatile ("ld.w %%d15,[%0] \n\
                   insert %%d15,%%d15,%1,16,16 \n\
                   st.w [%0],%%d15"
                  : : "a"(a), "d"(b) : "d15", "memory");   /* halfword 1 */
/* Absolute value via the compiler builtin. */
#define __abs(a) __builtin_abs(a)

/* ABSDIF: |a - b| without intermediate overflow. */
__asm__ volatile ("absdif %0, %1, %2" : "=d"(res) : "d"(a), "d"(b));

/* ABSS: absolute value with saturation (0x80000000 -> 0x7FFFFFFF). */
__asm__ volatile ("abss %0, %1" : "=d"(res) : "d"(a));

/* CLO: count leading ones. */
__asm__ volatile ("clo %0,%1" : "=d"(res) : "d"(a));

/* CLS: count leading sign bits. */
__asm__ volatile ("cls %0,%1" : "=d"(res) : "d"(a));

/* Count leading zeros via the compiler builtin. */
#define __clz(a) __builtin_clz(a)
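/* Usage sketch (illustrative): floor(log2(x)) for nonzero x follows directly
 * from the count-leading-zeros builtin. */
static inline uint32 ilog2(uint32 x)
{
    return 31u - (uint32)__clz(x);   /* undefined for x == 0 */
}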
/* Clear bit 31 (the IEEE-754 sign bit): absolute value of a float bit
 * pattern held in a data register (float64 and float32 variants). */
__asm__ volatile ("insert %0,%1,0,31,1" : "=d"(res) : "d"(d) : "memory");
__asm__ volatile ("insert %0,%1,0,31,1" : "=d"(res) : "d"(f) : "memory");

#if !IFXCPU_INTRINSICSGNUC_USE_MACHINE_INTRINSICS

/* MFCR: read the core special-function register at address regaddr. */
#define __mfcr(regaddr) \
    ({ sint32 res; __asm__ volatile ("mfcr %0,%1" : "=d"(res) : "i"(regaddr) : "memory"); res; })

/* MTCR: write val to the core special-function register at regaddr; the
 * trailing ISYNC makes the update take effect before the next instruction. */
#define __mtcr(regaddr, val) __asm__ volatile ("mtcr %0,%1\n\tisync" : : "i"(regaddr), "d"(val) : "memory")
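/* Usage sketch (illustrative): reading a core special-function register with
 * __mfcr. 0xFE1C is assumed here to be the CORE_ID CSFR offset; prefer the
 * device header's official define. */
#define EXAMPLE_CPU_CORE_ID 0xFE1C   /* assumed CSFR address */

static inline sint32 currentCoreId(void)
{
    return __mfcr(EXAMPLE_CPU_CORE_ID) & 0x7;   /* core index in the low bits */
}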
/* PARITY: byte-wise parity of a. */
__asm__ volatile ("parity %0,%1" : "=d"(res) : "d"(a) : "memory");

/* SAT.B / SAT.BU: saturate a to the signed / unsigned 8-bit range. */
__asm__ volatile ("sat.b %0,%1"  : "=d"(res) : "d"(a));
__asm__ volatile ("sat.bu %0,%1" : "=d"(res) : "d"(a));

/* SAT.H / SAT.HU: saturate a to the signed / unsigned 16-bit range. */
__asm__ volatile ("sat.h %0,%1"  : "=d"(res) : "d"(a));
__asm__ volatile ("sat.hu %0,%1" : "=d"(res) : "d"(a));
/* ADDS / ADDS.U: saturated signed / unsigned addition. */
__asm__ volatile ("adds %0, %1, %2"   : "=d"(res) : "d"(a), "d"(b));
__asm__ volatile ("adds.u %0, %1, %2" : "=d"(res) : "d"(a), "d"(b));

/* SUBS / SUBS.U: saturated signed / unsigned subtraction. */
__asm__ volatile ("subs %0, %1, %2"   : "=d"(res) : "d"(a), "d"(b));
__asm__ volatile ("subs.u %0, %1, %2" : "=d"(res) : "d"(a), "d"(b));
/* DEBUG: trigger a debug breakpoint. */
__asm__ volatile ("debug" : : : "memory");

/* DSYNC: complete all outstanding data accesses before continuing. */
__asm__ volatile ("dsync" : : : "memory");

/* ISYNC: flush the pipeline so preceding context changes take effect. */
__asm__ volatile ("isync" : : : "memory");

/* Masked store: mask is moved into the high word of the value/mask pair,
 * then LDMST writes only the masked bits of *address (the LDMST line of
 * the sequence is reconstructed). */
__asm__ volatile ("mov %H2,%1 \n\
                   ldmst [%0]0,%A2"
                  : : "a"(address), "d"(mask), "d"((long long)value));

/* NOP. */
__asm__ volatile ("nop" : : : "memory");

/* Hardware-loop fragment: LOOP counts on an address register, so the count
 * operand is biased by -1 (the loop template itself is not shown). */
__asm__ volatile ("" : : "a"(((sint8 *)cnt) - 1));

/* RSLCX / SVLCX: restore / save the lower register context. */
__asm__ volatile ("rslcx" : : : "memory");
__asm__ volatile ("svlcx" : : : "memory");

/* SWAP.W: atomically exchange value with the word at place; the wrapper
 * signature is reconstructed from the operand constraints and from the
 * compare-and-swap fallback at the end of this file. */
IFX_INLINE uint32 __swap(void *place, uint32 value)
{
    uint32 res;
    __asm__ volatile ("swap.w [%1]0,%2" : "=d"(res) : "a"(place), "0"(value));
    return res;
}
/* Emit n NOPs via the assembler .rept directive. */
#define NOP(n) __asm(".rept " #n "\n\tnop\n\t.endr\n")

/* EXTR.U: unsigned bit-field extract with immediate position and width. */
#define __extru(src,start,size) \
    ({ sint32 res; asm volatile (" extr.u\t %0,%1,%2,%3" : "=d" (res) : \
                                 "d" (src),"i" (start),"i" (size) : "memory"); res; })

/* Load an address register (a0..a15) from a data value. */
#define __setareg(areg,val) \
    { uint32 reg_val = (uint32)val; \
      asm volatile (" mov.a\t %%"#areg",%0" : : "d"(reg_val)); }
/* Fragment: this sequence begins with "mov %%d0,0"; its remaining
 * instructions are not shown. */
__asm__ volatile ("mov %%d0,0");

/* Compare-and-swap (wrapper name and return convention reconstructed):
 * atomically store value to *address if *address equals condition. */
IFX_INLINE unsigned int __cmpAndSwap(unsigned int volatile *address,
                                     unsigned int value, unsigned int condition)
{
#ifdef IFX_USE_GNUC_TRICORE_1_6
    /* CMPSWAP.W takes value (low word) and condition (high word) as an
     * even/odd register pair and leaves the previous memory content in
     * the low word. */
    __extension__ unsigned long long reg64 =
        value | ((unsigned long long)condition << 32);

    __asm__ __volatile__ ("cmpswap.w [%[addr]]0, %A[reg]"
                          : [reg] "+d" (reg64)
                          : [addr] "a" (address));
    return (unsigned int)reg64;
#else
    /* Fallback for older compilers: compare, then SWAP.W (not atomic
     * between the compare and the swap). */
    unsigned int ret = *address;

    if (condition == *address)
    {
        ret = __swap((void *)address, value);
    }

    return ret;
#endif
}

#endif /* IFXCPU_INTRINSICSGNUC_H */
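/* Usage sketch (illustrative): lock-free increment on top of the
 * compare-and-swap above, assuming the reconstructed convention that it
 * returns the previous memory content. */
static inline unsigned int atomicIncrement(volatile unsigned int *counter)
{
    unsigned int old, seen;

    do
    {
        old  = *counter;
        seen = __cmpAndSwap((unsigned int volatile *)counter, old + 1u, old);
    } while (seen != old);   /* retry if another core intervened */

    return old + 1u;
}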