Wwise SDK 2024.1.0
AkSimdShuffle.h
1 /*******************************************************************************
2 The content of this file includes portions of the AUDIOKINETIC Wwise Technology
3 released in source code form as part of the SDK installer package.
4 
5 Commercial License Usage
6 
7 Licensees holding valid commercial licenses to the AUDIOKINETIC Wwise Technology
8 may use this file in accordance with the end user license agreement provided
9 with the software or, alternatively, in accordance with the terms contained in a
10 written agreement between you and Audiokinetic Inc.
11 
12 Apache License Usage
13 
14 Alternatively, this file may be used under the Apache License, Version 2.0 (the
15 "Apache License"); you may not use this file except in compliance with the
16 Apache License. You may obtain a copy of the Apache License at
17 http://www.apache.org/licenses/LICENSE-2.0.
18 
19 Unless required by applicable law or agreed to in writing, software distributed
20 under the Apache License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
21 OR CONDITIONS OF ANY KIND, either express or implied. See the Apache License for
22 the specific language governing permissions and limitations under the License.
23 
24  Copyright (c) 2024 Audiokinetic Inc.
25 *******************************************************************************/
26 
27 /// \file
28 /// _AKSIMD_LOCAL::SHUFFLE_V4F32<zyxw>(a, b) - arm_neon implementation
29 
30 #ifndef __AK_SIMD_SHUFFLE_H__
31 #define __AK_SIMD_SHUFFLE_H__
32 
33 namespace _AKSIMD_LOCAL
34 {
35  // Same as _mm_shuffle_ps( a, b, _MM_SHUFFLE( z, y, x, w ) ). There is no default implementation
36  // of this template function. Every required combination of zyxw needs to be explicitly implemented
37  // below (a usage sketch follows after the end of this listing).
38  template< int zyxw > AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32( const AKSIMD_V4F32& a, const AKSIMD_V4F32& b );
39 
 40  /* 0 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
 41  {
42  //return akshuffle_xxaa( xyzw, abcd );
43  AKSIMD_V2F32 xx = vdup_lane_f32( vget_low_f32( xyzw ), 0 );
44  AKSIMD_V2F32 aa = vdup_lane_f32( vget_low_f32( abcd ), 0 );
45  AKSIMD_V4F32 xxaa = vcombine_f32( xx, aa );
46  return xxaa;
47  }
48  /* 1 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxaa( xyzw, abcd ); }
49  /* 2 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxaa( xyzw, abcd ); }
50  /* 3 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxaa( xyzw, abcd ); }
51  /* 4 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xyaa( xyzw, abcd ); }
52  /* 5 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yyaa( xyzw, abcd ); }
53  /* 6 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zyaa( xyzw, abcd ); }
54  /* 7 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wyaa( xyzw, abcd ); }
55  /* 8 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzaa( xyzw, abcd ); }
56  /* 9 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yzaa( xyzw, abcd ); }
57  /* 10 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzaa( xyzw, abcd ); }
58  /* 11 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzaa( xyzw, abcd ); }
59  /* 12 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xwaa( xyzw, abcd ); }
60  /* 13 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywaa( xyzw, abcd ); }
61  /* 14 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwaa( xyzw, abcd ); }
 62  /* 15 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
 63  {
64  //return akshuffle_wwaa( xyzw, abcd );
65  AKSIMD_V2F32 ww = vdup_lane_f32( vget_high_f32( xyzw ), 1 );
66  AKSIMD_V2F32 aa = vdup_lane_f32( vget_low_f32( abcd ), 0 );
67  AKSIMD_V4F32 wwaa = vcombine_f32( ww, aa );
68  return wwaa;
69 
70  }
71  /* 16 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxba( xyzw, abcd ); }
 72  /* 17 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
 73  {
74  //return akshuffle_yxba( xyzw, abcd );
75  AKSIMD_V2F32 yx = vrev64_f32( vget_low_f32( xyzw ) );
76  AKSIMD_V2F32 ba = vrev64_f32( vget_low_f32( abcd ) );
77  AKSIMD_V4F32 yxba = vcombine_f32( yx , ba );
78  return yxba;
79  }
80  /* 18 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxba( xyzw, abcd ); }
81  /* 19 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxba( xyzw, abcd ); }
82  /* 20 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xyba( xyzw, abcd ); }
83  /* 21 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yyba( xyzw, abcd ); }
84  /* 22 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zyba( xyzw, abcd ); }
85  /* 23 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wyba( xyzw, abcd ); }
86  /* 24 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzba( xyzw, abcd ); }
87  /* 25 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yzba( xyzw, abcd ); }
88  /* 26 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzba( xyzw, abcd ); }
89  /* 27 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzba( xyzw, abcd ); }
90  /* 28 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xwba( xyzw, abcd ); }
91  /* 29 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywba( xyzw, abcd ); }
 92  /* 30 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
 93  {
94  //return akshuffle_zwba( xyzw, abcd );
95  AKSIMD_V2F32 zw = vget_high_f32( xyzw );
96  AKSIMD_V2F32 ba = vrev64_f32( vget_low_f32( abcd ) );
97  AKSIMD_V4F32 zwba = vcombine_f32( zw , ba );
98  return zwba;
99  }
100  /* 31 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwba( xyzw, abcd ); }
101  /* 32 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxca( xyzw, abcd ); }
102  /* 33 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxca( xyzw, abcd ); }
103  /* 34 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxca( xyzw, abcd ); }
104  /* 35 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxca( xyzw, abcd ); }
105  /* 36 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xyca( xyzw, abcd ); }
106  /* 37 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yyca( xyzw, abcd ); }
107  /* 38 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zyca( xyzw, abcd ); }
108  /* 39 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wyca( xyzw, abcd ); }
109  /* 40 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzca( xyzw, abcd ); }
110  /* 41 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yzca( xyzw, abcd ); }
111  /* 42 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzca( xyzw, abcd ); }
112  /* 43 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzca( xyzw, abcd ); }
113  /* 44 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xwca( xyzw, abcd ); }
114  /* 45 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywca( xyzw, abcd ); }
115  /* 46 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwca( xyzw, abcd ); }
116  /* 47 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwca( xyzw, abcd ); }
117  /* 48 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxda( xyzw, abcd ); }
118  /* 49 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxda( xyzw, abcd ); }
119  /* 50 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxda( xyzw, abcd ); }
120  /* 51 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxda( xyzw, abcd ); }
121  /* 52 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xyda( xyzw, abcd ); }
122  /* 53 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yyda( xyzw, abcd ); }
123  /* 54 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
124  {
125  //return akshuffle_zyda( xyzw, abcd );
126  AKSIMD_V4F32 bcda = vextq_f32( abcd, abcd, 1 );
127  AKSIMD_V4F32 yzwx = vextq_f32( xyzw, xyzw, 1 );
128  AKSIMD_V4F32 zyda = vcombine_f32( vrev64_f32( vget_low_f32( yzwx ) ), vget_high_f32( bcda ) );
129  return zyda;
130  }
131  /* 55 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wyda( xyzw, abcd ); }
132  /* 56 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzda( xyzw, abcd ); }
133  /* 57 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 2, 1)>( const AKSIMD_V4F32& a, const AKSIMD_V4F32& b )
134  {
135  float32x2_t a21 = vget_high_f32(vextq_f32(a, a, 3));
136  float32x2_t b03 = vget_low_f32(vextq_f32(b, b, 3));
137  return vcombine_f32(a21, b03);
138  }
139  /* 58 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzda( xyzw, abcd ); }
140  /* 59 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzda( xyzw, abcd ); }
141  /* 60 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xwda( xyzw, abcd ); }
142  /* 61 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywda( xyzw, abcd ); }
143  /* 62 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwda( xyzw, abcd ); }
144  /* 63 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwda( xyzw, abcd ); }
145  /* 64 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxab( xyzw, abcd ); }
146  /* 65 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxab( xyzw, abcd ); }
147  /* 66 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxab( xyzw, abcd ); }
148  /* 67 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxab( xyzw, abcd ); }
149  /* 68 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
150  {
151  //return akshuffle_xyab( xyzw, abcd );
152  AKSIMD_V4F32 xyab = vcombine_f32( vget_low_f32( xyzw ) , vget_low_f32( abcd ) );
153  return xyab;
154  }
155  /* 69 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yyab( xyzw, abcd ); }
156  /* 70 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zyab( xyzw, abcd ); }
157  /* 71 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wyab( xyzw, abcd ); }
158  /* 72 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzab( xyzw, abcd ); }
159  /* 73 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
160  {
161  //return akshuffle_yzab( xyzw, abcd );
162  AKSIMD_V4F32 yzwx = vextq_f32( xyzw, xyzw, 1 );
163  AKSIMD_V2F32 yz = vget_low_f32( yzwx );
164  AKSIMD_V2F32 ab = vget_low_f32( abcd );
165  AKSIMD_V4F32 yzab = vcombine_f32( yz, ab );
166  return yzab;
167  }
168  /* 74 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzab( xyzw, abcd ); }
169  /* 75 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzab( xyzw, abcd ); }
170  /* 76 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xwab( xyzw, abcd ); }
171  /* 77 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywab( xyzw, abcd ); }
172  /* 78 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
173  {
174  //return akshuffle_zwab( xyzw, abcd );
175  AKSIMD_V2F32 zw = vget_high_f32( xyzw );
176  AKSIMD_V2F32 ab = vget_low_f32( abcd );
177  AKSIMD_V4F32 zwab = vcombine_f32( zw, ab );
178  return zwab;
179  }
180  /* 79 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwab( xyzw, abcd ); }
181  /* 80 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxbb( xyzw, abcd ); }
182  /* 81 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxbb( xyzw, abcd ); }
183  /* 82 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxbb( xyzw, abcd ); }
184  /* 83 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxbb( xyzw, abcd ); }
185  /* 84 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xybb( xyzw, abcd ); }
186  /* 85 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
187  {
188  //return akshuffle_yybb( xyzw, abcd );
189  AKSIMD_V2F32 yy = vdup_lane_f32( vget_low_f32( xyzw ), 1 );
190  AKSIMD_V2F32 bb = vdup_lane_f32( vget_low_f32( abcd ), 1 );
191  AKSIMD_V4F32 yybb = vcombine_f32( yy, bb );
192  return yybb;
193  }
194  /* 86 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zybb( xyzw, abcd ); }
195  /* 87 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wybb( xyzw, abcd ); }
196  /* 88 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzbb( xyzw, abcd ); }
197  /* 89 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yzbb( xyzw, abcd ); }
198  /* 90 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzbb( xyzw, abcd ); }
199  /* 91 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzbb( xyzw, abcd ); }
200  /* 92 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xwbb( xyzw, abcd ); }
201  /* 93 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywbb( xyzw, abcd ); }
202  /* 94 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwbb( xyzw, abcd ); }
203  /* 95 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwbb( xyzw, abcd ); }
204  /* 96 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxcb( xyzw, abcd ); }
205  /* 97 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxcb( xyzw, abcd ); }
206  /* 98 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxcb( xyzw, abcd ); }
207  /* 99 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxcb( xyzw, abcd ); }
208  /* 100 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xycb( xyzw, abcd ); }
209  /* 101 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yycb( xyzw, abcd ); }
210  /* 102 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zycb( xyzw, abcd ); }
211  /* 103 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wycb( xyzw, abcd ); }
212  /* 104 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzcb( xyzw, abcd ); }
213  /* 105 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yzcb( xyzw, abcd ); }
214  /* 106 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzcb( xyzw, abcd ); }
215  /* 107 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzcb( xyzw, abcd ); }
216  /* 108 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xwcb( xyzw, abcd ); }
217  /* 109 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywcb( xyzw, abcd ); }
218  /* 110 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwcb( xyzw, abcd ); }
219  /* 111 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwcb( xyzw, abcd ); }
220  /* 112 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxdb( xyzw, abcd ); }
221  /* 113 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxdb( xyzw, abcd ); }
222  /* 114 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
223  {
224  //return akshuffle_zxdb( xyzw, abcd );
225 
226  float32x2x2_t xz_yw = vtrn_f32( vget_low_f32( xyzw ), vget_high_f32( xyzw ) );
227  float32x2x2_t ac_bd = vtrn_f32( vget_low_f32( abcd ), vget_high_f32( abcd ) );
228  AKSIMD_V4F32 zxdb = vcombine_f32( vrev64_f32( xz_yw.val[0] ), vrev64_f32( ac_bd.val[1] ) );
229 
230  return zxdb;
231  }
232  /* 115 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxdb( xyzw, abcd ); }
233  /* 116 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xydb( xyzw, abcd ); }
234  /* 117 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yydb( xyzw, abcd ); }
235  /* 118 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zydb( xyzw, abcd ); }
236  /* 119 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wydb( xyzw, abcd ); }
237  /* 120 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzdb( xyzw, abcd ); }
238  /* 121 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yzdb( xyzw, abcd ); }
239  /* 122 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzdb( xyzw, abcd ); }
240  /* 123 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzdb( xyzw, abcd ); }
241  /* 124 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xwdb( xyzw, abcd ); }
242  /* 125 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywdb( xyzw, abcd ); }
243  /* 126 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwdb( xyzw, abcd ); }
244  /* 127 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwdb( xyzw, abcd ); }
245  /* 128 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxac( xyzw, abcd ); }
246  /* 129 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
247  {
248  //return akshuffle_yxac( xyzw, abcd );
249  AKSIMD_V2F32 ac = vtrn_f32( vget_low_f32( abcd ), vget_high_f32( abcd ) ).val[0];
250  AKSIMD_V2F32 yx = vrev64_f32( vget_low_f32( xyzw ) );
251  AKSIMD_V4F32 yxac = vcombine_f32( yx, ac );
252  return yxac;
253  }
254  /* 130 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxac( xyzw, abcd ); }
255  /* 131 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxac( xyzw, abcd ); }
256  /* 132 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xyac( xyzw, abcd ); }
257  /* 133 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yyac( xyzw, abcd ); }
258  /* 134 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zyac( xyzw, abcd ); }
259  /* 135 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wyac( xyzw, abcd ); }
260  /* 136 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
261  {
262  //return akshuffle_xzac( xyzw, abcd );
263  float32x2x2_t xz_yw = vtrn_f32( vget_low_f32( xyzw ), vget_high_f32( xyzw ) );
264  float32x2x2_t ac_bd = vtrn_f32( vget_low_f32( abcd ), vget_high_f32( abcd ) );
265  AKSIMD_V4F32 xzac = vcombine_f32( xz_yw.val[0], ac_bd.val[0] );
266  return xzac;
267  }
268  /* 137 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
269  {
270  //return akshuffle_yzac( xyzw, abcd );
271  AKSIMD_V4F32 yzwx = vextq_f32( xyzw, xyzw, 1 );
272  AKSIMD_V2F32 yz = vget_low_f32( yzwx );
273  float32x2x2_t ac_bd = vtrn_f32( vget_low_f32( abcd ), vget_high_f32( abcd ) );
274  AKSIMD_V4F32 yzac = vcombine_f32( yz, ac_bd.val[0] );
275  return yzac;
276  }
277  /* 138 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzac( xyzw, abcd ); }
278  /* 139 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzac( xyzw, abcd ); }
279  /* 140 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
280  {
281  //return akshuffle_xwac( xyzw, abcd );
282  AKSIMD_V2F32 xw = vrev64_f32( vext_f32( vget_high_f32( xyzw ) , vget_low_f32( xyzw ) , 1 ) );
283  float32x2x2_t ac_bd = vtrn_f32( vget_low_f32( abcd ), vget_high_f32( abcd ) );
284  AKSIMD_V4F32 xwac = vcombine_f32( xw, ac_bd.val[0] );
285  return xwac;
286  }
287  /* 141 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
288  {
289  //return akshuffle_ywac( xyzw, abcd );
290  float32x2x2_t xz_yw = vtrn_f32( vget_low_f32( xyzw ), vget_high_f32( xyzw ) );
291  float32x2x2_t ac_bd = vtrn_f32( vget_low_f32( abcd ), vget_high_f32( abcd ) );
292  AKSIMD_V4F32 ywac = vcombine_f32( xz_yw.val[1], ac_bd.val[0] );
293  return ywac;
294  }
295  /* 142 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwac( xyzw, abcd ); }
296  /* 143 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwac( xyzw, abcd ); }
297  /* 144 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxbc( xyzw, abcd ); }
298  /* 145 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxbc( xyzw, abcd ); }
299  /* 146 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxbc( xyzw, abcd ); }
300  /* 147 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxbc( xyzw, abcd ); }
301  /* 148 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xybc( xyzw, abcd ); }
302  /* 149 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yybc( xyzw, abcd ); }
303  /* 150 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zybc( xyzw, abcd ); }
304  /* 151 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wybc( xyzw, abcd ); }
305  /* 152 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzbc( xyzw, abcd ); }
306  /* 153 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
307  {
308  //return akshuffle_yzbc( xyzw, abcd );
309  AKSIMD_V4F32 yzwx = vextq_f32( xyzw, xyzw, 1 );
310  AKSIMD_V2F32 yz = vget_low_f32( yzwx );
311  AKSIMD_V2F32 bc = vext_f32( vget_low_f32( abcd ) , vget_high_f32( abcd ) , 1 );
312  AKSIMD_V4F32 yzbc = vcombine_f32( yz , bc );
313  return yzbc;
314  }
315  /* 154 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzbc( xyzw, abcd ); }
316  /* 155 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzbc( xyzw, abcd ); }
317  /* 156 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
318  {
319  //return akshuffle_xwbc( xyzw, abcd );
320  AKSIMD_V2F32 xw = vrev64_f32( vext_f32( vget_high_f32( xyzw ) , vget_low_f32( xyzw ) , 1 ) );
321  AKSIMD_V2F32 bc = vext_f32( vget_low_f32( abcd ) , vget_high_f32( abcd ) , 1 );
322  AKSIMD_V4F32 xwbc = vcombine_f32( xw, bc );
323  return xwbc;
324  }
325  /* 157 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
326  {
327  //return akshuffle_ywbc( xyzw, abcd );
328  AKSIMD_V2F32 yw = vext_f32( vget_low_f32( xyzw ) , vrev64_f32( vget_high_f32( xyzw ) ), 1 );
329  AKSIMD_V2F32 bc = vext_f32( vget_low_f32( abcd ) , vget_high_f32( abcd ) , 1 );
330  AKSIMD_V4F32 ywbc = vcombine_f32( yw, bc );
331  return ywbc;
332  }
333  /* 158 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwbc( xyzw, abcd ); }
334  /* 159 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwbc( xyzw, abcd ); }
335  /* 160 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
336  {
337  AKSIMD_V2F32 xx = vdup_lane_f32( vget_low_f32( xyzw ), 0 );
338  AKSIMD_V2F32 cc = vdup_lane_f32( vget_high_f32( abcd ), 0 );
339  AKSIMD_V4F32 xxcc = vcombine_f32( xx, cc );
340  return xxcc;
341  }
342  /* 161 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxcc( xyzw, abcd ); }
343  /* 162 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxcc( xyzw, abcd ); }
344  /* 163 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxcc( xyzw, abcd ); }
345  /* 164 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xycc( xyzw, abcd ); }
346  /* 165 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yycc( xyzw, abcd ); }
347  /* 166 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zycc( xyzw, abcd ); }
348  /* 167 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wycc( xyzw, abcd ); }
349  /* 168 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzcc( xyzw, abcd ); }
350  /* 169 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yzcc( xyzw, abcd ); }
351  /* 170 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
352  {
353  AKSIMD_V2F32 zz = vdup_lane_f32( vget_high_f32( xyzw ), 0 );
354  AKSIMD_V2F32 cc = vdup_lane_f32( vget_high_f32( abcd ), 0 );
355  AKSIMD_V4F32 zzcc = vcombine_f32( zz, cc );
356  return zzcc;
357  }
358  /* 171 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzcc( xyzw, abcd ); }
359  /* 172 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xwcc( xyzw, abcd ); }
360  /* 173 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywcc( xyzw, abcd ); }
361  /* 174 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwcc( xyzw, abcd ); }
362  /* 175 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwcc( xyzw, abcd ); }
363  /* 176 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxdc( xyzw, abcd ); }
364  /* 177 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
365  {
366  AKSIMD_V2F32 yx = vrev64_f32( vget_low_f32( xyzw ) );
367  AKSIMD_V2F32 dc = vrev64_f32( vget_high_f32( abcd ) );
368  AKSIMD_V4F32 yxdc = vcombine_f32( yx, dc );
369  return yxdc;
370  }
371  /* 178 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxdc( xyzw, abcd ); }
372  /* 179 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxdc( xyzw, abcd ); }
373  /* 180 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xydc( xyzw, abcd ); }
374  /* 181 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yydc( xyzw, abcd ); }
375  /* 182 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zydc( xyzw, abcd ); }
376  /* 183 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wydc( xyzw, abcd ); }
377  /* 184 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzdc( xyzw, abcd ); }
378  /* 185 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yzdc( xyzw, abcd ); }
379  /* 186 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzdc( xyzw, abcd ); }
380  /* 187 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
381  {
382  //return akshuffle_wzdc( xyzw, abcd );
383  AKSIMD_V2F32 wz = vrev64_f32( vget_high_f32( xyzw ) );
384  AKSIMD_V2F32 dc = vrev64_f32( vget_high_f32( abcd ) );
385  AKSIMD_V4F32 wzdc = vcombine_f32( wz , dc );
386  return wzdc;
387  }
388  /* 188 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xwdc( xyzw, abcd ); }
389  /* 189 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywdc( xyzw, abcd ); }
390  /* 190 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwdc( xyzw, abcd ); }
391  /* 191 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwdc( xyzw, abcd ); }
392  /* 192 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxad( xyzw, abcd ); }
393  /* 193 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxad( xyzw, abcd ); }
394  /* 194 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxad( xyzw, abcd ); }
395  /* 195 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxad( xyzw, abcd ); }
396  /* 196 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xyad( xyzw, abcd ); }
397  /* 197 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yyad( xyzw, abcd ); }
398  /* 198 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zyad( xyzw, abcd ); }
399  /* 199 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wyad( xyzw, abcd ); }
400  /* 200 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
401  {
402  //return akshuffle_xzad( xyzw, abcd );
403  AKSIMD_V2F32 xz = vext_f32( vrev64_f32( vget_low_f32( xyzw ) ), vget_high_f32( xyzw ) , 1 );
404  AKSIMD_V2F32 ad = vrev64_f32( vext_f32( vget_high_f32( abcd ) , vget_low_f32( abcd ) , 1 ) );
405  AKSIMD_V4F32 xzad = vcombine_f32( xz , ad );
406  return xzad;
407  }
408  /* 201 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
409  {
410  //return akshuffle_yzad( xyzw, abcd );
411  AKSIMD_V4F32 yzwx = vextq_f32( xyzw, xyzw, 1 );
412  AKSIMD_V2F32 yz = vget_low_f32( yzwx );
413  AKSIMD_V2F32 ad = vrev64_f32( vext_f32( vget_high_f32( abcd ) , vget_low_f32( abcd ) , 1 ) );
414  AKSIMD_V4F32 yzad = vcombine_f32( yz , ad );
415  return yzad;
416  }
417  /* 202 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzad( xyzw, abcd ); }
418  /* 203 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzad( xyzw, abcd ); }
419  /* 204 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
420  {
421  //return akshuffle_xwad( xyzw, abcd );
422  AKSIMD_V2F32 xw = vrev64_f32( vext_f32( vget_high_f32( xyzw ) , vget_low_f32( xyzw ) , 1 ) );
423  AKSIMD_V2F32 ad = vrev64_f32( vext_f32( vget_high_f32( abcd ) , vget_low_f32( abcd ) , 1 ) );
424  AKSIMD_V4F32 xwad = vcombine_f32( xw, ad );
425  return xwad;
426  }
427  /* 205 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywad( xyzw, abcd ); }
428  /* 206 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwad( xyzw, abcd ); }
429  /* 207 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwad( xyzw, abcd ); }
430  /* 208 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxbd( xyzw, abcd ); }
431  /* 209 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxbd( xyzw, abcd ); }
432  /* 210 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxbd( xyzw, abcd ); }
433  /* 211 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxbd( xyzw, abcd ); }
434  /* 212 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xybd( xyzw, abcd ); }
435  /* 213 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yybd( xyzw, abcd ); }
436  /* 214 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zybd( xyzw, abcd ); }
437  /* 215 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wybd( xyzw, abcd ); }
438  /* 216 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
439  {
440  //return akshuffle_xzbd( xyzw, abcd );
441  float32x2x2_t xz_yw = vtrn_f32( vget_low_f32( xyzw ), vget_high_f32( xyzw ) );
442  float32x2x2_t ac_bd = vtrn_f32( vget_low_f32( abcd ), vget_high_f32( abcd ) );
443  AKSIMD_V4F32 xzbd = vcombine_f32( xz_yw.val[0], ac_bd.val[1] );
444  return xzbd;
445  }
446  /* 217 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
447  {
448  //return akshuffle_yzbd( xyzw, abcd );
449  AKSIMD_V4F32 yzwx = vextq_f32( xyzw, xyzw, 1 );
450  AKSIMD_V2F32 yz = vget_low_f32( yzwx );
451  float32x2x2_t ac_bd = vtrn_f32( vget_low_f32( abcd ), vget_high_f32( abcd ) );
452  AKSIMD_V4F32 yzbd = vcombine_f32( yz, ac_bd.val[1] );
453  return yzbd;
454  }
455  /* 218 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzbd( xyzw, abcd ); }
456  /* 219 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzbd( xyzw, abcd ); }
457  /* 220 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
458  {
459  //return akshuffle_xwbd( xyzw, abcd );
460  AKSIMD_V2F32 xw = vrev64_f32( vext_f32( vget_high_f32( xyzw ) , vget_low_f32( xyzw ) , 1 ) );
461  AKSIMD_V2F32 bd = vext_f32( vget_low_f32( abcd ) , vrev64_f32( vget_high_f32( abcd ) ), 1 );
462 
463  AKSIMD_V4F32 xwbd = vcombine_f32( xw, bd );
464  return xwbd;
465  }
466  /* 221 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
467  {
468  //return akshuffle_ywbd( xyzw, abcd );
469  float32x2x2_t xz_yw = vtrn_f32( vget_low_f32( xyzw ), vget_high_f32( xyzw ) );
470  float32x2x2_t ac_bd = vtrn_f32( vget_low_f32( abcd ), vget_high_f32( abcd ) );
471  AKSIMD_V4F32 ywbd = vcombine_f32( xz_yw.val[1], ac_bd.val[1] );
472  return ywbd;
473  }
474  /* 222 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwbd( xyzw, abcd ); }
475  /* 223 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwbd( xyzw, abcd ); }
476  /* 224 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxcd( xyzw, abcd ); }
477  /* 225 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxcd( xyzw, abcd ); }
478  /* 226 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxcd( xyzw, abcd ); }
479  /* 227 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxcd( xyzw, abcd ); }
480  /* 228 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
481  {
482  //return akshuffle_xycd( xyzw, abcd );
483  AKSIMD_V2F32 xy = vget_low_f32( xyzw );
484  AKSIMD_V2F32 cd = vget_high_f32( abcd );
485  return vcombine_f32( xy, cd );
486  }
487  /* 229 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yycd( xyzw, abcd ); }
488  /* 230 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zycd( xyzw, abcd ); }
489  /* 231 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wycd( xyzw, abcd ); }
490  /* 232 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzcd( xyzw, abcd ); }
491  /* 233 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yzcd( xyzw, abcd ); }
492  /* 234 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzcd( xyzw, abcd ); }
493  /* 235 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzcd( xyzw, abcd ); }
494  /* 236 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xwcd( xyzw, abcd ); }
495  /* 237 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywcd( xyzw, abcd ); }
496  /* 238 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
497  {
498  //return akshuffle_zwcd( xyzw, abcd );
499  AKSIMD_V2F32 zw = vget_high_f32( xyzw );
500  AKSIMD_V2F32 cd = vget_high_f32( abcd );
501  AKSIMD_V4F32 zwcd = vcombine_f32( zw , cd );
502  return zwcd;
503  }
504  /* 239 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwcd( xyzw, abcd ); }
505  /* 240 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxdd( xyzw, abcd ); }
506  /* 241 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxdd( xyzw, abcd ); }
507  /* 242 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxdd( xyzw, abcd ); }
508  /* 243 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxdd( xyzw, abcd ); }
509  /* 244 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xydd( xyzw, abcd ); }
510  /* 245 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
511  {
512  AKSIMD_V2F32 yy = vdup_lane_f32( vget_low_f32( xyzw ), 1 );
513  AKSIMD_V2F32 dd = vdup_lane_f32( vget_high_f32( abcd ), 1 );
514  AKSIMD_V4F32 yydd = vcombine_f32( yy, dd );
515  return yydd;
516  }
517  /* 246 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zydd( xyzw, abcd ); }
518  /* 247 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wydd( xyzw, abcd ); }
519  /* 248 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzdd( xyzw, abcd ); }
520  /* 249 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yzdd( xyzw, abcd ); }
521  /* 250 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzdd( xyzw, abcd ); }
522  /* 251 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzdd( xyzw, abcd ); }
523  /* 252 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xwdd( xyzw, abcd ); }
524  /* 253 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywdd( xyzw, abcd ); }
525  /* 254 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwdd( xyzw, abcd ); }
526  /* 255 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
527  {
528  //return akshuffle_wwdd( xyzw, abcd );
529  AKSIMD_V2F32 ww = vdup_lane_f32( vget_high_f32( xyzw ), 1 );
530  AKSIMD_V2F32 dd = vdup_lane_f32( vget_high_f32( abcd ), 1 );
531  AKSIMD_V4F32 wwdd = vcombine_f32( ww, dd );
532  return wwdd;
533  }
534 }
535 
536 #endif // __AK_SIMD_SHUFFLE_H__
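The template above mirrors SSE lane selection on NEON: SHUFFLE_V4F32< AKSIMD_SHUFFLE(z, y, x, w) >( a, b ) returns { a[w], a[x], b[y], b[z] }, just as _mm_shuffle_ps does with _MM_SHUFFLE. Below is a minimal usage sketch, not part of the shipped header; it assumes the NEON AkSimd.h (which includes this file and defines AKSIMD_V4F32, AKSIMD_SHUFFLE and AkForceInline) has already been included, and the helper name MergeHighLow is hypothetical.

// Usage sketch only, not part of the SDK.
AkForceInline AKSIMD_V4F32 MergeHighLow( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
{
	// AKSIMD_SHUFFLE(0, 1, 3, 2) selects { xyzw[2], xyzw[3], abcd[1], abcd[0] } = { z, w, b, a },
	// dispatching to the specialization defined at source line 92 of the listing above.
	return _AKSIMD_LOCAL::SHUFFLE_V4F32< AKSIMD_SHUFFLE(0, 1, 3, 2) >( xyzw, abcd );
}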
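Only the lane combinations the sound engine actually needs are specialized; the remaining ones stay commented out, so a build that instantiates one of them will typically fail at link time because the primary template is declared but never defined. As an illustration only (not part of the shipped header), a missing combination such as AKSIMD_SHUFFLE(0, 0, 1, 1), the yyaa case listed as /* 5 */ above, could be filled in with the same NEON idioms used by the xxaa and yybb specializations:

// Hypothetical specialization sketch, modelled on the existing xxaa (line 40) and yybb (line 186) cases.
namespace _AKSIMD_LOCAL
{
	/* 5 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32< AKSIMD_SHUFFLE(0, 0, 1, 1) >( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
	{
		AKSIMD_V2F32 yy = vdup_lane_f32( vget_low_f32( xyzw ), 1 );  // { y, y }
		AKSIMD_V2F32 aa = vdup_lane_f32( vget_low_f32( abcd ), 0 );  // { a, a }
		return vcombine_f32( yy, aa );                               // { y, y, a, a }
	}
}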