ESPResSo
Extensible Simulation Package for Research on Soft Matter Systems
Loading...
Searching...
No Matches
CollideSweepDoublePrecisionThermalizedAVX.cpp
Go to the documentation of this file.
1//======================================================================================================================
2//
3// This file is part of waLBerla. waLBerla is free software: you can
4// redistribute it and/or modify it under the terms of the GNU General Public
5// License as published by the Free Software Foundation, either version 3 of
6// the License, or (at your option) any later version.
7//
8// waLBerla is distributed in the hope that it will be useful, but WITHOUT
9// ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
10// FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
11// for more details.
12//
13// You should have received a copy of the GNU General Public License along
14// with waLBerla (see COPYING.txt). If not, see <http://www.gnu.org/licenses/>.
15//
16//! \file CollideSweepDoublePrecisionThermalizedAVX.cpp
17//! \ingroup lbm
18//! \author lbmpy
19//======================================================================================================================
20
21// kernel generated with pystencils v1.2, lbmpy v1.2, lbmpy_walberla/pystencils_walberla from waLBerla commit 4d10e7f2358fc4a4f7e99195d0f67f0b759ecb6f
22
23#include <cmath>
24
26#include "core/DataTypes.h"
27#include "core/Macros.h"
28
29#include <immintrin.h>
30
31#include "philox_rand.h"
32
33#define FUNC_PREFIX
34
35#if (defined WALBERLA_CXX_COMPILER_IS_GNU) || (defined WALBERLA_CXX_COMPILER_IS_CLANG)
36#pragma GCC diagnostic push
37#pragma GCC diagnostic ignored "-Wfloat-equal"
38#pragma GCC diagnostic ignored "-Wshadow"
39#pragma GCC diagnostic ignored "-Wconversion"
40#pragma GCC diagnostic ignored "-Wunused-variable"
41#endif
42
43#if (defined WALBERLA_CXX_COMPILER_IS_INTEL)
44#pragma warning push
45#pragma warning(disable : 1599)
46#endif
47
48using namespace std;
49
50namespace walberla {
51namespace pystencils {
52
53namespace internal_25bc51f30ec2c20f3ee9796f7dcb65c6 {
54static FUNC_PREFIX void collidesweepdoubleprecisionthermalizedavx_collidesweepdoubleprecisionthermalizedavx(double *RESTRICT const _data_force, double *RESTRICT _data_pdfs, int64_t const _size_force_0, int64_t const _size_force_1, int64_t const _size_force_2, int64_t const _stride_force_1, int64_t const _stride_force_2, int64_t const _stride_force_3, int64_t const _stride_pdfs_1, int64_t const _stride_pdfs_2, int64_t const _stride_pdfs_3, uint32_t block_offset_0, uint32_t block_offset_1, uint32_t block_offset_2, double kT, double omega_bulk, double omega_even, double omega_odd, double omega_shear, uint32_t seed, uint32_t time_step) {
55 const double xi_28 = omega_bulk * 0.5;
56 const double xi_55 = omega_shear * 0.041666666666666664;
57 const double xi_60 = omega_bulk * 0.041666666666666664;
58 const double xi_71 = omega_shear * 0.125;
59 const double xi_109 = 2.4494897427831779;
60 const double xi_134 = omega_odd * 0.25;
61 const double xi_145 = omega_odd * 0.083333333333333329;
62 const double xi_198 = omega_shear * 0.25;
63 const double xi_211 = omega_odd * 0.041666666666666664;
64 const double xi_213 = omega_odd * 0.125;
65 const double rr_0 = 0.0;
66 const double xi_53 = rr_0 * 0.041666666666666664;
67 for (int64_t ctr_2 = 0; ctr_2 < _size_force_2; ctr_2 += 1) {
68 double *RESTRICT _data_pdfs_20_34 = _data_pdfs + _stride_pdfs_2 * ctr_2 + 4 * _stride_pdfs_3;
69 double *RESTRICT _data_pdfs_20_36 = _data_pdfs + _stride_pdfs_2 * ctr_2 + 6 * _stride_pdfs_3;
70 double *RESTRICT _data_pdfs_20_315 = _data_pdfs + _stride_pdfs_2 * ctr_2 + 15 * _stride_pdfs_3;
71 double *RESTRICT _data_pdfs_20_310 = _data_pdfs + _stride_pdfs_2 * ctr_2 + 10 * _stride_pdfs_3;
72 double *RESTRICT _data_pdfs_20_312 = _data_pdfs + _stride_pdfs_2 * ctr_2 + 12 * _stride_pdfs_3;
73 double *RESTRICT _data_pdfs_20_318 = _data_pdfs + _stride_pdfs_2 * ctr_2 + 18 * _stride_pdfs_3;
74 double *RESTRICT _data_pdfs_20_39 = _data_pdfs + _stride_pdfs_2 * ctr_2 + 9 * _stride_pdfs_3;
75 double *RESTRICT _data_pdfs_20_31 = _data_pdfs + _stride_pdfs_2 * ctr_2 + _stride_pdfs_3;
76 double *RESTRICT _data_pdfs_20_37 = _data_pdfs + _stride_pdfs_2 * ctr_2 + 7 * _stride_pdfs_3;
77 double *RESTRICT _data_pdfs_20_30 = _data_pdfs + _stride_pdfs_2 * ctr_2;
78 double *RESTRICT _data_force_20_31 = _data_force + _stride_force_2 * ctr_2 + _stride_force_3;
79 double *RESTRICT _data_pdfs_20_316 = _data_pdfs + _stride_pdfs_2 * ctr_2 + 16 * _stride_pdfs_3;
80 double *RESTRICT _data_pdfs_20_313 = _data_pdfs + _stride_pdfs_2 * ctr_2 + 13 * _stride_pdfs_3;
81 double *RESTRICT _data_pdfs_20_38 = _data_pdfs + _stride_pdfs_2 * ctr_2 + 8 * _stride_pdfs_3;
82 double *RESTRICT _data_pdfs_20_33 = _data_pdfs + _stride_pdfs_2 * ctr_2 + 3 * _stride_pdfs_3;
83 double *RESTRICT _data_force_20_32 = _data_force + _stride_force_2 * ctr_2 + 2 * _stride_force_3;
84 double *RESTRICT _data_pdfs_20_314 = _data_pdfs + _stride_pdfs_2 * ctr_2 + 14 * _stride_pdfs_3;
85 double *RESTRICT _data_force_20_30 = _data_force + _stride_force_2 * ctr_2;
86 double *RESTRICT _data_pdfs_20_317 = _data_pdfs + _stride_pdfs_2 * ctr_2 + 17 * _stride_pdfs_3;
87 double *RESTRICT _data_pdfs_20_311 = _data_pdfs + _stride_pdfs_2 * ctr_2 + 11 * _stride_pdfs_3;
88 double *RESTRICT _data_pdfs_20_32 = _data_pdfs + _stride_pdfs_2 * ctr_2 + 2 * _stride_pdfs_3;
89 double *RESTRICT _data_pdfs_20_35 = _data_pdfs + _stride_pdfs_2 * ctr_2 + 5 * _stride_pdfs_3;
90 for (int64_t ctr_1 = 0; ctr_1 < _size_force_1; ctr_1 += 1) {
91 double *RESTRICT _data_pdfs_20_34_10 = _stride_pdfs_1 * ctr_1 + _data_pdfs_20_34;
92 double *RESTRICT _data_pdfs_20_36_10 = _stride_pdfs_1 * ctr_1 + _data_pdfs_20_36;
93 double *RESTRICT _data_pdfs_20_315_10 = _stride_pdfs_1 * ctr_1 + _data_pdfs_20_315;
94 double *RESTRICT _data_pdfs_20_310_10 = _stride_pdfs_1 * ctr_1 + _data_pdfs_20_310;
95 double *RESTRICT _data_pdfs_20_312_10 = _stride_pdfs_1 * ctr_1 + _data_pdfs_20_312;
96 double *RESTRICT _data_pdfs_20_318_10 = _stride_pdfs_1 * ctr_1 + _data_pdfs_20_318;
97 double *RESTRICT _data_pdfs_20_39_10 = _stride_pdfs_1 * ctr_1 + _data_pdfs_20_39;
98 double *RESTRICT _data_pdfs_20_31_10 = _stride_pdfs_1 * ctr_1 + _data_pdfs_20_31;
99 double *RESTRICT _data_pdfs_20_37_10 = _stride_pdfs_1 * ctr_1 + _data_pdfs_20_37;
100 double *RESTRICT _data_pdfs_20_30_10 = _stride_pdfs_1 * ctr_1 + _data_pdfs_20_30;
101 double *RESTRICT _data_force_20_31_10 = _stride_force_1 * ctr_1 + _data_force_20_31;
102 double *RESTRICT _data_pdfs_20_316_10 = _stride_pdfs_1 * ctr_1 + _data_pdfs_20_316;
103 double *RESTRICT _data_pdfs_20_313_10 = _stride_pdfs_1 * ctr_1 + _data_pdfs_20_313;
104 double *RESTRICT _data_pdfs_20_38_10 = _stride_pdfs_1 * ctr_1 + _data_pdfs_20_38;
105 double *RESTRICT _data_pdfs_20_33_10 = _stride_pdfs_1 * ctr_1 + _data_pdfs_20_33;
106 double *RESTRICT _data_force_20_32_10 = _stride_force_1 * ctr_1 + _data_force_20_32;
107 double *RESTRICT _data_pdfs_20_314_10 = _stride_pdfs_1 * ctr_1 + _data_pdfs_20_314;
108 double *RESTRICT _data_force_20_30_10 = _stride_force_1 * ctr_1 + _data_force_20_30;
109 double *RESTRICT _data_pdfs_20_317_10 = _stride_pdfs_1 * ctr_1 + _data_pdfs_20_317;
110 double *RESTRICT _data_pdfs_20_311_10 = _stride_pdfs_1 * ctr_1 + _data_pdfs_20_311;
111 double *RESTRICT _data_pdfs_20_32_10 = _stride_pdfs_1 * ctr_1 + _data_pdfs_20_32;
112 double *RESTRICT _data_pdfs_20_35_10 = _stride_pdfs_1 * ctr_1 + _data_pdfs_20_35;
113 {
114 for (int64_t ctr_0 = 0; ctr_0 < (int64_t)((_size_force_0) / (4)) * (4); ctr_0 += 4) {
115 const __m256d xi_244 = _mm256_load_pd(&_data_pdfs_20_34_10[ctr_0]);
116 const __m256d xi_245 = _mm256_load_pd(&_data_pdfs_20_36_10[ctr_0]);
117 const __m256d xi_246 = _mm256_load_pd(&_data_pdfs_20_315_10[ctr_0]);
118 const __m256d xi_247 = _mm256_load_pd(&_data_pdfs_20_310_10[ctr_0]);
119 const __m256d xi_248 = _mm256_load_pd(&_data_pdfs_20_312_10[ctr_0]);
120 const __m256d xi_249 = _mm256_load_pd(&_data_pdfs_20_318_10[ctr_0]);
121 const __m256d xi_250 = _mm256_load_pd(&_data_pdfs_20_39_10[ctr_0]);
122 const __m256d xi_251 = _mm256_load_pd(&_data_pdfs_20_31_10[ctr_0]);
123 const __m256d xi_252 = _mm256_load_pd(&_data_pdfs_20_37_10[ctr_0]);
124 const __m256d xi_253 = _mm256_load_pd(&_data_pdfs_20_30_10[ctr_0]);
125 const __m256d xi_254 = _mm256_load_pd(&_data_force_20_31_10[ctr_0]);
126 const __m256d xi_255 = _mm256_load_pd(&_data_pdfs_20_316_10[ctr_0]);
127 const __m256d xi_256 = _mm256_load_pd(&_data_pdfs_20_313_10[ctr_0]);
128 const __m256d xi_257 = _mm256_load_pd(&_data_pdfs_20_38_10[ctr_0]);
129 const __m256d xi_258 = _mm256_load_pd(&_data_pdfs_20_33_10[ctr_0]);
130 const __m256d xi_259 = _mm256_load_pd(&_data_force_20_32_10[ctr_0]);
131 const __m256d xi_260 = _mm256_load_pd(&_data_pdfs_20_314_10[ctr_0]);
132 const __m256d xi_261 = _mm256_load_pd(&_data_force_20_30_10[ctr_0]);
133 const __m256d xi_262 = _mm256_load_pd(&_data_pdfs_20_317_10[ctr_0]);
134 const __m256d xi_263 = _mm256_load_pd(&_data_pdfs_20_311_10[ctr_0]);
135 const __m256d xi_264 = _mm256_load_pd(&_data_pdfs_20_32_10[ctr_0]);
136 const __m256d xi_265 = _mm256_load_pd(&_data_pdfs_20_35_10[ctr_0]);
137
138 __m256d random_7_0{};
139 __m256d random_7_1{};
140 if (kT > 0.) {
141 philox_double2(time_step, _mm256_add_epi32(_mm256_add_epi32(_mm256_set_epi32(7, 6, 5, 4, 3, 2, 1, 0), _mm256_set_epi32(ctr_0, ctr_0, ctr_0, ctr_0, ctr_0, ctr_0, ctr_0, ctr_0)), _mm256_set_epi32(((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)))), block_offset_1 + ctr_1, block_offset_2 + ctr_2, 7, seed, random_7_0, random_7_1);
142 }
143
144 __m256d random_6_0{};
145 __m256d random_6_1{};
146 if (kT > 0.) {
147 philox_double2(time_step, _mm256_add_epi32(_mm256_add_epi32(_mm256_set_epi32(7, 6, 5, 4, 3, 2, 1, 0), _mm256_set_epi32(ctr_0, ctr_0, ctr_0, ctr_0, ctr_0, ctr_0, ctr_0, ctr_0)), _mm256_set_epi32(((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)))), block_offset_1 + ctr_1, block_offset_2 + ctr_2, 6, seed, random_6_0, random_6_1);
148 }
149
150 __m256d random_5_0{};
151 __m256d random_5_1{};
152 if (kT > 0.) {
153 philox_double2(time_step, _mm256_add_epi32(_mm256_add_epi32(_mm256_set_epi32(7, 6, 5, 4, 3, 2, 1, 0), _mm256_set_epi32(ctr_0, ctr_0, ctr_0, ctr_0, ctr_0, ctr_0, ctr_0, ctr_0)), _mm256_set_epi32(((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)))), block_offset_1 + ctr_1, block_offset_2 + ctr_2, 5, seed, random_5_0, random_5_1);
154 }
155
156 __m256d random_4_0{};
157 __m256d random_4_1{};
158 if (kT > 0.) {
159 philox_double2(time_step, _mm256_add_epi32(_mm256_add_epi32(_mm256_set_epi32(7, 6, 5, 4, 3, 2, 1, 0), _mm256_set_epi32(ctr_0, ctr_0, ctr_0, ctr_0, ctr_0, ctr_0, ctr_0, ctr_0)), _mm256_set_epi32(((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)))), block_offset_1 + ctr_1, block_offset_2 + ctr_2, 4, seed, random_4_0, random_4_1);
160 }
161
162 __m256d random_3_0{};
163 __m256d random_3_1{};
164 if (kT > 0.) {
165 philox_double2(time_step, _mm256_add_epi32(_mm256_add_epi32(_mm256_set_epi32(7, 6, 5, 4, 3, 2, 1, 0), _mm256_set_epi32(ctr_0, ctr_0, ctr_0, ctr_0, ctr_0, ctr_0, ctr_0, ctr_0)), _mm256_set_epi32(((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)))), block_offset_1 + ctr_1, block_offset_2 + ctr_2, 3, seed, random_3_0, random_3_1);
166 }
167
168 __m256d random_2_0{};
169 __m256d random_2_1{};
170 if (kT > 0.) {
171 philox_double2(time_step, _mm256_add_epi32(_mm256_add_epi32(_mm256_set_epi32(7, 6, 5, 4, 3, 2, 1, 0), _mm256_set_epi32(ctr_0, ctr_0, ctr_0, ctr_0, ctr_0, ctr_0, ctr_0, ctr_0)), _mm256_set_epi32(((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)))), block_offset_1 + ctr_1, block_offset_2 + ctr_2, 2, seed, random_2_0, random_2_1);
172 }
173
174 __m256d random_1_0{};
175 __m256d random_1_1{};
176 if (kT > 0.) {
177 philox_double2(time_step, _mm256_add_epi32(_mm256_add_epi32(_mm256_set_epi32(7, 6, 5, 4, 3, 2, 1, 0), _mm256_set_epi32(ctr_0, ctr_0, ctr_0, ctr_0, ctr_0, ctr_0, ctr_0, ctr_0)), _mm256_set_epi32(((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)))), block_offset_1 + ctr_1, block_offset_2 + ctr_2, 1, seed, random_1_0, random_1_1);
178 }
179
180 __m256d random_0_0{};
181 __m256d random_0_1{};
182 if (kT > 0.) {
183 philox_double2(time_step, _mm256_add_epi32(_mm256_add_epi32(_mm256_set_epi32(7, 6, 5, 4, 3, 2, 1, 0), _mm256_set_epi32(ctr_0, ctr_0, ctr_0, ctr_0, ctr_0, ctr_0, ctr_0, ctr_0)), _mm256_set_epi32(((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)), ((int64_t)(block_offset_0)))), block_offset_1 + ctr_1, block_offset_2 + ctr_2, 0, seed, random_0_0, random_0_1);
184 }
185 const __m256d xi_2 = _mm256_add_pd(xi_249, xi_260);
186 const __m256d xi_3 = _mm256_add_pd(xi_2, xi_244);
187 const __m256d xi_4 = _mm256_add_pd(_mm256_add_pd(xi_246, xi_251), xi_263);
188 const __m256d xi_5 = _mm256_add_pd(xi_248, xi_265);
189 const __m256d xi_6 = _mm256_add_pd(xi_245, xi_262);
190 const __m256d xi_8 = _mm256_mul_pd(xi_250, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0));
191 const __m256d xi_9 = _mm256_mul_pd(xi_252, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0));
192 const __m256d xi_10 = _mm256_mul_pd(xi_262, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0));
193 const __m256d xi_11 = _mm256_mul_pd(xi_256, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0));
194 const __m256d xi_12 = _mm256_mul_pd(xi_258, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0));
195 const __m256d xi_13 = _mm256_add_pd(_mm256_add_pd(xi_10, xi_11), xi_12);
196 const __m256d xi_14 = _mm256_mul_pd(xi_264, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0));
197 const __m256d xi_15 = _mm256_mul_pd(xi_247, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0));
198 const __m256d xi_16 = _mm256_add_pd(xi_14, xi_15);
199 const __m256d xi_17 = _mm256_mul_pd(xi_255, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0));
200 const __m256d xi_18 = _mm256_mul_pd(xi_248, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0));
201 const __m256d xi_19 = _mm256_add_pd(xi_17, xi_18);
202 const __m256d xi_20 = _mm256_mul_pd(xi_249, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0));
203 const __m256d xi_21 = _mm256_add_pd(xi_10, xi_20);
204 const __m256d xi_22 = _mm256_mul_pd(xi_246, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0));
205 const __m256d xi_23 = _mm256_mul_pd(xi_245, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0));
206 const __m256d xi_24 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(xi_17, xi_22), xi_23), xi_263);
207 const __m256d xi_29 = _mm256_mul_pd(xi_254, _mm256_set_pd(0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666));
208 const __m256d xi_30 = _mm256_mul_pd(xi_254, _mm256_set_pd(0.083333333333333329, 0.083333333333333329, 0.083333333333333329, 0.083333333333333329));
209 const __m256d xi_42 = _mm256_mul_pd(xi_261, _mm256_set_pd(0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666));
210 const __m256d xi_43 = _mm256_mul_pd(xi_261, _mm256_set_pd(0.083333333333333329, 0.083333333333333329, 0.083333333333333329, 0.083333333333333329));
211 const __m256d xi_49 = _mm256_mul_pd(xi_259, _mm256_set_pd(0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666));
212 const __m256d xi_50 = _mm256_mul_pd(xi_259, _mm256_set_pd(0.083333333333333329, 0.083333333333333329, 0.083333333333333329, 0.083333333333333329));
213 const __m256d xi_67 = _mm256_mul_pd(xi_254, _mm256_set_pd(0.25, 0.25, 0.25, 0.25));
214 const __m256d xi_72 = _mm256_mul_pd(xi_254, _mm256_set_pd(xi_71, xi_71, xi_71, xi_71));
215 const __m256d xi_114 = _mm256_mul_pd(xi_253, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0));
216 const __m256d xi_118 = _mm256_mul_pd(xi_263, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0));
217 const __m256d xi_119 = _mm256_add_pd(xi_118, xi_18);
218 const __m256d xi_120 = _mm256_add_pd(_mm256_mul_pd(xi_257, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), xi_8);
219 const __m256d xi_122 = _mm256_mul_pd(xi_260, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0));
220 const __m256d xi_123 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(xi_11, xi_122), xi_15), xi_21);
221 const __m256d xi_125 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_246, _mm256_set_pd(2.0, 2.0, 2.0, 2.0)), _mm256_mul_pd(xi_248, _mm256_set_pd(2.0, 2.0, 2.0, 2.0))), _mm256_mul_pd(xi_255, _mm256_set_pd(2.0, 2.0, 2.0, 2.0))), _mm256_mul_pd(xi_263, _mm256_set_pd(2.0, 2.0, 2.0, 2.0)));
222 const __m256d xi_126 = _mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_244, _mm256_set_pd(5.0, 5.0, 5.0, 5.0)), _mm256_mul_pd(xi_258, _mm256_set_pd(5.0, 5.0, 5.0, 5.0))), xi_125);
223 const __m256d xi_128 = _mm256_mul_pd(xi_256, _mm256_set_pd(2.0, 2.0, 2.0, 2.0));
224 const __m256d xi_129 = _mm256_mul_pd(xi_260, _mm256_set_pd(2.0, 2.0, 2.0, 2.0));
225 const __m256d xi_130 = _mm256_add_pd(_mm256_mul_pd(xi_249, _mm256_set_pd(2.0, 2.0, 2.0, 2.0)), _mm256_mul_pd(xi_262, _mm256_set_pd(2.0, 2.0, 2.0, 2.0)));
226 const __m256d xi_132 = _mm256_add_pd(xi_118, xi_248);
227 const __m256d xi_133 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(xi_132, xi_14), xi_22), xi_251), xi_255);
228 const __m256d xi_135 = _mm256_mul_pd(xi_133, _mm256_set_pd(xi_134, xi_134, xi_134, xi_134));
229 const __m256d xi_136 = _mm256_add_pd(_mm256_set_pd(-0.5, -0.5, -0.5, -0.5), random_5_1);
230 const __m256d xi_141 = _mm256_mul_pd(xi_252, _mm256_set_pd(2.0, 2.0, 2.0, 2.0));
231 const __m256d xi_142 = _mm256_mul_pd(xi_247, _mm256_set_pd(2.0, 2.0, 2.0, 2.0));
232 const __m256d xi_143 = _mm256_add_pd(_mm256_mul_pd(xi_250, _mm256_set_pd(2.0, 2.0, 2.0, 2.0)), _mm256_mul_pd(xi_257, _mm256_set_pd(-2.0, -2.0, -2.0, -2.0)));
233 const __m256d xi_144 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_141, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), xi_14), xi_142), xi_143), xi_19), xi_4);
234 const __m256d xi_146 = _mm256_mul_pd(xi_144, _mm256_set_pd(xi_145, xi_145, xi_145, xi_145));
235 const __m256d xi_147 = _mm256_add_pd(_mm256_set_pd(-0.5, -0.5, -0.5, -0.5), random_3_0);
236 const __m256d xi_152 = _mm256_add_pd(_mm256_set_pd(-0.5, -0.5, -0.5, -0.5), random_0_1);
237 const __m256d xi_166 = _mm256_add_pd(xi_122, xi_256);
238 const __m256d xi_167 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(xi_12, xi_166), xi_20), xi_244), xi_262);
239 const __m256d xi_168 = _mm256_mul_pd(xi_167, _mm256_set_pd(xi_134, xi_134, xi_134, xi_134));
240 const __m256d xi_169 = _mm256_add_pd(_mm256_set_pd(-0.5, -0.5, -0.5, -0.5), random_4_1);
241 const __m256d xi_171 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_142, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), xi_13), xi_141), xi_143), xi_3);
242 const __m256d xi_172 = _mm256_mul_pd(xi_171, _mm256_set_pd(xi_145, xi_145, xi_145, xi_145));
243 const __m256d xi_173 = _mm256_add_pd(_mm256_set_pd(-0.5, -0.5, -0.5, -0.5), random_4_0);
244 const __m256d xi_178 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(xi_119, xi_23), xi_246), xi_255), xi_265);
245 const __m256d xi_179 = _mm256_mul_pd(xi_178, _mm256_set_pd(xi_134, xi_134, xi_134, xi_134));
246 const __m256d xi_180 = _mm256_add_pd(_mm256_set_pd(-0.5, -0.5, -0.5, -0.5), random_5_0);
247 const __m256d xi_182 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_128, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_129, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0))), xi_130), xi_24), xi_5);
248 const __m256d xi_183 = _mm256_mul_pd(xi_182, _mm256_set_pd(xi_145, xi_145, xi_145, xi_145));
249 const __m256d xi_184 = _mm256_add_pd(_mm256_set_pd(-0.5, -0.5, -0.5, -0.5), random_3_1);
250 const __m256d xi_212 = _mm256_mul_pd(xi_182, _mm256_set_pd(xi_211, xi_211, xi_211, xi_211));
251 const __m256d xi_214 = _mm256_mul_pd(xi_178, _mm256_set_pd(xi_213, xi_213, xi_213, xi_213));
252 const __m256d xi_220 = _mm256_mul_pd(xi_144, _mm256_set_pd(xi_211, xi_211, xi_211, xi_211));
253 const __m256d xi_221 = _mm256_mul_pd(xi_133, _mm256_set_pd(xi_213, xi_213, xi_213, xi_213));
254 const __m256d xi_235 = _mm256_mul_pd(xi_167, _mm256_set_pd(xi_213, xi_213, xi_213, xi_213));
255 const __m256d xi_236 = _mm256_mul_pd(xi_171, _mm256_set_pd(xi_211, xi_211, xi_211, xi_211));
256 const __m256d xi_31 = _mm256_mul_pd(xi_30, _mm256_set_pd(rr_0, rr_0, rr_0, rr_0));
257 const __m256d xi_44 = _mm256_mul_pd(xi_43, _mm256_set_pd(rr_0, rr_0, rr_0, rr_0));
258 const __m256d xi_51 = _mm256_mul_pd(xi_50, _mm256_set_pd(rr_0, rr_0, rr_0, rr_0));
259 const __m256d xi_54 = _mm256_mul_pd(xi_261, _mm256_set_pd(xi_53, xi_53, xi_53, xi_53));
260 const __m256d xi_59 = _mm256_mul_pd(xi_254, _mm256_set_pd(xi_53, xi_53, xi_53, xi_53));
261 const __m256d xi_81 = _mm256_mul_pd(xi_259, _mm256_set_pd(xi_53, xi_53, xi_53, xi_53));
262 const __m256d vel0Term = _mm256_add_pd(_mm256_add_pd(xi_247, xi_257), xi_3);
263 const __m256d vel1Term = _mm256_add_pd(xi_252, xi_4);
264 const __m256d vel2Term = _mm256_add_pd(xi_256, xi_5);
265 const __m256d rho = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(vel0Term, vel1Term), vel2Term), xi_250), xi_253), xi_255), xi_258), xi_264), xi_6);
266 const __m256d xi_105 = _mm256_mul_pd(rho, _mm256_set_pd(kT, kT, kT, kT));
267 const __m256d xi_106 = _mm256_sqrt_pd(_mm256_mul_pd(xi_105, _mm256_add_pd(_mm256_mul_pd(_mm256_set_pd(-1.0, -1.0, -1.0, -1.0), _mm256_mul_pd(_mm256_add_pd(_mm256_mul_pd(_mm256_set_pd(-1.0, -1.0, -1.0, -1.0), _mm256_set_pd(omega_even, omega_even, omega_even, omega_even)), _mm256_set_pd(1.0, 1.0, 1.0, 1.0)), _mm256_add_pd(_mm256_mul_pd(_mm256_set_pd(-1.0, -1.0, -1.0, -1.0), _mm256_set_pd(omega_even, omega_even, omega_even, omega_even)), _mm256_set_pd(1.0, 1.0, 1.0, 1.0)))), _mm256_set_pd(1.0, 1.0, 1.0, 1.0))));
268 const __m256d xi_107 = _mm256_mul_pd(_mm256_mul_pd(xi_106, _mm256_add_pd(_mm256_set_pd(-0.5, -0.5, -0.5, -0.5), random_6_0)), _mm256_set_pd(3.7416573867739413, 3.7416573867739413, 3.7416573867739413, 3.7416573867739413));
269 const __m256d xi_108 = _mm256_mul_pd(_mm256_mul_pd(xi_106, _mm256_add_pd(_mm256_set_pd(-0.5, -0.5, -0.5, -0.5), random_7_0)), _mm256_set_pd(5.4772255750516612, 5.4772255750516612, 5.4772255750516612, 5.4772255750516612));
270 const __m256d xi_110 = _mm256_mul_pd(_mm256_mul_pd(_mm256_add_pd(_mm256_set_pd(-0.5, -0.5, -0.5, -0.5), random_2_1), _mm256_set_pd(xi_109, xi_109, xi_109, xi_109)), _mm256_sqrt_pd(_mm256_mul_pd(xi_105, _mm256_add_pd(_mm256_mul_pd(_mm256_set_pd(-1.0, -1.0, -1.0, -1.0), _mm256_mul_pd(_mm256_add_pd(_mm256_mul_pd(_mm256_set_pd(-1.0, -1.0, -1.0, -1.0), _mm256_set_pd(omega_bulk, omega_bulk, omega_bulk, omega_bulk)), _mm256_set_pd(1.0, 1.0, 1.0, 1.0)), _mm256_add_pd(_mm256_mul_pd(_mm256_set_pd(-1.0, -1.0, -1.0, -1.0), _mm256_set_pd(omega_bulk, omega_bulk, omega_bulk, omega_bulk)), _mm256_set_pd(1.0, 1.0, 1.0, 1.0)))), _mm256_set_pd(1.0, 1.0, 1.0, 1.0)))));
271 const __m256d xi_111 = _mm256_mul_pd(_mm256_mul_pd(xi_106, _mm256_add_pd(_mm256_set_pd(-0.5, -0.5, -0.5, -0.5), random_6_1)), _mm256_set_pd(8.3666002653407556, 8.3666002653407556, 8.3666002653407556, 8.3666002653407556));
272 const __m256d xi_137 = _mm256_sqrt_pd(_mm256_mul_pd(xi_105, _mm256_add_pd(_mm256_mul_pd(_mm256_set_pd(-1.0, -1.0, -1.0, -1.0), _mm256_mul_pd(_mm256_add_pd(_mm256_mul_pd(_mm256_set_pd(-1.0, -1.0, -1.0, -1.0), _mm256_set_pd(omega_odd, omega_odd, omega_odd, omega_odd)), _mm256_set_pd(1.0, 1.0, 1.0, 1.0)), _mm256_add_pd(_mm256_mul_pd(_mm256_set_pd(-1.0, -1.0, -1.0, -1.0), _mm256_set_pd(omega_odd, omega_odd, omega_odd, omega_odd)), _mm256_set_pd(1.0, 1.0, 1.0, 1.0)))), _mm256_set_pd(1.0, 1.0, 1.0, 1.0))));
273 const __m256d xi_138 = _mm256_mul_pd(xi_137, _mm256_set_pd(1.4142135623730951, 1.4142135623730951, 1.4142135623730951, 1.4142135623730951));
274 const __m256d xi_139 = _mm256_mul_pd(xi_138, _mm256_set_pd(0.5, 0.5, 0.5, 0.5));
275 const __m256d xi_140 = _mm256_mul_pd(xi_136, xi_139);
276 const __m256d xi_148 = _mm256_mul_pd(xi_137, _mm256_set_pd(xi_109, xi_109, xi_109, xi_109));
277 const __m256d xi_149 = _mm256_mul_pd(xi_148, _mm256_set_pd(0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666));
278 const __m256d xi_150 = _mm256_mul_pd(xi_147, xi_149);
279 const __m256d xi_151 = _mm256_add_pd(_mm256_mul_pd(xi_146, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_150, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)));
280 const __m256d xi_153 = _mm256_sqrt_pd(_mm256_mul_pd(xi_105, _mm256_add_pd(_mm256_mul_pd(_mm256_set_pd(-1.0, -1.0, -1.0, -1.0), _mm256_mul_pd(_mm256_add_pd(_mm256_mul_pd(_mm256_set_pd(-1.0, -1.0, -1.0, -1.0), _mm256_set_pd(omega_shear, omega_shear, omega_shear, omega_shear)), _mm256_set_pd(1.0, 1.0, 1.0, 1.0)), _mm256_add_pd(_mm256_mul_pd(_mm256_set_pd(-1.0, -1.0, -1.0, -1.0), _mm256_set_pd(omega_shear, omega_shear, omega_shear, omega_shear)), _mm256_set_pd(1.0, 1.0, 1.0, 1.0)))), _mm256_set_pd(1.0, 1.0, 1.0, 1.0))));
281 const __m256d xi_154 = _mm256_mul_pd(xi_153, _mm256_set_pd(0.5, 0.5, 0.5, 0.5));
282 const __m256d xi_155 = _mm256_mul_pd(xi_152, xi_154);
283 const __m256d xi_161 = _mm256_mul_pd(_mm256_mul_pd(xi_153, _mm256_add_pd(_mm256_set_pd(-0.5, -0.5, -0.5, -0.5), random_0_0)), _mm256_set_pd(1.7320508075688772, 1.7320508075688772, 1.7320508075688772, 1.7320508075688772));
284 const __m256d xi_165 = _mm256_add_pd(xi_146, xi_150);
285 const __m256d xi_170 = _mm256_mul_pd(xi_139, xi_169);
286 const __m256d xi_174 = _mm256_mul_pd(xi_149, xi_173);
287 const __m256d xi_175 = _mm256_add_pd(xi_172, xi_174);
288 const __m256d xi_177 = _mm256_add_pd(_mm256_mul_pd(xi_172, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_174, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)));
289 const __m256d xi_181 = _mm256_mul_pd(xi_139, xi_180);
290 const __m256d xi_185 = _mm256_mul_pd(xi_149, xi_184);
291 const __m256d xi_186 = _mm256_add_pd(_mm256_mul_pd(xi_183, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_185, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)));
292 const __m256d xi_188 = _mm256_add_pd(xi_183, xi_185);
293 const __m256d xi_189 = _mm256_mul_pd(_mm256_mul_pd(xi_152, xi_153), _mm256_set_pd(0.25, 0.25, 0.25, 0.25));
294 const __m256d xi_192 = _mm256_mul_pd(xi_107, _mm256_set_pd(0.083333333333333329, 0.083333333333333329, 0.083333333333333329, 0.083333333333333329));
295 const __m256d xi_196 = _mm256_mul_pd(xi_154, _mm256_add_pd(_mm256_set_pd(-0.5, -0.5, -0.5, -0.5), random_1_0));
296 const __m256d xi_203 = _mm256_mul_pd(xi_154, _mm256_add_pd(_mm256_set_pd(-0.5, -0.5, -0.5, -0.5), random_2_0));
297 const __m256d xi_207 = _mm256_mul_pd(xi_111, _mm256_set_pd(-0.014285714285714285, -0.014285714285714285, -0.014285714285714285, -0.014285714285714285));
298 const __m256d xi_208 = _mm256_mul_pd(xi_108, _mm256_set_pd(0.050000000000000003, 0.050000000000000003, 0.050000000000000003, 0.050000000000000003));
299 const __m256d xi_215 = _mm256_mul_pd(xi_148, _mm256_set_pd(0.083333333333333329, 0.083333333333333329, 0.083333333333333329, 0.083333333333333329));
300 const __m256d xi_216 = _mm256_mul_pd(xi_184, xi_215);
301 const __m256d xi_217 = _mm256_mul_pd(xi_138, _mm256_set_pd(0.25, 0.25, 0.25, 0.25));
302 const __m256d xi_218 = _mm256_mul_pd(xi_180, xi_217);
303 const __m256d xi_219 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_212, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_216, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0))), xi_214), xi_218);
304 const __m256d xi_222 = _mm256_mul_pd(xi_147, xi_215);
305 const __m256d xi_223 = _mm256_mul_pd(xi_136, xi_217);
306 const __m256d xi_224 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_220, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_222, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0))), xi_221), xi_223);
307 const __m256d xi_225 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_221, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_223, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0))), xi_220), xi_222);
308 const __m256d xi_227 = _mm256_mul_pd(xi_189, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0));
309 const __m256d xi_230 = _mm256_mul_pd(xi_111, _mm256_set_pd(0.035714285714285712, 0.035714285714285712, 0.035714285714285712, 0.035714285714285712));
310 const __m256d xi_232 = _mm256_mul_pd(xi_154, _mm256_add_pd(_mm256_set_pd(-0.5, -0.5, -0.5, -0.5), random_1_1));
311 const __m256d xi_237 = _mm256_mul_pd(xi_169, xi_217);
312 const __m256d xi_238 = _mm256_mul_pd(xi_173, xi_215);
313 const __m256d xi_239 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_235, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_237, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0))), xi_236), xi_238);
314 const __m256d xi_241 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_236, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_238, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0))), xi_235), xi_237);
315 const __m256d xi_242 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_214, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_218, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0))), xi_212), xi_216);
316 const __m256d xi_0 = _mm256_div_pd(_mm256_set_pd(1.0, 1.0, 1.0, 1.0), rho);
317 const __m256d xi_7 = _mm256_mul_pd(xi_0, _mm256_set_pd(0.5, 0.5, 0.5, 0.5));
318 const __m256d u_0 = _mm256_add_pd(_mm256_mul_pd(xi_0, _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(vel0Term, xi_13), xi_8), xi_9)), _mm256_mul_pd(xi_261, xi_7));
319 const __m256d xi_25 = _mm256_mul_pd(u_0, xi_261);
320 const __m256d xi_37 = _mm256_mul_pd(xi_25, _mm256_set_pd(0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666));
321 const __m256d xi_38 = _mm256_mul_pd(xi_25, _mm256_set_pd(0.083333333333333329, 0.083333333333333329, 0.083333333333333329, 0.083333333333333329));
322 const __m256d xi_39 = _mm256_mul_pd(xi_38, _mm256_set_pd(omega_shear, omega_shear, omega_shear, omega_shear));
323 const __m256d xi_40 = _mm256_add_pd(_mm256_mul_pd(xi_37, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), xi_39);
324 const __m256d xi_56 = _mm256_add_pd(_mm256_mul_pd(_mm256_mul_pd(xi_25, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_set_pd(xi_55, xi_55, xi_55, xi_55)), xi_37);
325 const __m256d xi_57 = _mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_43, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), xi_54), xi_56);
326 const __m256d xi_61 = _mm256_mul_pd(_mm256_mul_pd(xi_25, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_set_pd(xi_60, xi_60, xi_60, xi_60));
327 const __m256d xi_68 = _mm256_mul_pd(u_0, xi_67);
328 const __m256d xi_73 = _mm256_mul_pd(u_0, xi_72);
329 const __m256d xi_77 = _mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_54, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), xi_43), xi_56);
330 const __m256d xi_84 = _mm256_mul_pd(xi_38, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0));
331 const __m256d xi_95 = _mm256_mul_pd(u_0, xi_259);
332 const __m256d xi_96 = _mm256_mul_pd(xi_95, _mm256_set_pd(0.25, 0.25, 0.25, 0.25));
333 const __m256d xi_99 = _mm256_mul_pd(xi_95, _mm256_set_pd(xi_71, xi_71, xi_71, xi_71));
334 const __m256d xi_113 = _mm256_mul_pd(rho, _mm256_mul_pd(u_0, u_0));
335 const __m256d u_1 = _mm256_add_pd(_mm256_mul_pd(xi_0, _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(vel1Term, xi_16), xi_19), xi_257), xi_8)), _mm256_mul_pd(xi_254, xi_7));
336 const __m256d xi_26 = _mm256_mul_pd(u_1, xi_254);
337 const __m256d xi_32 = _mm256_mul_pd(xi_26, _mm256_set_pd(0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666));
338 const __m256d xi_45 = _mm256_mul_pd(xi_26, _mm256_set_pd(0.083333333333333329, 0.083333333333333329, 0.083333333333333329, 0.083333333333333329));
339 const __m256d xi_46 = _mm256_mul_pd(xi_45, _mm256_set_pd(omega_shear, omega_shear, omega_shear, omega_shear));
340 const __m256d xi_47 = _mm256_add_pd(_mm256_mul_pd(xi_32, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), xi_46);
341 const __m256d xi_62 = _mm256_mul_pd(_mm256_mul_pd(xi_26, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_set_pd(xi_60, xi_60, xi_60, xi_60));
342 const __m256d xi_69 = _mm256_mul_pd(u_1, _mm256_set_pd(0.25, 0.25, 0.25, 0.25));
343 const __m256d xi_70 = _mm256_mul_pd(xi_261, xi_69);
344 const __m256d xi_74 = _mm256_mul_pd(u_1, _mm256_set_pd(xi_71, xi_71, xi_71, xi_71));
345 const __m256d xi_75 = _mm256_mul_pd(xi_261, xi_74);
346 const __m256d xi_76 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_68, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_70, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0))), xi_73), xi_75);
347 const __m256d xi_78 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_73, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_75, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0))), xi_68), xi_70);
348 const __m256d xi_86 = _mm256_mul_pd(xi_259, xi_69);
349 const __m256d xi_88 = _mm256_mul_pd(xi_259, xi_74);
350 const __m256d xi_93 = _mm256_mul_pd(xi_45, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0));
351 const __m256d xi_112 = _mm256_mul_pd(rho, _mm256_mul_pd(u_1, u_1));
352 const __m256d xi_121 = _mm256_add_pd(_mm256_add_pd(xi_112, xi_120), xi_9);
353 const __m256d xi_197 = _mm256_mul_pd(rho, u_1);
354 const __m256d xi_199 = _mm256_mul_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(u_0, xi_197), xi_120), xi_247), xi_252), _mm256_set_pd(xi_198, xi_198, xi_198, xi_198));
355 const __m256d xi_200 = _mm256_add_pd(_mm256_mul_pd(xi_196, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_199, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)));
356 const __m256d xi_201 = _mm256_add_pd(xi_196, xi_199);
357 const __m256d u_2 = _mm256_add_pd(_mm256_mul_pd(xi_0, _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(vel2Term, xi_21), xi_24), xi_260)), _mm256_mul_pd(xi_259, xi_7));
358 const __m256d xi_27 = _mm256_mul_pd(u_2, xi_259);
359 const __m256d xi_33 = _mm256_mul_pd(xi_27, _mm256_set_pd(0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666));
360 const __m256d xi_34 = _mm256_mul_pd(xi_27, _mm256_set_pd(0.083333333333333329, 0.083333333333333329, 0.083333333333333329, 0.083333333333333329));
361 const __m256d xi_35 = _mm256_mul_pd(xi_34, _mm256_set_pd(omega_shear, omega_shear, omega_shear, omega_shear));
362 const __m256d xi_36 = _mm256_add_pd(_mm256_mul_pd(xi_33, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), xi_35);
363 const __m256d xi_41 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_26, _mm256_set_pd(0.33333333333333331, 0.33333333333333331, 0.33333333333333331, 0.33333333333333331)), _mm256_mul_pd(_mm256_mul_pd(xi_32, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_set_pd(omega_shear, omega_shear, omega_shear, omega_shear))), xi_36), xi_40);
364 const __m256d xi_48 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_25, _mm256_set_pd(0.33333333333333331, 0.33333333333333331, 0.33333333333333331, 0.33333333333333331)), _mm256_mul_pd(_mm256_mul_pd(xi_37, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_set_pd(omega_shear, omega_shear, omega_shear, omega_shear))), xi_36), xi_47);
365 const __m256d xi_52 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_27, _mm256_set_pd(0.33333333333333331, 0.33333333333333331, 0.33333333333333331, 0.33333333333333331)), _mm256_mul_pd(_mm256_mul_pd(xi_33, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_set_pd(omega_shear, omega_shear, omega_shear, omega_shear))), xi_40), xi_47);
366 const __m256d xi_58 = _mm256_mul_pd(xi_34, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0));
367 const __m256d xi_63 = _mm256_mul_pd(_mm256_mul_pd(xi_27, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_set_pd(xi_60, xi_60, xi_60, xi_60));
368 const __m256d xi_64 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(_mm256_mul_pd(xi_26, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_set_pd(xi_55, xi_55, xi_55, xi_55)), xi_32), xi_61), xi_62), xi_63);
369 const __m256d xi_65 = _mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_59, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), xi_30), xi_64);
370 const __m256d xi_66 = _mm256_add_pd(_mm256_add_pd(xi_35, xi_58), xi_65);
371 const __m256d xi_79 = _mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_30, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), xi_59), xi_64);
372 const __m256d xi_80 = _mm256_add_pd(_mm256_add_pd(xi_35, xi_58), xi_79);
373 const __m256d xi_82 = _mm256_add_pd(_mm256_mul_pd(_mm256_mul_pd(xi_27, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_set_pd(xi_55, xi_55, xi_55, xi_55)), xi_33);
374 const __m256d xi_83 = _mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_81, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), xi_50), xi_82);
375 const __m256d xi_85 = _mm256_add_pd(_mm256_add_pd(xi_39, xi_65), xi_84);
376 const __m256d xi_87 = _mm256_mul_pd(u_2, xi_67);
377 const __m256d xi_89 = _mm256_mul_pd(u_2, xi_72);
378 const __m256d xi_90 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_88, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_89, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0))), xi_86), xi_87);
379 const __m256d xi_91 = _mm256_add_pd(_mm256_add_pd(xi_39, xi_79), xi_84);
380 const __m256d xi_92 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_86, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_87, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0))), xi_88), xi_89);
381 const __m256d xi_94 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(xi_46, xi_61), xi_62), xi_63), xi_83), xi_93);
382 const __m256d xi_97 = _mm256_mul_pd(u_2, xi_261);
383 const __m256d xi_98 = _mm256_mul_pd(xi_97, _mm256_set_pd(0.25, 0.25, 0.25, 0.25));
384 const __m256d xi_100 = _mm256_mul_pd(xi_97, _mm256_set_pd(xi_71, xi_71, xi_71, xi_71));
385 const __m256d xi_101 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_96, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_98, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0))), xi_100), xi_99);
386 const __m256d xi_102 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_100, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_99, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0))), xi_96), xi_98);
387 const __m256d xi_103 = _mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_50, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), xi_81), xi_82);
388 const __m256d xi_104 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(xi_103, xi_46), xi_61), xi_62), xi_63), xi_93);
389 const __m256d xi_115 = _mm256_mul_pd(rho, _mm256_mul_pd(u_2, u_2));
390 const __m256d xi_116 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_245, _mm256_set_pd(3.0, 3.0, 3.0, 3.0)), _mm256_mul_pd(xi_265, _mm256_set_pd(3.0, 3.0, 3.0, 3.0))), _mm256_mul_pd(xi_115, _mm256_set_pd(0.66666666666666663, 0.66666666666666663, 0.66666666666666663, 0.66666666666666663))), xi_114);
391 const __m256d xi_117 = _mm256_mul_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_251, _mm256_set_pd(3.0, 3.0, 3.0, 3.0)), _mm256_mul_pd(xi_264, _mm256_set_pd(3.0, 3.0, 3.0, 3.0))), _mm256_mul_pd(xi_112, _mm256_set_pd(0.66666666666666663, 0.66666666666666663, 0.66666666666666663, 0.66666666666666663))), _mm256_mul_pd(xi_113, _mm256_set_pd(1.6666666666666667, 1.6666666666666667, 1.6666666666666667, 1.6666666666666667))), _mm256_mul_pd(xi_246, _mm256_set_pd(-3.0, -3.0, -3.0, -3.0))), _mm256_mul_pd(xi_248, _mm256_set_pd(-3.0, -3.0, -3.0, -3.0))), _mm256_mul_pd(xi_255, _mm256_set_pd(-3.0, -3.0, -3.0, -3.0))), _mm256_mul_pd(xi_263, _mm256_set_pd(-3.0, -3.0, -3.0, -3.0))), xi_116), _mm256_set_pd(omega_even, omega_even, omega_even, omega_even));
392 const __m256d xi_124 = _mm256_mul_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(xi_113, xi_115), xi_119), xi_121), xi_123), xi_17), xi_22), xi_253), _mm256_set_pd(omega_bulk, omega_bulk, omega_bulk, omega_bulk));
393 const __m256d xi_127 = _mm256_mul_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_112, _mm256_set_pd(2.3333333333333335, 2.3333333333333335, 2.3333333333333335, 2.3333333333333335)), _mm256_mul_pd(xi_251, _mm256_set_pd(-2.0, -2.0, -2.0, -2.0))), _mm256_mul_pd(xi_264, _mm256_set_pd(-2.0, -2.0, -2.0, -2.0))), _mm256_mul_pd(xi_249, _mm256_set_pd(-5.0, -5.0, -5.0, -5.0))), _mm256_mul_pd(xi_256, _mm256_set_pd(-5.0, -5.0, -5.0, -5.0))), _mm256_mul_pd(xi_260, _mm256_set_pd(-5.0, -5.0, -5.0, -5.0))), _mm256_mul_pd(xi_262, _mm256_set_pd(-5.0, -5.0, -5.0, -5.0))), xi_116), xi_126), _mm256_set_pd(omega_even, omega_even, omega_even, omega_even));
394 const __m256d xi_131 = _mm256_mul_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_115, _mm256_set_pd(3.0, 3.0, 3.0, 3.0)), _mm256_mul_pd(xi_251, _mm256_set_pd(5.0, 5.0, 5.0, 5.0))), _mm256_mul_pd(xi_264, _mm256_set_pd(5.0, 5.0, 5.0, 5.0))), _mm256_mul_pd(xi_245, _mm256_set_pd(-4.0, -4.0, -4.0, -4.0))), _mm256_mul_pd(xi_265, _mm256_set_pd(-4.0, -4.0, -4.0, -4.0))), _mm256_mul_pd(xi_247, _mm256_set_pd(-7.0, -7.0, -7.0, -7.0))), _mm256_mul_pd(xi_250, _mm256_set_pd(-7.0, -7.0, -7.0, -7.0))), _mm256_mul_pd(xi_252, _mm256_set_pd(-7.0, -7.0, -7.0, -7.0))), _mm256_mul_pd(xi_257, _mm256_set_pd(-7.0, -7.0, -7.0, -7.0))), xi_114), xi_126), xi_128), xi_129), xi_130), _mm256_set_pd(omega_even, omega_even, omega_even, omega_even));
395 const __m256d xi_156 = _mm256_add_pd(_mm256_mul_pd(xi_115, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), xi_265);
396 const __m256d xi_157 = _mm256_mul_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_251, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), xi_121), xi_156), xi_16), xi_2), xi_256), xi_6), _mm256_set_pd(omega_shear, omega_shear, omega_shear, omega_shear));
397 const __m256d xi_158 = _mm256_mul_pd(xi_157, _mm256_set_pd(0.125, 0.125, 0.125, 0.125));
398 const __m256d xi_159 = _mm256_add_pd(_mm256_mul_pd(xi_131, _mm256_set_pd(-0.01984126984126984, -0.01984126984126984, -0.01984126984126984, -0.01984126984126984)), _mm256_mul_pd(xi_107, _mm256_set_pd(-0.11904761904761904, -0.11904761904761904, -0.11904761904761904, -0.11904761904761904)));
399 const __m256d xi_160 = _mm256_mul_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_112, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_113, _mm256_set_pd(2.0, 2.0, 2.0, 2.0))), _mm256_mul_pd(xi_244, _mm256_set_pd(-2.0, -2.0, -2.0, -2.0))), _mm256_mul_pd(xi_258, _mm256_set_pd(-2.0, -2.0, -2.0, -2.0))), xi_120), xi_123), xi_125), xi_156), xi_245), xi_251), xi_264), xi_9), _mm256_set_pd(omega_shear, omega_shear, omega_shear, omega_shear));
400 const __m256d xi_162 = _mm256_add_pd(_mm256_mul_pd(xi_160, _mm256_set_pd(-0.041666666666666664, -0.041666666666666664, -0.041666666666666664, -0.041666666666666664)), _mm256_mul_pd(xi_161, _mm256_set_pd(-0.16666666666666666, -0.16666666666666666, -0.16666666666666666, -0.16666666666666666)));
401 const __m256d xi_163 = _mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_117, _mm256_set_pd(-0.050000000000000003, -0.050000000000000003, -0.050000000000000003, -0.050000000000000003)), _mm256_mul_pd(xi_108, _mm256_set_pd(-0.10000000000000001, -0.10000000000000001, -0.10000000000000001, -0.10000000000000001))), xi_162);
402 const __m256d xi_164 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_127, _mm256_set_pd(0.014285714285714285, 0.014285714285714285, 0.014285714285714285, 0.014285714285714285)), _mm256_mul_pd(xi_111, _mm256_set_pd(0.028571428571428571, 0.028571428571428571, 0.028571428571428571, 0.028571428571428571))), xi_155), xi_158), xi_159), xi_163);
403 const __m256d xi_176 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_160, _mm256_set_pd(0.083333333333333329, 0.083333333333333329, 0.083333333333333329, 0.083333333333333329)), _mm256_mul_pd(xi_161, _mm256_set_pd(0.33333333333333331, 0.33333333333333331, 0.33333333333333331, 0.33333333333333331))), _mm256_mul_pd(xi_127, _mm256_set_pd(-0.035714285714285712, -0.035714285714285712, -0.035714285714285712, -0.035714285714285712))), _mm256_mul_pd(xi_111, _mm256_set_pd(-0.071428571428571425, -0.071428571428571425, -0.071428571428571425, -0.071428571428571425))), xi_159);
404 const __m256d xi_187 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_155, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_158, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0))), _mm256_mul_pd(xi_131, _mm256_set_pd(0.015873015873015872, 0.015873015873015872, 0.015873015873015872, 0.015873015873015872))), _mm256_mul_pd(xi_107, _mm256_set_pd(0.095238095238095233, 0.095238095238095233, 0.095238095238095233, 0.095238095238095233))), _mm256_mul_pd(xi_127, _mm256_set_pd(-0.021428571428571429, -0.021428571428571429, -0.021428571428571429, -0.021428571428571429))), _mm256_mul_pd(xi_111, _mm256_set_pd(-0.042857142857142858, -0.042857142857142858, -0.042857142857142858, -0.042857142857142858))), xi_163);
405 const __m256d xi_190 = _mm256_mul_pd(xi_157, _mm256_set_pd(0.0625, 0.0625, 0.0625, 0.0625));
406 const __m256d xi_191 = _mm256_mul_pd(xi_131, _mm256_set_pd(0.013888888888888888, 0.013888888888888888, 0.013888888888888888, 0.013888888888888888));
407 const __m256d xi_193 = _mm256_add_pd(_mm256_mul_pd(xi_124, _mm256_set_pd(0.041666666666666664, 0.041666666666666664, 0.041666666666666664, 0.041666666666666664)), _mm256_mul_pd(xi_110, _mm256_set_pd(0.083333333333333329, 0.083333333333333329, 0.083333333333333329, 0.083333333333333329)));
408 const __m256d xi_194 = _mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_160, _mm256_set_pd(0.020833333333333332, 0.020833333333333332, 0.020833333333333332, 0.020833333333333332)), _mm256_mul_pd(xi_161, _mm256_set_pd(0.083333333333333329, 0.083333333333333329, 0.083333333333333329, 0.083333333333333329))), xi_193);
409 const __m256d xi_195 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(xi_165, xi_189), xi_190), xi_191), xi_192), xi_194);
410 const __m256d xi_202 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(xi_151, xi_189), xi_190), xi_191), xi_192), xi_194);
411 const __m256d xi_204 = _mm256_mul_pd(xi_127, _mm256_set_pd(-0.0071428571428571426, -0.0071428571428571426, -0.0071428571428571426, -0.0071428571428571426));
412 const __m256d xi_205 = _mm256_mul_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(u_2, xi_197), xi_132), xi_17), xi_246), _mm256_set_pd(xi_198, xi_198, xi_198, xi_198));
413 const __m256d xi_206 = _mm256_mul_pd(xi_117, _mm256_set_pd(0.025000000000000001, 0.025000000000000001, 0.025000000000000001, 0.025000000000000001));
414 const __m256d xi_209 = _mm256_add_pd(_mm256_mul_pd(xi_131, _mm256_set_pd(-0.003968253968253968, -0.003968253968253968, -0.003968253968253968, -0.003968253968253968)), _mm256_mul_pd(xi_107, _mm256_set_pd(-0.023809523809523808, -0.023809523809523808, -0.023809523809523808, -0.023809523809523808)));
415 const __m256d xi_210 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(xi_162, xi_193), xi_203), xi_204), xi_205), xi_206), xi_207), xi_208), xi_209);
416 const __m256d xi_226 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_203, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_205, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0))), xi_162), xi_193), xi_204), xi_206), xi_207), xi_208), xi_209);
417 const __m256d xi_228 = _mm256_mul_pd(xi_190, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0));
418 const __m256d xi_229 = _mm256_mul_pd(xi_127, _mm256_set_pd(0.017857142857142856, 0.017857142857142856, 0.017857142857142856, 0.017857142857142856));
419 const __m256d xi_231 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(xi_188, xi_194), xi_209), xi_227), xi_228), xi_229), xi_230);
420 const __m256d xi_233 = _mm256_mul_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(_mm256_mul_pd(rho, u_0), u_2), xi_10), xi_166), xi_249), _mm256_set_pd(xi_198, xi_198, xi_198, xi_198));
421 const __m256d xi_234 = _mm256_add_pd(_mm256_mul_pd(xi_232, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_233, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)));
422 const __m256d xi_240 = _mm256_add_pd(xi_232, xi_233);
423 const __m256d xi_243 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(xi_186, xi_194), xi_209), xi_227), xi_228), xi_229), xi_230);
424 const __m256d forceTerm_0 = _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_25, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_26, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0))), _mm256_mul_pd(xi_27, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0))), _mm256_mul_pd(xi_25, _mm256_set_pd(xi_28, xi_28, xi_28, xi_28))), _mm256_mul_pd(xi_26, _mm256_set_pd(xi_28, xi_28, xi_28, xi_28))), _mm256_mul_pd(xi_27, _mm256_set_pd(xi_28, xi_28, xi_28, xi_28)));
425 const __m256d forceTerm_1 = _mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_31, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), xi_29), xi_41);
426 const __m256d forceTerm_2 = _mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_29, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), xi_31), xi_41);
427 const __m256d forceTerm_3 = _mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_42, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), xi_44), xi_48);
428 const __m256d forceTerm_4 = _mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_44, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), xi_42), xi_48);
429 const __m256d forceTerm_5 = _mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_51, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), xi_49), xi_52);
430 const __m256d forceTerm_6 = _mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_49, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), xi_51), xi_52);
431 const __m256d forceTerm_7 = _mm256_add_pd(_mm256_add_pd(xi_57, xi_66), xi_76);
432 const __m256d forceTerm_8 = _mm256_add_pd(_mm256_add_pd(xi_66, xi_77), xi_78);
433 const __m256d forceTerm_9 = _mm256_add_pd(_mm256_add_pd(xi_57, xi_78), xi_80);
434 const __m256d forceTerm_10 = _mm256_add_pd(_mm256_add_pd(xi_76, xi_77), xi_80);
435 const __m256d forceTerm_11 = _mm256_add_pd(_mm256_add_pd(xi_83, xi_85), xi_90);
436 const __m256d forceTerm_12 = _mm256_add_pd(_mm256_add_pd(xi_83, xi_91), xi_92);
437 const __m256d forceTerm_13 = _mm256_add_pd(_mm256_add_pd(xi_101, xi_57), xi_94);
438 const __m256d forceTerm_14 = _mm256_add_pd(_mm256_add_pd(xi_102, xi_77), xi_94);
439 const __m256d forceTerm_15 = _mm256_add_pd(_mm256_add_pd(xi_103, xi_85), xi_92);
440 const __m256d forceTerm_16 = _mm256_add_pd(_mm256_add_pd(xi_103, xi_90), xi_91);
441 const __m256d forceTerm_17 = _mm256_add_pd(_mm256_add_pd(xi_102, xi_104), xi_57);
442 const __m256d forceTerm_18 = _mm256_add_pd(_mm256_add_pd(xi_101, xi_104), xi_77);
443 _mm256_store_pd(&_data_pdfs_20_30_10[ctr_0], _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_110, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_131, _mm256_set_pd(0.023809523809523808, 0.023809523809523808, 0.023809523809523808, 0.023809523809523808))), _mm256_mul_pd(xi_107, _mm256_set_pd(0.14285714285714285, 0.14285714285714285, 0.14285714285714285, 0.14285714285714285))), _mm256_mul_pd(xi_127, _mm256_set_pd(0.042857142857142858, 0.042857142857142858, 0.042857142857142858, 0.042857142857142858))), _mm256_mul_pd(xi_111, _mm256_set_pd(0.085714285714285715, 0.085714285714285715, 0.085714285714285715, 0.085714285714285715))), _mm256_mul_pd(xi_117, _mm256_set_pd(0.10000000000000001, 0.10000000000000001, 0.10000000000000001, 0.10000000000000001))), _mm256_mul_pd(xi_108, _mm256_set_pd(0.20000000000000001, 0.20000000000000001, 0.20000000000000001, 0.20000000000000001))), _mm256_mul_pd(xi_124, _mm256_set_pd(-0.5, -0.5, -0.5, -0.5))), forceTerm_0), xi_253));
444 _mm256_store_pd(&_data_pdfs_20_31_10[ctr_0], _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_135, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_140, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0))), forceTerm_1), xi_151), xi_164), xi_251));
445 _mm256_store_pd(&_data_pdfs_20_32_10[ctr_0], _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(forceTerm_2, xi_135), xi_140), xi_164), xi_165), xi_264));
446 _mm256_store_pd(&_data_pdfs_20_33_10[ctr_0], _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(forceTerm_3, xi_168), xi_170), xi_175), xi_176), xi_258));
447 _mm256_store_pd(&_data_pdfs_20_34_10[ctr_0], _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_168, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_170, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0))), forceTerm_4), xi_176), xi_177), xi_244));
448 _mm256_store_pd(&_data_pdfs_20_35_10[ctr_0], _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_mul_pd(xi_179, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0)), _mm256_mul_pd(xi_181, _mm256_set_pd(-1.0, -1.0, -1.0, -1.0))), forceTerm_5), xi_186), xi_187), xi_265));
449 _mm256_store_pd(&_data_pdfs_20_36_10[ctr_0], _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(forceTerm_6, xi_179), xi_181), xi_187), xi_188), xi_245));
450 _mm256_store_pd(&_data_pdfs_20_37_10[ctr_0], _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(forceTerm_7, xi_177), xi_195), xi_200), xi_252));
451 _mm256_store_pd(&_data_pdfs_20_38_10[ctr_0], _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(forceTerm_8, xi_175), xi_195), xi_201), xi_257));
452 _mm256_store_pd(&_data_pdfs_20_39_10[ctr_0], _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(forceTerm_9, xi_177), xi_201), xi_202), xi_250));
453 _mm256_store_pd(&_data_pdfs_20_310_10[ctr_0], _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(forceTerm_10, xi_175), xi_200), xi_202), xi_247));
454 _mm256_store_pd(&_data_pdfs_20_311_10[ctr_0], _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(forceTerm_11, xi_210), xi_219), xi_224), xi_263));
455 _mm256_store_pd(&_data_pdfs_20_312_10[ctr_0], _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(forceTerm_12, xi_219), xi_225), xi_226), xi_248));
456 _mm256_store_pd(&_data_pdfs_20_313_10[ctr_0], _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(forceTerm_13, xi_231), xi_234), xi_239), xi_256));
457 _mm256_store_pd(&_data_pdfs_20_314_10[ctr_0], _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(forceTerm_14, xi_231), xi_240), xi_241), xi_260));
458 _mm256_store_pd(&_data_pdfs_20_315_10[ctr_0], _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(forceTerm_15, xi_224), xi_226), xi_242), xi_246));
459 _mm256_store_pd(&_data_pdfs_20_316_10[ctr_0], _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(forceTerm_16, xi_210), xi_225), xi_242), xi_255));
460 _mm256_store_pd(&_data_pdfs_20_317_10[ctr_0], _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(forceTerm_17, xi_239), xi_240), xi_243), xi_262));
461 _mm256_store_pd(&_data_pdfs_20_318_10[ctr_0], _mm256_add_pd(_mm256_add_pd(_mm256_add_pd(_mm256_add_pd(forceTerm_18, xi_234), xi_241), xi_243), xi_249));
462 }
463 for (int64_t ctr_0 = (int64_t)((_size_force_0) / (4)) * (4); ctr_0 < _size_force_0; ctr_0 += 1) {
464 const double xi_244 = _data_pdfs_20_34_10[ctr_0];
465 const double xi_245 = _data_pdfs_20_36_10[ctr_0];
466 const double xi_246 = _data_pdfs_20_315_10[ctr_0];
467 const double xi_247 = _data_pdfs_20_310_10[ctr_0];
468 const double xi_248 = _data_pdfs_20_312_10[ctr_0];
469 const double xi_249 = _data_pdfs_20_318_10[ctr_0];
470 const double xi_250 = _data_pdfs_20_39_10[ctr_0];
471 const double xi_251 = _data_pdfs_20_31_10[ctr_0];
472 const double xi_252 = _data_pdfs_20_37_10[ctr_0];
473 const double xi_253 = _data_pdfs_20_30_10[ctr_0];
474 const double xi_254 = _data_force_20_31_10[ctr_0];
475 const double xi_255 = _data_pdfs_20_316_10[ctr_0];
476 const double xi_256 = _data_pdfs_20_313_10[ctr_0];
477 const double xi_257 = _data_pdfs_20_38_10[ctr_0];
478 const double xi_258 = _data_pdfs_20_33_10[ctr_0];
479 const double xi_259 = _data_force_20_32_10[ctr_0];
480 const double xi_260 = _data_pdfs_20_314_10[ctr_0];
481 const double xi_261 = _data_force_20_30_10[ctr_0];
482 const double xi_262 = _data_pdfs_20_317_10[ctr_0];
483 const double xi_263 = _data_pdfs_20_311_10[ctr_0];
484 const double xi_264 = _data_pdfs_20_32_10[ctr_0];
485 const double xi_265 = _data_pdfs_20_35_10[ctr_0];
486
487 double random_7_0{};
488 double random_7_1{};
489 if (kT > 0.) {
490 philox_double2(time_step, block_offset_0 + ctr_0, block_offset_1 + ctr_1, block_offset_2 + ctr_2, 7, seed, random_7_0, random_7_1);
491 }
492
493 double random_6_0{};
494 double random_6_1{};
495 if (kT > 0.) {
496 philox_double2(time_step, block_offset_0 + ctr_0, block_offset_1 + ctr_1, block_offset_2 + ctr_2, 6, seed, random_6_0, random_6_1);
497 }
498
499 double random_5_0{};
500 double random_5_1{};
501 if (kT > 0.) {
502 philox_double2(time_step, block_offset_0 + ctr_0, block_offset_1 + ctr_1, block_offset_2 + ctr_2, 5, seed, random_5_0, random_5_1);
503 }
504
505 double random_4_0{};
506 double random_4_1{};
507 if (kT > 0.) {
508 philox_double2(time_step, block_offset_0 + ctr_0, block_offset_1 + ctr_1, block_offset_2 + ctr_2, 4, seed, random_4_0, random_4_1);
509 }
510
511 double random_3_0{};
512 double random_3_1{};
513 if (kT > 0.) {
514 philox_double2(time_step, block_offset_0 + ctr_0, block_offset_1 + ctr_1, block_offset_2 + ctr_2, 3, seed, random_3_0, random_3_1);
515 }
516
517 double random_2_0{};
518 double random_2_1{};
519 if (kT > 0.) {
520 philox_double2(time_step, block_offset_0 + ctr_0, block_offset_1 + ctr_1, block_offset_2 + ctr_2, 2, seed, random_2_0, random_2_1);
521 }
522
523 double random_1_0{};
524 double random_1_1{};
525 if (kT > 0.) {
526 philox_double2(time_step, block_offset_0 + ctr_0, block_offset_1 + ctr_1, block_offset_2 + ctr_2, 1, seed, random_1_0, random_1_1);
527 }
528
529 double random_0_0{};
530 double random_0_1{};
531 if (kT > 0.) {
532 philox_double2(time_step, block_offset_0 + ctr_0, block_offset_1 + ctr_1, block_offset_2 + ctr_2, 0, seed, random_0_0, random_0_1);
533 }
534 const double xi_2 = xi_249 + xi_260;
535 const double xi_3 = xi_2 + xi_244;
536 const double xi_4 = xi_246 + xi_251 + xi_263;
537 const double xi_5 = xi_248 + xi_265;
538 const double xi_6 = xi_245 + xi_262;
539 const double xi_8 = xi_250 * -1.0;
540 const double xi_9 = xi_252 * -1.0;
541 const double xi_10 = xi_262 * -1.0;
542 const double xi_11 = xi_256 * -1.0;
543 const double xi_12 = xi_258 * -1.0;
544 const double xi_13 = xi_10 + xi_11 + xi_12;
545 const double xi_14 = xi_264 * -1.0;
546 const double xi_15 = xi_247 * -1.0;
547 const double xi_16 = xi_14 + xi_15;
548 const double xi_17 = xi_255 * -1.0;
549 const double xi_18 = xi_248 * -1.0;
550 const double xi_19 = xi_17 + xi_18;
551 const double xi_20 = xi_249 * -1.0;
552 const double xi_21 = xi_10 + xi_20;
553 const double xi_22 = xi_246 * -1.0;
554 const double xi_23 = xi_245 * -1.0;
555 const double xi_24 = xi_17 + xi_22 + xi_23 + xi_263;
556 const double xi_29 = xi_254 * 0.16666666666666666;
557 const double xi_30 = xi_254 * 0.083333333333333329;
558 const double xi_42 = xi_261 * 0.16666666666666666;
559 const double xi_43 = xi_261 * 0.083333333333333329;
560 const double xi_49 = xi_259 * 0.16666666666666666;
561 const double xi_50 = xi_259 * 0.083333333333333329;
562 const double xi_67 = xi_254 * 0.25;
563 const double xi_72 = xi_254 * xi_71;
564 const double xi_114 = xi_253 * -1.0;
565 const double xi_118 = xi_263 * -1.0;
566 const double xi_119 = xi_118 + xi_18;
567 const double xi_120 = xi_257 * -1.0 + xi_8;
568 const double xi_122 = xi_260 * -1.0;
569 const double xi_123 = xi_11 + xi_122 + xi_15 + xi_21;
570 const double xi_125 = xi_246 * 2.0 + xi_248 * 2.0 + xi_255 * 2.0 + xi_263 * 2.0;
571 const double xi_126 = xi_125 + xi_244 * 5.0 + xi_258 * 5.0;
572 const double xi_128 = xi_256 * 2.0;
573 const double xi_129 = xi_260 * 2.0;
574 const double xi_130 = xi_249 * 2.0 + xi_262 * 2.0;
575 const double xi_132 = xi_118 + xi_248;
576 const double xi_133 = xi_132 + xi_14 + xi_22 + xi_251 + xi_255;
577 const double xi_135 = xi_133 * xi_134;
578 const double xi_136 = random_5_1 - 0.5;
579 const double xi_141 = xi_252 * 2.0;
580 const double xi_142 = xi_247 * 2.0;
581 const double xi_143 = xi_250 * 2.0 + xi_257 * -2.0;
582 const double xi_144 = xi_14 + xi_141 * -1.0 + xi_142 + xi_143 + xi_19 + xi_4;
583 const double xi_146 = xi_144 * xi_145;
584 const double xi_147 = random_3_0 - 0.5;
585 const double xi_152 = random_0_1 - 0.5;
586 const double xi_166 = xi_122 + xi_256;
587 const double xi_167 = xi_12 + xi_166 + xi_20 + xi_244 + xi_262;
588 const double xi_168 = xi_134 * xi_167;
589 const double xi_169 = random_4_1 - 0.5;
590 const double xi_171 = xi_13 + xi_141 + xi_142 * -1.0 + xi_143 + xi_3;
591 const double xi_172 = xi_145 * xi_171;
592 const double xi_173 = random_4_0 - 0.5;
593 const double xi_178 = xi_119 + xi_23 + xi_246 + xi_255 + xi_265;
594 const double xi_179 = xi_134 * xi_178;
595 const double xi_180 = random_5_0 - 0.5;
596 const double xi_182 = xi_128 * -1.0 + xi_129 * -1.0 + xi_130 + xi_24 + xi_5;
597 const double xi_183 = xi_145 * xi_182;
598 const double xi_184 = random_3_1 - 0.5;
599 const double xi_212 = xi_182 * xi_211;
600 const double xi_214 = xi_178 * xi_213;
601 const double xi_220 = xi_144 * xi_211;
602 const double xi_221 = xi_133 * xi_213;
603 const double xi_235 = xi_167 * xi_213;
604 const double xi_236 = xi_171 * xi_211;
605 const double xi_31 = rr_0 * xi_30;
606 const double xi_44 = rr_0 * xi_43;
607 const double xi_51 = rr_0 * xi_50;
608 const double xi_54 = xi_261 * xi_53;
609 const double xi_59 = xi_254 * xi_53;
610 const double xi_81 = xi_259 * xi_53;
611 const double vel0Term = xi_247 + xi_257 + xi_3;
612 const double vel1Term = xi_252 + xi_4;
613 const double vel2Term = xi_256 + xi_5;
614 const double rho = vel0Term + vel1Term + vel2Term + xi_250 + xi_253 + xi_255 + xi_258 + xi_264 + xi_6;
615 const double xi_105 = kT * rho;
616 const double xi_106 = pow(xi_105 * (-1.0 * (omega_even * -1.0 + 1.0) * (omega_even * -1.0 + 1.0) + 1.0), 0.5);
617 const double xi_107 = xi_106 * (random_6_0 - 0.5) * 3.7416573867739413;
618 const double xi_108 = xi_106 * (random_7_0 - 0.5) * 5.4772255750516612;
619 const double xi_110 = xi_109 * (random_2_1 - 0.5) * pow(xi_105 * (-1.0 * (omega_bulk * -1.0 + 1.0) * (omega_bulk * -1.0 + 1.0) + 1.0), 0.5);
620 const double xi_111 = xi_106 * (random_6_1 - 0.5) * 8.3666002653407556;
621 const double xi_137 = pow(xi_105 * (-1.0 * (omega_odd * -1.0 + 1.0) * (omega_odd * -1.0 + 1.0) + 1.0), 0.5);
622 const double xi_138 = xi_137 * 1.4142135623730951;
623 const double xi_139 = xi_138 * 0.5;
624 const double xi_140 = xi_136 * xi_139;
625 const double xi_148 = xi_109 * xi_137;
626 const double xi_149 = xi_148 * 0.16666666666666666;
627 const double xi_150 = xi_147 * xi_149;
628 const double xi_151 = xi_146 * -1.0 + xi_150 * -1.0;
629 const double xi_153 = pow(xi_105 * (-1.0 * (omega_shear * -1.0 + 1.0) * (omega_shear * -1.0 + 1.0) + 1.0), 0.5);
630 const double xi_154 = xi_153 * 0.5;
631 const double xi_155 = xi_152 * xi_154;
632 const double xi_161 = xi_153 * (random_0_0 - 0.5) * 1.7320508075688772;
633 const double xi_165 = xi_146 + xi_150;
634 const double xi_170 = xi_139 * xi_169;
635 const double xi_174 = xi_149 * xi_173;
636 const double xi_175 = xi_172 + xi_174;
637 const double xi_177 = xi_172 * -1.0 + xi_174 * -1.0;
638 const double xi_181 = xi_139 * xi_180;
639 const double xi_185 = xi_149 * xi_184;
640 const double xi_186 = xi_183 * -1.0 + xi_185 * -1.0;
641 const double xi_188 = xi_183 + xi_185;
642 const double xi_189 = xi_152 * xi_153 * 0.25;
643 const double xi_192 = xi_107 * 0.083333333333333329;
644 const double xi_196 = xi_154 * (random_1_0 - 0.5);
645 const double xi_203 = xi_154 * (random_2_0 - 0.5);
646 const double xi_207 = xi_111 * -0.014285714285714285;
647 const double xi_208 = xi_108 * 0.050000000000000003;
648 const double xi_215 = xi_148 * 0.083333333333333329;
649 const double xi_216 = xi_184 * xi_215;
650 const double xi_217 = xi_138 * 0.25;
651 const double xi_218 = xi_180 * xi_217;
652 const double xi_219 = xi_212 * -1.0 + xi_214 + xi_216 * -1.0 + xi_218;
653 const double xi_222 = xi_147 * xi_215;
654 const double xi_223 = xi_136 * xi_217;
655 const double xi_224 = xi_220 * -1.0 + xi_221 + xi_222 * -1.0 + xi_223;
656 const double xi_225 = xi_220 + xi_221 * -1.0 + xi_222 + xi_223 * -1.0;
657 const double xi_227 = xi_189 * -1.0;
658 const double xi_230 = xi_111 * 0.035714285714285712;
659 const double xi_232 = xi_154 * (random_1_1 - 0.5);
660 const double xi_237 = xi_169 * xi_217;
661 const double xi_238 = xi_173 * xi_215;
662 const double xi_239 = xi_235 * -1.0 + xi_236 + xi_237 * -1.0 + xi_238;
663 const double xi_241 = xi_235 + xi_236 * -1.0 + xi_237 + xi_238 * -1.0;
664 const double xi_242 = xi_212 + xi_214 * -1.0 + xi_216 + xi_218 * -1.0;
665 const double xi_0 = ((1.0) / (rho));
666 const double xi_7 = xi_0 * 0.5;
667 const double u_0 = xi_0 * (vel0Term + xi_13 + xi_8 + xi_9) + xi_261 * xi_7;
668 const double xi_25 = u_0 * xi_261;
669 const double xi_37 = xi_25 * 0.16666666666666666;
670 const double xi_38 = xi_25 * 0.083333333333333329;
671 const double xi_39 = omega_shear * xi_38;
672 const double xi_40 = xi_37 * -1.0 + xi_39;
673 const double xi_56 = xi_25 * xi_55 * -1.0 + xi_37;
674 const double xi_57 = xi_43 * -1.0 + xi_54 + xi_56;
675 const double xi_61 = xi_25 * xi_60 * -1.0;
676 const double xi_68 = u_0 * xi_67;
677 const double xi_73 = u_0 * xi_72;
678 const double xi_77 = xi_43 + xi_54 * -1.0 + xi_56;
679 const double xi_84 = xi_38 * -1.0;
680 const double xi_95 = u_0 * xi_259;
681 const double xi_96 = xi_95 * 0.25;
682 const double xi_99 = xi_71 * xi_95;
683 const double xi_113 = rho * u_0 * u_0;
684 const double u_1 = xi_0 * (vel1Term + xi_16 + xi_19 + xi_257 + xi_8) + xi_254 * xi_7;
685 const double xi_26 = u_1 * xi_254;
686 const double xi_32 = xi_26 * 0.16666666666666666;
687 const double xi_45 = xi_26 * 0.083333333333333329;
688 const double xi_46 = omega_shear * xi_45;
689 const double xi_47 = xi_32 * -1.0 + xi_46;
690 const double xi_62 = xi_26 * xi_60 * -1.0;
691 const double xi_69 = u_1 * 0.25;
692 const double xi_70 = xi_261 * xi_69;
693 const double xi_74 = u_1 * xi_71;
694 const double xi_75 = xi_261 * xi_74;
695 const double xi_76 = xi_68 * -1.0 + xi_70 * -1.0 + xi_73 + xi_75;
696 const double xi_78 = xi_68 + xi_70 + xi_73 * -1.0 + xi_75 * -1.0;
697 const double xi_86 = xi_259 * xi_69;
698 const double xi_88 = xi_259 * xi_74;
699 const double xi_93 = xi_45 * -1.0;
700 const double xi_112 = rho * u_1 * u_1;
701 const double xi_121 = xi_112 + xi_120 + xi_9;
702 const double xi_197 = rho * u_1;
703 const double xi_199 = xi_198 * (u_0 * xi_197 + xi_120 + xi_247 + xi_252);
704 const double xi_200 = xi_196 * -1.0 + xi_199 * -1.0;
705 const double xi_201 = xi_196 + xi_199;
706 const double u_2 = xi_0 * (vel2Term + xi_21 + xi_24 + xi_260) + xi_259 * xi_7;
707 const double xi_27 = u_2 * xi_259;
708 const double xi_33 = xi_27 * 0.16666666666666666;
709 const double xi_34 = xi_27 * 0.083333333333333329;
710 const double xi_35 = omega_shear * xi_34;
711 const double xi_36 = xi_33 * -1.0 + xi_35;
712 const double xi_41 = omega_shear * xi_32 * -1.0 + xi_26 * 0.33333333333333331 + xi_36 + xi_40;
713 const double xi_48 = omega_shear * xi_37 * -1.0 + xi_25 * 0.33333333333333331 + xi_36 + xi_47;
714 const double xi_52 = omega_shear * xi_33 * -1.0 + xi_27 * 0.33333333333333331 + xi_40 + xi_47;
715 const double xi_58 = xi_34 * -1.0;
716 const double xi_63 = xi_27 * xi_60 * -1.0;
717 const double xi_64 = xi_26 * xi_55 * -1.0 + xi_32 + xi_61 + xi_62 + xi_63;
718 const double xi_65 = xi_30 + xi_59 * -1.0 + xi_64;
719 const double xi_66 = xi_35 + xi_58 + xi_65;
720 const double xi_79 = xi_30 * -1.0 + xi_59 + xi_64;
721 const double xi_80 = xi_35 + xi_58 + xi_79;
722 const double xi_82 = xi_27 * xi_55 * -1.0 + xi_33;
723 const double xi_83 = xi_50 + xi_81 * -1.0 + xi_82;
724 const double xi_85 = xi_39 + xi_65 + xi_84;
725 const double xi_87 = u_2 * xi_67;
726 const double xi_89 = u_2 * xi_72;
727 const double xi_90 = xi_86 + xi_87 + xi_88 * -1.0 + xi_89 * -1.0;
728 const double xi_91 = xi_39 + xi_79 + xi_84;
729 const double xi_92 = xi_86 * -1.0 + xi_87 * -1.0 + xi_88 + xi_89;
730 const double xi_94 = xi_46 + xi_61 + xi_62 + xi_63 + xi_83 + xi_93;
731 const double xi_97 = u_2 * xi_261;
732 const double xi_98 = xi_97 * 0.25;
733 const double xi_100 = xi_71 * xi_97;
734 const double xi_101 = xi_100 + xi_96 * -1.0 + xi_98 * -1.0 + xi_99;
735 const double xi_102 = xi_100 * -1.0 + xi_96 + xi_98 + xi_99 * -1.0;
736 const double xi_103 = xi_50 * -1.0 + xi_81 + xi_82;
737 const double xi_104 = xi_103 + xi_46 + xi_61 + xi_62 + xi_63 + xi_93;
738 const double xi_115 = rho * u_2 * u_2;
739 const double xi_116 = xi_114 + xi_115 * 0.66666666666666663 + xi_245 * 3.0 + xi_265 * 3.0;
740 const double xi_117 = omega_even * (xi_112 * 0.66666666666666663 + xi_113 * 1.6666666666666667 + xi_116 + xi_246 * -3.0 + xi_248 * -3.0 + xi_251 * 3.0 + xi_255 * -3.0 + xi_263 * -3.0 + xi_264 * 3.0);
741 const double xi_124 = omega_bulk * (xi_113 + xi_115 + xi_119 + xi_121 + xi_123 + xi_17 + xi_22 + xi_253);
742 const double xi_127 = omega_even * (xi_112 * 2.3333333333333335 + xi_116 + xi_126 + xi_249 * -5.0 + xi_251 * -2.0 + xi_256 * -5.0 + xi_260 * -5.0 + xi_262 * -5.0 + xi_264 * -2.0);
743 const double xi_131 = omega_even * (xi_114 + xi_115 * 3.0 + xi_126 + xi_128 + xi_129 + xi_130 + xi_245 * -4.0 + xi_247 * -7.0 + xi_250 * -7.0 + xi_251 * 5.0 + xi_252 * -7.0 + xi_257 * -7.0 + xi_264 * 5.0 + xi_265 * -4.0);
744 const double xi_156 = xi_115 * -1.0 + xi_265;
745 const double xi_157 = omega_shear * (xi_121 + xi_156 + xi_16 + xi_2 + xi_251 * -1.0 + xi_256 + xi_6);
746 const double xi_158 = xi_157 * 0.125;
747 const double xi_159 = xi_107 * -0.11904761904761904 + xi_131 * -0.01984126984126984;
748 const double xi_160 = omega_shear * (xi_112 * -1.0 + xi_113 * 2.0 + xi_120 + xi_123 + xi_125 + xi_156 + xi_244 * -2.0 + xi_245 + xi_251 + xi_258 * -2.0 + xi_264 + xi_9);
749 const double xi_162 = xi_160 * -0.041666666666666664 + xi_161 * -0.16666666666666666;
750 const double xi_163 = xi_108 * -0.10000000000000001 + xi_117 * -0.050000000000000003 + xi_162;
751 const double xi_164 = xi_111 * 0.028571428571428571 + xi_127 * 0.014285714285714285 + xi_155 + xi_158 + xi_159 + xi_163;
752 const double xi_176 = xi_111 * -0.071428571428571425 + xi_127 * -0.035714285714285712 + xi_159 + xi_160 * 0.083333333333333329 + xi_161 * 0.33333333333333331;
753 const double xi_187 = xi_107 * 0.095238095238095233 + xi_111 * -0.042857142857142858 + xi_127 * -0.021428571428571429 + xi_131 * 0.015873015873015872 + xi_155 * -1.0 + xi_158 * -1.0 + xi_163;
754 const double xi_190 = xi_157 * 0.0625;
755 const double xi_191 = xi_131 * 0.013888888888888888;
756 const double xi_193 = xi_110 * 0.083333333333333329 + xi_124 * 0.041666666666666664;
757 const double xi_194 = xi_160 * 0.020833333333333332 + xi_161 * 0.083333333333333329 + xi_193;
758 const double xi_195 = xi_165 + xi_189 + xi_190 + xi_191 + xi_192 + xi_194;
759 const double xi_202 = xi_151 + xi_189 + xi_190 + xi_191 + xi_192 + xi_194;
760 const double xi_204 = xi_127 * -0.0071428571428571426;
761 const double xi_205 = xi_198 * (u_2 * xi_197 + xi_132 + xi_17 + xi_246);
762 const double xi_206 = xi_117 * 0.025000000000000001;
763 const double xi_209 = xi_107 * -0.023809523809523808 + xi_131 * -0.003968253968253968;
764 const double xi_210 = xi_162 + xi_193 + xi_203 + xi_204 + xi_205 + xi_206 + xi_207 + xi_208 + xi_209;
765 const double xi_226 = xi_162 + xi_193 + xi_203 * -1.0 + xi_204 + xi_205 * -1.0 + xi_206 + xi_207 + xi_208 + xi_209;
766 const double xi_228 = xi_190 * -1.0;
767 const double xi_229 = xi_127 * 0.017857142857142856;
768 const double xi_231 = xi_188 + xi_194 + xi_209 + xi_227 + xi_228 + xi_229 + xi_230;
769 const double xi_233 = xi_198 * (rho * u_0 * u_2 + xi_10 + xi_166 + xi_249);
770 const double xi_234 = xi_232 * -1.0 + xi_233 * -1.0;
771 const double xi_240 = xi_232 + xi_233;
772 const double xi_243 = xi_186 + xi_194 + xi_209 + xi_227 + xi_228 + xi_229 + xi_230;
773 const double forceTerm_0 = xi_25 * xi_28 + xi_25 * -1.0 + xi_26 * xi_28 + xi_26 * -1.0 + xi_27 * xi_28 + xi_27 * -1.0;
774 const double forceTerm_1 = xi_29 + xi_31 * -1.0 + xi_41;
775 const double forceTerm_2 = xi_29 * -1.0 + xi_31 + xi_41;
776 const double forceTerm_3 = xi_42 * -1.0 + xi_44 + xi_48;
777 const double forceTerm_4 = xi_42 + xi_44 * -1.0 + xi_48;
778 const double forceTerm_5 = xi_49 + xi_51 * -1.0 + xi_52;
779 const double forceTerm_6 = xi_49 * -1.0 + xi_51 + xi_52;
780 const double forceTerm_7 = xi_57 + xi_66 + xi_76;
781 const double forceTerm_8 = xi_66 + xi_77 + xi_78;
782 const double forceTerm_9 = xi_57 + xi_78 + xi_80;
783 const double forceTerm_10 = xi_76 + xi_77 + xi_80;
784 const double forceTerm_11 = xi_83 + xi_85 + xi_90;
785 const double forceTerm_12 = xi_83 + xi_91 + xi_92;
786 const double forceTerm_13 = xi_101 + xi_57 + xi_94;
787 const double forceTerm_14 = xi_102 + xi_77 + xi_94;
788 const double forceTerm_15 = xi_103 + xi_85 + xi_92;
789 const double forceTerm_16 = xi_103 + xi_90 + xi_91;
790 const double forceTerm_17 = xi_102 + xi_104 + xi_57;
791 const double forceTerm_18 = xi_101 + xi_104 + xi_77;
792 _data_pdfs_20_30_10[ctr_0] = forceTerm_0 + xi_107 * 0.14285714285714285 + xi_108 * 0.20000000000000001 + xi_110 * -1.0 + xi_111 * 0.085714285714285715 + xi_117 * 0.10000000000000001 + xi_124 * -0.5 + xi_127 * 0.042857142857142858 + xi_131 * 0.023809523809523808 + xi_253;
793 _data_pdfs_20_31_10[ctr_0] = forceTerm_1 + xi_135 * -1.0 + xi_140 * -1.0 + xi_151 + xi_164 + xi_251;
794 _data_pdfs_20_32_10[ctr_0] = forceTerm_2 + xi_135 + xi_140 + xi_164 + xi_165 + xi_264;
795 _data_pdfs_20_33_10[ctr_0] = forceTerm_3 + xi_168 + xi_170 + xi_175 + xi_176 + xi_258;
796 _data_pdfs_20_34_10[ctr_0] = forceTerm_4 + xi_168 * -1.0 + xi_170 * -1.0 + xi_176 + xi_177 + xi_244;
797 _data_pdfs_20_35_10[ctr_0] = forceTerm_5 + xi_179 * -1.0 + xi_181 * -1.0 + xi_186 + xi_187 + xi_265;
798 _data_pdfs_20_36_10[ctr_0] = forceTerm_6 + xi_179 + xi_181 + xi_187 + xi_188 + xi_245;
799 _data_pdfs_20_37_10[ctr_0] = forceTerm_7 + xi_177 + xi_195 + xi_200 + xi_252;
800 _data_pdfs_20_38_10[ctr_0] = forceTerm_8 + xi_175 + xi_195 + xi_201 + xi_257;
801 _data_pdfs_20_39_10[ctr_0] = forceTerm_9 + xi_177 + xi_201 + xi_202 + xi_250;
802 _data_pdfs_20_310_10[ctr_0] = forceTerm_10 + xi_175 + xi_200 + xi_202 + xi_247;
803 _data_pdfs_20_311_10[ctr_0] = forceTerm_11 + xi_210 + xi_219 + xi_224 + xi_263;
804 _data_pdfs_20_312_10[ctr_0] = forceTerm_12 + xi_219 + xi_225 + xi_226 + xi_248;
805 _data_pdfs_20_313_10[ctr_0] = forceTerm_13 + xi_231 + xi_234 + xi_239 + xi_256;
806 _data_pdfs_20_314_10[ctr_0] = forceTerm_14 + xi_231 + xi_240 + xi_241 + xi_260;
807 _data_pdfs_20_315_10[ctr_0] = forceTerm_15 + xi_224 + xi_226 + xi_242 + xi_246;
808 _data_pdfs_20_316_10[ctr_0] = forceTerm_16 + xi_210 + xi_225 + xi_242 + xi_255;
809 _data_pdfs_20_317_10[ctr_0] = forceTerm_17 + xi_239 + xi_240 + xi_243 + xi_262;
810 _data_pdfs_20_318_10[ctr_0] = forceTerm_18 + xi_234 + xi_241 + xi_243 + xi_249;
811 }
812 }
813 }
814 }
815}
816} // namespace internal_25bc51f30ec2c20f3ee9796f7dcb65c6
817
819 auto force = block->getData<field::GhostLayerField<double, 3>>(forceID);
820 auto pdfs = block->getData<field::GhostLayerField<double, 19>>(pdfsID);
821
822 auto &omega_bulk = this->omega_bulk_;
823 auto block_offset_1 = this->block_offset_1_;
824 auto &seed = this->seed_;
825 auto &omega_even = this->omega_even_;
826 auto &kT = this->kT_;
827 auto &omega_odd = this->omega_odd_;
828 auto block_offset_2 = this->block_offset_2_;
829 auto &time_step = this->time_step_;
830 auto block_offset_0 = this->block_offset_0_;
831 auto &omega_shear = this->omega_shear_;
832 block_offset_generator(block, block_offset_0, block_offset_1, block_offset_2);
833 WALBERLA_ASSERT_GREATER_EQUAL(0, -int_c(force->nrOfGhostLayers()));
834 double *RESTRICT const _data_force = force->dataAt(0, 0, 0, 0);
835 WALBERLA_ASSERT_EQUAL(force->layout(), field::fzyx);
836 WALBERLA_ASSERT_EQUAL((uintptr_t)force->dataAt(0, 0, 0, 0) % 32, 0);
837 WALBERLA_ASSERT_GREATER_EQUAL(0, -int_c(pdfs->nrOfGhostLayers()));
838 double *RESTRICT _data_pdfs = pdfs->dataAt(0, 0, 0, 0);
839 WALBERLA_ASSERT_EQUAL(pdfs->layout(), field::fzyx);
840 WALBERLA_ASSERT_EQUAL((uintptr_t)pdfs->dataAt(0, 0, 0, 0) % 32, 0);
841 WALBERLA_ASSERT_GREATER_EQUAL(force->xSizeWithGhostLayer(), int64_t(cell_idx_c(force->xSize()) + 0));
842 const int64_t _size_force_0 = int64_t(cell_idx_c(force->xSize()) + 0);
843 WALBERLA_ASSERT_EQUAL(force->layout(), field::fzyx);
844 WALBERLA_ASSERT_EQUAL((uintptr_t)force->dataAt(0, 0, 0, 0) % 32, 0);
845 WALBERLA_ASSERT_GREATER_EQUAL(force->ySizeWithGhostLayer(), int64_t(cell_idx_c(force->ySize()) + 0));
846 const int64_t _size_force_1 = int64_t(cell_idx_c(force->ySize()) + 0);
847 WALBERLA_ASSERT_EQUAL(force->layout(), field::fzyx);
848 WALBERLA_ASSERT_EQUAL((uintptr_t)force->dataAt(0, 0, 0, 0) % 32, 0);
849 WALBERLA_ASSERT_GREATER_EQUAL(force->zSizeWithGhostLayer(), int64_t(cell_idx_c(force->zSize()) + 0));
850 const int64_t _size_force_2 = int64_t(cell_idx_c(force->zSize()) + 0);
851 WALBERLA_ASSERT_EQUAL(force->layout(), field::fzyx);
852 WALBERLA_ASSERT_EQUAL((uintptr_t)force->dataAt(0, 0, 0, 0) % 32, 0);
853 const int64_t _stride_force_1 = int64_t(force->yStride());
854 const int64_t _stride_force_2 = int64_t(force->zStride());
855 const int64_t _stride_force_3 = int64_t(1 * int64_t(force->fStride()));
856 const int64_t _stride_pdfs_1 = int64_t(pdfs->yStride());
857 const int64_t _stride_pdfs_2 = int64_t(pdfs->zStride());
858 const int64_t _stride_pdfs_3 = int64_t(1 * int64_t(pdfs->fStride()));
859 internal_25bc51f30ec2c20f3ee9796f7dcb65c6::collidesweepdoubleprecisionthermalizedavx_collidesweepdoubleprecisionthermalizedavx(_data_force, _data_pdfs, _size_force_0, _size_force_1, _size_force_2, _stride_force_1, _stride_force_2, _stride_force_3, _stride_pdfs_1, _stride_pdfs_2, _stride_pdfs_3, block_offset_0, block_offset_1, block_offset_2, kT, omega_bulk, omega_even, omega_odd, omega_shear, seed, time_step);
860}
861
// Runs the thermalized double-precision AVX collision sweep on the part of
// `globalCellInterval` that overlaps this block (expanded by `ghostLayers`).
// Mirrors run(): it offsets all field data pointers to the interval's minimum
// corner and sizes the kernel loops to the interval extent instead of the
// full block. NOTE: generated by lbmpy_walberla/pystencils — do not hand-edit.
862void CollideSweepDoublePrecisionThermalizedAVX::runOnCellInterval(const shared_ptr<StructuredBlockStorage> &blocks, const CellInterval &globalCellInterval, cell_idx_t ghostLayers, IBlock *block) {
  // Clip the requested global interval to this block's bounding box
  // (including ghost layers), then convert to block-local coordinates.
863 CellInterval ci = globalCellInterval;
864 CellInterval blockBB = blocks->getBlockCellBB(*block);
865 blockBB.expand(ghostLayers);
866 ci.intersect(blockBB);
867 blocks->transformGlobalToBlockLocalCellInterval(ci, *block);
  // Nothing to do if the interval does not touch this block.
868 if (ci.empty())
869 return;
870
  // Fetch the force (3 components) and pdf (19 directions, D3Q19) fields
  // registered for this sweep.
871 auto force = block->getData<field::GhostLayerField<double, 3>>(forceID);
872 auto pdfs = block->getData<field::GhostLayerField<double, 19>>(pdfsID);
873
  // Snapshot sweep parameters from members. The block offsets are taken by
  // value because block_offset_generator overwrites them per block below;
  // the relaxation rates / kT / RNG state are bound by reference.
874 auto &omega_bulk = this->omega_bulk_;
875 auto block_offset_1 = this->block_offset_1_;
876 auto &seed = this->seed_;
877 auto &omega_even = this->omega_even_;
878 auto &kT = this->kT_;
879 auto &omega_odd = this->omega_odd_;
880 auto block_offset_2 = this->block_offset_2_;
881 auto &time_step = this->time_step_;
882 auto block_offset_0 = this->block_offset_0_;
883 auto &omega_shear = this->omega_shear_;
  // Fill in this block's global cell offsets (used to seed the Philox RNG
  // so random numbers are position-dependent across blocks).
884 block_offset_generator(block, block_offset_0, block_offset_1, block_offset_2);
  // Base pointers are positioned at the interval's minimum corner; the
  // asserts check ghost-layer coverage, fzyx layout, and the 32-byte
  // alignment the AVX loads/stores require.
885 WALBERLA_ASSERT_GREATER_EQUAL(ci.xMin(), -int_c(force->nrOfGhostLayers()));
886 WALBERLA_ASSERT_GREATER_EQUAL(ci.yMin(), -int_c(force->nrOfGhostLayers()));
887 WALBERLA_ASSERT_GREATER_EQUAL(ci.zMin(), -int_c(force->nrOfGhostLayers()));
888 double *RESTRICT const _data_force = force->dataAt(ci.xMin(), ci.yMin(), ci.zMin(), 0);
889 WALBERLA_ASSERT_EQUAL(force->layout(), field::fzyx);
890 WALBERLA_ASSERT_EQUAL((uintptr_t)force->dataAt(0, 0, 0, 0) % 32, 0);
891 WALBERLA_ASSERT_GREATER_EQUAL(ci.xMin(), -int_c(pdfs->nrOfGhostLayers()));
892 WALBERLA_ASSERT_GREATER_EQUAL(ci.yMin(), -int_c(pdfs->nrOfGhostLayers()));
893 WALBERLA_ASSERT_GREATER_EQUAL(ci.zMin(), -int_c(pdfs->nrOfGhostLayers()));
894 double *RESTRICT _data_pdfs = pdfs->dataAt(ci.xMin(), ci.yMin(), ci.zMin(), 0);
895 WALBERLA_ASSERT_EQUAL(pdfs->layout(), field::fzyx);
896 WALBERLA_ASSERT_EQUAL((uintptr_t)pdfs->dataAt(0, 0, 0, 0) % 32, 0);
  // Kernel loop extents come from the (clipped) cell interval, not the
  // full field size.
897 WALBERLA_ASSERT_GREATER_EQUAL(force->xSizeWithGhostLayer(), int64_t(cell_idx_c(ci.xSize()) + 0));
898 const int64_t _size_force_0 = int64_t(cell_idx_c(ci.xSize()) + 0);
899 WALBERLA_ASSERT_EQUAL(force->layout(), field::fzyx);
900 WALBERLA_ASSERT_EQUAL((uintptr_t)force->dataAt(0, 0, 0, 0) % 32, 0);
901 WALBERLA_ASSERT_GREATER_EQUAL(force->ySizeWithGhostLayer(), int64_t(cell_idx_c(ci.ySize()) + 0));
902 const int64_t _size_force_1 = int64_t(cell_idx_c(ci.ySize()) + 0);
903 WALBERLA_ASSERT_EQUAL(force->layout(), field::fzyx);
904 WALBERLA_ASSERT_EQUAL((uintptr_t)force->dataAt(0, 0, 0, 0) % 32, 0);
905 WALBERLA_ASSERT_GREATER_EQUAL(force->zSizeWithGhostLayer(), int64_t(cell_idx_c(ci.zSize()) + 0));
906 const int64_t _size_force_2 = int64_t(cell_idx_c(ci.zSize()) + 0);
907 WALBERLA_ASSERT_EQUAL(force->layout(), field::fzyx);
908 WALBERLA_ASSERT_EQUAL((uintptr_t)force->dataAt(0, 0, 0, 0) % 32, 0);
  // y/z/f strides of both fields, in elements (x-stride is 1 in fzyx and
  // is implicit in the generated kernel).
909 const int64_t _stride_force_1 = int64_t(force->yStride());
910 const int64_t _stride_force_2 = int64_t(force->zStride());
911 const int64_t _stride_force_3 = int64_t(1 * int64_t(force->fStride()));
912 const int64_t _stride_pdfs_1 = int64_t(pdfs->yStride());
913 const int64_t _stride_pdfs_2 = int64_t(pdfs->zStride());
914 const int64_t _stride_pdfs_3 = int64_t(1 * int64_t(pdfs->fStride()));
  // Dispatch to the generated AVX collision kernel for this interval.
915 internal_25bc51f30ec2c20f3ee9796f7dcb65c6::collidesweepdoubleprecisionthermalizedavx_collidesweepdoubleprecisionthermalizedavx(_data_force, _data_pdfs, _size_force_0, _size_force_1, _size_force_2, _stride_force_1, _stride_force_2, _stride_force_3, _stride_pdfs_1, _stride_pdfs_2, _stride_pdfs_3, block_offset_0, block_offset_1, block_offset_2, kT, omega_bulk, omega_even, omega_odd, omega_shear, seed, time_step);
916}
917
918} // namespace pystencils
919} // namespace walberla
920
921#if (defined WALBERLA_CXX_COMPILER_IS_GNU) || (defined WALBERLA_CXX_COMPILER_IS_CLANG)
922#pragma GCC diagnostic pop
923#endif
924
925#if (defined WALBERLA_CXX_COMPILER_IS_INTEL)
926#pragma warning pop
927#endif
#define FUNC_PREFIX
\file AdvectiveFluxKernel_double_precision.cpp \ingroup lbm \author lbmpy
#define RESTRICT
\file AdvectiveFluxKernel_double_precision.h \author pystencils
__global__ float * force
std::function< void(IBlock *, uint32_t &, uint32_t &, uint32_t &)> block_offset_generator
void runOnCellInterval(const shared_ptr< StructuredBlockStorage > &blocks, const CellInterval &globalCellInterval, cell_idx_t ghostLayers, IBlock *block)
static double * block(double *p, std::size_t index, std::size_t size)
Definition elc.cpp:174
static FUNC_PREFIX void collidesweepdoubleprecisionthermalizedavx_collidesweepdoubleprecisionthermalizedavx(double *RESTRICT const _data_force, double *RESTRICT _data_pdfs, int64_t const _size_force_0, int64_t const _size_force_1, int64_t const _size_force_2, int64_t const _stride_force_1, int64_t const _stride_force_2, int64_t const _stride_force_3, int64_t const _stride_pdfs_1, int64_t const _stride_pdfs_2, int64_t const _stride_pdfs_3, uint32_t block_offset_0, uint32_t block_offset_1, uint32_t block_offset_2, double kT, double omega_bulk, double omega_even, double omega_odd, double omega_shear, uint32_t seed, uint32_t time_step)
QUALIFIERS void philox_double2(uint32 ctr0, uint32 ctr1, uint32 ctr2, uint32 ctr3, uint32 key0, uint32 key1, double &rnd1, double &rnd2)