46 double eta2 = (eta*eta);
47 double eta3 = (eta2*eta);
51 double noSpin, eqSpin, uneqSpin;
58 noSpin = (1.4873184918202145 + 1974.6112656679577*eta + 27563.641024162127*eta2 - 19837.908020966777*eta3)/(1. + 143.29004876335128*eta + 458.4097306093354*eta2);
60 eqSpin = (S*(27.952730865904343 + eta*(-365.55631765202895 - 260.3494489873286*S) + 3.2646808851249016*S + 3011.446602208493*eta2*S - 19.38970173389662*S2 + eta3*(1612.2681322644232 - 6962.675551371755*S + 1486.4658089990298*S2)))/(12.647425554323242 - 10.540154508599963*S + 1.*S2);
62 uneqSpin = dchi*
delta*(-0.016404056649860943 - 296.473359655246*eta)*eta2;
68 XLAL_ERROR_REAL8(
XLAL_EINVAL,
"Error in IMRPhenomX_Intermediate_Amp_22_vA: IMRPhenomXIntermediateAmpVersion is not valid. Recommended flag is 104. \n");
72 return (noSpin + eqSpin + uneqSpin);
/* NOTE(review): the function header is not visible in this extraction; the
 * error string below identifies this block as
 * IMRPhenomX_Intermediate_Amp_22_v2 (second intermediate-amplitude
 * collocation value of the 22 mode). eta, S, S2, dchi and delta are
 * declared outside this view — presumably symmetric mass ratio and spin
 * combinations; TODO confirm against the full file. */
85 double eta2 = (eta*eta);
86 double eta3 = (eta2*eta);
/* Fit is split into non-spinning, equal-spin and unequal-spin parts. */
90 double noSpin, eqSpin, uneqSpin;
/* Rational-function fit in eta for the non-spinning contribution. */
97 noSpin = (2.2436523786378983 + 2162.4749081764216*eta + 24460.158604784723*eta2 - 12112.140570900956*eta3)/(1. + 120.78623282522702*eta + 416.4179522274108*eta2);
/* Equal-spin correction: polynomial in eta and S over a quadratic in S. */
99 eqSpin = (S*(6.727511603827924 + eta2*(414.1400701039126 - 234.3754066885935*S) - 5.399284768639545*S
100 + eta*(-186.87972530996245 + 128.7402290554767*S)))/(3.24359204029217 - 3.975650468231452*S + 1.*S2);
/* Unequal-spin correction, linear in dchi*delta. */
102 uneqSpin = dchi*
delta*(-59.52510939953099 + 13.12679437100751*eta)*eta2;
/* NOTE(review): the conditional/switch guarding this error path was dropped
 * by the extraction; reached only for an invalid IntAmpFlag in the original. */
108 XLAL_ERROR_REAL8(
XLAL_EINVAL,
"Error in IMRPhenomX_Intermediate_Amp_22_v2: IMRPhenomXIntermediateAmpVersion is not valid. Recommended flag is 104 (should not call *_Amp_22_v2). \n");
112 return (noSpin + eqSpin + uneqSpin);
/* NOTE(review): function header not visible here; the error string below
 * identifies this as IMRPhenomX_Intermediate_Amp_22_v3 (third
 * intermediate-amplitude collocation value of the 22 mode). */
125 double eta2 = (eta*eta);
126 double eta3 = (eta2*eta);
/* Fit split into non-spinning, equal-spin and unequal-spin parts. */
130 double noSpin, eqSpin, uneqSpin;
/* NOTE(review): the case labels, braces and breaks of this switch were
 * dropped by the extraction; code left byte-identical. */
132 switch ( IntAmpFlag )
/* Rational-function fit in eta for the non-spinning contribution. */
137 noSpin = (1.195392410912163 + 1677.2558976605421*eta + 24838.37133975971*eta2 - 17277.938868280915*eta3)/(1. + 144.78606839716073*eta + 428.8155916011666*eta2);
/* Equal-spin correction. */
139 eqSpin = (S*(-2.1413952025647793 + 0.5719137940424858*S + eta*(46.61350006858767 + 0.40917927503842105*S - 11.526500209146906*S2)
140 + 1.1833965566688387*S2 + eta2*(-84.82318288272965 - 34.90591158988979*S + 19.494962340530186*S2)))/(-1.4786392693666195 + 1.*S);
/* Unequal-spin correction, linear in dchi*delta. */
142 uneqSpin = dchi*
delta*(-333.7662575986524 + 532.2475589084717*eta)*eta3;
/* Invalid-flag error path (default branch in the original switch). */
148 XLAL_ERROR_REAL8(
XLAL_EINVAL,
"Error in IMRPhenomX_Intermediate_Amp_22_v3: IMRPhenomXIntermediateAmpVersion is not valid. Recommended flag is 104 (should not call *_Amp_22_v3). \n");
152 return (noSpin + eqSpin + uneqSpin);
/**
 * Constant coefficient (delta0) of the intermediate-amplitude polynomial
 * ansatz: the closed-form solution of the linear system fixing the
 * polynomial through collocation values v1..v4 at frequencies f1..f4 with
 * boundary derivatives d1 and d4. The ansatz order is selected by
 * IntAmpFlag.
 *
 * NOTE(review): the opening brace, `case` labels, `break`s, `retVal`
 * declaration and closing brace of the switch are not visible in this
 * extraction (embedded original line numbers jump); code left byte-identical.
 */
157 static double IMRPhenomX_Intermediate_Amp_22_delta0(
double d1,
double d4,
double v1,
double v2,
double v3,
double v4,
double f1,
double f2,
double f3,
double f4,
int IntAmpFlag)
161 switch ( IntAmpFlag )
/* First visible case: precompute powers and pairwise differences of the
 * collocation frequencies (UNUSED silences unused-variable warnings). */
165 UNUSED
double f12 = f1*f1;
166 UNUSED
double f13 = f12*f1;
167 UNUSED
double f14 = f13*f1;
169 UNUSED
double f22 = f2*f2;
170 UNUSED
double f23 = f22*f2;
171 UNUSED
double f24 = f23*f2;
172 UNUSED
double f25 = f24*f2;
174 UNUSED
double f32 = f3*f3;
175 UNUSED
double f33 = f32*f3;
176 UNUSED
double f34 = f33*f3;
178 UNUSED
double f42 = f4*f4;
179 UNUSED
double f43 = f42*f4;
180 UNUSED
double f44 = f43*f4;
181 UNUSED
double f45 = f44*f4;
183 UNUSED
double f1mf2 = f1-f2;
184 UNUSED
double f1mf3 = f1-f3;
185 UNUSED
double f1mf4 = f1-f4;
186 UNUSED
double f2mf3 = f2-f3;
187 UNUSED
double f2mf4 = f2-f4;
188 UNUSED
double f3mf4 = f3-f4;
190 UNUSED
double f1mf42 = f1mf4*f1mf4;
191 UNUSED
double f3mf42 = f3mf4*f3mf4;
192 UNUSED
double f2mf32 = f2mf3*f2mf3;
193 UNUSED
double f2mf42 = f2mf4*f2mf4;
/* Closed-form delta0 for this ansatz (uses d4 but not d1). */
195 retVal = (-(d4*f1*f1mf2*f1mf3*f1mf4*f2*f2mf3*f2mf4*f3*f3mf4*f4) - f1*f1mf3*f1mf42*f3*f3mf42*f42*v2 +
196 f24*(-(f1*f1mf42*f42*v3) + f33*(f42*v1 + f12*v4 - 2*f1*f4*v4) + f3*f4*(f43*v1 + 2*f13*v4 - 3*f12*f4*v4) - f32*(2*f43*v1 + f13*v4 - 3*f1*f42*v4)) +
197 f2*f4*(f12*f1mf42*f43*v3 - f34*(f43*v1 + 2*f13*v4 - 3*f12*f4*v4) - f32*f4*(f44*v1 + 3*
f14*v4 - 4*f13*f4*v4) + 2*f33*(f44*v1 +
f14*v4 - 2*f12*f42*v4)) +
198 f22*(-(f1*f1mf42*(2*f1 + f4)*f43*v3) + f3*f42*(f44*v1 + 3*
f14*v4 - 4*f13*f4*v4) + f34*(2*f43*v1 + f13*v4 - 3*f1*f42*v4) - f33*(3*f44*v1 +
f14*v4 - 4*f1*f43*v4)) +
199 f23*(f1*f1mf42*(f1 + 2*f4)*f42*v3 - f34*(f42*v1 + f12*v4 - 2*f1*f4*v4) + f32*(3*f44*v1 +
f14*v4 - 4*f1*f43*v4) - 2*f3*(f45*v1 +
f14*f4*v4 - 2*f12*f43*v4)))/(f1mf2*f1mf3*f1mf42*f2mf3*f2mf42*f3mf42);
/* Second visible case: lower-order ansatz (uses both d1 and d4). */
205 UNUSED
double f12 = f1*f1;
206 UNUSED
double f13 = f12*f1;
207 UNUSED
double f14 = f13*f1;
209 UNUSED
double f22 = f2*f2;
210 UNUSED
double f23 = f22*f2;
211 UNUSED
double f24 = f23*f2;
213 UNUSED
double f32 = f3*f3;
214 UNUSED
double f33 = f32*f3;
215 UNUSED
double f34 = f33*f3;
217 UNUSED
double f42 = f4*f4;
218 UNUSED
double f43 = f42*f4;
219 UNUSED
double f44 = f43*f4;
221 UNUSED
double f1mf2 = f1-f2;
222 UNUSED
double f1mf3 = f1-f3;
223 UNUSED
double f1mf4 = f1-f4;
224 UNUSED
double f2mf3 = f2-f3;
225 UNUSED
double f2mf4 = f2-f4;
226 UNUSED
double f3mf4 = f3-f4;
228 UNUSED
double f1mf22 = f1mf2*f1mf2;
229 UNUSED
double f1mf32 = f1mf3*f1mf3;
230 UNUSED
double f1mf42 = f1mf4*f1mf4;
231 UNUSED
double f2mf32 = f2mf3*f2mf3;
232 UNUSED
double f2mf42 = f2mf4*f2mf4;
233 UNUSED
double f3mf42 = f3mf4*f3mf4;
234 UNUSED
double f1mf43 = f1mf4*f1mf4*f1mf4;
/* delta0 for the lower-order ansatz (f3/v3 do not appear). */
236 retVal = ((-(d4*f12*f1mf22*f1mf4*f2*f2mf4*f4) + d1*f1*f1mf2*f1mf4*f2*f2mf42*f42 + f42*(f2*f2mf42*(-4*f12 + 3*f1*f2 + 2*f1*f4 - f2*f4)*v1 + f12*f1mf43*v2) +
237 f12*f1mf22*f2*(f1*f2 - 2*f1*f4 - 3*f2*f4 + 4*f42)*v4)/(f1mf22*f1mf43*f2mf42));
/* Third visible case: highest-order ansatz, powers up to f^7 needed. */
243 UNUSED
double f12 = f1*f1;
244 UNUSED
double f13 = f12*f1;
245 UNUSED
double f14 = f13*f1;
246 UNUSED
double f15 =
f14*f1;
247 UNUSED
double f16 = f15*f1;
248 UNUSED
double f17 = f16*f1;
250 UNUSED
double f22 = f2*f2;
251 UNUSED
double f23 = f22*f2;
252 UNUSED
double f24 = f23*f2;
253 UNUSED
double f25 = f24*f2;
254 UNUSED
double f26 = f25*f2;
255 UNUSED
double f27 = f26*f2;
257 UNUSED
double f32 = f3*f3;
258 UNUSED
double f33 = f32*f3;
259 UNUSED
double f34 = f33*f3;
260 UNUSED
double f35 = f34*f3;
261 UNUSED
double f36 = f35*f3;
262 UNUSED
double f37 = f36*f3;
264 UNUSED
double f42 = f4*f4;
265 UNUSED
double f43 = f42*f4;
266 UNUSED
double f44 = f43*f4;
267 UNUSED
double f45 = f44*f4;
268 UNUSED
double f46 = f45*f4;
269 UNUSED
double f47 = f46*f4;
271 UNUSED
double f1mf2 = f1-f2;
272 UNUSED
double f1mf3 = f1-f3;
273 UNUSED
double f1mf4 = f1-f4;
274 UNUSED
double f2mf3 = f2-f3;
275 UNUSED
double f2mf4 = f2-f4;
276 UNUSED
double f3mf4 = f3-f4;
278 UNUSED
double f1mf22 = f1mf2*f1mf2;
279 UNUSED
double f1mf32 = f1mf3*f1mf3;
280 UNUSED
double f1mf42 = f1mf4*f1mf4;
281 UNUSED
double f2mf32 = f2mf3*f2mf3;
282 UNUSED
double f2mf42 = f2mf4*f2mf4;
283 UNUSED
double f3mf42 = f3mf4*f3mf4;
284 UNUSED
double f1mf43 = f1mf4*f1mf4*f1mf4;
/* NOTE(review): the `retVal = (` prefix of this assignment appears to have
 * been dropped by the extraction; expression left byte-identical. */
287 (-(d4*f12*f1mf22*f1mf32*f1mf4*f2*f2mf3*f2mf4*f3*f3mf4*f4) - d1*f1*f1mf2*f1mf3*f1mf4*f2*f2mf3*f2mf42*f3*f3mf42*f42 + 5*f13*f24*f33*f42*v1 - 4*f12*f25*f33*f42*v1 - 5*f13*f23*f34*f42*v1 +
288 3*f1*f25*f34*f42*v1 + 4*f12*f23*f35*f42*v1 - 3*f1*f24*f35*f42*v1 - 10*f13*f24*f32*f43*v1 + 8*f12*f25*f32*f43*v1 + 5*f12*f24*f33*f43*v1 - 4*f1*f25*f33*f43*v1 + 10*f13*f22*f34*f43*v1 -
289 5*f12*f23*f34*f43*v1 - f25*f34*f43*v1 - 8*f12*f22*f35*f43*v1 + 4*f1*f23*f35*f43*v1 + f24*f35*f43*v1 + 5*f13*f24*f3*f44*v1 - 4*f12*f25*f3*f44*v1 + 15*f13*f23*f32*f44*v1 -
290 10*f12*f24*f32*f44*v1 - f1*f25*f32*f44*v1 - 15*f13*f22*f33*f44*v1 + 5*f1*f24*f33*f44*v1 + 2*f25*f33*f44*v1 - 5*f13*f2*f34*f44*v1 + 10*f12*f22*f34*f44*v1 - 5*f1*f23*f34*f44*v1 +
291 4*f12*f2*f35*f44*v1 + f1*f22*f35*f44*v1 - 2*f23*f35*f44*v1 - 10*f13*f23*f3*f45*v1 + 5*f12*f24*f3*f45*v1 + 2*f1*f25*f3*f45*v1 - f12*f23*f32*f45*v1 + 2*f1*f24*f32*f45*v1 - f25*f32*f45*v1 +
292 10*f13*f2*f33*f45*v1 + f12*f22*f33*f45*v1 - 3*f24*f33*f45*v1 - 5*f12*f2*f34*f45*v1 - 2*f1*f22*f34*f45*v1 + 3*f23*f34*f45*v1 - 2*f1*f2*f35*f45*v1 + f22*f35*f45*v1 + 5*f13*f22*f3*f46*v1 +
293 2*f12*f23*f3*f46*v1 - 4*f1*f24*f3*f46*v1 - 5*f13*f2*f32*f46*v1 - f1*f23*f32*f46*v1 + 2*f24*f32*f46*v1 - 2*f12*f2*f33*f46*v1 + f1*f22*f33*f46*v1 + 4*f1*f2*f34*f46*v1 - 2*f22*f34*f46*v1 -
294 3*f12*f22*f3*f47*v1 + 2*f1*f23*f3*f47*v1 + 3*f12*f2*f32*f47*v1 - f23*f32*f47*v1 - 2*f1*f2*f33*f47*v1 + f22*f33*f47*v1 - f17*f33*f42*v2 + 2*f16*f34*f42*v2 - f15*f35*f42*v2 + 2*f17*f32*f43*v2 -
295 f16*f33*f43*v2 - 4*f15*f34*f43*v2 + 3*
f14*f35*f43*v2 - f17*f3*f44*v2 - 4*f16*f32*f44*v2 + 8*f15*f33*f44*v2 - 3*f13*f35*f44*v2 + 3*f16*f3*f45*v2 - 8*
f14*f33*f45*v2 + 4*f13*f34*f45*v2 +
296 f12*f35*f45*v2 - 3*f15*f3*f46*v2 + 4*
f14*f32*f46*v2 + f13*f33*f46*v2 - 2*f12*f34*f46*v2 +
f14*f3*f47*v2 - 2*f13*f32*f47*v2 + f12*f33*f47*v2 + f17*f23*f42*v3 - 2*f16*f24*f42*v3 +
297 f15*f25*f42*v3 - 2*f17*f22*f43*v3 + f16*f23*f43*v3 + 4*f15*f24*f43*v3 - 3*
f14*f25*f43*v3 + f17*f2*f44*v3 + 4*f16*f22*f44*v3 - 8*f15*f23*f44*v3 + 3*f13*f25*f44*v3 - 3*f16*f2*f45*v3 +
298 8*
f14*f23*f45*v3 - 4*f13*f24*f45*v3 - f12*f25*f45*v3 + 3*f15*f2*f46*v3 - 4*
f14*f22*f46*v3 - f13*f23*f46*v3 + 2*f12*f24*f46*v3 -
f14*f2*f47*v3 + 2*f13*f22*f47*v3 - f12*f23*f47*v3 +
299 f12*f1mf22*f1mf32*f2*f2mf3*f3*(f4*(-3*f2*f3 + 4*(f2 + f3)*f4 - 5*f42) + f1*(f2*f3 - 2*(f2 + f3)*f4 + 3*f42))*v4)/(f1mf22*f1mf32*f1mf43*f2mf3*f2mf42*f3mf42)
/* Default branch: invalid IntAmpFlag. */
306 XLAL_ERROR_REAL8(
XLAL_EINVAL,
"Error in IMRPhenomX_Intermediate_Amp_22_v3: IMRPhenomXIntermediateAmpVersion is not valid. Recommended flag is 104.\n");
/**
 * Linear coefficient (delta1) of the intermediate-amplitude polynomial
 * ansatz; closed-form solution of the collocation system through
 * (f1,v1)..(f4,v4) with boundary derivatives d1, d4, with the ansatz order
 * selected by IntAmpFlag.
 *
 * NOTE(review): `case` labels, braces, `break`s and the `retVal`
 * declaration of the switch were dropped by the extraction; code below is
 * left byte-identical.
 */
314 static double IMRPhenomX_Intermediate_Amp_22_delta1(
double d1,
double d4,
double v1,
double v2,
double v3,
double v4,
double f1,
double f2,
double f3,
double f4,
int IntAmpFlag)
318 switch ( IntAmpFlag )
/* First visible case: precompute frequency powers, pairwise frequency
 * differences and pairwise value differences. */
322 UNUSED
double f12 = f1*f1;
323 UNUSED
double f13 = f12*f1;
324 UNUSED
double f14 = f13*f1;
326 UNUSED
double f22 = f2*f2;
327 UNUSED
double f23 = f22*f2;
328 UNUSED
double f24 = f23*f2;
329 UNUSED
double f25 = f24*f2;
331 UNUSED
double f32 = f3*f3;
332 UNUSED
double f33 = f32*f3;
333 UNUSED
double f34 = f33*f3;
335 UNUSED
double f42 = f4*f4;
336 UNUSED
double f43 = f42*f4;
337 UNUSED
double f44 = f43*f4;
339 UNUSED
double f1mf2 = f1-f2;
340 UNUSED
double f1mf3 = f1-f3;
341 UNUSED
double f1mf4 = f1-f4;
342 UNUSED
double f2mf3 = f2-f3;
343 UNUSED
double f2mf4 = f2-f4;
344 UNUSED
double f3mf4 = f3-f4;
346 UNUSED
double f1mf42 = f1mf4*f1mf4;
347 UNUSED
double f2mf42 = f2mf4*f2mf4;
348 UNUSED
double f3mf42 = f3mf4*f3mf4;
350 UNUSED
double v1mv2 = v1-v2;
351 UNUSED
double v2mv3 = v2-v3;
352 UNUSED
double v2mv4 = v2-v4;
353 UNUSED
double v1mv3 = v1-v3;
354 UNUSED
double v1mv4 = v1-v4;
355 UNUSED
double v3mv4 = v3-v4;
/* delta1 for this ansatz (d4 only). */
357 retVal =(d4*f1mf4*f2mf4*f3mf4*(f1*f2*f3 + f2*f3*f4 + f1*(f2 + f3)*f4) + (f4*(f12*f1mf42*f43*v2mv3 + f34*(f43*v1mv2 +
358 3*f12*f4*v2mv4 + 2*f13*(-v2 + v4)) + f32*f4*(f44*v1mv2 + 4*f13*f4*v2mv4 + 3*
f14*(-v2 + v4)) + 2*f33*(f44*(-v1 + v2)
359 +
f14*v2mv4 + 2*f12*f42*(-v2 + v4)) + 2*f23*(f44*v1mv3 + f34*v1mv4 + 2*f12*f42*v3mv4 + 2*f32*f42*(-v1 + v4) +
f14*(-v3 + v4))
360 + f24*(3*f32*f4*v1mv4 + f43*(-v1 + v3) + 2*f13*v3mv4 + 2*f33*(-v1 + v4) + 3*f12*f4*(-v3 + v4)) + f22*f4*(4*f33*f4*v1mv4 + f44*(-v1 + v3)
361 + 3*
f14*v3mv4 + 3*f34*(-v1 + v4) + 4*f13*f4*(-v3 + v4))))/(f1mf2*f1mf3*f2mf3))/(f1mf42*f2mf42*f3mf42);
/* Second visible case: lower-order ansatz (f3/v3 unused; both d1 and d4). */
368 UNUSED
double f12 = f1*f1;
369 UNUSED
double f13 = f12*f1;
370 UNUSED
double f14 = f13*f1;
372 UNUSED
double f22 = f2*f2;
373 UNUSED
double f23 = f22*f2;
374 UNUSED
double f24 = f23*f2;
376 UNUSED
double f32 = f3*f3;
377 UNUSED
double f33 = f32*f3;
378 UNUSED
double f34 = f33*f3;
380 UNUSED
double f42 = f4*f4;
381 UNUSED
double f43 = f42*f4;
382 UNUSED
double f44 = f43*f4;
384 UNUSED
double f1mf2 = f1-f2;
385 UNUSED
double f1mf3 = f1-f3;
386 UNUSED
double f1mf4 = f1-f4;
387 UNUSED
double f2mf3 = f2-f3;
388 UNUSED
double f2mf4 = f2-f4;
389 UNUSED
double f3mf4 = f3-f4;
391 UNUSED
double f1mf22 = f1mf2*f1mf2;
392 UNUSED
double f1mf32 = f1mf3*f1mf3;
393 UNUSED
double f1mf42 = f1mf4*f1mf4;
394 UNUSED
double f2mf32 = f2mf3*f2mf3;
395 UNUSED
double f2mf42 = f2mf4*f2mf4;
396 UNUSED
double f3mf42 = f3mf4*f3mf4;
397 UNUSED
double f1mf43 = f1mf4*f1mf4*f1mf4;
399 retVal = ((d4*f1*f1mf22*f1mf4*f2mf4*(2*f2*f4 + f1*(f2 + f4)) + f4*(-(d1*f1mf2*f1mf4*f2mf42*(2*f1*f2 + (f1 + f2)*f4)) -
400 2*f1*(f44*(v1 - v2) + 3*f24*(v1 - v4) +
f14*(v2 - v4) + 4*f23*f4*(-v1 + v4)
401 + 2*f13*f4*(-v2 + v4) + f1*(2*f43*(-v1 + v2) + 6*f22*f4*(v1 - v4) + 4*f23*(-v1 + v4)))))/(f1mf22*f1mf43*f2mf42));
/* Third visible case: highest-order ansatz, powers up to f^7. */
409 UNUSED
double f12 = f1*f1;
410 UNUSED
double f13 = f12*f1;
411 UNUSED
double f14 = f13*f1;
412 UNUSED
double f15 =
f14*f1;
413 UNUSED
double f16 = f15*f1;
414 UNUSED
double f17 = f16*f1;
416 UNUSED
double f22 = f2*f2;
417 UNUSED
double f23 = f22*f2;
418 UNUSED
double f24 = f23*f2;
419 UNUSED
double f25 = f24*f2;
420 UNUSED
double f26 = f25*f2;
421 UNUSED
double f27 = f26*f2;
423 UNUSED
double f32 = f3*f3;
424 UNUSED
double f33 = f32*f3;
425 UNUSED
double f34 = f33*f3;
426 UNUSED
double f35 = f34*f3;
427 UNUSED
double f36 = f35*f3;
428 UNUSED
double f37 = f36*f3;
430 UNUSED
double f42 = f4*f4;
431 UNUSED
double f43 = f42*f4;
432 UNUSED
double f44 = f43*f4;
433 UNUSED
double f45 = f44*f4;
434 UNUSED
double f46 = f45*f4;
435 UNUSED
double f47 = f46*f4;
437 UNUSED
double f1mf2 = f1-f2;
438 UNUSED
double f1mf3 = f1-f3;
439 UNUSED
double f1mf4 = f1-f4;
440 UNUSED
double f2mf3 = f2-f3;
441 UNUSED
double f2mf4 = f2-f4;
442 UNUSED
double f3mf4 = f3-f4;
444 UNUSED
double f1mf22 = f1mf2*f1mf2;
445 UNUSED
double f1mf32 = f1mf3*f1mf3;
446 UNUSED
double f1mf42 = f1mf4*f1mf4;
447 UNUSED
double f2mf32 = f2mf3*f2mf3;
448 UNUSED
double f2mf42 = f2mf4*f2mf4;
449 UNUSED
double f3mf42 = f3mf4*f3mf4;
450 UNUSED
double f1mf43 = f1mf4*f1mf4*f1mf4;
/* NOTE(review): the `retVal = (` prefix of this assignment appears to have
 * been dropped by the extraction; expression left byte-identical. */
453 (d4*f1*f1mf22*f1mf32*f1mf4*f2mf3*f2mf4*f3mf4*(f1*f2*f3 + 2*f2*f3*f4 + f1*(f2 + f3)*f4) +
454 f4*(d1*f1mf2*f1mf3*f1mf4*f2mf3*f2mf42*f3mf42*(2*f1*f2*f3 + f2*f3*f4 + f1*(f2 + f3)*f4) +
455 f1*(f16*(f43*(v2 - v3) + 2*f33*(v2 - v4) + 3*f22*f4*(v3 - v4) + 3*f32*f4*(-v2 + v4) + 2*f23*(-v3 + v4)) +
456 f13*f4*(f45*(-v2 + v3) + 5*f34*f4*(v2 - v4) + 4*f25*(v3 - v4) + 4*f35*(-v2 + v4) + 5*f24*f4*(-v3 + v4)) +
457 f14*(3*f45*(v2 - v3) + 2*f35*(v2 - v4) + 5*f34*f4*(v2 - v4) + 10*f23*f42*(v3 - v4) + 10*f33*f42*(-v2 + v4) + 2*f25*(-v3 + v4) + 5*f24*f4*(-v3 + v4)) +
458 f15*(3*f44*(-v2 + v3) + 2*f33*f4*(v2 - v4) + 5*f32*f42*(v2 - v4) + 4*f24*(v3 - v4) + 4*f34*(-v2 + v4) + 2*f23*f4*(-v3 + v4) + 5*f22*f42*(-v3 + v4)) -
459 5*f12*(-(f32*f3mf42*f43*(v1 - v2)) + 2*f23*(f44*(-v1 + v3) + 2*f32*f42*(v1 - v4) + f34*(-v1 + v4)) + f24*(f43*(v1 - v3) + 2*f33*(v1 - v4) + 3*f32*f4*(-v1 + v4)) +
460 f22*f4*(f44*(v1 - v3) + 3*f34*(v1 - v4) + 4*f33*f4*(-v1 + v4))) +
461 f1*(-(f32*f3mf42*(4*f3 + 3*f4)*f43*(v1 - v2)) + 2*f23*(f45*(-v1 + v3) + 5*f34*f4*(v1 - v4) + 4*f35*(-v1 + v4)) + 4*f25*(f43*(v1 - v3) + 2*f33*(v1 - v4) + 3*f32*f4*(-v1 + v4)) -
462 5*f24*f4*(f43*(v1 - v3) + 2*f33*(v1 - v4) + 3*f32*f4*(-v1 + v4)) + 3*f22*f4*(f45*(v1 - v3) + 4*f35*(v1 - v4) + 5*f34*f4*(-v1 + v4))) -
463 2*(-(f33*f3mf42*f44*(v1 - v2)) + f24*(2*f45*(-v1 + v3) + 5*f33*f42*(v1 - v4) + 3*f35*(-v1 + v4)) + f25*(f44*(v1 - v3) + 3*f34*(v1 - v4) + 4*f33*f4*(-v1 + v4)) +
464 f23*f4*(f45*(v1 - v3) + 4*f35*(v1 - v4) + 5*f34*f4*(-v1 + v4))))))/(f1mf22*f1mf32*f1mf43*f2mf3*f2mf42*f3mf42)
/* Default branch: invalid IntAmpFlag. */
470 XLAL_ERROR_REAL8(
XLAL_EINVAL,
"Error in IMRPhenomX_Intermediate_Amp_22_v3: IMRPhenomXIntermediateAmpVersion is not valid. Recommended flag is 104.\n");
/**
 * Quadratic coefficient (delta2) of the intermediate-amplitude polynomial
 * ansatz; closed-form solution of the collocation system through
 * (f1,v1)..(f4,v4) with boundary derivatives d1, d4, with the ansatz order
 * selected by IntAmpFlag.
 *
 * NOTE(review): `case` labels, braces, `break`s and the `retVal`
 * declaration of the switch were dropped by the extraction; code below is
 * left byte-identical.
 */
478 static double IMRPhenomX_Intermediate_Amp_22_delta2(
double d1,
double d4,
double v1,
double v2,
double v3,
double v4,
double f1,
double f2,
double f3,
double f4,
int IntAmpFlag)
482 switch ( IntAmpFlag )
/* First visible case: precompute powers and differences. */
486 UNUSED
double f12 = f1*f1;
487 UNUSED
double f13 = f12*f1;
488 UNUSED
double f14 = f13*f1;
490 UNUSED
double f22 = f2*f2;
491 UNUSED
double f23 = f22*f2;
492 UNUSED
double f24 = f23*f2;
493 UNUSED
double f25 = f24*f2;
494 UNUSED
double f26 = f25*f2;
496 UNUSED
double f32 = f3*f3;
497 UNUSED
double f33 = f32*f3;
498 UNUSED
double f34 = f33*f3;
500 UNUSED
double f42 = f4*f4;
501 UNUSED
double f43 = f42*f4;
502 UNUSED
double f44 = f43*f4;
503 UNUSED
double f46 = f44*f42;
505 UNUSED
double f1mf2 = f1-f2;
506 UNUSED
double f1mf3 = f1-f3;
507 UNUSED
double f1mf4 = f1-f4;
508 UNUSED
double f2mf3 = f2-f3;
509 UNUSED
double f2mf4 = f2-f4;
510 UNUSED
double f3mf4 = f3-f4;
512 UNUSED
double f1mf42 = f1mf4*f1mf4;
513 UNUSED
double f2mf42 = f2mf4*f2mf4;
514 UNUSED
double f3mf42 = f3mf4*f3mf4;
516 UNUSED
double v1mv2 = v1-v2;
517 UNUSED
double v2mv3 = v2-v3;
518 UNUSED
double v2mv4 = v2-v4;
519 UNUSED
double v1mv3 = v1-v3;
520 UNUSED
double v1mv4 = v1-v4;
521 UNUSED
double v3mv4 = v3-v4;
/* delta2 for this ansatz (d4 only). */
523 retVal = (-(d4*f1mf2*f1mf3*f1mf4*f2mf3*f2mf4*f3mf4*(f3*f4 + f2*(f3 + f4) + f1*(f2 + f3 + f4))) - 2*f34*f43*v1 + 3*f33*f44*v1 - f3*f46*v1 -
f14*f33*v2 + f13*f34*v2 + 3*
f14*f3*f42*v2 - 3*f1*f34*f42*v2 -
524 2*
f14*f43*v2 - 4*f13*f3*f43*v2 + 4*f1*f33*f43*v2 + 2*f34*f43*v2 + 3*f13*f44*v2 - 3*f33*f44*v2 - f1*f46*v2 + f3*f46*v2 + 2*
f14*f43*v3 - 3*f13*f44*v3 + f1*f46*v3 +
525 f2*f42*(f44*v1mv3 + 3*f34*v1mv4 - 4*f33*f4*v1mv4 - 3*
f14*v3mv4 + 4*f13*f4*v3mv4) + f24*(2*f43*v1mv3 + f33*v1mv4 - 3*f3*f42*v1mv4 - f13*v3mv4 + 3*f1*f42*v3mv4) +
526 f23*(-3*f44*v1mv3 - f34*v1mv4 + 4*f3*f43*v1mv4 +
f14*v3mv4 - 4*f1*f43*v3mv4) +
f14*f33*v4 - f13*f34*v4 - 3*
f14*f3*f42*v4 + 3*f1*f34*f42*v4 + 4*f13*f3*f43*v4 - 4*f1*f33*f43*v4)/
527 (f1mf2*f1mf3*f1mf42*f2mf3*f2mf42*f3mf42);
/* Second visible case: lower-order ansatz (f3/v3 unused; d1 and d4). */
532 UNUSED
double f12 = f1*f1;
533 UNUSED
double f13 = f12*f1;
534 UNUSED
double f14 = f13*f1;
535 UNUSED
double f15 =
f14*f1;
537 UNUSED
double f22 = f2*f2;
538 UNUSED
double f23 = f22*f2;
539 UNUSED
double f24 = f23*f2;
541 UNUSED
double f32 = f3*f3;
542 UNUSED
double f33 = f32*f3;
543 UNUSED
double f34 = f33*f3;
545 UNUSED
double f42 = f4*f4;
546 UNUSED
double f43 = f42*f4;
547 UNUSED
double f44 = f43*f4;
548 UNUSED
double f45 = f44*f4;
550 UNUSED
double f1mf2 = f1-f2;
551 UNUSED
double f1mf3 = f1-f3;
552 UNUSED
double f1mf4 = f1-f4;
553 UNUSED
double f2mf3 = f2-f3;
554 UNUSED
double f2mf4 = f2-f4;
555 UNUSED
double f3mf4 = f3-f4;
557 UNUSED
double f1mf22 = f1mf2*f1mf2;
558 UNUSED
double f1mf32 = f1mf3*f1mf3;
559 UNUSED
double f1mf42 = f1mf4*f1mf4;
560 UNUSED
double f2mf32 = f2mf3*f2mf3;
561 UNUSED
double f2mf42 = f2mf4*f2mf4;
562 UNUSED
double f3mf42 = f3mf4*f3mf4;
563 UNUSED
double f1mf43 = f1mf4*f1mf4*f1mf4;
565 retVal = ((-(d4*f1mf22*f1mf4*f2mf4*(f12 + f2*f4 + 2*f1*(f2 + f4))) + d1*f1mf2*f1mf4*f2mf42*(f1*f2 + 2*(f1 + f2)*f4 + f42)
566 - 4*f12*f23*v1 + 3*f1*f24*v1 - 4*f1*f23*f4*v1 + 3*f24*f4*v1 + 12*f12*f2*f42*v1 -
567 4*f23*f42*v1 - 8*f12*f43*v1 + f1*f44*v1 + f45*v1 + f15*v2 +
f14*f4*v2 - 8*f13*f42*v2 + 8*f12*f43*v2 - f1*f44*v2 - f45*v2 -
568 f1mf22*(f13 + f2*(3*f2 - 4*f4)*f4 + f12*(2*f2 + f4) + f1*(3*f2 - 4*f4)*(f2 + 2*f4))*v4)/(f1mf22*f1mf43*f2mf42));
/* Third visible case: highest-order ansatz, powers up to f^7. */
574 UNUSED
double f12 = f1*f1;
575 UNUSED
double f13 = f12*f1;
576 UNUSED
double f14 = f13*f1;
577 UNUSED
double f15 =
f14*f1;
578 UNUSED
double f16 = f15*f1;
579 UNUSED
double f17 = f16*f1;
581 UNUSED
double f22 = f2*f2;
582 UNUSED
double f23 = f22*f2;
583 UNUSED
double f24 = f23*f2;
584 UNUSED
double f25 = f24*f2;
585 UNUSED
double f26 = f25*f2;
586 UNUSED
double f27 = f26*f2;
588 UNUSED
double f32 = f3*f3;
589 UNUSED
double f33 = f32*f3;
590 UNUSED
double f34 = f33*f3;
591 UNUSED
double f35 = f34*f3;
592 UNUSED
double f36 = f35*f3;
593 UNUSED
double f37 = f36*f3;
595 UNUSED
double f42 = f4*f4;
596 UNUSED
double f43 = f42*f4;
597 UNUSED
double f44 = f43*f4;
598 UNUSED
double f45 = f44*f4;
599 UNUSED
double f46 = f45*f4;
600 UNUSED
double f47 = f46*f4;
602 UNUSED
double f1mf2 = f1-f2;
603 UNUSED
double f1mf3 = f1-f3;
604 UNUSED
double f1mf4 = f1-f4;
605 UNUSED
double f2mf3 = f2-f3;
606 UNUSED
double f2mf4 = f2-f4;
607 UNUSED
double f3mf4 = f3-f4;
609 UNUSED
double f1mf22 = f1mf2*f1mf2;
610 UNUSED
double f1mf32 = f1mf3*f1mf3;
611 UNUSED
double f1mf42 = f1mf4*f1mf4;
612 UNUSED
double f2mf32 = f2mf3*f2mf3;
613 UNUSED
double f2mf42 = f2mf4*f2mf4;
614 UNUSED
double f3mf42 = f3mf4*f3mf4;
615 UNUSED
double f1mf43 = f1mf4*f1mf4*f1mf4;
/* NOTE(review): the `retVal = (` prefix of this assignment appears to have
 * been dropped by the extraction; expression left byte-identical. */
618 (-(d4*f1mf22*f1mf32*f1mf4*f2mf3*f2mf4*f3mf4*(f2*f3*f4 + f12*(f2 + f3 + f4) + 2*f1*(f2*f3 + (f2 + f3)*f4))) -
619 d1*f1mf2*f1mf3*f1mf4*f2mf3*f2mf42*f3mf42*(f1*f2*f3 + 2*(f2*f3 + f1*(f2 + f3))*f4 + (f1 + f2 + f3)*f42) + 5*f13*f24*f33*v1 - 4*f12*f25*f33*v1 - 5*f13*f23*f34*v1 + 3*f1*f25*f34*v1 +
620 4*f12*f23*f35*v1 - 3*f1*f24*f35*v1 + 5*f12*f24*f33*f4*v1 - 4*f1*f25*f33*f4*v1 - 5*f12*f23*f34*f4*v1 + 3*f25*f34*f4*v1 + 4*f1*f23*f35*f4*v1 - 3*f24*f35*f4*v1 - 15*f13*f24*f3*f42*v1 +
621 12*f12*f25*f3*f42*v1 + 5*f1*f24*f33*f42*v1 - 4*f25*f33*f42*v1 + 15*f13*f2*f34*f42*v1 - 5*f1*f23*f34*f42*v1 - 12*f12*f2*f35*f42*v1 + 4*f23*f35*f42*v1 + 10*f13*f24*f43*v1 - 8*f12*f25*f43*v1 +
622 20*f13*f23*f3*f43*v1 - 15*f12*f24*f3*f43*v1 - 20*f13*f2*f33*f43*v1 + 5*f24*f33*f43*v1 - 10*f13*f34*f43*v1 + 15*f12*f2*f34*f43*v1 - 5*f23*f34*f43*v1 + 8*f12*f35*f43*v1 - 15*f13*f23*f44*v1 +
623 10*f12*f24*f44*v1 + f1*f25*f44*v1 + 15*f13*f33*f44*v1 - 10*f12*f34*f44*v1 - f1*f35*f44*v1 + f12*f23*f45*v1 - 2*f1*f24*f45*v1 + f25*f45*v1 - f12*f33*f45*v1 + 2*f1*f34*f45*v1 - f35*f45*v1 +
624 5*f13*f2*f46*v1 + f1*f23*f46*v1 - 2*f24*f46*v1 - 5*f13*f3*f46*v1 - f1*f33*f46*v1 + 2*f34*f46*v1 - 3*f12*f2*f47*v1 + f23*f47*v1 + 3*f12*f3*f47*v1 - f33*f47*v1 - f17*f33*v2 + 2*f16*f34*v2 -
625 f15*f35*v2 - f16*f33*f4*v2 + 2*f15*f34*f4*v2 -
f14*f35*f4*v2 + 3*f17*f3*f42*v2 - f15*f33*f42*v2 - 10*
f14*f34*f42*v2 + 8*f13*f35*f42*v2 - 2*f17*f43*v2 - 5*f16*f3*f43*v2 + 15*
f14*f33*f43*v2 -
626 8*f12*f35*f43*v2 + 4*f16*f44*v2 - 15*f13*f33*f44*v2 + 10*f12*f34*f44*v2 + f1*f35*f44*v2 + f12*f33*f45*v2 - 2*f1*f34*f45*v2 + f35*f45*v2 - 4*
f14*f46*v2 + 5*f13*f3*f46*v2 + f1*f33*f46*v2 -
627 2*f34*f46*v2 + 2*f13*f47*v2 - 3*f12*f3*f47*v2 + f33*f47*v2 + f17*f23*v3 - 2*f16*f24*v3 + f15*f25*v3 + f16*f23*f4*v3 - 2*f15*f24*f4*v3 +
f14*f25*f4*v3 - 3*f17*f2*f42*v3 + f15*f23*f42*v3 +
628 10*
f14*f24*f42*v3 - 8*f13*f25*f42*v3 + 2*f17*f43*v3 + 5*f16*f2*f43*v3 - 15*
f14*f23*f43*v3 + 8*f12*f25*f43*v3 - 4*f16*f44*v3 + 15*f13*f23*f44*v3 - 10*f12*f24*f44*v3 - f1*f25*f44*v3 -
629 f12*f23*f45*v3 + 2*f1*f24*f45*v3 - f25*f45*v3 + 4*
f14*f46*v3 - 5*f13*f2*f46*v3 - f1*f23*f46*v3 + 2*f24*f46*v3 - 2*f13*f47*v3 + 3*f12*f2*f47*v3 - f23*f47*v3 -
630 f1mf22*f1mf32*f2mf3*(f13*(f22 + f2*f3 + f32 - 3*f42) + f2*f3*f4*(3*f2*f3 - 4*(f2 + f3)*f4 + 5*f42) + f1*(f2*f3 + 2*(f2 + f3)*f4)*(3*f2*f3 - 4*(f2 + f3)*f4 + 5*f42) +
631 f12*(2*f2*f3*(f2 + f3) + (f22 + f2*f3 + f32)*f4 - 6*(f2 + f3)*f42 + 5*f43))*v4)/(f1mf22*f1mf32*f1mf43*f2mf3*f2mf42*f3mf42)
/* Default branch: invalid IntAmpFlag. */
638 XLAL_ERROR_REAL8(
XLAL_EINVAL,
"Error in IMRPhenomX_Intermediate_Amp_22_v3: IMRPhenomXIntermediateAmpVersion is not valid. Recommended flag is 104.\n");
/**
 * Cubic coefficient (delta3) of the intermediate-amplitude polynomial
 * ansatz; closed-form solution of the collocation system through
 * (f1,v1)..(f4,v4) with boundary derivatives d1, d4, with the ansatz order
 * selected by IntAmpFlag.
 *
 * NOTE(review): `case` labels, braces, `break`s and the `retVal`
 * declaration of the switch were dropped by the extraction; code below is
 * left byte-identical.
 */
646 static double IMRPhenomX_Intermediate_Amp_22_delta3(
double d1,
double d4,
double v1,
double v2,
double v3,
double v4,
double f1,
double f2,
double f3,
double f4,
int IntAmpFlag)
650 switch ( IntAmpFlag )
/* First visible case: precompute powers and differences. */
654 UNUSED
double f12 = f1*f1;
655 UNUSED
double f13 = f12*f1;
656 UNUSED
double f14 = f13*f1;
658 UNUSED
double f22 = f2*f2;
659 UNUSED
double f23 = f22*f2;
660 UNUSED
double f24 = f23*f2;
662 UNUSED
double f32 = f3*f3;
663 UNUSED
double f34 = f32*f32;
665 UNUSED
double f42 = f4*f4;
666 UNUSED
double f43 = f42*f4;
667 UNUSED
double f44 = f43*f4;
668 UNUSED
double f45 = f44*f4;
670 UNUSED
double f1mf2 = f1-f2;
671 UNUSED
double f1mf3 = f1-f3;
672 UNUSED
double f1mf4 = f1-f4;
673 UNUSED
double f2mf3 = f2-f3;
674 UNUSED
double f2mf4 = f2-f4;
675 UNUSED
double f3mf4 = f3-f4;
677 UNUSED
double f1mf42 = f1mf4*f1mf4;
678 UNUSED
double f2mf42 = f2mf4*f2mf4;
679 UNUSED
double f3mf42 = f3mf4*f3mf4;
681 UNUSED
double v1mv2 = v1-v2;
682 UNUSED
double v2mv3 = v2-v3;
683 UNUSED
double v2mv4 = v2-v4;
684 UNUSED
double v1mv3 = v1-v3;
685 UNUSED
double v1mv4 = v1-v4;
686 UNUSED
double v3mv4 = v3-v4;
/* delta3 for this ansatz (d4 only). */
688 retVal = (d4*f1mf2*f1mf3*f1mf4*f2mf3*f2mf4*f3mf4*(f1 + f2 + f3 + f4) + f34*f42*v1 - 3*f32*f44*v1 + 2*f3*f45*v1 +
f14*f32*v2 - f12*f34*v2 - 2*
f14*f3*f4*v2 + 2*f1*f34*f4*v2 +
f14*f42*v2 - f34*f42*v2 +
689 4*f12*f3*f43*v2 - 4*f1*f32*f43*v2 - 3*f12*f44*v2 + 3*f32*f44*v2 + 2*f1*f45*v2 - 2*f3*f45*v2 -
f14*f42*v3 + 3*f12*f44*v3 - 2*f1*f45*v3 +
690 f24*(-(f42*v1mv3) - f32*v1mv4 + 2*f3*f4*v1mv4 + f12*v3mv4 - 2*f1*f4*v3mv4) - 2*f2*f4*(f44*v1mv3 + f34*v1mv4 - 2*f32*f42*v1mv4 -
f14*v3mv4 + 2*f12*f42*v3mv4) +
691 f22*(3*f44*v1mv3 + f34*v1mv4 - 4*f3*f43*v1mv4 -
f14*v3mv4 + 4*f1*f43*v3mv4) -
f14*f32*v4 + f12*f34*v4 + 2*
f14*f3*f4*v4 - 2*f1*f34*f4*v4 - 4*f12*f3*f43*v4 + 4*f1*f32*f43*v4)/
692 (f1mf2*f1mf3*f1mf42*f2mf3*f2mf42*f3mf42);
/* Second visible case: lower-order ansatz (f3/v3 unused; d1 and d4). */
698 UNUSED
double f12 = f1*f1;
699 UNUSED
double f13 = f12*f1;
700 UNUSED
double f14 = f13*f1;
702 UNUSED
double f22 = f2*f2;
703 UNUSED
double f23 = f22*f2;
704 UNUSED
double f24 = f23*f2;
706 UNUSED
double f32 = f3*f3;
707 UNUSED
double f33 = f32*f3;
708 UNUSED
double f34 = f33*f3;
710 UNUSED
double f42 = f4*f4;
711 UNUSED
double f43 = f42*f4;
712 UNUSED
double f44 = f43*f4;
714 UNUSED
double f1mf2 = f1-f2;
715 UNUSED
double f1mf3 = f1-f3;
716 UNUSED
double f1mf4 = f1-f4;
717 UNUSED
double f2mf3 = f2-f3;
718 UNUSED
double f2mf4 = f2-f4;
719 UNUSED
double f3mf4 = f3-f4;
721 UNUSED
double f1mf22 = f1mf2*f1mf2;
722 UNUSED
double f1mf32 = f1mf3*f1mf3;
723 UNUSED
double f1mf42 = f1mf4*f1mf4;
724 UNUSED
double f2mf32 = f2mf3*f2mf3;
725 UNUSED
double f2mf42 = f2mf4*f2mf4;
726 UNUSED
double f3mf42 = f3mf4*f3mf4;
727 UNUSED
double f1mf43 = f1mf4*f1mf4*f1mf4;
729 retVal = ((d4*f1mf22*f1mf4*f2mf4*(2*f1 + f2 + f4) - d1*f1mf2*f1mf4*f2mf42*(f1 + f2 + 2*f4)
730 + 2*(f44*(-v1 + v2) + 2*f12*f2mf42*(v1 - v4) + 2*f22*f42*(v1 - v4)
731 + 2*f13*f4*(v2 - v4) + f24*(-v1 + v4) +
f14*(-v2 + v4) + 2*f1*f4*(f42*(v1 - v2) + f22*(v1 - v4) + 2*f2*f4*(-v1 + v4)))) / (f1mf22*f1mf43*f2mf42));
/* Third visible case: highest-order ansatz, powers up to f^7. */
738 UNUSED
double f12 = f1*f1;
739 UNUSED
double f13 = f12*f1;
740 UNUSED
double f14 = f13*f1;
741 UNUSED
double f15 =
f14*f1;
742 UNUSED
double f16 = f15*f1;
743 UNUSED
double f17 = f16*f1;
745 UNUSED
double f22 = f2*f2;
746 UNUSED
double f23 = f22*f2;
747 UNUSED
double f24 = f23*f2;
748 UNUSED
double f25 = f24*f2;
749 UNUSED
double f26 = f25*f2;
750 UNUSED
double f27 = f26*f2;
752 UNUSED
double f32 = f3*f3;
753 UNUSED
double f33 = f32*f3;
754 UNUSED
double f34 = f33*f3;
755 UNUSED
double f35 = f34*f3;
756 UNUSED
double f36 = f35*f3;
757 UNUSED
double f37 = f36*f3;
759 UNUSED
double f42 = f4*f4;
760 UNUSED
double f43 = f42*f4;
761 UNUSED
double f44 = f43*f4;
762 UNUSED
double f45 = f44*f4;
763 UNUSED
double f46 = f45*f4;
764 UNUSED
double f47 = f46*f4;
766 UNUSED
double f1mf2 = f1-f2;
767 UNUSED
double f1mf3 = f1-f3;
768 UNUSED
double f1mf4 = f1-f4;
769 UNUSED
double f2mf3 = f2-f3;
770 UNUSED
double f2mf4 = f2-f4;
771 UNUSED
double f3mf4 = f3-f4;
773 UNUSED
double f1mf22 = f1mf2*f1mf2;
774 UNUSED
double f1mf32 = f1mf3*f1mf3;
775 UNUSED
double f1mf42 = f1mf4*f1mf4;
776 UNUSED
double f2mf32 = f2mf3*f2mf3;
777 UNUSED
double f2mf42 = f2mf4*f2mf4;
778 UNUSED
double f3mf42 = f3mf4*f3mf4;
779 UNUSED
double f1mf43 = f1mf4*f1mf4*f1mf4;
/* NOTE(review): the `retVal = (` prefix of this assignment appears to have
 * been dropped by the extraction; expression left byte-identical. */
782 (d4*f1mf22*f1mf32*f1mf4*f2mf3*f2mf4*f3mf4*(f12 + f2*f3 + (f2 + f3)*f4 + 2*f1*(f2 + f3 + f4)) + d1*f1mf2*f1mf3*f1mf4*f2mf3*f2mf42*f3mf42*(f1*f2 + f1*f3 + f2*f3 + 2*(f1 + f2 + f3)*f4 + f42) -
783 5*f13*f24*f32*v1 + 4*f12*f25*f32*v1 + 5*f13*f22*f34*v1 - 2*f25*f34*v1 - 4*f12*f22*f35*v1 + 2*f24*f35*v1 + 10*f13*f24*f3*f4*v1 - 8*f12*f25*f3*f4*v1 - 5*f12*f24*f32*f4*v1 + 4*f1*f25*f32*f4*v1 -
784 10*f13*f2*f34*f4*v1 + 5*f12*f22*f34*f4*v1 + 8*f12*f2*f35*f4*v1 - 4*f1*f22*f35*f4*v1 - 5*f13*f24*f42*v1 + 4*f12*f25*f42*v1 + 10*f12*f24*f3*f42*v1 - 8*f1*f25*f3*f42*v1 - 5*f1*f24*f32*f42*v1 +
785 4*f25*f32*f42*v1 + 5*f13*f34*f42*v1 - 10*f12*f2*f34*f42*v1 + 5*f1*f22*f34*f42*v1 - 4*f12*f35*f42*v1 + 8*f1*f2*f35*f42*v1 - 4*f22*f35*f42*v1 - 5*f12*f24*f43*v1 + 4*f1*f25*f43*v1 -
786 20*f13*f22*f3*f43*v1 + 10*f1*f24*f3*f43*v1 + 20*f13*f2*f32*f43*v1 - 5*f24*f32*f43*v1 + 5*f12*f34*f43*v1 - 10*f1*f2*f34*f43*v1 + 5*f22*f34*f43*v1 - 4*f1*f35*f43*v1 + 15*f13*f22*f44*v1 -
787 5*f1*f24*f44*v1 - 2*f25*f44*v1 - 15*f13*f32*f44*v1 + 5*f1*f34*f44*v1 + 2*f35*f44*v1 - 10*f13*f2*f45*v1 - f12*f22*f45*v1 + 3*f24*f45*v1 + 10*f13*f3*f45*v1 + f12*f32*f45*v1 - 3*f34*f45*v1 +
788 2*f12*f2*f46*v1 - f1*f22*f46*v1 - 2*f12*f3*f46*v1 + f1*f32*f46*v1 + 2*f1*f2*f47*v1 - f22*f47*v1 - 2*f1*f3*f47*v1 + f32*f47*v1 + f17*f32*v2 - 3*f15*f34*v2 + 2*
f14*f35*v2 - 2*f17*f3*f4*v2 +
789 f16*f32*f4*v2 + 5*
f14*f34*f4*v2 - 4*f13*f35*f4*v2 + f17*f42*v2 - 2*f16*f3*f42*v2 + f15*f32*f42*v2 + f16*f43*v2 + 10*f15*f3*f43*v2 - 15*
f14*f32*f43*v2 + 4*f1*f35*f43*v2 - 8*f15*f44*v2 +
790 15*f13*f32*f44*v2 - 5*f1*f34*f44*v2 - 2*f35*f44*v2 + 8*
f14*f45*v2 - 10*f13*f3*f45*v2 - f12*f32*f45*v2 + 3*f34*f45*v2 - f13*f46*v2 + 2*f12*f3*f46*v2 - f1*f32*f46*v2 - f12*f47*v2 +
791 2*f1*f3*f47*v2 - f32*f47*v2 - f17*f22*v3 + 3*f15*f24*v3 - 2*
f14*f25*v3 + 2*f17*f2*f4*v3 - f16*f22*f4*v3 - 5*
f14*f24*f4*v3 + 4*f13*f25*f4*v3 - f17*f42*v3 + 2*f16*f2*f42*v3 - f15*f22*f42*v3 -
792 f16*f43*v3 - 10*f15*f2*f43*v3 + 15*
f14*f22*f43*v3 - 4*f1*f25*f43*v3 + 8*f15*f44*v3 - 15*f13*f22*f44*v3 + 5*f1*f24*f44*v3 + 2*f25*f44*v3 - 8*
f14*f45*v3 + 10*f13*f2*f45*v3 + f12*f22*f45*v3 -
793 3*f24*f45*v3 + f13*f46*v3 - 2*f12*f2*f46*v3 + f1*f22*f46*v3 + f12*f47*v3 - 2*f1*f2*f47*v3 + f22*f47*v3 +
794 f1mf22*f1mf32*f2mf3*(2*f22*f32 + f13*(f2 + f3 - 2*f4) + f12*(f2 + f3 - 2*f4)*(2*(f2 + f3) + f4) - 4*(f22 + f2*f3 + f32)*f42 + 5*(f2 + f3)*f43 +
795 f1*(4*f2*f3*(f2 + f3) - 4*(f22 + f2*f3 + f32)*f4 - 3*(f2 + f3)*f42 + 10*f43))*v4)/(f1mf22*f1mf32*f1mf43*f2mf3*f2mf42*f3mf42)
/* Default branch: invalid IntAmpFlag. */
802 XLAL_ERROR_REAL8(
XLAL_EINVAL,
"Error in IMRPhenomX_Intermediate_Amp_22_v3: IMRPhenomXIntermediateAmpVersion is not valid. Recommended flag is 104.\n");
810 static double IMRPhenomX_Intermediate_Amp_22_delta4(
double d1,
double d4,
double v1,
double v2,
double v3,
double v4,
double f1,
double f2,
double f3,
double f4,
int IntAmpFlag)
814 switch ( IntAmpFlag )
818 UNUSED
double f12 = f1*f1;
819 UNUSED
double f13 = f12*f1;
821 UNUSED
double f22 = f2*f2;
822 UNUSED
double f23 = f22*f2;
823 UNUSED
double f24 = f23*f2;
825 UNUSED
double f32 = f3*f3;
826 UNUSED
double f33 = f32*f3;
828 UNUSED
double f42 = f4*f4;
829 UNUSED
double f43 = f42*f4;
830 UNUSED
double f44 = f43*f4;
832 UNUSED
double f1mf2 = f1-f2;
833 UNUSED
double f1mf3 = f1-f3;
834 UNUSED
double f1mf4 = f1-f4;
835 UNUSED
double f2mf3 = f2-f3;
836 UNUSED
double f2mf4 = f2-f4;
837 UNUSED
double f3mf4 = f3-f4;
839 UNUSED
double f1mf42 = f1mf4*f1mf4;
840 UNUSED
double f2mf42 = f2mf4*f2mf4;
841 UNUSED
double f3mf42 = f3mf4*f3mf4;
843 UNUSED
double v1mv2 = v1-v2;
844 UNUSED
double v2mv3 = v2-v3;
845 UNUSED
double v2mv4 = v2-v4;
846 UNUSED
double v1mv3 = v1-v3;
847 UNUSED
double v1mv4 = v1-v4;
848 UNUSED
double v3mv4 = v3-v4;
850 retVal = (-(d4*f1mf2*f1mf3*f1mf4*f2mf3*f2mf4*f3mf4) - f33*f42*v1 + 2*f32*f43*v1 - f3*f44*v1 - f13*f32*v2 + f12*f33*v2 + 2*f13*f3*f4*v2 - 2*f1*f33*f4*v2 - f13*f42*v2 - 3*f12*f3*f42*v2 + 3*f1*f32*f42*v2 +
851 f33*f42*v2 + 2*f12*f43*v2 - 2*f32*f43*v2 - f1*f44*v2 + f3*f44*v2 + f13*f42*v3 - 2*f12*f43*v3 + f1*f44*v3 + f23*(f42*v1mv3 + f32*v1mv4 - 2*f3*f4*v1mv4 - f12*v3mv4 + 2*f1*f4*v3mv4) +
852 f2*f4*(f43*v1mv3 + 2*f33*v1mv4 - 3*f32*f4*v1mv4 - 2*f13*v3mv4 + 3*f12*f4*v3mv4) + f22*(-2*f43*v1mv3 - f33*v1mv4 + 3*f3*f42*v1mv4 + f13*v3mv4 - 3*f1*f42*v3mv4) + f13*f32*v4 - f12*f33*v4 -
853 2*f13*f3*f4*v4 + 2*f1*f33*f4*v4 + 3*f12*f3*f42*v4 - 3*f1*f32*f42*v4)/(f1mf2*f1mf3*f1mf42*f2mf3*f2mf42*f3mf42);
858 UNUSED
double f12 = f1*f1;
859 UNUSED
double f13 = f12*f1;
860 UNUSED
double f14 = f13*f1;
862 UNUSED
double f22 = f2*f2;
863 UNUSED
double f23 = f22*f2;
864 UNUSED
double f24 = f23*f2;
866 UNUSED
double f32 = f3*f3;
867 UNUSED
double f33 = f32*f3;
868 UNUSED
double f34 = f33*f3;
870 UNUSED
double f42 = f4*f4;
871 UNUSED
double f43 = f42*f4;
872 UNUSED
double f44 = f43*f4;
874 UNUSED
double f1mf2 = f1-f2;
875 UNUSED
double f1mf3 = f1-f3;
876 UNUSED
double f1mf4 = f1-f4;
877 UNUSED
double f2mf3 = f2-f3;
878 UNUSED
double f2mf4 = f2-f4;
879 UNUSED
double f3mf4 = f3-f4;
881 UNUSED
double f1mf22 = f1mf2*f1mf2;
882 UNUSED
double f1mf32 = f1mf3*f1mf3;
883 UNUSED
double f1mf42 = f1mf4*f1mf4;
884 UNUSED
double f2mf32 = f2mf3*f2mf3;
885 UNUSED
double f2mf42 = f2mf4*f2mf4;
886 UNUSED
double f3mf42 = f3mf4*f3mf4;
887 UNUSED
double f1mf43 = f1mf4*f1mf4*f1mf4;
889 retVal = ((-(d4*f1mf22*f1mf4*f2mf4) + d1*f1mf2*f1mf4*f2mf42 - 3*f1*f22*v1 + 2*f23*v1 + 6*f1*f2*f4*v1 - 3*f22*f4*v1
890 - 3*f1*f42*v1 + f43*v1 + f13*v2 - 3*f12*f4*v2 + 3*f1*f42*v2 - f43*v2 - f1mf22*(f1 + 2*f2 - 3*f4)*v4)/(f1mf22*f1mf43*f2mf42));
896 UNUSED
double f12 = f1*f1;
897 UNUSED
double f13 = f12*f1;
898 UNUSED
double f14 = f13*f1;
899 UNUSED
double f15 =
f14*f1;
900 UNUSED
double f16 = f15*f1;
901 UNUSED
double f17 = f16*f1;
903 UNUSED
double f22 = f2*f2;
904 UNUSED
double f23 = f22*f2;
905 UNUSED
double f24 = f23*f2;
906 UNUSED
double f25 = f24*f2;
907 UNUSED
double f26 = f25*f2;
908 UNUSED
double f27 = f26*f2;
910 UNUSED
double f32 = f3*f3;
911 UNUSED
double f33 = f32*f3;
912 UNUSED
double f34 = f33*f3;
913 UNUSED
double f35 = f34*f3;
914 UNUSED
double f36 = f35*f3;
915 UNUSED
double f37 = f36*f3;
917 UNUSED
double f42 = f4*f4;
918 UNUSED
double f43 = f42*f4;
919 UNUSED
double f44 = f43*f4;
920 UNUSED
double f45 = f44*f4;
921 UNUSED
double f46 = f45*f4;
922 UNUSED
double f47 = f46*f4;
924 UNUSED
double f1mf2 = f1-f2;
925 UNUSED
double f1mf3 = f1-f3;
926 UNUSED
double f1mf4 = f1-f4;
927 UNUSED
double f2mf3 = f2-f3;
928 UNUSED
double f2mf4 = f2-f4;
929 UNUSED
double f3mf4 = f3-f4;
931 UNUSED
double f1mf22 = f1mf2*f1mf2;
932 UNUSED
double f1mf32 = f1mf3*f1mf3;
933 UNUSED
double f1mf42 = f1mf4*f1mf4;
934 UNUSED
double f2mf32 = f2mf3*f2mf3;
935 UNUSED
double f2mf42 = f2mf4*f2mf4;
936 UNUSED
double f3mf42 = f3mf4*f3mf4;
937 UNUSED
double f1mf43 = f1mf4*f1mf4*f1mf4;
940 (-(d4*f1mf22*f1mf32*f1mf4*f2mf3*f2mf4*f3mf4*(2*f1 + f2 + f3 + f4)) - d1*f1mf2*f1mf3*f1mf4*f2mf3*f2mf42*f3mf42*(f1 + f2 + f3 + 2*f4) + 5*f13*f23*f32*v1 - 3*f1*f25*f32*v1 - 5*f13*f22*f33*v1 +
941 2*f25*f33*v1 + 3*f1*f22*f35*v1 - 2*f23*f35*v1 - 10*f13*f23*f3*f4*v1 + 6*f1*f25*f3*f4*v1 + 5*f12*f23*f32*f4*v1 - 3*f25*f32*f4*v1 + 10*f13*f2*f33*f4*v1 - 5*f12*f22*f33*f4*v1 -
942 6*f1*f2*f35*f4*v1 + 3*f22*f35*f4*v1 + 5*f13*f23*f42*v1 - 3*f1*f25*f42*v1 + 15*f13*f22*f3*f42*v1 - 10*f12*f23*f3*f42*v1 - 15*f13*f2*f32*f42*v1 + 5*f1*f23*f32*f42*v1 - 5*f13*f33*f42*v1 +
943 10*f12*f2*f33*f42*v1 - 5*f1*f22*f33*f42*v1 + 3*f1*f35*f42*v1 - 10*f13*f22*f43*v1 + 5*f12*f23*f43*v1 + f25*f43*v1 + 15*f12*f22*f3*f43*v1 - 10*f1*f23*f3*f43*v1 + 10*f13*f32*f43*v1 -
944 15*f12*f2*f32*f43*v1 + 5*f23*f32*f43*v1 - 5*f12*f33*f43*v1 + 10*f1*f2*f33*f43*v1 - 5*f22*f33*f43*v1 - f35*f43*v1 + 5*f13*f2*f44*v1 - 10*f12*f22*f44*v1 + 5*f1*f23*f44*v1 - 5*f13*f3*f44*v1 +
945 10*f12*f32*f44*v1 - 5*f1*f33*f44*v1 + 5*f12*f2*f45*v1 + 2*f1*f22*f45*v1 - 3*f23*f45*v1 - 5*f12*f3*f45*v1 - 2*f1*f32*f45*v1 + 3*f33*f45*v1 - 4*f1*f2*f46*v1 + 2*f22*f46*v1 + 4*f1*f3*f46*v1 -
946 2*f32*f46*v1 - 2*f16*f32*v2 + 3*f15*f33*v2 - f13*f35*v2 + 4*f16*f3*f4*v2 - 2*f15*f32*f4*v2 - 5*
f14*f33*f4*v2 + 3*f12*f35*f4*v2 - 2*f16*f42*v2 - 5*f15*f3*f42*v2 + 10*
f14*f32*f42*v2 -
947 3*f1*f35*f42*v2 + 4*f15*f43*v2 - 5*
f14*f3*f43*v2 + f35*f43*v2 + 5*f13*f3*f44*v2 - 10*f12*f32*f44*v2 + 5*f1*f33*f44*v2 - 4*f13*f45*v2 + 5*f12*f3*f45*v2 + 2*f1*f32*f45*v2 - 3*f33*f45*v2 +
948 2*f12*f46*v2 - 4*f1*f3*f46*v2 + 2*f32*f46*v2 + 2*f16*f22*v3 - 3*f15*f23*v3 + f13*f25*v3 - 4*f16*f2*f4*v3 + 2*f15*f22*f4*v3 + 5*
f14*f23*f4*v3 - 3*f12*f25*f4*v3 + 2*f16*f42*v3 +
949 5*f15*f2*f42*v3 - 10*
f14*f22*f42*v3 + 3*f1*f25*f42*v3 - 4*f15*f43*v3 + 5*
f14*f2*f43*v3 - f25*f43*v3 - 5*f13*f2*f44*v3 + 10*f12*f22*f44*v3 - 5*f1*f23*f44*v3 + 4*f13*f45*v3 - 5*f12*f2*f45*v3 -
950 2*f1*f22*f45*v3 + 3*f23*f45*v3 - 2*f12*f46*v3 + 4*f1*f2*f46*v3 - 2*f22*f46*v3 -
951 f1mf22*f1mf32*f2mf3*(2*f2*f3*(f2 + f3) + 2*f12*(f2 + f3 - 2*f4) - 3*(f22 + f2*f3 + f32)*f4 + f1*(f22 + 5*f2*f3 + f32 - 6*(f2 + f3)*f4 + 5*f42) + 5*f43)*v4)/
952 (f1mf22*f1mf32*f1mf43*f2mf3*f2mf42*f3mf42)
959 XLAL_ERROR_REAL8(
XLAL_EINVAL,
"Error in IMRPhenomX_Intermediate_Amp_22_v3: IMRPhenomXIntermediateAmpVersion is not valid. Recommended flag is 104.\n");
/*
 * IMRPhenomX_Intermediate_Amp_22_delta5:
 * Closed-form solution for the quintic coefficient delta5 of the intermediate
 * amplitude polynomial. Inputs are the boundary derivatives d1, d4 and the
 * collocation values v1..v4 at frequencies f1..f4; the solution branch is
 * selected by IntAmpFlag (recommended flag is 104 per the error message).
 *
 * NOTE(review): this listing carries documentation-extraction artifacts —
 * stray original line numbers (e.g. "985 UNUSED"), mid-expression line breaks,
 * and missing braces/case labels. Compare against the canonical LALSuite
 * source before any behavioral edit.
 */
967 static double IMRPhenomX_Intermediate_Amp_22_delta5(
double d1,
double d4,
double v1,
double v2,
double v3,
double v4,
double f1,
double f2,
double f3,
double f4,
int IntAmpFlag)
 971 switch ( IntAmpFlag )
/* Pre-computed powers of the collocation frequencies, f1^n .. f4^n. */
 985 UNUSED
double f12 = f1*f1;
 986 UNUSED
double f13 = f12*f1;
 987 UNUSED
double f14 = f13*f1;
 988 UNUSED
double f15 =
f14*f1;
 989 UNUSED
double f16 = f15*f1;
 990 UNUSED
double f17 = f16*f1;
 992 UNUSED
double f22 = f2*f2;
 993 UNUSED
double f23 = f22*f2;
 994 UNUSED
double f24 = f23*f2;
 995 UNUSED
double f25 = f24*f2;
 996 UNUSED
double f26 = f25*f2;
 997 UNUSED
double f27 = f26*f2;
 999 UNUSED
double f32 = f3*f3;
 1000 UNUSED
double f33 = f32*f3;
 1001 UNUSED
double f34 = f33*f3;
 1002 UNUSED
double f35 = f34*f3;
 1003 UNUSED
double f36 = f35*f3;
 1004 UNUSED
double f37 = f36*f3;
 1006 UNUSED
double f42 = f4*f4;
 1007 UNUSED
double f43 = f42*f4;
 1008 UNUSED
double f44 = f43*f4;
 1009 UNUSED
double f45 = f44*f4;
 1010 UNUSED
double f46 = f45*f4;
 1011 UNUSED
double f47 = f46*f4;
/* Pre-computed pairwise frequency differences and their powers; these are
   the Vandermonde-style factors appearing in the closed-form solution. */
 1013 UNUSED
double f1mf2 = f1-f2;
 1014 UNUSED
double f1mf3 = f1-f3;
 1015 UNUSED
double f1mf4 = f1-f4;
 1016 UNUSED
double f2mf3 = f2-f3;
 1017 UNUSED
double f2mf4 = f2-f4;
 1018 UNUSED
double f3mf4 = f3-f4;
 1020 UNUSED
double f1mf22 = f1mf2*f1mf2;
 1021 UNUSED
double f1mf32 = f1mf3*f1mf3;
 1022 UNUSED
double f1mf42 = f1mf4*f1mf4;
 1023 UNUSED
double f2mf32 = f2mf3*f2mf3;
 1024 UNUSED
double f2mf42 = f2mf4*f2mf4;
 1025 UNUSED
double f3mf42 = f3mf4*f3mf4;
 1026 UNUSED
double f1mf43 = f1mf4*f1mf4*f1mf4;
/* Machine-generated closed-form expression for delta5 (presumably produced
   by a CAS solve of the interpolation conditions — TODO confirm against the
   canonical source). The denominator vanishes when any two collocation
   frequencies coincide; callers must supply distinct f1..f4. */
 1029 (d4*f1mf22*f1mf32*f1mf4*f2mf3*f2mf4*f3mf4 + d1*f1mf2*f1mf3*f1mf4*f2mf3*f2mf42*f3mf42 - 4*f12*f23*f32*v1 + 3*f1*f24*f32*v1 + 4*f12*f22*f33*v1 - 2*f24*f33*v1 - 3*f1*f22*f34*v1 + 2*f23*f34*v1 +
 1030 8*f12*f23*f3*f4*v1 - 6*f1*f24*f3*f4*v1 - 4*f1*f23*f32*f4*v1 + 3*f24*f32*f4*v1 - 8*f12*f2*f33*f4*v1 + 4*f1*f22*f33*f4*v1 + 6*f1*f2*f34*f4*v1 - 3*f22*f34*f4*v1 - 4*f12*f23*f42*v1 +
 1031 3*f1*f24*f42*v1 - 12*f12*f22*f3*f42*v1 + 8*f1*f23*f3*f42*v1 + 12*f12*f2*f32*f42*v1 - 4*f23*f32*f42*v1 + 4*f12*f33*f42*v1 - 8*f1*f2*f33*f42*v1 + 4*f22*f33*f42*v1 - 3*f1*f34*f42*v1 +
 1032 8*f12*f22*f43*v1 - 4*f1*f23*f43*v1 - f24*f43*v1 - 8*f12*f32*f43*v1 + 4*f1*f33*f43*v1 + f34*f43*v1 - 4*f12*f2*f44*v1 - f1*f22*f44*v1 + 2*f23*f44*v1 + 4*f12*f3*f44*v1 + f1*f32*f44*v1 -
 1033 2*f33*f44*v1 + 2*f1*f2*f45*v1 - f22*f45*v1 - 2*f1*f3*f45*v1 + f32*f45*v1 + f15*f32*v2 - 2*
f14*f33*v2 + f13*f34*v2 - 2*f15*f3*f4*v2 +
f14*f32*f4*v2 + 4*f13*f33*f4*v2 - 3*f12*f34*f4*v2 +
 1034 f15*f42*v2 + 4*
f14*f3*f42*v2 - 8*f13*f32*f42*v2 + 3*f1*f34*f42*v2 - 3*
f14*f43*v2 + 8*f12*f32*f43*v2 - 4*f1*f33*f43*v2 - f34*f43*v2 + 3*f13*f44*v2 - 4*f12*f3*f44*v2 - f1*f32*f44*v2 +
 1035 2*f33*f44*v2 - f12*f45*v2 + 2*f1*f3*f45*v2 - f32*f45*v2 - f15*f22*v3 + 2*
f14*f23*v3 - f13*f24*v3 + 2*f15*f2*f4*v3 -
f14*f22*f4*v3 - 4*f13*f23*f4*v3 + 3*f12*f24*f4*v3 - f15*f42*v3 -
 1036 4*
f14*f2*f42*v3 + 8*f13*f22*f42*v3 - 3*f1*f24*f42*v3 + 3*
f14*f43*v3 - 8*f12*f22*f43*v3 + 4*f1*f23*f43*v3 + f24*f43*v3 - 3*f13*f44*v3 + 4*f12*f2*f44*v3 + f1*f22*f44*v3 - 2*f23*f44*v3 +
 1037 f12*f45*v3 - 2*f1*f2*f45*v3 + f22*f45*v3 + f1mf22*f1mf32*f2mf3*(2*f2*f3 + f1*(f2 + f3 - 2*f4) - 3*(f2 + f3)*f4 + 4*f42)*v4)/(f1mf22*f1mf32*f1mf43*f2mf3*f2mf42*f3mf42)
/* Default case: invalid IntAmpFlag. FIX: the message previously named
   "..._Amp_22_v3" (copy-paste from a sibling function); it now names this
   function, matching the convention used by the other fits in this file. */
 1044 XLAL_ERROR_REAL8(
XLAL_EINVAL,
"Error in IMRPhenomX_Intermediate_Amp_22_delta5: IMRPhenomXIntermediateAmpVersion is not valid. Recommended flag is 104.\n");
/*
 * Fragment of IMRPhenomX_Intermediate_Amp_22_Ansatz (name grounded by the
 * error string below): evaluates the intermediate-amplitude ansatz at
 * frequency ff as ff^(7/6) divided by a polynomial in ff whose coefficients
 * a0..a5 are the delta coefficients stored on pAmp. The polynomial degree
 * (quartic vs quintic) is selected by IntAmpFlag.
 * NOTE(review): extraction artifacts — the function signature, braces, case
 * labels, and the a2/a4 declarations are missing from this listing.
 */
 1054 double a0 = pAmp->
delta0;
 1055 double a1 = pAmp->
delta1;
 1057 double a3 = pAmp->
delta3;
 1059 double a5 = pAmp->
delta5;
 1067 switch ( IntAmpFlag )
/* Quartic branch: Horner evaluation a0 + a1 ff + ... + a4 ff^4. */
 1073 polynomial = a0 + ff*(a1 + ff*(
a2 + ff*(a3 + ff*
a4)));
/* Quintic branch: same Horner scheme extended with the a5 ff^5 term. */
 1080 polynomial = a0 + ff*(a1 + ff*(
a2 + ff*(a3 + ff*(
a4 + ff*a5))));
 1086 XLAL_ERROR_REAL8(
XLAL_EINVAL,
"Error in IMRPhenomX_Intermediate_Amp_22_Ansatz: IMRPhenomXIntermediateAmpVersion is not valid. Recommended flag is 104.\n");
/* ff7o6 is presumably ff^(7/6) computed by the (missing) preamble — TODO confirm. */
 1096 return ff7o6 / polynomial;
/*
 * Intermediate-phase collocation-point fit (function signature not visible in
 * this listing). Returns noSpin + eqSpin + uneqSpin, where:
 *   - noSpin:   rational fit in symmetric mass ratio eta,
 *   - eqSpin:   correction in an effective spin S (S2/S3 are presumably its
 *               powers, declared in the missing preamble — TODO confirm),
 *   - uneqSpin: unequal-spin correction proportional to dchi*delta.
 * Two calibration branches are selected by IntPhaseFlag.
 * NOTE(review): the error message names *_Phase_22_v3 — the enclosing
 * function name is not visible here; verify the message matches it.
 */
 1104 double eta2 = eta*eta;
 1105 double eta3 = eta2*eta;
 1106 double eta4 = eta3*eta;
 1111 double noSpin, eqSpin, uneqSpin;
 1113 switch ( IntPhaseFlag )
/* First calibration branch. */
 1117 noSpin = (-85.65000000000003 - 413.9060603156141*eta - 4784.537141069749*eta2 + 1490.1208098409852*eta3 + 10040.191767983953*eta4)/(1. + 5.779247659216663*eta + 69.57046710291102*eta2);
 1119 eqSpin = (S*(4.692726457075735 - 7.038980703663575*S + eta2*(87.92695072054693 - 53.72849910673372*S - 40.89299875429382*S2) + 1.809693754991601*S2 + eta*(-13.873355143236994 + 11.141463520702901*S + 6.450651190707574*S2) + eta3*(-214.15302815157358 + 165.87430354875272*S + 53.19430999626736*S2 - 2.8784864337832756*S3) + 0.18008416467045393*S3))/(-0.19174821584528354 + 0.22339091810747033*S + 1.*eta3*S - 0.06756256922190347*S2);
 1121 uneqSpin = dchi*
delta*eta3*(2989.296934478898 - 8693.785422139264*eta + 230.93311202289289*S);
/* Second calibration branch. */
 1128 noSpin = (-85.31300000000003 - 10937.096332153926*eta + 24894.22839497624*eta2 - 41966.01695284205*eta3 + 67157.48707167112*eta4)/(1. + 132.44486857622041*eta);
 1130 eqSpin = (S*(-50.47357379240076 + 73.09727082201734*S + eta3*(3596.7780512820414 - 2213.251793086697*S - 2751.2131780622212*S2) + eta*(156.5226913355741 - 58.19427796874254*S - 243.59756424149438*S2) - 14.822551015459917*S2 + eta2*(-1399.6118302073216 + 505.3549556613735*S + 1522.6588404541071*S2)))/(1.9694717754728777 - 2.272396692525365*S + 1.*S2);
 1132 uneqSpin = dchi*
delta*eta3*(3453.778014556621 + eta*(-10048.103462582065 + 1757.9089741373239*S));
/* Invalid IntPhaseFlag. */
 1138 XLAL_ERROR_REAL8(
XLAL_EINVAL,
"Error in IMRPhenomX_Intermediate_Phase_22_v3: IMRPhenomXIntermediatePhaseVersion is not valid. Recommended flag is 104.\n");
 1143 return (noSpin + eqSpin + uneqSpin);
/*
 * Second intermediate-phase collocation-point fit (signature not visible in
 * this listing). Same structure as the fit above: noSpin rational fit in eta,
 * eqSpin correction in S (with powers up to S4), and an unequal-spin term in
 * dchi*delta; branch selected by IntPhaseFlag.
 * NOTE(review): the error message names *_Phase_22_v3 — confirm it matches
 * the actual enclosing function name.
 */
 1149 double eta2 = eta*eta;
 1150 double eta3 = eta2*eta;
 1156 double noSpin, eqSpin, uneqSpin;
 1158 switch ( IntPhaseFlag )
/* First calibration branch. */
 1162 noSpin = (-84.09400000000004 - 1782.8025405571802*eta + 5384.38721936653*eta2)/(1. + 28.515617312596103*eta + 12.404097877099353*eta2);
 1164 eqSpin = (S*(22.5665046165141 - 39.94907120140026*S + 4.668251961072*S2 + 12.648584361431245*S3 + eta2*(-298.7528127869681 + 14.228745354543983*S + 398.1736953382448*S2 + 506.94924905801673*S3 - 626.3693674479357*S4) - 5.360554789680035*S4 + eta*(152.07900889608595 - 121.70617732909962*S2 - 169.36311036967322*S3 + 182.40064586992762*S4)))/(-1.1571201220629341 + 1.*S);
 1166 uneqSpin = dchi*
delta*eta3*(5357.695021063607 - 15642.019339339662*eta + 674.8698102366333*S);
/* Second calibration branch. */
 1172 noSpin = (-82.54500000000004 - 5.58197349185435e6*eta - 3.5225742421184325e8*eta2 + 1.4667258334378073e9*eta3)/(1. + 66757.12830903867*eta + 5.385164380400193e6*eta2 + 2.5176585751772933e6*eta3);
 1174 eqSpin = (S*(19.416719811164853 - 36.066611959079935*S - 0.8612656616290079*S2 + eta2*(170.97203068800542 - 107.41099349364234*S - 647.8103976942541*S3) + 5.95010003393006*S3 + eta3*(-1365.1499998427248 + 1152.425940764218*S + 415.7134909564443*S2 + 1897.5444343138167*S3 - 866.283566780576*S4) + 4.984750041013893*S4 + eta*(207.69898051583655 - 132.88417400679026*S - 17.671713040498304*S2 + 29.071788188638315*S3 + 37.462217031512786*S4)))/(-1.1492259468169692 + 1.*S);
 1176 uneqSpin = dchi*
delta*eta3*(7343.130973149263 - 20486.813161100774*eta + 515.9898508588834*S);
/* Invalid IntPhaseFlag. */
 1182 XLAL_ERROR_REAL8(
XLAL_EINVAL,
"Error in IMRPhenomX_Intermediate_Phase_22_v3: IMRPhenomXIntermediatePhaseVersion is not valid. Recommended flag is 104.\n");
 1187 return (noSpin + eqSpin + uneqSpin);
/*
 * Intermediate-phase collocation-difference fit (signature not visible in
 * this listing; the error message names *_Phase_22_d23 — confirm against the
 * actual function name). Structure: noSpin rational fit in eta, eqSpin
 * correction in S, unequal-spin term in dchi*delta; branch per IntPhaseFlag.
 */
 1193 double eta2 = eta*eta;
 1194 double eta3 = eta2*eta;
 1198 double noSpin, eqSpin, uneqSpin;
 1200 switch ( IntPhaseFlag )
/* First calibration branch. */
 1204 noSpin = (eta*(-8.244230124407343 - 182.80239160435949*eta + 638.2046409916306*eta2 - 578.878727101827*eta3))/(-0.004863669418916522 - 0.5095088831841608*eta + 1.*eta2);
 1206 eqSpin = (S*(0.1344136125169328 + 0.0751872427147183*S + eta2*(7.158823192173721 + 25.489598292111104*S - 7.982299108059922*S2) + eta*(-5.792368563647471 + 1.0190164430971749*S + 0.29150399620268874*S2) + 0.033627267594199865*S2 + eta3*(17.426503081351534 - 90.69790378048448*S + 20.080325133405847*S2)))/(0.03449545664201546 - 0.027941977370442107*S + (0.005274757661661763 + 0.0455523144123269*eta - 0.3880379911692037*eta2 + 1.*eta3)*S2);
 1208 uneqSpin = 160.2975913661124*dchi*
delta*eta2;
/* Second calibration branch. */
 1214 noSpin = (eta*(0.9951733419499662 + 101.21991715215253*eta + 632.4731389009143*eta2))/(0.00016803066316882238 + 0.11412314719189287*eta + 1.8413983770369362*eta2 + 1.*eta3);
 1216 eqSpin = (S*(18.694178521101332 + 16.89845522539974*S + 4941.31613710257*eta2*S + eta*(-697.6773920613674 - 147.53381808989846*S2) + 0.3612417066833153*S2 + eta3*(3531.552143264721 - 14302.70838220423*S + 178.85850322465944*S2)))/(2.965640445745779 - 2.7706595614504725*S + 1.*S2);
 1218 uneqSpin = dchi*
delta*eta2*(356.74395864902294 + 1693.326644293169*eta2*S);
/* Invalid IntPhaseFlag. */
 1224 XLAL_ERROR_REAL8(
XLAL_EINVAL,
"Error in IMRPhenomX_Intermediate_Phase_22_d23: IMRPhenomXIntermediatePhaseVersion is not valid. Recommended flag is 104.\n");
 1229 return (noSpin + eqSpin + uneqSpin);
/*
 * Another intermediate-phase collocation-difference fit (signature not
 * visible; error message names *_Phase_22_d23 — confirm against the actual
 * function name). Same noSpin/eqSpin/uneqSpin decomposition as the sibling
 * fits; branch selected by IntPhaseFlag.
 */
 1235 double eta2 = eta*eta;
 1236 double eta3 = eta2*eta;
 1237 double eta4 = eta3*eta;
 1238 double eta6 = eta3*eta3;
 1242 double noSpin, eqSpin, uneqSpin;
 1244 switch ( IntPhaseFlag )
/* First calibration branch. */
 1248 noSpin = (0.3145740304678042 + 299.79825045000655*eta - 605.8886581267144*eta2 - 1302.0954503758007*eta3)/(1. + 2.3903703875353255*eta - 19.03836730923657*eta2);
 1250 eqSpin = (S*(1.150925084789774 - 0.3072381261655531*S + eta4*(12160.22339193134 - 1459.725263347619*S - 9581.694749116636*S2) + eta2*(1240.3900459406875 - 289.48392062629966*S - 1218.1320231846412*S2) - 1.6191217310064605*S2 + eta*(-41.38762957457647 + 60.47477582076457*S2) + eta3*(-7540.259952257055 + 1379.3429194626635*S + 6372.99271204178*S2)))/(-1.4325421225106187 + 1.*S);
 1252 uneqSpin = dchi*
delta*eta3*(-444.797248674011 + 1448.47758082697*eta + 152.49290092435044*S);
/* Second calibration branch; note the unequal-spin term here scales as eta^6. */
 1258 noSpin = (eta*(-5.126358906504587 - 227.46830225846668*eta + 688.3609087244353*eta2 - 751.4184178636324*eta3))/(-0.004551938711031158 - 0.7811680872741462*eta + 1.*eta2);
 1260 eqSpin = (S*(0.1549280856660919 - 0.9539250460041732*S - 539.4071941841604*eta2*S + eta*(73.79645135116367 - 8.13494176717772*S2) - 2.84311102369862*S2 + eta3*(-936.3740515136005 + 1862.9097047992134*S + 224.77581754671272*S2)))/(-1.5308507364054487 + 1.*S);
 1262 uneqSpin = 2993.3598520496153*dchi*
delta*eta6;
/* Invalid IntPhaseFlag. */
 1268 XLAL_ERROR_REAL8(
XLAL_EINVAL,
"Error in IMRPhenomX_Intermediate_Phase_22_d23: IMRPhenomXIntermediatePhaseVersion is not valid. Recommended flag is 104.\n");
 1273 return (noSpin + eqSpin + uneqSpin);
/*
 * Fragment of IMRPhenomX_Intermediate_Phase_22_d43 (name grounded by both
 * error strings below; the signature itself is missing from this listing).
 * For one IntPhaseFlag value (presumably 104) this point is deliberately
 * unsupported and raises XLAL_EINVAL; the other branch evaluates the usual
 * noSpin + eqSpin + uneqSpin fit.
 */
 1279 double eta2 = eta*eta;
 1280 double eta3 = eta2*eta;
 1281 double eta4 = eta3*eta;
 1285 double noSpin, eqSpin, uneqSpin;
 1287 switch ( IntPhaseFlag )
/* Branch where d43 must not be used: fail loudly rather than return a bogus value. */
 1291 XLAL_ERROR_REAL8(
XLAL_EINVAL,
"Error in IMRPhenomX_Intermediate_Phase_22_d43: Point d43 should not be called when using IMRPhenomXIntermediatePhaseVersion == 104.\n");
/* Supported calibration branch. */
 1296 noSpin = (0.4248820426833804 - 906.746595921514*eta - 282820.39946006844*eta2 - 967049.2793750163*eta3 + 670077.5414916876*eta4)/(1. + 1670.9440812294847*eta + 19783.077247023448*eta2);
 1298 eqSpin = (S*(0.22814271667259703 + 1.1366593671801855*S + eta3*(3499.432393555856 - 877.8811492839261*S - 4974.189172654984*S2) + eta*(12.840649528989287 - 61.17248283184154*S2) + 0.4818323187946999*S2 + eta2*(-711.8532052499075 + 269.9234918621958*S + 941.6974723887743*S2) + eta4*(-4939.642457025497 - 227.7672020783411*S + 8745.201037897836*S2)))/(-1.2442293719740283 + 1.*S);
 1300 uneqSpin = dchi*
delta*(-514.8494071830514 + 1493.3851099678195*eta)*eta3;
/* Invalid IntPhaseFlag. */
 1306 XLAL_ERROR_REAL8(
XLAL_EINVAL,
"Error in IMRPhenomX_Intermediate_Phase_22_d43: IMRPhenomXIntermediatePhaseVersion is not valid. Recommended flag is 104.\n");
 1311 return (noSpin + eqSpin + uneqSpin);
/*
 * Fragment of IMRPhenomX_Intermediate_Phase_22_Ansatz (name grounded by the
 * error string below): evaluates the intermediate-phase *derivative* ansatz
 * as a sum of inverse-frequency terms b_k * f^-k plus a Lorentzian centred on
 * the ringdown frequency. The two branches differ only in whether the b3
 * (f^-3) term is included.
 * NOTE(review): invff1..invff4 presumably hold ff^-1..ff^-4 (the m_one..
 * m_four members of the precomputed powers struct) — confirm in the
 * powers_of_f declaration.
 */
 1328 double invff1 = powers_of_f->
m_one;
 1329 double invff2 = powers_of_f->
m_two;
 1330 double invff3 = powers_of_f->
m_three;
 1331 double invff4 = powers_of_f->
m_four;
/* Ringdown damping frequency and ring frequency from the waveform struct. */
 1333 double fda = pWF->
fDAMP;
 1334 double frd = pWF->
fRING;
 1336 double cL = pPhase->
cLGR;
 1338 double LorentzianTerm;
 1339 double phaseOut = 0;
 1343 switch ( IntPhaseVersion )
/* Branch without the b3 term: Lorentzian of width ~2*fda centred at frd. */
 1349 LorentzianTerm = (4.0 * cL) / ( (4.0*fda*fda) + (ff - frd)*(ff - frd) );
 1352 phaseOut = pPhase->
b0 + pPhase->
b1*invff1 + pPhase->
b2*invff2 + pPhase->
b4*invff4 + LorentzianTerm;
/* Branch with the full b0..b4 expansion (same Lorentzian). */
 1360 LorentzianTerm = (4.0 * cL) / ( (4.0*fda*fda) + (ff - frd)*(ff - frd) );
 1363 phaseOut = (pPhase->
b0) + (pPhase->
b1)*invff1 + (pPhase->
b2)*invff2 + (pPhase->
b3)*invff3 + (pPhase->
b4)*invff4 + LorentzianTerm;
/* Invalid IntPhaseVersion. */
 1369 XLAL_ERROR_REAL8(
XLAL_EINVAL,
"Error in IMRPhenomX_Intermediate_Phase_22_Ansatz: IMRPhenomXIntermediatePhaseVersion is not valid. Recommended flag is 104.\n");
/*
 * Fragment of IMRPhenomX_Intermediate_Phase_22_AnsatzInt (name grounded by
 * the error string below): the analytic antiderivative of the phase ansatz
 * above. Term-by-term: b0 -> b0*f, b1/f -> b1*log(f), b2/f^2 -> -b2/f,
 * b3/f^3 -> -b3/(2 f^2), b4/f^4 -> -b4/(3 f^3), and the Lorentzian
 * integrates to (2 cL / fda) * atan((f - frd)/(2 fda)).
 * NOTE(review): invff1..invff3 / logfv presumably hold f^-1..f^-3 and log(f)
 * from the precomputed powers struct — confirm in its declaration.
 */
 1385 double invff1 = powers_of_f->
m_one;
 1386 double invff2 = powers_of_f->
m_two;
 1387 double invff3 = powers_of_f->
m_three;
 1388 double logfv = powers_of_f->
log;
/* Ringdown frequency and damping frequency. */
 1390 double frd = pWF->
fRING;
 1391 double fda = pWF->
fDAMP;
/* Intermediate-phase expansion coefficients. */
 1393 double b0 = pPhase->
b0;
 1394 double b1 = pPhase->
b1;
 1395 double b2 = pPhase->
b2;
 1396 double b3 = pPhase->
b3;
 1397 double b4 = pPhase->
b4;
 1399 double cL = pPhase->
cLGR;
 1405 switch ( IntPhaseVersion )
/* Branch without the b3 term (mirrors the derivative ansatz's first branch). */
 1410 phaseOut =
b0*f +
b1*logfv -
b2*invff1 - (b4*invff3/3.0) + ( 2.0*cL*atan( (f - frd) / (2.0 * fda) ) ) / fda ;
/* Branch with the full b0..b4 expansion. */
 1415 phaseOut =
b0*f +
b1*logfv -
b2*invff1 - b3*invff2/2.0 - (b4*invff3/3.0) + ( 2.0 * cL * atan( (f - frd) / (2.0 * fda) ) ) / fda ;
/* Invalid IntPhaseVersion. */
 1420 XLAL_ERROR_REAL8(
XLAL_EINVAL,
"Error in IMRPhenomX_Intermediate_Phase_22_AnsatzInt: IMRPhenomXIntermediatePhaseVersion is not valid. Recommended flag is 104.\n");
static double double delta
#define XLAL_ERROR_REAL8(...)