/* Computes the root of diff() by dichotomy (bisection), starting from the
 * initial value init, for a specific variable or constraint (var_cnst),
 * iterating until the error drops below min_error. */
static double dichotomy(double init, double diff(double, void *),
                        void *var_cnst, double min_error);
/* Computes the value of the differential of constraint param_cnst applied to lambda */
static double partial_diff_lambda(double lambda, void *param_cnst);
if (var->bound >= 0) {
XBT_DEBUG("Working on var (%p)", var);
var->new_mu = new_mu(var);
-/* dual_updated += (fabs(var->new_mu-var->mu)>dichotomy_min_error); */
-/* XBT_DEBUG("dual_updated (%d) : %1.20f",dual_updated,fabs(var->new_mu-var->mu)); */
+/* dual_updated += (fabs(var->new_mu-var->mu)>dichotomy_min_error); */
+/* XBT_DEBUG("dual_updated (%d) : %1.20f",dual_updated,fabs(var->new_mu-var->mu)); */
XBT_DEBUG("Updating mu : var->mu (%p) : %1.20f -> %1.20f", var,
var->mu, var->new_mu);
var->mu = var->new_mu;
new_obj = dual_objective(var_list, cnst_list);
XBT_DEBUG("Improvement for Objective (%g -> %g) : %g", obj, new_obj,
obj - new_obj);
- xbt_assert1(obj - new_obj >= -epsilon_min_error,
+ xbt_assert(obj - new_obj >= -epsilon_min_error,
"Our gradient sucks! (%1.20f)", obj - new_obj);
obj = new_obj;
}
new_obj = dual_objective(var_list, cnst_list);
XBT_DEBUG("Improvement for Objective (%g -> %g) : %g", obj, new_obj,
obj - new_obj);
- xbt_assert1(obj - new_obj >= -epsilon_min_error,
+ xbt_assert(obj - new_obj >= -epsilon_min_error,
"Our gradient sucks! (%1.20f)", obj - new_obj);
obj = new_obj;
}
double diff_0 = 0.0;
min = max = init;
- XBT_IN;
+ XBT_IN();
if (init == 0.0) {
min = max = 0.5;
if ((diff_0 = diff(1e-16, var_cnst)) >= 0) {
XBT_CDEBUG(surf_lagrange_dichotomy, "returning 0.0 (diff = %e)", diff_0);
- XBT_OUT;
+ XBT_OUT();
return 0.0;
}
min = middle;
overall_error = max_diff - middle_diff;
min_diff = middle_diff;
-/* SHOW_EXPR(overall_error); */
+/* SHOW_EXPR(overall_error); */
} else if (middle_diff > 0) {
XBT_CDEBUG(surf_lagrange_dichotomy, "Decreasing max");
max = middle;
overall_error = max_diff - middle_diff;
max_diff = middle_diff;
-/* SHOW_EXPR(overall_error); */
+/* SHOW_EXPR(overall_error); */
} else {
overall_error = 0;
-/* SHOW_EXPR(overall_error); */
+/* SHOW_EXPR(overall_error); */
}
} else if (min_diff == 0) {
max = min;
} else if (min_diff > 0 && max_diff < 0) {
XBT_CWARN(surf_lagrange_dichotomy,
"The impossible happened, partial_diff(min) > 0 && partial_diff(max) < 0");
- abort();
+ xbt_abort();
} else {
XBT_CWARN(surf_lagrange_dichotomy,
"diffmin (%1.20f) or diffmax (%1.20f) are something I don't know, taking no action.",
min_diff, max_diff);
- abort();
+ xbt_abort();
}
}
XBT_CDEBUG(surf_lagrange_dichotomy, "returning %e", (min + max) / 2.0);
- XBT_OUT;
+ XBT_OUT();
return ((min + max) / 2.0);
}
double diff = 0.0;
double sigma_i = 0.0;
- XBT_IN;
+ XBT_IN();
elem_list = &(cnst->element_set);
XBT_CDEBUG(surf_lagrange_dichotomy, "Computing diff of cnst (%p)", cnst);
XBT_CDEBUG(surf_lagrange_dichotomy,
"d D/d lambda for cnst (%p) at %1.20f = %1.20f", cnst, lambda,
diff);
- XBT_OUT;
+ XBT_OUT();
return diff;
}
/* Objective function used by the Vegas fairness model: a scaled logarithmic
 * utility, VEGAS_SCALING * weight * log(x).  x must be strictly positive
 * (log() is undefined otherwise). */
double func_vegas_f(lmm_variable_t var, double x)
{
  xbt_assert(x > 0.0, "Don't call me with stupid values! (%1.20f)", x);
  return VEGAS_SCALING * var->weight * log(x);
}
/* First derivative of func_vegas_f with respect to x:
 * d/dx [VEGAS_SCALING * weight * log(x)] = VEGAS_SCALING * weight / x.
 * x must be strictly positive (division by zero otherwise). */
double func_vegas_fp(lmm_variable_t var, double x)
{
  xbt_assert(x > 0.0, "Don't call me with stupid values! (%1.20f)", x);
  return VEGAS_SCALING * var->weight / x;
}
/* Inverse of func_vegas_fp: solves y = VEGAS_SCALING * weight / x for x,
 * i.e. returns weight / (x / VEGAS_SCALING).  x must be strictly positive
 * (division by zero otherwise). */
double func_vegas_fpi(lmm_variable_t var, double x)
{
  xbt_assert(x > 0.0, "Don't call me with stupid values! (%1.20f)", x);
  return var->weight / (x / VEGAS_SCALING);
}
#define RENO_SCALING 1.0
double func_reno_f(lmm_variable_t var, double x)
{
- xbt_assert0(var->weight > 0.0, "Don't call me with stupid values!");
+ xbt_assert(var->weight > 0.0, "Don't call me with stupid values!");
return RENO_SCALING * sqrt(3.0 / 2.0) / var->weight *
atan(sqrt(3.0 / 2.0) * var->weight * x);
{
double res_fpi;
- xbt_assert0(var->weight > 0.0, "Don't call me with stupid values!");
- xbt_assert0(x > 0.0, "Don't call me with stupid values!");
+ xbt_assert(var->weight > 0.0, "Don't call me with stupid values!");
+ xbt_assert(x > 0.0, "Don't call me with stupid values!");
res_fpi =
1.0 / (var->weight * var->weight * (x / RENO_SCALING)) -
2.0 / (3.0 * var->weight * var->weight);
if (res_fpi <= 0.0)
return 0.0;
-/* xbt_assert0(res_fpi>0.0,"Don't call me with stupid values!"); */
+/* xbt_assert(res_fpi>0.0,"Don't call me with stupid values!"); */
return sqrt(res_fpi);
}
#define RENO2_SCALING 1.0
double func_reno2_f(lmm_variable_t var, double x)
{
- xbt_assert0(var->weight > 0.0, "Don't call me with stupid values!");
+ xbt_assert(var->weight > 0.0, "Don't call me with stupid values!");
return RENO2_SCALING * (1.0 / var->weight) * log((x * var->weight) /
(2.0 * x * var->weight +
3.0));
double res_fpi;
double tmp;
- xbt_assert0(x > 0.0, "Don't call me with stupid values!");
+ xbt_assert(x > 0.0, "Don't call me with stupid values!");
tmp = x * var->weight * var->weight;
res_fpi = tmp * (9.0 * x + 24.0);