From 9e72c37b2544ae4da329de6e179a77376b2b6134 Mon Sep 17 00:00:00 2001
From: "Steinar H. Gunderson"
Date: Sat, 8 Dec 2007 18:24:20 +0100
Subject: [PATCH] Switch to the standard normalization of the logistic pdf.

Much more consistent with the RDs used by the normal distribution.
---
 foosrank.cpp | 33 +++++++++++++--------------------
 1 file changed, 13 insertions(+), 20 deletions(-)

diff --git a/foosrank.cpp b/foosrank.cpp
index c3bfc88..1c4353b 100644
--- a/foosrank.cpp
+++ b/foosrank.cpp
@@ -16,11 +16,6 @@ static const double int_step_size = 75.0;
 // rating constant (see below)
 static const double rating_constant = 455.0;
 
-#if USE_LOGISTIC_DISTRIBUTION
-// constant used in the logistic pdf
-static const double l_const = M_PI / (2.0 * sqrt(3.0));
-#endif
-
 using namespace std;
 
 static double prob_score_real(int k, int a, double binomial, double rd_norm);
@@ -149,8 +144,7 @@ static void compute_opponent_rating_pdf(int k, int a, double mu2, double sigma2,
 		// opponent's pdf
 #if USE_LOGISTIC_DISTRIBUTION
 		double z = (x1 - mu2) * invsigma2;
-		double ch = cosh(l_const * z);
-		func1[i].real() = 1.0 / (ch * ch);
+		func1[i].real() = sech2(0.5 * z);
 #else
 		double z = (x1 - mu2) * invsq2sigma2;
 		func1[i].real() = exp(-z*z);
@@ -316,11 +310,11 @@ static void least_squares(vector<pair<double, double> > &curve, double mu1, doub
 
 #if USE_LOGISTIC_DISTRIBUTION
 		// df/dA(x_i)
-		matA[i + 0 * curve.size()] = sech2(l_const * (x-mu)/sigma);
+		matA[i + 0 * curve.size()] = sech2(0.5 * (x-mu)/sigma);
 
 		// df/dµ(x_i)
-		matA[i + 1 * curve.size()] = 2.0 * l_const * A * matA[i + 0 * curve.size()]
-			* tanh(l_const * (x-mu)/sigma) / sigma;
+		matA[i + 1 * curve.size()] = A * matA[i + 0 * curve.size()]
+			* tanh(0.5 * (x-mu)/sigma) / sigma;
 
 		// df/dσ(x_i)
 		matA[i + 2 * curve.size()] =
@@ -346,7 +340,7 @@ static void least_squares(vector<pair<double, double> > &curve, double mu1, doub
 		double y = curve[i].second;
 
 #if USE_LOGISTIC_DISTRIBUTION
-		dbeta[i] = y - A * sech2(l_const * (x-mu)/sigma);
+		dbeta[i] = y - A * sech2(0.5 * (x-mu)/sigma);
 #else
 		dbeta[i] = y - A * exp(- (x-mu)*(x-mu)/(2.0*sigma*sigma));
 #endif
@@ -389,8 +383,7 @@ static void compute_new_rating(double mu1, double sigma1, double mu2, double sig
 		// my pdf
 		double z = (r1 - mu1) / sigma1;
 #if USE_LOGISTIC_DISTRIBUTION
-		double ch = cosh(l_const * z);
-		curve[i].second /= (ch * ch);
+		curve[i].second *= sech2(0.5 * z);
 #else
 		double gaussian = exp(-(z*z/2.0));
 		curve[i].second *= gaussian;
@@ -418,8 +411,8 @@ static void compute_new_rating(double mu1, double sigma1, double mu2, double sig
 
 	// pdf normalization factors
 #if USE_LOGISTIC_DISTRIBUTION
-	sum *= M_PI / (sigma1 * 4.0 * sqrt(3.0));
-	sum *= M_PI / (sigma2 * 4.0 * sqrt(3.0));
+	sum /= (sigma1 * 4.0);
+	sum /= (sigma2 * 4.0);
 #else
 	sum /= (sigma1 * sqrt(2.0 * M_PI));
 	sum /= (sigma2 * sqrt(2.0 * M_PI));
@@ -469,7 +462,7 @@ static void compute_new_double_rating(double mu1, double sigma1, double mu2, dou
 
 #if USE_LOGISTIC_DISTRIBUTION
 		double z = (r2 - mu2) * invsigma2;
-		double gaussian = sech2(l_const * z);
+		double gaussian = sech2(0.5 * z);
 #else
 		double z = (r2 - mu2) * invsq2sigma2;
 		double gaussian = exp(-z*z);
@@ -479,7 +472,7 @@ static void compute_new_double_rating(double mu1, double sigma1, double mu2, dou
 
 #if USE_LOGISTIC_DISTRIBUTION
 		double z = (r1 - mu1) / sigma1;
-		double gaussian = sech2(l_const * z);
+		double gaussian = sech2(0.5 * z);
 #else
 		double z = (r1 - mu1) / sigma1;
 		double gaussian = exp(-(z*z/2.0));
@@ -511,9 +504,9 @@ static void compute_new_double_rating(double mu1, double sigma1, double mu2, dou
 
 	// pdf normalization factors
 #if USE_LOGISTIC_DISTRIBUTION
-	sum *= M_PI / (sigma1 * 4.0 * sqrt(3.0));
-	sum *= M_PI / (sigma2 * 4.0 * sqrt(3.0));
-	sum *= M_PI / (sigma_t * 4.0 * sqrt(3.0));
+	sum /= (sigma1 * 4.0);
+	sum /= (sigma2 * 4.0);
+	sum /= (sigma_t * 4.0);
 #else
 	sum /= (sigma1 * sqrt(2.0 * M_PI));
 	sum /= (sigma2 * sqrt(2.0 * M_PI));
-- 
2.39.2
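
Note (not part of the patch): for readers comparing the two conventions, the
sketch below shows the normalization the patch adopts. The standard logistic
pdf with location mu and scale sigma is
f(x) = sech^2((x - mu) / (2*sigma)) / (4*sigma), which is why the kernel
becomes sech2(0.5 * z) with z = (x - mu) / sigma and the per-pdf factor
becomes 1 / (4 * sigma), replacing l_const = M_PI / (2.0 * sqrt(3.0)). Under
the old constant, sigma was the logistic distribution's standard deviation;
under the standard normalization, sigma is its scale parameter. The sech2()
helper is assumed to match the one already used in foosrank.cpp, and
logistic_pdf() is a hypothetical name used only for illustration.

#include <cmath>

// Assumed to mirror the sech2() helper already present in foosrank.cpp:
// sech^2(x) = 1 / cosh^2(x).
static double sech2(double x)
{
	double ch = std::cosh(x);
	return 1.0 / (ch * ch);
}

// Standard logistic pdf with location mu and scale sigma:
//   f(x) = sech^2((x - mu) / (2 * sigma)) / (4 * sigma)
// The kernel is sech2(0.5 * z) with z = (x - mu) / sigma, and the
// normalization factor is 1 / (4 * sigma), matching the new code paths.
// (logistic_pdf is a hypothetical name, not a function in foosrank.cpp.)
static double logistic_pdf(double x, double mu, double sigma)
{
	double z = (x - mu) / sigma;
	return sech2(0.5 * z) / (4.0 * sigma);
}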