Friday, May 28, 2021

Random normal distribution generator comparison

For a project that relies on generating normally distributed random numbers, I've written three different implementations of the generator: the Box-Muller method, Marsaglia's polar method, and an inverse cumulative distribution (inverse CDF) approximation. I've compared them in terms of speed, and it turns out the inverse method is the fastest of the three. Is this expected, or did I mess up while writing the other two? I know numpy used the polar method for a long time, so I assumed it would be the fastest of the three.

I compiled with gcc 9.3.0 using the -O3 flag.
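(The exact command isn't shown here; assuming everything lives in a single file called gauss_bench.c, it would be something like the line below. The -lm flag links the math library for sqrt/log/sin/cos.)

gcc -O3 -o gauss_bench gauss_bench.c -lm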

Here is the code for the generators:

#include <stdlib.h>   /* rand(), srand(), RAND_MAX */
#include <math.h>     /* sqrt(), log(), sin(), cos(), M_PI */

struct gaussGenState
{
    float gauss;
    int has_gauss;
};

void initializeGauss(struct gaussGenState *state)
{
    state->has_gauss = 0;
    state->gauss = 0.0;
}

float gauss(struct gaussGenState *state)
{
    /*
        Implementation of Marsaglia's polar method for calculating normally distributed 
        gaussian variables
        seeding of rand needs to be done outside of function (with srand())
    */
    if (state->has_gauss){
        const float temp = state->gauss;
        state->has_gauss = 0;
        state->gauss = 0.0;
        return temp;
    }   
    else {
        float f, x1, x2, r2;

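        /* rejection sampling: draw points in the square [-1,1]x[-1,1] and keep
           only those strictly inside the unit circle */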
        do {
            x1 = ((float)rand() / RAND_MAX) * 2 - 1;
            x2 = ((float)rand() / RAND_MAX) * 2 - 1;
            r2 = x1 * x1 + x2 * x2;
        } while (r2 >= 1.0 || r2 == 0.0);

        f = sqrt(-2.0 * log(r2) / r2);

        state->gauss = f * x1;
        state->has_gauss = 1;
        return f * x2;
    }
}

float gaussbm(struct gaussGenState *state)
{
    /*
        Implementation of Box-Muller method for calculating normally distributed gaussian 
        variables
        seeding of rand needs to be done outside of function (with srand())
    */
    if (state->has_gauss){
        const float temp = state->gauss;
        state->has_gauss = 0;
        state->gauss = 0.0;
        return temp;
    }
    else {
        float u, v, f1, f2;

        /* avoid u == 0, which would send log(u) to -infinity */
        do {
            u = ((float)rand() / RAND_MAX);
        } while (u == 0.0f);
        v = ((float)rand() / RAND_MAX);

        f1 = sqrt(-2.0 * log(u));
        f2 = 2*M_PI*v;

        state->gauss = f1 * cos(f2);
        state->has_gauss = 1;
        return f1 * sin(f2);
    }
}

float gaussInv(void)
{
    /*
        Implementation of Inverse cumulative distribution method for calculating normally 
        distributed gaussian variables
        Approximation relative error less than 1.15e-9 over the entire region.
        seeding of rand needs to be done outside of function (with srand())
    */
    float p, q, r;

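    /* coefficients of the rational approximations: a/b for the central region,
       c/d for the lower and upper tails */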
    float a[6] = {-3.969683028665376e+01,  2.209460984245205e+02,
                    -2.759285104469687e+02,  1.383577518672690e+02,
                    -3.066479806614716e+01,  2.506628277459239e+00};
    float b[5] = {-5.447609879822406e+01,  1.615858368580409e+02,
                    -1.556989798598866e+02,  6.680131188771972e+01,
                    -1.328068155288572e+01};
    float c[6] = {-7.784894002430293e-03, -3.223964580411365e-01,
                    -2.400758277161838e+00, -2.549732539343734e+00,
                     4.374664141464968e+00,  2.938163982698783e+00};
    float d[4] = { 7.784695709041462e-03,  3.224671290700398e-01,
                    2.445134137142996e+00,  3.754408661907416e+00};

    /* avoid p == 0 and p == 1, which would send log() to -infinity in the tail branches */
    do {
        p = ((float)rand()/RAND_MAX);
    } while (p == 0.0f || p == 1.0f);

    if (p < 0.02425){
        q = sqrt(-2*log(p));
        return ((((((c[0]*q+c[1])*q+c[2])*q+c[3])*q+c[4])*q+c[5]) /
               ((((d[0]*q+d[1])*q+d[2])*q+d[3])*q+1));
    }

    if ((1-0.02425) < p){
        q = sqrt(-2*log(1-p));
        return -((((((c[0]*q+c[1])*q+c[2])*q+c[3])*q+c[4])*q+c[5]) / 
                ((((d[0]*q+d[1])*q+d[2])*q+d[3])*q+1));
    }

    q = p - 0.5;
    r = q*q;
    return ((((((a[0]*r+a[1])*r+a[2])*r+a[3])*r+a[4])*r+a[5])*q /
           (((((b[0]*r+b[1])*r+b[2])*r+b[3])*r+b[4])*r+1));
}
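
A minimal timing harness along these lines can be used for the comparison. This is just a sketch: the sample count and the use of clock() are arbitrary choices, and it assumes the three generators above live in the same file.

#include <stdio.h>
#include <time.h>

#define NSAMPLES 10000000L

int main(void)
{
    struct gaussGenState state;
    volatile float sink = 0.0f;   /* accumulate results so the loops are not optimized away */
    clock_t start;
    long i;

    srand((unsigned)time(NULL));

    initializeGauss(&state);
    start = clock();
    for (i = 0; i < NSAMPLES; i++)
        sink += gauss(&state);
    printf("Polar:       %.3f s\n", (double)(clock() - start) / CLOCKS_PER_SEC);

    initializeGauss(&state);
    start = clock();
    for (i = 0; i < NSAMPLES; i++)
        sink += gaussbm(&state);
    printf("Box-Muller:  %.3f s\n", (double)(clock() - start) / CLOCKS_PER_SEC);

    start = clock();
    for (i = 0; i < NSAMPLES; i++)
        sink += gaussInv();
    printf("Inverse CDF: %.3f s\n", (double)(clock() - start) / CLOCKS_PER_SEC);

    (void)sink;
    return 0;
}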


