Why does the following C code produce real numbers only in the range 0 to 1 (e.g. 0.840188, 0.394383, ...) for the doubles a and b, while RAND_MAX appears to print as 0.000000? Shouldn't RAND_MAX be the maximum value returned by the rand() function?
    #include <stdio.h>
    #include <stdlib.h>
    /*
     * Generates 100 pairs of pseudo-random doubles in [0.0, 1.0] and prints
     * them along with their difference and the value of RAND_MAX.
     *
     * BUG FIX: RAND_MAX expands to an integer constant (commonly 2147483647),
     * so it must be printed with %d, not %f. Passing an int to printf where
     * a double is expected is undefined behavior — that mismatch is why
     * RAND_MAX appeared to be 0.000000. rand() itself always returns an int
     * in [0, RAND_MAX]; dividing by (double)RAND_MAX scales it into [0, 1].
     */
    int main(void)
    {
        double a, b, c;
        for (int i = 0; i < 100; i++) {
            /* Scale rand() from [0, RAND_MAX] down to [0.0, 1.0]. */
            a = (double)rand() / (double)RAND_MAX;
            b = (double)rand() / (double)RAND_MAX;
            c = a - b; /* c lies in [-1.0, 1.0] */
            /* %d for RAND_MAX (int), %f for the doubles. */
            printf("itteration : %d values a=%f,b=%f,c=%f, RAND_MAX=%d \n",
                   i, a, b, c, RAND_MAX);
        }
        return 0;
    }
 
     
     
     
    