I stumbled upon a strange problem. I have two sets of four octets:
1) 255.255.255.0 (like a subnet mask)
2) 128.127.0.0 (a made-up mask)
Their binary representations should be:
1) 11111111.11111111.11111111.00000000
2) 10000000.01111111.00000000.00000000
But from my simple application I get:
1) 11111111.11111111.11111111.00000000
2) 11111110.01111111.00000000.00000000
I think the second value is wrong, but please correct me; I may have misunderstood something important here.
The code for generating the binary representation is:
#include <stdio.h>
#include <stdlib.h>
void PrintBinary(unsigned int n)
{
    unsigned int tmp;
    int i = 1;
    tmp = n;    /* keep a copy of the original value */
    do {
        printf("%d", (n & tmp) ? 1 : 0);
        if (i % 8 == 0 && i < 32)
            printf(".");    /* dot between octets */
        ++i;
        n >>= 1;
    } while (n > 0);
    printf("\n");
}
int main(int argc, char **argv)
{
    unsigned int octet1 = 128;
    unsigned int octet2 = 127;
    unsigned int octet3 = 0;
    unsigned int octet4 = 0;
    /* pack the four octets into one 32-bit value */
    unsigned int a = octet1 << 24 | octet2 << 16 | octet3 << 8 | octet4;
    PrintBinary(a);
    return 0;
}
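
For comparison, here is a minimal sketch of what I was trying to achieve, as far as I understand it: isolate one bit at a time with a single-bit mask and walk from the most significant bit down, so the octets print left to right. (PrintBinaryExpected is just a name I made up for this sketch; it shows how I would expect the function to behave, not necessarily the proper fix.)

#include <stdio.h>

/* Sketch: test each bit against a single-bit mask, most significant
   bit first, so the four octets print left to right. */
void PrintBinaryExpected(unsigned int n)
{
    for (int i = 31; i >= 0; --i) {
        printf("%u", (n >> i) & 1u);  /* isolate bit i */
        if (i % 8 == 0 && i > 0)
            printf(".");              /* dot between octets */
    }
    printf("\n");
}

With the same a from main above (128.127.0.0), this sketch prints 10000000.01111111.00000000.00000000, which is what I expected.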