Why does using the bit mask 0x30000 give a different result than using 0x3
Today I wrote a program but something is wrong with it, so I made this
test program. Well, 0x30000 is equal to 0x3 << 16, isn't it?
In this program, it shows that 0x30000 is equal to 0x3 << 16 in the
beginning, but finally, this program give me totally different result!, I
don't why, This result is so strange!
The output is:
MASK1
0000 0000 0000 0011 0000 0000 0000 0000
MASK2
0000 0000 0000 0011 0000 0000 0000 0000
equal!!!!!
Before 1
0000 0000 0001 0010 1101 0110 1000 0111
After 1
0000 0000 0001 0000 1101 0110 1000 0111
Before 2
0000 0000 0001 0010 1101 0110 1000 0111
After 2
0000 0000 0001 0000 0000 0000 0000 0000
Why are the outputs after MASK1 and MASK2 totally different? MASK1 did the
right thing, but something seems to go wrong with MASK2. Why do I get
these results?
#include <stdio.h>
#include <string.h>
#define MASK1 0x30000
#define MASK2 0x3 << 16
void show_binary(unsigned long n);
void change_alignment(unsigned long *s);
void eatline(void);
int main(void)
{
unsigned long num;
puts("MASK1");
show_binary(MASK1);
puts("MASK2");
show_binary(MASK2);
num = 1234567;
if(MASK1 == MASK2)
puts("equal!!!!!");
puts("Before 1");
show_binary(num);
num &= ~MASK1;
puts("After 1");
show_binary(num);
num = 1234567;
puts("Before 2");
show_binary(num);
num &= ~MASK2;
puts("After 2");
show_binary(num);
return 0;
}
void show_binary(unsigned long n)
{
unsigned long mask = 1;
int size = sizeof(unsigned long) * 8;
char bin[size + 1];
int index = 0;
bin[size] = '\0';
while(size > 0)
{
if((mask & n) == mask)
bin[size - 1] = '1';
else
bin[size - 1] = '0';
mask <<= 1;
size--;
}
while(bin[index])
{
putchar(bin[index]);
if(++index % 4 == 0 && bin[index])
putchar(' ');
}
putchar('\n');
}
void eatline(void)
{
while(getchar() != '\n')
continue;
}
No comments:
Post a Comment