0

What I'm making is input a number that type is 'unsigned long long' and then make it to binary form and show it by 16 figures.

Here's my code and result.

#define _CRT_SECURE_NO_WARNINGS
#include <stdio.h>

char D_PAN_ID[16];
char D_ADDRESS[16];
char S_PAN_ID[16];
char S_ADDRESS[16];

int main()
{
int bit1, bit2, bit3, bit4, k1, k2, k3, k4;
unsigned long long dec = 5;
/*
printf("8바이트 정수를 이진수로 변환\n");
scanf("%llu", &dec);
*/
printf("%llu 를(을) 이진수로 변환하면:\n", dec);

for (bit1 = 63; bit1 >= 48; bit1--)
{
    k1 = dec >> bit1;

    if (k1 & 1) {
        D_PAN_ID[63 - bit1] = '1';
        printf(&D_PAN_ID[63 - bit1]);
    }
    else {
        D_PAN_ID[63 - bit1] = '0';
        printf(&D_PAN_ID[63 - bit1]);
    }
}

printf("\n\n");

for (bit2 = 47; bit2 >= 32; bit2--)
{
    k2 = dec >> bit2;

    if (k2 & 1) {
        D_ADDRESS[47 - bit2] = '1';
        printf(&D_ADDRESS[47 - bit2]);
    }
    else {
        D_ADDRESS[47 - bit2] = '0';
        printf(&D_ADDRESS[47 - bit2]);
    }
}

printf("\n\n");

for (bit3 = 31; bit3 >= 16; bit3--)
{
    k3 = dec >> bit3;

    if (k3 & 1) {
        S_PAN_ID[31 - bit3] = '1';
        printf(&S_PAN_ID[31 - bit3]);
    }
    else {
        S_PAN_ID[31 - bit3] = '0';
        printf(&S_PAN_ID[31 - bit3]);
    }
}

printf("\n\n");

for (bit4 = 15; bit4 >= 0; bit4--)
{
    k4 = dec >> bit4;

    if (k4 & 1) {
        S_ADDRESS[15 - bit4] = '1';
        printf(&S_ADDRESS[15 - bit4]);
    }
    else {
        S_ADDRESS[15 - bit4] = '0';
        printf(&S_ADDRESS[15 - bit4]);
    }
}

return 0;
}

And what I got is this. I think it is adding upper result at back since printing second result. Like this:

result1

result2 + result1

result3 + result1 + result2

result4 + result1 + result2 + result3

Output

How do I fix it to show like

000000000000

000000000000

000000000000

000000000101
underscore_d
  • 6,309
  • 3
  • 38
  • 64
  • Change the `printf` statements to `printf("%c", D_PAN_ID[63 - bit1]);` etc – user8277998 May 14 '21 at 09:55
  • @Rabbit_Klm, just to make sure I've got the question correctly.. So your target is to input an unsigned long long, and then break it down into 16-bit chunks and show in binary. Right? – Just Shadow May 14 '21 at 09:55
  • @Just Shadow, yes its correct! – Rabbit_KIm May 14 '21 at 10:09
  • @Rabbit_KIm, I've posted a quick answer to that problem. Please let me know if more details and description is needed. – Just Shadow May 14 '21 at 10:20
  • @Rabbit_KIm , also, can you please test your code with number 65535 instead of 5. I suspect you get `result4 + result3 + result2 + result1` instead of `result4 + result1 + result2 + result3` – Just Shadow May 14 '21 at 10:26

0 Answers0