#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <limits.h>
#define BINARY_ACCURACY 32
int convertDezToDualString(const int number, char stringDualNumber[]){
int num = number;
if (number<0){
//nothing here yet
} else {
for (int i = 0; i<BINARY_ACCURACY; i++){
sprintf(stringDualNumber,"%d%s",num%2,stringDualNumber);
//I think that's the part where the thing happens
printf("%s",stringDualNumber); //For Testing purposes
printf("\n");
num/=2;
}
}
}
int main() {
char* stringDualNumber= (char*) calloc(BINARY_ACCURACY,sizeof(char));
const int dezNumber = getInt(); //Gets a decimal number and returns if it's valid
convertDezToDualString(dezNumber, stringDualNumber);
}
Input:
1234
Output:
00
110
0010
00010
110010
0010010
11010010
111010010
0011010010
00011010010
110011010010
0010011010010
00010011010010
000010011010010
0000010011010010
00000010011010010
000000010011010010
0000000010011010010
00000000010011010010
000000000010011010010
0000000000010011010010
00000000000010011010010
000000000000010011010010
0000000000000010011010010
00000000000000010011010010
000000000000000010011010010
0000000000000000010011010010
00000000000000000010011010010
000000000000000000010011010010
0000000000000000000010011010010
00000000000000000000010011010010
000000000000000000000010011010010
I was wondering why it puts out the last number twice all the time... I did not expect that behaviour. I am using cygwin. I stumbled apon the problem when my output had 33 instead of 32 digits. So i made the for loop output the string every iteration. and found this problem. i am quite new with c but used java before.