Get binary representation of any type:
I wrote the following code, which provides two functions that do what you asked for. The first function, getBinaryHostMemoryRepresentation(), fills the buffer you provide with the bits exactly as they are stored in your host's memory. That order can be little-endian, big-endian, or (unlikely) something else. For example, on a little-endian machine the 16-bit integer 0xABCD is stored as the byte sequence 0xCD 0xAB.
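If you want to see that byte order directly, here is a tiny sketch (not part of the program below) that prints the two bytes of 0xABCD as they sit in memory:

#include <stdio.h>
#include <stdint.h>

int main ()
{
    uint16_t value = 0xABCD;
    const unsigned char * bytes = (const unsigned char *) &value;

    /* Prints "CD AB" on a little-endian host and "AB CD" on a big-endian host. */
    printf ("%02X %02X\n", bytes[0], bytes[1]);

    return 0;
}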
The getBinaryRepresentation() function checks what kind of machine it is running on and fills your buffer with the bit representation a human would expect to read, i.e. big-endian, so for the example above you would get 0xABCD. To detect the host's byte order it uses the isLittleEndian() function; see how it works here on SO.
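For illustration only, the same check is sometimes written with a union; this is just a sketch of the idea (the program below uses the pointer cast instead, and the name isLittleEndianUnion is made up here):

#include <stdint.h>

/* Alternative formulation: store a two-byte value and look at its first byte. */
static int isLittleEndianUnion (void)
{
    union { uint16_t value; unsigned char bytes[2]; } probe = { 0x0001 };
    return probe.bytes[0] == 1; /* 1 on little-endian hosts, 0 on big-endian hosts */
}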
Both functions take the same arguments: char * const buffer, size_t bufSize, const void * var, size_t varSize. The first two are the destination buffer and its size. The last two are the variable you want to convert (passed as a void pointer, so it can be any data) and its size in bytes, which you can get with sizeof(). Both functions also check whether the buffer is big enough: if it is not, they return a null pointer; if it is, they return the pointer to the buffer you provided. A short usage sketch follows; the full usage can be seen in the main()
function.
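A minimal usage sketch (it assumes the headers and the two functions from the full listing below; value is just an example variable):

unsigned int value = 42u;
char buffer[sizeof (value) * CHAR_BIT + 1]; /* one char per bit plus the terminating '\0' */

if (getBinaryRepresentation (buffer, sizeof (buffer), &value, sizeof (value)))
{
    printf ("%s\n", buffer);
}
/* else: the buffer was too small and nothing was written */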
The main()
function feeds two inputs to the functions above:
- The integer 2882400235, which is 0xABCDEFEB or 0b10101011110011011110111111101011.
- The float 42.0, which is 0x42280000 or 0b01000010001010000000000000000000.
To check integers you can use the Windows calculator (in programmer mode); to check floats you can use an online IEEE 754 converter, or verify the bit pattern programmatically, as sketched below.
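For example, a small self-contained check of the float bit pattern (a sketch, assuming float is a 32-bit IEEE 754 type, as on most current platforms):

#include <stdio.h>
#include <string.h>
#include <inttypes.h>

int main ()
{
    float floating = 42.0f;
    uint32_t bits;

    /* Copy the object representation of the float into a 32-bit integer. */
    memcpy (&bits, &floating, sizeof (bits));
    printf ("0x%08" PRIX32 "\n", bits); /* expected output: 0x42280000 */

    return 0;
}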
Output of the program:
integer 2882400235:
Host memory binary representation: "11101011111011111100110110101011"
Human readable binary representation: "10101011110011011110111111101011"
single floating point 42.0:
Host memory binary representation: "00000000000000000010100001000010"
Human readable binary representation: "01000010001010000000000000000000"
Here is the code in C, since you tagged the question with C:
#include <stdio.h>
#include <stddef.h>
#include <limits.h>
#if CHAR_BIT != 8
#error "unsupported char size"
#endif
/* Returns 1 if the host stores the least significant byte first (little-endian), 0 otherwise. */
int isLittleEndian ()
{
    static const int num = 1;
    if (1 == *((char *) &num))
    {
        return 1;
    }
    else
    {
        return 0;
    }
}
/* Writes the bits of var into buffer as '0'/'1' characters, byte by byte in host memory
   order. Returns buffer on success, NULL if the buffer is too small. */
char * getBinaryHostMemoryRepresentation (char * const buffer, size_t bufSize, const void * var, size_t varSize)
{
    size_t byteIdx;
    size_t bitIdx;

    if (bufSize < varSize * CHAR_BIT + 1)
    {
        return NULL;
    }

    const unsigned char * curByte = (const unsigned char *) var;
    for (byteIdx = 0; byteIdx < varSize; ++byteIdx, ++curByte)
    {
        for (bitIdx = 0; bitIdx < CHAR_BIT; ++bitIdx)
        {
            /* Extract the bit at position bitIdx, most significant bit of the byte first. */
            unsigned char curBit = (*curByte & (1 << ((CHAR_BIT - 1) - bitIdx))) >> ((CHAR_BIT - 1) - bitIdx);

            buffer[byteIdx * CHAR_BIT + bitIdx] = curBit + '0';
        }
    }
    buffer[varSize * CHAR_BIT] = '\0';

    return buffer;
}
/* Writes the bits of var into buffer as '0'/'1' characters in big-endian (human readable)
   order, regardless of the host's byte order. Returns buffer on success, NULL if the
   buffer is too small. */
char * getBinaryRepresentation (char * const buffer, size_t bufSize, const void * var, size_t varSize)
{
    size_t byteIdx;
    size_t bitIdx;

    if (bufSize < varSize * CHAR_BIT + 1)
    {
        return NULL;
    }

    const unsigned char * curByte;
    int incByte;

    /* On a little-endian host start at the last byte and walk backwards,
       otherwise start at the first byte and walk forwards. */
    if (isLittleEndian ())
    {
        curByte = (const unsigned char *) var + (varSize - 1);
        incByte = -1;
    }
    else
    {
        curByte = (const unsigned char *) var;
        incByte = 1;
    }

    for (byteIdx = 0; byteIdx < varSize; ++byteIdx, curByte += incByte)
    {
        for (bitIdx = 0; bitIdx < CHAR_BIT; ++bitIdx)
        {
            unsigned char curBit = (*curByte & (1 << ((CHAR_BIT - 1) - bitIdx))) >> ((CHAR_BIT - 1) - bitIdx);

            buffer[byteIdx * CHAR_BIT + bitIdx] = curBit + '0';
        }
    }
    buffer[varSize * CHAR_BIT] = '\0';

    return buffer;
}
int main ()
{
    /* 2882400235 does not fit in a 32-bit signed int, so use unsigned int. */
    unsigned int integer = 2882400235u; /* 10101011110011011110111111101011 */
    char bufferMemInt[sizeof (integer) * CHAR_BIT + 1];
    char bufferBinInt[sizeof (integer) * CHAR_BIT + 1];

    printf ("integer 2882400235:\n");
    if (getBinaryHostMemoryRepresentation (bufferMemInt,
                                           sizeof (bufferMemInt),
                                           (void *) &integer,
                                           sizeof (integer)))
    {
        printf ("Host memory binary representation: \"%s\"",
                bufferMemInt);
        printf ("\n");
    }
    if (getBinaryRepresentation (bufferBinInt,
                                 sizeof (bufferBinInt),
                                 (void *) &integer,
                                 sizeof (integer)))
    {
        printf ("Human readable binary representation: \"%s\"",
                bufferBinInt);
        printf ("\n");
    }

    float floating = 42.0; /* 01000010001010000000000000000000 */
    char bufferMemFloat[sizeof (floating) * CHAR_BIT + 1];
    char bufferBinFloat[sizeof (floating) * CHAR_BIT + 1];

    printf ("\n");
    printf ("single floating point 42.0:\n");
    if (getBinaryHostMemoryRepresentation (bufferMemFloat,
                                           sizeof (bufferMemFloat),
                                           (void *) &floating,
                                           sizeof (floating)))
    {
        printf ("Host memory binary representation: \"%s\"",
                bufferMemFloat);
        printf ("\n");
    }
    if (getBinaryRepresentation (bufferBinFloat,
                                 sizeof (bufferBinFloat),
                                 (void *) &floating,
                                 sizeof (floating)))
    {
        printf ("Human readable binary representation: \"%s\"",
                bufferBinFloat);
        printf ("\n");
    }

    return 0;
}
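Since both functions only see a const void * and a size, they are not limited to int and float. As a sketch (assuming the listing above), the same call works for a double:

double d = 1.0;
char buf[sizeof (d) * CHAR_BIT + 1];

if (getBinaryRepresentation (buf, sizeof (buf), &d, sizeof (d)))
{
    printf ("%s\n", buf); /* the bits of 1.0, most significant byte first */
}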