I've created a method to generate random binary numbers with n digits, up to 256 digits. In order to continue with my program, I need to take the vector holding my binary value and convert it into a decimal integer. Here is the function that creates the random binary vector, along with my attempt at the conversion. I'm having trouble converting my binary vector to a decimal value.
// Builds a random binary number of n random digits plus a guaranteed
// trailing 1 digit (so the result is always odd and non-zero), prints the
// digits most-significant-first, and returns the number's decimal value.
//
// Parameters:
//   n - number of random leading binary digits to generate
//   m - scratch parameter kept for interface compatibility; its incoming
//       value is ignored (it is overwritten before first use)
// Returns: the decimal value of the generated binary digit sequence.
//
// NOTE(review): an int holds only ~31 binary digits; the stated goal of
// up to 256 digits needs a big-integer representation for the return
// value -- confirm the intended range with the caller.
int random_binary(int n, int m){
    std::vector<int> binary;
    for(int i = 0; i < n; i++)
    {
        m = rand() % 2;            // random digit: 0 or 1
        binary.push_back(m);
    }
    binary.push_back(1);           // trailing 1 -> value is always odd

    // Echo the digits, most significant first.
    for(std::size_t j = 0; j < binary.size(); j++)
    {
        std::cout << binary[j];
    }
    std::cout << "\n";

    // Convert binary -> decimal; binary[0] is the most significant digit.
    // BUG FIX: the original read binary[len], one element past the end of
    // the vector (valid indices are 0 .. len-1) -- undefined behavior.
    // Also replaced pow(2, k) -- a double-returning math call subject to
    // rounding -- with exact integer Horner accumulation:
    //   value = value*2 + digit.
    int decimalValue = 0;
    for(std::size_t i = 0; i < binary.size(); i++)
    {
        decimalValue = decimalValue * 2 + binary[i];
    }
    return decimalValue;
}
If anybody could help me figure this out, it would be much appreciated.