Payload = "ABCDEFGHIJKLMNOPQR";
baseToConvert = eBase.HEXA;
// Requires: using System; using System.Text; using System.Security.Cryptography;
public static string SHA256HashToBaseMessage(string payload, eBase baseToConvert)
{
    StringBuilder sb;
    using (SHA256 sHA256 = SHA256.Create("SHA256"))
    {
        // Hash the UTF-8 bytes of the payload.
        byte[] hash = sHA256.ComputeHash(Encoding.UTF8.GetBytes(payload));
        sb = new StringBuilder();
        for (int i = 0; i < hash.Length; i++)
        {
            // Convert each byte to a string in the requested base.
            sb.Append(Convert.ToString((hash[i] & 0xff) + 0x100, (int)baseToConvert).Substring(1));
        }
    }
    return sb.ToString();
}

public enum eBase
{
    BINARY = 2,
    OCTAL = 8,
    DECA = 10,
    HEXA = 16
}
Above is my code for generating the string representation of the hash bytes in the chosen base. The conversion line comes from the documentation. My question is about what happens when I use this simpler version of that line instead,
sb.Append(Convert.ToString(hash[i], (int)baseToConvert));
the output is:
38d3bed3499804995bda3123885d4f3eab0c19efc2dd03df5be147b359ceed
But when I use the code exactly as in the documentation,
sb.Append(Convert.ToString((hash[i] & 0xff) + 0x100, (int)baseToConvert).Substring(1));
then for the same payload the output is as follows (the output from my original, simpler version is shown underneath for comparison, with spaces inserted where characters are missing):
38d3bed34909804995bda3123885d4f3eab0c19efc2dd03df5be0147b359ceed
38d3bed349 9804995bda3123885d4f3eab0c19efc2dd03df5be 147b359ceed -- original
Can anybody explain why there is a difference in the output? What is (hash[i] & 0xff) + 0x100 actually doing?
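To narrow it down, I also compared the two conversions on a single byte value. The snippet below is just that experiment; the value 0x09 is one I picked because it appears to match one of the positions where the two outputs above diverge:

using System;

class SingleByteComparison
{
    static void Main()
    {
        byte b = 0x09; // sample value; appears to be one of the bytes where my outputs differ

        // Simpler version: convert the byte directly.
        string simple = Convert.ToString(b, 16);

        // Documentation version: add 0x100 before converting, then drop the first character.
        string padded = Convert.ToString((b & 0xff) + 0x100, 16).Substring(1);

        Console.WriteLine(simple); // 9
        Console.WriteLine(padded); // 09
    }
}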