Slightly faster:
fn convertCharsToBytes(args: &[char]) -> Vec<u8> {
    // E.g. 17 is encoded as 0x11 and 10 as 0x0A,
    // so every byte is always encoded by exactly two hex symbols.
    assert_eq!(args.len() % 2, 0, "Hex string length must be even!");
    let mut bytes: Vec<u8> = Vec::with_capacity(args.len() / 2);
    // Reuse one allocation instead of calling to_string on every iteration.
    let mut string = String::new();
    for num in args.chunks_exact(2) {
        // Helps the compiler eliminate bounds checks in the indexing below.
        // Maybe in the future chunks_exact will return &[T; 2] instead of &[T],
        // but that is not possible before const generics.
        assert_eq!(num.len(), 2);
        string.clear();
        string.push(num[0]);
        string.push(num[1]);
        let byte = u8::from_str_radix(&string, 16);
        match byte {
            Ok(value) => bytes.push(value),
            Err(_) => panic!("Invalid hex {}!", &string),
        }
    }
    println!("{:?}", bytes);
    bytes
}
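A quick usage sketch (the input string and expected bytes here are just an illustrative example):

fn main() {
    // "2a0aff" as individual chars: 0x2A = 42, 0x0A = 10, 0xFF = 255.
    let chars: Vec<char> = "2a0aff".chars().collect();
    let bytes = convertCharsToBytes(&chars);
    assert_eq!(bytes, vec![42, 10, 255]);
}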
Or, if you do not mind the O(n) extra memory usage and pulling in a dependency:
fn convertCharsToBytes(args: &[char]) -> Vec<u8> {
    let s: String = args.iter().collect();
    let bytes = hex::decode(&s).expect("Failed to parse hex");
    println!("{:?}", bytes);
    bytes
}
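This version relies on the hex crate, so it has to be declared in Cargo.toml; a minimal entry might look like this (the version number is just an assumption, use whatever is current):

[dependencies]
hex = "0.4"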
UPD: Fixed an error in the first code snippet (I forgot that one byte is encoded by two chars, not one).
This kind of error is exactly why you should use a dependency instead of writing your own solution: the crate is much better tested.