Using ASCII values, you can shorten it to this (it works with lowercase input, too):
func convertStringToInt(characterText: String) -> Int? {
    guard let aValue = "A".unicodeScalars.first?.value,
          let zValue = "Z".unicodeScalars.first?.value,
          let characterValue = characterText.uppercased().unicodeScalars.first?.value,
          // the next condition tests whether the input value is between A and Z
          characterValue >= aValue && characterValue <= zValue else {
        return nil // not a letter A–Z
    }
    return Int(characterValue) - Int(aValue)
}
print("Value for A: \(convertStringToInt(characterText: "A"))")
print("Value for G: \(convertStringToInt(characterText: "G"))")
print("Value for Z: \(convertStringToInt(characterText: "Z"))")
print("Value for z: \(convertStringToInt(characterText: "z"))")
print("Value for ^: \(convertStringToInt(characterText: "^"))")
Prints:
Value for A: Optional(0)
Value for G: Optional(6)
Value for Z: Optional(25)
Value for z: Optional(25)
Value for ^: nil
Based on this question.
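If you can target Swift 5 or later, Character.asciiValue gives you the same check without touching unicodeScalars. A minimal sketch, assuming single-character input (the name convertCharacterToInt is my own):

func convertCharacterToInt(_ character: Character) -> Int? {
    // asciiValue is nil for non-ASCII input, so the guard also rejects
    // characters such as "é" or "✓"
    guard let ascii = character.uppercased().first?.asciiValue,
          (UInt8(ascii: "A")...UInt8(ascii: "Z")).contains(ascii) else {
        return nil // not a letter A–Z
    }
    return Int(ascii - UInt8(ascii: "A"))
}

print(convertCharacterToInt("g") as Any) // Optional(6)
print(convertCharacterToInt("^") as Any) // nil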
Or, if you want to play around with array indices:
func convertStringToInt(characterText: String) -> Int {
    let array = ["A","B","C","D","E","F","G","H","I","J","K","L","M","N","O","P","Q","R","S","T","U","V","W","X","Y","Z"]
    return array.firstIndex(of: characterText.uppercased()) ?? -1 // default value for text that is not found
}
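For completeness, the same call-site check against this version (expected output in comments):

print(convertStringToInt(characterText: "G")) // 6
print(convertStringToInt(characterText: "z")) // 25
print(convertStringToInt(characterText: "^")) // -1

Note that this variant signals failure with the sentinel -1 instead of nil; returning Int? here as well would keep the two versions consistent.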