With Swift 5, depending on your needs, you may choose one of the following ways to solve your problem.
#1. Using Character's isNumber property
Character has a property called isNumber. isNumber has the following declaration:
var isNumber: Bool { get }
A Boolean value indicating whether this character represents a number.
The Playground sample code below shows how to check if a character represents a number using isNumber:
let character: Character = "9"
print(character.isNumber) // true
let character: Character = "½"
print(character.isNumber) // true
let character: Character = "④"
print(character.isNumber) // true
let character: Character = "1⃣"
print(character.isNumber) // true
let character: Character = "1️⃣"
print(character.isNumber) // true
let character: Character = "৯"
print(character.isNumber) // true
let character: Character = "𝟙"
print(character.isNumber) // true
let character: Character = "F"
print(character.isNumber) // false
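As a side note, isNumber composes nicely with standard collection operations. The following is just a quick sketch of my own (the input string is arbitrary, not part of the documentation), showing how to keep only the characters of a string that represent a number:
let input = "A1½9F"
let numericCharacters = String(input.filter { $0.isNumber }) // keep only number characters
print(numericCharacters) // 1½9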
#2. Using Character's isWholeNumber property
If you want to check if a character represents a whole number, you can use Character's isWholeNumber property:
let character: Character = "9"
print(character.isWholeNumber) // true
let character: Character = "½"
print(character.isWholeNumber) // false
let character: Character = "④"
print(character.isWholeNumber) // true
let character: Character = "1⃣"
print(character.isWholeNumber) // false
let character: Character = "1️⃣"
print(character.isWholeNumber) // false
let character: Character = "৯"
print(character.isWholeNumber) // true
let character: Character = "𝟙"
print(character.isWholeNumber) // true
let character: Character = "F"
print(character.isWholeNumber) // false
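If you also need the value of the whole number, isWholeNumber pairs naturally with Character's wholeNumberValue property (which is non-nil exactly when isWholeNumber is true). A small sketch of my own, with an arbitrary character list:
let characters: [Character] = ["9", "④", "৯", "½"]
for character in characters where character.isWholeNumber {
    // wholeNumberValue gives the value represented by the whole number
    print(character, character.wholeNumberValue ?? 0)
}
// 9 9
// ④ 4
// ৯ 9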
#3. Using Unicode.Scalar.Properties's generalCategory property and Unicode.GeneralCategory.decimalNumber
The Playground sample code below shows how to check if the first Unicode scalar of a character is a decimal number using generalCategory and Unicode.GeneralCategory.decimalNumber:
let character: Character = "9"
let scalar = character.unicodeScalars.first! // DIGIT NINE
print(scalar.properties.generalCategory == .decimalNumber) // true
let character: Character = "½"
let scalar = character.unicodeScalars.first! // VULGAR FRACTION ONE HALF
print(scalar.properties.generalCategory == .decimalNumber) // false
let character: Character = "④"
let scalar = character.unicodeScalars.first! // CIRCLED DIGIT FOUR
print(scalar.properties.generalCategory == .decimalNumber) // false
let character: Character = "1⃣"
let scalar = character.unicodeScalars.first! // DIGIT ONE
print(scalar.properties.generalCategory == .decimalNumber) // true
let character: Character = "1️⃣"
let scalar = character.unicodeScalars.first! // DIGIT ONE
print(scalar.properties.generalCategory == .decimalNumber) // true
let character: Character = "৯"
let scalar = character.unicodeScalars.first! // BENGALI DIGIT NINE
print(scalar.properties.generalCategory == .decimalNumber) // true
let character: Character = "𝟙"
let scalar = character.unicodeScalars.first! // MATHEMATICAL DOUBLE-STRUCK DIGIT ONE
print(scalar.properties.generalCategory == .decimalNumber) // true
let character: Character = "F"
let scalar = character.unicodeScalars.first! // LATIN CAPITAL LETTER F
print(scalar.properties.generalCategory == .decimalNumber) // false
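To extend this check beyond a single scalar, one option is to test every Unicode scalar of a string against the Decimal_Number category. This is only a sketch of my own; the helper name isDecimalNumberOnly is hypothetical:
func isDecimalNumberOnly(_ string: String) -> Bool {
    // true when the string is non-empty and every scalar is in the Decimal_Number category
    return !string.isEmpty && string.unicodeScalars.allSatisfy {
        $0.properties.generalCategory == .decimalNumber
    }
}
print(isDecimalNumberOnly("9৯")) // true
print(isDecimalNumberOnly("9½")) // false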
#4. Using Unicode.Scalar.Properties's generalCategory property and Unicode.GeneralCategory.otherNumber
Similarly, you can check that the first Unicode scalar of a character corresponds to the category Other_Number in the Unicode Standard using generalCategory and Unicode.GeneralCategory.otherNumber:
let character: Character = "9"
let scalar = character.unicodeScalars.first!
print(scalar.properties.generalCategory == .otherNumber) // false
let character: Character = "½"
let scalar = character.unicodeScalars.first!
print(scalar.properties.generalCategory == .otherNumber) // true
let character: Character = "④"
let scalar = character.unicodeScalars.first!
print(scalar.properties.generalCategory == .otherNumber) // true
let character: Character = "1⃣"
let scalar = character.unicodeScalars.first!
print(scalar.properties.generalCategory == .otherNumber) // false
let character: Character = "1️⃣"
let scalar = character.unicodeScalars.first!
print(scalar.properties.generalCategory == .otherNumber) // false
let character: Character = "৯"
let scalar = character.unicodeScalars.first!
print(scalar.properties.generalCategory == .otherNumber) // false
let character: Character = "𝟙"
let scalar = character.unicodeScalars.first!
print(scalar.properties.generalCategory == .otherNumber) // false
let character: Character = "F"
let scalar = character.unicodeScalars.first!
print(scalar.properties.generalCategory == .otherNumber) // false
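As a small illustration of my own (the sample array is arbitrary), the same comparison can be used to pick out the characters whose first scalar falls in the Other_Number category:
let sample: [Character] = ["9", "½", "④", "৯", "F"]
let otherNumbers = sample.filter {
    $0.unicodeScalars.first!.properties.generalCategory == .otherNumber
}
print(otherNumbers) // ["½", "④"]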
#5. Using CharacterSet's decimalDigits property
As an alternative, you can import Foundation and check if CharacterSet.decimalDigits contains the first Unicode scalar of a character:
import Foundation
let character: Character = "9"
let scalar = character.unicodeScalars.first!
print(CharacterSet.decimalDigits.contains(scalar)) // true
import Foundation
let character: Character = "½"
let scalar = character.unicodeScalars.first!
print(CharacterSet.decimalDigits.contains(scalar)) // false
import Foundation
let character: Character = "④"
let scalar = character.unicodeScalars.first!
print(CharacterSet.decimalDigits.contains(scalar)) // false
import Foundation
let character: Character = "1⃣"
let scalar = character.unicodeScalars.first!
print(CharacterSet.decimalDigits.contains(scalar)) // true
import Foundation
let character: Character = "1️⃣"
let scalar = character.unicodeScalars.first!
print(CharacterSet.decimalDigits.contains(scalar)) // true
import Foundation
let character: Character = "৯"
let scalar = character.unicodeScalars.first!
print(CharacterSet.decimalDigits.contains(scalar)) // true
import Foundation
let character: Character = "𝟙"
let scalar = character.unicodeScalars.first!
print(CharacterSet.decimalDigits.contains(scalar)) // true
import Foundation
let character: Character = "F"
let scalar = character.unicodeScalars.first!
print(CharacterSet.decimalDigits.contains(scalar)) // false
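For whole strings, a common pattern is to require that every Unicode scalar belongs to CharacterSet.decimalDigits. This is a sketch of my own; the helper name containsOnlyDecimalDigits is hypothetical:
import Foundation

func containsOnlyDecimalDigits(_ string: String) -> Bool {
    // true when the string is non-empty and every scalar is a decimal digit
    return !string.isEmpty && string.unicodeScalars.allSatisfy {
        CharacterSet.decimalDigits.contains($0)
    }
}
print(containsOnlyDecimalDigits("9৯")) // true
print(containsOnlyDecimalDigits("9½")) // false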
#6. Using Unicode.Scalar.Properties's numericType property
Apple's documentation states for numericType:
For scalars that represent a number, numericType is the numeric type of the scalar. For all other scalars, this property is nil.
The sample code below shows the possible numeric types (decimal, digit or numeric) for the first scalar of a given character:
let character: Character = "9"
let scalar = character.unicodeScalars.first!
print(scalar.properties.numericType) // Optional(Swift.Unicode.NumericType.decimal)
let character: Character = "½"
let scalar = character.unicodeScalars.first!
print(scalar.properties.numericType) // Optional(Swift.Unicode.NumericType.numeric)
let character: Character = "④"
let scalar = character.unicodeScalars.first!
print(scalar.properties.numericType) // Optional(Swift.Unicode.NumericType.digit)
let character: Character = "1⃣"
let scalar = character.unicodeScalars.first!
print(scalar.properties.numericType) // Optional(Swift.Unicode.NumericType.decimal)
let character: Character = "1️⃣"
let scalar = character.unicodeScalars.first!
print(scalar.properties.numericType) // Optional(Swift.Unicode.NumericType.decimal)
let character: Character = "৯"
let scalar = character.unicodeScalars.first!
print(scalar.properties.numericType) // Optional(Swift.Unicode.NumericType.decimal)
let character: Character = "𝟙"
let scalar = character.unicodeScalars.first!
print(scalar.properties.numericType) // Optional(Swift.Unicode.NumericType.decimal)
let character: Character = "F"
let scalar = character.unicodeScalars.first!
print(scalar.properties.numericType) // nil
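If you need to branch on the result, a minimal sketch of my own (the helper name and the returned labels are not part of the API) could map numericType to a description:
func numericTypeDescription(of character: Character) -> String {
    guard let scalar = character.unicodeScalars.first,
          let numericType = scalar.properties.numericType else {
        return "not a number"
    }
    if numericType == .decimal { return "decimal" }
    if numericType == .digit { return "digit" }
    return "numeric"
}
print(numericTypeDescription(of: "9")) // decimal
print(numericTypeDescription(of: "④")) // digit
print(numericTypeDescription(of: "½")) // numeric
print(numericTypeDescription(of: "F")) // not a number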