You could use the Unicode scalar value of each character (see an ASCII table) and sum these values shifted by -96, so that a -> 1, b -> 2, and so on. In the snippets below, upper-case letters yield the same value as their lower-case counterparts, since the string is lower-cased first.
let foo = "cae"
let pattern = UnicodeScalar("a")..."z"
let charsAsNumbers = foo.lowercaseString.unicodeScalars
.filter { pattern ~= $0 }
let sumOfNumbers = charsAsNumbers
.reduce(0) { $0 + $1.value - 96 }
print(sumOfNumbers) // 9
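If you're on Swift 3 or later, the same idea carries over with the renamed APIs (lowercased() in place of lowercaseString, and an explicitly typed scalar range); this is just a sketch of the equivalent, assuming a current toolchain:
let foo = "cae"
let pattern: ClosedRange<UnicodeScalar> = "a"..."z"  // UnicodeScalar is expressible by a scalar literal
let sumOfNumbers = foo.lowercased().unicodeScalars
    .filter { pattern.contains($0) }
    .reduce(0) { $0 + $1.value - 96 }                // a -> 1, b -> 2, ...
print(sumOfNumbers) // 9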
Or, to simplify usage, wrap this up as a function or a String extension:
/* as a function */
func getNumberSum(foo: String) -> UInt32 {
    let pattern = UnicodeScalar("a")..."z"
    return foo.lowercaseString.unicodeScalars
        .filter { pattern ~= $0 }
        .reduce(0) { $0 + $1.value - 96 }
}

/* or an extension */
extension String {
    var numberSum: UInt32 {
        let pattern = UnicodeScalar("a")..."z"
        return self.lowercaseString.unicodeScalars
            .filter { pattern ~= $0 }
            .reduce(0) { $0 + $1.value - 96 }
    }
}
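A quick playground-style check of both helpers (using the Swift 2 calling convention the rest of this answer targets; in Swift 3+ the function call would need the foo: label):
print(getNumberSum("cAe")) // 9
print("FCa".numberSum)     // 10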
Example usage for your case:
import UIKit

/* example test case (using the extension) */
let textField1 = UITextField()
let textField2 = UITextField()
textField1.text = "cAe"
textField2.text = "FCa"

/* example usage */
if let textFieldText1 = textField1.text,
   let textFieldText2 = textField2.text {
    print(textFieldText1.numberSum) // 9
    print(textFieldText2.numberSum) // 10
    print(textFieldText1.numberSum
        == textFieldText2.numberSum) // false
}