
With Xcode 10.2 and iOS 12.x we were able to extract a Decimal from a JSON string. With Xcode 11.1 and iOS 13.1 it throws an error:

Expected to decode Double but found a string/data instead.

class MyClass: Codable {
    var decimal: Decimal?
}

Then I try to parse it:

let json = "{\"decimal\":\"0.007\"}"
let data = json.data(using: .utf8)
let decoder = JSONDecoder()
decoder.nonConformingFloatDecodingStrategy = .convertFromString(positiveInfinity: "s1", negativeInfinity: "s2", nan: "s3")
do {
    let t = try decoder.decode(MyClass.self, from: data!)
} catch {
    print(error)
}

If I change the JSON string to

let json = "{\"decimal\":0.007}"

It works, but then again we are losing precision. Any ideas?

– juhamaja

6 Answers

You would need to extend KeyedDecodingContainer and add an implementation for Decimal.Type.

extension KeyedDecodingContainer {
    func decode(_ type: Decimal.Type, forKey key: K) throws -> Decimal {
        let stringValue = try decode(String.self, forKey: key)
        guard let decimalValue = Decimal(string: stringValue) else {
            let context = DecodingError.Context(codingPath: [key], debugDescription: "The key \(key) couldn't be converted to a Decimal value")
            throw DecodingError.typeMismatch(type, context)
        }
        return decimalValue
    }
}

Here is an example:

let json = """
{
  "capAmount": "123.45"
}
"""

struct Status: Decodable {
    let capAmount: Decimal

    enum CodingKeys: String, CodingKey {
        case capAmount
    }

    init(from decoder: Decoder) throws {
        let container = try decoder.container(keyedBy: CodingKeys.self)
        capAmount = try container.decode(Decimal.self, forKey: .capAmount)
    }
}

// Execute it
if let data = json.data(using: .utf8) {
    let status = try JSONDecoder().decode(Status.self, from: data)
    print(status.capAmount)
}
– bandejapaisa

struct Root: Codable {
    let decimal: Decimal
}

extension Root {
    public init(from decoder: Decoder) throws {
        let container = try decoder.container(keyedBy: CodingKeys.self)
        decimal = try Decimal(string: container.decode(String.self, forKey: .decimal)) ?? .zero
    }
}

let json = #"{"decimal":"0.007"}"# 
do {
    let root = try JSONDecoder().decode(Root.self, from: .init(json.utf8))
    print(root)
} catch {
    print(error)
}

This will print

Root(decimal: 0.007)

– Leo Dabus
  • This is getting closer. Now if I have models like this ' class SomeModel : Codable { var title: String var enabled: Bool var amount : Root // used to be Decimal before etc } ' And it's quite common that I need Decimal in several objects that I'm receiving. I have to implement quite a lot of code that used to work automatically – juhamaja Oct 10 '19 at 13:19
  • I think that the easiest would be to treat them as strings. You can have computed properties to return the decimal value – Leo Dabus Oct 10 '19 at 17:10
  • Yeps, I actually ended up writing a modified version of JSONDecoder which accepts strings as well. This was still the best answer for the actual problem so I am marking it as solved – juhamaja Oct 11 '19 at 18:29
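A minimal sketch of the string-plus-computed-property approach mentioned in the comments above (the model and key names are illustrative, not from the thread):

import Foundation

struct SomeModel: Codable {
    var title: String
    var amountString: String              // stored exactly as the JSON string

    // Computed property exposing the Decimal value; nil if the string is malformed.
    var amount: Decimal? { Decimal(string: amountString) }

    enum CodingKeys: String, CodingKey {
        case title
        case amountString = "amount"      // map the JSON key "amount" to the stored string
    }
}

let json = #"{"title":"Fee","amount":"0.007"}"#
do {
    let model = try JSONDecoder().decode(SomeModel.self, from: .init(json.utf8))
    print(model.amount ?? 0)              // 0.007
} catch {
    print(error)
}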

I just added the following code. It supports optionals as well:

extension KeyedDecodingContainer {

    func decode(_ type: Decimal.Type, forKey key: K) throws -> Decimal {
        let stringValue = try decode(String.self, forKey: key)
        guard let decimalValue = Decimal(string: stringValue) else {
            let context = DecodingError.Context(codingPath: [key], debugDescription: "The key \(key) couldn't be converted to a Decimal value")
            throw DecodingError.typeMismatch(type, context)
        }
        return decimalValue
    }

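    // Returns nil (instead of throwing) when the key is missing or the value
    // cannot be read as a String and converted with Decimal(string:).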
    func decodeIfPresent(_ type: Decimal.Type, forKey key: K) throws -> Decimal? {
        guard 
            let stringValue = try? decodeIfPresent(String.self, forKey: key),
            let decimalValue = Decimal(string: stringValue)
        else { return nil }
        return decimalValue
    }
}
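Assuming the concrete overloads above are picked over the generic decode(_:forKey:)/decodeIfPresent(_:forKey:) by the compiler-synthesized conformance (which is what lets this work without writing init(from:)), usage could look like this sketch (the Price model and its keys are illustrative):

import Foundation

// Illustrative model: the question's key plus an optional key that is absent.
struct Price: Decodable {
    let decimal: Decimal        // routed through decode(_:forKey:) above
    let discount: Decimal?      // routed through decodeIfPresent(_:forKey:) above
}

let json = #"{"decimal":"0.007"}"#
do {
    let price = try JSONDecoder().decode(Price.self, from: .init(json.utf8))
    print(price.decimal, price.discount as Any)   // 0.007 nil
} catch {
    print(error)
}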
– Reza Dehnavi

That decoding strategy has nothing to do with numbers being represented as strings. What you need to do is implement init(from:) and convert from the string there:

class MyClass: Codable {
    var decimal: Double?

    enum CodingKeys: String, CodingKey {
        case decimal
    }

    required init(from decoder: Decoder) throws {
        let container = try decoder.container(keyedBy: CodingKeys.self)
        decimal = Double(try container.decode(String.self, forKey: .decimal))
        // or, if Decimal is used:
        // decimal = Decimal(string: try container.decode(String.self, forKey: .decimal))
    }
}

Note that I am using Double instead of Decimal here to keep it simpler; a Decimal variant is sketched below.
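For reference, a sketch of that Decimal variant (hypothetical class name; otherwise the same approach, keeping Decimal to preserve precision):

import Foundation

// Hypothetical Decimal-based variant of the class above.
class MyDecimalClass: Codable {
    var decimal: Decimal?

    enum CodingKeys: String, CodingKey {
        case decimal
    }

    required init(from decoder: Decoder) throws {
        let container = try decoder.container(keyedBy: CodingKeys.self)
        // Decode the JSON string, then convert it with Decimal(string:).
        decimal = Decimal(string: try container.decode(String.self, forKey: .decimal))
    }
}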

– Joakim Danielson

I believe that a cleaner solution is to declare the value not as a string but as a number:

"test": 0.007

having a struct like this:

struct Stuff: Decodable {
    var test: Decimal
}

and then:

let decoder = JSONDecoder()
let stuff = try decoder.decode(Stuff.self, from: Data(json.utf8))

otherwise you can use this example:

https://forums.swift.org/t/parsing-decimal-values-from-json/6906/3

– Alessio Campanelli

The type should be Double, and it should also be decoded as Double when parsing. Swift will figure out the rest:

struct MyClass: Decodable {
    let decimal: Double

    // can be renamed to follow the API name
    enum CodingKeys: String, CodingKey {
        case decimal
    }
}

extension MyClass {
    init(from decoder: Decoder) throws {
        let values = try decoder.container(keyedBy: CodingKeys.self)
        decimal = try values.decode(Double.self, forKey: .decimal)
    }
}