13

I am using the Xcode 8 beta with Swift 3.0. I tried to encode a simple object derived from NSObject, but I cannot decode the Int (or NSInteger) property. Encoding works fine.


Code:

class Model : NSObject, NSCoding {
    var seq: NSNumber?
    var seq2: Int? // the problem is with seq2; NSInteger doesn't work either
    var id: String?
    var value: String?

    override init() {
        super.init()
    }

    required init?(coder aDecoder: NSCoder){
        self.seq = aDecoder.decodeObject(forKey: "seq") as? NSNumber
        self.seq2 = aDecoder.decodeInteger(forKey: "seq2")
        self.id = aDecoder.decodeObject(forKey: "id") as? String
        self.value = aDecoder.decodeObject(forKey: "value") as? String
    }

    func encode(with aCoder: NSCoder){
        aCoder.encode(seq, forKey: "seq")
        aCoder.encode(seq2, forKey: "seq2")
        aCoder.encode(id, forKey: "id")
        aCoder.encode(value, forKey: "value")
    }
}
wqyfavor

3 Answers

16

The problem is that seq2 is not an Int, but rather an Int? optional, which cannot be represented as an Objective-C integer.

You can use decodeObject:

required init?(coder aDecoder: NSCoder){
    self.seq = aDecoder.decodeObject(forKey: "seq") as? NSNumber
    self.seq2 = aDecoder.decodeObject(forKey: "seq2") as? Int
    self.id = aDecoder.decodeObject(forKey: "id") as? String
    self.value = aDecoder.decodeObject(forKey: "value") as? String

    super.init()
}

or change it so it is not optional:

class Model : NSObject, NSCoding {
    var seq: NSNumber?
    var seq2: Int
    var id: String?
    var value: String?

    init(seq: NSNumber, seq2: Int, id: String, value: String) {
        self.seq = seq
        self.seq2 = seq2
        self.id = id
        self.value = value

        super.init()
    }

    required init?(coder aDecoder: NSCoder) {
        self.seq = aDecoder.decodeObject(forKey: "seq") as? NSNumber
        self.seq2 = aDecoder.decodeInteger(forKey: "seq2")
        self.id = aDecoder.decodeObject(forKey: "id") as? String
        self.value = aDecoder.decodeObject(forKey: "value") as? String

        super.init()
    }

    func encode(with aCoder: NSCoder) {
        aCoder.encode(seq, forKey: "seq")
        aCoder.encode(seq2, forKey: "seq2")
        aCoder.encode(id, forKey: "id")
        aCoder.encode(value, forKey: "value")
    }

    override var description: String { return "<Model; seq=\(seq); seq2=\(seq2); id=\(id); value=\(value)>" }
}
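
To verify the fix, here is a quick archive/unarchive round trip (a minimal sketch using NSKeyedArchiver with in-memory Data; the initializer and keys come from the class above, and the values are purely illustrative):

import Foundation

let original = Model(seq: NSNumber(value: 1), seq2: 42, id: "abc", value: "hello")
let data = NSKeyedArchiver.archivedData(withRootObject: original)

// seq2 round-trips as a plain Int because it was encoded with the Int overload
if let restored = NSKeyedUnarchiver.unarchiveObject(with: data) as? Model {
    print(restored.seq2) // 42
}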
Rob
    What's the safe way to decode values in Swift 3? For instance to guard against the scenario where you use `decodeInteger` and `seq2` is not an Integer. This was handled in Swift 2 by using a `let` statement for `decodeObject`, but `decodeInteger` does not return an optional. – Crashalot Sep 23 '16 at 22:15
  • `aDecoder.containsValue(forKey:)` will at least check if there is _some_ kind of value for a given key. – idrougge Sep 02 '19 at 12:55
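
To illustrate the containsValue(forKey:) suggestion from the last comment, a guarded decode could look like this (a sketch only, built on the non-optional seq2 variant and the same keys as above):

required init?(coder aDecoder: NSCoder) {
    // Fail initialization when the archive has no value for "seq2" at all,
    // instead of silently receiving 0 from decodeInteger
    guard aDecoder.containsValue(forKey: "seq2") else { return nil }

    self.seq = aDecoder.decodeObject(forKey: "seq") as? NSNumber
    self.seq2 = aDecoder.decodeInteger(forKey: "seq2")
    self.id = aDecoder.decodeObject(forKey: "id") as? String
    self.value = aDecoder.decodeObject(forKey: "value") as? String

    super.init()
}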
1

Use the integer-specific encode overload to encode integers instead. (In Swift 3, encodeInteger(_:forKey:) is imported as encode(_:forKey:) with an Int parameter, so this requires seq2 to be a non-optional Int.)

func encode(with aCoder: NSCoder) {
    ...
    aCoder.encode(seq2, forKey: "seq2") // resolves to the Int overload when seq2 is Int
}
Alessandro Ornano
0

Take care with nil values when encoding optional properties:

func encode(with aCoder: NSCoder) {
    aCoder.encode(seq, forKey: "seq")

    if let second_seq = self.seq2 {
        aCoder.encode(second_seq, forKey: "seq2")
    }
    aCoder.encode(id, forKey: "id")
    aCoder.encode(value, forKey: "value")
}

On decode, seq2 will be nil if the key is not present in the archive.
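
The matching decode then reads seq2 back as an optional (a minimal sketch, assuming the same key names as above):

required init?(coder aDecoder: NSCoder) {
    self.seq = aDecoder.decodeObject(forKey: "seq") as? NSNumber
    // decodeObject returns nil when "seq2" was never encoded, so seq2 simply stays nil
    self.seq2 = aDecoder.decodeObject(forKey: "seq2") as? Int
    self.id = aDecoder.decodeObject(forKey: "id") as? String
    self.value = aDecoder.decodeObject(forKey: "value") as? String

    super.init()
}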

Adolfo