I could not find a way to do this, even after extensive research. The good news is that you can use the exact same neural network that powers the results in the Photos app to build your own search index, via Vision's VNClassifyImageRequest. Here is some sample code:
import Photos
import Vision

// assetManager is a PHImageManager and promise comes from a Combine Future
// in my surrounding class; adapt both to your own setup.
let fetchOptions = PHFetchOptions()
let fetchResult = PHAsset.fetchAssets(with: fetchOptions)
fetchResult.enumerateObjects { asset, index, stop in
    // A small target size is enough for classification and keeps memory usage low.
    self.assetManager.requestImage(for: asset,
                                   targetSize: CGSize(width: 150, height: 100),
                                   contentMode: .aspectFit,
                                   options: nil) { image, metadata in
        if let uiImage = image, let cgImage = uiImage.cgImage {
            // Classify the thumbnail with Vision's built-in image classifier.
            let requestHandler = VNImageRequestHandler(cgImage: cgImage)
            let request = VNClassifyImageRequest()
            try? requestHandler.perform([request])
            let results = request.results as? [VNClassificationObservation] ?? []

            // Filter the 1303 classification results and use them in your app;
            // in my case, fulfill the promise with a wrapper object.
            promise(.success(ClassifiedImage(
                imageIdentifier: asset.localIdentifier,
                classifications: results
                    .filter { $0.hasMinimumPrecision(0.9, forRecall: 0.0) }
                    .map { ($0.identifier, nil) })))
        }
    }
}
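If you want to see which labels the classifier can produce before deciding how to filter them, Vision can list its full taxonomy. A minimal sketch, assuming the first request revision (VNClassifyImageRequestRevision1); the exact revision constant depends on your SDK:

import Vision

// Ask Vision for the complete set of labels its classifier knows about.
// These are the roughly 1303 identifiers mentioned in the comment above.
if let known = try? VNClassifyImageRequest.knownClassifications(forRevision: VNClassifyImageRequestRevision1) {
    for observation in known {
        print(observation.identifier)
    }
}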
More info: https://developer.apple.com/videos/play/wwdc2019/222
Also note: when building a search index, you can use the localIdentifier of each PHAsset (available while fetching the assets) as a stable key for your index entries, since it lets you fetch the same asset again later (sketch below).
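For completeness, here is a rough sketch of how a stored localIdentifier can be turned back into a PHAsset when the user taps a search result; ClassifiedImage is just the wrapper type from my setup above:

import Photos

// Look up the original asset for a search hit using the identifier
// that was stored in the index.
func asset(for classifiedImage: ClassifiedImage) -> PHAsset? {
    let result = PHAsset.fetchAssets(withLocalIdentifiers: [classifiedImage.imageIdentifier],
                                     options: nil)
    return result.firstObject
}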