This error occurs when files are deleted from the source location that the Auto Loader stream is reading from.
try:
    # Define the Auto Loader (cloudFiles) stream. `source`, `opPath`, and
    # `storageAccountInfo` are defined elsewhere in the notebook.
    raw_df = spark.readStream.format("cloudFiles") \
        .option("cloudFiles.format", "csv") \
        .option("cloudFiles.includeExistingFiles", "true") \
        .option("cloudFiles.allowOverwrites", "true") \
        .option("cloudFiles.schemaLocation",
                opPath.outputPath + "/checkpoints/" + storageAccountInfo.adlsContainerName) \
        .option("delimiter", "\t") \
        .load(source)
except Exception as f:
    print(f)
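If the deletions are expected and the missing rows can safely be skipped (neither of which is stated above, so treat this as an assumption), one possible mitigation is to enable Spark's ignoreMissingFiles setting before starting the stream, so that files removed after listing do not fail the read. This is only a sketch; check the Databricks docs for whether it applies to your Auto Loader version.

# Sketch only: skip files that disappear between listing and reading.
# Assumes the dropped data is acceptable to lose; set before the stream starts.
spark.conf.set("spark.sql.files.ignoreMissingFiles", "true")

Alternatively, restarting the stream with a fresh checkpoint/schema location clears the stale file listing, at the cost of reprocessing everything currently in the source.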