I have been getting the following error when running the COPY INTO command in Databricks Community Edition:
Error in SQL statement: UnsupportedOperationException: com.databricks.backend.daemon.data.client.DBFSV1.createAtomicIfAbsent(path: Path)

com.databricks.backend.common.rpc.DatabricksExceptions$SQLExecutionException: java.lang.UnsupportedOperationException: com.databricks.backend.daemon.data.client.DBFSV1.createAtomicIfAbsent(path: Path)
  at com.databricks.tahoe.store.EnhancedDatabricksFileSystemV1.createAtomicIfAbsent(EnhancedFileSystem.scala:324)
  at com.databricks.spark.sql.streaming.AWSCheckpointFileManager.createAtomicIfAbsent(DatabricksCheckpointFileManager.scala:159)
  at com.databricks.spark.sql.streaming.DatabricksCheckpointFileManager.createAtomicIfAbsent(DatabricksCheckpointFileManager.scala:60)
  at org.apache.spark.sql.execution.streaming.state.RocksDBFileManager.zipToDfsFile(RocksDBFileManager.scala:510)
  at org.apache.spark.sql.execution.streaming.state.RocksDBFileManager.saveCheckpointToDfs(RocksDBFileManager.scala:189)
  at com.databricks.sql.rocksdb.CloudRocksDB.$anonfun$open$5(CloudRocksDB.scala:467)
  at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
  at org.apache.spark.util.Utils$.timeTakenMs(Utils.scala:680)
  at com.databricks.sql.rocksdb.CloudRocksDB.timeTakenMs(CloudRocksDB.scala:543)
  at com.databricks.sql.rocksdb.CloudRocksDB.$anonfun$open$2(CloudRocksDB.scala:455)
  at com.databricks.logging.UsageLogging.$anonfun$recordOperation$1(UsageLogging.scala:330)
  at com.databricks.logging.UsageLogging.executeThunkAndCaptureResultTags$1(UsageLogging.scala:424)
  at com.databricks.logging.UsageLogging.$anonfun$recordOperationWithResultTags$4(UsageLogging.scala:444)
  at com.databricks.logging.Log4jUsageLoggingShim$.$anonfun$withAttributionContext$1(Log4jUsageLoggingShim.scala:33)
  at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)
  at com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:94)
  at com.databricks.logging.Log4jUsageLoggingShim$.withAttributionContext(Log4jUsageLoggingShim.scala:31)
  at com.databricks.logging.UsageLogging.withAttributionContext(UsageLogging.scala:205)
  at com.databricks.logging.UsageLogging.withAttributionContext$(UsageLogging.scala:204)
  at com.databricks.spark.util.PublicDBLogging.withAttributionContext(DatabricksSparkUsageLogger.scala:20)
  at com.databricks.logging.UsageLogging.withAttributionTags(UsageLogging.scala:240)
  at com.databricks.logging.UsageLogging.withAttributionTags$(UsageLogging.scala:225)
  at com.databricks.spark.util.PublicDBLogging.withAttributionTags(DatabricksSparkUsageLogger.scala:20)
  at com.databricks.logging.UsageLogging.recordOperationWithResultTags(UsageLogging.scala:419)
  at com.databricks.logging.UsageLogging.recordOperationWithResultTags$(UsageLogging.scala:339)
  at com.databricks.spark.util.PublicDBLogging.recordOperationWithResultTags(DatabricksSparkUsageLogger.scala:20)
  at com.databricks.logging.UsageLogging.recordOperation(UsageLogging.scala:330)
  at com.databricks.logging.UsageLogging.recordOperation$(UsageLogging.scala:302)
  at com.databricks.spark.util.PublicDBLogging.recordOperation(DatabricksSparkUsageLogger.scala:20)
  at com.databricks.spark.util.PublicDBLogging.recordOperation0(DatabricksSparkUsageLogger.scala:57)
  at com.databricks.spark.util.DatabricksSparkUsageLogger.recordOperation(DatabricksSparkUsageLogger.scala:137)
  at com.databricks.spark.util.UsageLogger.recordOperation(UsageLogger.scala:73)
  at com.databricks.spark.util.UsageLogger.recordOperation$(UsageLogger.scala:60)
  at com.databricks.spark.util.DatabricksSparkUsageLogger.recordOperation(DatabricksSparkUsageLogger.scala:98)
  at com.databricks.spark.util.UsageLogging.recordOperation(UsageLogger.scala:431)
  at com.databricks.spark.util.UsageLogging.recordOperation$(UsageLogger.scala:410)
  at com.databricks.sql.rocksdb.CloudRocksDB.recordOperation(CloudRocksDB.scala:55)
  at com.databricks.sql.rocksdb.CloudRocksDB.recordRocksDBOperation(CloudRocksDB.scala:562)
  at com.databricks.sql.rocksdb.CloudRocksDB.$anonfun$open$1(CloudRocksDB.scala:447)
  at com.databricks.backend.daemon.driver.ProgressReporter$.withStatusCode(ProgressReporter.scala:377)
  at com.databricks.backend.daemon.driver.ProgressReporter$.withStatusCode(ProgressReporter.scala:363)
  at com.databricks.spark.util.SparkDatabricksProgressReporter$.withStatusCode(ProgressReporter.scala:34)
  at com.databricks.sql.rocksdb.CloudRocksDB.open(CloudRocksDB.scala:447)
  at com.databricks.sql.rocksdb.CloudRocksDB.<init>(CloudRocksDB.scala:83)
  at com.databricks.sql.rocksdb.CloudRocksDB$.open(CloudRocksDB.scala:615)
  at com.databricks.sql.transaction.tahoe.commands.copy.CopyIntoRocksDB.<init>(CopyIntoRocksDB.scala:45)
  at com.databricks.sql.transaction.tahoe.commands.copy.CopyIntoRocksDB$.apply(CopyIntoRocksDB.scala:39)
  at com.databricks.sql.transaction.tahoe.commands.copy.CopyIntoRocksDB$.withDeduplicator(CopyIntoRocksDB.scala:87)
  at com.databricks.sql.transaction.tahoe.commands.copy.CopyIntoCommandEdge.processExactlyOnce(CopyIntoCommandEdge.scala:271)
  at com.databricks.sql.transaction.tahoe.commands.copy.CopyIntoCommandEdge.runWriteMode(CopyIntoCommandEdge.scala:312)
  at com.databricks.sql.transaction.tahoe.commands.copy.CopyIntoCommandEdge.$anonfun$run$1(CopyIntoCommandEdge.scala:301)
  at com.databricks.sql.acl.CheckPermissions$.trusted(CheckPermissions.scala:1585)
  at com.databricks.sql.transaction.tahoe.commands.copy.CopyIntoCommandEdge.run(CopyIntoCommandEdge.scala:297)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:80)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:78)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:89)
  at org.apache.spark.sql.execution.QueryExecution$$anonfun$$nestedInanonfun$eagerlyExecuteCommands$1$1.$anonfun$applyOrElse$1(QueryExecution.scala:160)
  at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withCustomExecutionEnv$8(SQLExecution.scala:239)
  at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:386)
  at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withCustomExecutionEnv$1(SQLExecution.scala:186)
  at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:968)
  at org.apache.spark.sql.execution.SQLExecution$.withCustomExecutionEnv(SQLExecution.scala:141)
  at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:336)
  at org.apache.spark.sql.execution.QueryExecution$$anonfun$$nestedInanonfun$eagerlyExecuteCommands$1$1.applyOrElse(QueryExecution.scala:160)
  at org.apache.spark.sql.execution.QueryExecution$$anonfun$$nestedInanonfun$eagerlyExecuteCommands$1$1.applyOrElse(QueryExecution.scala:156)
  at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:575)
  at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:167)
  at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:575)
  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)
  at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:268)
  at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:264)
  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
  at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:551)
  at org.apache.spark.sql.execution.QueryExecution.$anonfun$eagerlyExecuteCommands$1(QueryExecution.scala:156)
  at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.allowInvokingTransformsInAnalyzer(AnalysisHelper.scala:324)
  at org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:156)
  at org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:141)
  at org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:132)
  at org.apache.spark.sql.Dataset.<init>(Dataset.scala:225)
  at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:104)
  at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:968)
  at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:101)
  at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:803)
  at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:968)
  at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:798)
  at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:695)
  at com.databricks.backend.daemon.driver.SQLDriverLocal.$anonfun$executeSql$1(SQLDriverLocal.scala:91)
  at scala.collection.immutable.List.map(List.scala:293)
  at com.databricks.backend.daemon.driver.SQLDriverLocal.executeSql(SQLDriverLocal.scala:37)
  at com.databricks.backend.daemon.driver.SQLDriverLocal.repl(SQLDriverLocal.scala:145)
  at com.databricks.backend.daemon.driver.DriverLocal.$anonfun$execute$11(DriverLocal.scala:605)
  at com.databricks.logging.Log4jUsageLoggingShim$.$anonfun$withAttributionContext$1(Log4jUsageLoggingShim.scala:33)
  at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)
  at com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:94)
  at com.databricks.logging.Log4jUsageLoggingShim$.withAttributionContext(Log4jUsageLoggingShim.scala:31)
  at com.databricks.logging.UsageLogging.withAttributionContext(UsageLogging.scala:205)
  at com.databricks.logging.UsageLogging.withAttributionContext$(UsageLogging.scala:204)
  at com.databricks.backend.daemon.driver.DriverLocal.withAttributionContext(DriverLocal.scala:60)
  at com.databricks.logging.UsageLogging.withAttributionTags(UsageLogging.scala:240)
  at com.databricks.logging.UsageLogging.withAttributionTags$(UsageLogging.scala:225)
  at com.databricks.backend.daemon.driver.DriverLocal.withAttributionTags(DriverLocal.scala:60)
  at com.databricks.backend.daemon.driver.DriverLocal.execute(DriverLocal.scala:582)
  at com.databricks.backend.daemon.driver.DriverWrapper.$anonfun$tryExecutingCommand$1(DriverWrapper.scala:615)
  at scala.util.Try$.apply(Try.scala:213)
  at com.databricks.backend.daemon.driver.DriverWrapper.tryExecutingCommand(DriverWrapper.scala:607)
  at com.databricks.backend.daemon.driver.DriverWrapper.executeCommandAndGetError(DriverWrapper.scala:526)
  at com.databricks.backend.daemon.driver.DriverWrapper.executeCommand(DriverWrapper.scala:561)
  at com.databricks.backend.daemon.driver.DriverWrapper.runInnerLoop(DriverWrapper.scala:431)
  at com.databricks.backend.daemon.driver.DriverWrapper.runInner(DriverWrapper.scala:374)
  at com.databricks.backend.daemon.driver.DriverWrapper.run(DriverWrapper.scala:225)
  at java.lang.Thread.run(Thread.java:748)
  at com.databricks.backend.daemon.driver.SQLDriverLocal.executeSql(SQLDriverLocal.scala:130)
  at com.databricks.backend.daemon.driver.SQLDriverLocal.repl(SQLDriverLocal.scala:145)
  at com.databricks.backend.daemon.driver.DriverLocal.$anonfun$execute$11(DriverLocal.scala:605)
  at com.databricks.logging.Log4jUsageLoggingShim$.$anonfun$withAttributionContext$1(Log4jUsageLoggingShim.scala:33)
  at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)
  at com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:94)
  at com.databricks.logging.Log4jUsageLoggingShim$.withAttributionContext(Log4jUsageLoggingShim.scala:31)
  at com.databricks.logging.UsageLogging.withAttributionContext(UsageLogging.scala:205)
  at com.databricks.logging.UsageLogging.withAttributionContext$(UsageLogging.scala:204)
  at com.databricks.backend.daemon.driver.DriverLocal.withAttributionContext(DriverLocal.scala:60)
  at com.databricks.logging.UsageLogging.withAttributionTags(UsageLogging.scala:240)
  at com.databricks.logging.UsageLogging.withAttributionTags$(UsageLogging.scala:225)
  at com.databricks.backend.daemon.driver.DriverLocal.withAttributionTags(DriverLocal.scala:60)
  at com.databricks.backend.daemon.driver.DriverLocal.execute(DriverLocal.scala:582)
  at com.databricks.backend.daemon.driver.DriverWrapper.$anonfun$tryExecutingCommand$1(DriverWrapper.scala:615)
  at scala.util.Try$.apply(Try.scala:213)
  at com.databricks.backend.daemon.driver.DriverWrapper.tryExecutingCommand(DriverWrapper.scala:607)
  at com.databricks.backend.daemon.driver.DriverWrapper.executeCommandAndGetError(DriverWrapper.scala:526)
  at com.databricks.backend.daemon.driver.DriverWrapper.executeCommand(DriverWrapper.scala:561)
  at com.databricks.backend.daemon.driver.DriverWrapper.runInnerLoop(DriverWrapper.scala:431)
  at com.databricks.backend.daemon.driver.DriverWrapper.runInner(DriverWrapper.scala:374)
  at com.databricks.backend.daemon.driver.DriverWrapper.run(DriverWrapper.scala:225)
  at java.lang.Thread.run(Thread.java:748)
I have already created a table with the matching schema, and I am now trying to copy the data from the Data Lake into the Delta table, but the COPY INTO command keeps failing with the error above. Does anybody know a solution?
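For reference, the target table was created with a statement along these lines (the column names here are placeholders, not my real schema):

%sql
CREATE TABLE IF NOT EXISTS bronze_table (
  id BIGINT,            -- placeholder column
  event_time TIMESTAMP, -- placeholder column
  payload STRING        -- placeholder column
) USING DELTA;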
The command I am executing is:
%sql
COPY INTO bronze_table
FROM '/mnt/streamingdatastorage/datastorage/Bronze/Table1'
FILEFORMAT = DELTA
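In case the options matter, I also know the fuller form of the syntax from the COPY INTO documentation; the option shown here is illustrative, not something my current command uses:

%sql
COPY INTO bronze_table
FROM '/mnt/streamingdatastorage/datastorage/Bronze/Table1'
FILEFORMAT = DELTA
COPY_OPTIONS ('force' = 'true') -- illustrative: re-copies files even if previously loaded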