diff --git a/pom.xml b/pom.xml
index bf5b8deee..f799a72a7 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1482,6 +1482,17 @@
+    <profile>
+      <id>webapi-databricks</id>
+      <dependencies>
+        <dependency>
+          <groupId>com.databricks</groupId>
+          <artifactId>databricks-jdbc</artifactId>
+          <version>2.6.34</version>
+          <scope>runtime</scope>
+        </dependency>
+      </dependencies>
+    </profile>
      <id>webapi-bigquery</id>
diff --git a/src/main/java/org/ohdsi/webapi/DataAccessConfig.java b/src/main/java/org/ohdsi/webapi/DataAccessConfig.java
index 439119890..8bc4e438d 100644
--- a/src/main/java/org/ohdsi/webapi/DataAccessConfig.java
+++ b/src/main/java/org/ohdsi/webapi/DataAccessConfig.java
@@ -82,7 +82,7 @@ public DataSource primaryDataSource() {
//note autocommit defaults vary across vendors. use provided @Autowired TransactionTemplate
String[] supportedDrivers;
- supportedDrivers = new String[]{"org.postgresql.Driver", "com.microsoft.sqlserver.jdbc.SQLServerDriver", "oracle.jdbc.driver.OracleDriver", "com.amazon.redshift.jdbc.Driver", "com.cloudera.impala.jdbc.Driver", "net.starschema.clouddb.jdbc.BQDriver", "org.netezza.Driver", "com.simba.googlebigquery.jdbc42.Driver", "org.apache.hive.jdbc.HiveDriver", "com.simba.spark.jdbc.Driver", "net.snowflake.client.jdbc.SnowflakeDriver"};
+ supportedDrivers = new String[]{"org.postgresql.Driver", "com.microsoft.sqlserver.jdbc.SQLServerDriver", "oracle.jdbc.driver.OracleDriver", "com.amazon.redshift.jdbc.Driver", "com.cloudera.impala.jdbc.Driver", "net.starschema.clouddb.jdbc.BQDriver", "org.netezza.Driver", "com.simba.googlebigquery.jdbc42.Driver", "org.apache.hive.jdbc.HiveDriver", "com.simba.spark.jdbc.Driver", "net.snowflake.client.jdbc.SnowflakeDriver", "com.databricks.client.jdbc.Driver"};
for (String driverName : supportedDrivers) {
try {
Class.forName(driverName);
diff --git a/src/main/java/org/ohdsi/webapi/util/CancelableJdbcTemplate.java b/src/main/java/org/ohdsi/webapi/util/CancelableJdbcTemplate.java
index 00a29b973..a7ebb3d39 100644
--- a/src/main/java/org/ohdsi/webapi/util/CancelableJdbcTemplate.java
+++ b/src/main/java/org/ohdsi/webapi/util/CancelableJdbcTemplate.java
@@ -78,7 +78,8 @@ public int[] doInStatement(Statement stmt) throws SQLException, DataAccessExcept
}
else {
for (int i = 0; i < sql.length; i++) {
- if (stmt.getConnection().getMetaData().getURL().startsWith("jdbc:spark")) {
+ String connectionString = stmt.getConnection().getMetaData().getURL();
+ if (connectionString.startsWith("jdbc:spark") || connectionString.startsWith("jdbc:databricks")) {
this.currSql = BigQuerySparkTranslate.sparkHandleInsert(sql[i], stmt.getConnection());
- if (this.currSql == "" || this.currSql.isEmpty() || this.currSql == null) {
+ if (this.currSql == null || this.currSql.isEmpty()) {
rowsAffected[i] = -1;