Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions src/main/java/org/duckdb/DuckDBDriver.java
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,13 @@ public Connection connect(String url, Properties info) throws SQLException {
read_only = prop_clean.equals("1") || prop_clean.equals("true") || prop_clean.equals("yes");
}
info.put("duckdb_api", "jdbc");

// Apache Spark passes this option when a SELECT on a JDBC DataSource
// table is performed. It is an internal Spark option that is likely
// passed by mistake, so we need to ignore it to allow the connection
// to be established.
info.remove("path");

return DuckDBConnection.newConnection(url, read_only, info);
}

Expand Down
7 changes: 7 additions & 0 deletions src/test/java/org/duckdb/TestDuckDBJDBC.java
Original file line number Diff line number Diff line change
Expand Up @@ -4869,6 +4869,13 @@ public static void test_empty_typemap_allowed() throws Exception {
}
}

// Verifies that the internal Spark "path" connection option is ignored by
// the driver, so a connection can still be established when Spark (or any
// client) passes it by mistake.
public static void test_spark_path_option_ignored() throws Exception {
    Properties config = new Properties();
    config.put("path", "path/to/spark/catalog/dir");
    // try-with-resources guarantees the connection is closed even if an
    // error occurs after it has been opened (bare conn.close() would leak
    // the connection on any intervening exception).
    try (Connection conn = DriverManager.getConnection(JDBC_URL, config)) {
        // Reaching this point means the bogus "path" option did not
        // prevent the connection from being established.
    }
}

public static void main(String[] args) throws Exception {
String arg1 = args.length > 0 ? args[0] : "";
final int statusCode;
Expand Down
Loading