Skip to content

Commit

Permalink
Merge branch 'master' into SNOW-1812949-add-get-object-and-bytes-support
Browse files Browse the repository at this point in the history
  • Loading branch information
sfc-gh-mkubik authored Nov 26, 2024
2 parents 496f99c + ecccc36 commit 9e97ba7
Show file tree
Hide file tree
Showing 20 changed files with 545 additions and 290 deletions.
10 changes: 5 additions & 5 deletions .github/workflows/build-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ jobs:
name: Build
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v1
- uses: actions/checkout@v4
- name: Build
shell: bash
env:
Expand All @@ -53,7 +53,7 @@ jobs:
java-version: ${{ matrix.runConfig.javaVersion }}
distribution: 'temurin'
cache: maven
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: '3.7'
architecture: 'x64'
Expand Down Expand Up @@ -83,7 +83,7 @@ jobs:
java-version: ${{ matrix.runConfig.javaVersion }}
distribution: 'temurin'
cache: maven
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: '3.7'
- name: Install Homebrew Bash
Expand All @@ -110,7 +110,7 @@ jobs:
category: ['TestCategoryResultSet,TestCategoryStatement,TestCategoryLoader', 'TestCategoryOthers', 'TestCategoryArrow,TestCategoryConnection,TestCategoryCore,TestCategoryDiagnostic', 'TestCategoryFips']
additionalMavenProfile: ['', '-Dthin-jar']
steps:
- uses: actions/checkout@v1
- uses: actions/checkout@v4
- name: Tests
shell: bash
env:
Expand All @@ -132,7 +132,7 @@ jobs:
category: ['TestCategoryOthers', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryCore,TestCategoryLoader,TestCategoryResultSet']
is_old_driver: ['true']
steps:
- uses: actions/checkout@v1
- uses: actions/checkout@v4
- name: Tests
shell: bash
env:
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/check-style.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ jobs:
name: Check Style
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v1
- uses: actions/checkout@v4
- name: Check Style
shell: bash
run: mvn clean validate --batch-mode --show-version -P check-style
2 changes: 1 addition & 1 deletion .github/workflows/jira_close.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
uses: actions/checkout@v4
with:
repository: snowflakedb/gh-actions
ref: jira_v1
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/jira_issue.yml
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ jobs:
if: ((github.event_name == 'issue_comment' && github.event.comment.body == 'recreate jira' && github.event.comment.user.login == 'sfc-gh-mkeller') || (github.event_name == 'issues' && github.event.pull_request.user.login != 'whitesource-for-github-com[bot]'))
steps:
- name: Checkout
uses: actions/checkout@v2
uses: actions/checkout@v4
with:
repository: snowflakedb/gh-actions
ref: jira_v1
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/snyk-issue.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: checkout action
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
repository: snowflakedb/whitesource-actions
token: ${{ secrets.WHITESOURCE_ACTION_TOKEN }}
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/snyk-pr.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,13 +15,13 @@ jobs:
if: ${{ github.event.pull_request.user.login == 'sfc-gh-snyk-sca-sa' }}
steps:
- name: checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.ref }}
fetch-depth: 0

- name: checkout action
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
repository: snowflakedb/whitesource-actions
token: ${{ secrets.WHITESOURCE_ACTION_TOKEN }}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1111,8 +1111,16 @@ static StageInfo getStageInfo(JsonNode jsonNode, SFSession session) throws Snowf
// specifically
// for FIPS or VPCE S3 endpoint. SNOW-652696
String endPoint = null;
if ("AZURE".equalsIgnoreCase(stageLocationType) || "S3".equalsIgnoreCase(stageLocationType)) {
if ("AZURE".equalsIgnoreCase(stageLocationType)
|| "S3".equalsIgnoreCase(stageLocationType)
|| "GCS".equalsIgnoreCase(stageLocationType)) {
endPoint = jsonNode.path("data").path("stageInfo").findValue("endPoint").asText();
if ("GCS".equalsIgnoreCase(stageLocationType)
&& endPoint != null
&& (endPoint.trim().isEmpty() || "null".equals(endPoint))) {
// setting to null to preserve previous behaviour for GCS
endPoint = null;
}
}

String stgAcct = null;
Expand Down Expand Up @@ -1179,6 +1187,8 @@ static StageInfo getStageInfo(JsonNode jsonNode, SFSession session) throws Snowf
}
}

setupUseRegionalUrl(jsonNode, stageInfo);

if (stageInfo.getStageType() == StageInfo.StageType.S3) {
if (session == null) {
// This node's value is set if PUT is used without Session. (For Snowpipe Streaming, we rely
Expand All @@ -1200,6 +1210,18 @@ static StageInfo getStageInfo(JsonNode jsonNode, SFSession session) throws Snowf
return stageInfo;
}

/**
 * Copies the server-provided {@code data.stageInfo.useRegionalUrl} flag onto the stage info.
 *
 * <p>Only GCS and S3 stages honour regional URLs; for all other stage types the node is ignored.
 * When the node is absent from the response the stage keeps its current value.
 *
 * @param jsonNode the GS response payload
 * @param stageInfo the stage being populated from the response
 */
private static void setupUseRegionalUrl(JsonNode jsonNode, StageInfo stageInfo) {
  StageInfo.StageType type = stageInfo.getStageType();
  boolean regionalUrlApplies =
      type == StageInfo.StageType.GCS || type == StageInfo.StageType.S3;
  if (regionalUrlApplies) {
    JsonNode flagNode = jsonNode.path("data").path("stageInfo").path("useRegionalUrl");
    if (!flagNode.isMissingNode()) {
      stageInfo.setUseRegionalUrl(flagNode.asBoolean(false));
    }
  }
}

/**
* A helper method to verify if the local file path from GS matches what's parsed locally. This is
* for security purpose as documented in SNOW-15153.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,11 @@
import com.google.api.gax.rpc.FixedHeaderProvider;
import com.google.auth.oauth2.AccessToken;
import com.google.auth.oauth2.GoogleCredentials;
import com.google.cloud.NoCredentials;
import com.google.cloud.storage.Blob;
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.BlobInfo;
import com.google.cloud.storage.HttpStorageOptions;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.Storage.BlobListOption;
import com.google.cloud.storage.StorageException;
Expand Down Expand Up @@ -1312,6 +1314,8 @@ private void setupGCSClient(
if (accessToken != null) {
// We are authenticated with an oauth access token.
StorageOptions.Builder builder = StorageOptions.newBuilder();
stage.gcsCustomEndpoint().ifPresent(builder::setHost);

if (areDisabledGcsDefaultCredentials(session)) {
logger.debug(
"Adding explicit credentials to avoid default credential lookup by the GCS client");
Expand All @@ -1329,7 +1333,10 @@ private void setupGCSClient(
.getService();
} else {
// Use anonymous authentication.
this.gcsClient = StorageOptions.getUnauthenticatedInstance().getService();
HttpStorageOptions.Builder builder =
HttpStorageOptions.newBuilder().setCredentials(NoCredentials.getInstance());
stage.gcsCustomEndpoint().ifPresent(builder::setHost);
this.gcsClient = builder.build().getService();
}

if (encMat != null) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,17 @@

import java.io.Serializable;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.Properties;
import net.snowflake.client.core.SnowflakeJdbcInternalApi;

/** Encapsulates all the required stage properties used by GET/PUT for Azure and S3 stages */
/** Encapsulates all the required stage properties used by GET/PUT for Azure, GCS and S3 stages */
public class StageInfo implements Serializable {

// me-central2 GCS region always use regional urls
// TODO SNOW-1818804: the value is hardcoded now, but it should be server driven
private static final String GCS_REGION_ME_CENTRAL_2 = "me-central2";

public enum StageType {
S3,
AZURE,
Expand All @@ -17,12 +24,18 @@ public enum StageType {
private StageType stageType; // The stage type
private String location; // The container or bucket
private Map<?, ?> credentials; // the credentials required for the stage
private String region; // AWS/S3/GCS region (S3/GCS only)
private String endPoint; // The Azure Storage endpoint (Azure only)
private String region; // S3/GCS region
// An endpoint (Azure, AWS FIPS and GCS custom endpoint override)
private String endPoint;
private String storageAccount; // The Azure Storage account (Azure only)
private String presignedUrl; // GCS gives us back a presigned URL instead of a cred
private boolean isClientSideEncrypted; // whether to encrypt/decrypt files on the stage
private boolean useS3RegionalUrl; // whether to use s3 regional URL (AWS Only)
// whether to use s3 regional URL (AWS Only)
// TODO SNOW-1818804: this field will be deprecated when the server returns {@link
// #useRegionalUrl}
private boolean useS3RegionalUrl;
// whether to use regional URL (AWS and GCS only)
private boolean useRegionalUrl;
private Properties proxyProperties;

/*
Expand Down Expand Up @@ -166,16 +179,39 @@ public boolean getUseS3RegionalUrl() {
return useS3RegionalUrl;
}

/** Sets whether the server-driven regional URL should be used for this stage. */
@SnowflakeJdbcInternalApi
public void setUseRegionalUrl(boolean useRegionalUrl) {
  this.useRegionalUrl = useRegionalUrl;
}

/** Returns whether the server-driven regional URL should be used for this stage. */
@SnowflakeJdbcInternalApi
public boolean getUseRegionalUrl() {
  return useRegionalUrl;
}

/** Returns {@code true} when the argument is non-null and non-empty. */
private static boolean isSpecified(String arg) {
  // equalsIgnoreCase("") was equivalent to isEmpty(); case-insensitivity is meaningless
  // for the empty string, so use the direct check
  return !(arg == null || arg.isEmpty());
}

/** Sets the proxy properties to use when connecting to the stage. */
public void setProxyProperties(Properties proxyProperties) {
  this.proxyProperties = proxyProperties;
}

/** Returns the proxy properties to use when connecting to the stage. */
public Properties getProxyProperties() {
  return proxyProperties;
}

/**
 * Returns the custom endpoint host to use for a GCS stage, if any.
 *
 * <p>Resolution order:
 *
 * <ol>
 *   <li>non-GCS stages never have a GCS custom endpoint;
 *   <li>an explicit, non-blank endpoint sent by the server wins (the literal string {@code
 *       "null"} is treated as absent);
 *   <li>otherwise a regional endpoint is derived from the region when the stage is in
 *       me-central2 (which always requires regional URLs) or the server requested regional URLs.
 * </ol>
 *
 * @return the endpoint host, or {@link Optional#empty()} when the default endpoint should be used
 */
@SnowflakeJdbcInternalApi
public Optional<String> gcsCustomEndpoint() {
  if (stageType != StageType.GCS) {
    return Optional.empty();
  }
  if (endPoint != null && !endPoint.trim().isEmpty() && !"null".equals(endPoint)) {
    return Optional.of(endPoint);
  }
  // Guard against a null region: without it no regional host can be built (previously this
  // would NPE when useRegionalUrl was set but no region was sent — TODO confirm the server
  // always sends a region alongside useRegionalUrl).
  if (region != null && (GCS_REGION_ME_CENTRAL_2.equalsIgnoreCase(region) || useRegionalUrl)) {
    // Locale.ROOT avoids locale-sensitive lowercasing (e.g. Turkish dotless i) corrupting
    // the host name
    return Optional.of(
        String.format("storage.%s.rep.googleapis.com", region.toLowerCase(Locale.ROOT)));
  }
  return Optional.empty();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -59,8 +59,9 @@ public SnowflakeStorageClient createClient(
switch (stage.getStageType()) {
case S3:
boolean useS3RegionalUrl =
(stage.getUseS3RegionalUrl()
|| (session != null && session.getUseRegionalS3EndpointsForPresignedURL()));
stage.getUseS3RegionalUrl()
|| stage.getUseRegionalUrl()
|| session != null && session.getUseRegionalS3EndpointsForPresignedURL();
return createS3Client(
stage.getCredentials(),
parallel,
Expand Down
30 changes: 9 additions & 21 deletions src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java
Original file line number Diff line number Diff line change
Expand Up @@ -274,7 +274,6 @@ public void testAsyncQueryOpenAndCloseConnection()
throws SQLException, IOException, InterruptedException {
// open connection and run asynchronous query
String queryID = null;
QueryStatusV2 statusV2 = null;
try (Connection con = getConnection();
Statement statement = con.createStatement();
ResultSet rs1 =
Expand All @@ -288,7 +287,7 @@ public void testAsyncQueryOpenAndCloseConnection()
await()
.atMost(Duration.ofSeconds(5))
.until(() -> sfrs.getStatusV2().getStatus(), not(equalTo(QueryStatus.NO_DATA)));
statusV2 = sfrs.getStatusV2();
QueryStatusV2 statusV2 = sfrs.getStatusV2();
// Query should take 60 seconds so should be running
assertEquals(QueryStatus.RUNNING, statusV2.getStatus());
assertEquals(QueryStatus.RUNNING.name(), statusV2.getName());
Expand All @@ -305,7 +304,7 @@ public void testAsyncQueryOpenAndCloseConnection()
assertEquals(SqlState.INVALID_PARAMETER_VALUE, e.getSQLState());
}
try (ResultSet rs = con.unwrap(SnowflakeConnection.class).createResultSet(queryID)) {
statusV2 = rs.unwrap(SnowflakeResultSet.class).getStatusV2();
QueryStatusV2 statusV2 = rs.unwrap(SnowflakeResultSet.class).getStatusV2();
// Assert status of query is a success
assertEquals(QueryStatus.SUCCESS, statusV2.getStatus());
assertEquals("No error reported", statusV2.getErrorMessage());
Expand All @@ -318,27 +317,16 @@ public void testAsyncQueryOpenAndCloseConnection()
.unwrap(SnowflakeStatement.class)
.executeAsyncQuery("select * from nonexistentTable")) {
Thread.sleep(100);
statusV2 = rs1.unwrap(SnowflakeResultSet.class).getStatusV2();
// when GS response is slow, allow up to 1 second of retries to get final query status
SnowflakeResultSet sfrs1 = rs1.unwrap(SnowflakeResultSet.class);
await()
.atMost(Duration.ofSeconds(10))
.until(
() -> {
QueryStatus qs = sfrs1.getStatusV2().getStatus();
return !(qs == QueryStatus.NO_DATA || qs == QueryStatus.RUNNING);
});
// If GS response is too slow to return data, do nothing to avoid flaky test failure. If
// response has returned,
// assert it is the error message that we are expecting.
if (statusV2.getStatus() != QueryStatus.NO_DATA) {
assertEquals(QueryStatus.FAILED_WITH_ERROR, statusV2.getStatus());
assertEquals(2003, statusV2.getErrorCode());
assertEquals(
"SQL compilation error:\n"
+ "Object 'NONEXISTENTTABLE' does not exist or not authorized.",
statusV2.getErrorMessage());
}
.until(() -> sfrs1.getStatusV2().getStatus() == QueryStatus.FAILED_WITH_ERROR);
statusV2 = sfrs1.getStatusV2();
assertEquals(2003, statusV2.getErrorCode());
assertEquals(
"SQL compilation error:\n"
+ "Object 'NONEXISTENTTABLE' does not exist or not authorized.",
statusV2.getErrorMessage());
}
}
}
Expand Down
Loading

0 comments on commit 9e97ba7

Please sign in to comment.