Skip to content

Commit

Permalink
chore(deps): update dependency com.google.cloud.samples:shared-configuration to v1.0.22 (#144)
Browse files Browse the repository at this point in the history
  • Loading branch information
jiangmichaellll committed Apr 12, 2021
1 parent 368d7df commit 5238be8
Show file tree
Hide file tree
Showing 7 changed files with 52 additions and 31 deletions.
3 changes: 0 additions & 3 deletions .kokoro/build.sh
Expand Up @@ -78,9 +78,6 @@ samples)

if [[ -f ${SAMPLES_DIR}/pom.xml ]]
then
# get versions for constructing the full names of the packaged JARs
export CONNECTOR_VERSION=$(grep pubsublite-spark-sql-streaming ${scriptDir}/../versions.txt | cut -d: -f3)
export SAMPLE_VERSION=$(grep com.google.cloud.samples.shared-configuration: ${scriptDir}/../versions.txt | cut -d: -f3)
if [ -f "${KOKORO_GFILE_DIR}/secret_manager/java-pubsublite-spark-samples-secrets" ]
then
source "${KOKORO_GFILE_DIR}/secret_manager/java-pubsublite-spark-samples-secrets"
Expand Down
2 changes: 1 addition & 1 deletion samples/pom.xml
Expand Up @@ -17,7 +17,7 @@
<parent>
<groupId>com.google.cloud.samples</groupId>
<artifactId>shared-configuration</artifactId>
<version>1.0.21</version>
<version>1.0.22</version>
</parent>

<properties>
Expand Down
2 changes: 1 addition & 1 deletion samples/snapshot/pom.xml
Expand Up @@ -14,7 +14,7 @@
<parent>
<groupId>com.google.cloud.samples</groupId>
<artifactId>shared-configuration</artifactId>
<version>1.0.21</version>
<version>1.0.22</version>
</parent>

<properties>
Expand Down
2 changes: 1 addition & 1 deletion samples/snippets/pom.xml
Expand Up @@ -14,7 +14,7 @@
<parent>
<groupId>com.google.cloud.samples</groupId>
<artifactId>shared-configuration</artifactId>
<version>1.0.21</version>
<version>1.0.22</version>
</parent>

<properties>
Expand Down
72 changes: 48 additions & 24 deletions samples/snippets/src/test/java/pubsublite/spark/SampleTestBase.java
Expand Up @@ -33,17 +33,20 @@
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.BlobInfo;
import com.google.cloud.storage.Storage;
import com.google.common.collect.ImmutableList;
import com.google.common.flogger.GoogleLogger;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.shared.invoker.DefaultInvocationRequest;
import org.apache.maven.shared.invoker.DefaultInvoker;
import org.apache.maven.shared.invoker.InvocationOutputHandler;
import org.apache.maven.shared.invoker.InvocationRequest;
import org.apache.maven.shared.invoker.InvocationResult;
import org.apache.maven.shared.invoker.Invoker;
Expand All @@ -52,15 +55,15 @@

public abstract class SampleTestBase {

private static final GoogleLogger log = GoogleLogger.forEnclosingClass();

private static final String CLOUD_REGION = "CLOUD_REGION";
private static final String CLOUD_ZONE = "CLOUD_ZONE";
private static final String PROJECT_NUMBER = "GOOGLE_CLOUD_PROJECT_NUMBER";
private static final String PROJECT_ID = "PROJECT_ID";
private static final String TOPIC_ID = "TOPIC_ID";
private static final String CLUSTER_NAME = "CLUSTER_NAME";
private static final String BUCKET_NAME = "BUCKET_NAME";
private static final String SAMPLE_VERSION = "SAMPLE_VERSION";
private static final String CONNECTOR_VERSION = "CONNECTOR_VERSION";

protected final String runId = UUID.randomUUID().toString();
protected CloudRegion cloudRegion;
Expand Down Expand Up @@ -91,32 +94,14 @@ protected void setupEnvVars() {
PROJECT_NUMBER,
TOPIC_ID,
CLUSTER_NAME,
BUCKET_NAME,
SAMPLE_VERSION,
CONNECTOR_VERSION);
BUCKET_NAME);
cloudRegion = CloudRegion.of(env.get(CLOUD_REGION));
cloudZone = CloudZone.of(cloudRegion, env.get(CLOUD_ZONE).charAt(0));
projectId = ProjectId.of(env.get(PROJECT_ID));
projectNumber = ProjectNumber.of(Long.parseLong(env.get(PROJECT_NUMBER)));
sourceTopicId = TopicName.of(env.get(TOPIC_ID));

clusterName = env.get(CLUSTER_NAME);
bucketName = env.get(BUCKET_NAME);
workingDir =
System.getProperty("user.dir")
.replace("/samples/snapshot", "")
.replace("/samples/snippets", "");
sampleVersion = env.get(SAMPLE_VERSION);
connectorVersion = env.get(CONNECTOR_VERSION);
sampleJarName = String.format("pubsublite-spark-snippets-%s.jar", sampleVersion);
connectorJarName =
String.format("pubsublite-spark-sql-streaming-%s-with-dependencies.jar", connectorVersion);
sampleJarNameInGCS = String.format("pubsublite-spark-snippets-%s-%s.jar", sampleVersion, runId);
connectorJarNameInGCS =
String.format(
"pubsublite-spark-sql-streaming-%s-with-dependencies-%s.jar", connectorVersion, runId);
sampleJarLoc = String.format("%s/samples/snippets/target/%s", workingDir, sampleJarName);
connectorJarLoc = String.format("%s/target/%s", workingDir, connectorJarName);
}

protected void findMavenHome() throws Exception {
Expand All @@ -131,12 +116,14 @@ protected void findMavenHome() throws Exception {
}
}

protected void mavenPackage(String workingDir)
private void runMavenCommand(
String workingDir, Optional<InvocationOutputHandler> outputHandler, String... goals)
throws MavenInvocationException, CommandLineException {
InvocationRequest request = new DefaultInvocationRequest();
request.setPomFile(new File(workingDir + "/pom.xml"));
request.setGoals(ImmutableList.of("clean", "package", "-Dmaven.test.skip=true"));
request.setGoals(Arrays.asList(goals.clone()));
Invoker invoker = new DefaultInvoker();
outputHandler.ifPresent(invoker::setOutputHandler);
invoker.setMavenHome(new File(mavenHome));
InvocationResult result = invoker.execute(request);
if (result.getExecutionException() != null) {
Expand All @@ -145,6 +132,43 @@ protected void mavenPackage(String workingDir)
assertThat(result.getExitCode()).isEqualTo(0);
}

protected void mavenPackage(String workingDir)
    throws MavenInvocationException, CommandLineException {
  // Package the Maven module rooted at workingDir, skipping tests so the
  // build only produces the jars needed by the sample run.
  final String[] packageGoals = {"clean", "package", "-Dmaven.test.skip=true"};
  runMavenCommand(workingDir, Optional.empty(), packageGoals);
}

private void getVersion(String workingDir, InvocationOutputHandler outputHandler)
    throws MavenInvocationException, CommandLineException {
  // Ask Maven to echo the module's project.version (quietly, non-recursive,
  // via exec-maven-plugin) and stream each line of output to the handler.
  final String[] versionGoals = {
    "-q",
    "-Dexec.executable=echo",
    "-Dexec.args='${project.version}'",
    "--non-recursive",
    "exec:exec"
  };
  runMavenCommand(workingDir, Optional.of(outputHandler), versionGoals);
}

protected void setupVersions() throws MavenInvocationException, CommandLineException {
  // Normalize to the repository root no matter which samples module launched
  // the JVM (user.dir may point at samples/snapshot or samples/snippets).
  final String userDir = System.getProperty("user.dir");
  workingDir = userDir.replace("/samples/snapshot", "").replace("/samples/snippets", "");

  // Query Maven for the connector version (repo root pom) and the sample
  // version (samples/ pom); each handler captures the echoed version line.
  getVersion(workingDir, line -> connectorVersion = line);
  log.atInfo().log("Connector version is: %s", connectorVersion);
  getVersion(workingDir + "/samples", line -> sampleVersion = line);
  log.atInfo().log("Sample version is: %s", sampleVersion);

  // Derive the locally-built jar names, the run-scoped names used when
  // uploading to GCS (suffixed with runId), and the on-disk jar locations.
  sampleJarName = String.format("pubsublite-spark-snippets-%s.jar", sampleVersion);
  connectorJarName =
      String.format("pubsublite-spark-sql-streaming-%s-with-dependencies.jar", connectorVersion);
  sampleJarNameInGCS = String.format("pubsublite-spark-snippets-%s-%s.jar", sampleVersion, runId);
  connectorJarNameInGCS =
      String.format(
          "pubsublite-spark-sql-streaming-%s-with-dependencies-%s.jar", connectorVersion, runId);
  sampleJarLoc = String.format("%s/samples/snippets/target/%s", workingDir, sampleJarName);
  connectorJarLoc = String.format("%s/target/%s", workingDir, connectorJarName);
}

protected void uploadGCS(Storage storage, String fileNameInGCS, String fileLoc) throws Exception {
BlobId blobId = BlobId.of(bucketName, fileNameInGCS);
BlobInfo blobInfo = BlobInfo.newBuilder(blobId).build();
Expand Down
Expand Up @@ -63,6 +63,7 @@ public void beforeClass() throws Exception {
log.atInfo().log("RunId is: %s", runId);
setupEnvVars();
findMavenHome();
setupVersions();

// Maven package into jars
mavenPackage(workingDir);
Expand Down
1 change: 0 additions & 1 deletion versions.txt
Expand Up @@ -2,4 +2,3 @@
# module:released-version:current-version

pubsublite-spark-sql-streaming:0.1.0:0.1.1-SNAPSHOT
com.google.cloud.samples.shared-configuration:1.0.21:1.0.21

0 comments on commit 5238be8

Please sign in to comment.