diff --git a/README.md b/README.md index 9c1491635..43472a4ab 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -![hpcc4j Master Nightly](https://github.com/hpcc-systems/hpcc4j/workflows/hpcc4j%20Nightly/badge.svg?branch=master) [![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.hpccsystems/hpcc4j/badge.svg?subject=hpcc4j)](https://maven-badges.herokuapp.com/maven-central/org.hpccsystems/hpcc4j) [![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.hpccsystems/commons-hpcc/badge.svg?subject=commons-hpcc)](https://maven-badges.herokuapp.com/maven-central/org.hpccsystems/commons-hpcc) [![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.hpccsystems/wsclient/badge.svg?subject=wsclient)](https://maven-badges.herokuapp.com/maven-central/org.hpccsystems/wsclient) [![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.hpccsystems/dfsclient/badge.svg?subject=dfsclient)](https://maven-badges.herokuapp.com/maven-central/org.hpccsystems/dfsclient) +![hpcc4j Master Nightly](https://github.com/hpcc-systems/hpcc4j/workflows/hpcc4j%20Nightly/badge.svg?branch=master) [![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.hpccsystems/hpcc4j/badge.svg?subject=hpcc4j)](https://maven-badges.herokuapp.com/maven-central/org.hpccsystems/hpcc4j) [![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.hpccsystems/commons-hpcc/badge.svg?subject=commons-hpcc)](https://maven-badges.herokuapp.com/maven-central/org.hpccsystems/commons-hpcc) [![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.hpccsystems/wsclient/badge.svg?subject=wsclient)](https://maven-badges.herokuapp.com/maven-central/org.hpccsystems/wsclient) [![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.hpccsystems/dfsclient/badge.svg?subject=dfsclient)](https://maven-badges.herokuapp.com/maven-central/org.hpccsystems/dfsclient) [![Maven 
Central](https://maven-badges.herokuapp.com/maven-central/org.hpccsystems/spark-hpcc/badge.svg?subject=spark-hpcc)](https://maven-badges.herokuapp.com/maven-central/org.hpccsystems/spark-hpcc) [![CodeQL](https://github.com/hpcc-systems/hpcc4j/actions/workflows/codeql-analysis.yml/badge.svg)](https://github.com/hpcc-systems/hpcc4j/actions/workflows/codeql-analysis.yml) @@ -57,6 +57,10 @@ Everybody is encouraged to report issues and contribute to the project. When ope - RDF data ingestion tool to HPCC - Based on Apache Jena and dependent on org.hpccsystems.ws.client +- [Spark-HPCC](https://github.com/hpcc-systems/hpcc4j/blob/master/spark-hpcc/README.md) + - Spark classes for HPCC Systems / Spark interoperability + + #### HPCC4J and HPCC Kubernetes Clusters Utilizing HPCC4J with containerized HPCC Systems clusters requires some additional local configuration steps. These configuration steps are documented here: [Using HPCC4j with HPCC on a Kubernetes Cluster](https://github.com/hpcc-systems/hpcc4j/wiki/Using-HPCC4J-with-HPCC-on-a-Kubernetes-Cluster) @@ -65,7 +69,7 @@ These projects are configured to be built using Maven. To build the projects usi `mvn install` -NOTE: hpcccommons, wsclient, and dfsclient are controled via the top-level maven pom file and can be built via a single command. +NOTE: hpcccommons, wsclient, dfsclient, and spark-hpcc are controlled via the top-level maven pom file and can be built via a single command. 
All sub-projects can be built individually using the pom file in each sub-project directory For more information on how to use Maven see http://maven.apache.org diff --git a/spark-hpcc/README.md b/spark-hpcc/README.md index 049faf292..bc3d8e52c 100644 --- a/spark-hpcc/README.md +++ b/spark-hpcc/README.md @@ -25,8 +25,7 @@ # Spark-HPCC Spark classes for HPCC Systems / Spark interoperability -### DataAccess -The DataAccess project contains the classes which expose distributed +This project contains the classes which expose distributed streaming of HPCC based data via Spark constructs. In addition, the HPCC data is exposed as a Dataframe for the convenience of the Spark developer. @@ -36,7 +35,7 @@ If using a standard Spark submission pipeline such as spark-submit these depende However, if your pipeline executes a jar directly you may need to add the Spark libraries from your $SPARK_HOME to the classpath. ### Examples & Documentation -See: [Examples](https://github.com/hpcc-systems/Spark-HPCC/tree/master/Examples) for example usage of the connector as well as API documentation for the reading and writing APIs. +See: [Examples](https://github.com/hpcc-systems/hpcc4j/tree/master/spark-hpcc/Examples) for example usage of the connector as well as API documentation for the reading and writing APIs. ## Please note: ##### As reported by github: diff --git a/spark-hpcc/src/main/javadoc/overview.html b/spark-hpcc/src/main/javadoc/overview.html index 5828b7418..2d17f3c7e 100644 --- a/spark-hpcc/src/main/javadoc/overview.html +++ b/spark-hpcc/src/main/javadoc/overview.html @@ -2,6 +2,6 @@
 This project enables HPCC Systems / Spark interoperability.
 
-The DataAccess project contains the classes which expose distributed streaming of HPCC based data via Spark constructs. In addition, the HPCC data is exposed as a Dataframe for the convenience of the Spark developer.
+This project contains the classes which expose distributed streaming of HPCC based data via Spark constructs. In addition, the HPCC data is exposed as a Dataframe for the convenience of the Spark developer.
     
\ No newline at end of file diff --git a/spark-hpcc/src/test/java/org/hpccsystems/spark/BaseIntegrationTest.java b/spark-hpcc/src/test/java/org/hpccsystems/spark/BaseIntegrationTest.java index b7d621f0f..6c1749fa7 100644 --- a/spark-hpcc/src/test/java/org/hpccsystems/spark/BaseIntegrationTest.java +++ b/spark-hpcc/src/test/java/org/hpccsystems/spark/BaseIntegrationTest.java @@ -86,7 +86,7 @@ public SparkConf getDefaultSparkConf() else { System.out.println("Unable to find spark jar matching pattern: spark-hpcc.*-jar-with-dependencies.jar, " - + "in directory [PROJECT_ROOT]/DataAccess/target/, check maven package / verify output for errors."); + + "in directory [PROJECT_ROOT]/spark-hpcc/target/, check maven package / verify output for errors."); } String[] jars = {