
Commit

Merge remote-tracking branch 'origin/candidate-9.4.x'
Signed-off-by: Gavin Halliday <[email protected]>
ghalliday committed Sep 28, 2023
2 parents 7b8fa07 + e057926 commit d0b0b39
Showing 295 changed files with 17,791 additions and 5,816 deletions.
3 changes: 3 additions & 0 deletions .github/workflows/build-and-publish-debug.yml
@@ -18,6 +18,9 @@ jobs:
runs-on: ubuntu-20.04
if: github.repository == 'hpcc-systems/HPCC-Platform'
steps:
- name: Free additional disk space (remove Android SDK + Tools)
run: |
sudo rm -rf /usr/local/lib/android
- name: Checkout
uses: actions/checkout@v2
- name: Build
9 changes: 9 additions & 0 deletions .github/workflows/build-and-publish.yml
@@ -18,6 +18,9 @@ jobs:
runs-on: ubuntu-20.04
if: github.repository == 'hpcc-systems/HPCC-Platform'
steps:
- name: Free additional disk space (remove Android SDK + Tools)
run: |
sudo rm -rf /usr/local/lib/android
- name: Checkout
uses: actions/checkout@v2
- name: Build
@@ -41,6 +44,9 @@ jobs:
matrix:
engine: ['ml', 'gnn', 'gnn-gpu']
steps:
- name: Free additional disk space (remove Android SDK + Tools)
run: |
sudo rm -rf /usr/local/lib/android
- name: Checkout
uses: actions/checkout@v2
- name: Build
@@ -58,6 +64,9 @@
runs-on: ubuntu-20.04
if: github.repository == 'hpcc-systems/HPCC-Platform'
steps:
- name: Free additional disk space (remove Android SDK + Tools)
run: |
sudo rm -rf /usr/local/lib/android
- name: Checkout
uses: actions/checkout@v2
- name: Build
5 changes: 5 additions & 0 deletions .github/workflows/build-vcpkg.yml
@@ -143,6 +143,11 @@ jobs:
id: buildx
uses: docker/setup-buildx-action@v2

- name: Pull previous images
run: |
docker pull ${{ steps.vars.outputs.docker_tag_candidate_base }} || true
docker pull ${{ steps.vars.outputs.docker_tag }} || true
- name: Create Build Image
if: ${{ contains(matrix.event_name, github.event_name) && needs.preamble.outputs.platform }}
uses: docker/build-push-action@v4
1 change: 1 addition & 0 deletions CMakeLists.txt
@@ -181,6 +181,7 @@ if ( PLUGIN )
HPCC_ADD_SUBDIRECTORY (plugins/h3 "H3")
HPCC_ADD_SUBDIRECTORY (plugins/nlp "NLP")
HPCC_ADD_SUBDIRECTORY (plugins/mongodb "MONGODBEMBED")
HPCC_ADD_SUBDIRECTORY (plugins/parquet "PARQUETEMBED")
elseif ( NOT MAKE_DOCS_ONLY )
HPCC_ADD_SUBDIRECTORY (system)
HPCC_ADD_SUBDIRECTORY (initfiles)
1 change: 1 addition & 0 deletions cmake_modules/plugins.cmake
@@ -36,6 +36,7 @@ set(PLUGINS_LIST
MONGODBEMBED
MYSQLEMBED
NLP
PARQUETEMBED
REDIS
REMBED
SQLITE3EMBED
1 change: 1 addition & 0 deletions common/thorhelper/roxiehelper.hpp
@@ -210,6 +210,7 @@ class THORHELPER_API HttpHelper : public CInterface
return getContentTypeMlFormat();
}
IProperties *queryUrlParameters(){return parameters;}
const IProperties * queryRequestHeaders() const { return reqHeaders; }
bool validateHttpGetTarget(const char *target)
{
if (!target)
19 changes: 13 additions & 6 deletions common/thorhelper/thorsoapcall.cpp
@@ -1911,13 +1911,20 @@ class CWSCAsyncFor : implements IWSCAsyncFor, public CInterface, public CAsyncFo
if (!httpHeaderBlockContainsHeader(httpheaders, ACCEPT_ENCODING))
request.appendf("%s: gzip, deflate\r\n", ACCEPT_ENCODING);
#endif
if (!isEmptyString(master->logctx.queryGlobalId()))
Owned<IProperties> traceHeaders = master->logctx.getClientHeaders();
if (traceHeaders)
{
if (!httpHeaderBlockContainsHeader(httpheaders, master->logctx.queryGlobalIdHttpHeaderName()))
request.append(master->logctx.queryGlobalIdHttpHeaderName()).append(": ").append(master->logctx.queryGlobalId()).append("\r\n");

if (!isEmptyString(master->logctx.queryLocalId()) && !httpHeaderBlockContainsHeader(httpheaders, master->logctx.queryCallerIdHttpHeaderName()))
request.append(master->logctx.queryCallerIdHttpHeaderName()).append(": ").append(master->logctx.queryLocalId()).append("\r\n"); //our localId is reciever's callerId
Owned<IPropertyIterator> iter = traceHeaders->getIterator();
ForEach(*iter)
{
const char * key = iter->getPropKey();
if (!httpHeaderBlockContainsHeader(httpheaders, key))
{
const char * value = traceHeaders->queryProp(key);
if (!isEmptyString(value))
request.append(key).append(": ").append(value).append("\r\n");
}
}
}

if (master->wscType == STsoap)
67 changes: 67 additions & 0 deletions common/workunit/workunit.cpp
@@ -14634,3 +14634,70 @@ bool executeGraphOnLingeringThor(IConstWorkUnit &workunit, unsigned wfid, const
throwUnexpected();
}
#endif


//The names of the debug options used to serialize trace info - lower case to ensure they also work on the property tree (in thor slaves)
static constexpr const char * traceDebugOptions[] = { "globalid", "callerid", "ottraceparent", "ottracestate" };
//The names of the headers containing the trace info
static constexpr const char * traceHeaderNames[] = { "global-id", "caller-id", "traceparent", "tracestate" };
static_assert(_elements_in(traceDebugOptions) == _elements_in(traceHeaderNames), "Inconsistent traceDebugOptions, traceHeaderNames arrays");

IProperties * extractTraceDebugOptions(IConstWorkUnit * source)
{
if (!source)
return nullptr;

Owned<IProperties> target = createProperties(true);
SCMStringBuffer temp;
for (unsigned i=0; i < _elements_in(traceDebugOptions); i++)
{
const char * debugOption = traceDebugOptions[i];
const char * headerName = traceHeaderNames[i];
if (source->hasDebugValue(debugOption))
{
temp.clear();
source->getDebugValue(debugOption, temp);
target->setProp(headerName, temp.str());
}
}
return target.getClear();
}

IProperties * deserializeTraceDebugOptions(const IPropertyTree * debugOptions)
{
if (!debugOptions)
return nullptr;

Owned<IProperties> target = createProperties(true);
if (debugOptions)
{
for (unsigned i=0; i < _elements_in(traceDebugOptions); i++)
{
const char * debugOption = traceDebugOptions[i];
const char * headerName = traceHeaderNames[i];
if (debugOptions->hasProp(debugOption))
{
const char * value = debugOptions->queryProp(debugOption);
target->setProp(headerName, value);
}
}
}
return target.getClear();
}

void recordTraceDebugOptions(IWorkUnit * target, const IProperties * source)
{
if (!source)
return;

for (unsigned i=0; i < _elements_in(traceDebugOptions); i++)
{
const char * headerName = traceHeaderNames[i];
const char * debugOption = traceDebugOptions[i];
if (source->hasProp(headerName))
{
const char * value = source->queryProp(headerName);
target->setDebugValue(debugOption, value, true);
}
}
}
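
The three helpers above round-trip trace context between workunit debug options and the equivalent HTTP-style header names. A minimal usage sketch, assuming a hypothetical caller (propagateTraceContext is illustrative only; the actual call sites are elsewhere in the platform) that hands the trace context from a parent workunit to a child:

// Sketch only: copy global-id/caller-id/traceparent/tracestate from one workunit
// to another. extractTraceDebugOptions returns the values keyed by header name;
// recordTraceDebugOptions stores them back as the lower-case debug options.
void propagateTraceContext(IConstWorkUnit * parent, IWorkUnit * child)
{
    Owned<IProperties> traceHeaders = extractTraceDebugOptions(parent);
    if (traceHeaders)
        recordTraceDebugOptions(child, traceHeaders);
}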
4 changes: 4 additions & 0 deletions common/workunit/workunit.hpp
@@ -1764,6 +1764,10 @@ class WORKUNIT_API WorkUnitErrorReceiver : implements IErrorReceiver, public CIn
bool removeTimeStamp;
};

extern WORKUNIT_API IProperties * extractTraceDebugOptions(IConstWorkUnit * source);
extern WORKUNIT_API IProperties * deserializeTraceDebugOptions(const IPropertyTree * debugOptions);
extern WORKUNIT_API void recordTraceDebugOptions(IWorkUnit * target, const IProperties * source);

extern WORKUNIT_API void addWorkunitException(IWorkUnit * wu, IError * error, bool removeTimeStamp);

inline bool isGlobalScope(const char * scope) { return scope && (streq(scope, GLOBAL_SCOPE) || streq(scope, LEGACY_GLOBAL_SCOPE)); }
32 changes: 32 additions & 0 deletions dali/base/dafdesc.cpp
@@ -1040,6 +1040,7 @@ class CFileDescriptor: public CFileDescriptorBase, implements ISuperFileDescrip

SocketEndpointArray *pending; // for constructing cluster group
Owned<IStoragePlane> remoteStoragePlane;
std::vector<std::string> dafileSrvEndpoints;
bool setupdone;
byte version;

@@ -1398,6 +1399,32 @@
}
}

// mapDafileSrvSecrets is called when a CFileDescriptor is created, if it is associated with a remoteStoragePlane.
// It identifies the target dafilesrv location URLs as secret-based connections in the dafilesrv hook.
// NB: the expectation is that there will only be 1 target dafilesrv service URL.
// These will remain associated in the hook until this CFileDescriptor object is destroyed and removeMappedDafileSrvSecrets is called.
void mapDafileSrvSecrets(IClusterInfo &cluster)
{
Owned<INodeIterator> groupIter = cluster.queryGroup()->getIterator();

ForEach(*groupIter)
{
INode &node = groupIter->query();
StringBuffer endpointString;
node.endpoint().getEndpointHostText(endpointString);
auto it = std::find(dafileSrvEndpoints.begin(), dafileSrvEndpoints.end(), endpointString.str());
if (it == dafileSrvEndpoints.end())
dafileSrvEndpoints.push_back(endpointString.str());
}
for (auto &dafileSrvEp: dafileSrvEndpoints)
queryDaFileSrvHook()->addSecretUrl(dafileSrvEp.c_str());
}
void removeMappedDafileSrvSecrets()
{
for (auto &dafileSrvEp: dafileSrvEndpoints)
queryDaFileSrvHook()->removeSecretUrl(dafileSrvEp.c_str());
}

public:
IMPLEMENT_IINTERFACE;

@@ -1482,6 +1509,8 @@
{
assertex(1 == clusters.ordinality()); // only one cluster per logical remote file supported/will have resolved to 1
remoteStoragePlane.setown(createStoragePlane(remoteStoragePlaneMeta));
if (attr->getPropBool("@_remoteSecure"))
mapDafileSrvSecrets(clusters.item(0));
}
}
else
@@ -1613,6 +1642,8 @@
assertex(1 == clusters.ordinality()); // only one cluster per logical remote file supported/will have resolved to 1
remoteStoragePlane.setown(createStoragePlane(remoteStoragePlaneMeta));
clusters.item(0).applyPlane(remoteStoragePlane);
if (attr->getPropBool("@_remoteSecure"))
mapDafileSrvSecrets(clusters.item(0));
}
}

@@ -1707,6 +1738,7 @@

virtual ~CFileDescriptor()
{
removeMappedDafileSrvSecrets();
closePending(); // not sure strictly needed
ForEachItemInRev(p, parts)
delpart(p);
3 changes: 2 additions & 1 deletion dali/base/dautils.cpp
@@ -17,6 +17,7 @@

#include "platform.h"
#include "jlib.hpp"
#include "jcontainerized.hpp"
#include "jstring.hpp"
#include "jfile.hpp"
#include "jmisc.hpp"
@@ -3628,7 +3629,7 @@ void remapGroupsToDafilesrv(IPropertyTree *file, INamedGroupStore *resolver)
CriticalBlock b(dafileSrvNodeCS);
if (nullptr == dafileSrvNode)
{
auto externalService = getDafileServiceFromConfig("directio");
auto externalService = k8s::getDafileServiceFromConfig("directio");
VStringBuffer dafilesrvEpStr("%s:%u", externalService.first.c_str(), externalService.second);
dafileSrvNode.setown(createINode(dafilesrvEpStr));
}
24 changes: 14 additions & 10 deletions dockerfiles/vcpkg/build.sh
@@ -11,7 +11,7 @@ GITHUB_REF=$(git rev-parse --short=8 HEAD)
cd vcpkg
VCPKG_REF=$(git rev-parse --short=8 HEAD)
cd ..
GITHUB_BRANCH=$(git log -50 --pretty=format:"%D" | tr ',' '\n' | grep 'upstream/' | awk 'NR==1 {sub("upstream/", ""); print}')
GITHUB_BRANCH=$(git log -50 --pretty=format:"%D" | tr ',' '\n' | grep 'upstream/' | awk 'NR==1 {sub("upstream/", ""); print}' | xargs)
DOCKER_USERNAME="${DOCKER_USERNAME:-hpccbuilds}"
DOCKER_PASSWORD="${DOCKER_PASSWORD:-none}"

@@ -29,30 +29,34 @@ docker login -u $DOCKER_USERNAME -p $DOCKER_PASSWORD
CMAKE_OPTIONS="-G Ninja -DCMAKE_BUILD_TYPE=RelWithDebInfo -DVCPKG_FILES_DIR=/hpcc-dev -DCPACK_THREADS=0 -DUSE_OPTIONAL=OFF -DINCLUDE_PLUGINS=ON -DSUPPRESS_V8EMBED=ON"

function doBuild() {
docker pull "hpccsystems/platform-build-$1:$VCPKG_REF" || true
docker pull "hpccsystems/platform-build-$1:$GITHUB_BRANCH" || true

docker build --progress plain --pull --rm -f "$SCRIPT_DIR/$1.dockerfile" \
--build-arg DOCKER_NAMESPACE=$DOCKER_USERNAME \
--build-arg VCPKG_REF=$VCPKG_REF \
-t hpccsystems/platform-build-$1:$VCPKG_REF \
-t hpccsystems/platform-build-$1:$GITHUB_BRANCH \
--cache-from hpccsystems/platform-build-$1:$VCPKG_REF \
--cache-from hpccsystems/platform-build-$1:$GITHUB_BRANCH \
-t hpccsystems/platform-build-$1:$VCPKG_REF \
-t hpccsystems/platform-build-$1:$GITHUB_BRANCH \
"$SCRIPT_DIR/."

docker push hpccsystems/platform-build-$1:$VCPKG_REF &
docker push hpccsystems/platform-build-$1:$GITHUB_BRANCH &
# docker push hpccsystems/platform-build-$1:$VCPKG_REF
# docker push hpccsystems/platform-build-$1:$GITHUB_BRANCH

docker run --rm --mount source="$(pwd)",target=/hpcc-dev/HPCC-Platform,type=bind,consistency=cached hpccsystems/platform-build-$1:$VCPKG_REF \
"cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/HPCC-Platform/build-$1 ${CMAKE_OPTIONS} && \
cmake --build /hpcc-dev/HPCC-Platform/build-$1 --target package --parallel $(nproc)"

sudo chown -R $(id -u):$(id -g) ./build-$1
# docker run -it --mount source="$(pwd)",target=/hpcc-dev/HPCC-Platform,type=bind,consistency=cached build-ubuntu-22.04:latest bash
}

doBuild ubuntu-23.04
doBuild ubuntu-20.04
doBuild amazonlinux
doBuild ubuntu-22.04
doBuild centos-8
# doBuild ubuntu-23.04
# doBuild ubuntu-20.04
# doBuild amazonlinux
# doBuild ubuntu-22.04
# doBuild centos-8
doBuild centos-7

wait
7 changes: 4 additions & 3 deletions dockerfiles/vcpkg/centos-7.dockerfile
@@ -2,11 +2,12 @@ ARG VCPKG_REF=latest
FROM hpccsystems/platform-build-base-centos-7:$VCPKG_REF

RUN yum install -y \
epel-release \
java-11-openjdk-devel \
python3-devel \
wget \
epel-release
RUN yum update -y && yum install -y R-core-devel
wget && \
yum update -y && yum install -y R-core-devel && \
yum -y clean all && rm -rf /var/cache

ENV Rcpp_package=Rcpp_0.12.19.tar.gz
ENV RInside_package=RInside_0.2.12.tar.gz
35 changes: 35 additions & 0 deletions docs/EN_US/ContainerizedHPCC/ContainerizedMods/ConfigureValues.xml
@@ -1023,6 +1023,41 @@ thor:
categories, including the "ecl" category, are read internally by
system components and not exposed directly to ECL code.</para>
</sect3>

<sect3 id="CV_CrossOriginRes">
<title>Cross Origin Resource Handling</title>

<para>Cross-origin resource sharing (CORS) is a mechanism for
integrating applications in different domains. CORS defines how client
web applications in one domain can interact with resources in another
domain. You can configure CORS support settings in the ESP section of
the values.yaml file as illustrated below: </para>

<programlisting>esp:
- name: eclwatch
application: eclwatch
auth: ldap
replicas: 1
# The following 'corsAllowed' section is used to configure CORS support
# origin - the origin to support CORS requests from
# headers - the headers to allow for the given origin via CORS
# methods - the HTTP methods to allow for the given origin via CORS
#
corsAllowed:
# origin starting with https will only allow https CORS
- origin: https://*.example2.com
headers:
- "X-Custom-Header"
methods:
- "GET"
# origin starting with http will allow http or https CORS
- origin: http://www.example.com
headers:
- "*"
methods:
- "GET"
- "POST" </programlisting>
</sect3>
</sect2>

<sect2>