diff --git a/.github/workflows/build-assets.yml b/.github/workflows/build-assets.yml index 8c416335ddc..2525dfc74fc 100644 --- a/.github/workflows/build-assets.yml +++ b/.github/workflows/build-assets.yml @@ -188,7 +188,7 @@ jobs: run: | mkdir -p ${{ needs.preamble.outputs.folder_build }} echo "${{ secrets.SIGNING_SECRET }}" > ${{ needs.preamble.outputs.folder_build }}/private.key - plugins=("CASSANDRAEMBED" "COUCHBASEEMBED" "ECLBLAS" "H3" "JAVAEMBED" "KAFKA" "MEMCACHED" "MONGODBEMBED" "MYSQLEMBED" "NLP" "REDIS" "REMBED" "SQLITE3EMBED" "SQS" "PLATFORM" "CLIENTTOOLS_ONLY") + plugins=("CASSANDRAEMBED" "COUCHBASEEMBED" "ECLBLAS" "H3" "JAVAEMBED" "KAFKA" "MEMCACHED" "MONGODBEMBED" "MYSQLEMBED" "NLP" "PARQUETEMBED" "REDIS" "REMBED" "SQLITE3EMBED" "SQS" "PLATFORM" "CLIENTTOOLS_ONLY") for plugin in "${plugins[@]}"; do sudo rm -f ${{ needs.preamble.outputs.folder_build }}/CMakeCache.txt sudo rm -rf ${{ needs.preamble.outputs.folder_build }}/CMakeFiles diff --git a/.github/workflows/build-vcpkg.yml b/.github/workflows/build-vcpkg.yml index eb49d9f13e9..3707b351c1c 100644 --- a/.github/workflows/build-vcpkg.yml +++ b/.github/workflows/build-vcpkg.yml @@ -175,7 +175,7 @@ jobs: mkdir -p ${{ needs.preamble.outputs.folder_build }} declare -a plugins if [ ${{ needs.preamble.outputs.include_plugins }} == "ON" ]; then - plugins=("CASSANDRAEMBED" "COUCHBASEEMBED" "ECLBLAS" "H3" "JAVAEMBED" "KAFKA" "MEMCACHED" "MONGODBEMBED" "MYSQLEMBED" "NLP" "REDIS" "REMBED" "SQLITE3EMBED" "SQS" "PLATFORM") + plugins=("CASSANDRAEMBED" "COUCHBASEEMBED" "ECLBLAS" "H3" "JAVAEMBED" "KAFKA" "MEMCACHED" "MONGODBEMBED" "MYSQLEMBED" "NLP" "PARQUETEMBED" "REDIS" "REMBED" "SQLITE3EMBED" "SQS" "PLATFORM") else plugins=("PLATFORM") fi diff --git a/dali/base/dadfs.cpp b/dali/base/dadfs.cpp index 86572a03605..cbe2f957fc8 100644 --- a/dali/base/dadfs.cpp +++ b/dali/base/dadfs.cpp @@ -5873,6 +5873,13 @@ class CDistributedSuperFile: public CDistributedFileBase const char *ecl = 
file.queryAttributes().queryProp("ECL"); if (!isEmptyString(ecl)) addPropIfCommon(*at, "ECL", ecl); + IPropertyTree *_remoteStoragePlane = file.queryAttributes().queryPropTree("_remoteStoragePlane"); + if (_remoteStoragePlane) + { + // NB: CDistributedSuperFile sub-files in different environments are not permitted + if (!at->hasProp("_remoteStoragePlane")) + at->setPropTree("_remoteStoragePlane", LINK(_remoteStoragePlane)); + } } unsigned np = file.numParts(); if (0 == width) diff --git a/dali/base/dautils.cpp b/dali/base/dautils.cpp index 88e71fa41a4..8affed148fd 100644 --- a/dali/base/dautils.cpp +++ b/dali/base/dautils.cpp @@ -3612,8 +3612,10 @@ void addStripeDirectory(StringBuffer &out, const char *directory, const char *pl } } +static CConfigUpdateHook directIOUpdateHook; static CriticalSection dafileSrvNodeCS; static Owned<INode> dafileSrvNode; + void remapGroupsToDafilesrv(IPropertyTree *file, INamedGroupStore *resolver) { FileDescriptorFlags fileFlags = static_cast<FileDescriptorFlags>(file->getPropInt("Attr/@flags")); @@ -3625,15 +3627,14 @@ void remapGroupsToDafilesrv(IPropertyTree *file, INamedGroupStore *resolver) Owned<IStoragePlane> plane = getDataStoragePlane(planeName, true); if ((0 == plane->queryHosts().size()) && isAbsolutePath(plane->queryPrefix())) // if hosts group, or url, don't touch { + auto updateFunc = [&](const IPropertyTree *oldComponentConfiguration, const IPropertyTree *oldGlobalConfiguration) { CriticalBlock b(dafileSrvNodeCS); - if (nullptr == dafileSrvNode) - { - auto externalService = k8s::getDafileServiceFromConfig("directio"); - VStringBuffer dafilesrvEpStr("%s:%u", externalService.first.c_str(), externalService.second); - dafileSrvNode.setown(createINode(dafilesrvEpStr)); - } - } + auto externalService = k8s::getDafileServiceFromConfig("directio"); + VStringBuffer dafilesrvEpStr("%s:%u", externalService.first.c_str(), externalService.second); + dafileSrvNode.setown(createINode(dafilesrvEpStr)); + }; + directIOUpdateHook.installOnce(updateFunc, true); Owned<IGroup> group; if 
(cluster.hasProp("Group")) @@ -3646,9 +3647,15 @@ void remapGroupsToDafilesrv(IPropertyTree *file, INamedGroupStore *resolver) group.setown(resolver->lookup(planeName, defaultDir, groupType)); } + Linked<INode> dafileSrvNodeCopy; + { + // in case config hook above changes dafileSrvNode + CriticalBlock b(dafileSrvNodeCS); + dafileSrvNodeCopy.set(dafileSrvNode); + } std::vector<INode *> nodes; for (unsigned n=0; n<group->ordinality(); n++) - nodes.push_back(dafileSrvNode); + nodes.push_back(dafileSrvNodeCopy); Owned<IGroup> newGroup = createIGroup((rank_t)group->ordinality(), &nodes[0]); StringBuffer groupText; newGroup->getText(groupText); diff --git a/dali/dfu/dfurun.cpp b/dali/dfu/dfurun.cpp index 537bad7c106..72989b5ed34 100644 --- a/dali/dfu/dfurun.cpp +++ b/dali/dfu/dfurun.cpp @@ -667,7 +667,7 @@ class CDFUengine: public CInterface, implements IDFUengine if (write) auditflags |= DALI_LDAP_WRITE_WANTED; - SecAccessFlags perm; + SecAccessFlags perm = SecAccess_None; IClusterInfo *iClusterInfo = fd->queryClusterNum(0); const char *planeName = iClusterInfo->queryGroupName(); if (!isEmptyString(planeName)) @@ -697,6 +697,7 @@ class CDFUengine: public CInterface, implements IDFUengine Owned<IConstEnvironment> env = factory->openEnvironment(); if (env->isDropZoneRestrictionEnabled()) throw makeStringException(-1,"Empty plane name."); + perm = SecAccess_Full; //Not able to check DropZone permissions without a plane name #else throw makeStringException(-1,"Unexpected empty plane name."); // should never be the case in containerized setups #endif diff --git a/dockerfiles/image.sh b/dockerfiles/image.sh index 9df935ae896..a147eb22637 100755 --- a/dockerfiles/image.sh +++ b/dockerfiles/image.sh @@ -26,7 +26,7 @@ globals() { popd DOCKER_USERNAME="${DOCKER_USERNAME:-hpccbuilds}" - CMAKE_OPTIONS="-G Ninja -DVCPKG_FILES_DIR=/hpcc-dev -DCPACK_THREADS=$(docker info --format '{{.NCPU}}') -DUSE_OPTIONAL=OFF -DCONTAINERIZED=ON -DINCLUDE_PLUGINS=ON -DSUPPRESS_V8EMBED=ON" + CMAKE_OPTIONS="-G Ninja -DCPACK_THREADS=$(docker info 
--format '{{.NCPU}}') -DUSE_OPTIONAL=OFF -DCONTAINERIZED=ON -DINCLUDE_PLUGINS=ON -DSUPPRESS_V8EMBED=ON" if [ "$TAG_BUILD" -eq 1 ]; then HPCC_BUILD="hpcc_build_$MODE-$GIT_BRANCH" @@ -189,6 +189,7 @@ reconfigure() { configure() { local options=$1 echo "--- cmake config $options ---" + run "cp -r -n /hpcc-dev/vcpkg_installed /hpcc-dev/build/vcpkg_installed" run "cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/build $options" } diff --git a/docs/EN_US/ContainerizedHPCC/ContainerizedMods/ContainerLogging.xml b/docs/EN_US/ContainerizedHPCC/ContainerizedMods/ContainerLogging.xml index 5cbef70cf29..51787bcf528 100644 --- a/docs/EN_US/ContainerizedHPCC/ContainerizedMods/ContainerLogging.xml +++ b/docs/EN_US/ContainerizedHPCC/ContainerizedMods/ContainerLogging.xml @@ -619,7 +619,7 @@ ContainerLog Target Audience Filtering The availble target audiences include operator(OPR), user(USR), - programmer(PRO), audit(ADT), or all. The filter is controlled by the + programmer(PRO), monitor(MON), audit(ADT), or all. The filter is controlled by the <section>.logging.audiences value. The string value is comprised of 3 letter codes delimited by the aggregation operator (+) or the removal operator (-). @@ -634,7 +634,7 @@ ContainerLog Target Category Filtering The available target categories include disaster(DIS), error(ERR), - information(INF), warning(WRN), progress(PRO), metrics(MET). The + information(INF), warning(WRN), progress(PRO), event(EVT), metrics(MET). 
The category (or class) filter is controlled by the <section>.logging.classes value, comprised of 3 letter codes delimited by the aggregation operator (+) or the removal operator diff --git a/esp/clients/ws_dfsclient/ws_dfsclient.cpp b/esp/clients/ws_dfsclient/ws_dfsclient.cpp index 1d8d3ce030e..677e14f3535 100644 --- a/esp/clients/ws_dfsclient/ws_dfsclient.cpp +++ b/esp/clients/ws_dfsclient/ws_dfsclient.cpp @@ -398,7 +398,9 @@ class CServiceDistributedFile : public CServiceDistributedFileBasesetPropTree("Attr/_remoteStoragePlane", createPTreeFromIPT(dafileSrvRemoteFilePlane)); - if (remoteStorage->hasProp("@secret")) + + const char *serviceUrl = remoteStorage->queryProp("@service"); + if (serviceUrl && startsWith(serviceUrl, "https")) { // if remote storage service is secure, dafilesrv connections must be also. // this flag is used by consumers of this IFleDescriptor to tell whether they need to make @@ -673,7 +675,10 @@ IDFSFile *lookupDFSFile(const char *logicalName, AccessMode accessMode, unsigned if (!remoteStorage) throw makeStringExceptionV(0, "Remote storage '%s' not found", remoteName.str()); serviceUrl.set(remoteStorage->queryProp("@service")); + + // NB: for legacy support only, if the service url is secure, a secret name will be auto-generated serviceSecret.set(remoteStorage->queryProp("@secret")); + logicalName = remoteLogicalFileName; useDafilesrv = remoteStorage->getPropBool("@useDafilesrv"); } @@ -683,20 +688,18 @@ IDFSFile *lookupDFSFile(const char *logicalName, AccessMode accessMode, unsigned // auto-discover local environment dfs service. #ifdef _CONTAINERIZED // NB: only expected to be here if experimental option #option('dfsesp-localfiles', true); is in use. - // This finds and uses local dfs service for local read lookukup. + // This finds and uses local dfs service for local read lookups. 
Owned<IPropertyTreeIterator> eclWatchServices = getGlobalConfigSP()->getElements("services[@type='dfs']"); if (!eclWatchServices->first()) throw makeStringException(-1, "Dfs service not defined in esp services"); const IPropertyTree &eclWatch = eclWatchServices->query(); StringBuffer eclWatchName; eclWatch.getProp("@name", eclWatchName); - auto result = k8s::getExternalService(eclWatchName); - if (result.first.empty()) - throw makeStringExceptionV(-1, "dfs '%s': service not found", eclWatchName.str()); - if (0 == result.second) - throw makeStringExceptionV(-1, "dfs '%s': service port not defined", eclWatchName.str()); const char *protocol = eclWatch.getPropBool("@tls") ? "https" : "http"; - serviceUrl.appendf("%s://%s:%u", protocol, result.first.c_str(), result.second); + unsigned port = (unsigned)eclWatch.getPropInt("@port", NotFound); + if (NotFound == port) + throw makeStringExceptionV(-1, "dfs '%s': service port not defined", eclWatchName.str()); + serviceUrl.appendf("%s://%s:%u", protocol, eclWatchName.str(), port); #else { CriticalBlock b(dfsServiceUrlCrit); @@ -714,7 +717,12 @@ IDFSFile *lookupDFSFile(const char *logicalName, AccessMode accessMode, unsigned #endif } bool useSSL = startsWith(serviceUrl, "https"); - if (!useSSL) + if (useSSL) + { + if (0 == serviceSecret.length()) + generateDynamicUrlSecretName(serviceSecret, serviceUrl, nullptr); + } + else serviceSecret.clear(); DBGLOG("Looking up file '%s' on '%s'", logicalName, serviceUrl.str()); diff --git a/esp/scm/ws_logaccess.ecm b/esp/scm/ws_logaccess.ecm index 455561e7f7f..d89dc4d4a30 100644 --- a/esp/scm/ws_logaccess.ecm +++ b/esp/scm/ws_logaccess.ecm @@ -108,7 +108,8 @@ ESPenum LogEventClass : string Warning("WRN"), Info("INF"), Progress("PRO"), - Metric("MET") + Metric("MET"), + Event("EVT") }; /* @@ -133,12 +134,14 @@ ESPenum LogEventClass : string * INF - Information * PRO - Progress * MET - Metric +* EVT - Event * *If searching by "ByTargetAudience", the SearchByValue should contain the 3 letter code associated with 
the target audience of interest. * valid values at time of writing are: * OPR - Operator * USR - User * PRO - Programmer +* MON - Monitor * ADT - Audit *If searching by "BySourceInstance", the SearchByValue should contain the instance of interest *If searching by "BySourceNode", the SearchByValue should contain the node of interest diff --git a/esp/services/ws_workunits/ws_workunitsHelpers.hpp b/esp/services/ws_workunits/ws_workunitsHelpers.hpp index 37534458b2b..efaad03fce9 100644 --- a/esp/services/ws_workunits/ws_workunitsHelpers.hpp +++ b/esp/services/ws_workunits/ws_workunitsHelpers.hpp @@ -261,7 +261,7 @@ struct WUComponentLogOptions logFetchFilter = getBinaryLogAccessFilter(logFetchFilter, componentsFilterObj, LOGACCESS_FILTER_and); ILogAccessFilter * logEventTypeFilterObj = nullptr; - StringBuffer logType; //"DIS","ERR","WRN","INF","PRO","MET","ALL" + StringBuffer logType; //"DIS","ERR","WRN","INF","PRO","MET","EVT","ALL" zapHttpRequest->getParameter("LogFilter_LogEventType", logType); if (!logType.isEmpty() && strcmp(logType.str(), "ALL") != 0) logEventTypeFilterObj = getClassLogAccessFilter(LogMsgClassFromAbbrev(logType.str())); diff --git a/esp/src/eclwatch/VizWidget.js b/esp/src/eclwatch/VizWidget.js index 4708c10b94c..6ff09e500b7 100644 --- a/esp/src/eclwatch/VizWidget.js +++ b/esp/src/eclwatch/VizWidget.js @@ -391,7 +391,7 @@ define([ refreshData: function () { if (this.limit.get("value") > this.rows.length) { - var result = this.wu.results[this.params.Sequence]; + var result = this.wu.results.filter(r => r.Sequence == this.params.Sequence)[0]; var context = this; result.fetchNRows(this.rows.length, this.limit.get("value")).then(function (response) { context.rows = context.rows.concat(response); diff --git a/esp/src/src-react/components/Results.tsx b/esp/src/src-react/components/Results.tsx index f867da58f61..58d6ac9f890 100644 --- a/esp/src/src-react/components/Results.tsx +++ b/esp/src/src-react/components/Results.tsx @@ -91,10 +91,22 @@ export 
const Results: React.FunctionComponent = ({ key: "open legacy", text: nlsHPCC.OpenLegacyMode, disabled: !uiState.hasSelection, iconProps: { iconName: "WindowEdit" }, onClick: () => { if (selection.length === 1) { - window.location.href = `#/workunits/${wuid}/outputs/${selection[0].Name}/legacy`; + window.location.href = `#/workunits/${wuid}/outputs/${selection[0].Name}?__legacy`; } else { for (let i = selection.length - 1; i >= 0; --i) { - window.open(`#/workunits/${wuid}/outputs/${selection[i].Name}/legacy`, "_blank"); + window.open(`#/workunits/${wuid}/outputs/${selection[i].Name}?__legacy`, "_blank"); + } + } + } + }, + { + key: "visualize", text: nlsHPCC.Visualize, disabled: !uiState.hasSelection, iconProps: { iconName: "BarChartVertical" }, + onClick: () => { + if (selection.length === 1) { + window.location.href = `#/workunits/${wuid}/outputs/${selection[0].Sequence}?__visualize`; + } else { + for (let i = selection.length - 1; i >= 0; --i) { + window.open(`#/workunits/${wuid}/outputs/${selection[i].Sequence}?__visualize`, "_blank"); } } } diff --git a/esp/src/src-react/components/WorkunitDetails.tsx b/esp/src/src-react/components/WorkunitDetails.tsx index c09f3ef5806..8b17f47aa53 100644 --- a/esp/src/src-react/components/WorkunitDetails.tsx +++ b/esp/src/src-react/components/WorkunitDetails.tsx @@ -11,18 +11,19 @@ import { DojoAdapter } from "../layouts/DojoAdapter"; import { pivotItemStyle } from "../layouts/pivot"; import { pushUrl } from "../util/history"; import { WorkunitPersona } from "./controls/StateIcon"; -import { Results } from "./Results"; -import { Variables } from "./Variables"; -import { SourceFiles } from "./SourceFiles"; import { Helpers } from "./Helpers"; +import { IFrame } from "./IFrame"; +import { Logs } from "./Logs"; +import { Metrics } from "./Metrics"; import { Queries } from "./Queries"; import { Resources } from "./Resources"; +import { Result } from "./Result"; +import { Results } from "./Results"; import { FetchEditor, 
WUXMLSourceEditor } from "./SourceEditor"; +import { SourceFiles } from "./SourceFiles"; +import { Variables } from "./Variables"; import { Workflows } from "./Workflows"; -import { Metrics } from "./Metrics"; import { WorkunitSummary } from "./WorkunitSummary"; -import { Result } from "./Result"; -import { Logs } from "./Logs"; const logger = scopedLogger("src-react/components/WorkunitDetails.tsx"); @@ -84,7 +85,9 @@ export const WorkunitDetails: React.FunctionComponent = ({ {state ? - : + queryParams.hasOwnProperty("__legacy") ?