diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 99b3cc693af..36dcfa3ff48 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -43,7 +43,7 @@ jobs: show-progress: 'false' - name: Setup Java JDK - uses: actions/setup-java@v4.0.0 + uses: actions/setup-java@v4.2.1 with: java-version: 11 # Java distribution. See the list of supported distributions in README file diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index c65fbd51148..0d596bc06df 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -33,9 +33,9 @@ jobs: if: ${{ github.event_name == 'pull_request' }} working-directory: docs/manual run: | - mike deploy --title "4.4 Latest" --alias-type=copy --update-aliases 4.4 latest + mike deploy --title "4.4" --alias-type=copy --update-aliases 4.4 latest - name: deploy latest docs to gh-pages branch if: ${{ github.event_name != 'pull_request' }} working-directory: docs/manual run: | - mike deploy --push --title "4.4 Latest" --alias-type=copy --update-aliases 4.4 latest + mike deploy --push --title "4.4" --alias-type=copy --update-aliases 4.4 latest diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml index 977a3f64b16..4d1d6d27eb1 100644 --- a/.github/workflows/linux.yml +++ b/.github/workflows/linux.yml @@ -22,7 +22,7 @@ jobs: submodules: 'recursive' show-progress: 'false' - name: Set up JDK - uses: actions/setup-java@v4.0.0 + uses: actions/setup-java@v4.2.1 with: distribution: 'temurin' java-version: ${{ matrix.jdk }} @@ -37,7 +37,7 @@ jobs: - name: Set up Maven uses: stCarolas/setup-maven@v5 with: - maven-version: 3.6.3 + maven-version: 3.8.3 - name: Build with Maven run: | mvn -B -ntp -V install -DskipTests=true -Dmaven.javadoc.skip=true -Drelease -Pwith-doc @@ -55,7 +55,7 @@ jobs: submodules: 'recursive' show-progress: 'false' - name: Set up JDK - uses: actions/setup-java@v4.0.0 + uses: actions/setup-java@v4.2.1 with: distribution: 'temurin' java-version: 11 @@ -63,7 +63,7 @@ jobs: - name: Set up Maven uses: stCarolas/setup-maven@v5 with: - maven-version: 3.6.3 + maven-version: 3.8.3 - name: Test with maven run: | mvn -B resources:resources@copy-index-schema-to-source -f web diff --git a/.github/workflows/mvn-dep-tree.yml b/.github/workflows/mvn-dep-tree.yml index e0fcd59a519..75b02b2fc57 100644 --- a/.github/workflows/mvn-dep-tree.yml +++ b/.github/workflows/mvn-dep-tree.yml @@ -20,7 +20,7 @@ jobs: show-progress: 'false' - name: Setup Java JDK - uses: actions/setup-java@v4.0.0 + uses: actions/setup-java@v4.2.1 with: java-version: 11 # Java distribution. See the list of supported distributions in README file diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml new file mode 100644 index 00000000000..98cf13704c4 --- /dev/null +++ b/.github/workflows/scorecard.yml @@ -0,0 +1,72 @@ +# This workflow uses actions that are not certified by GitHub. They are provided +# by a third-party and are governed by separate terms of service, privacy +# policy, and support documentation. + +name: Scorecard supply-chain security +on: + # For Branch-Protection check. Only the default branch is supported. See + # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection + branch_protection_rule: + # To guarantee Maintained check is occasionally updated. 
See + # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained + schedule: + - cron: '26 10 * * 5' + push: + branches: [ "main" ] + +# Declare default permissions as read only. +permissions: read-all + +jobs: + analysis: + name: Scorecard analysis + runs-on: ubuntu-latest + permissions: + # Needed to upload the results to code-scanning dashboard. + security-events: write + # Needed to publish results and get a badge (see publish_results below). + id-token: write + # Uncomment the permissions below if installing in a private repository. + # contents: read + # actions: read + + steps: + - name: "Checkout code" + uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # v3.1.0 + with: + persist-credentials: false + + - name: "Run analysis" + uses: ossf/scorecard-action@0864cf19026789058feabb7e87baa5f140aac736 # v2.3.1 + with: + results_file: results.sarif + results_format: sarif + # (Optional) "write" PAT token. Uncomment the `repo_token` line below if: + # - you want to enable the Branch-Protection check on a *public* repository, or + # - you are installing Scorecard on a *private* repository + # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat. + # repo_token: ${{ secrets.SCORECARD_TOKEN }} + + # Public repositories: + # - Publish results to OpenSSF REST API for easy access by consumers + # - Allows the repository to include the Scorecard badge. + # - See https://github.com/ossf/scorecard-action#publishing-results. + # For private repositories: + # - `publish_results` will always be set to `false`, regardless + # of the value entered here. + publish_results: true + + # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF + # format to the repository Actions tab. + - name: "Upload artifact" + uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 + with: + name: SARIF file + path: results.sarif + retention-days: 5 + + # Upload the results to GitHub's code scanning dashboard. 
+ - name: "Upload to code-scanning" + uses: github/codeql-action/upload-sarif@17573ee1cc1b9d061760f3a006fc4aac4f944fd5 # v2.2.4 + with: + sarif_file: results.sarif diff --git a/.github/workflows/sonarcloud.yml b/.github/workflows/sonarcloud.yml index 7db75de95ad..6f1285d3e23 100644 --- a/.github/workflows/sonarcloud.yml +++ b/.github/workflows/sonarcloud.yml @@ -19,7 +19,7 @@ jobs: submodules: 'recursive' show-progress: 'false' - name: Set up JDK 11 - uses: actions/setup-java@v4.0.0 + uses: actions/setup-java@v4.2.1 with: distribution: 'temurin' java-version: '11' @@ -34,7 +34,7 @@ jobs: run: mvn -B package -DskipTests - name: Set up JDK 21 # Sonarcloud analyzer needs at least JDK 17 - uses: actions/setup-java@v4.0.0 + uses: actions/setup-java@v4.2.1 with: distribution: 'temurin' java-version: '21' diff --git a/.gitignore b/.gitignore index 047cf25473c..84f282d0af7 100644 --- a/.gitignore +++ b/.gitignore @@ -28,6 +28,7 @@ eclipse/ es/elasticsearch-* es/es-dashboards/kibana-* es/es-dashboards/data/nodes/ +es/es-dashboards/data/index/ harvesters/harvester_*.log idea/ jcs_caching/ @@ -39,7 +40,12 @@ release/jetty/* schemas/*/doc/*/*.rst schematrons/.build target/ + +# build and release transifex/transifex-format/ +build/ +web-ui/LICENSE +web-ui/tx # web-app, clear using: mvn -f web/pom.xml clean:clean@reset @@ -58,11 +64,7 @@ web/src/main/webapp/META-INF/MANIFEST.MF web/src/main/webapp/WEB-INF/data/0* web/src/main/webapp/WEB-INF/data/config/encryptor.properties web/src/main/webapp/WEB-INF/data/config/index/records.json -web/src/main/webapp/WEB-INF/data/config/schema_plugins/*/schematron/schematron*.xsl -web/src/main/webapp/WEB-INF/data/config/schema_plugins/csw-record -web/src/main/webapp/WEB-INF/data/config/schema_plugins/dublin-core -web/src/main/webapp/WEB-INF/data/config/schema_plugins/iso19* -web/src/main/webapp/WEB-INF/data/config/schema_plugins/schemaplugin-uri-catalog.xml +web/src/main/webapp/WEB-INF/data/config/schema_plugins/* web/src/main/webapp/WEB-INF/data/config/schemaplugin-uri-catalog.xml web/src/main/webapp/WEB-INF/data/data/backup web/src/main/webapp/WEB-INF/data/data/metadata_data diff --git a/CITATION.cff b/CITATION.cff new file mode 100644 index 00000000000..1cdaa3768cf --- /dev/null +++ b/CITATION.cff @@ -0,0 +1,88 @@ +# This CITATION.cff file was generated with cffinit. +# Visit https://bit.ly/cffinit to generate yours today! + +cff-version: 1.2.0 +title: GeoNetwork opensource +message: >- + If you use this software, please cite it using the + metadata from this file. 
+type: software +authors: + - given-names: François + family-names: Prunayre + affiliation: Titellus + - given-names: Jose + family-names: García + affiliation: GeoCat BV + - given-names: Jeroen + family-names: Ticheler + affiliation: GeoCat BV + orcid: 'https://orcid.org/0009-0003-3896-0437' + email: jeroen.ticheler@geocat.net + - given-names: Florent + family-names: Gravin + affiliation: CamptoCamp + - given-names: Simon + family-names: Pigot + affiliation: CSIRO Australia + - name: GeoCat BV + address: Veenderweg 13 + city: Bennekom + country: NL + post-code: 6721 WD + tel: +31 (0) 318 416 664 + website: 'https://www.geocat.net/' + email: info@geocat.net + - name: Titellus + address: 321 Route de la Mollière + city: Saint Pierre de Genebroz + country: FR + post-code: 73360 + website: 'https://titellus.net/' + email: fx.prunayre@titellus.net + - name: CamptoCamp + address: QG Center Rte de la Chaux 4 + city: Bussigny + country: CH + post-code: 1030 + tel: +41 (21) 619 10 10 + website: 'https://camptocamp.com/' + email: info@camptocamp.com + - name: Open Source Geospatial Foundation - OSGeo + address: '9450 SW Gemini Dr. #42523' + location: Beaverton + region: Oregon + post-code: '97008' + country: US + email: info@osgeo.org + website: 'https://www.osgeo.org/' +repository-code: 'http://github.com/geonetwork/core-geonetwork' +url: 'https://geonetwork-opensource.org' +repository-artifact: >- + https://sourceforge.net/projects/geonetwork/files/GeoNetwork_opensource/ +abstract: >- + GeoNetwork is a catalog application to manage spatial and + non-spatial resources. It is compliant with critical + international standards from ISO, OGC and INSPIRE. It + provides powerful metadata editing and search functions as + well as an interactive web map viewer. +keywords: + - catalog + - gis + - sdi + - spatial data infrastructure + - dataspace + - search + - open data + - standards + - spatial + - CSW + - OGCAPI Records + - DCAT + - GeoDCAT-AP + - Catalog Service + - OGC + - open geospatial consortium + - osgeo + - open source geospatial foundation +license: GPL-2.0 diff --git a/README.md b/README.md index ed6c54d3596..65f57590c6a 100644 --- a/README.md +++ b/README.md @@ -30,3 +30,6 @@ Developer documentation located in ``README.md`` files in the code-base: * General documentation for the project as a whole is in this [README.md](README.md) * [Software Development Documentation](/software_development/) provides instructions for setting up a development environment, building GeoNetwork, compiling user documentation, and making a releases. * Module specific documentation can be found in each module: + +## Open Source Security Foundation (OpenSSF) best practices status +[![OpenSSF Best Practices](https://www.bestpractices.dev/projects/8626/badge)](https://www.bestpractices.dev/projects/8626) diff --git a/SECURITY.md b/SECURITY.md index fda55f12dad..8ca2726ee51 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -11,11 +11,11 @@ Each GeoNetwork release is supported with bug fixes for a limited period, with p - We recommend to update to latest incremental release as soon as possible to address security vulnerabilities. - Some overlap is provided when major versions are announced with both a current version and a maintenance version being made available to provide time for organizations to upgrade. 
-| Version | Supported | Comment | -|---------|--------------------|---------------------| -| 4.4.x | :white_check_mark: | Latest version | -| 4.2.x | :white_check_mark: | Stable version | -| 3.12.x | :white_check_mark: | Maintenance version | +| Version | Supported | Comment | +|---------|--------------------|---------------------------------------------| +| 4.4.x | :white_check_mark: | Latest version | +| 4.2.x | :white_check_mark: | Stable version | +| 3.12.x | ❌ | End Of Life 2024-03-31 | If your organisation is making use of a GeoNetwork version that is no longer in use by the community all is not lost. You can volunteer on the developer list to make additional releases, or engage with one of our [Commercial Support](https://www.osgeo.org/service-providers/?p=geonetwork) providers. diff --git a/add-schema.sh b/add-schema.sh index 2a268428530..4f1ecc8c92d 100755 --- a/add-schema.sh +++ b/add-schema.sh @@ -83,7 +83,7 @@ then ${insertLine} a\\ \ \\ \ org.geonetwork-opensource.schemas\\ -\ schema-${schema}\\ +\ gn-schema-${schema}\\ \ ${gnSchemasVersion}\\ \ SED_SCRIPT @@ -103,7 +103,7 @@ SED_SCRIPT \ \\ \ \\ \ org.geonetwork-opensource.schemas\\ -\ schema-${schema}\\ +\ gn-schema-${schema}\\ \ ${gnSchemasVersion}\\ \ \\ \ \\ @@ -121,7 +121,7 @@ SED_SCRIPT \ \\ \ \\ \ org.geonetwork-opensource.schemas\\ -\ schema-${schema}\\ +\ gn-schema-${schema}\\ \ zip\\ \ false\\ \ \$\{schema-plugins.dir\}\\ @@ -138,7 +138,7 @@ SED_SCRIPT fi # Add schema resources in service/pom.xml with test scope for unit tests -line=$(grep -n "schema-${schema}" services/pom.xml | cut -d: -f1) +line=$(grep -n "gn-schema-${schema}" services/pom.xml | cut -d: -f1) if [ ! $line ] then @@ -154,7 +154,7 @@ then ${finalLine} a\\ \ \\ \ ${projectGroupId}\\ -\ schema-${schema}\\ +\ gn-schema-${schema}\\ \ ${gnSchemasVersion}\\ \ test\\ \ diff --git a/cachingxslt/pom.xml b/cachingxslt/pom.xml index d654a3e9917..5e961842496 100644 --- a/cachingxslt/pom.xml +++ b/cachingxslt/pom.xml @@ -31,7 +31,7 @@ org.geonetwork-opensource geonetwork - 4.4.3-SNAPSHOT + 4.4.6-SNAPSHOT diff --git a/common/pom.xml b/common/pom.xml index 0dbdf0ab22d..631e5df8dad 100644 --- a/common/pom.xml +++ b/common/pom.xml @@ -31,7 +31,7 @@ org.geonetwork-opensource geonetwork - 4.4.3-SNAPSHOT + 4.4.6-SNAPSHOT diff --git a/common/src/main/java/org/fao/geonet/Logger.java b/common/src/main/java/org/fao/geonet/Logger.java index 920d13e548f..76590b8d698 100644 --- a/common/src/main/java/org/fao/geonet/Logger.java +++ b/common/src/main/java/org/fao/geonet/Logger.java @@ -23,8 +23,6 @@ package org.fao.geonet; -//============================================================================= - import org.apache.logging.log4j.core.appender.FileAppender; /** @@ -37,35 +35,52 @@ public interface Logger { * * @return check if debug logging is enabled */ - public boolean isDebugEnabled(); + boolean isDebugEnabled(); /** * Log debug message used indicate module troubleshoot module activity. * * @param message debug message used to provide in */ - public void debug(String message); + void debug(String message); + + void debug(String message, Throwable throwable); + + void debug(String message, Object... object); /** * Log information message indicating module progress. * * @param message information message indicating progress */ - public void info(String message); + void info(String message); + + void info(String message, Throwable throwable); - /** Log warning message indicating potentially harmful situation, module + void info(String message, Object... 
object); + + /** + * Log warning message indicating potentially harmful situation, module * will continue to try and complete current activity. * * @param message Warning message indicating potentially harmful situation */ - public void warning(String message); + void warning(String message); + + void warning(String message, Throwable throwable); + + void warning(String message, Object... object); /** * Log error message indicating module cannot continue current activity. * * @param message Error message */ - public void error(String message); + void error(String message); + + void error(String message, Throwable throwable); + + void error(String message, Object... object); /** * Log error message using provided throwable, indicating module cannot continue @@ -73,51 +88,49 @@ public interface Logger { * * @param ex Cause of error condition. */ - public void error(Throwable ex); + void error(Throwable ex); /** * Log severe message, indicating application cannot continue to operate. * * @param message severe message */ - public void fatal(String message); + void fatal(String message); /** * Functional module used for logging messages (for example {@code jeeves.engine}). * * @return functional module used for logging messages. */ - public String getModule(); + String getModule(); /** * Configure logger with log4j {@link FileAppender}, used for output. - * + *

      * The file appender is also responsible for log file location provided by {@link #getFileAppender()}.
      *
      * @param fileAppender Log4j FileAppender
      */
-    public void setAppender(FileAppender fileAppender);
+    void setAppender(FileAppender fileAppender);
 
     /**
      * The log file name from the file appender for this module.
-     *
      * Note both module and fallback module are provided allowing providing a better opportunity
      * to learn the log file location. Harvesters use the log file name parent directory as a good
      * location to create {@code /harvester_logs/} folder.
-     *
* Built-in configuration uses log file location {@code logs/geonetwork.log} relative to the current directory, or relative to system property {@code log_file}. * * @return logfile location of {@code logs/geonetwork.log} file */ - public String getFileAppender(); + String getFileAppender(); /** * Access to omodule logging level, providing + * * @return */ - public org.apache.logging.log4j.Level getThreshold(); + org.apache.logging.log4j.Level getThreshold(); } - -//============================================================================= - diff --git a/common/src/main/java/org/fao/geonet/utils/Log.java b/common/src/main/java/org/fao/geonet/utils/Log.java index 094dfb4942e..df0269aaf14 100644 --- a/common/src/main/java/org/fao/geonet/utils/Log.java +++ b/common/src/main/java/org/fao/geonet/utils/Log.java @@ -24,22 +24,18 @@ package org.fao.geonet.utils; -import org.apache.log4j.Priority; import org.apache.log4j.bridge.AppenderWrapper; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.Appender; +import org.apache.logging.log4j.core.LoggerContext; import org.apache.logging.log4j.core.appender.FileAppender; import org.apache.logging.log4j.core.appender.RollingFileAppender; -import org.apache.logging.log4j.core.LoggerContext; import org.apache.logging.log4j.core.config.Configuration; import org.apache.logging.log4j.core.config.LoggerConfig; import java.io.File; -import java.util.Enumeration; - -//============================================================================= /** * Jeeves logging integration, defining functional logger categories by module @@ -125,8 +121,12 @@ public static void debug(String module, Object message) { LogManager.getLogger(module).debug(message); } - public static void debug(String module, Object message, Exception e) { - LogManager.getLogger(module).debug(message, e); + public static void debug(String module, String message, Object... objects) { + LogManager.getLogger(module).debug(message, objects); + } + + public static void debug(String module, String message, Throwable throwable) { + LogManager.getLogger(module).debug(message, throwable); } public static boolean isDebugEnabled(String module) { @@ -157,10 +157,15 @@ public static void info(String module, Object message) { LogManager.getLogger(module).info(message); } - public static void info(String module, Object message, Throwable t) { - LogManager.getLogger(module).info(message, t); + public static void info(String module, String message, Object... objects) { + LogManager.getLogger(module).info(message, objects); + } + + public static void info(String module, String message, Throwable throwable) { + LogManager.getLogger(module).info(message, throwable); } + //--------------------------------------------------------------------------- public static void warning(String module, Object message) { @@ -182,6 +187,14 @@ public static void error(String module, Object message, Throwable t) { LogManager.getLogger(module).error(message, t); } + public static void error(String module, String message, Object... 
objects) { + LogManager.getLogger(module).error(message, objects); + } + + public static void error(String module, String message, Throwable throwable) { + LogManager.getLogger(module).error(message, throwable); + } + //--------------------------------------------------------------------------- public static void fatal(String module, Object message) { @@ -225,18 +238,58 @@ public void debug(String message) { Log.debug(module, message); } + @Override + public void debug(String message, Throwable throwable) { + Log.debug(module, message, throwable); + } + + @Override + public void debug(String message, Object... object) { + Log.debug(module, message, object); + } + public void info(String message) { Log.info(module, message); } + @Override + public void info(String message, Throwable throwable) { + Log.info(module, message, throwable); + } + + @Override + public void info(String message, Object... object) { + Log.info(module, message, object); + } + public void warning(String message) { Log.warning(module, message); } + @Override + public void warning(String message, Throwable throwable) { + Log.warning(module, message, throwable); + } + + @Override + public void warning(String message, Object... object) { + + } + public void error(String message) { Log.error(module, message); } + @Override + public void error(String message, Throwable throwable) { + Log.error(module, message, throwable); + } + + @Override + public void error(String message, Object... object) { + Log.error(module, message, object); + } + public void fatal(String message) { Log.fatal(module, message); } @@ -279,7 +332,7 @@ public String getFileAppender() { } } LoggerConfig fallbackConfig = configuration.getLoggers().get(fallbackModule); - if( fallbackConfig != null) { + if (fallbackConfig != null) { for (Appender appender : fallbackConfig.getAppenders().values()) { File file = toLogFile(appender); if (file != null && file.exists()) { diff --git a/common/src/main/java/org/fao/geonet/utils/Xml.java b/common/src/main/java/org/fao/geonet/utils/Xml.java index c796fbfe4ec..9e0b20ee6fd 100644 --- a/common/src/main/java/org/fao/geonet/utils/Xml.java +++ b/common/src/main/java/org/fao/geonet/utils/Xml.java @@ -39,14 +39,7 @@ import org.fao.geonet.utils.nio.NioPathAwareEntityResolver; import org.fao.geonet.utils.nio.NioPathHolder; import org.fao.geonet.utils.nio.PathStreamSource; -import org.jdom.Attribute; -import org.jdom.Content; -import org.jdom.DocType; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.JDOMException; -import org.jdom.Namespace; -import org.jdom.Text; +import org.jdom.*; import org.jdom.filter.ElementFilter; import org.jdom.input.SAXBuilder; import org.jdom.output.Format; @@ -64,27 +57,14 @@ import javax.xml.XMLConstants; import javax.xml.bind.JAXBContext; import javax.xml.bind.Unmarshaller; -import javax.xml.transform.Result; -import javax.xml.transform.Source; -import javax.xml.transform.Transformer; -import javax.xml.transform.TransformerException; -import javax.xml.transform.TransformerFactory; -import javax.xml.transform.URIResolver; +import javax.xml.transform.*; import javax.xml.transform.sax.SAXResult; import javax.xml.transform.stream.StreamResult; import javax.xml.transform.stream.StreamSource; import javax.xml.validation.Schema; import javax.xml.validation.SchemaFactory; import javax.xml.validation.ValidatorHandler; -import java.io.BufferedOutputStream; -import java.io.ByteArrayInputStream; -import java.io.DataInputStream; -import java.io.File; -import java.io.IOException; -import 
java.io.InputStream; -import java.io.OutputStream; -import java.io.PrintStream; -import java.io.StringReader; +import java.io.*; import java.net.HttpURLConnection; import java.net.URI; import java.net.URISyntaxException; @@ -99,14 +79,7 @@ import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.nio.file.Path; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; +import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -403,22 +376,16 @@ public static Element transform(Element xml, Path styleSheetPath, Map params, OutputStream out) throws Exception { StreamResult resStream = new StreamResult(out); - transform(xml, styleSheetPath, resStream, null); + transform(xml, styleSheetPath, resStream, params); out.flush(); } - - public static void transformXml(Element xml, Path styleSheetPath, OutputStream out) throws Exception { - StreamResult resStream = new StreamResult(out); - Map map = new HashMap<>(); - map.put("geonet-force-xml", "xml"); - transform(xml, styleSheetPath, resStream, map); - out.flush(); + public static void transform(Element xml, Path styleSheetPath, OutputStream out) throws Exception { + transform(xml, styleSheetPath, new HashMap<>(), out); } - //-------------------------------------------------------------------------- /** * Transforms an xml tree putting the result to a stream - no parameters. @@ -484,6 +451,9 @@ protected static Path resolvePath(Source s) throws URISyntaxException { /** * Transforms an xml tree putting the result to a stream with optional parameters. + *

+ * Add a geonet-force-xml parameter to force the formatting to be xml. + * The preferred method is to define it using xsl:output. */ public static void transform(Element xml, Path styleSheetPath, Result result, Map params) throws Exception { @@ -515,13 +485,13 @@ protected static Path resolvePath(Source s) throws URISyntaxException { t.setParameter(param.getKey(), param.getValue()); } - if (params.containsKey("geonet-force-xml")) { - ((Controller) t).setOutputProperty("indent", "yes"); - ((Controller) t).setOutputProperty("method", "xml"); - ((Controller) t).setOutputProperty("{http://saxon.sf.net/}indent-spaces", "3"); + if (params.containsKey("geonet-force-xml")) { + ((Controller) t).setOutputProperty("indent", "yes"); + ((Controller) t).setOutputProperty("method", "xml"); + ((Controller) t).setOutputProperty("{http://saxon.sf.net/}indent-spaces", "2"); + } } - } t.transform(srcXml, result); } } diff --git a/common/src/main/java/org/fao/geonet/utils/XmlRequest.java b/common/src/main/java/org/fao/geonet/utils/XmlRequest.java index 7b6a3b69c59..cba8608a556 100644 --- a/common/src/main/java/org/fao/geonet/utils/XmlRequest.java +++ b/common/src/main/java/org/fao/geonet/utils/XmlRequest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2016 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -124,13 +124,13 @@ protected final Element executeAndReadResponse(HttpRequestBase httpMethod) throw " -- Response Code: " + httpResponse.getRawStatusCode()); } - byte[] data = null; + byte[] data; try { data = IOUtils.toByteArray(httpResponse.getBody()); return Xml.loadStream(new ByteArrayInputStream(data)); } catch (JDOMException e) { - throw new BadXmlResponseEx("Response: '" + new String(data, "UTF8") + "' (from URI " + httpMethod.getURI() + ")"); + throw new BadXmlResponseEx("Invalid XML document from URI: " + httpMethod.getURI()); } finally { httpMethod.releaseConnection(); diff --git a/core/pom.xml b/core/pom.xml index f9a6f76215e..30f84917cfc 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.6-SNAPSHOT 4.0.0 @@ -552,7 +552,7 @@ org.owasp.esapi esapi - 2.4.0.0 + 2.5.4.0 log4j diff --git a/core/src/main/java/jeeves/server/context/BasicContext.java b/core/src/main/java/jeeves/server/context/BasicContext.java index da210ed0ecf..00d1769b4c6 100644 --- a/core/src/main/java/jeeves/server/context/BasicContext.java +++ b/core/src/main/java/jeeves/server/context/BasicContext.java @@ -143,21 +143,61 @@ public void debug(final String message) { logger.debug(message); } + @Override + public void debug(String message, Throwable throwable) { + logger.debug(message, throwable); + } + + @Override + public void debug(String message, Object... object) { + logger.debug(message, object); + } + @Override public void info(final String message) { logger.info(message); } + @Override + public void info(String message, Throwable throwable) { + logger.info(message, throwable); + } + + @Override + public void info(String message, Object... object) { + logger.info(message, object); + } + @Override public void warning(final String message) { logger.warning(message); } + @Override + public void warning(String message, Throwable throwable) { + logger.warning(message, throwable); + } + + @Override + public void warning(String message, Object... 
object) { + logger.warning(message, object); + } + @Override public void error(final String message) { logger.error(message); } + @Override + public void error(String message, Throwable throwable) { + logger.error(message, throwable); + } + + @Override + public void error(String message, Object... object) { + logger.error(message, object); + } + @Override public void error(Throwable ex) { logger.error(ex); @@ -200,6 +240,3 @@ public String getNodeId() { return NodeInfo.DEFAULT_NODE; } } - -//============================================================================= - diff --git a/core/src/main/java/org/fao/geonet/analytics/WebAnalyticsConfiguration.java b/core/src/main/java/org/fao/geonet/analytics/WebAnalyticsConfiguration.java new file mode 100644 index 00000000000..d9c2799076e --- /dev/null +++ b/core/src/main/java/org/fao/geonet/analytics/WebAnalyticsConfiguration.java @@ -0,0 +1,45 @@ +/* + * Copyright (C) 2001-2023 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. 
email: geonetwork@osgeo.org + */ + +package org.fao.geonet.analytics; + +public class WebAnalyticsConfiguration { + private String service; + private String javascriptCode; + + public String getService() { + return service; + } + + public void setService(String service) { + this.service = service; + } + + public String getJavascriptCode() { + return javascriptCode; + } + + public void setJavascriptCode(String javascriptCode) { + this.javascriptCode = javascriptCode; + } +} diff --git a/core/src/main/java/org/fao/geonet/api/records/attachments/AbstractStore.java b/core/src/main/java/org/fao/geonet/api/records/attachments/AbstractStore.java index c2c20f8c898..c5291a59bbf 100644 --- a/core/src/main/java/org/fao/geonet/api/records/attachments/AbstractStore.java +++ b/core/src/main/java/org/fao/geonet/api/records/attachments/AbstractStore.java @@ -202,6 +202,12 @@ public String delResources(final ServiceContext context, final String metadataUu return delResources(context, metadataUuid, true); } + @Override + public String delResources(final ServiceContext context, final String metadataUuid, Boolean approved) throws Exception { + int metadataId = canEdit(context, metadataUuid, approved); + return delResources(context, metadataId); + } + @Override public String delResource(final ServiceContext context, final String metadataUuid, final String resourceId) throws Exception { return delResource(context, metadataUuid, resourceId, true); diff --git a/core/src/main/java/org/fao/geonet/api/records/attachments/FilesystemStore.java b/core/src/main/java/org/fao/geonet/api/records/attachments/FilesystemStore.java index 4f2c16ec739..fb0577bc8bd 100644 --- a/core/src/main/java/org/fao/geonet/api/records/attachments/FilesystemStore.java +++ b/core/src/main/java/org/fao/geonet/api/records/attachments/FilesystemStore.java @@ -229,10 +229,10 @@ private Path getPath(ServiceContext context, int metadataId, MetadataResourceVis } @Override - public String delResources(ServiceContext context, String metadataUuid, Boolean approved) throws Exception { - int metadataId = canEdit(context, metadataUuid, approved); + public String delResources(ServiceContext context, int metadataId) throws Exception { Path metadataDir = Lib.resource.getMetadataDir(getDataDirectory(context), metadataId); try { + Log.info(Geonet.RESOURCES, String.format("Deleting all files from metadataId '%d'", metadataId)); IO.deleteFileOrDirectory(metadataDir, true); return String.format("Metadata '%s' directory removed.", metadataId); } catch (Exception e) { diff --git a/core/src/main/java/org/fao/geonet/api/records/attachments/ResourceLoggerStore.java b/core/src/main/java/org/fao/geonet/api/records/attachments/ResourceLoggerStore.java index 75876ab95ff..14d9e74ce56 100644 --- a/core/src/main/java/org/fao/geonet/api/records/attachments/ResourceLoggerStore.java +++ b/core/src/main/java/org/fao/geonet/api/records/attachments/ResourceLoggerStore.java @@ -125,6 +125,13 @@ public String delResources(ServiceContext context, String metadataUuid, Boolean return null; } + public String delResources(ServiceContext context, int metadataId) throws Exception { + if (decoratedStore != null) { + return decoratedStore.delResources(context, metadataId); + } + return null; + } + @Override public String delResource(ServiceContext context, String metadataUuid, String resourceId, Boolean approved) throws Exception { if (decoratedStore != null) { diff --git a/core/src/main/java/org/fao/geonet/api/records/attachments/Store.java 
b/core/src/main/java/org/fao/geonet/api/records/attachments/Store.java index 3e6ad03011a..41dc645b0d1 100644 --- a/core/src/main/java/org/fao/geonet/api/records/attachments/Store.java +++ b/core/src/main/java/org/fao/geonet/api/records/attachments/Store.java @@ -278,12 +278,24 @@ MetadataResource putResource(ServiceContext context, String metadataUuid, String /** * Delete all resources for a metadata * + * @deprecated it is possible that the metadata draft was deleted during the transaction. Use + * String delResources(ServiceContext context, int metadataId) throws Exception; instead. + * * @param context * @param metadataUuid The metadata UUID * @param approved Return the approved version or not */ + @Deprecated String delResources(ServiceContext context, String metadataUuid, Boolean approved) throws Exception; + /** + * Delete all resources for a metadata + * + * @param context + * @param metadataId The metadata ID + */ + String delResources(ServiceContext context, int metadataId) throws Exception; + /** * Delete a resource from the metadata store * diff --git a/core/src/main/java/org/fao/geonet/kernel/AllThesaurus.java b/core/src/main/java/org/fao/geonet/kernel/AllThesaurus.java index 971e1c072a0..361c7fc816b 100644 --- a/core/src/main/java/org/fao/geonet/kernel/AllThesaurus.java +++ b/core/src/main/java/org/fao/geonet/kernel/AllThesaurus.java @@ -27,15 +27,13 @@ import com.google.common.base.Function; import com.google.common.collect.Lists; import com.google.common.collect.Maps; - -import org.locationtech.jts.util.Assert; - import org.fao.geonet.Constants; import org.fao.geonet.constants.Geonet; import org.fao.geonet.exceptions.TermNotFoundException; import org.fao.geonet.kernel.search.keyword.KeywordRelation; import org.fao.geonet.languages.IsoLanguagesMapper; import org.fao.geonet.utils.Log; +import org.locationtech.jts.util.Assert; import org.openrdf.model.GraphException; import org.openrdf.model.URI; import org.openrdf.sesame.config.AccessDeniedException; @@ -46,6 +44,8 @@ import org.openrdf.sesame.repository.local.LocalRepository; import org.springframework.beans.factory.annotation.Autowired; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; @@ -59,9 +59,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; - /** * @author Jesse on 2/27/2015. 
*/ @@ -221,8 +218,7 @@ public synchronized URI addElement(KeywordBean keyword) throws IOException, Acce } @Override - public synchronized Thesaurus removeElement(KeywordBean keyword) throws MalformedQueryException, QueryEvaluationException, - IOException, AccessDeniedException { + public synchronized Thesaurus removeElement(KeywordBean keyword) throws AccessDeniedException { throw new UnsupportedOperationException(); } @@ -237,8 +233,7 @@ public synchronized Thesaurus removeElement(String uri) throws AccessDeniedExcep } @Override - public synchronized URI updateElement(KeywordBean keyword, boolean replace) throws AccessDeniedException, IOException, - MalformedQueryException, QueryEvaluationException, GraphException { + public synchronized URI updateElement(KeywordBean keyword, boolean replace) throws AccessDeniedException { throw new UnsupportedOperationException(); } @@ -266,12 +261,12 @@ public Thesaurus updateCode(KeywordBean bean, String newcode) throws AccessDenie } @Override - public synchronized Thesaurus updateCode(String namespace, String oldcode, String newcode) throws AccessDeniedException, IOException { + public synchronized Thesaurus updateCode(String namespace, String oldcode, String newcode) throws AccessDeniedException { throw new UnsupportedOperationException(); } @Override - public synchronized Thesaurus updateCodeByURI(String olduri, String newuri) throws AccessDeniedException, IOException { + public synchronized Thesaurus updateCodeByURI(String olduri, String newuri) throws AccessDeniedException { throw new UnsupportedOperationException(); } @@ -287,8 +282,7 @@ public IsoLanguagesMapper getIsoLanguageMapper() { } @Override - public synchronized void addRelation(String subject, KeywordRelation related, String relatedSubject) throws AccessDeniedException, - IOException, MalformedQueryException, QueryEvaluationException, GraphException { + public synchronized void addRelation(String subject, KeywordRelation related, String relatedSubject) throws AccessDeniedException { throw new UnsupportedOperationException(); } diff --git a/core/src/main/java/org/fao/geonet/kernel/EditLib.java b/core/src/main/java/org/fao/geonet/kernel/EditLib.java index ae70dcd5e62..873b9c3bcdf 100644 --- a/core/src/main/java/org/fao/geonet/kernel/EditLib.java +++ b/core/src/main/java/org/fao/geonet/kernel/EditLib.java @@ -33,18 +33,10 @@ import java.io.IOException; import java.io.StringReader; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.BitSet; -import java.util.HashMap; -import java.util.Iterator; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; -import java.util.Vector; +import java.util.*; import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; +import java.util.stream.Stream; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.jxpath.ri.parser.Token; @@ -272,7 +264,7 @@ private void addChildToParent(MetadataSchema mdSchema, Element targetElement, El // remove everything and then, depending on removeExisting // readd all children to the element and assure a correct position for the new one: at the end of the others // or just add the new one - List existingAllType = new ArrayList(targetElement.getChildren()); + List existingAllType = new ArrayList(targetElement.getChildren()); targetElement.removeContent(); for (String singleType: type.getAlElements()) { List existingForThisType = filterOnQname(existingAllType, singleType); @@ -283,9 
+275,22 @@ private void addChildToParent(MetadataSchema mdSchema, Element targetElement, El LOGGER_ADD_ELEMENT.debug("#### - add child {}", existingChild.toString()); } } - if (qname.equals(singleType)) + if (qname.equals(singleType)) { targetElement.addContent(childToAdd); + } + + filterOnQname(existingAllType, "geonet:child") + .stream() + .filter(gnChild -> (gnChild.getAttributeValue("prefix") + ":" + gnChild.getAttributeValue("name")).equals(singleType)) + .findFirst() + .ifPresent(targetElement::addContent); } + + Stream.concat( + filterOnQname(existingAllType, "geonet:element").stream(), + filterOnQname(existingAllType, "geonet:attribute").stream() + ).forEach(targetElement::addContent); + } public void addXMLFragments(String schema, Element md, Map xmlInputs) throws Exception { diff --git a/core/src/main/java/org/fao/geonet/kernel/GeonetworkDataDirectory.java b/core/src/main/java/org/fao/geonet/kernel/GeonetworkDataDirectory.java index 86a0cdca444..cc5296232bd 100644 --- a/core/src/main/java/org/fao/geonet/kernel/GeonetworkDataDirectory.java +++ b/core/src/main/java/org/fao/geonet/kernel/GeonetworkDataDirectory.java @@ -27,8 +27,11 @@ import jeeves.server.sources.http.JeevesServlet; import org.fao.geonet.ApplicationContextHolder; import org.fao.geonet.constants.Geonet; +import org.fao.geonet.exceptions.BadParameterEx; +import org.fao.geonet.utils.FilePathChecker; import org.fao.geonet.utils.IO; import org.fao.geonet.utils.Log; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationEvent; import org.springframework.context.ConfigurableApplicationContext; @@ -63,6 +66,9 @@ public class GeonetworkDataDirectory { */ public static final String GEONETWORK_BEAN_KEY = "GeonetworkDataDirectory"; + @Autowired + SchemaManager schemaManager; + private Path webappDir; private Path systemDataDir; private Path indexConfigDir; @@ -797,11 +803,18 @@ public Path getXsltConversion(String conversionId) { if (conversionId.startsWith(IMPORT_STYLESHEETS_SCHEMA_PREFIX)) { String[] pathToken = conversionId.split(":"); if (pathToken.length == 3) { + String schema = pathToken[1]; + if (!schemaManager.existsSchema(schema)) { + throw new BadParameterEx(String.format( + "Conversion not found. Schema '%s' is not registered in this catalog.", schema)); + } + FilePathChecker.verify(pathToken[2]); return this.getSchemaPluginsDir() .resolve(pathToken[1]) .resolve(pathToken[2] + ".xsl"); } } else { + FilePathChecker.verify(conversionId); return this.getWebappDir().resolve(Geonet.Path.IMPORT_STYLESHEETS). resolve(conversionId + ".xsl"); } diff --git a/core/src/main/java/org/fao/geonet/kernel/SchemaManager.java b/core/src/main/java/org/fao/geonet/kernel/SchemaManager.java index 14e09a62b12..4139d045ac5 100644 --- a/core/src/main/java/org/fao/geonet/kernel/SchemaManager.java +++ b/core/src/main/java/org/fao/geonet/kernel/SchemaManager.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2023 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -1383,7 +1383,6 @@ private void checkAppSupported(Element schemaPluginCatRoot) throws Exception { " requires max Geonetwork version: " + majorAppVersionSupported + ", current is: " + version + ". 
Skip load schema."); removes.add(schemaInfo.getKey()); - continue; } } @@ -1901,7 +1900,7 @@ public boolean accept(Path entry) throws IOException { try (DirectoryStream schemaplugins = Files.newDirectoryStream(schemaPluginDir, xsdFilter)) { boolean missingXsdFiles = true; for (Path schemaplugin : schemaplugins) { - IO.copyDirectoryOrFile(schemaplugin, webAppDirSchemaXSD.resolve(schemaplugin), false); + IO.copyDirectoryOrFile(schemaplugin, webAppDirSchemaXSD.resolve(schemaplugin.getFileName()), false); missingXsdFiles = false; } diff --git a/core/src/main/java/org/fao/geonet/kernel/Thesaurus.java b/core/src/main/java/org/fao/geonet/kernel/Thesaurus.java index d9937ed8958..91a506b57ab 100644 --- a/core/src/main/java/org/fao/geonet/kernel/Thesaurus.java +++ b/core/src/main/java/org/fao/geonet/kernel/Thesaurus.java @@ -76,6 +76,7 @@ public class Thesaurus { private static final String DEFAULT_THESAURUS_NAMESPACE = "http://custom.shared.obj.ch/concept#"; private static final String RDF_NAMESPACE = "http://www.w3.org/1999/02/22-rdf-syntax-ns#"; + private static final String RDF_SCHEMA_NAMESPACE = "http://www.w3.org/2000/01/rdf-schema#"; private static final String SKOS_NAMESPACE = "http://www.w3.org/2004/02/skos/core#"; @@ -360,7 +361,8 @@ public boolean hasConceptScheme(String uri) { try { return performRequest(query).getRowCount() > 0; } catch (Exception e) { - Log.error(Geonet.THESAURUS_MAN, "Error retrieving concept scheme for " + thesaurusFile + ". Error is: " + e.getMessage()); + Log.error(Geonet.THESAURUS_MAN, + String.format("Error retrieving concept scheme for %s. Error is: %s", thesaurusFile, e.getMessage())); throw new RuntimeException(e); } } @@ -380,7 +382,8 @@ public List getConceptSchemes() { } return ret; } catch (Exception e) { - Log.error(Geonet.THESAURUS_MAN, "Error retrieving concept schemes for " + thesaurusFile + ". Error is: " + e.getMessage()); + Log.error(Geonet.THESAURUS_MAN, String.format( + "Error retrieving concept schemes for %s. Error is: %s", thesaurusFile, e.getMessage())); return Collections.emptyList(); } } @@ -452,8 +455,7 @@ public synchronized URI addElement(KeywordBean keyword) throws IOException, Acce /** * Remove keyword from thesaurus. */ - public synchronized Thesaurus removeElement(KeywordBean keyword) throws MalformedQueryException, - QueryEvaluationException, IOException, AccessDeniedException { + public synchronized Thesaurus removeElement(KeywordBean keyword) throws AccessDeniedException { String namespace = keyword.getNameSpaceCode(); String code = keyword.getRelativeCode(); @@ -518,8 +520,7 @@ private String toiso639_1_Lang(String lang) { * languages) and the coordinates will only be updated if they are non-empty * strings. */ - public synchronized URI updateElement(KeywordBean keyword, boolean replace) throws AccessDeniedException, IOException, - MalformedQueryException, QueryEvaluationException, GraphException { + public synchronized URI updateElement(KeywordBean keyword, boolean replace) throws AccessDeniedException { THESAURUS_SEARCH_CACHE.invalidateAll(); // Get thesaurus graph @@ -661,7 +662,7 @@ public Thesaurus updateCode(KeywordBean bean, String newcode) throws AccessDenie * Update concept code by creating URI from namespace and code. This is recommended when * thesaurus concept identifiers contains # eg. 
http://vocab.nerc.ac.uk/collection/P07/current#CFV13N44 */ - public synchronized Thesaurus updateCode(String namespace, String oldcode, String newcode) throws AccessDeniedException, IOException { + public synchronized Thesaurus updateCode(String namespace, String oldcode, String newcode) throws AccessDeniedException { Graph myGraph = repository.getGraph(); ValueFactory myFactory = myGraph.getValueFactory(); @@ -679,7 +680,7 @@ public synchronized Thesaurus updateCode(String namespace, String oldcode, Strin * * eg. http://vocab.nerc.ac.uk/collection/P07/current/CFV13N44/ */ - public synchronized Thesaurus updateCodeByURI(String olduri, String newuri) throws AccessDeniedException, IOException { + public synchronized Thesaurus updateCodeByURI(String olduri, String newuri) throws AccessDeniedException { Graph myGraph = repository.getGraph(); ValueFactory myFactory = myGraph.getValueFactory(); @@ -894,7 +895,11 @@ private void retrieveDublinCore(Element thesaurusEl) { // } private void retrieveMultiLingualTitles(Element thesaurusEl) { try { - String xpathTitles = "skos:ConceptScheme/dc:title[@xml:lang]|skos:ConceptScheme/dcterms:title[@xml:lang]|rdf:Description[rdf:type/@rdf:resource = 'http://www.w3.org/2004/02/skos/core#ConceptScheme']/dc:title[@xml:lang]"; + String xpathTitles = "skos:ConceptScheme/dc:title[@xml:lang]" + + "|skos:ConceptScheme/dcterms:title[@xml:lang]" + + "|skos:ConceptScheme/rdfs:label[@xml:lang]" + + "|skos:ConceptScheme/skos:prefLabel[@xml:lang]" + + "|rdf:Description[rdf:type/@rdf:resource = 'http://www.w3.org/2004/02/skos/core#ConceptScheme']/dc:title[@xml:lang]"; multilingualTitles.clear(); multilingualTitles.putAll(retrieveMultilingualField(thesaurusEl, xpathTitles)); } catch (Exception e) { @@ -944,25 +949,23 @@ private void retrieveThesaurusInformation(Path thesaurusFile, String defaultTitl try { Element thesaurusEl = Xml.loadFile(thesaurusFile); - List theNSs = new ArrayList<>(); - Namespace rdfNamespace = Namespace.getNamespace("rdf", RDF_NAMESPACE); - theNSs.add(rdfNamespace); - theNSs.add(Namespace.getNamespace("skos", SKOS_NAMESPACE)); - theNSs.add(Namespace.getNamespace("dc", DC_NAMESPACE)); - theNSs.add(Namespace.getNamespace("dcterms", DCTERMS_NAMESPACE)); + List theNSs = getThesaurusNamespaces(); this.defaultNamespace = null; retrieveMultiLingualTitles(thesaurusEl); retrieveDublinCore(thesaurusEl); Element titleEl = Xml.selectElement(thesaurusEl, - "skos:ConceptScheme/dc:title|skos:ConceptScheme/dcterms:title|" + - "skos:Collection/dc:title|skos:Collection/dcterms:title|" + - "rdf:Description/dc:title|rdf:Description/dcterms:title", theNSs); + "skos:ConceptScheme/dc:title|skos:ConceptScheme/dcterms:title" + + "|skos:ConceptScheme/rdfs:label|skos:ConceptScheme/skos:prefLabel" + + "|skos:Collection/dc:title|skos:Collection/dcterms:title" + + "|rdf:Description/dc:title|rdf:Description/dcterms:title", theNSs); if (titleEl != null) { this.title = titleEl.getValue(); - this.defaultNamespace = titleEl.getParentElement().getAttributeValue("about", rdfNamespace); + this.defaultNamespace = titleEl + .getParentElement() + .getAttributeValue("about", Namespace.getNamespace("rdf", RDF_NAMESPACE)); } else { this.title = defaultTitle; this.defaultNamespace = DEFAULT_THESAURUS_NAMESPACE; @@ -1027,11 +1030,13 @@ private void retrieveThesaurusInformation(Path thesaurusFile, String defaultTitl } if (Log.isDebugEnabled(Geonet.THESAURUS_MAN)) { - Log.debug(Geonet.THESAURUS_MAN, "Thesaurus information: " + this.title + " (" + this.date + ")"); + Log.debug(Geonet.THESAURUS_MAN, 
String.format( + "Thesaurus information: %s (%s)", this.title, this.date)); } } catch (Exception ex) { if (!ignoreMissingError) - Log.error(Geonet.THESAURUS_MAN, "Error getting thesaurus info for " + thesaurusFile + ". Error is: " + ex.getMessage()); + Log.error(Geonet.THESAURUS_MAN, String.format( + "Error getting thesaurus info for %s. Error is: %s", thesaurusFile, ex.getMessage())); } } @@ -1059,12 +1064,11 @@ private Date parseThesaurusDate(Element dateEl) { StringBuffer errorMsg = new StringBuffer("Error parsing the thesaurus date value: "); errorMsg.append(dateVal); - boolean success = false; for (SimpleDateFormat df : dfList) { try { thesaurusDate = df.parse(dateVal); - success = true; + return thesaurusDate; } catch (Exception ex) { // Ignore the exception and try next format errorMsg.append("\n * with format: "); @@ -1074,11 +1078,9 @@ private Date parseThesaurusDate(Element dateEl) { } } // Report error if no success - if (!success) { - errorMsg.append("\nCheck thesaurus date in "); - errorMsg.append(this.fname); - Log.error(Geonet.THESAURUS_MAN, errorMsg.toString()); - } + errorMsg.append("\nCheck thesaurus date in "); + errorMsg.append(this.fname); + Log.error(Geonet.THESAURUS_MAN, errorMsg.toString()); return thesaurusDate; } @@ -1102,8 +1104,7 @@ public IsoLanguagesMapper getIsoLanguageMapper() { * @param subject the keyword that is related to the other keyword * @param related the relation between the two keywords */ - public synchronized void addRelation(String subject, KeywordRelation related, String relatedSubject) throws AccessDeniedException, IOException, - MalformedQueryException, QueryEvaluationException, GraphException { + public synchronized void addRelation(String subject, KeywordRelation related, String relatedSubject) throws AccessDeniedException { THESAURUS_SEARCH_CACHE.invalidateAll(); Graph myGraph = repository.getGraph(); @@ -1126,7 +1127,7 @@ public synchronized void addRelation(String subject, KeywordRelation related, St * @return keyword */ public KeywordBean getKeyword(String uri, String... 
languages) { - String cacheKey = "getKeyword" + uri + Arrays.stream(languages).collect(Collectors.joining("")); + String cacheKey = "getKeyword" + uri + String.join("", languages); Object cacheValue = THESAURUS_SEARCH_CACHE.getIfPresent(cacheKey); if (cacheValue != null) { return (KeywordBean) cacheValue; @@ -1370,6 +1371,7 @@ private ArrayList classifyTermWithNoBroaderTerms(KeywordBean term) private List getThesaurusNamespaces() { List theNSs = new ArrayList<>(); theNSs.add(Namespace.getNamespace("rdf", RDF_NAMESPACE)); + theNSs.add(Namespace.getNamespace("rdfs", RDF_SCHEMA_NAMESPACE)); theNSs.add(Namespace.getNamespace("skos", SKOS_NAMESPACE)); theNSs.add(Namespace.getNamespace("dc", DC_NAMESPACE)); theNSs.add(Namespace.getNamespace("dcterms", DCTERMS_NAMESPACE)); diff --git a/core/src/main/java/org/fao/geonet/kernel/WatchListNotifier.java b/core/src/main/java/org/fao/geonet/kernel/WatchListNotifier.java index 7291dd8ff8b..09a17638f1a 100644 --- a/core/src/main/java/org/fao/geonet/kernel/WatchListNotifier.java +++ b/core/src/main/java/org/fao/geonet/kernel/WatchListNotifier.java @@ -30,9 +30,13 @@ import org.fao.geonet.domain.Selection; import org.fao.geonet.domain.User; import org.fao.geonet.kernel.setting.SettingManager; +import org.fao.geonet.languages.FeedbackLanguages; import org.fao.geonet.repository.SelectionRepository; import org.fao.geonet.repository.UserRepository; import org.fao.geonet.repository.UserSavedSelectionRepository; +import org.fao.geonet.util.LocalizedEmail; +import org.fao.geonet.util.LocalizedEmailParameter; +import org.fao.geonet.util.LocalizedEmailComponent; import org.fao.geonet.util.MailUtil; import org.fao.geonet.utils.Log; import org.quartz.JobExecutionContext; @@ -44,6 +48,10 @@ import java.util.*; import static org.fao.geonet.kernel.setting.Settings.SYSTEM_USER_LASTNOTIFICATIONDATE; +import static org.fao.geonet.util.LocalizedEmailComponent.ComponentType.*; +import static org.fao.geonet.util.LocalizedEmailComponent.KeyType; +import static org.fao.geonet.util.LocalizedEmailComponent.ReplacementType.*; +import static org.fao.geonet.util.LocalizedEmailParameter.ParameterType; /** * Task checking on a regular basis the list of records @@ -53,15 +61,13 @@ public class WatchListNotifier extends QuartzJobBean { private String lastNotificationDate; private String nextLastNotificationDate; - private String subject; - private String message; - private String recordMessage; private String updatedRecordPermalink; private String language = "eng"; private SettingManager settingManager; private ApplicationContext appContext; private UserSavedSelectionRepository userSavedSelectionRepository; private UserRepository userRepository; + private FeedbackLanguages feedbackLanguages; @Value("${usersavedselection.watchlist.searchurl}") private String permalinkApp = "catalog.search#/search?_uuid={{filter}}"; @@ -92,20 +98,7 @@ public WatchListNotifier() { protected void executeInternal(JobExecutionContext jobContext) throws JobExecutionException { appContext = ApplicationContextHolder.get(); settingManager = appContext.getBean(SettingManager.class); - - ResourceBundle messages = ResourceBundle.getBundle("org.fao.geonet.api.Messages", - new Locale( - language - )); - - try { - subject = messages.getString("user_watchlist_subject"); - message = messages.getString("user_watchlist_message"); - recordMessage = messages.getString("user_watchlist_message_record"). 
- replace("{{link}}", - settingManager.getNodeURL() + permalinkRecordApp); - } catch (Exception e) { - } + feedbackLanguages = appContext.getBean(FeedbackLanguages.class); updatedRecordPermalink = settingManager.getSiteURL(language); @@ -166,6 +159,9 @@ protected void executeInternal(JobExecutionContext jobContext) throws JobExecuti } private void notify(Integer selectionId, Integer userId) { + + Locale[] feedbackLocales = feedbackLanguages.getLocales(new Locale(language)); + // Get metadata with changes since last notification // TODO: Could be relevant to get versionning system info once available // and report deleted records too. @@ -188,27 +184,51 @@ private void notify(Integer selectionId, Integer userId) { // TODO: We should send email depending on user language Optional user = userRepository.findById(userId); if (user.isPresent() && StringUtils.isNotEmpty(user.get().getEmail())) { + String url = updatedRecordPermalink + + permalinkApp.replace("{{filter}}", String.join(" or ", updatedRecords)); - // Build message - StringBuffer listOfUpdateMessage = new StringBuffer(); - for (String record : updatedRecords) { - try { - listOfUpdateMessage.append( - MailUtil.compileMessageWithIndexFields(recordMessage, record, this.language) - ); - } catch (Exception e) { - Log.error(Geonet.USER_WATCHLIST, e.getMessage(), e); + LocalizedEmailComponent emailSubjectComponent = new LocalizedEmailComponent(SUBJECT, "user_watchlist_subject", KeyType.MESSAGE_KEY, POSITIONAL_FORMAT); + LocalizedEmailComponent emailMessageComponent = new LocalizedEmailComponent(MESSAGE, "user_watchlist_message", KeyType.MESSAGE_KEY, POSITIONAL_FORMAT); + + for (Locale feedbackLocale : feedbackLocales) { + + // Build message + StringBuffer listOfUpdateMessage = new StringBuffer(); + for (String record : updatedRecords) { + LocalizedEmailComponent recordMessageComponent = new LocalizedEmailComponent(NESTED, "user_watchlist_message_record", KeyType.MESSAGE_KEY, NAMED_FORMAT); + recordMessageComponent.enableCompileWithIndexFields(record); + recordMessageComponent.enableReplaceLinks(true); + try { + listOfUpdateMessage.append( + recordMessageComponent.parseMessage(feedbackLocale) + ); + } catch (Exception e) { + Log.error(Geonet.USER_WATCHLIST, e.getMessage(), e); + } } + + emailSubjectComponent.addParameters( + feedbackLocale, + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 1, settingManager.getSiteName()), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 2, updatedRecords.size()), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 3, lastNotificationDate) + ); + + emailMessageComponent.addParameters( + feedbackLocale, + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 1, listOfUpdateMessage.toString()), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 2, lastNotificationDate), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 3, url), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 4, url) + ); + } - String url = updatedRecordPermalink + - permalinkApp.replace("{{filter}}", String.join(" or ", updatedRecords)); - String mailSubject = String.format(subject, - settingManager.getSiteName(), updatedRecords.size(), lastNotificationDate); - String htmlMessage = String.format(message, - listOfUpdateMessage.toString(), - lastNotificationDate, - url, url); + LocalizedEmail localizedEmail = new LocalizedEmail(true); + localizedEmail.addComponents(emailSubjectComponent, emailMessageComponent); + + String mailSubject = localizedEmail.getParsedSubject(feedbackLocales); + String htmlMessage = 
localizedEmail.getParsedMessage(feedbackLocales); if (Log.isDebugEnabled(Geonet.USER_WATCHLIST)) { Log.debug(Geonet.USER_WATCHLIST, String.format( diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataStatus.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataStatus.java index 96b0aa34ee1..16cfc9719ae 100644 --- a/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataStatus.java +++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataStatus.java @@ -69,11 +69,14 @@ public interface IMetadataStatus { MetadataStatus setStatusExt(ServiceContext context, int id, int status, ISODate changeDate, String changeMessage) throws Exception; /** - * Set status of metadata id and do not reindex metadata id afterwards. + * Set status of metadata id and reindex metadata id afterwards based on updateIndex flag + * + * @param status metadata status to set + * @param updateIndex index update flag * * @return the saved status entity object */ - MetadataStatus setStatusExt(MetadataStatus status) throws Exception; + MetadataStatus setStatusExt(MetadataStatus status, boolean updateIndex) throws Exception; /** * Set status of metadata id and reindex metadata id afterwards. diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataIndexer.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataIndexer.java index 8d1cd0db9a4..9b39ce6118b 100644 --- a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataIndexer.java +++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataIndexer.java @@ -383,10 +383,10 @@ public void indexMetadata(final String metadataId, if (!schemaManager.existsSchema(schema)) { fields.put(IndexFields.DRAFT, "n"); fields.put(IndexFields.INDEXING_ERROR_FIELD, true); - fields.put(IndexFields.INDEXING_ERROR_MSG, String.format( - "Schema '%s' is not registered in this catalog. 
Install it or remove those records", - schema - )); + fields.put(IndexFields.INDEXING_ERROR_MSG, + searchManager.createIndexingErrorMsgObject("indexingErrorMsg-schemaNotRegistered", + "error", + Map.of("record", metadataId, "schema", schema))); searchManager.index(null, md, indexKey, fields, metadataType, forceRefreshReaders, indexingMode); Log.error(Geonet.DATA_MANAGER, String.format( @@ -549,6 +549,7 @@ public void indexMetadata(final String metadataId, metadataId, indexingMode, System.currentTimeMillis() - start)); } + @Override public void indexMetadataPrivileges(String uuid, int id) throws Exception { Set operationFields = new HashSet<>(); diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataStatus.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataStatus.java index 150bd65a81c..cf5f7fc972d 100644 --- a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataStatus.java +++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataStatus.java @@ -160,9 +160,11 @@ public MetadataStatus setStatus(ServiceContext context, int id, int status, ISOD } @Override - public MetadataStatus setStatusExt(MetadataStatus metatatStatus) throws Exception { + public MetadataStatus setStatusExt(MetadataStatus metatatStatus, boolean updateIndex) throws Exception { metadataStatusRepository.save(metatatStatus); - metadataIndexer.indexMetadata(metatatStatus.getMetadataId() + "", true, IndexingMode.full); + if (updateIndex) { + metadataIndexer.indexMetadata(metatatStatus.getMetadataId() + "", true, IndexingMode.full); + } return metatatStatus; } diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataUtils.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataUtils.java index 040acbf4aca..c5cc81ad1da 100644 --- a/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataUtils.java +++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataUtils.java @@ -589,7 +589,7 @@ protected String createDraft(ServiceContext context, String templateId, String g List listOfStatusChange = new ArrayList<>(1); listOfStatusChange.add(metadataStatus); - sa.onStatusChange(listOfStatusChange); + sa.onStatusChange(listOfStatusChange, true); } } diff --git a/core/src/main/java/org/fao/geonet/kernel/metadata/DefaultStatusActions.java b/core/src/main/java/org/fao/geonet/kernel/metadata/DefaultStatusActions.java index e8678d483ab..58cc82a4459 100644 --- a/core/src/main/java/org/fao/geonet/kernel/metadata/DefaultStatusActions.java +++ b/core/src/main/java/org/fao/geonet/kernel/metadata/DefaultStatusActions.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2023 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -38,15 +38,22 @@ import org.fao.geonet.kernel.setting.Settings; import org.fao.geonet.repository.*; import org.fao.geonet.repository.specification.GroupSpecs; +import org.fao.geonet.util.LocalizedEmail; +import org.fao.geonet.util.LocalizedEmailParameter; +import org.fao.geonet.util.LocalizedEmailComponent; +import org.fao.geonet.languages.FeedbackLanguages; import org.fao.geonet.util.MailUtil; import org.fao.geonet.utils.Log; import org.springframework.beans.factory.annotation.Autowired; 
import org.springframework.context.ApplicationContext; -import java.text.MessageFormat; import java.util.*; import static org.fao.geonet.kernel.setting.Settings.SYSTEM_FEEDBACK_EMAIL; +import static org.fao.geonet.util.LocalizedEmailComponent.ComponentType.*; +import static org.fao.geonet.util.LocalizedEmailComponent.KeyType; +import static org.fao.geonet.util.LocalizedEmailComponent.ReplacementType.*; +import static org.fao.geonet.util.LocalizedEmailParameter.ParameterType; public class DefaultStatusActions implements StatusActions { @@ -136,7 +143,7 @@ public void onEdit(int id, boolean minorEdit) throws Exception { * @return * @throws Exception */ - public Map onStatusChange(List listOfStatus) throws Exception { + public Map onStatusChange(List listOfStatus, boolean updateIndex) throws Exception { if (listOfStatus.stream().map(MetadataStatus::getMetadataId).distinct().count() != listOfStatus.size()) { throw new IllegalArgumentException("Multiple status update received on the same metadata"); @@ -179,16 +186,7 @@ public Map onStatusChange(List listOf context.debug("Change status of metadata with id " + status.getMetadataId() + " from " + currentStatusId + " to " + statusId); // we know we are allowed to do the change, apply any side effects - boolean deleted = applyStatusChange(status.getMetadataId(), status, statusId); - - // inform content reviewers if the status is submitted - try { - notify(getUserToNotify(status), status); - } catch (Exception e) { - context.warning(String.format( - "Failed to send notification on status change for metadata %s with status %s. Error is: %s", - status.getMetadataId(), status.getStatusValue().getId(), e.getMessage())); - } + boolean deleted = applyStatusChange(status.getMetadataId(), status, statusId, updateIndex); if (deleted) { results.put(status.getMetadataId(), StatusChangeType.DELETED); @@ -208,6 +206,15 @@ public Map onStatusChange(List listOf } } + // inform content reviewers if the status is submitted + try { + notify(getUserToNotify(status), status); + } catch (Exception e) { + context.warning(String.format( + "Failed to send notification on status change for metadata %s with status %s. Error is: %s", + status.getMetadataId(), status.getStatusValue().getId(), e.getMessage())); + } + } return results; @@ -218,10 +225,10 @@ public Map onStatusChange(List listOf * eg. if APPROVED, publish a record, * if RETIRED, unpublish or delete the record. 
*/ - private boolean applyStatusChange(int metadataId, MetadataStatus status, String toStatusId) throws Exception { + private boolean applyStatusChange(int metadataId, MetadataStatus status, String toStatusId, boolean updateIndex) throws Exception { boolean deleted = false; if (!deleted) { - metadataStatusManager.setStatusExt(status); + metadataStatusManager.setStatusExt(status, updateIndex); } return deleted; } @@ -240,61 +247,106 @@ protected void notify(List userToNotify, MetadataStatus status) throws Exc return; } - ResourceBundle messages = ResourceBundle.getBundle("org.fao.geonet.api.Messages", new Locale(this.language)); + ApplicationContext applicationContext = ApplicationContextHolder.get(); + FeedbackLanguages feedbackLanguages = applicationContext.getBean(FeedbackLanguages.class); - String translatedStatusName = getTranslatedStatusName(status.getStatusValue().getId()); - // TODO: Refactor to allow custom messages based on the type of status - String subjectTemplate = ""; - try { - subjectTemplate = messages - .getString("status_change_" + status.getStatusValue().getName() + "_email_subject"); - } catch (MissingResourceException e) { - subjectTemplate = messages.getString("status_change_default_email_subject"); - } - String subject = MessageFormat.format(subjectTemplate, siteName, translatedStatusName, replyToDescr // Author of the change - ); + Locale[] feedbackLocales = feedbackLanguages.getLocales(new Locale(this.language)); Set listOfId = new HashSet<>(1); listOfId.add(status.getMetadataId()); - String textTemplate = ""; - try { - textTemplate = messages.getString("status_change_" + status.getStatusValue().getName() + "_email_text"); - } catch (MissingResourceException e) { - textTemplate = messages.getString("status_change_default_email_text"); - } - - // Replace link in message - ApplicationContext applicationContext = ApplicationContextHolder.get(); - SettingManager sm = applicationContext.getBean(SettingManager.class); - textTemplate = textTemplate.replace("{{link}}", sm.getNodeURL()+ "api/records/'{{'index:uuid'}}'"); - UserRepository userRepository = context.getBean(UserRepository.class); User owner = userRepository.findById(status.getOwner()).orElse(null); IMetadataUtils metadataRepository = ApplicationContextHolder.get().getBean(IMetadataUtils.class); AbstractMetadata metadata = metadataRepository.findOne(status.getMetadataId()); - String metadataUrl = metadataUtils.getDefaultUrl(metadata.getUuid(), this.language); + String subjectTemplateKey = ""; + String textTemplateKey = ""; + boolean failedToFindASpecificSubjectTemplate = false; + boolean failedToFindASpecificTextTemplate = false; - String message = MessageFormat.format(textTemplate, replyToDescr, // Author of the change - status.getChangeMessage(), translatedStatusName, status.getChangeDate(), status.getDueDate(), - status.getCloseDate(), - owner == null ? 
"" : Joiner.on(" ").skipNulls().join(owner.getName(), owner.getSurname()), - metadataUrl); + for (Locale feedbackLocale: feedbackLocales) { + ResourceBundle resourceBundle = ResourceBundle.getBundle("org.fao.geonet.api.Messages", feedbackLocale); + if (!failedToFindASpecificSubjectTemplate) { + try { + subjectTemplateKey = "status_change_" + status.getStatusValue().getName() + "_email_subject"; + resourceBundle.getString(subjectTemplateKey); + } catch (MissingResourceException e) { + failedToFindASpecificSubjectTemplate = true; + } + } + + if (!failedToFindASpecificTextTemplate) { + try { + textTemplateKey = "status_change_" + status.getStatusValue().getName() + "_email_text"; + resourceBundle.getString(textTemplateKey); + } catch (MissingResourceException e) { + failedToFindASpecificTextTemplate = true; + } + } + + if ((failedToFindASpecificSubjectTemplate) && (failedToFindASpecificTextTemplate)) break; + } + + if (failedToFindASpecificSubjectTemplate) { + subjectTemplateKey = "status_change_default_email_subject"; + } + + if (failedToFindASpecificTextTemplate) { + textTemplateKey = "status_change_default_email_text"; + } + + LocalizedEmailComponent emailSubjectComponent = new LocalizedEmailComponent(SUBJECT, subjectTemplateKey, KeyType.MESSAGE_KEY, NUMERIC_FORMAT); + emailSubjectComponent.enableCompileWithIndexFields(metadata.getUuid()); + + LocalizedEmailComponent emailMessageComponent = new LocalizedEmailComponent(MESSAGE, textTemplateKey, KeyType.MESSAGE_KEY, NUMERIC_FORMAT); + emailMessageComponent.enableCompileWithIndexFields(metadata.getUuid()); + emailMessageComponent.enableReplaceLinks(false); + + LocalizedEmailComponent emailSalutationComponent = new LocalizedEmailComponent(SALUTATION, "{{userName}},\n\n", KeyType.RAW_VALUE, NONE); + + for (Locale feedbackLocale : feedbackLocales) { + // TODO: Refactor to allow custom messages based on the type of status + + emailSubjectComponent.addParameters( + feedbackLocale, + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 1, siteName), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 2, getTranslatedStatusName(status.getStatusValue().getId(), feedbackLocale)), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 3, replyToDescr) + ); + + emailMessageComponent.addParameters( + feedbackLocale, + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 1, replyToDescr), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 2, status.getChangeMessage()), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 3, getTranslatedStatusName(status.getStatusValue().getId(), feedbackLocale)), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 4, status.getChangeDate()), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 5, status.getDueDate()), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 6, status.getCloseDate()), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 7, owner == null ? 
"" : Joiner.on(" ").skipNulls().join(owner.getName(), owner.getSurname())), + new LocalizedEmailParameter(ParameterType.RAW_VALUE, 8, metadataUtils.getDefaultUrl(metadata.getUuid(), feedbackLocale.getISO3Language())) + ); + } + + LocalizedEmail localizedEmail = new LocalizedEmail(false); + localizedEmail.addComponents(emailSubjectComponent, emailMessageComponent, emailSalutationComponent); + + String subject = localizedEmail.getParsedSubject(feedbackLocales); - subject = MailUtil.compileMessageWithIndexFields(subject, metadata.getUuid(), this.language); - message = MailUtil.compileMessageWithIndexFields(message, metadata.getUuid(), this.language); for (User user : userToNotify) { - String salutation = Joiner.on(" ").skipNulls().join(user.getName(), user.getSurname()); - //If we have a salutation then end it with a "," - if (StringUtils.isEmpty(salutation)) { - salutation = ""; + String userName = Joiner.on(" ").skipNulls().join(user.getName(), user.getSurname()); + //If we have a userName add the salutation + String message; + if (StringUtils.isEmpty(userName)) { + message = localizedEmail.getParsedMessage(feedbackLocales); } else { - salutation += ",\n\n"; + Map replacements = new HashMap<>(); + replacements.put("{{userName}}", userName); + message = localizedEmail.getParsedMessage(feedbackLocales, replacements); } - sendEmail(user.getEmail(), subject, salutation + message); + sendEmail(user.getEmail(), subject, message); } } @@ -317,6 +369,25 @@ protected List getUserToNotify(MetadataStatus status) { return new ArrayList<>(); } + // If status is DRAFT and previous status is SUBMITTED, which means either: + // - a cancel working copy (from editor) --> should be notified the reviewer. + // - rejection (from reviewer) --> should be notified the editor. + // and the notification level is recordUserAuthor or recordProfileReviewer, + // then adjust the notification level, depending on the user role + if ((status.getStatusValue().getId() == Integer.parseInt(StatusValue.Status.DRAFT)) && + (!StringUtils.isEmpty(status.getPreviousState()) && + (status.getPreviousState().equals(StatusValue.Status.SUBMITTED))) && + (notificationLevel.equals(StatusValueNotificationLevel.recordUserAuthor) || (notificationLevel.equals(StatusValueNotificationLevel.recordProfileReviewer)))) { + UserRepository userRepository = ApplicationContextHolder.get().getBean(UserRepository.class); + Optional user = userRepository.findById(status.getUserId()); + if (user.isPresent()) { + if (user.get().getProfile() == Profile.Editor) { + notificationLevel = StatusValueNotificationLevel.recordProfileReviewer; + } else { + notificationLevel = StatusValueNotificationLevel.recordUserAuthor; + } + } + } // TODO: Status does not provide batch update // So taking care of one record at a time. // Currently the code could notify a mix of reviewers @@ -408,14 +479,14 @@ protected void unsetAllOperations(int mdId) throws Exception { } } - private String getTranslatedStatusName(int statusValueId) { + private String getTranslatedStatusName(int statusValueId, Locale locale) { String translatedStatusName = ""; StatusValue s = statusValueRepository.findOneById(statusValueId); if (s == null) { translatedStatusName = statusValueId + " (Status not found in database translation table. 
Check the content of the StatusValueDes table.)"; } else { - translatedStatusName = s.getLabel(this.language); + translatedStatusName = s.getLabel(locale.getISO3Language()); } return translatedStatusName; } diff --git a/core/src/main/java/org/fao/geonet/kernel/metadata/StatusActions.java b/core/src/main/java/org/fao/geonet/kernel/metadata/StatusActions.java index 9a4aecff585..047c0b1b33a 100644 --- a/core/src/main/java/org/fao/geonet/kernel/metadata/StatusActions.java +++ b/core/src/main/java/org/fao/geonet/kernel/metadata/StatusActions.java @@ -38,6 +38,6 @@ public interface StatusActions { public void onEdit(int id, boolean minorEdit) throws Exception; - public Map onStatusChange(List status) throws Exception; + public Map onStatusChange(List status, boolean updateIndex) throws Exception; } diff --git a/core/src/main/java/org/fao/geonet/kernel/search/EsSearchManager.java b/core/src/main/java/org/fao/geonet/kernel/search/EsSearchManager.java index 6d1df6045f2..978ab63a750 100644 --- a/core/src/main/java/org/fao/geonet/kernel/search/EsSearchManager.java +++ b/core/src/main/java/org/fao/geonet/kernel/search/EsSearchManager.java @@ -29,8 +29,8 @@ import co.elastic.clients.elasticsearch.core.bulk.BulkOperation; import co.elastic.clients.elasticsearch.core.bulk.UpdateOperation; import co.elastic.clients.elasticsearch.core.search.Hit; -import co.elastic.clients.elasticsearch.indices.*; import co.elastic.clients.elasticsearch.indices.ExistsRequest; +import co.elastic.clients.elasticsearch.indices.*; import co.elastic.clients.transport.endpoints.BooleanResponse; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; @@ -39,6 +39,7 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; import com.google.common.collect.Multimap; import jeeves.server.UserSession; import jeeves.server.context.ServiceContext; @@ -72,8 +73,7 @@ import java.util.*; import static org.fao.geonet.constants.Geonet.IndexFieldNames.IS_TEMPLATE; -import static org.fao.geonet.kernel.search.IndexFields.INDEXING_ERROR_FIELD; -import static org.fao.geonet.kernel.search.IndexFields.INDEXING_ERROR_MSG; +import static org.fao.geonet.kernel.search.IndexFields.*; public class EsSearchManager implements ISearchManager { @@ -213,14 +213,21 @@ private void addMDFields(Element doc, Path schemaDir, } catch (Exception e) { LOGGER.error("Indexing stylesheet contains errors: {} \n Marking the metadata as _indexingError=1 in index", e.getMessage()); doc.addContent(new Element(INDEXING_ERROR_FIELD).setText("true")); - doc.addContent(new Element(INDEXING_ERROR_MSG).setText("GNIDX-XSL||" + e.getMessage())); - doc.addContent(new Element(IndexFields.DRAFT).setText("n")); + doc.addContent(createIndexingErrorMsgElement("indexingErrorMsg-indexingStyleSheetError", "error", + Map.of("message", e.getMessage()))); } } private void addMoreFields(Element doc, Multimap fields) { - fields.entries().forEach(e -> doc.addContent(new Element(e.getKey()) - .setText(String.valueOf(e.getValue())))); + ArrayList objectFields = Lists.newArrayList(INDEXING_ERROR_MSG); + fields.entries().forEach(e -> { + Element newElement = new Element(e.getKey()) + .setText(String.valueOf(e.getValue())); + if (objectFields.contains(e.getKey())) { + newElement.setAttribute("type", "object"); + } + doc.addContent(newElement); + }); } public Element makeField(String name, String value) { @@ -340,6 
+347,7 @@ public BulkResponse updateFields(String id, Multimap fields, Set fields.asMap().forEach((e, v) -> fieldMap.put(e, v.toArray())); return updateFields(id, fieldMap, fieldsToRemove); } + public BulkResponse updateFields(String id, Map fieldMap, Set fieldsToRemove) throws IOException { fieldMap.put(Geonet.IndexFieldNames.INDEXING_DATE, new Date()); @@ -395,7 +403,7 @@ public void updateFieldsAsynch(String id, Map fields) { if (exception != null) { LOGGER.error("Failed to index {}", exception); } else { - LOGGER.info("Updated fields for document {}", id); + LOGGER.info("Updated fields for document {}", id); } }); } @@ -470,7 +478,7 @@ private void sendDocumentsToIndex() { } catch (Exception e) { LOGGER.error( "An error occurred while indexing {} documents in current indexing list. Error is {}.", - listOfDocumentsToIndex.size(), e.getMessage()); + listOfDocumentsToIndex.size(), e.getMessage()); } finally { // TODO: Trigger this async ? documents.keySet().forEach(uuid -> overviewFieldUpdater.process(uuid)); @@ -493,6 +501,7 @@ private void checkIndexResponse(BulkResponse bulkItemResponses, String id = ""; String uuid = ""; String isTemplate = ""; + String isDraft = ""; String failureDoc = documents.get(e.id()); try { @@ -501,16 +510,17 @@ private void checkIndexResponse(BulkResponse bulkItemResponses, id = node.get(IndexFields.DBID).asText(); uuid = node.get("uuid").asText(); isTemplate = node.get(IS_TEMPLATE).asText(); + isDraft = node.get(DRAFT).asText(); } catch (Exception ignoredException) { } docWithErrorInfo.put(IndexFields.DBID, id); docWithErrorInfo.put("uuid", uuid); docWithErrorInfo.put(IndexFields.RESOURCE_TITLE, resourceTitle); docWithErrorInfo.put(IS_TEMPLATE, isTemplate); - docWithErrorInfo.put(IndexFields.DRAFT, "n"); + docWithErrorInfo.put(IndexFields.DRAFT, isDraft); docWithErrorInfo.put(INDEXING_ERROR_FIELD, true); ArrayNode errors = docWithErrorInfo.putArray(INDEXING_ERROR_MSG); - errors.add(e.error().reason()); + errors.add(createIndexingErrorMsgObject(e.error().reason(), "error", Map.of())); // TODO: Report the JSON which was causing the error ? LOGGER.error("Document with error #{}: {}.", @@ -530,7 +540,7 @@ private void checkIndexResponse(BulkResponse bulkItemResponses, BulkResponse response = client.bulkRequest(defaultIndex, listErrorOfDocumentsToIndex); if (response.errors()) { LOGGER.error("Failed to save error documents {}.", - Arrays.toString(errorDocumentIds.toArray())); + Arrays.toString(errorDocumentIds.toArray())); } } } @@ -543,6 +553,7 @@ private void checkIndexResponse(BulkResponse bulkItemResponses, static { arrayFields = ImmutableSet.builder() .add(Geonet.IndexFieldNames.RECORDLINK) + .add("geom") .add("topic") .add("cat") .add("keyword") @@ -563,6 +574,7 @@ private void checkIndexResponse(BulkResponse bulkItemResponses, .add("status_text") .add("coordinateSystem") .add("identifier") + .add("maintenance") .add("responsibleParty") .add("mdLanguage") .add("otherLanguage") @@ -655,23 +667,16 @@ public ObjectNode documentToJson(Element xml) { || propertyName.endsWith("DateForResource") || propertyName.startsWith("cl_"); - if (name.equals("geom")) { - try { - doc.set("geom", mapper.readTree(nodeElements.get(0).getTextNormalize())); - } catch (IOException e) { - LOGGER.error("Parsing invalid geometry for JSON node {}. 
Error is: {}", - nodeElements.get(0).getTextNormalize(), e.getMessage()); - } - } else if (isArray) { + if (isArray) { ArrayNode arrayNode = doc.putArray(propertyName); for (Element node : nodeElements) { if (isObject) { try { arrayNode.add( - mapper.readTree(node.getText())); + mapper.readTree(node.getTextNormalize())); } catch (IOException e) { LOGGER.error("Parsing invalid JSON node {} for property {}. Error is: {}", - node.getTextNormalize(), propertyName, e.getMessage()); + node.getTextNormalize(), propertyName, e.getMessage()); } } else { arrayNode.add( @@ -690,7 +695,7 @@ public ObjectNode documentToJson(Element xml) { )); } catch (IOException e) { LOGGER.error("Parsing invalid JSON node {} for property {}. Error is: {}", - nodeElements.get(0).getTextNormalize(), propertyName, e.getMessage()); + nodeElements.get(0).getTextNormalize(), propertyName, e.getMessage()); } } else { doc.put(propertyName, @@ -703,7 +708,8 @@ public ObjectNode documentToJson(Element xml) { } - /** Field starting with _ not supported in Kibana + /** + * Field starting with _ not supported in Kibana * Those are usually GN internal fields */ private String getPropertyName(String name) { @@ -808,8 +814,8 @@ public SearchResponse query(JsonNode jsonRequest, Set includedFields, return client.query(defaultIndex, jsonRequest, null, includedFields, from, size); } - public Map getFieldsValues(String id, Set fields) throws IOException { - return client.getFieldsValues(defaultIndex, id, fields); + public Map getFieldsValues(String id, Set fields, String language) throws Exception { + return client.getFieldsValues(defaultIndex, id, fields, language); } @@ -942,4 +948,38 @@ public boolean isIndexWritable(String indexName) throws IOException, Elasticsear return (indexState != null) && "true".equals(indexState.toString()); } + + + /** + * Make a JSON Object that properly represents an indexingErrorMsg, to be used in the index. + * + * @param type either 'error' or 'warning' + * @param string a string that is translatable (see, e.g., en-search.json) + * @param values values that replace the placeholders in the `string` parameter + * @return a json object that represents an indexingErrorMsg + */ + public ObjectNode createIndexingErrorMsgObject(String string, String type, Map values) { + ObjectMapper objectMapper = new ObjectMapper(); + ObjectNode indexingErrorMsg = objectMapper.createObjectNode(); + indexingErrorMsg.put("string", string); + indexingErrorMsg.put("type", type); + ObjectNode valuesObject = objectMapper.createObjectNode(); + values.forEach((k, v) -> valuesObject.put(k, String.valueOf(v))); + indexingErrorMsg.set("values", valuesObject); + return indexingErrorMsg; + } + + /** + * Create an Element that represents an indexingErrorMsg object, to be used in the index. 
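For orientation, a hedged sketch of the object this helper pair produces, using the schema-not-registered case wired into BaseMetadataIndexer above (the record id and schema name below are illustrative, not taken from a real catalogue):

    // Sketch only: mirrors the call added in BaseMetadataIndexer.indexMetadata.
    ObjectNode msg = searchManager.createIndexingErrorMsgObject(
        "indexingErrorMsg-schemaNotRegistered",                // translatable key (see, e.g., en-search.json)
        "error",                                               // type: 'error' or 'warning'
        Map.of("record", "42", "schema", "iso19115-3.2018"));  // values substituted into the translated string
    // Stored in the indexingErrorMsg field as an object, e.g.
    // {"string": "indexingErrorMsg-schemaNotRegistered", "type": "error",
    //  "values": {"record": "42", "schema": "iso19115-3.2018"}}   (key order may vary)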
+ * + * @param type either 'error' or 'warning' + * @param string a string that is translatable (see, e.g., en-search.json) + * @param values values that replace the placeholders in the `string` parameter + * @return an Element that represents an indexingErrorMsg + */ + public Element createIndexingErrorMsgElement(String string, String type, Map values) { + return new Element(INDEXING_ERROR_MSG) + .setText(createIndexingErrorMsgObject(string, type, values).toString()) + .setAttribute("type", "object"); + } } diff --git a/core/src/main/java/org/fao/geonet/kernel/security/openidconnect/bearer/GeonetworkJwtAuthenticationProvider.java b/core/src/main/java/org/fao/geonet/kernel/security/openidconnect/bearer/GeonetworkJwtAuthenticationProvider.java index 680a540ff92..220f024097c 100644 --- a/core/src/main/java/org/fao/geonet/kernel/security/openidconnect/bearer/GeonetworkJwtAuthenticationProvider.java +++ b/core/src/main/java/org/fao/geonet/kernel/security/openidconnect/bearer/GeonetworkJwtAuthenticationProvider.java @@ -52,7 +52,7 @@ import org.springframework.security.oauth2.core.user.DefaultOAuth2User; import org.springframework.security.oauth2.core.user.OAuth2User; import org.springframework.security.oauth2.jwt.Jwt; -import org.springframework.security.oauth2.server.resource.BearerTokenAuthenticationToken; +import org.springframework.security.oauth2.server.resource.authentication.BearerTokenAuthenticationToken; import org.springframework.security.oauth2.server.resource.BearerTokenError; import org.springframework.security.oauth2.server.resource.BearerTokenErrorCodes; import org.springframework.security.oauth2.server.resource.authentication.JwtAuthenticationConverter; diff --git a/core/src/main/java/org/fao/geonet/kernel/security/openidconnect/bearer/UserInfoCache.java b/core/src/main/java/org/fao/geonet/kernel/security/openidconnect/bearer/UserInfoCache.java index 4e75a1282b2..15252009821 100644 --- a/core/src/main/java/org/fao/geonet/kernel/security/openidconnect/bearer/UserInfoCache.java +++ b/core/src/main/java/org/fao/geonet/kernel/security/openidconnect/bearer/UserInfoCache.java @@ -10,19 +10,13 @@ */ public class UserInfoCache { - static Object lockobj = new Object(); + static final Object lockobj = new Object(); Map cache = new HashMap<>(); public UserInfoCacheItem getItem(String accessKey) { synchronized (lockobj) { - if (!cache.containsKey(accessKey)) - return null; - UserInfoCacheItem item = cache.get(accessKey); - if (item.isExpired()) { - cache.remove(accessKey); - return null; - } - return item; + cache.entrySet().removeIf(e -> e.getValue().isExpired()); + return cache.get(accessKey); } } diff --git a/core/src/main/java/org/fao/geonet/kernel/setting/SettingManager.java b/core/src/main/java/org/fao/geonet/kernel/setting/SettingManager.java index a3cd94bcb3c..b6f015d6b58 100644 --- a/core/src/main/java/org/fao/geonet/kernel/setting/SettingManager.java +++ b/core/src/main/java/org/fao/geonet/kernel/setting/SettingManager.java @@ -33,6 +33,7 @@ import org.fao.geonet.domain.Setting; import org.fao.geonet.domain.SettingDataType; import org.fao.geonet.domain.Setting_; +import org.fao.geonet.languages.FeedbackLanguages; import org.fao.geonet.repository.SettingRepository; import org.fao.geonet.repository.SortUtils; import org.fao.geonet.repository.SourceRepository; @@ -94,6 +95,9 @@ public class SettingManager { @Autowired DefaultLanguage defaultLanguage; + @Autowired + FeedbackLanguages feedbackLanguages; + @PostConstruct private void init() { this.pathFinder = new 
ServletPathFinder(servletContext); @@ -343,6 +347,12 @@ public boolean setValue(String key, String value) { repo.save(setting); + if (key.equals("system/feedback/languages")) { + feedbackLanguages.updateSupportedLocales(); + } else if (key.equals("system/feedback/translationFollowsText")) { + feedbackLanguages.updateTranslationFollowsText(); + } + return true; } diff --git a/core/src/main/java/org/fao/geonet/kernel/setting/Settings.java b/core/src/main/java/org/fao/geonet/kernel/setting/Settings.java index c3c2b209271..a96fa132585 100644 --- a/core/src/main/java/org/fao/geonet/kernel/setting/Settings.java +++ b/core/src/main/java/org/fao/geonet/kernel/setting/Settings.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2023 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -60,6 +60,8 @@ public class Settings { public static final String SYSTEM_USERS_IDENTICON = "system/users/identicon"; public static final String SYSTEM_SEARCHSTATS = "system/searchStats/enable"; public static final String SYSTEM_FEEDBACK_EMAIL = "system/feedback/email"; + public static final String SYSTEM_FEEDBACK_LANGUAGES = "system/feedback/languages"; + public static final String SYSTEM_FEEDBACK_TRANSLATION_FOLLOWS_TEXT = "system/feedback/translationFollowsText"; public static final String SYSTEM_FEEDBACK_MAILSERVER_HOST = "system/feedback/mailServer/host"; public static final String SYSTEM_FEEDBACK_MAILSERVER_PORT = "system/feedback/mailServer/port"; public static final String SYSTEM_FEEDBACK_MAILSERVER_USERNAME = "system/feedback/mailServer/username"; @@ -71,7 +73,6 @@ public class Settings { public static final String SYSTEM_ENABLE_ALL_THESAURUS = "system/metadata/allThesaurus"; public static final String SYSTEM_METADATA_THESAURUS_NAMESPACE = "system/metadata/thesaurusNamespace"; public static final String SYSTEM_METADATA_VALIDATION_REMOVESCHEMALOCATION = "system/metadata/validation/removeSchemaLocation"; - public static final String SYSTEM_METADATA_HISTORY_ENABLED = "system/metadata/history/enabled"; public static final GNSetting SYSTEM_SITE_SVNUUID = new GNSetting("system/site/svnUuid", true); public static final String SYSTEM_INTRANET_NETWORK = "system/intranet/network"; public static final String SYSTEM_INTRANET_NETMASK = "system/intranet/netmask"; @@ -84,6 +85,7 @@ public class Settings { public static final String SYSTEM_CSW_CAPABILITY_RECORD_UUID = "system/csw/capabilityRecordUuid"; public static final String SYSTEM_CSW_METADATA_PUBLIC = "system/csw/metadataPublic"; public static final String SYSTEM_USERSELFREGISTRATION_ENABLE = "system/userSelfRegistration/enable"; + public static final String SYSTEM_USERSELFREGISTRATION_EMAIL_DOMAINS = "system/userSelfRegistration/domainsAllowed"; public static final String SYSTEM_USERSELFREGISTRATION_RECAPTCHA_ENABLE = "system/userSelfRegistration/recaptcha/enable"; public static final String SYSTEM_USERSELFREGISTRATION_RECAPTCHA_PUBLICKEY = "system/userSelfRegistration/recaptcha/publickey"; public static final String SYSTEM_USERSELFREGISTRATION_RECAPTCHA_SECRETKEY = "system/userSelfRegistration/recaptcha/secretkey"; @@ -139,6 +141,8 @@ public class Settings { public static final String METADATA_IMPORT_RESTRICT = "metadata/import/restrict"; public static final String METADATA_IMPORT_USERPROFILE = "metadata/import/userprofile"; public static final String METADATA_BATCH_EDITING_ACCESS_LEVEL = 
"metadata/batchediting/accesslevel"; + public static final String METADATA_HISTORY_ENABLED = "metadata/history/enabled"; + public static final String METADATA_HISTORY_ACCESS_LEVEL = "metadata/history/accesslevel"; public static final String METADATA_PUBLISHED_DELETE_USERPROFILE = "metadata/delete/profilePublishedMetadata"; public static final String METADATA_PUBLISH_USERPROFILE = "metadata/publication/profilePublishMetadata"; public static final String METADATA_UNPUBLISH_USERPROFILE = "metadata/publication/profileUnpublishMetadata"; @@ -154,6 +158,9 @@ public class Settings { public static final String SYSTEM_SECURITY_PASSWORDENFORCEMENT_USEPATTERN = "system/security/passwordEnforcement/usePattern"; public static final String SYSTEM_SECURITY_PASSWORDENFORCEMENT_PATTERN = "system/security/passwordEnforcement/pattern"; public static final String SYSTEM_SECURITY_PASSWORD_ALLOWADMINRESET = "system/security/password/allowAdminReset"; + public static final String SYSTEM_TRANSLATION_PROVIDER = "system/translation/provider"; + public static final String SYSTEM_TRANSLATION_SERVICEURL = "system/translation/serviceUrl"; + public static final String SYSTEM_TRANSLATION_APIKEY = "system/translation/apiKey"; public static final String MICROSERVICES_ENABLED = "microservices/enabled"; diff --git a/core/src/main/java/org/fao/geonet/kernel/url/UrlAnalyzer.java b/core/src/main/java/org/fao/geonet/kernel/url/UrlAnalyzer.java index 5686f54e12a..e904e8114a8 100644 --- a/core/src/main/java/org/fao/geonet/kernel/url/UrlAnalyzer.java +++ b/core/src/main/java/org/fao/geonet/kernel/url/UrlAnalyzer.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2019 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -39,10 +39,6 @@ import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; -import javax.persistence.criteria.CriteriaBuilder; -import javax.persistence.criteria.CriteriaQuery; -import javax.persistence.criteria.Predicate; -import javax.persistence.criteria.Root; import java.util.Optional; @@ -78,21 +74,19 @@ public void processMetadata(Element element, AbstractMetadata md) throws org.jdo if (schemaPlugin instanceof LinkAwareSchemaPlugin) { metadataLinkRepository - .findAll(metadatalinksTargetting(md)) - .stream() - .forEach(metadatalink -> { - metadatalink.getLink().getRecords().remove(metadatalink); - }); + .findAll(metadatalinksTargetting(md)) + .stream() + .forEach(metadatalink -> metadatalink.getLink().getRecords().remove(metadatalink)); entityManager.flush(); ((LinkAwareSchemaPlugin) schemaPlugin).createLinkStreamer(new ILinkBuilder() { @Override public Link found(String url) { - Link link = linkRepository.findOneByUrl(url); - if (link != null) { - return link; + Optional linkOptional = linkRepository.findOneByUrl(url); + if (linkOptional.isPresent()) { + return linkOptional.get(); } else { - link = new Link(); + Link link = new Link(); link.setUrl(url); linkRepository.save(link); return link; @@ -102,7 +96,7 @@ public Link found(String url) { @Override public void persist(Link link, AbstractMetadata metadata) { MetadataLink metadataLink = new MetadataLink(); - metadataLink.setMetadataId(new Integer(metadata.getId())); + metadataLink.setMetadataId(metadata.getId()); metadataLink.setMetadataUuid(metadata.getUuid()); 
metadataLink.setLink(link); link.getRecords().add(metadataLink); @@ -115,10 +109,10 @@ public void persist(Link link, AbstractMetadata metadata) { public void purgeMetataLink(Link link) { metadataLinkRepository - .findAll(metadatalinksTargetting(link)) - .stream() - .filter(metadatalink -> isReferencingAnUnknownMetadata((MetadataLink)metadatalink)) - .forEach(metadataLinkRepository::delete); + .findAll(metadatalinksTargetting(link)) + .stream() + .filter(this::isReferencingAnUnknownMetadata) + .forEach(metadataLinkRepository::delete); entityManager.flush(); } @@ -136,28 +130,16 @@ public void testLink(Link link) { } private Specification metadatalinksTargetting(Link link) { - return new Specification() { - @Override - public Predicate toPredicate(Root root, CriteriaQuery criteriaQuery, CriteriaBuilder criteriaBuilder) { - return criteriaBuilder.equal(root.get(MetadataLink_.link).get(Link_.id), link.getId()); - } - }; + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(MetadataLink_.link).get(Link_.id), link.getId()); } private Specification metadatalinksTargetting(AbstractMetadata md) { - return new Specification() { - @Override - public Predicate toPredicate(Root root, CriteriaQuery criteriaQuery, CriteriaBuilder criteriaBuilder) { - return criteriaBuilder.equal(root.get(MetadataLink_.metadataId), md.getId()); - } - }; + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(MetadataLink_.metadataId), md.getId()); } private boolean isReferencingAnUnknownMetadata(MetadataLink metadatalink) { Optional metadata = metadataRepository.findById(metadatalink.getMetadataId()); - return !metadata.isPresent(); + return metadata.isEmpty(); } - - } diff --git a/core/src/main/java/org/fao/geonet/languages/FeedbackLanguages.java b/core/src/main/java/org/fao/geonet/languages/FeedbackLanguages.java new file mode 100644 index 00000000000..183ac8426f5 --- /dev/null +++ b/core/src/main/java/org/fao/geonet/languages/FeedbackLanguages.java @@ -0,0 +1,129 @@ +//============================================================================= +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the +//=== United Nations (FAO-UN), United Nations World Food Programme (WFP) +//=== and United Nations Environment Programme (UNEP) +//=== +//=== This library is free software; you can redistribute it and/or +//=== modify it under the terms of the GNU Lesser General Public +//=== License as published by the Free Software Foundation; either +//=== version 2.1 of the License, or (at your option) any later version. +//=== +//=== This library is distributed in the hope that it will be useful, +//=== but WITHOUT ANY WARRANTY; without even the implied warranty of +//=== MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +//=== Lesser General Public License for more details. +//=== +//=== You should have received a copy of the GNU Lesser General Public +//=== License along with this library; if not, write to the Free Software +//=== Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +//=== +//=== Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, +//=== Rome - Italy. 
email: geonetwork@osgeo.org +//============================================================================== + +package org.fao.geonet.languages; + +import org.apache.commons.lang.StringUtils; +import org.fao.geonet.constants.Geonet; +import org.fao.geonet.kernel.setting.SettingManager; +import org.fao.geonet.kernel.setting.Settings; +import org.fao.geonet.utils.Log; +import org.springframework.beans.factory.annotation.Autowired; + +import javax.annotation.PostConstruct; +import java.util.*; + +/** + * Represents a utility class for managing supported locales and translation follows text for feedback. + */ +public class FeedbackLanguages { + private Locale[] supportedLocales; + private String translationFollowsText; + + @Autowired + SettingManager settingManager; + + /** + * Initializes the supported locales and translation follows text after bean creation. + */ + @PostConstruct + public void init() { + updateSupportedLocales(); + updateTranslationFollowsText(); + } + + /** + * Updates the supported locales based on the system feedback languages setting. + */ + public void updateSupportedLocales() { + String systemFeedbackLanguages = getSettingsValue(Settings.SYSTEM_FEEDBACK_LANGUAGES); + + if (StringUtils.isBlank(systemFeedbackLanguages)) { + supportedLocales = null; + return; + } + + supportedLocales = Arrays.stream(systemFeedbackLanguages.split(",")) + .map(String::trim) + .map(Locale::new) + .filter(this::isValidLocale) + .toArray(Locale[]::new); + } + + /** + * Updates the translation follows text based on the system feedback translation text setting. + */ + public void updateTranslationFollowsText() { + translationFollowsText = getSettingsValue(Settings.SYSTEM_FEEDBACK_TRANSLATION_FOLLOWS_TEXT); + } + + /** + * Retrieves the supported locales. If no supported locales are found, returns a fallback locale. + * @param fallbackLocale The fallback locale to be returned if no supported locales are available. + * @return An array of supported locales or a single fallback locale if none are available. + */ + public Locale[] getLocales(Locale fallbackLocale) { + if (supportedLocales == null || supportedLocales.length < 1) { + return new Locale[] { fallbackLocale }; + } + + return supportedLocales; + } + + /** + * Retrieves the translation follows text. + * @return The translation follows text. + */ + public String getTranslationFollowsText() { + return translationFollowsText; + } + + /** + * Checks if the provided locale is valid by attempting to load a ResourceBundle. + * @param locale The locale to validate. + * @return True if the locale is valid, false otherwise. + */ + private boolean isValidLocale(Locale locale) { + Boolean isValid; + try { + isValid = locale.getLanguage().equals(Geonet.DEFAULT_LANGUAGE) + || ResourceBundle.getBundle("org.fao.geonet.api.Messages", locale).getLocale().getLanguage().equals(locale.getLanguage()); + } catch (MissingResourceException e) { + isValid = false; + } + if (!isValid) { + String localeLanguage; + try { + localeLanguage = locale.getISO3Language(); + } catch (MissingResourceException e) { + localeLanguage = locale.getLanguage(); + } + Log.warning(Log.GEONETWORK_MODULE + ".feedbacklanguages", "Locale '" + localeLanguage + "' is invalid or missing message bundles. 
Ensure feedback locales are correct."); + } + return isValid; + } + + private String getSettingsValue(String settingName) { + return settingManager.getValue(settingName); + } +} diff --git a/core/src/main/java/org/fao/geonet/lib/DbLib.java b/core/src/main/java/org/fao/geonet/lib/DbLib.java index 6a85a1e334c..407304043fd 100644 --- a/core/src/main/java/org/fao/geonet/lib/DbLib.java +++ b/core/src/main/java/org/fao/geonet/lib/DbLib.java @@ -183,7 +183,7 @@ private void runSQL(Statement statement, List data, boolean failOnError) * * @param type @return */ - private Path checkFilePath(ServletContext servletContext, Path appPath, Path filePath, String prefix, String type) { + private Path checkFilePath(ServletContext servletContext, Path appPath, Path filePath, String prefix, String type) throws IOException { Path finalPath; finalPath = testPath(filePath.resolve(prefix + type + SQL_EXTENSION)); @@ -214,9 +214,10 @@ private Path checkFilePath(ServletContext servletContext, Path appPath, Path fil if (finalPath != null) return finalPath; else { - Log.debug(Geonet.DB, " No default SQL script found: " + (filePath + "/" + prefix + type + SQL_EXTENSION)); + String msg = String.format("SQL script not found: %s", filePath + "/" + prefix + type + SQL_EXTENSION); + Log.debug(Geonet.DB, msg); + throw new IOException(msg); } - return toPath(""); } private Path toPath(String pathString) { diff --git a/core/src/main/java/org/fao/geonet/util/LocalizedEmail.java b/core/src/main/java/org/fao/geonet/util/LocalizedEmail.java new file mode 100644 index 00000000000..0aa1bf978fb --- /dev/null +++ b/core/src/main/java/org/fao/geonet/util/LocalizedEmail.java @@ -0,0 +1,149 @@ +//============================================================================= +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the +//=== United Nations (FAO-UN), United Nations World Food Programme (WFP) +//=== and United Nations Environment Programme (UNEP) +//=== +//=== This library is free software; you can redistribute it and/or +//=== modify it under the terms of the GNU Lesser General Public +//=== License as published by the Free Software Foundation; either +//=== version 2.1 of the License, or (at your option) any later version. +//=== +//=== This library is distributed in the hope that it will be useful, +//=== but WITHOUT ANY WARRANTY; without even the implied warranty of +//=== MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +//=== Lesser General Public License for more details. +//=== +//=== You should have received a copy of the GNU Lesser General Public +//=== License along with this library; if not, write to the Free Software +//=== Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +//=== +//=== Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, +//=== Rome - Italy. email: geonetwork@osgeo.org +//============================================================================== + +package org.fao.geonet.util; + +import org.apache.commons.lang.StringUtils; +import org.fao.geonet.ApplicationContextHolder; +import org.fao.geonet.languages.FeedbackLanguages; +import org.fao.geonet.utils.Log; + +import static org.fao.geonet.util.LocalizedEmailComponent.ComponentType.*; +import static org.fao.geonet.util.LocalizedEmailComponent.ComponentType; + +import java.util.*; + +/** + * Class representing a localized email. 
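As a reading aid, a minimal sketch of the intended call pattern, mirroring the WatchListNotifier hunk earlier in this diff (siteName, updatedRecords, listOfUpdateMessage, lastNotificationDate and url are bindings from that hunk; the default language "eng" follows its field initialiser):

    Locale[] locales = feedbackLanguages.getLocales(new Locale("eng")); // falls back to "eng" when no feedback languages are configured

    LocalizedEmailComponent subject = new LocalizedEmailComponent(SUBJECT, "user_watchlist_subject", KeyType.MESSAGE_KEY, POSITIONAL_FORMAT);
    LocalizedEmailComponent message = new LocalizedEmailComponent(MESSAGE, "user_watchlist_message", KeyType.MESSAGE_KEY, POSITIONAL_FORMAT);
    for (Locale locale : locales) {
        subject.addParameters(locale,
            new LocalizedEmailParameter(ParameterType.RAW_VALUE, 1, siteName),
            new LocalizedEmailParameter(ParameterType.RAW_VALUE, 2, updatedRecords.size()),
            new LocalizedEmailParameter(ParameterType.RAW_VALUE, 3, lastNotificationDate));
        message.addParameters(locale,
            new LocalizedEmailParameter(ParameterType.RAW_VALUE, 1, listOfUpdateMessage.toString()),
            new LocalizedEmailParameter(ParameterType.RAW_VALUE, 2, lastNotificationDate),
            new LocalizedEmailParameter(ParameterType.RAW_VALUE, 3, url),
            new LocalizedEmailParameter(ParameterType.RAW_VALUE, 4, url));
    }

    LocalizedEmail email = new LocalizedEmail(true);        // true = HTML body (WatchListNotifier), false = plain text (DefaultStatusActions)
    email.addComponents(subject, message);
    String mailSubject = email.getParsedSubject(locales);   // per-locale subjects joined with " | "
    String htmlMessage = email.getParsedMessage(locales);   // per-locale bodies wrapped and joined into one email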
+ */ +public class LocalizedEmail { + private final Boolean isHtml; + private final Map components; + private final String translationFollowsText; + + private static final String SUBJECT_DELIMITER = " | "; + private static final String HTML_MESSAGE_DELIMITER = "
"; + private static final String HTML_LINE_BREAK = "

"; + private static final String TEXT_MESSAGE_DELIMITER = "\n\n--------------------------------------------------------\n\n"; + private static final String TEXT_LINE_BREAK = "\n\n"; + + public LocalizedEmail(Boolean isHtml) { + this.isHtml = isHtml; + + FeedbackLanguages feedbackLanguages = ApplicationContextHolder.get().getBean(FeedbackLanguages.class); + this.translationFollowsText = feedbackLanguages.getTranslationFollowsText(); + + this.components = new HashMap<>(); + } + + /** + * Add one or more components to the email object. Existing components are replaced. + * + * @param newComponents The components to add to the email. + */ + public void addComponents(LocalizedEmailComponent... newComponents) { + + for (LocalizedEmailComponent newComponent : newComponents) { + + if (newComponent == null) { + throw new IllegalArgumentException("Null parameter not allowed"); + } + + components.put(newComponent.getComponentType(), newComponent); + } + } + + public String getParsedSubject(Locale[] feedbackLocales) { + LinkedHashMap subjects = components.get(SUBJECT).getParsedMessagesMap(feedbackLocales); + return String.join(SUBJECT_DELIMITER, subjects.values()); + } + + public String getParsedMessage(Locale[] feedbackLocales) { + return getParsedMessage(feedbackLocales, null); + } + + public String getParsedMessage(Locale[] feedbackLocales, Map replacements) { + LinkedHashMap messages = components.get(MESSAGE).getParsedMessagesMap(feedbackLocales, true); + + // Prepend the message with a salutation placeholder if the salutation component is present + if (components.containsKey(SALUTATION) && components.get(SALUTATION) != null) { + + LinkedHashMap salutations = components.get(SALUTATION).getParsedMessagesMap(feedbackLocales); + LinkedHashMap messagesWithSalutations = new LinkedHashMap<>(); + + for (Map.Entry entry : messages.entrySet()) { + //Skip messages that have no matching salutation + if (!salutations.containsKey(entry.getKey())) { + continue; + } + + String message = entry.getValue(); + String salutation = salutations.get(entry.getKey()); + + if (replacements != null && !replacements.isEmpty()) { + for (Map.Entry replacement : replacements.entrySet()) { + salutation = salutation.replace(replacement.getKey(), replacement.getValue()); + } + } + + messagesWithSalutations.put(entry.getKey(), salutation + message); + } + + messages = messagesWithSalutations; + + } + + String messageDelimiter; + String lineBreak; + + // Set the delimiter and break string to use based on email type + if (isHtml) { + messageDelimiter = HTML_MESSAGE_DELIMITER; + lineBreak = HTML_LINE_BREAK; + // Wrap each message in a div with a lang attribute for accessibility + messages.replaceAll((locale, message) -> "
" + message + "
"); + } else { + messageDelimiter = TEXT_MESSAGE_DELIMITER; + lineBreak = TEXT_LINE_BREAK; + } + + String emailMessage = String.join(messageDelimiter, messages.values()); + + // Prepend the message with the translation follows text if there is more than one language specified + if (messages.size() > 1 && !StringUtils.isBlank(translationFollowsText)) { + emailMessage = translationFollowsText + lineBreak + emailMessage; + } + + // If the email is html wrap the content in html and body tags + if (isHtml) { + if (emailMessage.contains("") || emailMessage.contains("")) { + Log.warning(Log.GEONETWORK_MODULE + ".localizedemail","Multilingual emails are unsupported for HTML emails with messages containing or tags. Reverting to first specified locale."); + return messages.get(feedbackLocales[0]); + } + emailMessage = "" + emailMessage + ""; + } + + return emailMessage; + } +} + diff --git a/core/src/main/java/org/fao/geonet/util/LocalizedEmailComponent.java b/core/src/main/java/org/fao/geonet/util/LocalizedEmailComponent.java new file mode 100644 index 00000000000..fa61f8e07f8 --- /dev/null +++ b/core/src/main/java/org/fao/geonet/util/LocalizedEmailComponent.java @@ -0,0 +1,372 @@ +//============================================================================= +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the +//=== United Nations (FAO-UN), United Nations World Food Programme (WFP) +//=== and United Nations Environment Programme (UNEP) +//=== +//=== This library is free software; you can redistribute it and/or +//=== modify it under the terms of the GNU Lesser General Public +//=== License as published by the Free Software Foundation; either +//=== version 2.1 of the License, or (at your option) any later version. +//=== +//=== This library is distributed in the hope that it will be useful, +//=== but WITHOUT ANY WARRANTY; without even the implied warranty of +//=== MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +//=== Lesser General Public License for more details. +//=== +//=== You should have received a copy of the GNU Lesser General Public +//=== License along with this library; if not, write to the Free Software +//=== Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +//=== +//=== Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, +//=== Rome - Italy. email: geonetwork@osgeo.org +//============================================================================== + +package org.fao.geonet.util; + +import org.fao.geonet.ApplicationContextHolder; +import org.fao.geonet.kernel.search.JSONLocCacheLoader; +import org.fao.geonet.kernel.setting.SettingManager; + +import java.text.MessageFormat; +import java.util.*; + +import static org.fao.geonet.util.LocalizedEmailComponent.ReplacementType.*; + +/** + * This class is used to handle email parameters used to format localized email messages + */ +public class LocalizedEmailComponent { + + private final ComponentType componentType; + private final String keyOrRawValue; + private final KeyType keyType; + private final ReplacementType replacementType; + private final Map> parameters; + private Boolean compileWithIndexFields; + private String metadataUuid; + private Boolean replaceLinks; + private Boolean replaceLinksWithHtmlFormat = false; + + /** + * Enum representing the types of components in an email. + *
<p>
+ * This enum defines four types of components:
+ * <ul>
+ *   <li>{@link ComponentType#SUBJECT SUBJECT}: The email subject field.</li>
+ *   <li>{@link ComponentType#MESSAGE MESSAGE}: The email body.</li>
+ *   <li>{@link ComponentType#SALUTATION SALUTATION}: The salutation to prepend each localized message with. (Ex. 'Hello John')</li>
+ *   <li>{@link ComponentType#NESTED NESTED}: A component of insignificant type that is used to generate other components.</li>
+ * </ul>
+ */ + public enum ComponentType { + /** + * The email subject field. + */ + SUBJECT, + + /** + * The email body. + */ + MESSAGE, + + /** + * The salutation to prepend each localized message with. (Ex. 'Hello John'). + */ + SALUTATION, + + /** + * A component of insignificant type that is used to generate other components. + */ + NESTED + } + + /** + * Enum representing the types of keys used to parse a components message. + *
<p>
+ * This enum defines four types of keys:
+ * <ul>
+ *   <li>{@link KeyType#MESSAGE_OR_JSON_KEY MESSAGE_OR_JSON_KEY}: Represents a component that tries to retrieve its value using {@link ResourceBundle#getString} or JSON localization files if message key was not found.</li>
+ *   <li>{@link KeyType#MESSAGE_KEY MESSAGE_KEY}: Represents a component that retrieves its value using {@link ResourceBundle#getString}.</li>
+ *   <li>{@link KeyType#JSON_KEY JSON_KEY}: Represents a component that retrieves its value by searching the JSON localization files for the specified key.</li>
+ *   <li>{@link KeyType#RAW_VALUE RAW_VALUE}: Represents a component in which keys are not required. The raw value from keyOrRawValue is used.</li>
+ * </ul>
+ */ + public enum KeyType { + /** + * Represents a component that tries to retrieve its value using {@link ResourceBundle#getString} or JSON localization files if message key was not found. + */ + MESSAGE_OR_JSON_KEY, + + /** + * Represents a component that retrieves its value using {@link ResourceBundle#getString}. + */ + MESSAGE_KEY, + + /** + * Represents a component that retrieves its value by searching the JSON localization files for the specified key. + */ + JSON_KEY, + + /** + * Represents a component in which keys are not required. The raw value from keyOrRawValue is used. + */ + RAW_VALUE + } + + /** + * Enum representing the types of replacements performed on the email component. + *
<p>
+ * This enum defines four types of replacement:
+ * <ul>
+ *   <li>{@link ReplacementType#POSITIONAL_FORMAT POSITIONAL_FORMAT}: For {@link String#format}, where parameters are replaced based on their position (Ex. %s). The parameter id stores an integer representing the order of the parameters.</li>
+ *   <li>{@link ReplacementType#NUMERIC_FORMAT NUMERIC_FORMAT}: For {@link MessageFormat#format}, where parameters are replaced based on position (Ex. {0}). The parameter id stores an integer representing the order of the parameters.</li>
+ *   <li>{@link ReplacementType#NAMED_FORMAT NAMED_FORMAT}: For {@link String#replace}, where parameters are replaced based on their names ({{title}}). The parameter id stores the string to replace.</li>
+ *   <li>{@link ReplacementType#NONE NONE}: For components that require no replacement to compute their values.</li>
+ * </ul>
+ */ + public enum ReplacementType { + /** + * For {@link String#format}, where parameters are replaced based on their position (Ex. %s). + * The parameter id stores an integer representing the order of the parameters. + */ + POSITIONAL_FORMAT, + + /** + * For {@link MessageFormat#format}, where parameters are replaced based on position (Ex. {0}). + * The parameter id stores an integer representing the order of the parameters. + */ + NUMERIC_FORMAT, + + /** + * For {@link String#replace}, where parameters are replaced based on their names ({{title}}). + * The parameter id stores the string to replace. + */ + NAMED_FORMAT, + + /** + * For components that require no replacement to compute their values. + */ + NONE + } + + /** + * Constructor for LocalizedEmailParameters. + * + * @param replacementType the type of template variable + */ + public LocalizedEmailComponent(ComponentType componentType, String keyOrRawValue, KeyType keyType, ReplacementType replacementType) { + this.componentType = componentType; + this.keyOrRawValue = keyOrRawValue; + this.keyType = keyType; + this.replacementType = replacementType; + this.parameters = new HashMap<>(); + this.compileWithIndexFields = false; + this.metadataUuid = null; + this.replaceLinks = false; + } + + /** + * Adds parameters to the email parameters list. + * + * @param newParameters the parameters to add + * @throws IllegalArgumentException if a null parameter is passed or if a duplicate parameter id is found + */ + public void addParameters(Locale locale, LocalizedEmailParameter... newParameters) { + // If the map does not have the locale as a key add it + if (!parameters.containsKey(locale)) { + parameters.put(locale, new ArrayList<>()); + } + + for (LocalizedEmailParameter newParameter : newParameters) { + + if (newParameter == null) { + throw new IllegalArgumentException("Null parameter not allowed"); + } + + // If the parameter id is already in the list + if (parameters.get(locale).stream().anyMatch(existingParameter -> newParameter.getId().equals(existingParameter.getId()))) { + throw new IllegalArgumentException("Duplicate parameter id: " + newParameter.getId()); + } + + // If the type of parameters are positional and the new parameters id is not an integer + if ((replacementType.equals(POSITIONAL_FORMAT) || replacementType.equals(NUMERIC_FORMAT)) && !(newParameter.getId() instanceof Integer)) { + throw new IllegalArgumentException("Positional parameter id must be an integer"); + } + + parameters.get(locale).add(newParameter); + } + } + + /** + * @return the map of locales to lists of email parameters + */ + public Map> getParameters() { + return parameters; + } + + /** + * Enables the compilation with index fields and sets the metadata UUID. + * + * @param metadataUuid the metadata UUID + */ + public void enableCompileWithIndexFields(String metadataUuid) { + this.compileWithIndexFields = true; + this.metadataUuid = metadataUuid; + } + + /** + * Sets the replace links flag and format. + * + * @param useHtmlFormat replace links using the HTML format instead of the text format. + */ + public void enableReplaceLinks(Boolean useHtmlFormat) { + this.replaceLinks = true; + this.replaceLinksWithHtmlFormat = useHtmlFormat; + } + + /** + * @return The type of the component. + */ + public ComponentType getComponentType() { + return componentType; + } + + /** + * Parses the message based on the provided key or template and locale. 
+ * + * @param locale the locale + * @return the parsed message + * @throws RuntimeException if an unsupported template variable type is encountered + */ + public String parseMessage(Locale locale) { + + ArrayList parametersForLocale = parameters.get(locale); + + String parsedMessage; + switch (keyType) { + case MESSAGE_OR_JSON_KEY: + try { + parsedMessage = getResourceBundleString(locale); + } catch (MissingResourceException missingResourceException) { + parsedMessage = getTranslationMapString(locale); + } + break; + case MESSAGE_KEY: + try { + parsedMessage = getResourceBundleString(locale); + } catch (MissingResourceException e) { + parsedMessage = keyOrRawValue; + } + break; + case JSON_KEY: + parsedMessage = getTranslationMapString(locale); + break; + case RAW_VALUE: + parsedMessage = keyOrRawValue; + break; + default: + throw new IllegalArgumentException("Unsupported key type: " + keyType); + } + + // Handle replacements + if (replacementType == POSITIONAL_FORMAT || replacementType == NUMERIC_FORMAT) { + + Object[] parsedLocaleEmailParameters = parametersForLocale.stream() + .sorted(Comparator.comparing(parameter -> (Integer) parameter.getId())) + .map(parameter -> parameter.parseValue(locale)) + .toArray(); + + if (replacementType == POSITIONAL_FORMAT) { + parsedMessage = String.format(parsedMessage, parsedLocaleEmailParameters); + } else { + // Replace the link placeholders with index field placeholder so that it isn't interpreted as a MessageFormat arg + if (replaceLinks) { + parsedMessage = replaceLinks(parsedMessage); + } + parsedMessage = MessageFormat.format(parsedMessage, parsedLocaleEmailParameters); + } + + } else if (replacementType == NAMED_FORMAT) { + + for (LocalizedEmailParameter parameter : parametersForLocale) { + parsedMessage = parsedMessage.replace(parameter.getId().toString(), parameter.parseValue(locale)); + } + + } + + // Replace link placeholders + if (replaceLinks) { + parsedMessage = replaceLinks(parsedMessage); + } + + // Replace index field placeholders + if (compileWithIndexFields && metadataUuid != null) { + parsedMessage = MailUtil.compileMessageWithIndexFields(parsedMessage, metadataUuid, locale.getLanguage()); + } + + return parsedMessage; + } + + /** + * Returns a map of locales to parsed messages for the provided array of locales. + * + * @param feedbackLocales the array of locales + * @return the map of locales to parsed messages + */ + public LinkedHashMap getParsedMessagesMap(Locale[] feedbackLocales) { + return getParsedMessagesMap(feedbackLocales, false); + } + + /** + * Returns a map of locales to parsed messages for the provided array of locales. + * If flagged only distinct values are returned. 
+ * + * @param feedbackLocales the array of locales + * @param distinct flag to only return messages with distinct values + * @return the map of locales to parsed messages + */ + public LinkedHashMap getParsedMessagesMap(Locale[] feedbackLocales, Boolean distinct) { + + LinkedHashMap parsedMessages = new LinkedHashMap<>(); + + for (Locale locale : feedbackLocales) { + String parsedMessage = parseMessage(locale); + if (!distinct || !parsedMessages.containsValue(parsedMessage)) { + parsedMessages.put(locale, parsedMessage); + } + } + + return parsedMessages; + } + + private String getResourceBundleString(Locale locale) { + return ResourceBundle.getBundle("org.fao.geonet.api.Messages", locale).getString(keyOrRawValue); + } + + private String getTranslationMapString(Locale locale) { + try { + Map translationMap = new JSONLocCacheLoader(ApplicationContextHolder.get(), locale.getISO3Language()).call(); + return translationMap.getOrDefault(keyOrRawValue, keyOrRawValue); + } catch (Exception exception) { + return keyOrRawValue; + } + } + + private String replaceLinks(String message) { + + SettingManager settingManager = ApplicationContextHolder.get().getBean(SettingManager.class); + + String newPlaceholder; + if (replaceLinksWithHtmlFormat) { + newPlaceholder = "{{index:uuid}}"; + } else { + newPlaceholder = "'{{'index:uuid'}}'"; + } + return message.replace("{{link}}", settingManager.getNodeURL() + "api/records/" + newPlaceholder); + } +} diff --git a/core/src/main/java/org/fao/geonet/util/LocalizedEmailParameter.java b/core/src/main/java/org/fao/geonet/util/LocalizedEmailParameter.java new file mode 100644 index 00000000000..f68c36aec38 --- /dev/null +++ b/core/src/main/java/org/fao/geonet/util/LocalizedEmailParameter.java @@ -0,0 +1,179 @@ +//============================================================================= +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the +//=== United Nations (FAO-UN), United Nations World Food Programme (WFP) +//=== and United Nations Environment Programme (UNEP) +//=== +//=== This library is free software; you can redistribute it and/or +//=== modify it under the terms of the GNU Lesser General Public +//=== License as published by the Free Software Foundation; either +//=== version 2.1 of the License, or (at your option) any later version. +//=== +//=== This library is distributed in the hope that it will be useful, +//=== but WITHOUT ANY WARRANTY; without even the implied warranty of +//=== MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +//=== Lesser General Public License for more details. +//=== +//=== You should have received a copy of the GNU Lesser General Public +//=== License along with this library; if not, write to the Free Software +//=== Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +//=== +//=== Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, +//=== Rome - Italy. email: geonetwork@osgeo.org +//============================================================================== + +package org.fao.geonet.util; + +import org.fao.geonet.ApplicationContextHolder; +import org.fao.geonet.kernel.search.JSONLocCacheLoader; + +import java.util.*; + +/** + * Class representing a parameter used in a localized email. + * It provides functionality to set and get parameter properties, and parse parameter values. 
+ */ +public class LocalizedEmailParameter { + private final Object id; + private final ParameterType parameterType; + private final Object value; // (Based on Parameter type) + private final Object metadataUuid; + + /** + * Enum representing different types of parameters used in a localized email context. + *

+ * This enum defines five types of parameters:
+ * <ul>
+ *   <li>{@link ParameterType#MESSAGE_OR_JSON_KEY MESSAGE_OR_JSON_KEY}: A parameter that tries to retrieve its value using {@link ResourceBundle#getString} or JSON localization files if message key was not found.
+ *       The value property is set to the (message or json) key to search for.</li>
+ *   <li>{@link ParameterType#MESSAGE_KEY MESSAGE_KEY}: A parameter that retrieves its value using {@link ResourceBundle#getString}.
+ *       The value property is set to the message key to search for.</li>
+ *   <li>{@link ParameterType#JSON_KEY JSON_KEY}: A parameter that retrieves its value by searching the JSON localization files for the specified key.
+ *       The value property is set to the json key to search for.</li>
+ *   <li>{@link ParameterType#INDEX_FIELD INDEX_FIELD}: A parameter that retrieves its value using {@link XslUtil#getIndexField}.
+ *       The value property is set to the field name to search for, and the uuid property is set to the record uuid to search for (required).</li>
+ *   <li>{@link ParameterType#RAW_VALUE RAW_VALUE}: A parameter with a precomputed value that is simply returned.
+ *       The value property contains the precomputed value.</li>
+ * </ul>
+ *
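+ * <p>
+ * A minimal sketch of how a parameter resolves its value (the parameter ids, index field name and {@code metadataUuid} variable are hypothetical examples):
+ * <pre>{@code
+ * // RAW_VALUE: the precomputed value is returned as-is.
+ * String author = new LocalizedEmailParameter(ParameterType.RAW_VALUE, 1, "John Doe").parseValue(Locale.ENGLISH);
+ *
+ * // INDEX_FIELD: the value is read from the search index of the given record, so the metadata UUID is required.
+ * String title = new LocalizedEmailParameter(ParameterType.INDEX_FIELD, 2, "resourceTitleObject.default", metadataUuid)
+ *     .parseValue(Locale.ENGLISH);
+ * }</pre>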
+ * These types can be used to categorize parameters and define their intended use in the context of localized email parameterization. + */ + public enum ParameterType { + /** + * A parameter that tries to retrieve its value using {@link ResourceBundle#getString} or JSON localization files if message key was not found. + * The value property is set to the (message or json) key to search for. + */ + MESSAGE_OR_JSON_KEY, + + /** + * A parameter that retrieves its value using {@link ResourceBundle#getString} + * The value property is set to the message key to search for. + */ + MESSAGE_KEY, + + /** + * A parameter that retrieves its value by searching the JSON localization files for the specified key. + * The value property is set to the json key to search for. + */ + JSON_KEY, + + /** + * A parameter that retrieves its value using {@link XslUtil#getIndexField} + * The value property is set to the field name to search for. + * The uuid property is set to the record uuid to search for and is required. + */ + INDEX_FIELD, + + /** + * A parameter with a precomputed value that is simply returned. + * The value property contains the precomputed value. + */ + RAW_VALUE + } + + /** + * Constructor with parameters. + * + * @param parameterType the type of the parameter + * @param id the id of the parameter + * @param value the value of the parameter + */ + public LocalizedEmailParameter(ParameterType parameterType, Object id, Object value) { + this.parameterType = parameterType; + this.id = id; + this.value = value; + this.metadataUuid = null; + } + + /** + * Constructor with parameters. + * + * @param parameterType the type of the parameter + * @param id the id of the parameter + * @param value the value of the parameter + * @param metadataUuid The metadata uuid to use for parsing index field values + */ + public LocalizedEmailParameter(ParameterType parameterType, Object id, Object value, String metadataUuid) { + this.parameterType = parameterType; + this.id = id; + this.value = value; + this.metadataUuid = metadataUuid; + } + + /** + * @return the id of the parameter + */ + public Object getId() { + return id; + } + + /** + * Parses the value of the parameter based on its type and the provided locale + * + * @param locale the locale to use to parse the value + * @return the parsed string value + */ + public String parseValue(Locale locale) { + + if (value == null) { + return "null"; + } + + switch (parameterType) { + case MESSAGE_OR_JSON_KEY: + try { + return getResourceBundleString(locale); + } catch (MissingResourceException missingResourceException) { + return getJsonTranslationMapString(locale); + } + case MESSAGE_KEY: + try { + return getResourceBundleString(locale); + } catch (MissingResourceException e) { + return value.toString(); + } + case JSON_KEY: + return getJsonTranslationMapString(locale); + case INDEX_FIELD: + if (metadataUuid == null) throw new IllegalArgumentException("Metadata UUID is required for parameters of type INDEX_FIELD"); + return XslUtil.getIndexField(null, metadataUuid, value, locale); + case RAW_VALUE: + return value.toString(); + default: + throw new IllegalArgumentException("Unsupported parameter type: " + parameterType); + } + } + + private String getResourceBundleString(Locale locale) { + return ResourceBundle.getBundle("org.fao.geonet.api.Messages", locale).getString(value.toString()); + } + + private String getJsonTranslationMapString(Locale locale) { + try { + Map translationMap = new JSONLocCacheLoader(ApplicationContextHolder.get(), 
locale.getISO3Language()).call(); + return translationMap.getOrDefault(value.toString(), value.toString()); + } catch (Exception exception) { + return value.toString(); + } + } +} + diff --git a/core/src/main/java/org/fao/geonet/util/LogUtil.java b/core/src/main/java/org/fao/geonet/util/LogUtil.java index aa1d0d437f4..93f3c023f7d 100644 --- a/core/src/main/java/org/fao/geonet/util/LogUtil.java +++ b/core/src/main/java/org/fao/geonet/util/LogUtil.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2023 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -57,7 +57,7 @@ public static String initializeHarvesterLog(String type, String name) { // Filename safe representation of harvester name (using '_' as needed). final String harvesterName = name.replaceAll("\\W+", "_"); final String harvesterType = type.replaceAll("\\W+", "_"); - SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmm"); + SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss"); String logfile = "harvester_" + harvesterType @@ -71,7 +71,7 @@ public static String initializeHarvesterLog(String type, String name) { } ThreadContext.put("harvest", harvesterName); - ThreadContext.putIfNull("logfile", logfile); + ThreadContext.put("logfile", logfile); ThreadContext.put("timeZone", timeZoneSetting); return logfile; diff --git a/core/src/main/java/org/fao/geonet/util/MailUtil.java b/core/src/main/java/org/fao/geonet/util/MailUtil.java index fc0c743c6fe..517a292b99f 100644 --- a/core/src/main/java/org/fao/geonet/util/MailUtil.java +++ b/core/src/main/java/org/fao/geonet/util/MailUtil.java @@ -364,9 +364,6 @@ private static void configureBasics(String hostName, Integer smtpPort, email.setAuthenticator(new DefaultAuthenticator(username, password)); } - - email.setDebug(true); - if (tls != null && tls) { email.setStartTLSEnabled(tls); email.setStartTLSRequired(tls); diff --git a/core/src/main/java/org/fao/geonet/util/XslUtil.java b/core/src/main/java/org/fao/geonet/util/XslUtil.java index 256129da48b..34b9ae272ee 100644 --- a/core/src/main/java/org/fao/geonet/util/XslUtil.java +++ b/core/src/main/java/org/fao/geonet/util/XslUtil.java @@ -47,6 +47,7 @@ import org.apache.http.impl.client.DefaultHttpClient; import org.fao.geonet.ApplicationContextHolder; import org.fao.geonet.SystemInfo; +import org.fao.geonet.analytics.WebAnalyticsConfiguration; import org.fao.geonet.api.records.attachments.FilesystemStore; import org.fao.geonet.api.records.attachments.FilesystemStoreResourceContainer; import org.fao.geonet.api.records.attachments.Store; @@ -631,13 +632,19 @@ public static MetadataResourceContainer getResourceContainerDescription(String m Store store = BeanFactoryAnnotationUtils.qualifiedBeanOfType(ApplicationContextHolder.get().getBeanFactory(), Store.class, "filesystemStore"); if (store != null) { - if (store.getResourceManagementExternalProperties() != null && store.getResourceManagementExternalProperties().isFolderEnabled()) { - ServiceContext context = ServiceContext.get(); - return store.getResourceContainerDescription(ServiceContext.get(), metadataUuid, approved); - } else { - // Return an empty object which should not be used because the folder is not enabled. 
- return new FilesystemStoreResourceContainer(metadataUuid, -1, null, null, null, approved); + try { + if (store.getResourceManagementExternalProperties() != null && store.getResourceManagementExternalProperties().isFolderEnabled()) { + ServiceContext context = ServiceContext.get(); + return store.getResourceContainerDescription(ServiceContext.get(), metadataUuid, approved); + } else { + // Return an empty object which should not be used because the folder is not enabled. + return new FilesystemStoreResourceContainer(metadataUuid, -1, null, null, null, approved); + } + } catch (RuntimeException e) { + Log.error(Geonet.RESOURCES, "Could not locate resource in getResourceContainerDescription due to runtime exception", e); + return null; } + } Log.error(Geonet.RESOURCES, "Could not locate a Store bean in getResourceContainerDescription"); return null; @@ -843,12 +850,10 @@ public static String getIndexField(Object appName, Object uuid, Object field, Ob try { Set fields = new HashSet<>(); fields.add(fieldname); - // TODO: Multilingual fields - final Map values = searchManager.getFieldsValues(id, fields); + final Map values = searchManager.getFieldsValues(id, fields, language); return values.get(fieldname); } catch (Exception e) { - e.printStackTrace(); - Log.error(Geonet.GEONETWORK, "Failed to get index field '" + fieldname + "' value on '" + id + "', caused by " + e.getMessage()); + Log.warning(Geonet.GEONETWORK, "Failed to get index field '" + fieldname + "' value on '" + id + "', caused by " + e.getMessage()); } return ""; } @@ -1435,6 +1440,19 @@ public static String getThesaurusIdByTitle(String title) { return thesaurus == null ? "" : "geonetwork.thesaurus." + thesaurus.getKey(); } + + /** + * Retrieve the thesaurus title using the thesaurus key. + * + * @param id the thesaurus key + * @return the thesaurus title or empty string if the thesaurus doesn't exist. + */ + public static String getThesaurusTitleByKey(String id) { + ApplicationContext applicationContext = ApplicationContextHolder.get(); + ThesaurusManager thesaurusManager = applicationContext.getBean(ThesaurusManager.class); + Thesaurus thesaurus = thesaurusManager.getThesaurusByName(id); + return thesaurus == null ? 
"" : thesaurus.getTitle(); + } /** @@ -1576,4 +1594,18 @@ private static List buildRecordLink(List hits, String type) { public static String escapeForJson(String value) { return StringEscapeUtils.escapeJson(value); } + + public static String getWebAnalyticsService() { + ApplicationContext applicationContext = ApplicationContextHolder.get(); + WebAnalyticsConfiguration webAnalyticsConfiguration = applicationContext.getBean(WebAnalyticsConfiguration.class); + + return webAnalyticsConfiguration.getService(); + } + + public static String getWebAnalyticsJavascriptCode() { + ApplicationContext applicationContext = ApplicationContextHolder.get(); + WebAnalyticsConfiguration webAnalyticsConfiguration = applicationContext.getBean(WebAnalyticsConfiguration.class); + + return webAnalyticsConfiguration.getJavascriptCode(); + } } diff --git a/core/src/main/java/org/fao/geonet/web/GeoNetworkStrictHttpFirewall.java b/core/src/main/java/org/fao/geonet/web/GeoNetworkStrictHttpFirewall.java new file mode 100644 index 00000000000..cdf34c45f18 --- /dev/null +++ b/core/src/main/java/org/fao/geonet/web/GeoNetworkStrictHttpFirewall.java @@ -0,0 +1,47 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ + +package org.fao.geonet.web; + +import org.springframework.security.web.firewall.StrictHttpFirewall; + +import java.util.regex.Pattern; + +import static java.nio.charset.StandardCharsets.ISO_8859_1; +import static java.nio.charset.StandardCharsets.UTF_8; + +/** + * Spring Security HttpFirewall that allows parsing UTF8 header values. + */ +public class GeoNetworkStrictHttpFirewall extends StrictHttpFirewall { + private static final Pattern ALLOWED_HEADER_VALUE_PATTERN = Pattern.compile("[\\p{IsAssigned}&&[^\\p{IsControl}]]*"); + + public GeoNetworkStrictHttpFirewall() { + super(); + + this.setAllowedHeaderValues(header -> { + String parsed = new String(header.getBytes(ISO_8859_1), UTF_8); + return ALLOWED_HEADER_VALUE_PATTERN.matcher(parsed).matches(); + }); + } +} diff --git a/core/src/main/resources/config-spring-geonetwork.xml b/core/src/main/resources/config-spring-geonetwork.xml index afaf71f9686..052e4d6ae6d 100644 --- a/core/src/main/resources/config-spring-geonetwork.xml +++ b/core/src/main/resources/config-spring-geonetwork.xml @@ -238,6 +238,8 @@ + + tools --> Delete index and reindex. +After updating use **Admin Console > Tools** and use **Delete index and reindex**. 
## List of changes diff --git a/docs/manual/docs/overview/change-log/version-4.4.2.md b/docs/manual/docs/overview/change-log/version-4.4.2.md index 807950b6642..11864f8b524 100644 --- a/docs/manual/docs/overview/change-log/version-4.4.2.md +++ b/docs/manual/docs/overview/change-log/version-4.4.2.md @@ -2,15 +2,13 @@ GeoNetwork 4.4.2 release is a minor release. -## Migration notes +## Update notes -### Java - -**Version 4.4 only works on Java 11.** +When updating please review the following actions: ### Index changes -After update, don't forget to go to admin console --> tools --> Delete index and reindex. +After updating use **Admin Console > Tools** and use **Delete index and reindex**. ## List of changes diff --git a/docs/manual/docs/overview/change-log/version-4.4.3.md b/docs/manual/docs/overview/change-log/version-4.4.3.md new file mode 100644 index 00000000000..1b38da471dd --- /dev/null +++ b/docs/manual/docs/overview/change-log/version-4.4.3.md @@ -0,0 +1,44 @@ +# Version 4.4.3 {#version-423} + +GeoNetwork 4.4.3 release is a minor release. + +## Update notes + +When updating please review the following actions: + +### Index changes + +This version use Elasticsearch version 8 Java client, it is recommended to use an Elasticsearch version 8 server. +However version 7.15+ and 8+ have been tested. + +After updating use **Admin Console > Tools** and use **Delete index and reindex**. + +### Map + +[Stamen background layers are not available, update your maps](https://github.com/geonetwork/core-geonetwork/pull/7715). + + +## List of changes + +Major changes: + +- [Elasticssearch 8 upgrade](https://github.com/geonetwork/core-geonetwork/pull/7599) +- [Editor / Distribution panel improvements](https://github.com/geonetwork/core-geonetwork/pull/7468) +- [Thesaurus / Add support for codelist described using SDMX](https://github.com/geonetwork/core-geonetwork/pull/7790) +- [Thesaurus / Add support for thesaurus described using OWL format](https://github.com/geonetwork/core-geonetwork/pull/7674) +- [Thesaurus / Improve support of EU publication office SKOS format](https://github.com/geonetwork/core-geonetwork/pull/7673) +- [INSPIRE / Add testsuite for IACS](https://github.com/geonetwork/core-geonetwork/pull/7756) +- [Map viewer / Remove Stamen background layers - no longer available](https://github.com/geonetwork/core-geonetwork/pull/7715) +- [i18n / Add welsh language for user interface](https://github.com/geonetwork/core-geonetwork/pull/7851) +- [Index / Add danish language configuration](https://github.com/geonetwork/core-geonetwork/pull/7697) +- [Index / Translated the index warnings and errors](https://github.com/geonetwork/core-geonetwork/pull/7531) +- [Create a metadata / Add dynamic and download privileges to the users in the same group](https://github.com/geonetwork/core-geonetwork/pull/7744) +- [Decouple metadata user feedback from metadata rating feature](https://github.com/geonetwork/core-geonetwork/pull/7796) +- [Extend http proxy to manage duplicated parameters](https://github.com/geonetwork/core-geonetwork/pull/7854) +- [Fix MIME-types on attachments](https://github.com/geonetwork/core-geonetwork/pull/7675) +- [Fix pdf link to the application website](https://github.com/geonetwork/core-geonetwork/pull/7681) +- Update `org.json:json` from version 20140107 to 20240205 +- Documentation / Manual improvements +- Documentation / API SpringDoc fixes + +and more \... 
see [4.4.3 issues](https://github.com/geonetwork/core-geonetwork/issues?q=is%3Aissue+milestone%3A4.4.3+is%3Aclosed) and [pull requests](https://github.com/geonetwork/core-geonetwork/pulls?page=3&q=is%3Apr+milestone%3A4.4.3+is%3Aclosed) for full details. diff --git a/docs/manual/docs/overview/change-log/version-4.4.4.md b/docs/manual/docs/overview/change-log/version-4.4.4.md new file mode 100644 index 00000000000..7a6b27c37c8 --- /dev/null +++ b/docs/manual/docs/overview/change-log/version-4.4.4.md @@ -0,0 +1,29 @@ +# Version 4.4.4 + +GeoNetwork 4.4.4 is a minor release. + +## Update notes + +When updating please review the following actions: + +### Index changes + +After updating use **Admin Console > Tools** and use **Delete index and reindex**: + +* [Passing key into update/remove process xslt for iso 19139 to fix issue with updating/deleting resources with same url](https://github.com/geonetwork/core-geonetwork/pull/7431) + +## List of changes + +Major changes: + +* [CSW / GetRecords / Number of matches in page info](https://github.com/geonetwork/core-geonetwork/pull/7937) + +* [Editor associated resources planel is redesigned with a new user-interface to link to external resources (DOI and URL to external catalogue)](https://github.com/geonetwork/core-geonetwork/pull/7669) + +Fixes: + +* [Search Export CSV - Escape double-quotes with double-quotes instead of backslash](https://github.com/geonetwork/core-geonetwork/pull/7927) + +* [Metadata feedback / Fix email to multiple recipients](https://github.com/geonetwork/core-geonetwork/pull/7875) + +and more \... see [4.4.4-0 issues](https://github.com/geonetwork/core-geonetwork/issues?q=is%3Aissue+milestone%3A4.4.4+is%3Aclosed) and [pull requests](https://github.com/geonetwork/core-geonetwork/pulls?page=3&q=is%3Apr+milestone%3A4.4.4+is%3Aclosed) for full details. diff --git a/docs/manual/docs/overview/change-log/version-4.4.5.md b/docs/manual/docs/overview/change-log/version-4.4.5.md new file mode 100644 index 00000000000..e5a1f37d416 --- /dev/null +++ b/docs/manual/docs/overview/change-log/version-4.4.5.md @@ -0,0 +1,42 @@ +# Version 4.4.5 + +GeoNetwork 4.4.5 is a minor release. + +## Update notes + +When updating please review the following actions: + +### Index changes + +After updating use **Admin Console > Tools** and use **Delete index and reindex**: + + +## List of changes + +Major changes: + +* [Embed Geonetwork in other application using WebComponents](https://github.com/geonetwork/core-geonetwork/pull/6516) + +* [Configure analytics service easily eg. 
matomo](https://github.com/geonetwork/core-geonetwork/pull/7313) + +* [Harvester / Translate incoming records with translation providers](https://github.com/geonetwork/core-geonetwork/pull/7849) + +* [Workflow / Configuration editor board to display draft, approved record or both](https://github.com/geonetwork/core-geonetwork/pull/7477) + +* [Editor / Table mode / Add ordering control](https://github.com/geonetwork/core-geonetwork/pull/8016) + +* [API / Extent / Add geometry collection support](https://github.com/geonetwork/core-geonetwork/pull/7911) + +* [API / Search / Add support for aggregation on related records](https://github.com/geonetwork/core-geonetwork/pull/7939) + +* [i18n / Add armenian, azerbaijani, georgian, romanian and ukrainian languages](https://github.com/geonetwork/core-geonetwork/pull/7968) + +* [Library / JQuery update](https://github.com/geonetwork/core-geonetwork/pull/8015) + + +Fixes: + +* [API / CSW / Fix nextrecords when using pagination](https://github.com/geonetwork/core-geonetwork/pull/7977) + + +and more \... see [4.4.5-0 issues](https://github.com/geonetwork/core-geonetwork/issues?q=is%3Aissue+milestone%3A4.4.5+is%3Aclosed) and [pull requests](https://github.com/geonetwork/core-geonetwork/pulls?page=3&q=is%3Apr+milestone%3A4.4.5+is%3Aclosed) for full details. diff --git a/docs/manual/docs/user-guide/associating-resources/doi.md b/docs/manual/docs/user-guide/associating-resources/doi.md index 31434831688..cb57e8f9589 100644 --- a/docs/manual/docs/user-guide/associating-resources/doi.md +++ b/docs/manual/docs/user-guide/associating-resources/doi.md @@ -7,15 +7,33 @@ The catalogue support DOI creation using: - [DataCite API](https://support.datacite.org/docs/mds-api-guide). - EU publication office API -Configure the API access point in the `admin console --> settings`: +Configure the DOI API access point to publish the metadata in the `Admin console --> Settings --> Doi servers`: -![](img/doi-admin-console.png) +![](img/doi-create-server.png) + +Providing the following information: + +- `Name`: A descriptive name for the server. +- `Description`: (Optional) A verbose description of the server. +- `DataCite API endpoint`: The API url, usually https://mds.datacite.org or https://mds.test.datacite.org for testing. +- `DataCite username` / `DataCite password`: Credentials required to publish the DOI resources. +- `Landing page URL template`: The URL to use to register the DOI. A good default for GeoNetwork is http://localhost:8080/geonetwork/srv/resources/records/{{uuid}}. The landing page URL MUST contains the UUID of the record. +- `Final DOI URL prefix`: (Optional) Keep it empty to use the default https://doi.org prefix. Use https://mds.test.datacite.org/doi when using the test API. +- `DOI pattern`: Default is `{{uuid}}` but the DOI structure can be customized with database id and/or record group eg. `example-{{groupOwner}}-{{id}}`. +- `DataCite prefix`: Usually looks like `10.xxxx`. You will be allowed to register DOI names only under the prefixes that have been assigned to you. +- `Publication groups`: (Optional) Select the groups which metadata should be published to the DOI server. If no groups are selected, the server will be provided to publish the metadata that has no other DOI servers related to the metadata owner group. A record can be downloaded using the DataCite format from the API using: ## Creating the DOI -Once configured, DOI can be created using the interface. DOI is created on demand. It means that a user must ask for creation of a DOI. 
When created, the task is notified by email to the reviewer of the group (by default, can be configured for administrator only using the notification level of the task). +Once configured, DOI can be created using the interface. DOI is created on demand. It means that a user must ask for creation of a DOI. It can be created by: + +- The user who created the metadata. +- A user with Reviewer profile in the metadata group owner. +- A user with Administrator profile. + +When created, the task is notified by email to the reviewer of the group (by default, can be configured for administrator only using the notification level of the task). ![](img/doi-request-menu.png) diff --git a/docs/manual/docs/user-guide/associating-resources/img/doi-create-server.png b/docs/manual/docs/user-guide/associating-resources/img/doi-create-server.png new file mode 100644 index 00000000000..efccf603065 Binary files /dev/null and b/docs/manual/docs/user-guide/associating-resources/img/doi-create-server.png differ diff --git a/docs/manual/docs/user-guide/describing-information/inspire-editing.md b/docs/manual/docs/user-guide/describing-information/inspire-editing.md index 7b3a9e1a358..5e54cc4145a 100644 --- a/docs/manual/docs/user-guide/describing-information/inspire-editing.md +++ b/docs/manual/docs/user-guide/describing-information/inspire-editing.md @@ -709,7 +709,7 @@ It is also possible to use the [latest ISO standard ISO19115-3:2018](https://git Click here to go to stable. +{% endblock %} diff --git a/docs/manual/pom.xml b/docs/manual/pom.xml index 68654e27401..4ac33875de1 100644 --- a/docs/manual/pom.xml +++ b/docs/manual/pom.xml @@ -27,7 +27,7 @@ gn-docs org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.6-SNAPSHOT 4.0.0 gn-guide diff --git a/docs/pom.xml b/docs/pom.xml index e1c5e3b5c5c..6330049c7b0 100644 --- a/docs/pom.xml +++ b/docs/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.6-SNAPSHOT 4.0.0 gn-docs diff --git a/doi/pom.xml b/doi/pom.xml index e0fb8dcbf9f..8f087e09ce3 100644 --- a/doi/pom.xml +++ b/doi/pom.xml @@ -28,7 +28,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.6-SNAPSHOT 4.0.0 diff --git a/doi/src/main/java/org/fao/geonet/doi/client/BaseDoiClient.java b/doi/src/main/java/org/fao/geonet/doi/client/BaseDoiClient.java index 46cb4ab7b99..934895b2cdd 100644 --- a/doi/src/main/java/org/fao/geonet/doi/client/BaseDoiClient.java +++ b/doi/src/main/java/org/fao/geonet/doi/client/BaseDoiClient.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2023 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -94,14 +94,22 @@ protected void create(String url, String body, String contentType, url, body, status, httpResponse.getStatusText(), responseBody); Log.info(LOGGER_NAME, message); - throw new DoiClientException(message); + throw new DoiClientException(String.format( + "Error creating DOI: %s", + message)) + .withMessageKey("exception.doi.serverErrorCreate") + .withDescriptionKey("exception.doi.serverErrorCreate.description", new String[]{message}); } else { Log.info(LOGGER_NAME, String.format( successMessage, url)); } } catch (Exception ex) { Log.error(LOGGER_NAME, " -- Error (exception): " + ex.getMessage(), ex); - throw new DoiClientException(ex.getMessage()); + throw new 
DoiClientException(String.format( + "Error creating DOI: %s", + ex.getMessage())) + .withMessageKey("exception.doi.serverErrorCreate") + .withDescriptionKey("exception.doi.serverErrorCreate.description", new String[]{ex.getMessage()}); } finally { if (postMethod != null) { @@ -139,13 +147,24 @@ protected String retrieve(String url) } else { Log.info(LOGGER_NAME, "Retrieve DOI metadata end -- Error: " + httpResponse.getStatusText()); - throw new DoiClientException( httpResponse.getStatusText() + - CharStreams.toString(new InputStreamReader(httpResponse.getBody()))); + String message = httpResponse.getStatusText() + + CharStreams.toString(new InputStreamReader(httpResponse.getBody())); + + throw new DoiClientException(String.format( + "Error retrieving DOI: %s", + message)) + .withMessageKey("exception.doi.serverErrorRetrieve") + .withDescriptionKey("exception.doi.serverErrorRetrieve.description", new String[]{message}); + } } catch (Exception ex) { Log.error(LOGGER_NAME, " -- Error (exception): " + ex.getMessage(), ex); - throw new DoiClientException(ex.getMessage()); + throw new DoiClientException(String.format( + "Error retrieving DOI: %s", + ex.getMessage())) + .withMessageKey("exception.doi.serverErrorRetrieve") + .withDescriptionKey("exception.doi.serverErrorRetrieve.description", new String[]{ex.getMessage()}); } finally { if (getMethod != null) { diff --git a/doi/src/main/java/org/fao/geonet/doi/client/DoiDataciteClient.java b/doi/src/main/java/org/fao/geonet/doi/client/DoiDataciteClient.java index 589d7f137a9..73317a4b122 100644 --- a/doi/src/main/java/org/fao/geonet/doi/client/DoiDataciteClient.java +++ b/doi/src/main/java/org/fao/geonet/doi/client/DoiDataciteClient.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2023 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -24,8 +24,6 @@ import org.apache.commons.httpclient.HttpStatus; import org.apache.commons.io.IOUtils; -import org.apache.http.auth.AuthScope; -import org.apache.http.auth.UsernamePasswordCredentials; import org.apache.http.client.methods.HttpDelete; import org.fao.geonet.ApplicationContextHolder; import org.fao.geonet.utils.GeonetHttpRequestFactory; @@ -179,14 +177,24 @@ public void deleteDoiMetadata(String doi) if ((status != HttpStatus.SC_NOT_FOUND) && (status != HttpStatus.SC_OK)) { Log.info(LOGGER_NAME, "Delete DOI metadata end -- Error: " + httpResponse.getStatusText()); - throw new DoiClientException( httpResponse.getStatusText() ); + String message = httpResponse.getStatusText(); + + throw new DoiClientException(String.format( + "Error deleting DOI: %s", + message)) + .withMessageKey("exception.doi.serverErrorDelete") + .withDescriptionKey("exception.doi.serverErrorDelete.description", new String[]{message}); } else { Log.info(LOGGER_NAME, "DeleteDOI metadata end"); } } catch (Exception ex) { Log.error(LOGGER_NAME, " -- Error (exception): " + ex.getMessage(), ex); - throw new DoiClientException(ex.getMessage()); + throw new DoiClientException(String.format( + "Error deleting DOI: %s", + ex.getMessage())) + .withMessageKey("exception.doi.serverErrorDelete") + .withDescriptionKey("exception.doi.serverErrorDelete.description", new String[]{ex.getMessage()}); } finally { if (deleteMethod != null) { @@ -219,14 +227,25 @@ public void 
deleteDoi(String doi) if ((status != HttpStatus.SC_NOT_FOUND) && (status != HttpStatus.SC_OK)) { Log.info(LOGGER_NAME, "Delete DOI end -- Error: " + httpResponse.getStatusText()); - throw new DoiClientException( httpResponse.getStatusText() ); + String message = httpResponse.getStatusText(); + + throw new DoiClientException(String.format( + "Error deleting DOI: %s", + message)) + .withMessageKey("exception.doi.serverErrorDelete") + .withDescriptionKey("exception.doi.serverErrorDelete.description", new String[]{message}); } else { Log.info(LOGGER_NAME, "DeleteDOI end"); } } catch (Exception ex) { Log.error(LOGGER_NAME, " -- Error (exception): " + ex.getMessage(), ex); - throw new DoiClientException(ex.getMessage()); + + throw new DoiClientException(String.format( + "Error deleting DOI: %s", + ex.getMessage())) + .withMessageKey("exception.doi.serverErrorDelete") + .withDescriptionKey("exception.doi.serverErrorDelete.description", new String[]{ex.getMessage()}); } finally { if (deleteMethod != null) { diff --git a/doi/src/main/java/org/fao/geonet/doi/client/DoiManager.java b/doi/src/main/java/org/fao/geonet/doi/client/DoiManager.java index 87871c21d72..012c710585e 100644 --- a/doi/src/main/java/org/fao/geonet/doi/client/DoiManager.java +++ b/doi/src/main/java/org/fao/geonet/doi/client/DoiManager.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2010 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -32,8 +32,8 @@ import org.fao.geonet.domain.*; import org.fao.geonet.kernel.AccessManager; import org.fao.geonet.kernel.ApplicableSchematron; -import org.fao.geonet.kernel.DataManager; import org.fao.geonet.kernel.SchematronValidator; +import org.fao.geonet.kernel.datamanager.base.BaseMetadataManager; import org.fao.geonet.kernel.datamanager.base.BaseMetadataSchemaUtils; import org.fao.geonet.kernel.datamanager.base.BaseMetadataUtils; import org.fao.geonet.kernel.schema.MetadataSchema; @@ -41,12 +41,10 @@ import org.fao.geonet.kernel.search.IndexingMode; import org.fao.geonet.kernel.setting.SettingManager; import org.fao.geonet.repository.SchematronRepository; -import org.fao.geonet.utils.Log; import org.fao.geonet.utils.Xml; import org.jdom.Element; import org.jdom.JDOMException; import org.jdom.Namespace; -import org.springframework.beans.factory.annotation.Autowired; import java.io.IOException; import java.nio.file.Files; @@ -60,11 +58,9 @@ /** * Class to register/unregister DOIs using the Datacite Metadata Store (MDS) API. + *

+ * See ... * - * See https://support.datacite.org/docs/mds-api-guide - * - * @author Jose García - * @author Francois Prunayre */ public class DoiManager { private static final String DOI_ADD_XSL_PROCESS = "process/doi-add.xsl"; @@ -75,112 +71,52 @@ public class DoiManager { public static final String DOI_DEFAULT_URL = "https://doi.org/"; public static final String DOI_DEFAULT_PATTERN = "{{uuid}}"; - private IDoiClient client; - private String doiPrefix; - private String doiPattern; - private String landingPageTemplate; - private boolean initialised = false; - private boolean isMedra = false; - - DataManager dm; - SettingManager sm; - BaseMetadataSchemaUtils schemaUtils; - - @Autowired - BaseMetadataUtils metadataUtils; - - @Autowired - SchematronValidator validator; - - @Autowired - DoiBuilder doiBuilder; - - @Autowired - SchematronRepository schematronRepository; + private final SettingManager sm; + private final BaseMetadataSchemaUtils schemaUtils; + private final BaseMetadataManager metadataManager; + private final BaseMetadataUtils metadataUtils; + private final SchematronValidator validator; + private final DoiBuilder doiBuilder; + private final SchematronRepository schematronRepository; + + + public DoiManager(final SettingManager sm, final BaseMetadataSchemaUtils schemaUtils, + final BaseMetadataManager metadataManager, final BaseMetadataUtils metadataUtils, + final SchematronValidator validator, final DoiBuilder doiBuilder, + final SchematronRepository schematronRepository) { + this.sm = sm; + this.schemaUtils = schemaUtils; + this.metadataManager = metadataManager; + this.metadataUtils = metadataUtils; + this.validator = validator; + this.doiBuilder = doiBuilder; + this.schematronRepository = schematronRepository; - - public DoiManager() { - sm = ApplicationContextHolder.get().getBean(SettingManager.class); - dm = ApplicationContextHolder.get().getBean(DataManager.class); - schemaUtils = ApplicationContextHolder.get().getBean(BaseMetadataSchemaUtils.class); - - loadConfig(); } - public boolean isInitialised() { - return initialised; + private IDoiClient createDoiClient(DoiServer doiServer) { + boolean isMedra = isMedraServer(doiServer); + return isMedra ? + new DoiMedraClient(doiServer.getUrl(), doiServer.getUsername(), doiServer.getPassword(), doiServer.getPublicUrl()) : + new DoiDataciteClient(doiServer.getUrl(), doiServer.getUsername(), doiServer.getPassword(), doiServer.getPublicUrl()); } - /** - * Check parameters and build the client. 
- * - */ - public void loadConfig() { - initialised = false; - if (sm != null) { - - String serverUrl = sm.getValue(DoiSettings.SETTING_PUBLICATION_DOI_DOIURL); - String doiPublicUrl = StringUtils.defaultIfEmpty( - sm.getValue(DoiSettings.SETTING_PUBLICATION_DOI_DOIPUBLICURL), - DOI_DEFAULT_URL); - String username = sm.getValue(DoiSettings.SETTING_PUBLICATION_DOI_DOIUSERNAME); - String password = sm.getValue(DoiSettings.SETTING_PUBLICATION_DOI_DOIPASSWORD); - - doiPrefix = sm.getValue(DoiSettings.SETTING_PUBLICATION_DOI_DOIKEY); - doiPattern = StringUtils.defaultIfEmpty( - sm.getValue(DoiSettings.SETTING_PUBLICATION_DOI_DOIPATTERN), - DOI_DEFAULT_PATTERN - ); - - landingPageTemplate = sm.getValue(DoiSettings.SETTING_PUBLICATION_DOI_LANDING_PAGE_TEMPLATE); - - final boolean emptyUrl = StringUtils.isEmpty(serverUrl); - final boolean emptyUsername = StringUtils.isEmpty(username); - final boolean emptyPassword = StringUtils.isEmpty(password); - final boolean emptyPrefix = StringUtils.isEmpty(doiPrefix); - if (emptyUrl || - emptyUsername || - emptyPassword || - emptyPrefix) { - StringBuilder report = new StringBuilder("DOI configuration is not complete. Check in System Configuration to fill the DOI configuration."); - if (emptyUrl) { - report.append("\n* URL MUST be set"); - } - if (emptyUsername) { - report.append("\n* Username MUST be set"); - } - if (emptyPassword) { - report.append("\n* Password MUST be set"); - } - if (emptyPrefix) { - report.append("\n* Prefix MUST be set"); - } - Log.warning(DoiSettings.LOGGER_NAME, - report.toString()); - } else { - Log.debug(DoiSettings.LOGGER_NAME, - "DOI configuration looks perfect."); - isMedra = serverUrl.contains(MEDRA_SEARCH_KEY); - this.client = - isMedra ? - new DoiMedraClient(serverUrl, username, password, doiPublicUrl) : - new DoiDataciteClient(serverUrl, username, password, doiPublicUrl); - initialised = true; - } - } - } + public String checkDoiUrl(DoiServer doiServer, AbstractMetadata metadata) throws DoiClientException { + checkInitialised(doiServer); + checkCanHandleMetadata(doiServer, metadata); - public String checkDoiUrl(AbstractMetadata metadata) { - return doiBuilder.create(doiPattern, doiPrefix, metadata); + return doiBuilder.create(doiServer.getPattern(), doiServer.getPrefix(), metadata); } - public Map check(ServiceContext serviceContext, AbstractMetadata metadata, Element dataciteMetadata) throws Exception { + public Map check(ServiceContext serviceContext, DoiServer doiServer, AbstractMetadata metadata, Element dataciteMetadata) throws Exception { Map conditions = new HashMap<>(); - checkInitialised(); + checkInitialised(doiServer); + checkCanHandleMetadata(doiServer, metadata); conditions.put(DoiConditions.API_CONFIGURED, true); - String doi = doiBuilder.create(doiPattern, doiPrefix, metadata); - checkPreConditions(metadata, doi); + IDoiClient doiClient = createDoiClient(doiServer); + String doi = doiBuilder.create(doiServer.getPattern(), doiServer.getPrefix(), metadata); + checkPreConditions(doiClient, metadata, doi); conditions.put(DoiConditions.RECORD_IS_PUBLIC, true); conditions.put(DoiConditions.STANDARD_SUPPORT, true); @@ -188,26 +124,26 @@ public Map check(ServiceContext serviceContext, AbstractMetadat // ** Convert to DataCite format Element dataciteFormatMetadata = dataciteMetadata == null ? 
- convertXmlToDataCiteFormat(metadata.getDataInfo().getSchemaId(), - metadata.getXmlData(false), doi) : dataciteMetadata; - checkPreConditionsOnDataCite(metadata, doi, dataciteFormatMetadata, serviceContext.getLanguage()); + convertXmlToDataCiteFormat(doiServer, metadata.getDataInfo().getSchemaId(), + metadata.getXmlData(false), doi) : dataciteMetadata; + checkPreConditionsOnDataCite(doiClient, metadata, doi, dataciteFormatMetadata, serviceContext.getLanguage()); conditions.put(DoiConditions.DATACITE_FORMAT_IS_VALID, true); return conditions; } - public Map register(ServiceContext context, AbstractMetadata metadata) throws Exception { + public Map register(ServiceContext context, DoiServer doiServer, AbstractMetadata metadata) throws Exception { Map doiInfo = new HashMap<>(3); // The new DOI for this record - String doi = doiBuilder.create(doiPattern, doiPrefix, metadata); + String doi = doiBuilder.create(doiServer.getPattern(), doiServer.getPrefix(), metadata); doiInfo.put("doi", doi); // The record in datacite format Element dataciteFormatMetadata = - convertXmlToDataCiteFormat(metadata.getDataInfo().getSchemaId(), - metadata.getXmlData(false), doi); + convertXmlToDataCiteFormat(doiServer, metadata.getDataInfo().getSchemaId(), + metadata.getXmlData(false), doi); try { - check(context, metadata, dataciteFormatMetadata); + check(context, doiServer, metadata, dataciteFormatMetadata); } catch (ResourceAlreadyExistException ignore) { // Update DOI doiInfo.put("update", "true"); @@ -215,7 +151,8 @@ public Map register(ServiceContext context, AbstractMetadata met throw e; } - createDoi(context, metadata, doiInfo, dataciteFormatMetadata); + IDoiClient doiClient = createDoiClient(doiServer); + createDoi(context, doiClient, doiServer, metadata, doiInfo, dataciteFormatMetadata); checkDoiCreation(metadata, doiInfo); return doiInfo; @@ -230,7 +167,7 @@ public Map register(ServiceContext context, AbstractMetadata met * @throws IOException * @throws JDOMException */ - private void checkPreConditions(AbstractMetadata metadata, String doi) throws DoiClientException, IOException, JDOMException, ResourceAlreadyExistException { + private void checkPreConditions(IDoiClient doiClient, AbstractMetadata metadata, String doi) throws DoiClientException, IOException, JDOMException, ResourceAlreadyExistException { // Record MUST be public AccessManager am = ApplicationContextHolder.get().getBean(AccessManager.class); boolean visibleToAll = false; @@ -239,11 +176,11 @@ private void checkPreConditions(AbstractMetadata metadata, String doi) throws Do } catch (Exception e) { throw new DoiClientException(String.format( "Failed to check if record '%s' is visible to all for DOI creation." + - " Error is %s.", + " Error is %s.", metadata.getUuid(), e.getMessage())) .withMessageKey("exception.doi.failedVisibilityCheck") .withDescriptionKey("exception.doi.failedVisibilityCheck.description", - new String[]{ metadata.getUuid(), e.getMessage() }); + new String[]{metadata.getUuid(), e.getMessage()}); } if (!visibleToAll) { @@ -251,7 +188,7 @@ private void checkPreConditions(AbstractMetadata metadata, String doi) throws Do "Record '%s' is not public and we cannot request a DOI for such a record. 
Publish this record first.", metadata.getUuid())) .withMessageKey("exception.doi.recordNotPublic") - .withDescriptionKey("exception.doi.recordNotPublic.description", new String[]{ metadata.getUuid() }); + .withDescriptionKey("exception.doi.recordNotPublic.description", new String[]{metadata.getUuid()}); } // Record MUST not contains a DOI @@ -259,7 +196,7 @@ private void checkPreConditions(AbstractMetadata metadata, String doi) throws Do String currentDoi = metadataUtils.getDoi(metadata.getUuid()); if (StringUtils.isNotEmpty(currentDoi)) { // Current doi does not match the one going to be inserted. This is odd - String newDoi = client.createPublicUrl(doi); + String newDoi = doiClient.createPublicUrl(doi); if (!currentDoi.equals(newDoi)) { throw new DoiClientException(String.format( "Record '%s' already contains a DOI %s which is not equal " + @@ -269,7 +206,7 @@ private void checkPreConditions(AbstractMetadata metadata, String doi) throws Do "an existing DOI.", metadata.getUuid(), currentDoi, currentDoi, newDoi)) .withMessageKey("exception.doi.resourcesContainsDoiNotEqual") - .withDescriptionKey("exception.doi.resourcesContainsDoiNotEqual.description", new String[]{ metadata.getUuid(), currentDoi, currentDoi, newDoi }); + .withDescriptionKey("exception.doi.resourcesContainsDoiNotEqual.description", new String[]{metadata.getUuid(), currentDoi, currentDoi, newDoi}); } throw new ResourceAlreadyExistException(String.format( @@ -279,7 +216,7 @@ private void checkPreConditions(AbstractMetadata metadata, String doi) throws Do metadata.getUuid(), currentDoi, currentDoi)) .withMessageKey("exception.doi.resourceContainsDoi") .withDescriptionKey("exception.doi.resourceContainsDoi.description", - new String[]{ metadata.getUuid(), currentDoi, currentDoi }); + new String[]{metadata.getUuid(), currentDoi, currentDoi}); } } catch (ResourceNotFoundException e) { final MetadataSchema schema = schemaUtils.getSchema(metadata.getDataInfo().getSchemaId()); @@ -299,24 +236,23 @@ private void checkPreConditions(AbstractMetadata metadata, String doi) throws Do schema.getName())) .withMessageKey("exception.doi.missingSavedquery") .withDescriptionKey("exception.doi.missingSavedquery.description", - new String[]{ metadata.getUuid(), schema.getName(), - SavedQuery.DOI_GET, e.getMessage(), - schema.getName() }); + new String[]{metadata.getUuid(), schema.getName(), + SavedQuery.DOI_GET, e.getMessage(), + schema.getName()}); } } /** * Check conditions on DataCite side. + * * @param metadata * @param doi * @param dataciteMetadata * @param language */ - private void checkPreConditionsOnDataCite(AbstractMetadata metadata, String doi, Element dataciteMetadata, String language) throws DoiClientException, ResourceAlreadyExistException { + private void checkPreConditionsOnDataCite(IDoiClient doiClient, AbstractMetadata metadata, String doi, Element dataciteMetadata, String language) throws DoiClientException, ResourceAlreadyExistException { // * DataCite API is up an running ? - - try { List validations = new ArrayList<>(); List applicableSchematron = Lists.newArrayList(); @@ -341,7 +277,7 @@ private void checkPreConditionsOnDataCite(AbstractMetadata metadata, String doi, StringBuilder message = new StringBuilder(); if (!failures.isEmpty()) { message.append("
<ul>");
-                    failures.forEach(f -> message.append("<li>").append(((Element)f).getTextNormalize()).append("</li>"));
+                    failures.forEach(f -> message.append("<li>").append(((Element) f).getTextNormalize()).append("</li>"));
                     message.append("</ul>
"); throw new DoiClientException(String.format( @@ -349,9 +285,9 @@ private void checkPreConditionsOnDataCite(AbstractMetadata metadata, String doi, metadata.getUuid(), failures.size(), message)) .withMessageKey("exception.doi.recordNotConformantMissingInfo") .withDescriptionKey("exception.doi.recordNotConformantMissingInfo.description", - new String[]{ metadata.getUuid(), String.valueOf(failures.size()), message.toString() }); + new String[]{metadata.getUuid(), String.valueOf(failures.size()), message.toString()}); } - } catch (IOException|JDOMException e) { + } catch (IOException | JDOMException e) { throw new DoiClientException(String.format( "Record '%s' is not conform with DataCite validation rules for mandatory fields. Error is: %s. " + "Required fields in DataCite are: identifier, creators, titles, publisher, publicationYear, resourceType. " + @@ -360,7 +296,7 @@ private void checkPreConditionsOnDataCite(AbstractMetadata metadata, String doi, metadata.getUuid(), e.getMessage(), sm.getNodeURL(), metadata.getUuid())) .withMessageKey("exception.doi.recordNotConformantMissingMandatory") .withDescriptionKey("exception.doi.recordNotConformantMissingMandatory.description", - new String[]{ metadata.getUuid(), e.getMessage(), sm.getNodeURL(), metadata.getUuid() }); + new String[]{metadata.getUuid(), e.getMessage(), sm.getNodeURL(), metadata.getUuid()}); } // XSD validation @@ -375,24 +311,24 @@ private void checkPreConditionsOnDataCite(AbstractMetadata metadata, String doi, metadata.getUuid(), e.getMessage(), sm.getNodeURL(), metadata.getUuid())) .withMessageKey("exception.doi.recordInvalid") .withDescriptionKey("exception.doi.recordInvalid.description", - new String[]{ metadata.getUuid(), e.getMessage(), sm.getNodeURL(), metadata.getUuid() }); + new String[]{metadata.getUuid(), e.getMessage(), sm.getNodeURL(), metadata.getUuid()}); } // * MDS / DOI does not exist already // curl -i --user username:password https://mds.test.datacite.org/doi/10.5072/GN // Return 404 - final String doiResponse = client.retrieveDoi(doi); + final String doiResponse = doiClient.retrieveDoi(doi); if (doiResponse != null) { throw new ResourceAlreadyExistException(String.format( "Record '%s' looks to be already published on DataCite with DOI '%s'. DOI on Datacite point to: %s. " + "If the DOI is not correct, remove it from the record and ask for a new one.", metadata.getUuid(), - client.createUrl("doi") + "/" + doi, + doiClient.createUrl("doi") + "/" + doi, doi, doi, doiResponse)) .withMessageKey("exception.doi.resourceAlreadyPublished") - .withDescriptionKey("exception.doi.resourceAlreadyPublished.description", new String[]{ metadata.getUuid(), - client.createUrl("doi") + "/" + doi, - doi, doi, doiResponse }); + .withDescriptionKey("exception.doi.resourceAlreadyPublished.description", new String[]{metadata.getUuid(), + doiClient.createUrl("doi") + "/" + doi, + doi, doi, doiResponse}); } // TODO: Could be relevant at some point to return states (draft/findable) @@ -404,10 +340,12 @@ private void checkPreConditionsOnDataCite(AbstractMetadata metadata, String doi, /** * Use the DataCite API to register the new DOI. 
+ * * @param context * @param metadata */ - private void createDoi(ServiceContext context, AbstractMetadata metadata, Map doiInfo, Element dataciteMetadata) throws Exception { + private void createDoi(ServiceContext context, IDoiClient doiClient, DoiServer doiServer, + AbstractMetadata metadata, Map doiInfo, Element dataciteMetadata) throws Exception { // * Now, let's create the DOI // picking a DOI name, @@ -418,29 +356,30 @@ private void createDoi(ServiceContext context, AbstractMetadata metadata, Map doi } - public void unregisterDoi(AbstractMetadata metadata, ServiceContext context) throws DoiClientException, ResourceNotFoundException { - checkInitialised(); + public void unregisterDoi(DoiServer doiServer, AbstractMetadata metadata, ServiceContext context) throws DoiClientException, ResourceNotFoundException { + checkInitialised(doiServer); + checkCanHandleMetadata(doiServer, metadata); - final String doi = doiBuilder.create(doiPattern, doiPrefix, metadata); - final String doiResponse = client.retrieveDoi(doi); + IDoiClient doiClient = createDoiClient(doiServer); + final String doi = doiBuilder.create(doiServer.getPattern(), doiServer.getPrefix(), metadata); + final String doiResponse = doiClient.retrieveDoi(doi); if (doiResponse == null) { throw new ResourceNotFoundException(String.format( "Record '%s' is not available on DataCite. DOI '%s' does not exist.", @@ -467,31 +408,40 @@ public void unregisterDoi(AbstractMetadata metadata, ServiceContext context) thr Element md = metadata.getXmlData(false); String doiUrl = metadataUtils.getDoi(metadata.getUuid()); - client.deleteDoiMetadata(doi); - client.deleteDoi(doi); + doiClient.deleteDoiMetadata(doi); + doiClient.deleteDoi(doi); Element recordWithoutDoi = removeDOIValue(doiUrl, metadata.getDataInfo().getSchemaId(), md); - dm.updateMetadata(context, metadata.getId() + "", recordWithoutDoi, false, true, + metadataManager.updateMetadata(context, metadata.getId() + "", recordWithoutDoi, false, true, context.getLanguage(), new ISODate().toString(), true, IndexingMode.full); } catch (Exception ex) { - throw new DoiClientException(ex.getMessage()); + throw new DoiClientException(String.format( + "Error unregistering DOI: %s", + ex.getMessage())) + .withMessageKey("exception.doi.serverErrorUnregister") + .withDescriptionKey("exception.doi.serverErrorUnregister.description", new String[]{ex.getMessage()}); } } /** * Sets the DOI URL value in the metadata record using the process DOI_ADD_XSL_PROCESS. - * */ - public Element setDOIValue(String doi, String schema, Element md) throws Exception { - Path styleSheet = dm.getSchemaDir(schema).resolve(DOI_ADD_XSL_PROCESS); + public Element setDOIValue(IDoiClient doiClient, String doi, String schema, Element md) throws Exception { + Path styleSheet = schemaUtils.getSchemaDir(schema).resolve(DOI_ADD_XSL_PROCESS); boolean exists = Files.exists(styleSheet); if (!exists) { - throw new DoiClientException(String.format("To create a DOI, the schema has to defined how to insert a DOI in the record. The schema_plugins/%s/process/%s was not found. Create the XSL transformation.", - schema, DOI_ADD_XSL_PROCESS)); + String message = String.format("To create a DOI, the schema has to defined how to insert a DOI in the record. The schema_plugins/%s/process/%s was not found. 
Create the XSL transformation.", + schema, DOI_ADD_XSL_PROCESS); + + throw new DoiClientException(String.format( + "Error creating DOI: %s", + message)) + .withMessageKey("exception.doi.serverErrorCreate") + .withDescriptionKey("exception.doi.serverErrorCreate.description", new String[]{message}); } - String doiPublicUrl = client.createPublicUrl(""); + String doiPublicUrl = doiClient.createPublicUrl(""); Map params = new HashMap<>(1); params.put("doi", doi); @@ -501,14 +451,20 @@ public Element setDOIValue(String doi, String schema, Element md) throws Excepti /** * Sets the DOI URL value in the metadata record using the process DOI_ADD_XSL_PROCESS. - * */ public Element removeDOIValue(String doi, String schema, Element md) throws Exception { - Path styleSheet = dm.getSchemaDir(schema).resolve(DOI_REMOVE_XSL_PROCESS); + Path styleSheet = schemaUtils.getSchemaDir(schema).resolve(DOI_REMOVE_XSL_PROCESS); boolean exists = Files.exists(styleSheet); if (!exists) { - throw new DoiClientException(String.format("To remove a DOI, the schema has to defined how to remove a DOI in the record. The schema_plugins/%s/process/%s was not found. Create the XSL transformation.", - schema, DOI_REMOVE_XSL_PROCESS)); + String message = String.format("To remove a DOI, the schema has to defined how to remove a DOI in the record. The schema_plugins/%s/process/%s was not found. Create the XSL transformation.", + schema, DOI_REMOVE_XSL_PROCESS); + + throw new DoiClientException(String.format( + "Error deleting DOI: %s", + message)) + .withMessageKey("exception.doi.serverErrorDelete") + .withDescriptionKey("exception.doi.serverErrorDelete.description", new String[]{message}); + } Map params = new HashMap<>(1); @@ -523,24 +479,68 @@ public Element removeDOIValue(String doi, String schema, Element md) throws Exce * @return The record converted into the DataCite format. * @throws Exception if there is no conversion available. */ - private Element convertXmlToDataCiteFormat(String schema, Element md, String doi) throws Exception { - final Path styleSheet = dm.getSchemaDir(schema).resolve( - isMedra ? DATACITE_MEDRA_XSL_CONVERSION_FILE : DATACITE_XSL_CONVERSION_FILE); + private Element convertXmlToDataCiteFormat(DoiServer doiServer, String schema, Element md, String doi) throws Exception { + final Path styleSheet = schemaUtils.getSchemaDir(schema).resolve( + isMedraServer(doiServer) ? DATACITE_MEDRA_XSL_CONVERSION_FILE : DATACITE_XSL_CONVERSION_FILE); final boolean exists = Files.exists(styleSheet); if (!exists) { - throw new DoiClientException(String.format("To create a DOI, the record needs to be converted to the DataCite format (https://schema.datacite.org/). You need to create a formatter for this in schema_plugins/%s/%s. If the standard is a profile of ISO19139, you can simply point to the ISO19139 formatter.", - schema, DATACITE_XSL_CONVERSION_FILE)); + String message = String.format("To create a DOI, the record needs to be converted to the DataCite format (https://schema.datacite.org/). You need to create a formatter for this in schema_plugins/%s/%s. 
If the standard is a profile of ISO19139, you can simply point to the ISO19139 formatter.", + schema, DATACITE_XSL_CONVERSION_FILE); + + throw new DoiClientException(String.format( + "Error creating DOI: %s", + message)) + .withMessageKey("exception.doi.serverErrorCreate") + .withDescriptionKey("exception.doi.serverErrorCreate.description", new String[]{message}); } - Map params = new HashMap<>(); + Map params = new HashMap<>(); params.put(DOI_ID_PARAMETER, doi); return Xml.transform(md, styleSheet, params); } - private void checkInitialised() throws DoiClientException { - if (!initialised) { - throw new DoiClientException("DOI configuration is not complete. Check System Configuration and set the DOI configuration."); + private void checkInitialised(DoiServer doiServer) throws DoiClientException { + final boolean emptyUrl = StringUtils.isEmpty(doiServer.getUrl()); + final boolean emptyUsername = StringUtils.isEmpty(doiServer.getUsername()); + final boolean emptyPassword = StringUtils.isEmpty(doiServer.getPassword()); + final boolean emptyPrefix = StringUtils.isEmpty(doiServer.getPrefix()); + + if (emptyUrl || + emptyUsername || + emptyPassword || + emptyPrefix) { + throw new DoiClientException("DOI server configuration is not complete. Check the DOI server configuration to complete it.") + .withMessageKey("exception.doi.configurationMissing") + .withDescriptionKey("exception.doi.configurationMissing.description", new String[]{}); + + } + } + + /** + * Checks if the DOI server can handle the metadata: + * - The DOI server is not publishing metadata for certain metadata group(s) or + * - it publishes metadata from the metadata group owner. + * + * @param doiServer The DOI server. + * @param metadata The metadata to process. + * @throws DoiClientException + */ + private void checkCanHandleMetadata(DoiServer doiServer, AbstractMetadata metadata) throws DoiClientException { + if (!doiServer.getPublicationGroups().isEmpty()) { + Integer groupOwner = metadata.getSourceInfo().getGroupOwner(); + + if (doiServer.getPublicationGroups().stream().noneMatch(g -> g.getId() == groupOwner)) { + throw new DoiClientException( + String.format("DOI server '%s' can not handle the metadata with UUID '%s'.", + doiServer.getName(), metadata.getUuid())) + .withMessageKey("exception.doi.serverCanNotHandleRecord") + .withDescriptionKey("exception.doi.serverCanNotHandleRecord.description", new String[]{doiServer.getName(), metadata.getUuid()}); + } } + } + private boolean isMedraServer(DoiServer doiServer) { + return doiServer.getUrl().contains(MEDRA_SEARCH_KEY); + } } diff --git a/doi/src/main/java/org/fao/geonet/doi/client/DoiMedraClient.java b/doi/src/main/java/org/fao/geonet/doi/client/DoiMedraClient.java index 823545decfa..fd7f7b2699e 100644 --- a/doi/src/main/java/org/fao/geonet/doi/client/DoiMedraClient.java +++ b/doi/src/main/java/org/fao/geonet/doi/client/DoiMedraClient.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2010 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -58,7 +58,10 @@ public String retrieveDoi(String doi) throws DoiClientException { @Override public String retrieveAllDoi(String doi) throws DoiClientException { - throw new DoiClientException(MEDRA_NOT_SUPPORTED_EXCEPTION_MESSAGE); + throw new 
DoiClientException(MEDRA_NOT_SUPPORTED_EXCEPTION_MESSAGE) + .withMessageKey("exception.doi.operationNotSupported") + .withDescriptionKey("exception.doi.operationNotSupported.description", + new String[]{ MEDRA_NOT_SUPPORTED_EXCEPTION_MESSAGE }); } /** diff --git a/domain/pom.xml b/domain/pom.xml index 48d5cd34fa4..812778871e8 100644 --- a/domain/pom.xml +++ b/domain/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.6-SNAPSHOT 4.0.0 diff --git a/domain/src/main/java/org/fao/geonet/domain/DoiServer.java b/domain/src/main/java/org/fao/geonet/domain/DoiServer.java new file mode 100644 index 00000000000..90c93c31c6d --- /dev/null +++ b/domain/src/main/java/org/fao/geonet/domain/DoiServer.java @@ -0,0 +1,284 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ + +package org.fao.geonet.domain; + +import org.fao.geonet.entitylistener.DoiServerEntityListenerManager; +import org.hibernate.annotations.Type; + +import javax.persistence.*; +import java.util.HashSet; +import java.util.Set; + +@Entity +@Table(name = "Doiservers") +@Cacheable +@Access(AccessType.PROPERTY) +@EntityListeners(DoiServerEntityListenerManager.class) +@SequenceGenerator(name = DoiServer.ID_SEQ_NAME, initialValue = 100, allocationSize = 1) +public class DoiServer extends GeonetEntity { + static final String ID_SEQ_NAME = "doiserver_id_seq"; + + private int id; + private String name; + private String description; + private String url; + private String username; + private String password; + private String landingPageTemplate; + private String publicUrl; + private String pattern = "{{uuid}}"; + private String prefix; + private Set publicationGroups = new HashSet<>(); + + /** + * Get the id of the DOI server.
This is autogenerated and when a new DOI server is created + * the DOI server will be assigned a new value.
+ * + * @return the id of the DOI server. + */ + @Id + @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = ID_SEQ_NAME) + @Column(nullable = false) + public int getId() { + return id; + } + + /** + * Set the id of the DOI server.
If you want to update an existing DOI server then you should + * set this id to the id of the DOI server you want to update and set the other values to the + * desired values.
+ * + * @param id the id of the DOI server. + * @return this DOI server object + */ + public DoiServer setId(int id) { + this.id = id; + return this; + } + + /** + * Get the basic/default name of the DOI server. This is non-translated and can be used to look + * up the DOI server like an id can.
This is a required property.
There is a maximum length + * allowed for the name. See the annotation for the length value.
+ * + * @return DOI server name + */ + @Column(nullable = false, length = 32) + public String getName() { + return name; + } + + /** + * Set the basic/default name of the DOI server. This is non-translated and can be used to look + * up the DOI server like an id can.
This is a required property.
There is a maximum length + * allowed for the name. See the annotation on {@link #getName()} for the length value.
+ */ + public DoiServer setName(String name) { + this.name = name; + return this; + } + + /** + * Get a description of the DOI server. + * + * @return the description. + */ + @Column(length = 255) + public String getDescription() { + return description; + } + + /** + * Set the DOI server description. + * + * @param description the description. + * @return this DOI server object. + */ + public DoiServer setDescription(String description) { + this.description = description; + return this; + } + + + /** + * Get the API URL for the DOI server. + * + * @return the DOI server API URL. + */ + @Column(nullable = false, length = 255) + public String getUrl() { + return url; + } + + /** + * Set the REST API configuration URL for the DOI server. + * + * @param url the server URL. + * @return this DOI server object. + */ + public DoiServer setUrl(String url) { + this.url = url; + return this; + } + + /** + * Get the username to use for connecting to the DOI server. + * + * @return the username. + */ + @Column(length = 128) + public String getUsername() { + return username; + } + + public DoiServer setUsername(String username) { + this.username = username; + return this; + } + + /** + * Get the password to use for connecting to the DOI server. + * + * @return the password. + */ + @Column(length = 128) + @Type(type="encryptedString") + public String getPassword() { + return password; + } + + public DoiServer setPassword(String password) { + this.password = password; + return this; + } + + /** + * Set the DOI landing page URL template. + * + * @param landingPageTemplate the landing page URL template. + * @return this DOI server object. + */ + public DoiServer setLandingPageTemplate(String landingPageTemplate) { + this.landingPageTemplate = landingPageTemplate; + return this; + } + + /** + * Get the DOI landing page URL template. + * + * @return the landing page URL template. + */ + @Column(nullable = false, length = 255) + public String getLandingPageTemplate() { + return landingPageTemplate; + } + + /** + * Set the DOI URL prefix. + * + * @param publicUrl the URL prefix. + * @return this DOI server object. + */ + public DoiServer setPublicUrl(String publicUrl) { + this.publicUrl = publicUrl; + return this; + } + + /** + * Get the DOI URL prefix. + * + * @return the URL prefix. + */ + @Column(nullable = false, length = 255) + public String getPublicUrl() { + return publicUrl; + } + + /** + * Set the DOI identifier pattern. + * + * @param pattern the identifier pattern. + * @return this DOI server object. + */ + public DoiServer setPattern(String pattern) { + this.pattern = pattern; + return this; + } + + /** + * Get the DOI identifier pattern. + * + * @return the identifier pattern. + */ + @Column(nullable = false, length = 255) + public String getPattern() { + return pattern; + } + + + /** + * Set the DOI prefix. + * + * @param prefix the DOI prefix. + * @return this DOI server object. + */ + public DoiServer setPrefix(String prefix) { + this.prefix = prefix; + return this; + } + + /** + * Get the DOI prefix. + * + * @return the DOI prefix. + */ + @Column(nullable = false, length = 15) + public String getPrefix() { + return prefix; + } + + /** + * Sets the groups which metadata should be published to the DOI server. + * + * @param publicationGroups Publication groups. + * @return + */ + public void setPublicationGroups(Set publicationGroups) { + this.publicationGroups = publicationGroups; + } + + /** + * Get the groups which metadata is published to the DOI server. + * + * @return Publication groups. 
+ */ + @ManyToMany(fetch = FetchType.EAGER, cascade = CascadeType.PERSIST) + @JoinTable( + name = "doiservers_group", + joinColumns = @JoinColumn(name = "doiserver_id"), + inverseJoinColumns = @JoinColumn(name = "group_id")) + public Set getPublicationGroups() { + return publicationGroups; + } +} diff --git a/domain/src/main/java/org/fao/geonet/entitylistener/DoiServerEntityListenerManager.java b/domain/src/main/java/org/fao/geonet/entitylistener/DoiServerEntityListenerManager.java new file mode 100644 index 00000000000..8d4af1bdf92 --- /dev/null +++ b/domain/src/main/java/org/fao/geonet/entitylistener/DoiServerEntityListenerManager.java @@ -0,0 +1,65 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ + +package org.fao.geonet.entitylistener; + +import org.fao.geonet.domain.DoiServer; + +import javax.persistence.*; + +public class DoiServerEntityListenerManager extends AbstractEntityListenerManager { + @PrePersist + public void prePresist(final DoiServer entity) { + handleEvent(PersistentEventType.PrePersist, entity); + } + + @PreRemove + public void preRemove(final DoiServer entity) { + handleEvent(PersistentEventType.PreRemove, entity); + } + + @PostPersist + public void postPersist(final DoiServer entity) { + handleEvent(PersistentEventType.PostPersist, entity); + } + + @PostRemove + public void postRemove(final DoiServer entity) { + handleEvent(PersistentEventType.PostRemove, entity); + } + + @PreUpdate + public void preUpdate(final DoiServer entity) { + handleEvent(PersistentEventType.PreUpdate, entity); + } + + @PostUpdate + public void postUpdate(final DoiServer entity) { + handleEvent(PersistentEventType.PostUpdate, entity); + } + + @PostLoad + public void postLoad(final DoiServer entity) { + handleEvent(PersistentEventType.PostLoad, entity); + } +} diff --git a/domain/src/main/java/org/fao/geonet/repository/DoiServerRepository.java b/domain/src/main/java/org/fao/geonet/repository/DoiServerRepository.java new file mode 100644 index 00000000000..25ca32429ce --- /dev/null +++ b/domain/src/main/java/org/fao/geonet/repository/DoiServerRepository.java @@ -0,0 +1,36 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any 
later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ + +package org.fao.geonet.repository; + +import org.fao.geonet.domain.DoiServer; +import org.springframework.data.jpa.repository.JpaSpecificationExecutor; + +import java.util.Optional; + +public interface DoiServerRepository extends + GeonetRepository, + JpaSpecificationExecutor { + + Optional findOneById(int id); +} diff --git a/domain/src/main/java/org/fao/geonet/repository/GeonetRepositoryImpl.java b/domain/src/main/java/org/fao/geonet/repository/GeonetRepositoryImpl.java index 280b24dc2a8..765b55b7b9c 100644 --- a/domain/src/main/java/org/fao/geonet/repository/GeonetRepositoryImpl.java +++ b/domain/src/main/java/org/fao/geonet/repository/GeonetRepositoryImpl.java @@ -110,6 +110,7 @@ protected static Element findAllAsXml(EntityManager ent return rootEl; } + @Transactional public T update(ID id, Updater updater) { final T entity = _entityManager.find(this._entityClass, id); diff --git a/domain/src/main/java/org/fao/geonet/repository/LinkRepository.java b/domain/src/main/java/org/fao/geonet/repository/LinkRepository.java index eb5cb852a9b..ef6f510df98 100644 --- a/domain/src/main/java/org/fao/geonet/repository/LinkRepository.java +++ b/domain/src/main/java/org/fao/geonet/repository/LinkRepository.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2016 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -27,8 +27,8 @@ import org.springframework.data.jpa.repository.JpaSpecificationExecutor; import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.List; +import java.util.Optional; /** * Data Access object for the {@link Link} entities. @@ -39,9 +39,7 @@ public interface LinkRepository extends GeonetRepository, LinkRep * * @return one link or null. */ - @Nullable - Link findOneByUrl(@Nonnull String url); + Optional findOneByUrl(@Nonnull String url); - @Nullable List findAllByUrlIn(@Nonnull List url); } diff --git a/domain/src/main/java/org/fao/geonet/repository/MetadataValidationRepositoryCustom.java b/domain/src/main/java/org/fao/geonet/repository/MetadataValidationRepositoryCustom.java index 6b723f8eaf9..cf53f7fb36b 100644 --- a/domain/src/main/java/org/fao/geonet/repository/MetadataValidationRepositoryCustom.java +++ b/domain/src/main/java/org/fao/geonet/repository/MetadataValidationRepositoryCustom.java @@ -51,7 +51,7 @@ public interface MetadataValidationRepositoryCustom { * @param metadataId the id of the metadata. 
* @return the number of rows deleted */ - @Modifying(clearAutomatically=true) + @Modifying(flushAutomatically = true, clearAutomatically = true) @Transactional @Query(value="DELETE FROM MetadataValidation v where v.id.metadataId = ?1 AND valtype != 'inspire'") int deleteAllInternalValidationById_MetadataId(Integer metadataId); diff --git a/domain/src/main/java/org/fao/geonet/repository/specification/LinkSpecs.java b/domain/src/main/java/org/fao/geonet/repository/specification/LinkSpecs.java index cd77c680549..8ddfc576891 100644 --- a/domain/src/main/java/org/fao/geonet/repository/specification/LinkSpecs.java +++ b/domain/src/main/java/org/fao/geonet/repository/specification/LinkSpecs.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2016 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -23,31 +23,15 @@ package org.fao.geonet.repository.specification; -import org.fao.geonet.domain.Link; -import org.fao.geonet.domain.Link_; -import org.fao.geonet.domain.Metadata; -import org.fao.geonet.domain.MetadataLink; -import org.fao.geonet.domain.MetadataLink_; -import org.fao.geonet.domain.MetadataSourceInfo_; -import org.fao.geonet.domain.Metadata_; -import org.fao.geonet.domain.OperationAllowed; -import org.fao.geonet.domain.OperationAllowedId_; -import org.fao.geonet.domain.OperationAllowed_; -import org.fao.geonet.domain.ReservedGroup; -import org.fao.geonet.domain.ReservedOperation; +import com.google.common.collect.Sets; +import org.fao.geonet.domain.*; import org.springframework.data.jpa.domain.Specification; -import javax.persistence.criteria.CriteriaBuilder; -import javax.persistence.criteria.CriteriaQuery; -import javax.persistence.criteria.Join; -import javax.persistence.criteria.JoinType; -import javax.persistence.criteria.Path; -import javax.persistence.criteria.Predicate; -import javax.persistence.criteria.Root; -import javax.persistence.criteria.Subquery; +import javax.persistence.criteria.*; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.Set; public class LinkSpecs { private LinkSpecs() { @@ -58,92 +42,126 @@ public static Specification filter(String urlPartToContain, List associatedRecords, Integer[] groupPublishedIds, Integer[] groupOwnerIds, + Integer[] httpStatusValueFilter, + boolean excludeHarvestedMetadataFilter, Integer[] editingGroupIds) { - return new Specification() { - @Override - public Predicate toPredicate(Root root, CriteriaQuery query, CriteriaBuilder cb) { - List predicates = new ArrayList<>(); + return (root, query, cb) -> { + List predicates = new ArrayList<>(); - if (state != null) { - Path statePath = root.get(Link_.lastState); - predicates.add(cb.equal(statePath, state)); - } + if (state != null) { + Path statePath = root.get(Link_.lastState); + predicates.add(cb.equal(statePath, state)); + } - if (urlPartToContain != null) { - Path urlPath = root.get(Link_.url); - predicates.add( - cb.like(urlPath, - cb.literal(String.format("%%%s%%", urlPartToContain)))); - } + if (urlPartToContain != null) { + Path urlPath = root.get(Link_.url); + predicates.add( + cb.like(urlPath, + cb.literal(String.format("%%%s%%", urlPartToContain)))); + } + + if (associatedRecords != null) { + Join metadataJoin = root.join(Link_.records, JoinType.INNER); + predicates.add(metadataJoin.get("metadataUuid").in(associatedRecords)); + } + + if 
(excludeHarvestedMetadataFilter) { + Join metadataJoin = root.join(Link_.records, JoinType.INNER); - if (associatedRecords != null) { - Join metadataJoin = root.join(Link_.records, JoinType.INNER); - predicates.add(metadataJoin.get("metadataUuid").in(associatedRecords)); + Subquery subquery = query.subquery(Integer.class); + final Root metadataRoot = subquery.from(Metadata.class); + Path isHarvestedAttributePath = metadataRoot.get(AbstractMetadata_.harvestInfo).get(MetadataHarvestInfo_.harvested_JPAWorkaround); + Predicate equalHarvestPredicate = cb.equal(isHarvestedAttributePath, cb.literal(Constants.toYN_EnabledChar(false))); + subquery.where( + equalHarvestPredicate); + + Path metadataId = metadataRoot.get(AbstractMetadata_.id); + subquery.select(metadataId); + + predicates.add(metadataJoin.get(MetadataLink_.metadataId).in(subquery)); + query.distinct(true); + } + + if (httpStatusValueFilter != null && httpStatusValueFilter.length > 0) { + Join linkLinkStatusJoin = root.join(Link_.linkStatus, JoinType.LEFT); + + Integer[] valuesIn = Arrays.stream(httpStatusValueFilter).filter(i -> i >= 0).toArray(Integer[]::new); + Set setValuesNotIn = Sets.newHashSet(httpStatusValueFilter); + setValuesNotIn.removeAll(Arrays.asList(valuesIn)); + Integer[] valuesNotIn = setValuesNotIn.stream().map(i -> -1 * i).toArray(Integer[]::new); + + if (valuesIn.length > 0) { + predicates.add(cb.and( + cb.equal(linkLinkStatusJoin.get(LinkStatus_.checkDate), root.get(Link_.lastCheck)), + linkLinkStatusJoin.get((LinkStatus_.statusValue)).in(Arrays.asList( + Arrays.stream(valuesIn).map(String::valueOf).toArray())))); } - if (editingGroupIds != null && editingGroupIds.length > 0) { - Join metadataJoin = root.join(Link_.records, JoinType.INNER); - - Subquery subquery = query.subquery(Integer.class); - final Root opAllowRoot = subquery.from(OperationAllowed.class); - final Root metadataRoot = subquery.from(Metadata.class); - final Predicate groupOwnerPredicate = - metadataRoot.get(Metadata_.sourceInfo).get(MetadataSourceInfo_.groupOwner).in(editingGroupIds); - final Predicate metadataOperations = cb.equal(metadataRoot.get(Metadata_.id), opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.metadataId)); - Predicate editableGroups = opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.groupId).in(Arrays.asList(editingGroupIds)); - Predicate operationTypeEdit = - cb.equal( - opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.operationId), - cb.literal(ReservedOperation.editing.getId())); - subquery.where( - cb.or( - cb.and(metadataOperations, groupOwnerPredicate), - cb.and(editableGroups, operationTypeEdit))); - - Path opAllowedMetadataId = opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.metadataId); - subquery.select(opAllowedMetadataId); - - predicates.add(metadataJoin.get(MetadataLink_.metadataId).in(subquery)); - query.distinct(true); + if (valuesNotIn.length > 0) { + predicates.add(cb.and( + cb.equal(linkLinkStatusJoin.get(LinkStatus_.checkDate), root.get(Link_.lastCheck)), + cb.not(linkLinkStatusJoin.get((LinkStatus_.statusValue)).in(Arrays.asList( + Arrays.stream(valuesNotIn).map(String::valueOf).toArray()))))); } + } - if (groupPublishedIds != null && groupPublishedIds.length > 0) { - Join metadataJoin = root.join(Link_.records, JoinType.INNER); + Join metadataJoin = root.join(Link_.records, JoinType.INNER); + Subquery subquery = query.subquery(Integer.class); + final Root opAllowRoot = subquery.from(OperationAllowed.class); + final Root metadataRoot = 
subquery.from(Metadata.class); - Subquery subquery = query.subquery(Integer.class); - Root opAllowRoot = subquery.from(OperationAllowed.class); - Predicate publishedToIndicatedGroup = - opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.groupId).in(groupPublishedIds); - Predicate operationTypeView = cb.equal( + boolean editinGroupQuery = editingGroupIds != null && editingGroupIds.length > 0; + boolean groupPublishedQuery = groupPublishedIds != null && groupPublishedIds.length > 0; + boolean groupOwnerQuery = groupOwnerIds != null && groupOwnerIds.length > 0; + + List subQueryPredicates = new ArrayList<>(); + + if (editinGroupQuery) { + final Predicate groupOwnerPredicate = + metadataRoot.get(Metadata_.sourceInfo).get(MetadataSourceInfo_.groupOwner).in(editingGroupIds); + final Predicate metadataOperations = cb.equal(metadataRoot.get(Metadata_.id), opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.metadataId)); + Predicate editableGroups = opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.groupId).in(Arrays.asList(editingGroupIds)); + Predicate operationTypeEdit = + cb.equal( opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.operationId), - cb.literal(ReservedOperation.view.getId())); - subquery.where( - cb.and(publishedToIndicatedGroup, operationTypeView)); + cb.literal(ReservedOperation.editing.getId())); - Path opAllowedMetadataId = opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.metadataId); - subquery.select(opAllowedMetadataId); + subQueryPredicates.add(cb.or( + cb.and(metadataOperations, groupOwnerPredicate), + cb.and(editableGroups, operationTypeEdit))); + } - predicates.add(metadataJoin.get(MetadataLink_.metadataId).in(subquery)); - query.distinct(true); - } + if (groupPublishedQuery) { + Predicate publishedToIndicatedGroup = + opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.groupId).in(groupPublishedIds); + Predicate operationTypeView = cb.equal( + opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.operationId), + cb.literal(ReservedOperation.view.getId())); - if (groupOwnerIds != null && groupOwnerIds.length > 0) { - Join metadataJoin = root.join(Link_.records, JoinType.INNER); - Subquery subquery = query.subquery(Integer.class); - final Root metadataRoot = subquery.from(Metadata.class); - final Predicate groupOwnerPredicate = - metadataRoot.get(Metadata_.sourceInfo).get(MetadataSourceInfo_.groupOwner).in(groupOwnerIds); - subquery.where(groupOwnerPredicate); + subQueryPredicates.add(cb.and(publishedToIndicatedGroup, operationTypeView)); + } - Path metadataId = metadataRoot.get(Metadata_.id); - subquery.select(metadataId); + if (groupOwnerQuery) { + final Predicate groupOwnerPredicate = + metadataRoot.get(Metadata_.sourceInfo).get(MetadataSourceInfo_.groupOwner).in(groupOwnerIds); - predicates.add(metadataJoin.get(MetadataLink_.metadataId).in(subquery)); - query.distinct(true); - } - return cb.and(predicates.toArray(new Predicate[]{})); + subQueryPredicates.add(groupOwnerPredicate); } + + + if (subQueryPredicates.size() > 0) { + subquery.where(subQueryPredicates.toArray(new Predicate[]{})); + + Path opAllowedMetadataId = opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.metadataId); + subquery.select(opAllowedMetadataId); + + predicates.add(metadataJoin.get(MetadataLink_.metadataId).in(subquery)); + } + + query.distinct(true); + + return cb.and(predicates.toArray(new Predicate[]{})); }; } } diff --git a/domain/src/test/java/org/fao/geonet/repository/DoiServerRepositoryTest.java 
b/domain/src/test/java/org/fao/geonet/repository/DoiServerRepositoryTest.java new file mode 100644 index 00000000000..bc8daaf4bb6 --- /dev/null +++ b/domain/src/test/java/org/fao/geonet/repository/DoiServerRepositoryTest.java @@ -0,0 +1,142 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ + +package org.fao.geonet.repository; + +import org.fao.geonet.domain.DoiServer; +import org.fao.geonet.domain.Group; +import org.jasypt.encryption.pbe.StandardPBEStringEncryptor; +import org.jasypt.hibernate5.encryptor.HibernatePBEEncryptorRegistry; +import org.junit.BeforeClass; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; + +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; +import java.util.Optional; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.junit.Assert.assertEquals; + +public class DoiServerRepositoryTest extends AbstractSpringDataTest { + + @Autowired + private DoiServerRepository doiServerRepository; + + @Autowired + private GroupRepository groupRepository; + + @PersistenceContext + EntityManager entityManager; + + @BeforeClass + public static void init() { + StandardPBEStringEncryptor strongEncryptor = new StandardPBEStringEncryptor(); + strongEncryptor.setPassword("testpassword"); + + HibernatePBEEncryptorRegistry registry = + HibernatePBEEncryptorRegistry.getInstance(); + registry.registerPBEStringEncryptor("STRING_ENCRYPTOR", strongEncryptor); + } + + public static DoiServer newDoiServer(AtomicInteger nextId) { + int id = nextId.incrementAndGet(); + return new DoiServer() + .setName("Name " + id) + .setDescription("Desc " + id) + .setUrl("http://server" + id) + .setUsername("username" + id) + .setPassword("password" + id) + .setLandingPageTemplate("http://landingpage" + id) + .setPublicUrl("http://publicurl" + id) + .setPattern("pattern" + id) + .setPrefix("prefix" + id); + } + + @Test + public void test_Save_Count_FindOnly_DeleteAll() throws Exception { + assertEquals(0, doiServerRepository.count()); + DoiServer doiServer = newDoiServer(); + DoiServer savedDoiServer = doiServerRepository.save(doiServer); + + doiServerRepository.flush(); + entityManager.flush(); + entityManager.clear(); + + doiServer.setId(savedDoiServer.getId()); + assertEquals(1, doiServerRepository.count()); + Optional retrievedDoiServerByIdOpt = doiServerRepository.findOneById(doiServer.getId()); + assertEquals(true, retrievedDoiServerByIdOpt.isPresent()); + assertSameContents(doiServer, retrievedDoiServerByIdOpt.get()); + + 
doiServerRepository.deleteAll(); + + doiServerRepository.flush(); + entityManager.flush(); + entityManager.clear(); + + assertEquals(0, doiServerRepository.count()); + } + + @Test + public void testUpdate() throws Exception { + Group group1 = groupRepository.save(GroupRepositoryTest.newGroup(_inc)); + Group group2 = groupRepository.save(GroupRepositoryTest.newGroup(_inc)); + + assertEquals(0, doiServerRepository.count()); + DoiServer doiServer = newDoiServer(); + doiServer.getPublicationGroups().add(group1); + + DoiServer savedDoiServer = doiServerRepository.save(doiServer); + + doiServerRepository.flush(); + entityManager.flush(); + entityManager.clear(); + + doiServer.setId(savedDoiServer.getId()); + + assertEquals(1, doiServerRepository.count()); + Optional retrievedDoiServerByIdOpt = doiServerRepository.findOneById(doiServer.getId()); + assertEquals(true, retrievedDoiServerByIdOpt.isPresent()); + assertSameContents(doiServer, retrievedDoiServerByIdOpt.get()); + + doiServer.setName("New Name"); + doiServer.getPublicationGroups().add(group2); + DoiServer savedDoiServer2 = doiServerRepository.save(doiServer); + + doiServerRepository.flush(); + entityManager.flush(); + entityManager.clear(); + + assertSameContents(savedDoiServer, savedDoiServer2); + + assertEquals(1, doiServerRepository.count()); + retrievedDoiServerByIdOpt = doiServerRepository.findOneById(doiServer.getId()); + assertSameContents(doiServer, retrievedDoiServerByIdOpt.get()); + } + + + private DoiServer newDoiServer() { + return newDoiServer(_inc); + } +} diff --git a/domain/src/test/java/org/fao/geonet/repository/LinkRespositoryTest.java b/domain/src/test/java/org/fao/geonet/repository/LinkRespositoryTest.java new file mode 100644 index 00000000000..4bcdcab74c7 --- /dev/null +++ b/domain/src/test/java/org/fao/geonet/repository/LinkRespositoryTest.java @@ -0,0 +1,92 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. 
email: geonetwork@osgeo.org + */ + +package org.fao.geonet.repository; + +import org.fao.geonet.domain.Link; +import org.fao.geonet.domain.LinkType; +import org.junit.Assert; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; + + +public class LinkRespositoryTest extends AbstractSpringDataTest { + + @Autowired + private LinkRepository repository; + + @Test + public void testFindAllByUrlIn() { + Link link = new Link(); + link.setLinkType(LinkType.HTTP); + link.setUrl("https://test.com/link"); + + repository.save(link); + + List links = new ArrayList<>(); + links.add(link.getUrl()); + List linkList = repository.findAllByUrlIn(links); + + Assert.assertNotNull(linkList); + Assert.assertEquals(1, linkList.size()); + Assert.assertEquals(link.getUrl(), linkList.get(0).getUrl()); + } + + @Test + public void testFindAllByUrlInNoResults() { + List links = new ArrayList<>(); + links.add("https://test.com/link"); + List linkList = repository.findAllByUrlIn(links); + + Assert.assertNotNull(linkList); + Assert.assertEquals(0, linkList.size()); + } + + @Test + public void testFindOneByUrl() { + Link link = new Link(); + link.setLinkType(LinkType.HTTP); + link.setUrl("https://test.com/link"); + + repository.save(link); + + Optional linkToCheck = repository.findOneByUrl("https://test.com/link"); + + Assert.assertNotNull(linkToCheck); + Assert.assertTrue(linkToCheck.isPresent()); + Assert.assertEquals(link.getUrl(), linkToCheck.get().getUrl()); + } + + @Test + public void testFindOneByUrlNoResult() { + Optional link = repository.findOneByUrl("https://test.com/link"); + + Assert.assertNotNull(link); + Assert.assertTrue(link.isEmpty()); + } + +} diff --git a/domain/src/test/java/org/fao/geonet/repository/specification/LinkSpecsTest.java b/domain/src/test/java/org/fao/geonet/repository/specification/LinkSpecsTest.java new file mode 100644 index 00000000000..6f025abd585 --- /dev/null +++ b/domain/src/test/java/org/fao/geonet/repository/specification/LinkSpecsTest.java @@ -0,0 +1,324 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. 
email: geonetwork@osgeo.org + */ + +package org.fao.geonet.repository.specification; + +import org.fao.geonet.domain.*; +import org.fao.geonet.repository.*; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.*; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; + +public class LinkSpecsTest extends AbstractSpringDataTest { + @Autowired + MetadataRepository metadataRepository; + + @Autowired + MetadataLinkRepository metadataLinkRepository; + + @Autowired + LinkStatusRepository linkStatusRepository; + + @Autowired + OperationAllowedRepository operationAllowedRepository; + + @Autowired + LinkRepository linkRepository; + + AtomicInteger inc = new AtomicInteger(); + + @Before + public void createTestData() { + // Create a non harvested metadata + Metadata metadata = MetadataRepositoryTest.newMetadata(inc); + metadata.getSourceInfo().setGroupOwner(2); + metadataRepository.save(metadata); + + Link link = new Link(); + link.setLinkType(LinkType.HTTP); + link.setUrl("https://test.com/link"); + link.setLastState(1); + + ISODate checkStatusDate = new ISODate(); + + Set linkStatuses = new HashSet<>(); + LinkStatus linkStatus = new LinkStatus(); + linkStatus.setLink(link); + linkStatus.setStatusValue("200"); + linkStatus.setCheckDate(checkStatusDate); + linkStatuses.add(linkStatus); + + link.setLinkStatus(linkStatuses); + + MetadataLink metadataLink = new MetadataLink(); + metadataLink.setMetadataId(metadata.getId()); + metadataLink.setMetadataUuid(metadata.getUuid()); + metadataLink.setLink(link); + + Set recordLinks = new HashSet<>(); + recordLinks.add(metadataLink); + link.setRecords(recordLinks); + link.setLastCheck(checkStatusDate); + linkRepository.save(link); + + metadataLinkRepository.save(metadataLink); + linkStatusRepository.save(linkStatus); + + // View in group 2, edit in group 2 in implicit from metadata owner group + OperationAllowed operationAllowedViewMd1 = new OperationAllowed(); + OperationAllowedId operationAllowedIdViewMd1 = new OperationAllowedId(); + operationAllowedIdViewMd1.setMetadataId(metadata.getId()); + operationAllowedIdViewMd1.setGroupId(2); + operationAllowedIdViewMd1.setOperationId(ReservedOperation.view.getId()); + operationAllowedViewMd1.setId(operationAllowedIdViewMd1); + operationAllowedRepository.save(operationAllowedViewMd1); + + // Edit in group 3 + OperationAllowed operationAllowedEditMd1 = new OperationAllowed(); + OperationAllowedId operationAllowedIdEditMd1 = new OperationAllowedId(); + operationAllowedIdEditMd1.setMetadataId(metadata.getId()); + operationAllowedIdEditMd1.setGroupId(3); + operationAllowedIdEditMd1.setOperationId(ReservedOperation.editing.getId()); + operationAllowedEditMd1.setId(operationAllowedIdEditMd1); + operationAllowedRepository.save(operationAllowedEditMd1); + + // Create a harvested metadata + Metadata metadata2 = MetadataRepositoryTest.newMetadata(inc); + metadata2.getSourceInfo().setGroupOwner(2); + MetadataHarvestInfo metadataHarvestInfo = new MetadataHarvestInfo(); + metadataHarvestInfo.setHarvested(true); + metadataHarvestInfo.setUuid(UUID.randomUUID().toString()); + metadata2.setHarvestInfo(metadataHarvestInfo); + + metadataRepository.save(metadata2); + + Link link2 = new Link(); + link2.setLinkType(LinkType.HTTP); + link2.setUrl("https://test.com/link2"); + link2.setLastCheck(checkStatusDate); + link2.setLastState(-1); + + Set linkStatuses2 = new HashSet<>(); + LinkStatus linkStatus2 
= new LinkStatus(); + linkStatus2.setLink(link2); + linkStatus2.setStatusValue("404"); + linkStatus2.setCheckDate(checkStatusDate); + linkStatuses2.add(linkStatus2); + + link2.setLinkStatus(linkStatuses2); + + MetadataLink metadataLink2 = new MetadataLink(); + metadataLink2.setMetadataId(metadata2.getId()); + metadataLink2.setMetadataUuid(metadata2.getUuid()); + metadataLink2.setLink(link2); + + Set recordLinks2 = new HashSet<>(); + recordLinks2.add(metadataLink2); + link2.setRecords(recordLinks2); + linkRepository.save(link2); + + metadataLinkRepository.save(metadataLink2); + linkStatusRepository.save(linkStatus2); + + // View in group 2, edit in group 2 in implicit from metadata owner group + OperationAllowed operationAllowedViewMd2 = new OperationAllowed(); + OperationAllowedId operationAllowedIdViewMd2 = new OperationAllowedId(); + operationAllowedIdViewMd2.setMetadataId(metadata2.getId()); + operationAllowedIdViewMd2.setGroupId(2); + operationAllowedIdViewMd2.setOperationId(ReservedOperation.view.getId()); + operationAllowedViewMd2.setId(operationAllowedIdViewMd2); + operationAllowedRepository.save(operationAllowedViewMd2); + } + + @Test + public void testLinkSpecsFilterUrlPartToContainMatch() { + // Query excluding harvested metadata + List linkList = linkRepository.findAll(LinkSpecs.filter("https://test.com", null, null, null, null, null, true, null)); + Assert.assertEquals(1, linkList.size()); + + // Query not excluding harvested metadata + List linkList2 = linkRepository.findAll(LinkSpecs.filter("https://test.com", null, null, null, null, null, false, null)); + Assert.assertEquals(2, linkList2.size()); + + } + + @Test + public void testLinkSpecsFilterUrlPartToContainNoMatch() { + List linkList = linkRepository.findAll(LinkSpecs.filter("https://test2.com", null, null, null, null, null, false, null)); + Assert.assertEquals(0, linkList.size()); + } + + @Test + public void testLinkSpecsFilterAssociatedRecordsMatch() { + List associatedRecords = metadataRepository.findAll().stream().map(Metadata::getUuid).collect(Collectors.toList()); + + // Query excluding harvested metadata + List linkList = linkRepository.findAll(LinkSpecs.filter(null, null, associatedRecords, null, null, null, true, null)); + Assert.assertEquals(1, linkList.size()); + + // Query not excluding harvested metadata + List linkList2 = linkRepository.findAll(LinkSpecs.filter(null, null, associatedRecords, null, null, null, false, null)); + Assert.assertEquals(2, linkList2.size()); + } + + @Test + public void testLinkSpecsFilterAssociatedRecordsNoMatch() { + List associatedRecords = new ArrayList<>(); + associatedRecords.add("aaaa"); + + List linkList = linkRepository.findAll(LinkSpecs.filter(null, null, associatedRecords, null, null, null, false, null)); + Assert.assertEquals(0, linkList.size()); + } + + @Test + public void testLinkSpecsFilterHttpStatusFilterMatch() { + Integer[] httpStatusValueFilter = new Integer[]{200, 404}; + + // Query excluding harvested metadata + List linkList = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, null, httpStatusValueFilter, true, null)); + Assert.assertEquals(1, linkList.size()); + + // Query not excluding harvested metadata + List linkList2 = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, null, httpStatusValueFilter, false, null)); + Assert.assertEquals(2, linkList2.size()); + } + + @Test + public void testLinkSpecsFilterHttpStatusFilterNoMatch() { + Integer[] httpStatusValueFilter = new Integer[]{500}; + + List linkList = 
linkRepository.findAll(LinkSpecs.filter(null, null, null, null, null, httpStatusValueFilter, false, null)); + Assert.assertEquals(0, linkList.size()); + } + + @Test + public void testLinkSpecsFilterGroupOwnersIdsMatch() { + Integer[] groupOwnerIds = new Integer[]{2}; + + // Query excluding harvested metadata + List linkList = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, groupOwnerIds, null, true, null)); + Assert.assertEquals(1, linkList.size()); + + // Query not excluding harvested metadata + List linkList2 = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, groupOwnerIds, null, false, null)); + Assert.assertEquals(2, linkList2.size()); + } + + @Test + public void testLinkSpecsFilterGroupOwnersIdsNoMatch() { + Integer[] groupOwnerIds = new Integer[]{3}; + + List linkList = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, groupOwnerIds, null, false, null)); + Assert.assertEquals(0, linkList.size()); + } + + @Test + public void testLinkSpecsFilterGroupPublishedIdsMatch() { + Integer[] groupPublishedIds = new Integer[]{2}; + + // Query excluding harvested metadata + List linkList = linkRepository.findAll(LinkSpecs.filter(null, null, null, groupPublishedIds, null, null, true, null)); + Assert.assertEquals(1, linkList.size()); + + // Query not excluding harvested metadata + List linkList2 = linkRepository.findAll(LinkSpecs.filter(null, null, null, groupPublishedIds, null, null, false, null)); + Assert.assertEquals(2, linkList2.size()); + } + + @Test + public void testLinkSpecsFilterGroupPublishedIdsNoMatch() { + Integer[] groupPublishedIds = new Integer[]{3}; + + List linkList = linkRepository.findAll(LinkSpecs.filter(null, null, null, groupPublishedIds, null, null, false, null)); + Assert.assertEquals(0, linkList.size()); + } + + @Test + public void testLinkSpecsFilterEditingGroupIdsMatch() { + Integer[] editingGroupIds1 = new Integer[]{2}; + + // Query excluding harvested metadata + List linkList = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, null, null, true, editingGroupIds1)); + Assert.assertEquals(1, linkList.size()); + + // Query not excluding harvested metadata + List linkList2 = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, null, null, false, editingGroupIds1)); + Assert.assertEquals(2, linkList2.size()); + + Integer[] editingGroupIds2 = new Integer[]{3}; + + // Query excluding harvested metadata + List linkList3 = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, null, null, true, editingGroupIds2)); + Assert.assertEquals(1, linkList3.size()); + + // Query not excluding harvested metadata + List linkList4 = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, null, null, false, editingGroupIds2)); + Assert.assertEquals(1, linkList4.size()); + } + + @Test + public void testLinkSpecsFilterEditingGroupIdsNoMatch() { + Integer[] editingGroupIds = new Integer[]{4}; + + List linkList = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, null, null, false, editingGroupIds)); + Assert.assertEquals(0, linkList.size()); + } + + @Test + public void testLinkSpecsStateMatch() { + List linkList = linkRepository.findAll(LinkSpecs.filter(null, -1, null, null, null, null, false, null)); + Assert.assertEquals(1, linkList.size()); + + // Query not excluding harvested metadata + List linkList2 = linkRepository.findAll(LinkSpecs.filter(null, 1, null, null, null, null, false, null)); + Assert.assertEquals(1, linkList2.size()); + } + + @Test + public void testLinkSpecsStateNoMatch() { + 
List linkList = linkRepository.findAll(LinkSpecs.filter(null, 0, null, null, null, null, false, null)); + Assert.assertEquals(0, linkList.size()); + } + + + @Test + public void testLinkSpecsSeveralFilters() { + // Find links with state 1, related to metadata published to group 2 + Integer[] groupPublishedIds = new Integer[]{2}; + + List linkList = linkRepository.findAll(LinkSpecs.filter(null, 1, null, groupPublishedIds, null, null, false, null)); + Assert.assertEquals(1, linkList.size()); + + // Find links that contain the url 'https://test.com', with http status 200 / 404 / 500, related to metadata owned by groups 2 / 3 + Integer[] httpStatusValueFilter = new Integer[]{200, 404, 500}; + Integer[] groupOwnerIds = new Integer[]{2, 3}; + + List linkList2 = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, groupOwnerIds, httpStatusValueFilter, false, null)); + Assert.assertEquals(2, linkList2.size()); + } +} diff --git a/es/README.md b/es/README.md index bd0bdaefefd..c4b8cb15ac2 100644 --- a/es/README.md +++ b/es/README.md @@ -1,19 +1,48 @@ # Install, configure and start Elasticsearch -## Manual installation +## Installation options -1. Download Elasticsearch (at least 7.6.2 for Geonetwork 4.0.x) from https://www.elastic.co/downloads/elasticsearch -and copy to the ES module, e.g., es/elasticsearch-7.6.2 - -2. Start ES using: +This section describes several methods for configuring Elasticsearch for development. - ```shell script - ./bin/elasticsearch +These configurations should not be used for a production deployment. + +### Docker installation (Recommended) + +1. Use docker pull to download the image (you can check version in the :file:`pom.xml` file): + + ``` + docker pull docker.elastic.co/elasticsearch/elasticsearch:8.14.0 + ``` + +2. Use docker run, leaving 9200 available: + + ``` + docker run -p 9200:9200 -p 9300:9300 \ + -e "discovery.type=single-node" \ + -e "xpack.security.enabled=false" \ + -e "xpack.security.enrollment.enabled=false" \ + docker.elastic.co/elasticsearch/elasticsearch:8.14.0 ``` 3. Check that elasticsearch is running by visiting http://localhost:9200 in a browser -## Maven installation +### Docker compose installation + +1. Use docker compose with the provided [docker-compose.yml](docker-compose.yml): + + ``` + cd es + docker-compose up + ``` + +3. Check that it is running using your browser: + + * Elasticsearch: http://localhost:9200 + * Kibana: http://localhost:5601 + +### Maven installation + +Maven installation ensure you always are using the ``es.version`` version specified in ``pom.xml``. 1. Maven can take care of the installation steps: @@ -30,38 +59,32 @@ and copy to the ES module, e.g., es/elasticsearch-7.6.2 ``` 3. Check that elasticsearch is running by visiting http://localhost:9200 in a browser -## Docker installation +## Manual installation -1. Use docker pull to download the image (you can check version in the :file:`pom.xml` file): +1. Download Elasticsearch 8.14.0 from https://www.elastic.co/downloads/elasticsearch +and copy to the ES module, e.g., ``es/elasticsearch-8.14.0` - ``` - docker pull docker.elastic.co/elasticsearch/elasticsearch:7.6.2 - ``` +2. Disable the security -2. Use docker run, leaving 9200 available: + Elasticsearch 8 has security enabled by default. 
To disable this configuration for development, update the file `config/elasticsearch.yml` adding at the end: ``` - docker run -p 9200:9200 -p 9300:9300 -e "discovery.type=single-node" docker.elastic.co/elasticsearch/elasticsearch:7.6.2 + xpack.security.enabled: false + xpack.security.enrollment.enabled: false ``` -3. Check that elasticsearch is running by visiting http://localhost:9200 in a browser - -## Docker compose installation - -1. Use docker compose with the provided [docker-compose.yml](docker-compose.yml): +3. Start ES using: - ``` - cd es - docker-compose up + ```shell script + ./bin/elasticsearch ``` -3. Check that it is running using your browser: - - * Elasticsearch: http://localhost:9200 - * Kibana: http://localhost:5601 +4. Check that elasticsearch is running by visiting http://localhost:9200 in a browser # Configuration +## Index management + Optionally you can manually create index but they will be created by the catalogue when the Elastic instance is available and if index does not exist. @@ -104,7 +127,7 @@ Don't hesitate to propose a Pull Request with the new language. 1. Configure ES to start on server startup. It is recommended to protect `gn-records` index from the Internet access. - * Note that for debian-based servers the current deb download (7.3.2) can be installed rather than installing manually and can be configured to run as a service using the instructions here: https://www.elastic.co/guide/en/elasticsearch/reference/current/starting-elasticsearch.html + * Note that for debian-based servers the current deb download (8.14.0) can be installed rather than installing manually and can be configured to run as a service using the instructions here: https://www.elastic.co/guide/en/elasticsearch/reference/current/starting-elasticsearch.html # Troubleshoot @@ -145,3 +168,22 @@ field expansion for [*] matches too many fields, limit: 1024 An option is to restrict `queryBase` to limit the number of field to query on. `any:(${any}) resourceTitleObject.default:(${any})^2` is a good default. Using `${any}` will probably trigger the error if the number of records is high. The other option is to increase `indices.query.bool.max_clause_count`. + + +## Disk space threshold + +The server application will refuse to write new content unless there is enough free space available (by default 1/4 of your hard drive). 
+ +To turn off this check: + +``` + curl -XPUT http://localhost:9200/_cluster/settings -H 'Content-Type: application/json' -d '{ "transient" : { "cluster.routing.allocation.disk.threshold_enabled" : false } }' +``` + +## Blocked by index read-only / allow delete + +To recover: + +``` +curl -XPUT -H "Content-Type: application/json" http://localhost:9200/_all/_settings -d '{"index.blocks.read_only_allow_delete": null}' +``` diff --git a/es/docker-compose.yml b/es/docker-compose.yml index 4c8f574bea2..6d30f675bb1 100644 --- a/es/docker-compose.yml +++ b/es/docker-compose.yml @@ -2,23 +2,25 @@ version: '3' services: elasticsearch: - image: docker.elastic.co/elasticsearch/elasticsearch:7.6.2 - container_name: elasticsearch + image: docker.elastic.co/elasticsearch/elasticsearch:8.14.0 + container_name: elasticsearch8 environment: - cluster.name=docker-cluster - bootstrap.memory_lock=true - discovery.type=single-node - "ES_JAVA_OPTS=-Xms512m -Xmx512m" + - xpack.security.enabled=false + - xpack.security.enrollment.enabled=false ulimits: memlock: soft: -1 hard: -1 volumes: - - ./es-dashboards/data:/usr/share/elasticsearch/data + - ./es-dashboards/data/index:/usr/share/elasticsearch/data ports: - "9200:9200" kibana: - image: docker.elastic.co/kibana/kibana:7.6.2 - container_name: kibana + image: docker.elastic.co/kibana/kibana:8.14.0 + container_name: kibana8 ports: - "5601:5601" diff --git a/es/es-dashboards/README.md b/es/es-dashboards/README.md index b95aa299cb4..9a2b7527487 100644 --- a/es/es-dashboards/README.md +++ b/es/es-dashboards/README.md @@ -1,24 +1,20 @@ # Install, configure and start Kibana -## Manual installation - -Download Kibana from https://www.elastic.co/downloads/kibana. For Geonetwork 3.8.x download at least version 7.2.1 +## Installation options -Set Kibana base path and index name in config/kibana.yml: +### Docker compose installation (Recommended) -``` -server.basePath: "/geonetwork/dashboards" -server.rewriteBasePath: false -``` +1. Use docker compose with the provided [docker-compose.yml](es/docker-compose.yml): -Adapt if needed ```elasticsearch.url``` and ```server.host```. - -Start Kibana manually: + ``` + cd es + docker-compose up + ``` -``` -cd kibana/bin -./kibana -``` +3. Check that it is running using your browser: + + * Elasticsearch: http://localhost:9200 + * Kabana: http://localhost:5601 ## Maven installation @@ -41,20 +37,26 @@ cd kibana/bin mvn exec:exec -Dkb-start ``` -## Docker compose installation +## Manual installation + +1. Download Kibana 8.14.0 from https://www.elastic.co/downloads/kibana + +2. Set Kibana base path and index name in config/kibana.yml: + + ``` + server.basePath: "/geonetwork/dashboards" + server.rewriteBasePath: false + ``` + +3. Adapt if needed ```elasticsearch.url``` and ```server.host```. -1. Use docer compose with the provided [docker-compose.yml](docker-compose.yml): +4. Start Kibana manually: ``` - cd es - docker-compose up + cd kibana/bin + ./kibana ``` -3. Check that it is running using your browser: - - * Elasticsearch: http://localhost:9200 - * Kabana: http://localhost:5601 - ## Import Configuration 1. Kibana should be running from: @@ -69,16 +71,17 @@ cd kibana/bin http://localhost:8080/geonetwork/dashboards ``` + ## Troubleshoot If it does not start properly, check Kibana log files (eg. it may fail if Elasticsearch version is not compatible with Kibana version). -Visit Kibana in a browser using one of the above links and go to 'Saved Objects'. 
Import export.ndjson from https://github.com/geonetwork/core-geonetwork/blob/4.0.x/es/es-dashboards/data/export.ndjson +Visit Kibana in a browser using one of the above links and go to 'Saved Objects'. Import export.ndjson from https://github.com/geonetwork/core-geonetwork/blob/main/es/es-dashboards/data/export.ndjson ### Production Use -Kibana can be installed from the debian files, and 7.3.2 is confirmed as working with Geonetwork 3.8.x. +Kibana can be installed from the debian files, and Kibana 8.14.0 is confirmed as working with Geonetwork 4.4.x. Set Kibana to start when the server starts up, using the instructions at https://www.elastic.co/guide/en/kibana/current/start-stop.html diff --git a/es/es-dashboards/data/index/.gitkeep b/es/es-dashboards/data/index/.gitkeep new file mode 100644 index 00000000000..e69de29bb2d diff --git a/es/es-dashboards/pom.xml b/es/es-dashboards/pom.xml index cc49025ac53..81ab69fcab1 100644 --- a/es/es-dashboards/pom.xml +++ b/es/es-dashboards/pom.xml @@ -24,12 +24,74 @@ 4.0.0 gn-es-dashboards GeoNetwork dashboard app based on Kibana - gn-es org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.6-SNAPSHOT + + + + + org.apache.maven.plugins + maven-enforcer-plugin + + + check-readme + + enforce + + + + + Update README.md examples for Elasticsearch ${es.version} + + import java.util.regex.Pattern; + + esVersion = "${es.version}"; + print("Scanning README for " + esVersion); + + docker = Pattern.compile("Kibana (\\d.\\d\\d.\\d)"); + download = Pattern.compile("Download Kibana (\\d.\\d\\d.\\d)"); + + patterns = new Pattern[]{ docker, download}; + + readme = new BufferedReader(new FileReader("README.md")); + + number = 0; + while ((line = readme.readLine()) != null) { + number++; + for (pattern : patterns ){ + matcher = pattern.matcher(line); + if (matcher.find()) { + if (!esVersion.equals(matcher.group(1))) { + print("README.md:"+number+" FAILURE: " + line); + return false; + } + } + } + } + readme.close(); + true; + + + + + + + check-docker + + enforce + + + true + + + + + + + kb-download diff --git a/es/pom.xml b/es/pom.xml index 5e6a40550da..77d91442f84 100644 --- a/es/pom.xml +++ b/es/pom.xml @@ -5,13 +5,112 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.6-SNAPSHOT 4.0.0 gn-es GeoNetwork index using Elasticsearch pom + + + + org.apache.maven.plugins + maven-enforcer-plugin + + + check-readme + + enforce + + + + + Update README.md examples for Elasticsearch ${es.version} + + import java.util.regex.Pattern; + + esVersion = "${es.version}"; + print("Scanning README for " + esVersion); + + docker = Pattern.compile("elasticsearch:(\\d.\\d\\d.\\d)"); + download = Pattern.compile("Download Elasticsearch (\\d.\\d\\d.\\d)"); + folder = Pattern.compile("es/elasticsearch-(\\d.\\d\\d.\\d)"); + + patterns = new Pattern[]{ docker, download, folder}; + + readme = new BufferedReader(new FileReader("README.md")); + + number = 0; + while ((line = readme.readLine()) != null) { + number++; + for (pattern : patterns ){ + matcher = pattern.matcher(line); + if (matcher.find()) { + if (!esVersion.equals(matcher.group(1))) { + print("README.md:"+number+" FAILURE: " + line); + return false; + } + } + } + } + readme.close(); + true; + + + + + + + check-docker + + enforce + + + + + Update docker-compose.yml for Elasticsearch ${es.version} + + import java.util.regex.Pattern; + + boolean scanDockerCompose(String filename){ + esVersion = "${es.version}"; + print("Scanning "+filename+" for " + esVersion); + + docker = Pattern.compile("elasticsearch:(\\d.\\d\\d.\\d)"); + kibana 
= Pattern.compile("kibana:(\\d.\\d\\d.\\d)"); + patterns = new Pattern[]{ docker, kibana}; + + reader = new BufferedReader(new FileReader("${project.basedir}"+"/"+filename)); + + number = 0; + while ((line = reader.readLine()) != null) { + number++; + for (pattern : patterns ){ + matcher = pattern.matcher(line); + if (matcher.find()) { + if (!esVersion.equals(matcher.group(1))) { + print(filename+":"+number+" FAILURE: " + line); + return false; + } + } + } + } + reader.close(); + return true; + } + + return scanDockerCompose("docker-compose.yml"); + + + + + + + + + + es-download diff --git a/estest/pom.xml b/estest/pom.xml index e9ba611bc41..e1285a801bd 100644 --- a/estest/pom.xml +++ b/estest/pom.xml @@ -5,7 +5,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.6-SNAPSHOT 4.0.0 diff --git a/events/pom.xml b/events/pom.xml index 8b8405f0b5e..332f074acbb 100644 --- a/events/pom.xml +++ b/events/pom.xml @@ -28,7 +28,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.6-SNAPSHOT GeoNetwork Events diff --git a/harvesters/pom.xml b/harvesters/pom.xml index c830794b85b..ad15ba83add 100644 --- a/harvesters/pom.xml +++ b/harvesters/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.6-SNAPSHOT 4.0.0 diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/BaseAligner.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/BaseAligner.java index 09502913bfb..101f6fd78ab 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/BaseAligner.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/BaseAligner.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2016 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -28,6 +28,7 @@ import org.fao.geonet.domain.AbstractMetadata; import org.fao.geonet.domain.MetadataCategory; import org.fao.geonet.kernel.DataManager; +import org.fao.geonet.kernel.SchemaManager; import org.fao.geonet.kernel.datamanager.IMetadataManager; import org.fao.geonet.kernel.harvest.harvester.AbstractHarvester; import org.fao.geonet.kernel.harvest.harvester.AbstractParams; @@ -35,14 +36,20 @@ import org.fao.geonet.kernel.harvest.harvester.GroupMapper; import org.fao.geonet.kernel.harvest.harvester.Privileges; import org.fao.geonet.kernel.setting.SettingManager; +import org.fao.geonet.kernel.setting.Settings; import org.fao.geonet.repository.MetadataCategoryRepository; import org.fao.geonet.repository.OperationAllowedRepository; +import org.fao.geonet.utils.Xml; import org.jdom.Element; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.util.StringUtils; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Arrays; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; @@ -62,7 +69,7 @@ public abstract class BaseAligner
extends AbstractAlig public final AtomicBoolean cancelMonitor; - public BaseAligner(AtomicBoolean cancelMonitor) { + protected BaseAligner(AtomicBoolean cancelMonitor) { this.cancelMonitor = cancelMonitor; } @@ -71,7 +78,7 @@ public void addCategories(AbstractMetadata metadata, Iterable categories String serverCategory, boolean saveMetadata) { MetadataCategoryRepository metadataCategoryRepository = context.getBean(MetadataCategoryRepository.class); - Map nameToCategoryMap = new HashMap(); + Map nameToCategoryMap = new HashMap<>(); for (MetadataCategory metadataCategory : metadataCategoryRepository.findAll()) { nameToCategoryMap.put("" + metadataCategory.getId(), metadataCategory); } @@ -119,9 +126,9 @@ public void addPrivileges(String id, Iterable privilegesIterable, Gr String name = localGroups.getName(priv.getGroupId()); if (name == null) { - LOGGER.debug(" - Skipping removed group with id:{}", priv.getGroupId()); + LOGGER.debug(" - Skipping removed group with id: {}", priv.getGroupId()); } else { - LOGGER.debug(" - Setting privileges for group : {}", name); + LOGGER.debug(" - Setting privileges for group: {}", name); for (int opId : priv.getOperations()) { name = dataManager.getAccessManager().getPrivilegeName(opId); //--- all existing operation @@ -133,4 +140,63 @@ public void addPrivileges(String id, Iterable privilegesIterable, Gr } } } + + /** + * Applies a xslt process (schema_folder/process/translate.xsl) to translate create the metadata + * fields configured in the harvester to the languages configured, using the translation provider + * configured in the application settings. + * + * If no translation provider is configured or if the schema doesn't have the translation xslt, + * the translation process is not applied to the metadata. + * + * @param context + * @param md + * @param schema + * @return + */ + public Element translateMetadataContent(ServiceContext context, + Element md, + String schema) { + + SettingManager settingManager = context.getBean(SettingManager.class); + + String translationProvider = settingManager.getValue(Settings.SYSTEM_TRANSLATION_PROVIDER); + + if (!StringUtils.hasLength(translationProvider)) { + LOGGER.warn(" metadata content can't be translated. Translation provider not configured."); + return md; + } + + if (!StringUtils.hasLength(params.getTranslateContentLangs()) || + !StringUtils.hasLength(params.getTranslateContentFields())) { + LOGGER.warn(" metadata content can't be translated. 
No languages or fields provided to translate."); + return md; + } + + SchemaManager schemaManager = context.getBean(SchemaManager.class); + + Path filePath = schemaManager.getSchemaDir(schema).resolve("process").resolve( "translate.xsl"); + + if (!Files.exists(filePath)) { + LOGGER.debug(String.format(" metadata content translation process not available for schema %s", schema)); + } else { + Element processedMetadata; + try { + Map processParams = new HashMap<>(); + List langs = Arrays.asList(params.getTranslateContentLangs().split(",")); + processParams.put("languages", langs); + + List fields = Arrays.asList(params.getTranslateContentFields().split("\\n")); + processParams.put("fieldsToTranslate", fields); + + processedMetadata = Xml.transform(md, filePath, processParams); + LOGGER.debug(" metadata content translated."); + md = processedMetadata; + } catch (Exception e) { + LOGGER.warn(String.format(" metadata content translated error: %s", e.getMessage())); + } + } + return md; + } + } diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractHarvester.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractHarvester.java index bec3d2cda1a..2398aa96c10 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractHarvester.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractHarvester.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2020 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -85,15 +85,7 @@ import java.time.OffsetDateTime; import java.time.ZoneOffset; import java.time.format.DateTimeFormatter; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.TimeZone; -import java.util.UUID; +import java.util.*; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.ReentrantLock; @@ -140,8 +132,12 @@ public abstract class AbstractHarvester ownedByHarvester = Specification.where(MetadataSpecs.hasHarvesterUuid(getParams().getUuid())); Set sources = new HashSet<>(); - for (Integer id : metadataRepository.findAllIdsBy(ownedByHarvester)) { - sources.add(metadataUtils.findOne(id).getSourceInfo().getSourceId()); - metadataManager.deleteMetadata(context, "" + id); + for (Integer metadataId : metadataRepository.findAllIdsBy(ownedByHarvester)) { + sources.add(metadataUtils.findOne(metadataId).getSourceInfo().getSourceId()); + metadataManager.deleteMetadata(context, "" + metadataId); } // Remove all sources related to the harvestUuid if they are not linked to any record anymore @@ -569,7 +565,10 @@ private void login() throws Exception { UserRepository repository = this.context.getBean(UserRepository.class); User user = null; if (StringUtils.isNotEmpty(ownerId)) { - user = repository.findById(Integer.parseInt(ownerId)).get(); + Optional userOptional = repository.findById(Integer.parseInt(ownerId)); + if (userOptional.isPresent()) { + user = userOptional.get(); + } } // for harvesters created before owner was added to the harvester code, @@ -693,21 +692,21 @@ protected OperResult harvest() { private void 
logHarvest(String logfile, Logger logger, String nodeName, String lastRun, long elapsedTime) { try { // record the results/errors for this harvest in the database - Element result = getResult(); + Element resultEl = getResult(); if (error != null) { - result = JeevesException.toElement(error); + resultEl = JeevesException.toElement(error); } - Element priorLogfile_ = result.getChild("logfile"); - if (priorLogfile_ != null) { + Element priorLogfileEl = resultEl.getChild("logfile"); + if (priorLogfileEl != null) { // removing prior logfile - logger.warning("Detected duplicate logfile: " + priorLogfile_.getText()); - result.getChildren().remove(priorLogfile_); + logger.warning("Detected duplicate logfile: " + priorLogfileEl.getText()); + resultEl.getChildren().remove(priorLogfileEl); } - Element logfile_ = new Element("logfile"); - logfile_.setText(logfile); - result.addContent(logfile_); + Element logfileEl = new Element("logfile"); + logfileEl.setText(logfile); + resultEl.addContent(logfileEl); - result.addContent(toElement(errors)); + resultEl.addContent(toElement(errors)); final HarvestHistoryRepository historyRepository = context.getBean(HarvestHistoryRepository.class); final HarvestHistory history = new HarvestHistory() .setHarvesterType(getType()) @@ -716,7 +715,7 @@ private void logHarvest(String logfile, Logger logger, String nodeName, String l .setElapsedTime((int) elapsedTime) .setHarvestDate(new ISODate(lastRun)) .setParams(getParams().getNodeElement()) - .setInfo(result); + .setInfo(resultEl); historyRepository.save(history); @@ -742,18 +741,18 @@ private void logHarvest(String logfile, Logger logger, String nodeName, String l */ private Element toElement(List errors) { Element res = new Element("errors"); - for (HarvestError error : errors) { + for (HarvestError harvestError : errors) { Element herror = new Element("error"); Element desc = new Element("description"); - desc.setText(error.getDescription()); + desc.setText(harvestError.getDescription()); herror.addContent(desc); Element hint = new Element("hint"); - hint.setText(error.getHint()); + hint.setText(harvestError.getHint()); herror.addContent(hint); - herror.addContent(JeevesException.toElement(error.getOrigin())); + herror.addContent(JeevesException.toElement(harvestError.getOrigin())); res.addContent(herror); } return res; @@ -810,8 +809,8 @@ private final String doAdd(Element node) throws BadInputEx, SQLException { //--- force the creation of a new uuid params.setUuid(UUID.randomUUID().toString()); - String id = harvesterSettingsManager.add("harvesting", "node", getType()); - storeNode(params, "id:" + id); + String nodeId = harvesterSettingsManager.add("harvesting", "node", getType()); + storeNode(params, "id:" + nodeId); Source source = new Source(params.getUuid(), params.getName(), params.getTranslations(), SourceType.harvester); final String icon = params.getIcon(); @@ -822,7 +821,7 @@ private final String doAdd(Element node) throws BadInputEx, SQLException { } context.getBean(SourceRepository.class).save(source); - return id; + return nodeId; } private void doUpdate(String id, Element node) throws BadInputEx, SQLException { @@ -915,6 +914,9 @@ private void storeNode(P params, String path) throws SQLException { harvesterSettingsManager.add(ID_PREFIX + contentId, "importxslt", params.getImportXslt()); harvesterSettingsManager.add(ID_PREFIX + contentId, "batchEdits", params.getBatchEdits()); harvesterSettingsManager.add(ID_PREFIX + contentId, "validate", params.getValidate()); + 
harvesterSettingsManager.add(ID_PREFIX + contentId, "translateContent", params.isTranslateContent()); + harvesterSettingsManager.add(ID_PREFIX + contentId, "translateContentLangs", params.getTranslateContentLangs()); + harvesterSettingsManager.add(ID_PREFIX + contentId, "translateContentFields", params.getTranslateContentFields()); //--- setup stats node ---------------------------------------- @@ -948,8 +950,8 @@ private void storePrivileges(P params, String path) { private void storeCategories(P params, String path) { String categId = harvesterSettingsManager.add(path, "categories", ""); - for (String id : params.getCategories()) { - harvesterSettingsManager.add(ID_PREFIX + categId, "category", id); + for (String cId : params.getCategories()) { + harvesterSettingsManager.add(ID_PREFIX + categId, "category", cId); } } diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractParams.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractParams.java index 0a405e2390b..3f67e5d82af 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractParams.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractParams.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2007 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -95,6 +95,10 @@ public enum OverrideUuid { private String ownerIdUser; private OverrideUuid overrideUuid; + private boolean translateContent; + private String translateContentLangs; + private String translateContentFields; + /** * When more than one harvester harvest the same record, then record is usually rejected. * It can override existing, but the privileges are not preserved. 
This option @@ -200,6 +204,9 @@ public void create(Element node) throws BadInputEx { setImportXslt(Util.getParam(content, "importxslt", "none")); setBatchEdits(Util.getParam(content, "batchEdits", "")); + setTranslateContent(Util.getParam(content, "translateContent", false)); + setTranslateContentLangs(Util.getParam(content, "translateContentLangs", "")); + setTranslateContentFields(Util.getParam(content, "translateContentFields", "")); this.setValidate(readValidateFromParams(content)); @@ -280,6 +287,9 @@ public void update(Element node) throws BadInputEx { setImportXslt(Util.getParam(content, "importxslt", "none")); setBatchEdits(Util.getParam(content, "batchEdits", getBatchEdits())); + setTranslateContent(Util.getParam(content, "translateContent", false)); + setTranslateContentLangs(Util.getParam(content, "translateContentLangs", "")); + setTranslateContentFields(Util.getParam(content, "translateContentFields", "")); this.setValidate(readValidateFromParams(content)); if (privil != null) { @@ -330,7 +340,9 @@ protected void copyTo(AbstractParams copy) { copy.setImportXslt(getImportXslt()); copy.setBatchEdits(getBatchEdits()); + copy.setTranslateContent(isTranslateContent()); copy.setValidate(getValidate()); + copy.setTranslateContent(isTranslateContent()); for (Privileges p : alPrivileges) { copy.addPrivilege(p.copy()); @@ -643,4 +655,28 @@ public String getBatchEdits() { public void setBatchEdits(String batchEdits) { this.batchEdits = batchEdits; } + + public boolean isTranslateContent() { + return translateContent; + } + + public void setTranslateContent(boolean translateContent) { + this.translateContent = translateContent; + } + + public String getTranslateContentLangs() { + return translateContentLangs; + } + + public void setTranslateContentLangs(String translateContentLangs) { + this.translateContentLangs = translateContentLangs; + } + + public String getTranslateContentFields() { + return translateContentFields; + } + + public void setTranslateContentFields(String translateContentFields) { + this.translateContentFields = translateContentFields; + } } diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/UriMapper.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/UriMapper.java index d25c803cb68..b15c89ab302 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/UriMapper.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/UriMapper.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2007 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -26,6 +26,7 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.List; +import java.util.Optional; import org.fao.geonet.domain.AbstractMetadata; import org.fao.geonet.kernel.datamanager.IMetadataUtils; @@ -41,7 +42,7 @@ */ public class UriMapper { - private HashMap> hmUriRecords = new HashMap>(); + private HashMap> hmUriRecords = new HashMap<>(); //-------------------------------------------------------------------------- //--- @@ -49,21 +50,21 @@ public class UriMapper { //--- //-------------------------------------------------------------------------- - public UriMapper(ServiceContext context, String harvestUuid) throws Exception { + public 
UriMapper(ServiceContext context, String harvestUuid) { final IMetadataUtils metadataRepository = context.getBean(IMetadataUtils.class); final List metadataList = metadataRepository.findAll(MetadataSpecs.hasHarvesterUuid(harvestUuid)); - for (AbstractMetadata record : metadataList) { - String uri = record.getHarvestInfo().getUri(); + for (AbstractMetadata metadataRecord : metadataList) { + String uri = Optional.ofNullable(metadataRecord.getHarvestInfo().getUri()).orElse(""); - List records = hmUriRecords.get(uri); + List records = hmUriRecords.computeIfAbsent(uri, k -> new ArrayList<>()); if (records == null) { - records = new ArrayList(); + records = new ArrayList<>(); hmUriRecords.put(uri, records); } - records.add(new RecordInfo(record)); + records.add(new RecordInfo(metadataRecord)); } } diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/csw/Aligner.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/csw/Aligner.java index 7a224fa1345..5097d9a600c 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/csw/Aligner.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/csw/Aligner.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2007 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -232,7 +232,7 @@ private void insertOrUpdate(Collection records, Collection { private UUIDMapper localUuids; private String processName; private String preferredSchema; - private Map processParams = new HashMap(); + private Map processParams = new HashMap<>(); private MetadataRepository metadataRepository; - private Map> hmRemoteGroups = new HashMap>(); + private Map> hmRemoteGroups = new HashMap<>(); private SettingManager settingManager; public Aligner(AtomicBoolean cancelMonitor, Logger log, ServiceContext context, XmlRequest req, @@ -119,7 +120,7 @@ private void setupLocEntity(List list, Map> for (Element entity : list) { String name = entity.getChildText("name"); - Map hm = new HashMap(); + Map hm = new HashMap<>(); hmEntity.put(name, hm); @SuppressWarnings("unchecked") @@ -163,7 +164,7 @@ public HarvestResult align(SortedSet records, List err result.locallyRemoved++; } - } catch (Throwable t) { + } catch (Exception t) { log.error("Couldn't remove metadata with uuid " + uuid); log.error(t); result.unchangedMetadata++; @@ -197,7 +198,6 @@ public HarvestResult align(SortedSet records, List err String id = dataMan.getMetadataId(ri.uuid); // look up value of localrating/enable - SettingManager settingManager = context.getBean(SettingManager.class); String localRating = settingManager.getValue(Settings.SYSTEM_LOCALRATING_ENABLE); if (id == null) { @@ -216,7 +216,6 @@ public HarvestResult align(SortedSet records, List err params.useChangeDateForUpdate(), localUuids.getChangeDate(ri.uuid), true); log.info("Overriding record with uuid " + ri.uuid); - result.updatedMetadata++; if (params.isIfRecordExistAppendPrivileges()) { addPrivileges(id, params.getPrivileges(), localGroups, context); @@ -230,6 +229,7 @@ public HarvestResult align(SortedSet records, List err case SKIP: log.debug("Skipping record with uuid " + ri.uuid); result.uuidSkipped++; + break; default: break; } @@ -248,7 +248,7 @@ public HarvestResult align(SortedSet records, List err } } - } 
catch (Throwable t) { + } catch (Exception t) { log.error("Couldn't insert or update metadata with uuid " + ri.uuid); log.error(t); result.unchangedMetadata++; @@ -282,7 +282,7 @@ private Element extractValidMetadataForImport(DirectoryStream files, Eleme Log.debug(Geonet.MEF, "Multiple metadata files"); Map> mdFiles = - new HashMap>(); + new HashMap<>(); for (Path file : files) { if (Files.isRegularFile(file)) { Element metadata = Xml.loadFile(file); @@ -353,8 +353,8 @@ private Element extractValidMetadataForImport(DirectoryStream files, Eleme } private void addMetadata(final RecordInfo ri, final boolean localRating, String uuid) throws Exception { - final String id[] = {null}; - final Element md[] = {null}; + final String[] id = {null}; + final Element[] md = {null}; //--- import metadata from MEF file @@ -462,6 +462,11 @@ private String addMetadata(RecordInfo ri, Element md, Element info, boolean loca if (log.isDebugEnabled()) log.debug(" - Adding metadata with remote uuid:" + ri.uuid); + // Translate metadata + if (params.isTranslateContent()) { + md = translateMetadataContent(context, md, schema); + } + try { Integer groupIdVal = null; if (StringUtils.isNotEmpty(params.getOwnerIdGroup())) { @@ -595,13 +600,13 @@ private void addPrivilegesFromGroupPolicy(String id, Element privil) throws Exce } private Map> buildPrivileges(Element privil) { - Map> map = new HashMap>(); + Map> map = new HashMap<>(); for (Object o : privil.getChildren("group")) { Element group = (Element) o; String name = group.getAttributeValue("name"); - Set set = new HashSet(); + Set set = new HashSet<>(); map.put(name, set); for (Object op : group.getChildren("operation")) { @@ -662,9 +667,9 @@ private String createGroup(String name) throws Exception { */ private void updateMetadata(final RecordInfo ri, final String id, final boolean localRating, final boolean useChangeDate, String localChangeDate, Boolean force) throws Exception { - final Element md[] = {null}; - final Element publicFiles[] = {null}; - final Element privateFiles[] = {null}; + final Element[] md = {null}; + final Element[] publicFiles = {null}; + final Element[] privateFiles = {null}; if (localUuids.getID(ri.uuid) == null && !force) { if (log.isDebugEnabled()) @@ -743,6 +748,11 @@ private void updateMetadata(RecordInfo ri, String id, Element md, String date = localUuids.getChangeDate(ri.uuid); + // Translate metadata + if (params.isTranslateContent()) { + md = translateMetadataContent(context, md, ri.schema); + } + try { Integer groupIdVal = null; if (StringUtils.isNotEmpty(params.getOwnerIdGroup())) { @@ -756,7 +766,6 @@ private void updateMetadata(RecordInfo ri, String id, Element md, return; } - final IMetadataManager metadataManager = context.getBean(IMetadataManager.class); Metadata metadata; if (!force && !ri.isMoreRecentThan(date)) { if (log.isDebugEnabled()) @@ -883,12 +892,18 @@ private void saveFile(final Store store, String metadataUuid, String file, ISODate remIsoDate = new ISODate(changeDate); boolean saveFile; - final MetadataResource description = store.getResourceDescription(context, metadataUuid, visibility, file, true); - if (description == null) { - saveFile = true; - } else { - ISODate locIsoDate = new ISODate(description.getLastModification().getTime(), false); + Store.ResourceHolder resourceHolder; + try { + resourceHolder = store.getResource(context, metadataUuid, visibility, file, true); + } catch (ResourceNotFoundException ex) { + resourceHolder = null; + } + + if ((resourceHolder != null) && (resourceHolder.getMetadata() != 
null)) { + ISODate locIsoDate = new ISODate(resourceHolder.getMetadata().getLastModification().getTime(), false); saveFile = (remIsoDate.timeDifferenceInSeconds(locIsoDate) > 0); + } else { + saveFile = true; } if (saveFile) { diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFilesystemHarvester.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFilesystemHarvester.java index 312a0285b5f..640ddbee67b 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFilesystemHarvester.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFilesystemHarvester.java @@ -45,7 +45,6 @@ import org.fao.geonet.kernel.harvest.harvester.HarvestResult; import org.fao.geonet.kernel.search.IndexingMode; import org.fao.geonet.repository.MetadataRepository; -import org.fao.geonet.repository.OperationAllowedRepository; import org.fao.geonet.repository.specification.MetadataSpecs; import org.fao.geonet.utils.IO; import org.jdom.Element; @@ -146,6 +145,12 @@ void updateMetadata(Element xml, final String id, GroupMapper localGroups, final String language = context.getLanguage(); + // Translate metadata + if (params.isTranslateContent()) { + String schema = dataMan.getMetadataSchema(id); + xml = aligner.translateMetadataContent(context, xml, schema); + } + final AbstractMetadata metadata = metadataManager.updateMetadata(context, id, xml, false, false, language, changeDate, true, IndexingMode.none); @@ -158,8 +163,6 @@ void updateMetadata(Element xml, final String id, GroupMapper localGroups, final metadataManager.save(metadata); } - OperationAllowedRepository repository = context.getBean(OperationAllowedRepository.class); - repository.deleteAllByMetadataId(Integer.parseInt(id)); aligner.addPrivileges(id, params.getPrivileges(), localGroups, context); metadata.getCategories().clear(); @@ -193,6 +196,12 @@ String addMetadata(Element xml, String uuid, String schema, GroupMapper localGro if (!uuid.equals(xmlUuid)) { md = metadataUtils.setUUID(schema, uuid, md); } + + // Translate metadata + if (params.isTranslateContent()) { + md = aligner.translateMetadataContent(context, md, schema); + } + metadata.getDataInfo(). setSchemaId(schema). setRoot(xml.getQualifiedName()). diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFsHarvesterFileVisitor.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFsHarvesterFileVisitor.java index c188611e549..791f9a17cda 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFsHarvesterFileVisitor.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFsHarvesterFileVisitor.java @@ -54,8 +54,6 @@ import org.json.JSONException; import org.json.JSONObject; import org.json.XML; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import javax.annotation.Nonnull; import java.io.IOException; @@ -75,7 +73,6 @@ * @author Jesse on 11/6/2014. 
*/ class LocalFsHarvesterFileVisitor extends SimpleFileVisitor { - private Logger LOGGER = LoggerFactory.getLogger(Geonet.HARVESTER); private final LocalFilesystemParams params; private final DataManager dataMan; @@ -110,9 +107,7 @@ public LocalFsHarvesterFileVisitor(AtomicBoolean cancelMonitor, ServiceContext c this.repo = context.getBean(IMetadataUtils.class); this.startTime = System.currentTimeMillis(); - String harvesterName = params.getName().replaceAll("\\W+", "_"); - LOGGER = LoggerFactory.getLogger(harvesterName); - LOGGER.debug("Start visiting files at {}.", this.startTime); + harvester.getLogger().debug(String.format("Start visiting files at %s.", this.startTime)); } @Override @@ -136,9 +131,9 @@ public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IO try { result.totalMetadata++; - if (LOGGER.isDebugEnabled() && result.totalMetadata % 1000 == 0) { + if (harvester.getLogger().isDebugEnabled() && result.totalMetadata % 1000 == 0) { long elapsedTime = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - startTime); - LOGGER.debug("{} records inserted in {} s ({} records/s).", new Object[] { + harvester.getLogger().debug("{} records inserted in {} s ({} records/s).", new Object[] { result.totalMetadata, elapsedTime, result.totalMetadata / elapsedTime}); @@ -152,7 +147,7 @@ public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IO processXml(file); } } catch (Exception e) { - LOGGER.error("An error occurred while harvesting file {}. Error is: {}.", + harvester.getLogger().error("An error occurred while harvesting file {}. Error is: {}.", file.toAbsolutePath().normalize(), e.getMessage()); } return FileVisitResult.CONTINUE; @@ -168,7 +163,7 @@ private void processJson(Path file) throws Exception { ObjectMapper objectMapper = new ObjectMapper(); Element recordAsElement; try { - LOGGER.debug("reading file: {}", filePath); + harvester.getLogger().debug("reading file: {}", filePath); String uuid = com.google.common.io.Files.getNameWithoutExtension(file.getFileName().toString()); String recordAsJson = objectMapper.readTree(filePath.toFile()).toString(); JSONObject sanitizedJson = sanitize(new JSONObject(recordAsJson)); @@ -180,18 +175,18 @@ private void processJson(Path file) throws Exception { recordAsElement = Xml.loadString(recordAsXml, false); recordAsElement.addContent(new Element("uuid").setText(uuid)); } catch (JsonProcessingException e) { - LOGGER.error("Error processing JSON from file {}, ignoring", filePath); - LOGGER.error("full stack", e); + harvester.getLogger().error("Error processing JSON from file {}, ignoring", filePath); + harvester.getLogger().error("full stack", e); result.badFormat++; return; } catch (JDOMException e) { - LOGGER.error("Error transforming JSON into XML from file {}, ignoring", filePath); - LOGGER.error("full stack", e); + harvester.getLogger().error("Error transforming JSON into XML from file {}, ignoring", filePath); + harvester.getLogger().error("full stack", e); result.badFormat++; return; } catch (Exception e) { - LOGGER.error("Error retrieving JSON from file {}, ignoring", filePath); - LOGGER.error("full stack", e); + harvester.getLogger().error("Error retrieving JSON from file {}, ignoring", filePath); + harvester.getLogger().error("full stack", e); result.unretrievable++; return; } @@ -241,16 +236,16 @@ private void processXml(Path file) throws Exception { Element xml; try { - LOGGER.debug("reading file: {}", filePath); + harvester.getLogger().debug(String.format("reading file: %s", 
filePath)); xml = Xml.loadFile(file); } catch (JDOMException e) { - LOGGER.error("Error loading XML from file {}, ignoring", filePath); - LOGGER.error("full stack", e); + harvester.getLogger().error("Error loading XML from file {}, ignoring", filePath); + harvester.getLogger().error("full stack", e); result.badFormat++; return; } catch (Exception e) { - LOGGER.error("Error retrieving XML from file {}, ignoring", filePath); - LOGGER.error("full stack", e); + harvester.getLogger().error("Error retrieving XML from file {}, ignoring", filePath); + harvester.getLogger().error("full stack", e); result.unretrievable++; return; } @@ -266,7 +261,7 @@ private void processXmlData(Path file, Element rawXml) throws Exception { try { xml = Xml.transform(xml, thisXslt); } catch (Exception e) { - LOGGER.error("Cannot transform XML from file {}, ignoring. Error was: {}", filePath, e.getMessage()); + harvester.getLogger().error("Cannot transform XML from file {}, ignoring. Error was: {}", filePath, e.getMessage()); result.badFormat++; return; } @@ -288,7 +283,7 @@ private void processXmlData(Path file, Element rawXml) throws Exception { params.getValidate().validate(dataMan, context, xml, groupIdVal); } catch (Exception e) { - LOGGER.error("Cannot validate XML from file {}, ignoring. Error was: {}", filePath, e.getMessage()); + harvester.getLogger().error("Cannot validate XML from file {}, ignoring. Error was: {}", filePath, e.getMessage()); result.doesNotValidate++; return; } @@ -315,14 +310,14 @@ private void processXmlData(Path file, Element rawXml) throws Exception { updateMetadata(file, filePath, xml, schema, id, metadata, true); break; case RANDOM: - LOGGER.debug("Generating random uuid for remote record with uuid " + metadata.getUuid()); + harvester.getLogger().debug("Generating random uuid for remote record with uuid " + metadata.getUuid()); String createDate = getCreateDate(file, xml, schema, uuid); String newUuid = UUID.randomUUID().toString(); id = addMetadata(xml, schema, newUuid, createDate); break; case SKIP: - LOGGER.debug("Skipping record with uuid " + metadata.getUuid()); + harvester.getLogger().debug("Skipping record with uuid " + metadata.getUuid()); result.uuidSkipped++; result.unchangedMetadata++; @@ -351,7 +346,7 @@ private String getCreateDate(Path file, Element xml, String schema, String uuid) try { createDate = dataMan.extractDateModified(schema, xml); } catch (Exception ex) { - LOGGER.error("LocalFilesystemHarvester - addMetadata - can't get metadata modified date for metadata uuid= {} " + + harvester.getLogger().error("LocalFilesystemHarvester - addMetadata - can't get metadata modified date for metadata uuid= {} " + "using current date for modified date", uuid); createDate = new ISODate().toString(); } @@ -376,25 +371,25 @@ private void updateMetadata(Path file, Path filePath, Element xml, String schema String changeDate = new ISODate(fileDate.getTime(), false).getDateAndTime(); - LOGGER.debug(" File date is: {} / record date is: {}", filePath, modified); + harvester.getLogger().debug(" File date is: {} / record date is: {}", filePath, modified); if (DateUtils.truncate(recordDate, Calendar.SECOND) .before(DateUtils.truncate(fileDate, Calendar.SECOND))) { - LOGGER.debug(" Db record is older than file. Updating record with id: {}", id); + harvester.getLogger().debug(String.format(" Db record is older than file. Updating record with id: %s", id)); updateMedata(xml, id, changeDate, force); } else { - LOGGER.debug(" Db record is not older than last modified date of file. 
No need for update."); + harvester.getLogger().debug(" Db record is not older than last modified date of file. No need for update."); result.unchangedMetadata++; } } else { - LOGGER.debug(" updating existing metadata, id is: " + id); + harvester.getLogger().debug(" updating existing metadata, id is: " + id); String changeDate; try { changeDate = dataMan.extractDateModified(schema, xml); } catch (Exception ex) { - LOGGER.error("LocalFilesystemHarvester - updateMetadata - can't get metadata modified date for " + + harvester.getLogger().error("LocalFilesystemHarvester - updateMetadata - can't get metadata modified date for " + "metadata id= {}, using current date for modified date", id); changeDate = new ISODate().toString(); } @@ -406,7 +401,7 @@ private void updateMetadata(Path file, Path filePath, Element xml, String schema private void processMef(Path file) { Path filePath = file.toAbsolutePath().normalize(); - LOGGER.debug("reading file: {}", filePath); + harvester.getLogger().debug(String.format("reading file: %s", filePath)); try { String xsl = params.getImportXslt(); MEFLib.Version version = MEFLib.getMEFVersion(file); @@ -439,7 +434,7 @@ private void processMef(Path file) { params.getValidate() != NOVALIDATION, false, context, file); for (String id : ids) { - LOGGER.debug("Metadata imported from MEF: {}", id); + harvester.getLogger().debug(String.format("Metadata imported from MEF: %s", id)); context.getBean(MetadataRepository.class).update(Integer.valueOf(id), new Updater() { @Override public void apply(@Nonnull final Metadata metadata) { @@ -454,8 +449,8 @@ public void apply(@Nonnull final Metadata metadata) { result.addedMetadata++; } } catch (Exception e) { - LOGGER.error("Error retrieving MEF from file {}, ignoring", filePath); - LOGGER.error("Error: ", e); + harvester.getLogger().error("Error retrieving MEF from file {}, ignoring", filePath); + harvester.getLogger().error("Error: ", e); result.unretrievable++; } } @@ -465,26 +460,26 @@ private String getUuidFromFile(Element xml, Path filePath, String schema) { try { uuid = dataMan.extractUUID(schema, xml); } catch (Exception e) { - LOGGER.debug("Failed to extract metadata UUID for file {}" + + harvester.getLogger().debug("Failed to extract metadata UUID for file {}" + " using XSL extract-uuid. The record is probably " + "a subtemplate. 
Will check uuid attribute on root element.", filePath); // Extract UUID from uuid attribute in subtemplates String uuidAttribute = xml.getAttributeValue("uuid"); if (uuidAttribute != null) { - LOGGER.debug("Found uuid attribute {} for file {}.", uuidAttribute, filePath); + harvester.getLogger().debug("Found uuid attribute {} for file {}.", uuidAttribute, filePath); uuid = uuidAttribute; } else { // Assigning a new UUID uuid = UUID.randomUUID().toString(); - LOGGER.debug("No UUID found, the record will be assigned a random uuid {} for file {}.", uuid, filePath); + harvester.getLogger().debug("No UUID found, the record will be assigned a random uuid {} for file {}.", uuid, filePath); } } return uuid; } private String addMetadata(Element xml, String schema, String uuid, String createDate) throws Exception { - LOGGER.debug("adding new metadata"); + harvester.getLogger().debug("adding new metadata"); String id = harvester.addMetadata(xml, uuid, schema, localGroups, localCateg, createDate, aligner, false); listOfRecordsToIndex.add(Integer.valueOf(id)); result.addedMetadata++; diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/oaipmh/Harvester.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/oaipmh/Harvester.java index e22b3dc96be..79bc1fb174b 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/oaipmh/Harvester.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/oaipmh/Harvester.java @@ -396,6 +396,11 @@ private void addMetadata(XmlRequest t, RecordInfo ri, String processName, Map { public HarvestResult getResult() { return result; } - private Map processParams = new HashMap(); + private Logger log; public Aligner(AtomicBoolean cancelMonitor, ServiceContext sc, SimpleUrlParams params, Logger log) throws OperationAbortedEx { @@ -142,6 +141,7 @@ private void insertOrUpdate(Map records, Collection records) throws Exception } - private void addMetadata(Map.Entry record, String overrideUuidValue) throws Exception { + private void addMetadata(Map.Entry recordInfo, String overrideUuidValue) throws Exception { if (cancelMonitor.get()) { return; } - Element xml = record.getValue(); + Element xml = recordInfo.getValue(); if (xml == null) { result.unretrievable++; return; @@ -205,20 +205,25 @@ private void addMetadata(Map.Entry record, String overrideUuidV String schema = dataMan.autodetectSchema(xml, null); if (schema == null) { - log.debug(" - Metadata skipped due to unknown schema. uuid:" + record.getKey()); + log.debug(" - Metadata skipped due to unknown schema. 
uuid:" + recordInfo.getKey()); result.unknownSchema++; return; } - String uuid = record.getKey(); + String uuid = recordInfo.getKey(); if (overrideUuidValue != null) { - log.debug(String.format(" - Overriding UUID %s by %s", record.getKey(), overrideUuidValue)); + log.debug(String.format(" - Overriding UUID %s by %s", recordInfo.getKey(), overrideUuidValue)); uuid = overrideUuidValue; - xml = dataMan.setUUID(schema, uuid, record.getValue()); + xml = dataMan.setUUID(schema, uuid, recordInfo.getValue()); } applyBatchEdits(uuid, xml, schema, params.getBatchEdits(), context, null); + // Translate metadata + if (params.isTranslateContent()) { + xml = translateMetadataContent(context, xml, schema); + } + log.debug(" - Adding metadata with uuid:" + uuid + " schema:" + schema); final String dateModified = dataMan.extractDateModified(schema, xml); @@ -274,12 +279,12 @@ boolean updateMetadata(Map.Entry ri, String id, Boolean force) final AbstractMetadata metadata = metadataManager.updateMetadata(context, id, md, validate, ufo, language, dateModified, true, IndexingMode.none); - if (force) { + if (Boolean.TRUE.equals(force)) { //change ownership of metadata to new harvester metadata.getHarvestInfo().setUuid(params.getUuid()); metadata.getSourceInfo().setSourceId(params.getUuid()); - metadataManager.save((Metadata) metadata); + metadataManager.save(metadata); } OperationAllowedRepository repository = context.getBean(OperationAllowedRepository.class); diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/Harvester.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/Harvester.java index 789cbc44ba2..81dad939cad 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/Harvester.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/Harvester.java @@ -287,6 +287,11 @@ private void addMetadata(RemoteFile rf) throws Exception { if (log.isDebugEnabled()) log.debug(" - Adding metadata with remote path : " + rf.getPath()); + // Translate metadata + if (params.isTranslateContent()) { + md = translateMetadataContent(context, md, schema); + } + // // insert metadata // @@ -471,6 +476,10 @@ private void updateMetadata(RemoteFile rf, RecordInfo record, Boolean force) thr } + // Translate metadata + if (params.isTranslateContent()) { + md = translateMetadataContent(context, md, schema); + } // // update metadata diff --git a/healthmonitor/pom.xml b/healthmonitor/pom.xml index 805dae47790..adcc4d6f283 100644 --- a/healthmonitor/pom.xml +++ b/healthmonitor/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.6-SNAPSHOT 4.0.0 diff --git a/healthmonitor/src/main/java/org/fao/geonet/monitor/health/NoIndexErrorsHealthCheck.java b/healthmonitor/src/main/java/org/fao/geonet/monitor/health/NoIndexErrorsHealthCheck.java index d3afc90e4f1..3c60aa7f000 100644 --- a/healthmonitor/src/main/java/org/fao/geonet/monitor/health/NoIndexErrorsHealthCheck.java +++ b/healthmonitor/src/main/java/org/fao/geonet/monitor/health/NoIndexErrorsHealthCheck.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2023 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -44,7 +44,8 @@ protected Result check() throws Exception { GeonetContext gc = (GeonetContext) context.getHandlerContext(Geonet.CONTEXT_NAME); EsSearchManager searchMan 
= gc.getBean(EsSearchManager.class); - long numDocs = searchMan.getNumDocs("+" + IndexFields.INDEXING_ERROR_FIELD + ":true"); + long numDocs = searchMan.getNumDocs("-" + IndexFields.INDEXING_ERROR_MSG + ".type:warning +" + + IndexFields.INDEXING_ERROR_FIELD + ":true"); if (numDocs > 0) { return Result.unhealthy(String.format("Found %d metadata that had errors during indexing", numDocs)); diff --git a/index/pom.xml b/index/pom.xml index 9e82d3e9130..8aa023929ec 100644 --- a/index/pom.xml +++ b/index/pom.xml @@ -5,7 +5,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.6-SNAPSHOT 4.0.0 gn-index diff --git a/index/src/main/java/org/fao/geonet/index/es/EsRestClient.java b/index/src/main/java/org/fao/geonet/index/es/EsRestClient.java index 2f031f726b2..4b21f7c61a3 100644 --- a/index/src/main/java/org/fao/geonet/index/es/EsRestClient.java +++ b/index/src/main/java/org/fao/geonet/index/es/EsRestClient.java @@ -413,47 +413,23 @@ public Map getDocument(String index, String id) throws Exception /** * Query the index for a specific record and return values for a set of fields. */ - public Map getFieldsValues(String index, String id, Set fields) throws IOException { + public Map getFieldsValues(String index, String id, Set fields, String language) throws Exception { if (!activated) { return Collections.emptyMap(); } - Map fieldValues = new HashMap<>(fields.size()); - try { - String query = String.format("_id:\"%s\"", id); - // TODO: Check maxRecords - // TODO: Use _doc API? - - - final SearchResponse searchResponse = this.query(index, query, null, fields, new HashMap<>(), 0, 1, null); - - List totalHits = searchResponse.hits().hits(); - long matches = totalHits.size(); - if (matches == 0) { - return fieldValues; - } else if (matches == 1) { - final Hit hit = totalHits.get(0); - - fields.forEach(f -> { - final Object o = hit.fields().get(f); - if (o instanceof String) { - fieldValues.put(f, (String) o); - } else if (o instanceof HashMap && f.endsWith("Object")) { - fieldValues.put(f, (String) ((HashMap) o).get("default")); - } - }); - } else { - throw new IOException(String.format( - "Your query '%s' returned more than one record, %d in fact. Can't retrieve field values for more than one record.", - query, - matches - )); + Map fieldValues = new HashMap<>(); + Map sources = getDocument(index, id); + + for (String field : fields) { + Object value = sources.get(field); + if (value instanceof String) { + fieldValues.put(field, (String) value); + } else if (value instanceof Map && field.endsWith("Object")) { + Map valueMap = (Map) value; + String languageValue = (String) valueMap.get("lang" + language); + fieldValues.put(field, languageValue != null ? languageValue : (String) valueMap.get("default")); } - - } catch (Exception e) { - throw new IOException(String.format( - "Error during fields value retrieval. 
Errors is '%s'.", e.getMessage() - )); } return fieldValues; } diff --git a/inspire-atom/pom.xml b/inspire-atom/pom.xml index 31849c58ccb..effbea67089 100644 --- a/inspire-atom/pom.xml +++ b/inspire-atom/pom.xml @@ -28,7 +28,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.6-SNAPSHOT 4.0.0 diff --git a/inspire-atom/src/main/java/org/fao/geonet/inspireatom/util/InspireAtomUtil.java b/inspire-atom/src/main/java/org/fao/geonet/inspireatom/util/InspireAtomUtil.java index a452d0733d0..622f8fe4ca3 100644 --- a/inspire-atom/src/main/java/org/fao/geonet/inspireatom/util/InspireAtomUtil.java +++ b/inspire-atom/src/main/java/org/fao/geonet/inspireatom/util/InspireAtomUtil.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2023 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -63,7 +63,7 @@ * @author Jose García */ public class InspireAtomUtil { - private final static String EXTRACT_DATASETS_FROM_SERVICE_XSLT = "extract-datasetinfo-from-service-feed.xsl"; + private static final String EXTRACT_DATASETS_FROM_SERVICE_XSLT = "extract-datasetinfo-from-service-feed.xsl"; /** * Xslt process to get the related datasets in service metadata. @@ -395,7 +395,15 @@ public static String retrieveDatasetUuidFromIdentifier(EsSearchManager searchMan " \"value\": \"%s\"" + " }" + " }" + + " }," + + " {" + + " \"term\": {" + + " \"isPublishedToAll\": {" + + " \"value\": \"true\"" + + " }" + + " }" + " }" + + " ]" + " }" + "}"; diff --git a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomDescribe.java b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomDescribe.java index 97091e008e1..95871555b1d 100644 --- a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomDescribe.java +++ b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomDescribe.java @@ -24,6 +24,8 @@ import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -107,7 +109,7 @@ public class AtomDescribe { ) @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Feeds."), - @ApiResponse(responseCode = "204", description = "Not authenticated.") + @ApiResponse(responseCode = "204", description = "Not authenticated.", content = {@Content(schema = @Schema(hidden = true))}) }) @ResponseStatus(OK) @ResponseBody diff --git a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomGetData.java b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomGetData.java index a9133fe38a7..33d0ace6128 100644 --- a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomGetData.java +++ b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomGetData.java @@ -23,6 +23,8 @@ package org.fao.geonet.services.inspireatom; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import 
io.swagger.v3.oas.annotations.tags.Tag; @@ -84,7 +86,7 @@ public class AtomGetData { ) @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Get a data file related to dataset"), - @ApiResponse(responseCode = "204", description = "Not authenticated.") + @ApiResponse(responseCode = "204", description = "Not authenticated.", content = {@Content(schema = @Schema(hidden = true))}) }) @ResponseStatus(OK) @ResponseBody diff --git a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomHarvester.java b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomHarvester.java index 94eeb33e4ce..a30dcbb0331 100644 --- a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomHarvester.java +++ b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomHarvester.java @@ -23,6 +23,8 @@ package org.fao.geonet.services.inspireatom; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -61,7 +63,7 @@ public class AtomHarvester { @PreAuthorize("hasAuthority('Administrator')") @ApiResponses(value = { @ApiResponse(responseCode = "201", description = "Scan completed."), - @ApiResponse(responseCode = "204", description = "Not authenticated.") + @ApiResponse(responseCode = "204", description = "Not authenticated.", content = {@Content(schema = @Schema(hidden = true))}) }) @ResponseStatus(CREATED) @ResponseBody diff --git a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomSearch.java b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomSearch.java index 0e27e9c8763..5253d3146ac 100644 --- a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomSearch.java +++ b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomSearch.java @@ -27,6 +27,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -114,7 +115,7 @@ public class AtomSearch { ) @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Get a list of feeds."), - @ApiResponse(responseCode = "204", description = "Not authenticated.") + @ApiResponse(responseCode = "204", description = "Not authenticated.", content = {@io.swagger.v3.oas.annotations.media.Content(schema = @Schema(hidden = true))}) }) @ResponseStatus(OK) public Object feeds( diff --git a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomServiceDescription.java b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomServiceDescription.java index 87a255411b2..6c7b99ffbc2 100644 --- a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomServiceDescription.java +++ b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomServiceDescription.java @@ -23,6 +23,8 @@ package org.fao.geonet.services.inspireatom; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import 
io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -91,7 +93,7 @@ public class AtomServiceDescription { produces = MediaType.APPLICATION_XML_VALUE) @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Feeds."), - @ApiResponse(responseCode = "204", description = "Not authenticated.") + @ApiResponse(responseCode = "204", description = "Not authenticated.", content = {@Content(schema = @Schema(hidden = true))}) }) @ResponseStatus(OK) @ResponseBody diff --git a/jmeter/pom.xml b/jmeter/pom.xml index bf6daf9d308..4d7e0c7f02c 100644 --- a/jmeter/pom.xml +++ b/jmeter/pom.xml @@ -29,7 +29,7 @@ org.geonetwork-opensource geonetwork - 4.4.3-SNAPSHOT + 4.4.6-SNAPSHOT diff --git a/listeners/pom.xml b/listeners/pom.xml index d46c1217e63..e60deaa21e4 100644 --- a/listeners/pom.xml +++ b/listeners/pom.xml @@ -28,7 +28,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.6-SNAPSHOT GeoNetwork Events diff --git a/listeners/src/main/java/org/fao/geonet/listener/history/GenericMetadataEventListener.java b/listeners/src/main/java/org/fao/geonet/listener/history/GenericMetadataEventListener.java index 06299c4be91..b9e052d5e99 100644 --- a/listeners/src/main/java/org/fao/geonet/listener/history/GenericMetadataEventListener.java +++ b/listeners/src/main/java/org/fao/geonet/listener/history/GenericMetadataEventListener.java @@ -70,7 +70,7 @@ public final void handleEvent(AbstractHistoryEvent event) { */ public final void storeContentHistoryEvent(AbstractHistoryEvent event) { - if(settingManager.getValueAsBool(Settings.SYSTEM_METADATA_HISTORY_ENABLED)) { + if(settingManager.getValueAsBool(Settings.METADATA_HISTORY_ENABLED)) { Integer metadataid = Math.toIntExact(event.getMdId()); diff --git a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApprovePublishedRecord.java b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApprovePublishedRecord.java index 823a76e2ea5..546571cec96 100644 --- a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApprovePublishedRecord.java +++ b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApprovePublishedRecord.java @@ -121,7 +121,7 @@ private void changeToApproved(AbstractMetadata md, MetadataStatus previousStatus status.setChangeDate(new ISODate()); status.setUserId(ServiceContext.get().getUserSession().getUserIdAsInt()); - metadataStatus.setStatusExt(status); + metadataStatus.setStatusExt(status, true); Log.trace(Geonet.DATA_MANAGER, "Metadata with id " + md.getId() + " automatically approved due to publishing."); } diff --git a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApproveRecord.java b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApproveRecord.java index 1a315ca05af..a987943bf39 100644 --- a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApproveRecord.java +++ b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApproveRecord.java @@ -148,7 +148,7 @@ private AbstractMetadata approveWithDraft(MetadataStatusChanged event) throws Nu status.setChangeDate(new ISODate()); status.setUserId(event.getUser()); - metadataStatus.setStatusExt(status); + metadataStatus.setStatusExt(status, false); } else if (md instanceof Metadata) { draft = null; //metadataDraftRepository.findOneByUuid(md.getUuid()); diff --git a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/DraftUtilities.java b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/DraftUtilities.java index 
4bd04801be7..ce418b4062f 100644 --- a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/DraftUtilities.java +++ b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/DraftUtilities.java @@ -87,6 +87,7 @@ public AbstractMetadata replaceMetadataWithDraft(AbstractMetadata md) { * @return */ public AbstractMetadata replaceMetadataWithDraft(AbstractMetadata md, AbstractMetadata draft) { + Log.info(Geonet.DATA_MANAGER, String.format("Replacing metadata approved record (%d) with draft record (%d)", md.getId(), draft.getId())); Log.trace(Geonet.DATA_MANAGER, "Found approved record with id " + md.getId()); Log.trace(Geonet.DATA_MANAGER, "Found draft with id " + draft.getId()); // Reassign metadata validations @@ -131,6 +132,7 @@ public AbstractMetadata replaceMetadataWithDraft(AbstractMetadata md, AbstractMe } // Reassign file uploads + Log.info(Geonet.DATA_MANAGER, String.format("Copying draft record '%d' resources to approved record '%d'", draft.getId(), md.getId())); draftMetadataUtils.replaceFiles(draft, md); metadataFileUploadRepository.deleteAll(MetadataFileUploadSpecs.hasMetadataId(md.getId())); @@ -146,7 +148,6 @@ public AbstractMetadata replaceMetadataWithDraft(AbstractMetadata md, AbstractMe Element xmlData = draft.getXmlData(false); String changeDate = draft.getDataInfo().getChangeDate().getDateAndTime(); - store.delResources(context, draft.getUuid(), false); removeDraft((MetadataDraft) draft); // Copy contents @@ -155,8 +156,10 @@ public AbstractMetadata replaceMetadataWithDraft(AbstractMetadata md, AbstractMe xmlData, false, false, context.getLanguage(), changeDate, true, IndexingMode.full); - Log.info(Geonet.DATA_MANAGER, "Record updated with draft contents: " + md.getId()); + Log.info(Geonet.DATA_MANAGER, "Record '" + md.getUuid() + "(" +md.getId() +")' update with draft contents from metadata id '" + draft.getId() +"'."); + Log.info(Geonet.DATA_MANAGER, "Cleaning up draft record resources for metadata '" + draft.getUuid() + "(" +draft.getId() +")'"); + store.delResources(context, draft.getId()); } catch (Exception e) { Log.error(Geonet.DATA_MANAGER, "Error upgrading from draft record with id " + md.getId(), e); } diff --git a/messaging/pom.xml b/messaging/pom.xml index 54eb690f6d2..0bf61674795 100644 --- a/messaging/pom.xml +++ b/messaging/pom.xml @@ -5,7 +5,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.6-SNAPSHOT 4.0.0 diff --git a/oaipmh/pom.xml b/oaipmh/pom.xml index efa7fe0facc..dbe912e5e31 100644 --- a/oaipmh/pom.xml +++ b/oaipmh/pom.xml @@ -30,7 +30,7 @@ org.geonetwork-opensource geonetwork - 4.4.3-SNAPSHOT + 4.4.6-SNAPSHOT diff --git a/pom.xml b/pom.xml index 47993745350..8e6770b471a 100644 --- a/pom.xml +++ b/pom.xml @@ -29,7 +29,7 @@ org.geonetwork-opensource geonetwork pom - 4.4.3-SNAPSHOT + 4.4.6-SNAPSHOT GeoNetwork opensource GeoNetwork opensource is a standards based, Free and Open Source catalog application to manage spatially referenced @@ -122,7 +122,7 @@ maven-dependency-plugin - 3.1.2 + 3.7.0 maven-clean-plugin @@ -245,12 +245,36 @@ maven-toolchains-plugin 3.0.0 + + org.apache.maven.plugins + maven-enforcer-plugin + 3.4.1 + + org.apache.maven.plugins + maven-enforcer-plugin + + + enforce-maven + + enforce + + + + + 3.8.3 + + + + + + + maven-compiler-plugin 11 @@ -269,7 +293,6 @@ - org.apache.maven.plugins maven-resources-plugin UTF-8 @@ -295,7 +318,6 @@ - org.apache.maven.plugins maven-surefire-plugin org.fao.geonet.repository.AbstractSpringDataTest @@ -868,7 +890,7 @@ com.google.guava guava - 30.0-jre + 33.2.1-jre 
com.yammer.metrics @@ -1385,6 +1407,7 @@ estest index datastorages + translationproviders @@ -1428,6 +1451,21 @@ darwin-x86 tar.gz + + + macOS_aarch64 + + + mac + aarch64 + + + + darwin-aarch64 + kibana.sh + darwin-aarch64 + tar.gz + windows @@ -1527,7 +1565,7 @@ 8080 8090 - 8.11.3 + 8.14.0 linux-x86_64 tar.gz http @@ -1585,7 +1623,7 @@ request the list of hosts (but JPA cache db queries). --> * - 9.4.53.v20231009 + 9.4.54.v20240208 jetty-distribution-${jetty.version} https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-distribution/${jetty.version}/${jetty.file}.tar.gz @@ -1593,13 +1631,13 @@ of Postgres is used and update pg.version if needed. --> 30.0 1.19.0 - 42.6.0 + 42.7.3 - 5.3.31 - 5.8.8 + 5.3.33 + 5.8.11 2.7.18 2.7.0 - 1.5.13 + 1.8.0 5.6.15.Final 2.2.0 @@ -1609,8 +1647,8 @@ 1.10.1 true 2.7 - 2.1.1 - 2.15.3 + 2.10.0 + 2.16.2 9.1.22 2.25.1 2.17.2 diff --git a/release-build.sh b/release-build.sh new file mode 100755 index 00000000000..f9b39c11f33 --- /dev/null +++ b/release-build.sh @@ -0,0 +1,85 @@ +#!/bin/bash + +buildRequiredApps=( "java" "git" "mvn" "ant" "xmlstarlet" ) + +for app in "${buildRequiredApps[@]}"; do : + if ! [ -x "$(command -v ${app})" ]; then + echo "Error: ${app} is not installed." >&2 + exit 1 + fi +done + +function showUsage +{ + echo -e "\nThis script is used to build a release for the current branch" + echo +} + +if [ "$1" = "-h" ] +then + showUsage + exit +fi + +projectVersion=`xmlstarlet sel -t -m "/_:project/_:version" -v . -n pom.xml` +subVersion=`cut -d "-" -f 2 <<< $projectVersion` +mainVersion=`cut -d "-" -f 1 <<< $projectVersion` +mainVersionMajor=`cut -d "." -f 1 <<< $mainVersion` +mainVersionMinor=`cut -d "." -f 2 <<< $mainVersion` +mainVersionSub=`cut -d "." -f 3 <<< $mainVersion` + +gitBranch=`git branch --show-current` + +nextVersionNumber="${mainVersionMajor}.${mainVersionMinor}.$((mainVersionSub+1))" +previousVersionNumber="${mainVersionMajor}.${mainVersionMinor}.$((mainVersionSub-1))" + +from=origin +frombranch=origin/${gitBranch} +series=${mainVersionMajor}.${mainVersionMinor} +versionbranch=${gitBranch} +version=${projectVersion} +minorversion=0 +release=latest +newversion=${mainVersion}-$minorversion +currentversion=${projectVersion} +previousversion=${previousVersionNumber} +nextversion=${nextVersionNumber}-SNAPSHOT + +echo "Building release for version ${newversion} (from ${currentversion})." +echo "" +echo "Before you start:" +echo "1. Use web-ui/download-from-transifex.sh to update translations" +echo "2. Use release-notes.sh to update change log and release notes" +echo "" +echo "After being build you can test the release before publishing. Git branch ${gitBranch}." +read -p "Press enter to continue" + +# Update version number (in pom.xml, installer config and SQL) +./update-version.sh $currentversion $newversion + +# Then commit the new version +git add . 
+git commit -m "Update version to $newversion"
+git tag -a $mainVersion -m "Tag for $newversion release"
+
+# Build the new release
+mvn clean install -DskipTests -ntp -Pwar -Pwro4j-prebuild-cache
+
+(cd datastorages && mvn clean install -DskipTests -ntp -Drelease -DskipTests)
+
+# Download Jetty and create the installer
+(cd release && mvn clean install -Pjetty-download && ant)
+
+# generate checksum for download artifacts
+
+if [ -f "release/target/GeoNetwork-$version/geonetwork-bundle-$newversion.zip.MD5" ]; then
+  rm release/target/GeoNetwork-$version/geonetwork-bundle-$newversion.zip.MD5
+fi
+
+if [[ ${OSTYPE:0:6} == 'darwin' ]]; then
+  md5 -r web/target/geonetwork.war > web/target/geonetwork.war.md5
+  md5 -r release/target/GeoNetwork-$nextVersionNumber/geonetwork-bundle-$newversion.zip > release/target/GeoNetwork-$nextVersionNumber/geonetwork-bundle-$newversion.zip.md5
+else
+  (cd web/target && md5sum geonetwork.war > geonetwork.war.md5)
+  (cd release/target/GeoNetwork-$nextVersionNumber && md5sum geonetwork-bundle-$newversion.zip > geonetwork-bundle-$newversion.zip.md5)
+fi
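A note on the checksum step above: before uploading, the MD5 files written by release-build.sh can be cross-checked against the artifacts. This is a minimal sketch, not part of the patch, assuming GNU coreutils (md5sum -c) and the same shell variables the script uses.

# Sketch only: verify the generated checksums (assumes GNU coreutils and the
# script's $nextVersionNumber / $newversion variables are in scope).
(cd web/target && md5sum -c geonetwork.war.md5)
(cd "release/target/GeoNetwork-$nextVersionNumber" && md5sum -c "geonetwork-bundle-$newversion.zip.md5")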
diff --git a/release-notes.sh b/release-notes.sh
new file mode 100755
index 00000000000..0c4e40c3d00
--- /dev/null
+++ b/release-notes.sh
@@ -0,0 +1,94 @@
+#!/bin/bash
+
+buildRequiredApps=( "java" "git" "mvn" "ant" "xmlstarlet" )
+
+for app in "${buildRequiredApps[@]}"; do :
+  if ! [ -x "$(command -v ${app})" ]; then
+    echo "Error: ${app} is not installed." >&2
+    exit 1
+  fi
+done
+
+function showUsage
+{
+  echo -e "\nThis script is used to build a release for the current branch"
+  echo
+}
+
+if [ "$1" = "-h" ]
+then
+  showUsage
+  exit
+fi
+
+projectVersion=`xmlstarlet sel -t -m "/_:project/_:version" -v . -n pom.xml`
+subVersion=`cut -d "-" -f 2 <<< $projectVersion`
+mainVersion=`cut -d "-" -f 1 <<< $projectVersion`
+mainVersionMajor=`cut -d "." -f 1 <<< $mainVersion`
+mainVersionMinor=`cut -d "." -f 2 <<< $mainVersion`
+mainVersionSub=`cut -d "." -f 3 <<< $mainVersion`
+
+gitBranch=`git branch --show-current`
+
+nextVersionNumber="${mainVersionMajor}.${mainVersionMinor}.$((mainVersionSub+1))"
+previousVersionNumber="${mainVersionMajor}.${mainVersionMinor}.$((mainVersionSub-1))"
+
+from=origin
+frombranch=origin/${gitBranch}
+series=${mainVersionMajor}.${mainVersionMinor}
+versionbranch=${gitBranch}
+version=${projectVersion}
+minorversion=0
+release=latest
+newversion=${mainVersion}-$minorversion
+currentversion=${projectVersion}
+previousversion=${previousVersionNumber}
+nextversion=${nextVersionNumber}-SNAPSHOT
+
+echo "Creating change log and release notes for version ${newversion} (from ${currentversion}). Git branch ${gitBranch}:"
+echo "  docs/changes/changes$newversion.txt"
+echo "  docs/manual/docs/overview/change-log/version-$mainVersion.md"
+echo "When generated please review and update:"
+echo "  docs/manual/mkdocs.yml"
+echo "  docs/manual/docs/overview/latest/index.md"
+echo "  docs/manual/docs/overview/change-log/version-$mainVersion.md"
+echo ""
+read -p "Press enter to continue"
+
+# Generate list of changes
+cat <<EOF > docs/changes/changes$newversion.txt
+================================================================================
+===
+=== GeoNetwork $version: List of changes
+===
+================================================================================
+EOF
+git log --pretty='format:- %s' $previousversion... >> docs/changes/changes$newversion.txt
+
+# Generate release notes
+
+cat <<EOF > docs/manual/docs/overview/change-log/version-$mainVersion.md
+# Version $mainVersion
+
+GeoNetwork $mainVersion is a minor release.
+
+## Migration notes
+
+### API changes
+
+### Installation changes
+
+### Index changes
+
+## List of changes
+
+Major changes:
+
+EOF
+
+git log --pretty='format:* %N' $previousversion.. | grep -v "^* $" >> docs/manual/docs/overview/change-log/version-$mainVersion.md
+
+cat <<EOF >> docs/manual/docs/overview/change-log/version-$mainVersion.md
+
+and more \... see [$newversion issues](https://github.com/geonetwork/core-geonetwork/issues?q=is%3Aissue+milestone%3A$mainVersion+is%3Aclosed) and [pull requests](https://github.com/geonetwork/core-geonetwork/pulls?page=3&q=is%3Apr+milestone%3A$mainVersion+is%3Aclosed) for full details.
+EOF
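To make the variable plumbing in release-notes.sh concrete: with the project version at 4.4.6-SNAPSHOT, previousversion resolves to 4.4.5 and newversion to 4.4.6-0, so the change-log step reduces to the command below. The tag 4.4.5 is assumed to exist locally; the concrete values are derived from the script's arithmetic, not stated in the patch.

# Sketch only: what the change-log collection expands to for a 4.4.6 build.
git log --pretty='format:- %s' 4.4.5... >> docs/changes/changes4.4.6-0.txt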
diff --git a/release-publish.sh b/release-publish.sh
new file mode 100755
index 00000000000..a62f7229ebd
--- /dev/null
+++ b/release-publish.sh
@@ -0,0 +1,56 @@
+#!/bin/bash
+
+function showUsage
+{
+  echo -e "\nThis script is used to publish a release on sourceforge, github and maven repository"
+  echo
+  echo -e "Usage: ./`basename $0` sourceforge_username [remote]"
+  echo
+  echo -e "Example:"
+  echo -e "\t./`basename $0` sourceforgeusername"
+  echo -e "\t./`basename $0` sourceforgeusername upstream"
+  echo
+}
+
+if [ "$1" = "-h" ]
+then
+  showUsage
+  exit
+fi
+
+if [[ ($# -ne 1) && ($# -ne 2) ]]
+then
+  showUsage
+  exit
+fi
+
+projectVersion=`xmlstarlet sel -t -m "/_:project/_:version" -v . -n pom.xml`
+version=`cut -d "-" -f 1 <<< $projectVersion`
+versionbranch=`git branch --show-current`
+sourceforge_username=$1
+remote=origin
+
+if [ $# -eq 2 ]
+then
+  remote=$2
+fi
+
+# Push the branch and tag to github
+git push $remote $versionbranch
+git push $remote $version
+# TODO: attach release notes to version
+
+sftp $sourceforge_username,geonetwork@frs.sourceforge.net << EOT
+cd /home/frs/project/g/ge/geonetwork/GeoNetwork_opensource
+mkdir v${version}
+cd v${version}
+put docs/changes/changes${version}-0.txt
+put release/target/GeoNetwork*/geonetwork-bundle*.zip*
+put web/target/geonetwork.war*
+put datastorages/*/target/*.zip
+bye
+EOT
+
+# Deploy to osgeo repository (requires credentials in ~/.m2/settings.xml)
+mvn deploy -DskipTests -Drelease
+
diff --git a/release-restore.sh b/release-restore.sh
new file mode 100755
index 00000000000..2b98413ce4c
--- /dev/null
+++ b/release-restore.sh
@@ -0,0 +1,49 @@
+#!/bin/bash
+
+buildRequiredApps=( "java" "git" "mvn" "ant" "xmlstarlet" )
+
+for app in "${buildRequiredApps[@]}"; do :
+  if ! [ -x "$(command -v ${app})" ]; then
+    echo "Error: ${app} is not installed." >&2
+    exit 1
+  fi
+done
+
+projectVersion=`xmlstarlet sel -t -m "/_:project/_:version" -v . -n pom.xml`
+subVersion=`cut -d "-" -f 2 <<< $projectVersion`
+mainVersion=`cut -d "-" -f 1 <<< $projectVersion`
+mainVersionMajor=`cut -d "." -f 1 <<< $mainVersion`
+mainVersionMinor=`cut -d "." -f 2 <<< $mainVersion`
+mainVersionSub=`cut -d "." -f 3 <<< $mainVersion`
+
+gitBranch=`git branch --show-current`
+
+nextVersionNumber="${mainVersionMajor}.${mainVersionMinor}.$((mainVersionSub+1))"
+previousVersionNumber="${mainVersionMajor}.${mainVersionMinor}.$((mainVersionSub-1))"
+
+from=origin
+frombranch=origin/${gitBranch}
+series=${mainVersionMajor}.${mainVersionMinor}
+versionbranch=${gitBranch}
+version=${projectVersion}
+minorversion=0
+release=latest
+newversion=${mainVersion}-$minorversion
+currentversion=${projectVersion}
+previousversion=${previousVersionNumber}
+nextversion=${nextVersionNumber}-SNAPSHOT
+
+
+echo "Update version number to ${nextversion} (from ${newversion})."
+echo ""
+echo "After update. Push changes to Git branch ${gitBranch}."
+read -p "Press enter to continue"
+
+
+# Set version number to SNAPSHOT
+./update-version.sh $newversion $nextversion
+
+git add .
+git commit -m "Update version to $nextversion"
+
diff --git a/release-test.sh b/release-test.sh
new file mode 100755
index 00000000000..98e49da4b0b
--- /dev/null
+++ b/release-test.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+projectVersion=`xmlstarlet sel -t -m "/_:project/_:version" -v . -n pom.xml`
+version=`cut -d "-" -f 1 <<< $projectVersion`
+versionbranch=`git branch --show-current`
+
+echo "Testing zip in release/target/GeoNetwork-$version ..."
+
+cd "release/target/GeoNetwork-$version"
+unzip -q "geonetwork-bundle-$projectVersion.zip" -d "geonetwork-bundle-$projectVersion"
+cd "geonetwork-bundle-$projectVersion/bin"
+./startup.sh -f
diff --git a/release/build.properties b/release/build.properties
index e7183fe9410..f77dcb0ce66 100644
--- a/release/build.properties
+++ b/release/build.properties
@@ -5,11 +5,11 @@ homepage=https://geonetwork-opensource.org
 supportEmail=geonetwork-users@lists.sourceforge.net
 # Application version properties
-version=4.4.3
+version=4.4.6
 subVersion=SNAPSHOT
 # Java runtime properties
 javaVersion=11
 javaDisplayVersion=11
-jreUrl=https://adoptium.net/en-GB/temurin/releases/?version=4.4.3
+jreUrl=https://adoptium.net/en-GB/temurin/releases/?version=4.4.6
 jreName=AdoptOpenJDK
diff --git a/release/pom.xml b/release/pom.xml
index 920336a32a0..41e95a1b782 100644
--- a/release/pom.xml
+++ b/release/pom.xml
@@ -7,7 +7,7 @@
     org.geonetwork-opensource
     geonetwork
-    4.4.3-SNAPSHOT
+    4.4.6-SNAPSHOT
   gn-release
diff --git a/schemas-test/pom.xml b/schemas-test/pom.xml
index 6df035d8937..e6e2bbe4021 100644
--- a/schemas-test/pom.xml
+++ b/schemas-test/pom.xml
@@ -27,7 +27,7 @@
     geonetwork
     org.geonetwork-opensource
-    4.4.3-SNAPSHOT
+    4.4.6-SNAPSHOT
   4.0.0
   jar
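Taken together, the new helper scripts suggest a release workflow roughly as follows. The ordering is inferred from the scripts' own prompts (release-build.sh asks for translations and release-notes.sh to be run first) rather than documented in the patch, and the SourceForge username and upstream remote below are placeholders.

# Sketch of the intended release sequence (inferred, not prescribed by the patch):
./release-notes.sh                                 # generate change log and release notes
./release-build.sh                                 # set version, tag, build war/bundle, write checksums
./release-test.sh                                  # unzip the built bundle and start it locally
./release-publish.sh mysourceforgeuser upstream    # push tag, upload artifacts, mvn deploy
./release-restore.sh                               # bump pom versions back to the next -SNAPSHOT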
diff --git a/schemas/config-editor.xsd b/schemas/config-editor.xsd
index 0c8511f7ab1..31455f489a3 100644
--- a/schemas/config-editor.xsd
+++ b/schemas/config-editor.xsd
[The body of this file's hunks did not survive extraction: the XML markup was stripped, leaving only +/- markers and documentation text. The recoverable hunk headers and documentation strings are: @@ -263,12 +263,14 @@ "Configure here the list of fields to display using a table. This only applies to ..."; @@ -434,6 +436,7 @@ "Table column."; @@ -903,6 +906,7 @@ "Define if this tab is the default one for the view. Only one tab should be the default."; @@ -1000,6 +1004,15 @@ "e.g. only 2 INSPIRE themes"; @@ -1085,6 +1098,7 @@ "the mandatory section with no name and then the inner elements."; @@ -1114,9 +1128,11 @@ "Note: Only sections with forEach support del attribute." (with a "Distribution" example); @@ -1153,6 +1169,119 @@ adds a list-type section whose documented options include "Add a hyperlink on the item", "An optional name to override the default one based on field name for the section. The name must be defined in ``{schema}/loc/{lang}/strings.xml``.", "The XPath of the element to create list items.", "XPath of the element to sort the list by. Must use full name of each nodes eg. gmd:organisationName/gco:CharacterString", "An optional attribute to collapse the section. If not set the section is expanded.", "An optional attribute to not allow collapse for the section. If not set the section is expandable.", and "Local name to match if the element does not exist."; a final hunk @@ -1167,6 +1296,46 @@ adds further declarations whose content was lost.]