diff --git a/core/src/main/java/jeeves/interfaces/ApplicationHandler.java b/core/src/main/java/jeeves/interfaces/ApplicationHandler.java
index 38d22256d1b7..560de0cf8428 100644
--- a/core/src/main/java/jeeves/interfaces/ApplicationHandler.java
+++ b/core/src/main/java/jeeves/interfaces/ApplicationHandler.java
@@ -29,12 +29,19 @@
//=============================================================================
+/**
+ * Used to maintain registry of handlers for {@link jeeves.server.dispatchers.ServiceManager}.
+ */
public interface ApplicationHandler {
- public String getContextName();
+ /** Context name for registry lookup */
+ String getContextName();
- public Object start(Element config, ServiceContext s) throws Exception;
+ /** Start application handler, returning application context managed in registry */
- public void stop();
+ Object start(Element config, ServiceContext s) throws Exception;
+
+ /** Stop handler */
+ void stop();
}
//=============================================================================
diff --git a/core/src/main/java/jeeves/monitor/MonitorManager.java b/core/src/main/java/jeeves/monitor/MonitorManager.java
index 0e0b73044f4c..a5887fd02a91 100644
--- a/core/src/main/java/jeeves/monitor/MonitorManager.java
+++ b/core/src/main/java/jeeves/monitor/MonitorManager.java
@@ -30,6 +30,7 @@
import jeeves.constants.ConfigFile;
import jeeves.server.context.ServiceContext;
+import jeeves.server.dispatchers.ServiceManager;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.LogManager;
import org.fao.geonet.Util;
@@ -75,6 +76,13 @@ public class MonitorManager {
private MetricsRegistry metricsRegistry;
private JmxReporter jmxReporter;
+ /**
+ * Internal service context created during init, and cleaned up during shutdown.
+ *
+ * A distinct service context is required as health checks are performed in the background.
+ */
+ private ServiceContext monitorContext;
+
public void init(ServletContext context, String baseUrl) {
String webappName = "";
@@ -130,44 +138,64 @@ private HealthCheckRegistry lookUpHealthCheckRegistry(ServletContext context, St
return tmpHealthCheckRegistry;
}
- public void initMonitorsForApp(ServiceContext context) {
- createHealthCheck(context, criticalServiceContextHealthChecks, criticalHealthCheckRegistry, "critical health check");
- createHealthCheck(context, warningServiceContextHealthChecks, warningHealthCheckRegistry, "warning health check");
- createHealthCheck(context, expensiveServiceContextHealthChecks, expensiveHealthCheckRegistry, "expensive health check");
+ public void initMonitorsForApp(ServiceContext initContext) {
+ ServiceManager serviceManager = initContext.getBean(ServiceManager.class);
+ monitorContext = serviceManager.createServiceContext("monitor", initContext);
+
+ createHealthCheck(monitorContext, criticalServiceContextHealthChecks, criticalHealthCheckRegistry, "critical health check");
+ createHealthCheck(monitorContext, warningServiceContextHealthChecks, warningHealthCheckRegistry, "warning health check");
+ createHealthCheck(monitorContext, expensiveServiceContextHealthChecks, expensiveHealthCheckRegistry, "expensive health check");
for (Class>> factoryClass : serviceContextGauges.keySet()) {
Log.info(Log.ENGINE, "Instantiating : " + factoryClass.getName());
- Gauge> instance = create(factoryClass, context, SERVICE_CONTEXT_GAUGE);
+ Gauge> instance = create(factoryClass, monitorContext, SERVICE_CONTEXT_GAUGE);
serviceContextGauges.put(factoryClass, instance);
}
for (Class> factoryClass : serviceContextTimers.keySet()) {
Log.info(Log.ENGINE, "Instantiating : " + factoryClass.getName());
- Timer instance = create(factoryClass, context, SERVICE_CONTEXT_TIMER);
+ Timer instance = create(factoryClass, monitorContext, SERVICE_CONTEXT_TIMER);
serviceContextTimers.put(factoryClass, instance);
}
for (Class> factoryClass : serviceContextCounters.keySet()) {
Log.info(Log.ENGINE, "Instantiating : " + factoryClass.getName());
- Counter instance = create(factoryClass, context, SERVICE_CONTEXT_COUNTER);
+ Counter instance = create(factoryClass, monitorContext, SERVICE_CONTEXT_COUNTER);
serviceContextCounters.put(factoryClass, instance);
}
for (Class> factoryClass : serviceContextHistogram.keySet()) {
Log.info(Log.ENGINE, "Instantiating : " + factoryClass.getName());
- Histogram instance = create(factoryClass, context, SERVICE_CONTEXT_HISTOGRAM);
+ Histogram instance = create(factoryClass, monitorContext, SERVICE_CONTEXT_HISTOGRAM);
serviceContextHistogram.put(factoryClass, instance);
}
for (Class> factoryClass : serviceContextMeter.keySet()) {
Log.info(Log.ENGINE, "Instantiating : " + factoryClass.getName());
- Meter instance = create(factoryClass, context, SERVICE_CONTEXT_METER);
+ Meter instance = create(factoryClass, monitorContext, SERVICE_CONTEXT_METER);
serviceContextMeter.put(factoryClass, instance);
}
}
+ /**
+ * Create and register health checks
+ *
+ * @param context
+ * @param checks factories used to create health checks
+ * @param registry registry listing health checks
+ * @param type
+ */
private void createHealthCheck(ServiceContext context, List checks, HealthCheckRegistry registry, String type) {
+ ServiceManager serviceManager = context.getBean(ServiceManager.class);
+
for (HealthCheckFactory healthCheck : checks) {
- Log.info(Log.ENGINE, "Registering " + type + ": " + healthCheck.getClass().getName());
- HealthCheck check = healthCheck.create(context);
- healthCheckRegistry.register(check);
- registry.register(check);
+ String factoryName = healthCheck.getClass().getName();
+ try {
+ HealthCheck check = healthCheck.create(context);
+
+ Log.info(Log.ENGINE, "Registering " + type + ": " + factoryName);
+ healthCheckRegistry.register(check);
+ registry.register(check);
+ }
+ catch (Throwable t){
+ Log.info(Log.ENGINE, "Unable to register " + type + ": " + factoryName);
+ }
}
}
@@ -306,6 +334,9 @@ public ResourceTracker getResourceTracker() {
@PreDestroy
public void shutdown() {
Log.info(Log.ENGINE, "MonitorManager#shutdown");
+ if (monitorContext != null){
+ monitorContext.clear();
+ }
if (resourceTracker != null) {
resourceTracker.clean();
}
diff --git a/core/src/main/java/jeeves/server/JeevesEngine.java b/core/src/main/java/jeeves/server/JeevesEngine.java
index a9e5898419d9..33a6ab69be1c 100644
--- a/core/src/main/java/jeeves/server/JeevesEngine.java
+++ b/core/src/main/java/jeeves/server/JeevesEngine.java
@@ -92,6 +92,8 @@ public class JeevesEngine {
private Path _appPath;
private int _maxUploadSize;
+ /** AppHandler service context used during init, tasks and background activities */
+ private ServiceContext appHandlerContext;
public static void handleStartupError(Throwable e) {
Log.fatal(Log.ENGINE, "Raised exception during init");
@@ -391,6 +393,13 @@ private void initDefault(Element defaults, ServiceManager serviceMan) throws Exc
//---
//---------------------------------------------------------------------------
+ /**
+ * Setup application handler using the provided handler definition.
+ *
+ * @param handler handler definition
+ * @param servlet jeeves servlet responsible for http dispatch
+ * @throws Exception
+ */
private void initAppHandler(Element handler, JeevesServlet servlet) throws Exception {
if (handler == null) {
info("Handler not found");
@@ -413,20 +422,20 @@ private void initAppHandler(Element handler, JeevesServlet servlet) throws Excep
ApplicationHandler h = (ApplicationHandler) c.newInstance();
- ServiceContext srvContext = serviceMan.createServiceContext("AppHandler", appContext);
- srvContext.setLanguage(_defaultLang);
- srvContext.setLogger(_appHandLogger);
- srvContext.setServlet(servlet);
- srvContext.setAsThreadLocal();
+ appHandlerContext = serviceMan.createAppHandlerServiceContext(appContext);
+ appHandlerContext.setLanguage(_defaultLang);
+ appHandlerContext.setLogger(_appHandLogger);
+ appHandlerContext.setServlet(servlet);
+ appHandlerContext.setAsThreadLocal();
try {
info("--- Starting handler --------------------------------------");
- Object context = h.start(handler, srvContext);
+ Object context = h.start(handler, appHandlerContext);
_appHandlers.add(h);
serviceMan.registerContext(h.getContextName(), context);
- monitorManager.initMonitorsForApp(srvContext);
+ monitorManager.initMonitorsForApp(appHandlerContext);
info("--- Handler started ---------------------------------------");
} catch (Exception e) {
@@ -450,6 +459,9 @@ private void initAppHandler(Element handler, JeevesServlet servlet) throws Excep
serviceMan.setStartupErrors(errors);
}
}
+ finally {
+ appHandlerContext.clearAsThreadLocal();
+ }
}
}
@@ -508,7 +520,11 @@ public void destroy() {
info("Stopping handlers...");
stopHandlers();
+ info("Clearing application handler context...");
+ appHandlerContext.clear();
+
info("=== System stopped ========================================");
+
} catch (Exception e) {
error("Raised exception during destroy");
error(" Exception : " + e);
@@ -525,7 +541,12 @@ public void destroy() {
private void stopHandlers() throws Exception {
for (ApplicationHandler h : _appHandlers) {
- h.stop();
+ try {
+ h.stop();
+ } catch (Throwable unexpected){
+ _appHandLogger.error("Difficulty while stopping "+h.getContextName());
+ _appHandLogger.error(unexpected);
+ }
}
}
diff --git a/core/src/main/java/jeeves/server/context/BasicContext.java b/core/src/main/java/jeeves/server/context/BasicContext.java
index 24930fd02acc..27cffb2af908 100644
--- a/core/src/main/java/jeeves/server/context/BasicContext.java
+++ b/core/src/main/java/jeeves/server/context/BasicContext.java
@@ -43,9 +43,8 @@
//=============================================================================
/**
- * Contains a minimun context for a job execution (schedule, service etc...)
+ * Contains a minimum context for a job execution (schedule, service etc...)
*/
-
public class BasicContext implements Logger {
private final ConfigurableApplicationContext jeevesApplicationContext;
diff --git a/core/src/main/java/jeeves/server/context/ServiceContext.java b/core/src/main/java/jeeves/server/context/ServiceContext.java
index a64c2c7fb1f5..77668b6be7d9 100644
--- a/core/src/main/java/jeeves/server/context/ServiceContext.java
+++ b/core/src/main/java/jeeves/server/context/ServiceContext.java
@@ -44,6 +44,7 @@
import java.nio.file.Path;
import java.util.HashMap;
import java.util.Map;
+import java.util.Objects;
import javax.annotation.CheckForNull;
import javax.persistence.EntityManager;
@@ -52,21 +53,179 @@
/**
* Contains the context for a service execution.
+ *
+ * When creating a ServiceContext you are responsible for managing its use on the current thread and any cleanup:
+ *
+ * try {
+ * context = serviceMan.createServiceContext("AppHandler", appContext);
+ * context.setAsThreadLocal();
+ * ...
+ * } finally {
+ * context.clearAsThreadLocal();
+ * context.clear();
+ * }
+ *
+ * @see ServiceManager
*/
-public class ServiceContext extends BasicContext {
+public class ServiceContext extends BasicContext implements AutoCloseable {
+ /**
+ * ServiceContext is managed as a thread local using setAsThreadLocal, clearAsThreadLocal and clear methods.
+ * ThreadLocalPolicy defines the behaviour of these methods double checking that they are being used correctly.
+ */
+ public static enum ThreadLocalPolicy {
+ /** Direct management of thread local with no checking. */
+ DIRECT,
+ /** Check behavior and log any unexpected use. */
+ TRACE,
+ /** Raise any {@link IllegalStateException} for unexpected behaviour */
+ STRICT };
+ /**
+ * Use -Djeeves.server.context.service.policy to define policy:
+ *
+ * direct: direct management of thread local with no checking
+ * trace: check behavior and log unusual use
+ * strict: raise illegal state exception for unusual behavior
+ *
+ */
+ private static final ThreadLocalPolicy POLICY;
+ static {
+ String property = System.getProperty("jeeves.server.context.policy", "TRACE");
+ ThreadLocalPolicy policy;
+ try {
+ policy = ThreadLocalPolicy.valueOf(property.toUpperCase());
+ }
+ catch (IllegalArgumentException defaultToDirect) {
+ policy = ThreadLocalPolicy.DIRECT;
+ }
+ POLICY = policy;
+ }
+
+ /**
+ * Be careful with thread local to avoid leaking resources, set POLICY above to trace allocation.
+ */
private static final InheritableThreadLocal THREAD_LOCAL_INSTANCE = new InheritableThreadLocal();
- private UserSession _userSession = new UserSession();
- private InputMethod _input;
- private OutputMethod _output;
- private Map _headers;
- private String _language;
- private String _service;
- private String _ipAddress;
- private int _maxUploadSize;
- private JeevesServlet _servlet;
- private boolean _startupError = false;
- private Map _startupErrors;
+
+ /**
+ * Simple data structure recording service details.
+ *
+ * Lightweight data structure used for logging service details such as name.
+ */
+ public static class ServiceDetails {
+ private String ipAddress;
+ private String service;
+ private String language;
+ public ServiceDetails(ServiceContext context){
+ this.ipAddress = context.getIpAddress();
+ }
+
+ public String getIpAddress() {
+ return ipAddress;
+ }
+
+ public String getService() {
+ return service;
+ }
+
+ public String getLanguage() {
+ return language;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ServiceDetails that = (ServiceDetails) o;
+ return Objects.equals(ipAddress, that.ipAddress) && service.equals(that.service) && Objects.equals(language, that.language);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(ipAddress, service, language);
+ }
+ }
+
+ /**
+ * Shared service context offering limited functionality.
+ *
+ * Use of service context has been assumed in many parts of the codebase. This shared "AppHandler" service context
+ * is used during Jeeves startup and has some protection from being cleared. Additional managers such as HarvestManager
+ * also use a shared service context to support background processes.
+ */
+ public static class AppHandlerServiceContext extends ServiceContext {
+
+ /**
+ * Shared AppHandler service context associated with application lifecycle.
+ *
+ * See factory method {@link ServiceManager#createAppHandlerServiceContext(ConfigurableApplicationContext, String)} and
+ * {@link ServiceManager#createAppHandlerServiceContext(ConfigurableApplicationContext)}.
+ *
+ * @param service Service name
+ * @param jeevesApplicationContext Application context
+ * @param contexts Handler context
+ * @param entityManager
+ */
+ public AppHandlerServiceContext(final String service, final ConfigurableApplicationContext jeevesApplicationContext,
+ final Map contexts, final EntityManager entityManager) {
+ super( service, jeevesApplicationContext, contexts, entityManager );
+ _language = "?";
+ _userSession = null;
+ _ipAddress = "?";
+ }
+
+ @Override
+ public void setUserSession(UserSession session) {
+ if (session != null) {
+ warning("Shared service context \"" + _service + "\" context should not be configured with user session");
+ }
+ super.setUserSession(session);
+ }
+ @Override
+ public void setIpAddress(String address) {
+ if( address != null && !"?".equals(address)) {
+ warning("Shared service context \""+_service+"\" should not be associated with an ip address");
+ }
+ super.setIpAddress(address);
+ }
+
+ public void clear() {
+ warning("Shared service context \""+_service+"\" context is shared, and should not be cleared");
+ }
+
+ @Override
+ public String toString() {
+ final StringBuilder sb = new StringBuilder("AppHandlerServiceContext ");
+ sb.append("'").append(_service).append('\'');
+
+ return sb.toString();
+ }
+
+ };
+ /**
+ * Trace allocation via {@link #setAsThreadLocal()}.
+ *
+ * Recording where the service context was assigned to the thread local to aid in debugging.
+ */
+ protected Throwable allocation = null;
+
+ /**
+ * Trace deallocation via {@link #clear()} method.
+ *
+ * Recording where the service context clear() was called to aid in debugging.
+ */
+ protected Throwable deAllocation = null;
+
+ protected UserSession _userSession = new UserSession();
+ protected InputMethod _input;
+ protected OutputMethod _output;
+ protected Map _headers;
+ protected String _language;
+ protected String _service;
+ protected String _ipAddress;
+ protected int _maxUploadSize;
+ protected JeevesServlet _servlet;
+ protected boolean _startupError = false;
+ protected Map _startupErrors;
/**
* Property to be able to add custom response headers depending on the code (and not the xml of
* Jeeves)
@@ -76,7 +235,7 @@ public class ServiceContext extends BasicContext {
*
* @see #_statusCode
*/
- private Map _responseHeaders;
+ protected Map _responseHeaders;
/**
* Property to be able to add custom http status code headers depending on the code (and not the
* xml of Jeeves)
@@ -86,7 +245,16 @@ public class ServiceContext extends BasicContext {
*
* @see #_responseHeaders
*/
- private Integer _statusCode;
+ protected Integer _statusCode;
+
+ /**
+ * Context for service execution.
+ *
+ * @param service Service name
+ * @param jeevesApplicationContext Application context
+ * @param contexts Handler context
+ * @param entityManager
+ */
public ServiceContext(final String service, final ConfigurableApplicationContext jeevesApplicationContext,
final Map contexts, final EntityManager entityManager) {
super(jeevesApplicationContext, contexts, entityManager);
@@ -104,9 +272,47 @@ public ServiceContext(final String service, final ConfigurableApplicationContext
*/
@CheckForNull
public static ServiceContext get() {
- return THREAD_LOCAL_INSTANCE.get();
+ ServiceContext context = THREAD_LOCAL_INSTANCE.get();
+ if(context != null && context.isCleared()) {
+ context.checkCleared("Thread local access");
+ }
+ return context;
}
+ /**
+ * Auto closable for try-with-resources support.
+ *
+ * For use when creating service context for use as a parameter, will check and handle {@link #clear()} if needed:
+ *
+ * try (ServiceContext context = serviceMan.createServiceContext("AppHandler", appContext)){
+ * ...
+ * }
+ *
+ *
+ * Close will also check and handle {@link #clearAsThreadLocal()} if needed:
+ *
+ * try (ServiceContext context = serviceMan.createServiceContext("AppHandler", appContext)){
+ * setAsThreadLocal();
+ * ....
+ * }
+ *
+ *
+ */
+ public void close() {
+ try {
+ ServiceContext check = THREAD_LOCAL_INSTANCE.get();
+ if( this == check){
+ clearAsThreadLocal();
+ }
+ }
+ finally {
+ if( !isCleared() ){
+ // reuse clear method, but provide useful deAllocation context
+ clear();
+ deAllocation = new Throwable("ServiceContext "+_service+" closed");
+ }
+ }
+ }
//--------------------------------------------------------------------------
//---
//--- Constructor
@@ -115,10 +321,193 @@ public static ServiceContext get() {
/**
* Called to set the Service context for this thread and inherited threads.
+ *
+ * If you call this method you are responsible for thread context management and {@link #clearAsThreadLocal()}.
+ *
+ * try {
+ * context.setAsThreadLocal();
+ * }
+ * finally {
+ * context.clearAsThreadLocal();
+ * }
+ *
*/
public void setAsThreadLocal() {
+ ServiceContext check = THREAD_LOCAL_INSTANCE.get();
+
+ if( POLICY == ThreadLocalPolicy.DIRECT || check == null){
+ // step one set thread local
+ THREAD_LOCAL_INSTANCE.set(this);
+ // step two ensure ApplicationContextHolder thread local kept in sync
+ ApplicationContextHolder.set(this.getApplicationContext());
+ // step three details on allocation
+ allocation = new Throwable("ServiceContext "+_service+" allocated to thread");
+
+ return;
+ }
+
+ if (this == check) {
+ String unexpected = "Service " + _service + " Context: already in use for this thread";
+ if( allocation != null ){
+ // details on prior allocation
+ unexpected += "\n\tContext '"+check._service+"' conflict: " + check.allocation.getStackTrace()[1];
+ }
+ // step one set thread local
+ // (already done)
+ // step two ensure ApplicationContextHolder thread local kept in sync
+ if( ApplicationContextHolder.get() != null ){
+ ApplicationContextHolder.clear();
+ }
+ ApplicationContextHolder.set(this.getApplicationContext());
+ // step three detail on re-allocation
+ allocation = new Throwable("ServiceContext "+_service+" allocated to thread");
+
+ unexpected += "\n\tService '"+_service+"' allocate: " + allocation.getStackTrace()[1];
+ checkUnexpectedState( unexpected );
+ return;
+ }
+
+ // thread being recycled or reused for new service context
+ //
+ THREAD_LOCAL_INSTANCE.remove();
+
+ String unexpected = "Service " + _service + " Context: Clearing prior service context " + check._service;
+ if( check.allocation != null ){
+ // details on prior allocation
+ unexpected += "\n\tContext '"+check._service+"' conflict: " + check.allocation.getStackTrace()[1];
+ }
+
+ // step one set thread local
THREAD_LOCAL_INSTANCE.set(this);
+ // step two ensure ApplicationContextHolder thread local kept in sync
+ if( ApplicationContextHolder.get() != null ){
+ ApplicationContextHolder.clear();
+ }
ApplicationContextHolder.set(this.getApplicationContext());
+ // step three detail on present re-allocation
+ allocation = new Throwable("ServiceContext "+_service+" allocated to thread");
+
+ unexpected += "\n\tService '"+_service+"' allocate: " + allocation.getStackTrace()[1];
+ checkUnexpectedState( unexpected );
+ }
+
+ /**
+ * Check if _service name is available, or raise a NullPointerException if clear() has already been called.
+ *
+ * @param message message to use if clear() has already been called.
+ */
+ protected void checkCleared(String message){
+ if(isCleared()){
+ String unavailable = message + " - service context no longer available\nCleared by " + deAllocation.getStackTrace()[1];
+ throw new NullPointerException(unavailable);
+ }
+ }
+
+ /** Log or raise exception based on {@link #POLICY} */
+ protected void checkUnexpectedState( String unexpected ){
+ if( unexpected == null ){
+ return; // nothing unexpected to report
+ }
+ switch( POLICY ){
+ case DIRECT:
+ break; // ignore
+ case TRACE:
+ debug(unexpected);
+ break;
+ case STRICT:
+ throw new IllegalStateException(unexpected);
+ }
+ }
+
+ /**
+ * Called to clear the Service context for this thread and inherited threads.
+ *
+ * In general code that creates ServiceContext is responsible for thread management and any cleanup:
+ *
+ * try {
+ * context.setAsThreadLocal();
+ * }
+ * finally {
+ * context.clearAsThreadLocal();
+ * }
+ *
+ */
+ public void clearAsThreadLocal() {
+ ServiceContext check = THREAD_LOCAL_INSTANCE.get();
+
+ // clean up thread local
+ if( POLICY == ThreadLocalPolicy.DIRECT){
+ if( check != null ){
+ check.allocation = null;
+ check = null;
+ }
+ THREAD_LOCAL_INSTANCE.remove();
+ allocation = null;
+ // ApplicationContextHolder.clear();
+ return;
+ }
+ if( check == null ){
+ String unexpected = "ServiceContext "+_service+" clearAsThreadLocal: '"+_service+"' unexpected state, thread local already cleared";
+ if( allocation != null ){
+ unexpected += "\n\tContext '"+_service+"' allocation: " + allocation.getStackTrace()[1];
+ }
+ allocation = null;
+ // ApplicationContextHolder.clear();
+ checkUnexpectedState( unexpected );
+ return;
+ }
+ if (check == this){
+ THREAD_LOCAL_INSTANCE.remove();
+ allocation = null;
+ // ApplicationContextHolder.clear();
+ return;
+ }
+
+ String unexpected = "ServiceContext clearAsThreadLocal: '"+_service+"' unexpected state, thread local presently used by service context '"+check._service+"'";
+ if( check.allocation != null ){
+ unexpected += "\n\tContext '"+check._service+"' conflict: " + check.allocation.getStackTrace()[1];
+ }
+ if( allocation != null ){
+ unexpected += "\n\tContext '"+_service+"' allocation: " + allocation.getStackTrace()[1];
+ }
+ THREAD_LOCAL_INSTANCE.remove();
+ allocation = null;
+ // ApplicationContextHolder.clear();
+ if( ApplicationContextHolder.get() != null && ApplicationContextHolder.get() != getApplicationContext() ){
+ unexpected += "\n\tApplicationContext '"+ApplicationContextHolder.get().getApplicationName()+"' conflict detected";
+ unexpected += "\n\tApplicationContext '"+getApplicationContext().getApplicationName()+"' expected";
+ ApplicationContextHolder.clear();
+ }
+ checkUnexpectedState( unexpected );
+ }
+
+ /**
+ * Release any resources tied up by this service context.
+ *
+ * In general code that creates a ServiceContext is responsible for thread management and any cleanup:
+ *
+ * ServiceContext context = serviceMan.createServiceContext("AppHandler", appContext);
+ * try {
+ * context.setAsThreadLocal();
+ * }
+ * finally {
+ * context.clearAsThreadLocal();
+ * context.clear();
+ * }
+ *
+ */
+ public void clear(){
+ if( this._service != null) {
+ deAllocation = new Throwable("ServiceContext "+_service+" cleared");
+ this._service = null;
+ this._headers = null;
+ this._responseHeaders = null;
+ this._servlet = null;
+ this._userSession = null;
+ }
+ else {
+ debug("Service context unexpectedly cleared twice, previously cleared by "+deAllocation.getStackTrace()[1]);
+ }
}
//--------------------------------------------------------------------------
@@ -127,27 +516,65 @@ public void setAsThreadLocal() {
//---
//--------------------------------------------------------------------------
+ /**
+ * Language code, or "?" if undefined.
+ * @return language code, or "?" if undefined.
+ */
public String getLanguage() {
+ // checkCleared("language not available");
return _language;
}
+ /**
+ * Language code, or "?" if undefined.
+ * @param lang language code, or "?" if undefined.
+ */
public void setLanguage(final String lang) {
_language = lang;
}
+ /**
+ * True if {@link #clear()} has been called to reclaim resources.
+ *
+ * @return true if service context has been cleared
+ */
+ public boolean isCleared(){
+ return _service == null;
+ }
+
+
+ /**
+ * Service name, or null if service context is no longer in use.
+ *
+ * @return service name, or null if service is no longer in use
+ */
public String getService() {
return _service;
}
- public void setService(final String service) {
+ public void setService(String service) {
+ if( service == null ){
+ service = "internal";
+ }
this._service = service;
logger = Log.createLogger(Log.WEBAPP + "." + service);
}
+ /**
+ * IP address of request, or "?" for local loopback request.
+ *
+ * @return ip address, or "?" for loopback request.
+ */
public String getIpAddress() {
+ checkCleared("ip address not available");
return _ipAddress;
}
+ /**
+ * IP address of request, or "?" for local loopback request.
+ *
+ * @param address ip address, or "?" for loopback request.
+ */
public void setIpAddress(final String address) {
_ipAddress = address;
}
@@ -157,6 +584,7 @@ public Path getUploadDir() {
}
public int getMaxUploadSize() {
+ checkCleared("max upload size not available");
return _maxUploadSize;
}
@@ -170,6 +598,7 @@ public void setMaxUploadSize(final int size) {
* @return the user session stored on httpsession
*/
public UserSession getUserSession() {
+ checkCleared("user session not available");
return _userSession;
}
@@ -177,6 +606,23 @@ public void setUserSession(final UserSession session) {
_userSession = session;
}
+ /**
+ * Safely look up user name, or "anonymous".
+ *
+ * This is a quick null safe lookup of user name suitable for use in logging and error messages.
+ *
+ * @return username, or "anonymous" if unavailable.
+ */
+ public String userName(){
+ if (_userSession == null || _userSession.getUsername() == null ){
+ return "anonymous";
+ }
+ if( _userSession.getProfile() != null ){
+ return _userSession.getUsername() + "/" + _userSession.getProfile();
+ }
+ return _userSession.getUsername();
+ }
+
public ProfileManager getProfileManager() {
return getBean(ProfileManager.class);
}
@@ -184,6 +630,7 @@ public ProfileManager getProfileManager() {
//--------------------------------------------------------------------------
public InputMethod getInputMethod() {
+ checkCleared("input method not available");
return _input;
}
@@ -192,6 +639,7 @@ public void setInputMethod(final InputMethod m) {
}
public OutputMethod getOutputMethod() {
+ checkCleared("output method not available");
return _output;
}
@@ -200,6 +648,7 @@ public void setOutputMethod(final OutputMethod m) {
}
public Map getStartupErrors() {
+ checkCleared("startup errors not available");
return _startupErrors;
}
@@ -209,6 +658,7 @@ public void setStartupErrors(final Map errs) {
}
public boolean isStartupError() {
+ checkCleared("is startup error not available");
return _startupError;
}
@@ -234,6 +684,7 @@ public void setLogger(final Logger l) {
* @return The map of headers from the request
*/
public Map getHeaders() {
+ checkCleared("headers not available");
return _headers;
}
@@ -249,6 +700,7 @@ public void setHeaders(Map headers) {
}
public JeevesServlet getServlet() {
+ checkCleared("servlet not available");
return _servlet;
}
@@ -269,20 +721,27 @@ public void executeOnly(final LocalServiceRequest request) throws Exception {
new TransactionTask() {
@Override
public Void doInTransaction(TransactionStatus transaction) throws Throwable {
- final ServiceContext context = new ServiceContext(request.getService(), getApplicationContext(), htContexts, getEntityManager());
- UserSession session = ServiceContext.this._userSession;
- if (session == null) {
- session = new UserSession();
- }
+ final ServiceManager serviceManager = getApplicationContext().getBean(ServiceManager.class);
+ final ServiceContext localServiceContext = serviceManager.createServiceContext(request.getService(), getApplicationContext());
+
try {
- final ServiceManager serviceManager = context.getBean(ServiceManager.class);
- serviceManager.dispatch(request, session, context);
+ UserSession session = ServiceContext.this._userSession;
+ if (session == null) {
+ session = new UserSession();
+ }
+ localServiceContext.setUserSession(session);
+
+ serviceManager.dispatch(request, session, localServiceContext);
} catch (Exception e) {
Log.error(Log.XLINK_PROCESSOR, "Failed to parse result xml" + request.getService());
throw new ServiceExecutionFailedException(request.getService(), e);
} finally {
- // set old context back as thread local
- setAsThreadLocal();
+ if( localServiceContext == ServiceContext.get()){
+ // dispatch failed to clear cleanup localServiceContext
+ // restoring back as thread local
+ ServiceContext.this.setAsThreadLocal();
+ }
+ localServiceContext.clear();
}
return null;
}
@@ -320,6 +779,17 @@ public void setStatusCode(Integer statusCode) {
this._statusCode = statusCode;
}
+ @Override
+ public String toString() {
+ final StringBuilder sb = new StringBuilder("ServiceContext ");
+ sb.append("'").append(_service).append('\'');
+ sb.append(" ").append(_input).append(" --> ").append(_output);
+ sb.append(" { _language='").append(_language).append('\'');
+ sb.append(", _ipAddress='").append(_ipAddress).append('\'');
+ sb.append('}');
+
+ return sb.toString();
+ }
}
//=============================================================================
diff --git a/core/src/main/java/jeeves/server/dispatchers/ServiceManager.java b/core/src/main/java/jeeves/server/dispatchers/ServiceManager.java
index 0bc08f24274f..37b64e69325e 100644
--- a/core/src/main/java/jeeves/server/dispatchers/ServiceManager.java
+++ b/core/src/main/java/jeeves/server/dispatchers/ServiceManager.java
@@ -1,5 +1,5 @@
//=============================================================================
-//=== Copyright (C) 2001-2005 Food and Agriculture Organization of the
+//=== Copyright (C) 2001-2021 Food and Agriculture Organization of the
//=== United Nations (FAO-UN), United Nations World Food Programme (WFP)
//=== and United Nations Environment Programme (UNEP)
//===
@@ -49,12 +49,14 @@
import org.fao.geonet.Constants;
import org.fao.geonet.NodeInfo;
import org.fao.geonet.Util;
+import org.fao.geonet.domain.User;
import org.fao.geonet.exceptions.JeevesException;
import org.fao.geonet.exceptions.NotAllowedEx;
import org.fao.geonet.exceptions.ServiceNotFoundEx;
import org.fao.geonet.exceptions.ServiceNotMatchedEx;
import org.fao.geonet.kernel.GeonetworkDataDirectory;
import org.fao.geonet.kernel.setting.SettingManager;
+import org.fao.geonet.repository.UserRepository;
import org.fao.geonet.util.XslUtil;
import org.fao.geonet.utils.BLOB;
import org.fao.geonet.utils.BinaryFile;
@@ -64,6 +66,9 @@
import org.fao.geonet.utils.Xml;
import org.jdom.Element;
import org.springframework.context.ConfigurableApplicationContext;
+import org.springframework.web.context.request.RequestAttributes;
+import org.springframework.web.context.request.RequestContextHolder;
+import org.springframework.web.context.request.ServletRequestAttributes;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
@@ -78,6 +83,10 @@
import java.util.Map.Entry;
//=============================================================================
+
+/**
+ * Handles operations on services.
+ */
public class ServiceManager {
private Map> htServices = new HashMap>(100);
private Map htContexts = new HashMap();
@@ -342,6 +351,106 @@ public void addErrorPage(Element err) throws Exception {
vErrorPipe.add(buildErrorPage(err));
}
+ /**
+ * Used to create an appContext placeholder service context for initialization, background tasks and activities.
+ *
+ * This ServiceContext is used during initialization and is independent of any user session.
+ * This instance is the responsibility of a specific manager and will produce a warning if an attempt is made
+ * to clear or assign a user session.
+ *
+ * @param appContext GeoNetwork Application Context
+ * @return new service context with limited functionality
+ */
+ public ServiceContext.AppHandlerServiceContext createAppHandlerServiceContext(ConfigurableApplicationContext appContext) {
+ ServiceContext.AppHandlerServiceContext context = new ServiceContext.AppHandlerServiceContext("AppHandler", appContext, htContexts, entityManager);
+ context.setBaseUrl(baseUrl);
+ context.setMaxUploadSize(maxUploadSize);
+ context.setServlet(servlet);
+
+ return context;
+ }
+
+ /**
+ * Used to create an appContext placeholder service context for a specific manager used for initialization, background tasks and activities.
+ *
+ * Previously a single shared AppHandler service context was managed by Jeeves.
+ *
+ * This instance is the responsibility of a specific manager and will produce a warning if an attempt is made
+ * to clear or assign a user session.
+ *
+ * @param manager Manager name such as AppContext or harvester
+ * @return new service context with limited functionality
+ */
+ public ServiceContext.AppHandlerServiceContext createAppHandlerServiceContext(String manager, ServiceContext parent) {
+ ServiceContext.AppHandlerServiceContext context = new ServiceContext.AppHandlerServiceContext(manager, parent.getApplicationContext(), htContexts, entityManager);
+ context.setBaseUrl(baseUrl);
+ context.setMaxUploadSize(maxUploadSize);
+ context.setServlet(servlet);
+
+ return context;
+ }
+
+
+ /**
+ * Used to create a serviceContext for later use; the object provided with the new serviceContext is responsible
+ * for cleanup.
+ *
+ * final ServiceContext taskContext = serviceMan.createServiceContext( serviceContext, "task");
+ * return new Runnable(){
+ * public void run(){
+ * try {
+ * taskContext.setAsThreadLocal();
+ *
+ * }
+ * finally {
+ * taskContext.clear();
+ * }
+ * }
+ * };
+ *
+ *
+ * @param name
+ * @param parent
+ * @return new service context
+ */
+ public ServiceContext createServiceContext(String name, ServiceContext parent ){
+ ServiceContext context = createServiceContext( name, parent.getApplicationContext());
+ context.setBaseUrl(parent.getBaseUrl());
+ context.setLanguage(parent.getLanguage());
+ context.setUserSession(null); // because this is intended for later use user session not included
+ context.setIpAddress(parent.getIpAddress());
+ context.setMaxUploadSize(parent.getMaxUploadSize());
+ context.setServlet(parent.getServlet());
+
+ return context;
+ }
+ /**
+ * Create an internal service context, not associated with a user or ip address.
+ *
+ * When creating a ServiceContext you are responsible for managing its use on the current thread and any cleanup.
+ *
+ * Using auto closable:
+ *
+ * try(ServiceContext context = serviceMan.createServiceContext("AppHandler", appContext)){
+ * ...
+ * }
+ *
+ *
+ * Or manually:
+ *
+ * try {
+ * context = serviceMan.createServiceContext("AppHandler", appContext);
+ * context.setAsThreadLocal();
+ * ...
+ * } finally {
+ * context.clearAsThreadLocal();
+ * context.clear();
+ * }
+ *
+ * @param name context name
+ * @param appContext application context
+ * @return ServiceContext
+ */
public ServiceContext createServiceContext(String name, ConfigurableApplicationContext appContext) {
ServiceContext context = new ServiceContext(name, appContext, htContexts,
entityManager);
@@ -356,6 +465,36 @@ public ServiceContext createServiceContext(String name, ConfigurableApplicationC
return context;
}
+ /**
+ * Used to create a ServiceContext.
+ *
+ * When creating a ServiceContext you are responsible for managing its use on the current thread and any cleanup.
+ *
+ * Using auto closable:
+ *
+ * try(ServiceContext context = serviceMan.createServiceContext("md.thumbnail.upload", lang, request)){
+ * ...
+ * }
+ *
+ * Or manually:
+ *
+ * ServiceContext context = serviceMan.createServiceContext("md.thumbnail.upload", lang, request);
+ * try {
+ *
+ * context.setAsThreadLocal();
+ * ...
+ * } finally {
+ * context.clearAsThreadLocal();
+ * context.clear();
+ * }
+ *
+ * The serviceContext is created using the ApplicationContext from {@link ApplicationContextHolder}.
+ *
+ * @param name context name
+ * @param lang
+ * @param request servlet request
+ * @return ServiceContext
+ */
public ServiceContext createServiceContext(String name, String lang, HttpServletRequest request) {
ServiceContext context = new ServiceContext(name, ApplicationContextHolder.get(), htContexts, entityManager);
@@ -381,10 +520,80 @@ public ServiceContext createServiceContext(String name, String lang, HttpServlet
return context;
}
+ /**
+ * Create a transitory service context for use in a single try-with-resources block.
+ *
+ * Makes use of current http session if available (the usual case), or a temporary user session using the provided
+ * userId (when used from a background task or job).
+ *
+ * Code creating a service context is responsible for handling resources and cleanup.
+ *
+ *
+ * try( ServiceContext context = createServiceContext("approve_record", event.getUser()) ){
+ * ... utility methods can now use ServiceContext.get() ...
+ * }
+ *
+ * @param name service context name for approval record handling
+ * @param defaultUserId If a user session is not available, this id is used to create a temporary user session
+ * @return service context for approval record event handling
+ */
+ public ServiceContext createServiceContext(String name, int defaultUserId){
+ // Uses the current http session when available, otherwise creates a temporary session for defaultUserId
+ ConfigurableApplicationContext applicationContext = ApplicationContextHolder.get();
+
+ ServiceContext context;
+
+ HttpServletRequest request = getCurrentHttpRequest();
+ if( request != null ) {
+ // reuse user session from http request
+ context = createServiceContext(name, "?", request);
+ }
+ else {
+ // Not in an http request, creating a temporary user session with provided userId
+ context = createServiceContext(name, applicationContext);
+
+ UserRepository userRepository = applicationContext.getBean(UserRepository.class);
+ Optional user = userRepository.findById( defaultUserId );
+ if( user.isPresent() ){
+ UserSession session = new UserSession();
+ session.loginAs(user.get());
+ context.setUserSession(session);
+ }
+ }
+ context.setAsThreadLocal();
+
+ return context;
+ }
+
+ /**
+ * Look up current HttpServletRequest if running in a servlet dispatch.
+ *
+ * @return http request, or null if running in a background task
+ */
+ private static HttpServletRequest getCurrentHttpRequest(){
+ RequestAttributes requestAttributes = RequestContextHolder.getRequestAttributes();
+ if (requestAttributes instanceof ServletRequestAttributes) {
+ HttpServletRequest request = ((ServletRequestAttributes)requestAttributes).getRequest();
+ return request;
+ }
+ return null; // not called during http request
+ }
+
+
+ /**
+ * Dispatch service request, creating a service context with the provided user session.
+ *
+ * @param req service request
+ * @param session user session
+ */
public void dispatch(ServiceRequest req, UserSession session) {
ServiceContext context = new ServiceContext(req.getService(), ApplicationContextHolder.get(),
htContexts, entityManager);
- dispatch(req, session, context);
+ try {
+ dispatch(req, session, context);
+ } finally {
+ context.clear();
+ }
}
//---------------------------------------------------------------------------
@@ -396,6 +605,14 @@ public void dispatch(ServiceRequest req, UserSession session) {
//--- Dispatching methods
//---
//---------------------------------------------------------------------------
+
+ /**
+ * Dispatch service request, configuring context with the provided user session.
+ *
+ * @param req service request
+ * @param session user session
+ * @param context service context
+ */
public void dispatch(ServiceRequest req, UserSession session, ServiceContext context) {
context.setBaseUrl(baseUrl);
context.setLanguage(req.getLanguage());
@@ -408,6 +625,11 @@ public void dispatch(ServiceRequest req, UserSession session, ServiceContext con
context.setServlet(servlet);
if (startupError) context.setStartupErrors(startupErrors);
+ ServiceContext priorContext = ServiceContext.get();
+ if( priorContext != null){
+ priorContext.debug("ServiceManager dispatch replacing current ServiceContext");
+ priorContext.clearAsThreadLocal();
+ }
context.setAsThreadLocal();
//--- invoke service and build result
@@ -505,6 +727,20 @@ public void dispatch(ServiceRequest req, UserSession session, ServiceContext con
handleError(req, response, context, srvInfo, e);
}
}
+ finally {
+ ServiceContext checkContext = ServiceContext.get();
+ if( checkContext == context ) {
+ context.clearAsThreadLocal();
+ }
+ else {
+ context.debug("ServiceManager dispatch context was replaced before cleanup");
+ }
+ if( priorContext != null){
+ priorContext.debug("ServiceManager dispatch restoring ServiceContext");
+ priorContext.setAsThreadLocal();
+ }
+ }
}
//---------------------------------------------------------------------------
@@ -844,7 +1080,7 @@ else if (outPage.isBLOB()) {
} finally {
timerContext.stop();
}
-
+
if (outPage.getContentType() != null
&& outPage.getContentType().startsWith("text/plain")) {
req.beginStream(outPage.getContentType(), -1, "attachment;", cache);
diff --git a/core/src/main/java/jeeves/server/sources/ServiceRequest.java b/core/src/main/java/jeeves/server/sources/ServiceRequest.java
index 063025dbf294..7098075ee867 100644
--- a/core/src/main/java/jeeves/server/sources/ServiceRequest.java
+++ b/core/src/main/java/jeeves/server/sources/ServiceRequest.java
@@ -196,21 +196,36 @@ public boolean hasJSONOutput() {
return jsonOutput;
}
+ /**
+ * Write provided response element, ending the stream.
+ *
+ * @param response
+ * @throws IOException
+ */
public void write(Element response) throws IOException {
Xml.writeResponse(new Document(response), outStream);
endStream();
}
/**
- * called when the system starts streaming data
+ * Called when the system starts streaming data
+ * @param contentType mime type
+ * @param cache true if content can be cached, false to disable caching for dynamic content
*/
-
public void beginStream(String contentType, boolean cache) {
}
//---------------------------------------------------------------------------
- public void beginStream(String contentType, int contentLength, String contentDisp,
+ /**
+ * Called when the system starts streaming data, filling in appropriate header details supported by protocol.
+ *
+ * @param contentType mime type
+ * @param contentLength content length in bytes if known, -1 if unknown
+ * @param contentDisposition content disposition (inline|attachment|attachment;filename="filename.jpg")
+ * @param cache true if content can be cached, false to disable caching for dynamic content
+ */
+ public void beginStream(String contentType, int contentLength, String contentDisposition,
boolean cache) {
}
@@ -219,7 +234,6 @@ public void beginStream(String contentType, int contentLength, String contentDis
/**
* called when the system ends streaming data
*/
-
public void endStream() throws IOException {
}
diff --git a/core/src/main/java/org/fao/geonet/ContextContainer.java b/core/src/main/java/org/fao/geonet/ContextContainer.java
index e6f7022df0fe..eb607d5e7e16 100644
--- a/core/src/main/java/org/fao/geonet/ContextContainer.java
+++ b/core/src/main/java/org/fao/geonet/ContextContainer.java
@@ -37,8 +37,10 @@
public class ContextContainer implements ApplicationContextAware {
- //private GeonetContext geoctx;
+ /** Shared service context for GeoNetwork application */
private ServiceContext srvctx;
+
+ /** Shared application context provided during initialization */
private ApplicationContext ctx;
public ContextContainer() {
@@ -46,30 +48,44 @@ public ContextContainer() {
}
- /*
- public GeonetContext getGeoctx() {
- return geoctx;
- }
-
- public void setGeoctx(GeonetContext geoctx) {
-
- this.geoctx = geoctx;
- }
- */
-
+ /**
+ * Service context shared for GeoNetwork application.
+ *
+ * This is an application wide service context with limited functionality, no user session for example.
+ *
+ * Use of {@link ServiceContext#get()} is recommended in most situations for access
+ * to current user session.
+ *
+ * @return Shared service context for GeoNetwork application
+ */
public ServiceContext getSrvctx() {
return srvctx;
}
+ /**
+ * Service context shared for GeoNetwork application.
+ *
+ * @param srvctx Shared app handler service context for GeoNetwork application
+ */
public void setSrvctx(ServiceContext srvctx) {
this.srvctx = srvctx;
}
+ /**
+ * GeoNetwork application context provided during initialization.
+ *
+ * @return geonetwork application context
+ */
public ApplicationContext getApplicationContext() {
if (ctx == null) throw new RuntimeException("applicationcontext not yet initialized");
return ctx;
}
+ /**
+ * GeoNetwork application context provided during initialization.
+ * @param arg0
+ * @throws BeansException
+ */
public void setApplicationContext(ApplicationContext arg0)
throws BeansException {
diff --git a/core/src/main/java/org/fao/geonet/GeonetContext.java b/core/src/main/java/org/fao/geonet/GeonetContext.java
index 9453be1f80bd..c1419b0fb58e 100644
--- a/core/src/main/java/org/fao/geonet/GeonetContext.java
+++ b/core/src/main/java/org/fao/geonet/GeonetContext.java
@@ -25,10 +25,12 @@
import com.google.common.annotations.VisibleForTesting;
-import org.fao.geonet.kernel.metadata.StatusActions;
import org.fao.geonet.util.ThreadPool;
import org.springframework.context.ApplicationContext;
+/**
+ * GeoNetwork context managing application context and a shared thread pool.
+ */
public class GeonetContext {
private final ApplicationContext _springAppContext;
private final ThreadPool _threadPool;
diff --git a/core/src/main/java/org/fao/geonet/api/records/attachments/AbstractStore.java b/core/src/main/java/org/fao/geonet/api/records/attachments/AbstractStore.java
index 575105f21029..a6bfd1364048 100644
--- a/core/src/main/java/org/fao/geonet/api/records/attachments/AbstractStore.java
+++ b/core/src/main/java/org/fao/geonet/api/records/attachments/AbstractStore.java
@@ -125,23 +125,34 @@ protected int canEdit(ServiceContext context, String metadataUuid, MetadataResou
boolean canEdit = getAccessManager(context).canEdit(context, String.valueOf(metadataId));
if ((visibility == null && !canEdit) || (visibility == MetadataResourceVisibility.PRIVATE && !canEdit)) {
throw new SecurityException(String.format("User '%s' does not have privileges to access '%s' resources for metadata '%s'.",
- context.getUserSession() != null ?
- context.getUserSession().getUsername() + "/" + context.getUserSession()
- .getProfile() :
- "anonymous", visibility == null ? "any" : visibility, metadataUuid));
+ context.userName(), visibility == null ? "any" : visibility, metadataUuid));
}
return metadataId;
}
+ /**
+ *
+ * @param context Service context used to determine user
+ * @param metadataUuid UUID of metadata record to check
+ * @param visibility resource visibility
+ * @param approved
+ * @return The metadata id used to access resources, obtained and approved from provided metadataUuid
+ * @throws Exception A security exception if the user is not allowed to access these resources
+ */
protected int canDownload(ServiceContext context, String metadataUuid, MetadataResourceVisibility visibility, Boolean approved)
throws Exception {
int metadataId = getAndCheckMetadataId(metadataUuid, approved);
if (visibility == MetadataResourceVisibility.PRIVATE) {
- boolean canDownload = getAccessManager(context).canDownload(context, String.valueOf(metadataId));
- if (!canDownload) {
- throw new SecurityException(String.format(
+ if(context instanceof ServiceContext.AppHandlerServiceContext) {
+ // internal access granted
+ }
+ else {
+ boolean canDownload = getAccessManager(context).canDownload(context, String.valueOf(metadataId));
+ if (!canDownload) {
+ throw new SecurityException(String.format(
"Current user can't download resources for metadata '%s' and as such can't access the requested resource.",
metadataUuid));
+ }
}
}
return metadataId;
diff --git a/core/src/main/java/org/fao/geonet/kernel/AccessManager.java b/core/src/main/java/org/fao/geonet/kernel/AccessManager.java
index 5ae066e1c55a..8d4699b8bdeb 100644
--- a/core/src/main/java/org/fao/geonet/kernel/AccessManager.java
+++ b/core/src/main/java/org/fao/geonet/kernel/AccessManager.java
@@ -99,13 +99,27 @@ public class AccessManager {
* perform on that metadata (an set of OPER_XXX as keys). If the user is authenticated the
* permissions are taken from the groups the user belong. If the user is not authenticated, a
* dynamic group is assigned depending on user location (0 for internal and 1 for external).
+ *
+ * @param context service context
+ * @param mdId metadata record to check
+ * @param ip IP Address used to determine base operations
+ * @return set of base operations along with any additional reserved operations available to user
*/
public Set getOperations(ServiceContext context, String mdId, String ip) throws Exception {
return getOperations(context, mdId, ip, null);
}
/**
- * TODO javadoc.
+ * Given a user(session) a list of groups and a metadata returns all operations that user can
+ * perform on that metadata (an set of OPER_XXX as keys). If the user is authenticated the
+ * permissions are taken from the groups the user belong. If the user is not authenticated, a
+ * dynamic group is assigned depending on user location (0 for internal and 1 for external).
+ *
+ * @param context service context
+ * @param mdId metadata record to check
+ * @param ip IP Address used to determine base operations (if not provided)
+ * @param operations base operations
+ * @return set of base operations along with any additional reserved operations available to user
*/
public Set getOperations(ServiceContext context, String mdId, String ip, Collection operations) throws Exception {
Set results;
diff --git a/core/src/main/java/org/fao/geonet/kernel/DataManager.java b/core/src/main/java/org/fao/geonet/kernel/DataManager.java
index 500e5ab882c1..f76579667c0a 100644
--- a/core/src/main/java/org/fao/geonet/kernel/DataManager.java
+++ b/core/src/main/java/org/fao/geonet/kernel/DataManager.java
@@ -3,7 +3,7 @@
//=== DataManager
//===
//=============================================================================
-//=== Copyright (C) 2001-2007 Food and Agriculture Organization of the
+//=== Copyright (C) 2001-2021 Food and Agriculture Organization of the
//=== United Nations (FAO-UN), United Nations World Food Programme (WFP)
//=== and United Nations Environment Programme (UNEP)
//===
@@ -122,24 +122,33 @@ public static void setNamespacePrefix(final Element md) {
}
/**
- * Init Data manager and refresh index if needed. Can also be called after GeoNetwork startup in order to rebuild the lucene index
+ * Init data manager components.
*
- * @param force Force reindexing all from scratch
+ * @param context Service context used for setup
**/
- public void init(ServiceContext context, Boolean force) throws Exception {
- this.metadataIndexer.init(context, force);
- this.metadataManager.init(context, force);
- this.metadataUtils.init(context, force);
+ public void init(ServiceContext context) throws Exception {
+ this.metadataIndexer.init(context);
+ this.metadataManager.init(context);
+ this.metadataUtils.init(context);
// FIXME this shouldn't login automatically ever!
- if (context.getUserSession() == null) {
- LOGGER_DATA_MANAGER.debug("Automatically login in as Administrator. Who is this? Who is calling this?");
- UserSession session = new UserSession();
- context.setUserSession(session);
- session.loginAs(new User().setUsername("admin").setId(-1).setProfile(Profile.Administrator));
- LOGGER_DATA_MANAGER.debug("Hopefully this is cron job or routinely background task. Who called us?",
- new Exception("Dummy Exception to know the stacktrace"));
- }
+// if (context.getUserSession() == null) {
+// LOGGER_DATA_MANAGER.debug("Automatically login in as Administrator. Who is this? Who is calling this?");
+// UserSession session = new UserSession();
+// context.setUserSession(session);
+// session.loginAs(new User().setUsername("admin").setId(-1).setProfile(Profile.Administrator));
+// LOGGER_DATA_MANAGER.debug("Hopefully this is cron job or routinely background task. Who called us?",
+// new Exception("Dummy Exception to know the stacktrace"));
+// }
+ }
+
+ /**
+ * Clean up data manager during application shutdown.
+ */
+ public void destroy() throws Exception {
+ this.metadataIndexer.destroy();
+ this.metadataManager.destroy();
+ this.metadataUtils.destroy();
}
@Deprecated
@@ -153,8 +162,8 @@ public synchronized void rebuildIndexForSelection(final ServiceContext context,
}
@Deprecated
- public void batchIndexInThreadPool(ServiceContext context, List> metadataIds) {
- metadataIndexer.batchIndexInThreadPool(context, metadataIds);
+ public void batchIndexInThreadPool(List> metadataIds) {
+ metadataIndexer.batchIndexInThreadPool(metadataIds);
}
@Deprecated
diff --git a/core/src/main/java/org/fao/geonet/kernel/IndexMetadataTask.java b/core/src/main/java/org/fao/geonet/kernel/IndexMetadataTask.java
index ac7db5535f35..d793f21d42b5 100644
--- a/core/src/main/java/org/fao/geonet/kernel/IndexMetadataTask.java
+++ b/core/src/main/java/org/fao/geonet/kernel/IndexMetadataTask.java
@@ -25,11 +25,13 @@
import jeeves.server.context.ServiceContext;
+import jeeves.server.dispatchers.ServiceManager;
import org.fao.geonet.Util;
import org.fao.geonet.constants.Geonet;
import org.fao.geonet.domain.User;
import org.fao.geonet.kernel.search.EsSearchManager;
import org.fao.geonet.utils.Log;
+import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.transaction.TransactionStatus;
import java.io.IOException;
@@ -45,28 +47,37 @@
*/
public final class IndexMetadataTask implements Runnable {
- private final ServiceContext _context;
+ private final String serviceName;
+ private final ServiceManager serviceManager;
private final List> _metadataIds;
private final TransactionStatus _transactionStatus;
private final Set _batchIndex;
private final EsSearchManager searchManager;
+ private final ConfigurableApplicationContext appContext;
private final AtomicInteger indexed;
private User _user;
/**
- * Constructor.
+ * Setup index metadata task to be run.
*
- * @param context context object
+ * The context is used to look up beans for setup and configuration only. The task will create its own serviceContext
+ * to be used during indexing.
+ *
+ * @param context context object responsible for starting the activity
* @param metadataIds the metadata ids to index (either integers or strings)
+ * @param batchIndex Set used to track outstanding tasks
* @param transactionStatus if non-null, wait for the transaction to complete before indexing
+ * @param indexed Used to track number of indexed records
*/
public IndexMetadataTask(@Nonnull ServiceContext context, @Nonnull List> metadataIds, Set batchIndex,
@Nullable TransactionStatus transactionStatus, @Nonnull AtomicInteger indexed) {
this.indexed = indexed;
this._transactionStatus = transactionStatus;
- this._context = context;
+ this.serviceName = context.getService();
this._metadataIds = metadataIds;
this._batchIndex = batchIndex;
+ this.serviceManager = context.getBean(ServiceManager.class);
+ this.appContext = context.getApplicationContext();
this.searchManager = context.getBean(EsSearchManager.class);
batchIndex.add(this);
@@ -76,9 +87,17 @@ public IndexMetadataTask(@Nonnull ServiceContext context, @Nonnull List> metad
}
}
+ /**
+ * Perform index task in a separate thread.
+ *
+ * Task waits for transactionStatus (if available) to be completed, and for servlet to be initialized.
+ *
+ */
+ @Override
public void run() {
- try {
- _context.setAsThreadLocal();
+ try (ServiceContext indexMedataContext = serviceManager.createServiceContext(serviceName+":IndexTask", appContext)) {
+ indexMedataContext.setAsThreadLocal();
while (_transactionStatus != null && !_transactionStatus.isCompleted()) {
try {
Thread.sleep(100);
@@ -87,7 +106,7 @@ public void run() {
}
}
// poll context to see whether servlet is up yet
- while (!_context.isServletInitialized()) {
+ while (!indexMedataContext.isServletInitialized()) {
if (Log.isDebugEnabled(Geonet.DATA_MANAGER)) {
Log.debug(Geonet.DATA_MANAGER, "Waiting for servlet to finish initializing..");
}
@@ -98,7 +117,7 @@ public void run() {
}
}
- DataManager dataManager = _context.getBean(DataManager.class);
+ DataManager dataManager = indexMedataContext.getBean(DataManager.class);
// servlet up so safe to index all metadata that needs indexing
for (Object metadataId : _metadataIds) {
this.indexed.incrementAndGet();
@@ -113,8 +132,8 @@ public void run() {
+ "\n" + Util.getStackTrace(e));
}
}
- if (_user != null && _context.getUserSession().getUserId() == null) {
- _context.getUserSession().loginAs(_user);
+ if (_user != null && indexMedataContext.getUserSession().getUserId() == null) {
+ indexMedataContext.getUserSession().loginAs(_user);
}
searchManager.forceIndexChanges();
} finally {
diff --git a/core/src/main/java/org/fao/geonet/kernel/SvnManager.java b/core/src/main/java/org/fao/geonet/kernel/SvnManager.java
index 798b4b07814f..41ca715ae80d 100644
--- a/core/src/main/java/org/fao/geonet/kernel/SvnManager.java
+++ b/core/src/main/java/org/fao/geonet/kernel/SvnManager.java
@@ -90,11 +90,19 @@
import static org.fao.geonet.kernel.setting.Settings.METADATA_VCS;
+/**
+ * Subversion manager.
+ */
public class SvnManager implements AfterCommitTransactionListener, BeforeRollbackTransactionListener {
private static String username = "geonetwork";
private static String password = "geonetwork";
// configure via setter in Geonetwork app
- private ServiceContext context;
+ /**
+ * Shared application handler service context provided by GeoNetwork application for bean lookup.
+ *
+ * In most cases a ServiceContext is provided for access to the user session.
+ */
+ private ServiceContext appHandlerContext;
// configure in init method
private SVNURL repoUrl;
// configure in spring configuration of bean
@@ -279,7 +287,7 @@ public void setSubversionPath(String subversionPath) {
}
public void setContext(ServiceContext context) {
- this.context = context;
+ this.appHandlerContext = context;
}
/**
@@ -553,7 +561,7 @@ public void commitMetadata(String id, ISVNEditor editor) throws Exception {
return;
}
- DataManager dataMan = context.getBean(DataManager.class);
+ DataManager dataMan = appHandlerContext.getBean(DataManager.class);
try {
// get the metadata record and if different commit changes
@@ -694,8 +702,8 @@ private void commitMetadataOwner(ISVNEditor editor, String id) throws Exception
// get owner from the database
Set ids = new HashSet();
ids.add(Integer.valueOf(id));
- AbstractMetadata metadata = this.context.getBean(IMetadataUtils.class).findOne(id);
- User user = this.context.getBean(UserRepository.class).findById(metadata.getSourceInfo().getOwner()).get();
+ AbstractMetadata metadata = appHandlerContext.getBean(IMetadataUtils.class).findOne(id);
+ User user = appHandlerContext.getBean(UserRepository.class).findById(metadata.getSourceInfo().getOwner()).get();
// Backwards compatibility. Format the metadata as XML in same format as previous versions.
Element xml = new Element("results").addContent(
new Element("record")
diff --git a/core/src/main/java/org/fao/geonet/kernel/ThesaurusManager.java b/core/src/main/java/org/fao/geonet/kernel/ThesaurusManager.java
index f86346c9b334..867202ced346 100644
--- a/core/src/main/java/org/fao/geonet/kernel/ThesaurusManager.java
+++ b/core/src/main/java/org/fao/geonet/kernel/ThesaurusManager.java
@@ -43,6 +43,7 @@
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
+import jeeves.server.dispatchers.ServiceManager;
import org.fao.geonet.Util;
import org.fao.geonet.constants.Geonet;
import org.fao.geonet.domain.AbstractMetadata;
@@ -73,6 +74,7 @@
import jeeves.server.context.ServiceContext;
import jeeves.xlink.Processor;
+import org.springframework.context.ConfigurableApplicationContext;
public class ThesaurusManager implements ThesaurusFinder {
@@ -149,7 +151,17 @@ private void batchBuildTable(boolean synchRun, ServiceContext context, Path thes
try {
Runnable worker = new InitThesauriTableTask(context, thesauriDir);
if (synchRun) {
- worker.run();
+ ServiceContext restore = ServiceContext.get();
+ try {
+ if( restore != null){
+ restore.clearAsThreadLocal();
+ }
+ worker.run();
+ } finally {
+ if( restore != null){
+ restore.setAsThreadLocal();
+ }
+ }
} else {
executor = Executors.newFixedThreadPool(1);
executor.execute(worker);
@@ -546,17 +558,20 @@ public Element buildResultfromThTable(ServiceContext context) throws SQLExceptio
*/
final class InitThesauriTableTask implements Runnable {
- private final ServiceContext context;
+ //private final ServiceContext context;
private final Path thesauriDir;
+ private final ServiceManager serviceManager;
+ private final ConfigurableApplicationContext appContext;
InitThesauriTableTask(ServiceContext context, Path thesauriDir) {
- this.context = context;
+ //this.context = context;
+ this.serviceManager = context.getBean(ServiceManager.class);
+ this.appContext = context.getApplicationContext();
this.thesauriDir = thesauriDir;
}
public void run() {
- context.setAsThreadLocal();
- try {
+ try (ServiceContext context = serviceManager.createServiceContext(Geonet.THESAURUS_MAN, appContext)) {
// poll context to see whether servlet is up yet
while (!context.isServletInitialized()) {
if (Log.isDebugEnabled(Geonet.THESAURUS_MAN)) {
diff --git a/core/src/main/java/org/fao/geonet/kernel/backup/ArchiveAllMetadataJob.java b/core/src/main/java/org/fao/geonet/kernel/backup/ArchiveAllMetadataJob.java
index a0edb9b06e4a..cea3a563f9c5 100644
--- a/core/src/main/java/org/fao/geonet/kernel/backup/ArchiveAllMetadataJob.java
+++ b/core/src/main/java/org/fao/geonet/kernel/backup/ArchiveAllMetadataJob.java
@@ -93,22 +93,25 @@ public class ArchiveAllMetadataJob extends QuartzJobBean {
@Override
protected void executeInternal(JobExecutionContext jobContext) throws JobExecutionException {
- ServiceContext serviceContext = serviceManager.createServiceContext("backuparchive", context);
- serviceContext.setLanguage("eng");
- serviceContext.setAsThreadLocal();
+ try (ServiceContext serviceContext = serviceManager.createServiceContext("backuparchive", context)) {
+ serviceContext.setLanguage("eng");
+ serviceContext.setAsThreadLocal();
- ApplicationContextHolder.set(this.context);
+ // note: perhaps already done by setAsThreadLocal() above
+ ApplicationContextHolder.set(this.context);
- if(!settingManager.getValueAsBool(Settings.METADATA_BACKUPARCHIVE_ENABLE)) {
- Log.info(BACKUP_LOG, "Backup archive not enabled");
- return;
- }
+ if(!settingManager.getValueAsBool(Settings.METADATA_BACKUPARCHIVE_ENABLE)) {
+ Log.info(BACKUP_LOG, "Backup archive not enabled");
+ return;
+ }
- try {
- createBackup(serviceContext);
- } catch (Exception e) {
- Log.error(Geonet.GEONETWORK, "Error running " + ArchiveAllMetadataJob.class.getSimpleName(), e);
+ try {
+ createBackup(serviceContext);
+ } catch (Exception e) {
+ Log.error(Geonet.GEONETWORK, "Error running " + ArchiveAllMetadataJob.class.getSimpleName(), e);
+ }
}
+
}
public void createBackup(ServiceContext serviceContext) throws Exception {
diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataIndexer.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataIndexer.java
index 5c2a00a8d5b7..d6cd000c7758 100644
--- a/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataIndexer.java
+++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataIndexer.java
@@ -1,5 +1,5 @@
//=============================================================================
-//=== Copyright (C) 2001-2011 Food and Agriculture Organization of the
+//=== Copyright (C) 2001-2021 Food and Agriculture Organization of the
//=== United Nations (FAO-UN), United Nations World Food Programme (WFP)
//=== and United Nations Environment Programme (UNEP)
//===
@@ -43,12 +43,21 @@
public interface IMetadataIndexer {
/**
- * This is a hopefully soon to be deprecated when no deps on context
- *
- * @param context
+ * Setup metadata indexer using app service context.
+ *
+ * This method will hopefully soon be deprecated once there are no dependencies on the context.
+ *
+ * @param context App Service context used for initial setup
+ * @throws Exception
+ */
+ void init(ServiceContext context) throws Exception;
+
+ /**
+ * Clean up when service is not in use.
+ *
* @throws Exception
*/
- public void init(ServiceContext context, Boolean force) throws Exception;
+ void destroy() throws Exception;
/**
* Force the index to wait until all changes are processed and the next reader obtained will get the latest data.
@@ -78,10 +87,9 @@ public interface IMetadataIndexer {
* Index multiple metadata in a separate thread. Wait until the current transaction commits before starting threads (to make sure that
* all metadata are committed).
*
- * @param context context object
* @param metadataIds the metadata ids to index
*/
- void batchIndexInThreadPool(ServiceContext context, List<?> metadataIds);
+ void batchIndexInThreadPool(List<?> metadataIds);
/**
* Is the platform currently indexing?
diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataManager.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataManager.java
index 006b49340d7b..e337a42db312 100644
--- a/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataManager.java
+++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataManager.java
@@ -53,11 +53,17 @@ public interface IMetadataManager {
/**
* This is a hopefully soon to be deprecated initialization function to replace the @Autowired annotation
*
- * @param context
- * @param force
+ * @param context ServiceContext used for initialization
+ * @throws Exception
+ */
+ void init(ServiceContext context) throws Exception;
+
+ /**
+ * Clean up metadata manager during application shutdown.
+ *
* @throws Exception
*/
- public void init(ServiceContext context, Boolean force) throws Exception;
+ void destroy() throws Exception;
/**
* Removes the record with the id metadataId
diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataUtils.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataUtils.java
index 4dc52beb5b64..8db3815c07d4 100644
--- a/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataUtils.java
+++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataUtils.java
@@ -50,21 +50,28 @@
import jeeves.server.context.ServiceContext;
/**
- * Utility interface for records
+ * Utility interface for working with records.
*
- * @author delawen
+ * Operates as a facade with utility methods orchestrating common operations using a constellation
+ * of beans drawn from across the application.
*
+ * @author delawen
*/
public interface IMetadataUtils {
/**
* This is a hopefully soon to be deprecated initialization function to replace the @Autowired annotation
*
- * @param context
- * @param force
+ * @param appHandlerContext this is the app handler context from jeeves initialization
+ * @throws Exception
+ */
+ public void init(ServiceContext appHandlerContext) throws Exception;
+
+ /**
+ * Clean up during application shutdown.
* @throws Exception
*/
- public void init(ServiceContext context, Boolean force) throws Exception;
+ public void destroy() throws Exception;
/**
* Return the uuid of the record with the defined id
diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataIndexer.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataIndexer.java
index 6bc9c08aec4b..4989e7c7cd54 100644
--- a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataIndexer.java
+++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataIndexer.java
@@ -1,5 +1,5 @@
//=============================================================================
-//=== Copyright (C) 2001-2011 Food and Agriculture Organization of the
+//=== Copyright (C) 2001-2021 Food and Agriculture Organization of the
//=== United Nations (FAO-UN), United Nations World Food Programme (WFP)
//=== and United Nations Environment Programme (UNEP)
//===
@@ -27,6 +27,7 @@
import com.google.common.collect.Multimap;
import jeeves.server.UserSession;
import jeeves.server.context.ServiceContext;
+import jeeves.server.dispatchers.ServiceManager;
import jeeves.xlink.Processor;
import org.apache.commons.lang.StringUtils;
import org.eclipse.jetty.util.ConcurrentHashSet;
@@ -71,7 +72,15 @@
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;
-
+/**
+ * Metadata indexer responsible for updating index in a background executor.
+ *
+ * Helper method exist to schedule records for reindex by id. These methods make use of the service context
+ * of the current thread if needed to access user session.
+ *
+ * This class maintains its own service context for use in the background, and does not have access
+ * to a user session.
+ */
public class BaseMetadataIndexer implements IMetadataIndexer, ApplicationEventPublisherAware {
@Autowired
@@ -110,16 +119,28 @@ public class BaseMetadataIndexer implements IMetadataIndexer, ApplicationEventPu
@Autowired
private Resources resources;
- // FIXME remove when get rid of Jeeves
- private ServiceContext servContext;
-
private ApplicationEventPublisher publisher;
+ /** Private service context managed by service init / destroy for use by metadata indexing tasks. */
+ private ServiceContext indexMetadataTaskContext;
+
public BaseMetadataIndexer() {
}
- public void init(ServiceContext context, Boolean force) throws Exception {
- servContext = context;
+ public void init(ServiceContext context) throws Exception {
+ ServiceManager serviceManager = context.getBean(ServiceManager.class);
+ if( indexMetadataTaskContext == null ) {
+ indexMetadataTaskContext = serviceManager.createServiceContext("_indexMetadataTask", context);
+ } else {
+ context.getLogger().debug("Metadata Indexer already initialized");
+ }
+ }
+
+ public void destroy(){
+ if (indexMetadataTaskContext != null) {
+ indexMetadataTaskContext.clear();
+ indexMetadataTaskContext = null;
+ }
}
@Override
@@ -210,7 +231,7 @@ public synchronized void rebuildIndexXLinkedMetadata(final ServiceContext contex
stringIds.add(id.toString());
}
// execute indexing operation
- batchIndexInThreadPool(context, stringIds);
+ batchIndexInThreadPool(stringIds);
}
}
@@ -247,7 +268,7 @@ public synchronized void rebuildIndexForSelection(final ServiceContext context,
}
// execute indexing operation
- batchIndexInThreadPool(context, listOfIdsToIndex);
+ batchIndexInThreadPool(listOfIdsToIndex);
}
}
@@ -256,11 +277,10 @@ public synchronized void rebuildIndexForSelection(final ServiceContext context,
* transaction commits before starting threads (to make sure that all metadata
* are committed).
*
- * @param context context object
* @param metadataIds the metadata ids to index
*/
@Override
- public void batchIndexInThreadPool(ServiceContext context, List<?> metadataIds) {
+ public void batchIndexInThreadPool(List<?> metadataIds) {
TransactionStatus transactionStatus = null;
try {
@@ -299,11 +319,11 @@ public void batchIndexInThreadPool(ServiceContext context, List> metadataIds)
}
// create threads to process this chunk of ids
- Runnable worker = new IndexMetadataTask(context, subList, batchIndex, transactionStatus, numIndexedTracker);
+ Runnable worker = new IndexMetadataTask(indexMetadataTaskContext, subList, batchIndex, transactionStatus, numIndexedTracker);
executor.execute(worker);
index += count;
}
-
+ // let the started threads finish in the background and then clean up executor
executor.shutdown();
}
@@ -324,6 +344,10 @@ public void indexMetadata(final String metadataId, final boolean forceRefreshRea
throws Exception {
AbstractMetadata fullMd;
+ if (searchManager == null) {
+ searchManager = getServiceContext().getBean(EsSearchManager.class);
+ }
+
try {
Multimap<String, Object> fields = ArrayListMultimap.create();
int id$ = Integer.parseInt(metadataId);
@@ -348,6 +372,11 @@ public void indexMetadata(final String metadataId, final boolean forceRefreshRea
}
fullMd = metadataUtils.findOne(id$);
+ if( fullMd == null){
+ // Metadata record has been subsequently deleted
+ searchManager.delete(metadataId);
+ return;
+ }
final String schema = fullMd.getDataInfo().getSchemaId();
final String createDate = fullMd.getDataInfo().getCreateDate().getDateAndTime();
@@ -614,9 +643,17 @@ public void versionMetadata(ServiceContext context, String id, Element md) throw
}
}
- private ServiceContext getServiceContext() {
+ /**
+ * Service context for the current thread if available, or the one provided during init.
+ *
+ * @return service context for current thread if available, or service context used during init.
+ */
+ protected ServiceContext getServiceContext() {
ServiceContext context = ServiceContext.get();
- return context == null ? servContext : context;
+ if( context != null ){
+ return context; // use ServiceContext from current ThreadLocal
+ }
+ return indexMetadataTaskContext; // backup ServiceContext provided during init
}
public void setApplicationEventPublisher(ApplicationEventPublisher publisher) {
diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataManager.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataManager.java
index ef9ed62c4185..1eb12f2df6f5 100644
--- a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataManager.java
+++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataManager.java
@@ -159,9 +159,15 @@ public void init() {
metadataIndexer.setMetadataManager(this);
}
- public void init(ServiceContext context, Boolean force) throws Exception {
+ /**
+ * Setup using app handler service context.
+ *
+ * @param appHandlerServiceContext
+ * @throws Exception
+ */
+ public void init(ServiceContext appHandlerServiceContext) throws Exception {
try {
- harvestInfoProvider = context.getBean(HarvestInfoProvider.class);
+ harvestInfoProvider = appHandlerServiceContext.getBean(HarvestInfoProvider.class);
} catch (Exception e) {
// If it doesn't exist, that's fine
}
@@ -170,6 +176,10 @@ public void init(ServiceContext context, Boolean force) throws Exception {
searchManager.init(false, java.util.Optional.empty());
}
+ @Override
+ public void destroy() throws Exception {
+ }
+
/**
* Refresh index if needed. Can also be called after GeoNetwork startup in
* order to rebuild the lucene index
@@ -240,7 +250,7 @@ public void synchronizeDbWithIndex(ServiceContext context, Boolean force, Boolea
context.getBean(DataManager.class),
integerList).process(false);
} else {
- metadataIndexer.batchIndexInThreadPool(context, toIndex);
+ metadataIndexer.batchIndexInThreadPool(toIndex);
}
}
diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataUtils.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataUtils.java
index fd9987153266..1b199074692d 100644
--- a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataUtils.java
+++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataUtils.java
@@ -68,7 +68,11 @@ public class BaseMetadataUtils implements IMetadataUtils {
@Autowired
private MetadataRepository metadataRepository;
- // FIXME Remove when get rid of Jeeves
+ /**
+ * Shared application handler service context.
+ *
+ * Used by {@link #getServiceContext()} if current service context unavailable.
+ */
private ServiceContext servContext;
@Autowired
protected IMetadataSchemaUtils metadataSchemaUtils;
@@ -103,8 +107,8 @@ public void setMetadataManager(IMetadataManager metadataManager) {
this.metadataManager = metadataManager;
}
- public void init(ServiceContext context, Boolean force) throws Exception {
- servContext = context;
+ public void init(ServiceContext appHandlerContext) throws Exception {
+ servContext = appHandlerContext;
stylePath = dataDirectory.resolveWebResource(Geonet.Path.STYLESHEETS);
}
@@ -116,6 +120,10 @@ public void init() {
this.metadataIndexer.setMetadataUtils(this);
}
+ @Override
+ public void destroy() throws Exception {
+ servContext = null;
+ }
/**
* @param id
* @return
diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataIndexer.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataIndexer.java
index 9def9ac9f4e0..862ede485167 100644
--- a/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataIndexer.java
+++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataIndexer.java
@@ -39,6 +39,9 @@
import java.util.HashMap;
import java.util.Map;
+/**
+ * MetadataIndexer for indexing draft content in a background executor (see super class for details).
+ */
public class DraftMetadataIndexer extends BaseMetadataIndexer implements IMetadataIndexer {
@Autowired
@@ -48,8 +51,8 @@ public class DraftMetadataIndexer extends BaseMetadataIndexer implements IMetada
EsSearchManager searchManager;
@Override
- public void init(ServiceContext context, Boolean force) throws Exception {
- super.init(context, force);
+ public void init(ServiceContext context) throws Exception {
+ super.init(context);
metadataDraftRepository = context.getBean(MetadataDraftRepository.class);
}
diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataManager.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataManager.java
index 9872a5476bf7..94e1ed1e5324 100644
--- a/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataManager.java
+++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataManager.java
@@ -59,9 +59,9 @@ public void init() {
super.init();
}
- public void init(ServiceContext context, Boolean force) throws Exception {
+ public void init(ServiceContext context) throws Exception {
metadataDraftRepository = context.getBean(MetadataDraftRepository.class);
- super.init(context, force);
+ super.init(context);
}
diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataUtils.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataUtils.java
index 957cf2a48c69..1a7e34100b8b 100644
--- a/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataUtils.java
+++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataUtils.java
@@ -88,9 +88,8 @@ public class DraftMetadataUtils extends BaseMetadataUtils {
private ServiceContext context;
- public void init(ServiceContext context, Boolean force) throws Exception {
- this.context = context;
- super.init(context, force);
+ public void init(ServiceContext appHandlerContext) throws Exception {
+ super.init(appHandlerContext);
}
@Override
@@ -249,7 +248,7 @@ public boolean existsMetadataUuid(String uuid) throws Exception {
public AbstractMetadata findOneByUuid(String uuid) {
AbstractMetadata md = super.findOneByUuid(uuid);
try {
- if (md != null && am.canEdit(context, Integer.toString(md.getId()))) {
+ if (md != null && am.canEdit(getServiceContext(), Integer.toString(md.getId()))) {
AbstractMetadata tmp = metadataDraftRepository.findOneByUuid(uuid);
if (tmp != null) {
md = tmp;
@@ -560,8 +559,8 @@ protected String createDraft(ServiceContext context, String templateId, String g
// --- use StatusActionsFactory and StatusActions class to
// --- change status and carry out behaviours for status changes
- StatusActionsFactory saf = context.getBean(StatusActionsFactory.class);
- StatusActions sa = saf.createStatusActions(context);
+ StatusActionsFactory statusActionsFactory = context.getBean(StatusActionsFactory.class);
+ StatusActions statusActions = statusActionsFactory.createStatusActions(context);
int author = context.getUserSession().getUserIdAsInt();
Integer status = Integer.valueOf(StatusValue.Status.DRAFT);
@@ -580,7 +579,7 @@ protected String createDraft(ServiceContext context, String templateId, String g
List<MetadataStatus> listOfStatusChange = new ArrayList<>(1);
listOfStatusChange.add(metadataStatus);
- sa.onStatusChange(listOfStatusChange);
+ statusActions.onStatusChange(listOfStatusChange);
}
}
@@ -594,7 +593,7 @@ protected String createDraft(ServiceContext context, String templateId, String g
@Override
public void cloneFiles(AbstractMetadata original, AbstractMetadata dest) {
try {
- StoreUtils.copyDataDir(context, original.getUuid(), dest.getUuid(), false);
+ StoreUtils.copyDataDir(getServiceContext(), original.getUuid(), dest.getUuid(), false);
cloneStoreFileUploadRequests(original, dest);
} catch (Exception ex) {
@@ -614,7 +613,7 @@ public void replaceFiles(AbstractMetadata original, AbstractMetadata dest) {
oldApproved=false;
newApproved=true;
}
- StoreUtils.replaceDataDir(context, original.getUuid(), dest.getUuid(), oldApproved, newApproved);
+ StoreUtils.replaceDataDir(getServiceContext(), original.getUuid(), dest.getUuid(), oldApproved, newApproved);
cloneStoreFileUploadRequests(original, dest);
} catch (Exception ex) {
@@ -653,7 +652,7 @@ public void cancelEditingSession(ServiceContext context, String id) throws Excep
* Stores a file upload request in the MetadataFileUploads table.
*/
private void cloneStoreFileUploadRequests(AbstractMetadata original, AbstractMetadata copy) {
- MetadataFileUploadRepository repo = context.getBean(MetadataFileUploadRepository.class);
+ MetadataFileUploadRepository repo = getServiceContext().getBean(MetadataFileUploadRepository.class);
repo.deleteAll(MetadataFileUploadSpecs.hasMetadataId(copy.getId()));
diff --git a/core/src/main/java/org/fao/geonet/kernel/metadata/DefaultStatusActions.java b/core/src/main/java/org/fao/geonet/kernel/metadata/DefaultStatusActions.java
index 2f1b550e6380..0e6a42230953 100644
--- a/core/src/main/java/org/fao/geonet/kernel/metadata/DefaultStatusActions.java
+++ b/core/src/main/java/org/fao/geonet/kernel/metadata/DefaultStatusActions.java
@@ -72,6 +72,8 @@
public class DefaultStatusActions implements StatusActions {
public static final Pattern metadataLuceneField = Pattern.compile("\\{\\{index:([^\\}]+)\\}\\}");
+
+ /** Externally managed service context */
protected ServiceContext context;
protected String language;
protected DataManager dm;
@@ -152,8 +154,8 @@ public void onEdit(int id, boolean minorEdit) throws Exception {
/**
* Called when a record status is added.
*
- * @param listOfStatus
- * @return
+ * @param listOfStatus List of status to update
+ * @return Ids of unchanged metadata records
* @throws Exception
*/
public Set<Integer> onStatusChange(List<MetadataStatus> listOfStatus) throws Exception {
@@ -195,18 +197,39 @@ public Set onStatusChange(List listOfStatus) throws Exc
status.getMetadataId(), status.getStatusValue().getId(), e.getMessage()));
}
- //Throw events
- Log.trace(Geonet.DATA_MANAGER, "Throw workflow events.");
+ // Issue events
+ Log.trace(Geonet.DATA_MANAGER, "Issue workflow events.");
+
+ List<String> unsuccessful = new ArrayList<>();
+ Throwable statusChangeFailure = null;
for (Integer mid : listOfId) {
if (!unchanged.contains(mid)) {
- Log.debug(Geonet.DATA_MANAGER, " > Status changed for record (" + mid + ") to status " + status);
- context.getApplicationContext().publishEvent(new MetadataStatusChanged(
- metadataUtils.findOne(Integer.valueOf(mid)),
- status.getStatusValue(), status.getChangeMessage(),
- status.getUserId()));
+ try {
+ Log.debug(Geonet.DATA_MANAGER, " > Status changed for record (" + mid + ") to status " + status);
+ context.getApplicationContext().publishEvent(new MetadataStatusChanged(
+ metadataUtils.findOne(Integer.valueOf(mid)),
+ status.getStatusValue(), status.getChangeMessage(),
+ status.getUserId()));
+ } catch (Exception error) {
+ Log.error(Geonet.DATA_MANAGER,
+ String.format("Failed to update metadata %s to status %s. Error is: %s",
+ mid, status.getStatusValue(), error.getMessage()),
+ error
+ );
+ unsuccessful.add(String.valueOf(mid));
+ if( statusChangeFailure == null ){
+ statusChangeFailure = error;
+ }
+ }
}
}
-
+ if (!unsuccessful.isEmpty()){
+ throw new Exception(
+ "Unable to change status for metadata records: "+
+ String.join(",", unsuccessful),
+ statusChangeFailure
+ );
+ }
}
return unchanged;
diff --git a/core/src/main/java/org/fao/geonet/kernel/metadata/StatusActions.java b/core/src/main/java/org/fao/geonet/kernel/metadata/StatusActions.java
index 216555857387..af29be4c6427 100644
--- a/core/src/main/java/org/fao/geonet/kernel/metadata/StatusActions.java
+++ b/core/src/main/java/org/fao/geonet/kernel/metadata/StatusActions.java
@@ -31,12 +31,32 @@
import org.fao.geonet.domain.ISODate;
import org.fao.geonet.domain.MetadataStatus;
+/**
+ * Facade performing actions with record status.
+ */
public interface StatusActions {
- public void init(ServiceContext context) throws Exception;
-
- public void onEdit(int id, boolean minorEdit) throws Exception;
-
- public Set<Integer> onStatusChange(List<MetadataStatus> status) throws Exception;
+ /**
+ * Setup using provided externally managed service context.
+ *
+ * @param context Externally managed service context.
+ */
+ void init(ServiceContext context) throws Exception;
+
+ /**
+ * Called when a record is edited to set/reset status.
+ *
+ * @param id The metadata id that has been edited.
+ * @param minorEdit If true then the edit was a minor edit.
+ */
+ void onEdit(int id, boolean minorEdit) throws Exception;
+
+ /**
+ * Called when a record status is added.
+ *
+ * @param statusList List of status to update
+ * @return Ids of unchanged metadata records
+ */
+ Set<Integer> onStatusChange(List<MetadataStatus> statusList) throws Exception;
}
diff --git a/core/src/main/java/org/fao/geonet/kernel/metadata/StatusActionsFactory.java b/core/src/main/java/org/fao/geonet/kernel/metadata/StatusActionsFactory.java
index 3d9222958ac0..34e58fed02b3 100644
--- a/core/src/main/java/org/fao/geonet/kernel/metadata/StatusActionsFactory.java
+++ b/core/src/main/java/org/fao/geonet/kernel/metadata/StatusActionsFactory.java
@@ -30,29 +30,39 @@
import java.lang.reflect.Constructor;
+/**
+ * Handle creation of {@link StatusActions} instances.
+ */
public class StatusActionsFactory {
Class<StatusActions> statusRules;
+ /**
+ * StatusAction implementation configured for creation.
+ */
private String className;
private static final String DEFAULT_STATUS_ACTION_CLASS = "org.fao.geonet.kernel.metadata.DefaultStatusActions";
/**
- * Constructor.
- *
+ * Setup factory with default StatusActions implementation.
*/
public StatusActionsFactory() {
new StatusActionsFactory(DEFAULT_STATUS_ACTION_CLASS);
}
+
+ /**
+ * Setup factory to use named StatusActions implementation.
+ * @param className StatusActions implementation
+ */
public StatusActionsFactory(String className) {
this.className = className;
try {
this.statusRules = (Class<StatusActions>) Class.forName(this.className);
} catch (ClassNotFoundException e) {
- Log.error(Geonet.DATA_MANAGER, String.format(
+ Log.warning(Geonet.DATA_MANAGER, String.format(
"Class name '%s' is not found. You MUST use a valid class name loaded in the classpath. " +
"The default status action class is used (ie. %s)",
- this.className
+ this.className, DEFAULT_STATUS_ACTION_CLASS
));
try {
this.statusRules = (Class<StatusActions>) Class.forName(DEFAULT_STATUS_ACTION_CLASS);
@@ -63,7 +73,6 @@ public StatusActionsFactory(String className) {
));
}
}
-
}
/**
@@ -72,10 +81,11 @@ public StatusActionsFactory(String className) {
* @param context ServiceContext from Jeeves
*/
public StatusActions createStatusActions(ServiceContext context) throws Exception {
- Constructor ct = this.statusRules.getConstructor();
- StatusActions sa = ct.newInstance();
- sa.init(context);
- return sa;
+ Constructor constructor = this.statusRules.getConstructor();
+ StatusActions statusAction = constructor.newInstance();
+ statusAction.init(context);
+
+ return statusAction;
}
public String getClassName() {
diff --git a/core/src/main/java/org/fao/geonet/kernel/search/index/IndexingTask.java b/core/src/main/java/org/fao/geonet/kernel/search/index/IndexingTask.java
index dfb4eb305ec0..ab441935ed90 100644
--- a/core/src/main/java/org/fao/geonet/kernel/search/index/IndexingTask.java
+++ b/core/src/main/java/org/fao/geonet/kernel/search/index/IndexingTask.java
@@ -83,14 +83,15 @@ private void indexRecords() {
@Override
protected void executeInternal(JobExecutionContext jobContext) throws JobExecutionException {
- ServiceContext serviceContext = serviceManager.createServiceContext("indexing", applicationContext);
- serviceContext.setLanguage("eng");
- serviceContext.setAsThreadLocal();
+ try (ServiceContext serviceContext = serviceManager.createServiceContext("indexing", applicationContext)) {
+ serviceContext.setLanguage("eng");
+ serviceContext.setAsThreadLocal();
- if (Log.isDebugEnabled(Geonet.INDEX_ENGINE)) {
- Log.debug(Geonet.INDEX_ENGINE, "Indexing task / Start at: "
- + new Date() + ". Checking if any records need to be indexed ...");
+ if (Log.isDebugEnabled(Geonet.INDEX_ENGINE)) {
+ Log.debug(Geonet.INDEX_ENGINE, "Indexing task / Start at: "
+ + new Date() + ". Checking if any records need to be indexed ...");
+ }
+ indexRecords();
}
- indexRecords();
}
}
diff --git a/core/src/main/java/org/fao/geonet/kernel/thumbnail/ThumbnailMaker.java b/core/src/main/java/org/fao/geonet/kernel/thumbnail/ThumbnailMaker.java
index 3ac0fa272d77..7ef5508d35fb 100644
--- a/core/src/main/java/org/fao/geonet/kernel/thumbnail/ThumbnailMaker.java
+++ b/core/src/main/java/org/fao/geonet/kernel/thumbnail/ThumbnailMaker.java
@@ -110,6 +110,11 @@ public static BufferedImage rotate(BufferedImage image, double angle) {
return result;
}
+ /**
+ * Setup for use based on service context configuration.
+ *
+ * @param context AppService context used to look up configuration details
+ */
public void init(ServiceContext context) {
configFilePath = context.getAppPath() + File.separator + CONFIG_FILE;
initMapPrinter();
diff --git a/core/src/main/java/org/fao/geonet/lib/NetLib.java b/core/src/main/java/org/fao/geonet/lib/NetLib.java
index c47e52e34d65..82d4438d2053 100644
--- a/core/src/main/java/org/fao/geonet/lib/NetLib.java
+++ b/core/src/main/java/org/fao/geonet/lib/NetLib.java
@@ -97,6 +97,14 @@ public void setupProxy(SettingManager sm, XmlRequest req) {
//---------------------------------------------------------------------------
+ /**
+ * Setup proxy for http client
+ *
+ * @param context Service context used to lookup settings.
+ * @param client Http implementation
+ * @param requestHost host of the outgoing request, checked against the proxy ignore settings
+ * @return credentials provider configured for proxy authentication
+ */
public CredentialsProvider setupProxy(ServiceContext context, HttpClientBuilder client, String requestHost) {
GeonetContext gc = (GeonetContext) context.getHandlerContext(Geonet.CONTEXT_NAME);
SettingManager sm = gc.getBean(SettingManager.class);
@@ -106,6 +114,9 @@ public CredentialsProvider setupProxy(ServiceContext context, HttpClientBuilder
/**
* Setup proxy for http client
+ * @param sm settings
+ * @param client Http implementation
+ * @param requestHost host of the outgoing request, checked against the proxy ignore settings
*/
public CredentialsProvider setupProxy(SettingManager sm, HttpClientBuilder client, String requestHost) {
boolean enabled = sm.getValueAsBool(Settings.SYSTEM_PROXY_USE, false);
diff --git a/core/src/main/java/org/fao/geonet/resources/Resources.java b/core/src/main/java/org/fao/geonet/resources/Resources.java
index ed9cc4f2cba6..e560c45afbc0 100644
--- a/core/src/main/java/org/fao/geonet/resources/Resources.java
+++ b/core/src/main/java/org/fao/geonet/resources/Resources.java
@@ -283,18 +283,20 @@ public void copyLogo(ServiceContext context, String icon,
try {
Path srcPath = locateResource(locateResourcesDir(context), servletContext, appDir, icon);
String extension = Files.getFileExtension(srcPath.getFileName().toString());
- try(ResourceHolder src = getImage(context, srcPath.getFileName().toString(), srcPath.getParent());
- ResourceHolder des = getWritableImage(context, destName + "." + extension,
- logosDir)) {
- if (src != null) {
- java.nio.file.Files.copy(src.getPath(), des.getPath(), REPLACE_EXISTING, NOFOLLOW_LINKS);
- } else {
- des.abort();
+ try(ResourceHolder src = getImage(context, srcPath.getFileName().toString(), srcPath.getParent())){
+ if( src == null) {
+ throw new IOException("Resource not found: "+srcPath.toString());
+ }
+ try (ResourceHolder des = getWritableImage(context, destName + "." + extension, logosDir)) {
+ if (src != null) {
+ java.nio.file.Files.copy(src.getPath(), des.getPath(), REPLACE_EXISTING, NOFOLLOW_LINKS);
+ } else {
+ des.abort();
+ }
}
}
} catch (IOException e) {
// --- we ignore exceptions here, just log them
-
context.warning("Cannot copy icon -> " + e.getMessage());
context.warning(" (C) Source : " + icon);
context.warning(" (C) Destin : " + logosDir);
diff --git a/core/src/main/java/org/fao/geonet/services/util/z3950/provider/GN/GNResultSet.java b/core/src/main/java/org/fao/geonet/services/util/z3950/provider/GN/GNResultSet.java
index cfa1c7637a94..1a41129429a3 100644
--- a/core/src/main/java/org/fao/geonet/services/util/z3950/provider/GN/GNResultSet.java
+++ b/core/src/main/java/org/fao/geonet/services/util/z3950/provider/GN/GNResultSet.java
@@ -49,7 +49,8 @@
public class GNResultSet extends AbstractIRResultSet implements IRResultSet {
private GNXMLQuery query;
- private ServiceContext srvxtx;
+ /** Service context assumed to be shared */
+ private ServiceContext appHandlerContext;
private int status;
private int fragmentcount;
@@ -61,7 +62,7 @@ public GNResultSet(GNXMLQuery query, Object userInfo, Observer[] observers,
ServiceContext srvctx) throws Exception {
super(observers);
this.query = query;
- this.srvxtx = srvctx;
+ this.appHandlerContext = srvctx;
throw new NotImplementedException("Z39.50 not implemented in ES");
// try {
//
@@ -78,6 +79,16 @@ public GNResultSet(GNXMLQuery query, Object userInfo, Observer[] observers,
// }
}
+ /**
+ * Access the current ServiceContext if available.
+ *
+ * @return current service context if available, or app handler context fallback.
+ */
+ protected ServiceContext getServiceContext(){
+ ServiceContext serviceContext = ServiceContext.get();
+ return serviceContext != null ? serviceContext : appHandlerContext;
+ }
+
public int evaluate(int timeout) {
try {
if (Log.isDebugEnabled(Geonet.SRU))
@@ -96,7 +107,7 @@ public int evaluate(int timeout) {
// perform the search and save search results
- metasearcher.search(this.srvxtx, request, config);
+ metasearcher.search(getServiceContext(), request, config);
// System.out.println("summary:\n" + Xml.getString(s.getSummary()));
// // DEBUG
@@ -136,7 +147,7 @@ public InformationFragment[] getFragment(int startingFragment, int count,
Log.debug(Geonet.SRU, "Search request:\n"
+ Xml.getString(request));
// get result set
- Element result = this.metasearcher.present(this.srvxtx, request,
+ Element result = this.metasearcher.present(getServiceContext(), request,
config);
if (Log.isDebugEnabled(Geonet.SRU))
diff --git a/core/src/main/java/org/fao/geonet/services/util/z3950/provider/GN/GNSearchable.java b/core/src/main/java/org/fao/geonet/services/util/z3950/provider/GN/GNSearchable.java
index 18c53e711bde..016f1d80a6f5 100644
--- a/core/src/main/java/org/fao/geonet/services/util/z3950/provider/GN/GNSearchable.java
+++ b/core/src/main/java/org/fao/geonet/services/util/z3950/provider/GN/GNSearchable.java
@@ -34,9 +34,9 @@
import java.util.Map;
import java.util.Observer;
-//import org.fao.geonet.services.util.z3950.GNSearchTask;
-
/**
+ * Present GeoNetwork search results as JZkit Searchable.
+ *
* @author 'Timo Proescholdt ' interface between JZkit and GN. not currently
* used
*/
diff --git a/core/src/test/java/org/fao/geonet/AbstractCoreIntegrationTest.java b/core/src/test/java/org/fao/geonet/AbstractCoreIntegrationTest.java
index c32bc8a1825a..6f718ba231b4 100644
--- a/core/src/test/java/org/fao/geonet/AbstractCoreIntegrationTest.java
+++ b/core/src/test/java/org/fao/geonet/AbstractCoreIntegrationTest.java
@@ -186,6 +186,15 @@ protected String getGeonetworkNodeId() {
/**
* Create a Service context without a user session but otherwise ready to use.
+ *
+ * This method assigns the created service context to the current thread; you are responsible for managing cleanup.
+ *
+ * try {
+ * context = createServiceContext();
+ * } finally {
+ * context.clearAsThreadLocal();
+ * }
+ *
*/
protected ServiceContext createServiceContext() throws Exception {
final HashMap contexts = new HashMap();
diff --git a/core/src/test/java/org/fao/geonet/GeonetTestFixture.java b/core/src/test/java/org/fao/geonet/GeonetTestFixture.java
index bcb9c42dfdbe..7df1ef60ef86 100644
--- a/core/src/test/java/org/fao/geonet/GeonetTestFixture.java
+++ b/core/src/test/java/org/fao/geonet/GeonetTestFixture.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2001-2016 Food and Agriculture Organization of the
+ * Copyright (C) 2001-2021 Food and Agriculture Organization of the
* United Nations (FAO-UN), United Nations World Food Programme (WFP)
* and United Nations Environment Programme (UNEP)
*
@@ -155,21 +155,27 @@ public boolean accept(Path entry) throws IOException {
ServiceContext serviceContext = test.createServiceContext();
- ApplicationContextHolder.set(_applicationContext);
- serviceContext.setAsThreadLocal();
+ try {
+ ApplicationContextHolder.set(_applicationContext);
+ //serviceContext.setAsThreadLocal();
+
// TODOES
// _applicationContext.getBean(EsSearchManager.class).initNonStaticData(100);
- _applicationContext.getBean(DataManager.class).init(serviceContext, false);
- _applicationContext.getBean(ThesaurusManager.class).init(true, serviceContext, "WEB-INF/data/config/codelist");
+ _applicationContext.getBean(DataManager.class).init(serviceContext);
+ _applicationContext.getBean(ThesaurusManager.class).init(true, serviceContext, "WEB-INF/data/config/codelist");
- addSourceUUID(dataDir);
+ addSourceUUID(dataDir);
- final DataSource dataSource = _applicationContext.getBean(DataSource.class);
- try (Connection conn = dataSource.getConnection()) {
- ThreadUtils.init(conn.getMetaData().getURL(), _applicationContext.getBean(SettingManager.class));
+ final DataSource dataSource = _applicationContext.getBean(DataSource.class);
+ try (Connection conn = dataSource.getConnection()) {
+ ThreadUtils.init(conn.getMetaData().getURL(), _applicationContext.getBean(SettingManager.class));
+ }
+ } finally {
+ serviceContext.clearAsThreadLocal();
}
+
}
diff --git a/core/src/test/java/org/fao/geonet/kernel/DataManagerWorksWithoutTransactionIntegrationTest.java b/core/src/test/java/org/fao/geonet/kernel/DataManagerWorksWithoutTransactionIntegrationTest.java
index d56af28c2960..7b381ac97233 100644
--- a/core/src/test/java/org/fao/geonet/kernel/DataManagerWorksWithoutTransactionIntegrationTest.java
+++ b/core/src/test/java/org/fao/geonet/kernel/DataManagerWorksWithoutTransactionIntegrationTest.java
@@ -51,25 +51,36 @@ public void testDataManagerCutpoints() throws Exception {
(new TestTask() {
@Override
public void run() throws Exception {
- ServiceContext serviceContext = createContextAndLogAsAdmin();
+ ServiceContext restore = ServiceContext.get();
+ if( restore != null ){
+ restore.clearAsThreadLocal();
+ }
- String metadataCategory = metadataCategoryRepository.findAll().get(0).getName();
- Element sampleMetadataXml = getSampleMetadataXml();
- UserSession userSession = serviceContext.getUserSession();
- int userIdAsInt = userSession.getUserIdAsInt();
- String schema = dataManager.autodetectSchema(sampleMetadataXml);
- String mdId = dataManager.insertMetadata(serviceContext, schema, sampleMetadataXml,
- UUID.randomUUID().toString(), userIdAsInt, "2", "source",
- MetadataType.METADATA.codeString, null, metadataCategory, new ISODate().getDateAndTime(),
- new ISODate().getDateAndTime(), false, false);
- Element newMd = new Element(sampleMetadataXml.getName(), sampleMetadataXml.getNamespace()).addContent(new Element("fileIdentifier",
- GMD).addContent(new Element("CharacterString", GCO)));
+ ServiceContext serviceContext = createContextAndLogAsAdmin();
+ try {
+ String metadataCategory = metadataCategoryRepository.findAll().get(0).getName();
+ Element sampleMetadataXml = getSampleMetadataXml();
+ UserSession userSession = serviceContext.getUserSession();
+ int userIdAsInt = userSession.getUserIdAsInt();
+ String schema = dataManager.autodetectSchema(sampleMetadataXml);
+ String mdId = dataManager.insertMetadata(serviceContext, schema, sampleMetadataXml,
+ UUID.randomUUID().toString(), userIdAsInt, "2", "source",
+ MetadataType.METADATA.codeString, null, metadataCategory, new ISODate().getDateAndTime(),
+ new ISODate().getDateAndTime(), false, false);
+ Element newMd = new Element(sampleMetadataXml.getName(), sampleMetadataXml.getNamespace()).addContent(new Element("fileIdentifier",
+ GMD).addContent(new Element("CharacterString", GCO)));
- AbstractMetadata updateMd = dataManager.updateMetadata(serviceContext, mdId, newMd, false, false, false, "eng",
- new ISODate().getDateAndTime(), false);
- assertNotNull(updateMd);
- boolean hasNext = updateMd.getCategories().iterator().hasNext();
- assertTrue(hasNext);
+ AbstractMetadata updateMd = dataManager.updateMetadata(serviceContext, mdId, newMd, false, false, false, "eng",
+ new ISODate().getDateAndTime(), false);
+ assertNotNull(updateMd);
+ boolean hasNext = updateMd.getCategories().iterator().hasNext();
+ assertTrue(hasNext);
+ } finally {
+ serviceContext.clearAsThreadLocal();
+ if( restore != null ){
+ restore.setAsThreadLocal();
+ }
+ }
}
});
}
diff --git a/core/src/test/java/org/fao/geonet/kernel/LocalXLinksInMetadataIntegrationTest.java b/core/src/test/java/org/fao/geonet/kernel/LocalXLinksInMetadataIntegrationTest.java
index 683c05e4242a..ce9e40024c30 100644
--- a/core/src/test/java/org/fao/geonet/kernel/LocalXLinksInMetadataIntegrationTest.java
+++ b/core/src/test/java/org/fao/geonet/kernel/LocalXLinksInMetadataIntegrationTest.java
@@ -92,57 +92,58 @@ public void testResolveLocalXLink() throws Exception {
}
final Element metadata = getSampleMetadataXml().setContent(content);
- ServiceContext context = createServiceContext();
- context.setAsThreadLocal();
- loginAsAdmin(context);
-
- _settingManager.setValue(Settings.SYSTEM_XLINKRESOLVER_ENABLE, true);
-
- String schema = _dataManager.autodetectSchema(metadata);
- String uuid = UUID.randomUUID().toString();
- int owner = context.getUserSession().getUserIdAsInt();
- String groupOwner = "" + ReservedGroup.intranet.getId();
- String source = _settingManager.getSiteId();
- String metadataType = MetadataType.METADATA.codeString;
- String changeDate;
- String createDate = changeDate = new ISODate().getDateAndTime();
- String id = _dataManager.insertMetadata(context, schema, metadata, uuid, owner, groupOwner, source, metadataType, null,
- null, createDate, changeDate, false, false);
-
- SpringLocalServiceInvoker mockInvoker = resetAndGetMockInvoker();
-
- String keyword1 = "World";
- Element element1 = new SAXBuilder().build(new StringReader(String.format(responseTemplate, keyword1))).getRootElement();
- when(mockInvoker.invoke(any(String.class))).thenReturn(element1);
-
- final String xpath = "*//gmd:descriptiveKeywords//gmd:keyword/gco:CharacterString";
- assertNull(Xml.selectElement(metadata, xpath));
- verify(mockInvoker, never()).invoke(any(String.class));
-
- final Element loadedMetadataNoXLinkAttributesNotEdit = _dataManager.getMetadata(context, id, false, false, false);
- assertEqualsText(keyword1, loadedMetadataNoXLinkAttributesNotEdit, xpath, GCO, GMD);
- verify(mockInvoker, times(1)).invoke(any(String.class));
-
- final Element loadedMetadataKeepXLinkAttributesNotEdit = _dataManager.getMetadata(context, id, false, false, true);
- assertEqualsText(keyword1, loadedMetadataKeepXLinkAttributesNotEdit, xpath, GCO, GMD);
- verify(mockInvoker, times(2)).invoke(any(String.class));
-
- final Element loadedMetadataNoXLinkAttributesEdit = _dataManager.getMetadata(context, id, false, true, false);
- assertEqualsText(keyword1, loadedMetadataNoXLinkAttributesEdit, xpath, GCO, GMD);
- verify(mockInvoker, times(3)).invoke(any(String.class));
-
- final Element loadedMetadataKeepXLinkAttributesEdit = _dataManager.getMetadata(context, id, false, true, true);
- assertEqualsText(keyword1, loadedMetadataKeepXLinkAttributesEdit, xpath, GCO, GMD);
- verify(mockInvoker, times(4)).invoke(any(String.class));
-
- Processor.clearCache();
-
- String keyword2 = "Other Word";
- Element element2 = new SAXBuilder().build(new StringReader(String.format(responseTemplate, keyword2))).getRootElement();
- when(mockInvoker.invoke(any(String.class))).thenReturn(element2);
-
- final Element newLoad = _dataManager.getMetadata(context, id, false, true, true);
- assertEqualsText(keyword2, newLoad, xpath, GCO, GMD);
- verify(mockInvoker, times(5)).invoke(any(String.class));
+ try (ServiceContext context = createServiceContext()) {
+ // context.setAsThreadLocal();
+ loginAsAdmin(context);
+
+ _settingManager.setValue(Settings.SYSTEM_XLINKRESOLVER_ENABLE, true);
+
+ String schema = _dataManager.autodetectSchema(metadata);
+ String uuid = UUID.randomUUID().toString();
+ int owner = context.getUserSession().getUserIdAsInt();
+ String groupOwner = "" + ReservedGroup.intranet.getId();
+ String source = _settingManager.getSiteId();
+ String metadataType = MetadataType.METADATA.codeString;
+ String changeDate;
+ String createDate = changeDate = new ISODate().getDateAndTime();
+ String id = _dataManager.insertMetadata(context, schema, metadata, uuid, owner, groupOwner, source, metadataType, null,
+ null, createDate, changeDate, false, false);
+
+ SpringLocalServiceInvoker mockInvoker = resetAndGetMockInvoker();
+
+ String keyword1 = "World";
+ Element element1 = new SAXBuilder().build(new StringReader(String.format(responseTemplate, keyword1))).getRootElement();
+ when(mockInvoker.invoke(any(String.class))).thenReturn(element1);
+
+ final String xpath = "*//gmd:descriptiveKeywords//gmd:keyword/gco:CharacterString";
+ assertNull(Xml.selectElement(metadata, xpath));
+ verify(mockInvoker, never()).invoke(any(String.class));
+
+ final Element loadedMetadataNoXLinkAttributesNotEdit = _dataManager.getMetadata(context, id, false, false, false);
+ assertEqualsText(keyword1, loadedMetadataNoXLinkAttributesNotEdit, xpath, GCO, GMD);
+ verify(mockInvoker, times(1)).invoke(any(String.class));
+
+ final Element loadedMetadataKeepXLinkAttributesNotEdit = _dataManager.getMetadata(context, id, false, false, true);
+ assertEqualsText(keyword1, loadedMetadataKeepXLinkAttributesNotEdit, xpath, GCO, GMD);
+ verify(mockInvoker, times(2)).invoke(any(String.class));
+
+ final Element loadedMetadataNoXLinkAttributesEdit = _dataManager.getMetadata(context, id, false, true, false);
+ assertEqualsText(keyword1, loadedMetadataNoXLinkAttributesEdit, xpath, GCO, GMD);
+ verify(mockInvoker, times(3)).invoke(any(String.class));
+
+ final Element loadedMetadataKeepXLinkAttributesEdit = _dataManager.getMetadata(context, id, false, true, true);
+ assertEqualsText(keyword1, loadedMetadataKeepXLinkAttributesEdit, xpath, GCO, GMD);
+ verify(mockInvoker, times(4)).invoke(any(String.class));
+
+ Processor.clearCache();
+
+ String keyword2 = "Other Word";
+ Element element2 = new SAXBuilder().build(new StringReader(String.format(responseTemplate, keyword2))).getRootElement();
+ when(mockInvoker.invoke(any(String.class))).thenReturn(element2);
+
+ final Element newLoad = _dataManager.getMetadata(context, id, false, true, true);
+ assertEqualsText(keyword2, newLoad, xpath, GCO, GMD);
+ verify(mockInvoker, times(5)).invoke(any(String.class));
+ }
}
}
diff --git a/core/src/test/java/org/fao/geonet/kernel/LocalXLinksUpdateDeleteTest.java b/core/src/test/java/org/fao/geonet/kernel/LocalXLinksUpdateDeleteTest.java
index 31f09e63c2d5..e3a245923cb1 100644
--- a/core/src/test/java/org/fao/geonet/kernel/LocalXLinksUpdateDeleteTest.java
+++ b/core/src/test/java/org/fao/geonet/kernel/LocalXLinksUpdateDeleteTest.java
@@ -53,6 +53,7 @@ public class LocalXLinksUpdateDeleteTest extends AbstractIntegrationTestWithMock
@Autowired
private SettingManager settingManager;
+ /** Service context for test */
private ServiceContext context;
@Before
diff --git a/core/src/test/java/org/fao/geonet/kernel/mef/MEFLibIntegrationTest.java b/core/src/test/java/org/fao/geonet/kernel/mef/MEFLibIntegrationTest.java
index 02343bd7b793..292e4b083d46 100644
--- a/core/src/test/java/org/fao/geonet/kernel/mef/MEFLibIntegrationTest.java
+++ b/core/src/test/java/org/fao/geonet/kernel/mef/MEFLibIntegrationTest.java
@@ -92,8 +92,11 @@ public void testDoImportMefVersion2() throws Exception {
}
}
+ /** Import records for integration test */
public static class ImportMetadata {
private final AbstractCoreIntegrationTest testClass;
+
+ /** Integration test context */
private ServiceContext context;
private List metadataIds = new ArrayList<>();
private List mefFilesToLoad = new ArrayList<>();
diff --git a/core/src/test/java/org/fao/geonet/kernel/search/AbstractLanguageSearchOrderIntegrationTest.java b/core/src/test/java/org/fao/geonet/kernel/search/AbstractLanguageSearchOrderIntegrationTest.java
index bddc9171b289..f44483531e00 100644
--- a/core/src/test/java/org/fao/geonet/kernel/search/AbstractLanguageSearchOrderIntegrationTest.java
+++ b/core/src/test/java/org/fao/geonet/kernel/search/AbstractLanguageSearchOrderIntegrationTest.java
@@ -50,6 +50,7 @@
public abstract class AbstractLanguageSearchOrderIntegrationTest extends AbstractCoreIntegrationTest {
public static List METADATA_TO_IMPORT = new ArrayList(10);
protected MetaSearcher _luceneSearcher;
+ /** Test service context */
protected ServiceContext _serviceContext;
protected String _abstractSearchTerm;
@Autowired
diff --git a/core/src/test/java/org/fao/geonet/kernel/url/UrlAnalyzerTest.java b/core/src/test/java/org/fao/geonet/kernel/url/UrlAnalyzerTest.java
index 1a27c54e2be0..be1ecd08c082 100644
--- a/core/src/test/java/org/fao/geonet/kernel/url/UrlAnalyzerTest.java
+++ b/core/src/test/java/org/fao/geonet/kernel/url/UrlAnalyzerTest.java
@@ -76,6 +76,7 @@ public class UrlAnalyzerTest extends AbstractCoreIntegrationTest {
@PersistenceContext
private EntityManager entityManager;
+ /** Service context for test */
private ServiceContext context;
@Before
diff --git a/domain/src/main/java/org/fao/geonet/domain/Profile.java b/domain/src/main/java/org/fao/geonet/domain/Profile.java
index e3a24e71074e..0a1542ff3525 100644
--- a/domain/src/main/java/org/fao/geonet/domain/Profile.java
+++ b/domain/src/main/java/org/fao/geonet/domain/Profile.java
@@ -31,7 +31,15 @@
/**
* The enumeration of profiles available in geonetwork.
- *
+ *
+ * Administrator
+ * Administrator - UserAdmin
+ * Administrator - UserAdmin - Reviewer
+ * Administrator - UserAdmin - Reviewer - Editor
+ * Administrator - UserAdmin - Reviewer - Editor - RegisteredUser
+ * Administrator - UserAdmin - Reviewer - Editor - RegisteredUser - Guest
+ * Administrator - Monitor
+ *
* @author Jesse
*/
public enum Profile {
diff --git a/domain/src/main/java/org/fao/geonet/domain/StatusValue.java b/domain/src/main/java/org/fao/geonet/domain/StatusValue.java
index 26d5bf4ccc92..43fb5743ad2e 100644
--- a/domain/src/main/java/org/fao/geonet/domain/StatusValue.java
+++ b/domain/src/main/java/org/fao/geonet/domain/StatusValue.java
@@ -239,4 +239,13 @@ public static final class Events {
public static final String RECORDIMPORTED = "62";
public static final String RECORDRESTORED = "63";
}
+
+ @Override
+ public String toString() {
+ final StringBuffer sb = new StringBuffer("StatusValue{");
+ sb.append("_id=").append(_id);
+ sb.append(", _name='").append(_name).append('\'');
+ sb.append('}');
+ return sb.toString();
+ }
}
diff --git a/harvesters/src/main/java/org/fao/geonet/component/harvester/csw/Harvest.java b/harvesters/src/main/java/org/fao/geonet/component/harvester/csw/Harvest.java
index 24f53ee8a625..b479033be2c0 100644
--- a/harvesters/src/main/java/org/fao/geonet/component/harvester/csw/Harvest.java
+++ b/harvesters/src/main/java/org/fao/geonet/component/harvester/csw/Harvest.java
@@ -635,7 +635,7 @@ private Element createAcknowledgeResponse(Element asyncRequest) {
}
/**
- * Runs the harvester. In synchronous mode, waits for it to finish.
+ * Runs CSW harvester in synchronous mode, waiting for it to finish.
*
* @param harvester - the harvester
* @param context - everywhere in GN !
@@ -813,6 +813,7 @@ private class AsyncHarvestResponse implements RunnableFuture {
Semaphore ready = new Semaphore(0);
private Element harvester;
private String responseHandler;
+ /** Shared service context managed by HarvestManager */
private ServiceContext serviceContext;
/**
diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/HarvestManager.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/HarvestManager.java
index 2afdf443f31f..386919c14ed6 100644
--- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/HarvestManager.java
+++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/HarvestManager.java
@@ -106,26 +106,37 @@ public interface HarvestManager {
boolean update(Element node, String ownerId) throws BadInputEx, SQLException, SchedulerException;
/**
- * Remove all metadata associated to one harvester
+ * Removes all the metadata associated with one harvester.
*
* @param id of the harvester
+ * @return {@link Common.OperResult#OK} indicating removal of associated metadata.
+ * @throws Exception
*/
Common.OperResult clearBatch(String id) throws Exception;
+ /**
+ * Remove the harvester, including deleting harvester history.
+ *
+ * @param id harvester id
+ * @return {@link Common.OperResult#OK} if remove succeeded, or {@link Common.OperResult#NOT_FOUND} if not available
+ * @throws Exception
+ */
Common.OperResult remove(String id) throws Exception;
/**
* Set harvester status to {@link org.fao.geonet.kernel.harvest.Common.Status#ACTIVE} and
* schedule the harvester to be ran at the next time according to the harvesters schedule.
*
- * @return return {@link org.fao.geonet.kernel.harvest.Common.OperResult#ALREADY_ACTIVE} if the
- * harvester is already active or {@link org.fao.geonet.kernel.harvest.Common.OperResult#OK}
+ * @param id harvester id
+ * @return {@link Common.OperResult#ALREADY_ACTIVE} if the
+ * harvester is already active or {@link Common.OperResult#OK}
*/
Common.OperResult start(String id) throws SQLException, SchedulerException;
/**
* Set the harvester status to the provided status and unschedule any scheduled jobs.
*
+ * @param id harvester id
* @return {@link org.fao.geonet.kernel.harvest.Common.OperResult#ALREADY_INACTIVE} if the not
* currently enabled or {@link org.fao.geonet.kernel.harvest.Common.OperResult#OK}
*/
diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/HarvestManagerImpl.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/HarvestManagerImpl.java
index 43ed1b4d60a2..881310a48cfc 100644
--- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/HarvestManagerImpl.java
+++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/HarvestManagerImpl.java
@@ -24,6 +24,7 @@
package org.fao.geonet.kernel.harvest;
import jeeves.server.context.ServiceContext;
+import jeeves.server.dispatchers.ServiceManager;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.math.NumberUtils;
import org.fao.geonet.GeonetContext;
@@ -90,10 +91,18 @@ public class HarvestManagerImpl implements HarvestInfoProvider, HarvestManager {
private HarvesterSettingsManager settingMan;
private DataManager dataMan;
private Path xslPath;
- private ServiceContext context;
+
+ /** Harvester service context */
+ private ServiceContext.AppHandlerServiceContext harvesterContext;
+
+ /** Read only mode */
private boolean readOnly;
private ConfigurableApplicationContext applicationContext;
+
+ /** Harvester available by id */
private Map hmHarvesters = new HashMap<>();
+
+ /** Harvester lookup by uuid */
private Map hmHarvestLookup = new HashMap<>();
private TranslationPackBuilder translationPackBuilder;
@@ -111,21 +120,22 @@ public ConfigurableApplicationContext getApplicationContext() {
/**
* initialize the manager.
*
- * @param context service context
+ * @param initContext service context
* @throws Exception hmm
*/
@Override
- public void init(ServiceContext context, boolean isReadOnly) throws Exception {
- this.context = context;
- this.dataMan = context.getBean(DataManager.class);
- this.settingMan = context.getBean(HarvesterSettingsManager.class);
- this.translationPackBuilder = context.getBean(TranslationPackBuilder.class);
+ public void init(ServiceContext initContext, boolean isReadOnly) throws Exception {
+ //create a new (shared) context instead of using the Jeeves one
+ ServiceManager serviceManager = initContext.getBean(ServiceManager.class);
+ this.harvesterContext = serviceManager.createAppHandlerServiceContext("harvester", initContext);
- applicationContext = context.getApplicationContext();
+ this.dataMan = harvesterContext.getBean(DataManager.class);
+ this.settingMan = harvesterContext.getBean(HarvesterSettingsManager.class);
+ applicationContext = harvesterContext.getApplicationContext();
this.readOnly = isReadOnly;
Log.debug(Geonet.HARVEST_MAN, "HarvesterManager initializing, READONLYMODE is " + this.readOnly);
- xslPath = context.getAppPath().resolve(Geonet.Path.STYLESHEETS).resolve("xml/harvesting/");
+ xslPath = harvesterContext.getAppPath().resolve(Geonet.Path.STYLESHEETS).resolve("xml/harvesting/");
AbstractHarvester.getScheduler().getListenerManager().addJobListener(
HarversterJobListener.getInstance(this));
@@ -140,8 +150,8 @@ public void init(ServiceContext context, boolean isReadOnly) throws Exception {
String id = node.getAttributeValue("id");
try {
- AbstractHarvester ah = AbstractHarvester.create(type, context);
- ah.init(node, context);
+ AbstractHarvester ah = AbstractHarvester.create(type, harvesterContext);
+ ah.init(node);
hmHarvesters.put(ah.getID(), ah);
hmHarvestLookup.put(ah.getParams().getUuid(), ah);
} catch (OperationAbortedEx oae) {
@@ -155,7 +165,7 @@ public void init(ServiceContext context, boolean isReadOnly) throws Exception {
}
/**
- * TODO Javadoc.
+ * Sorted harvester nodes.
*
* @param nodes harvest nodes
* @param sortField sort field
@@ -170,7 +180,7 @@ private Element transformSort(Element nodes, String sortField) throws Exception
}
/**
- * TODO Javadoc.
+ * Transformed harvest node.
*
* @param node harvest node
* @return transformed harvest node
@@ -183,7 +193,7 @@ private Element transform(Element node) throws Exception {
}
/**
- * TODO Javadoc.
+ * Clean up harvest manager.
*/
@Override
public void shutdown() {
@@ -199,6 +209,11 @@ public void shutdown() {
} catch (SchedulerException e) {
Log.error(Geonet.HARVEST_MAN, "Error shutting down harvester scheduler");
}
+ //we created the context, so we have to clean it up
+ if (harvesterContext != null){
+ // Call superclass cleanup to avoid AppHandlerServiceContext protections
+ ((ServiceContext)harvesterContext).clear();
+ }
}
//---------------------------------------------------------------------------
@@ -208,7 +223,7 @@ public void shutdown() {
//---------------------------------------------------------------------------
/**
- * TODO javadoc.
+ * Harvest node, filtered to only include nodes visible to user session.
*
* @param id harvester id
* @param context servicecontext
@@ -236,7 +251,7 @@ public Element get(String id, ServiceContext context, String sort) throws Except
// and use it for call harvesterSettingsManager.get
// don't forget to clean parameter when update or delete
- Profile profile = context.getUserSession().getProfile();
+ Profile profile = context.getUserSession() == null ? Profile.Administrator : context.getUserSession().getProfile();
if (id != null && !id.equals("-1")) {
// you're an Administrator
if (profile == Profile.Administrator) {
@@ -295,7 +310,7 @@ public Element get(String id, ServiceContext context, String sort) throws Except
}
/**
- * TODO javadoc.
+ * Add harvester, returning the id of the new harvester.
*
* @param node harvester config
* @param ownerId the id of the user doing this
@@ -309,7 +324,7 @@ public String addHarvesterReturnId(Element node, String ownerId) throws JeevesEx
Log.debug(Geonet.HARVEST_MAN, "Adding harvesting node : \n" + Xml.getString(node));
}
String type = node.getAttributeValue("type");
- AbstractHarvester ah = AbstractHarvester.create(type, context);
+ AbstractHarvester ah = AbstractHarvester.create(type, harvesterContext);
Element ownerIdE = new Element("ownerId");
ownerIdE.setText(ownerId);
@@ -328,7 +343,8 @@ public String addHarvesterReturnId(Element node, String ownerId) throws JeevesEx
}
/**
- * TODO Javadoc.
+ * Add harvester, returning UUID.
+ * @return uuid of new harvester
*/
@Override
public String addHarvesterReturnUUID(Element node) throws JeevesException, SQLException {
@@ -336,7 +352,7 @@ public String addHarvesterReturnUUID(Element node) throws JeevesException, SQLEx
Log.debug(Geonet.HARVEST_MAN, "Adding harvesting node : \n" + Xml.getString(node));
}
String type = node.getAttributeValue("type");
- AbstractHarvester ah = AbstractHarvester.create(type, context);
+ AbstractHarvester ah = AbstractHarvester.create(type, harvesterContext);
ah.add(node);
hmHarvesters.put(ah.getID(), ah);
@@ -352,9 +368,10 @@ public String addHarvesterReturnUUID(Element node) throws JeevesException, SQLEx
}
/**
- * TODO javadoc.
+ * Clone harvester.
*
* @param ownerId id of the user doing this
+ * @return id of the new harvester
*/
@Override
public synchronized String createClone(String id, String ownerId, ServiceContext context) throws Exception {
@@ -387,9 +404,10 @@ public synchronized String createClone(String id, String ownerId, ServiceContext
}
/**
- * TODO javadoc.
+ * Update harvester configuration.
*
* @param ownerId id of the user doing this
+ * @return true if harvester updated
*/
@Override
public synchronized boolean update(Element node, String ownerId) throws BadInputEx, SQLException, SchedulerException {
@@ -439,7 +457,7 @@ public synchronized OperResult remove(final String id) throws Exception {
if (StringUtils.isNotBlank(harvesterSetting)) {
settingMan.remove("harvesting/id:" + id);
- final HarvestHistoryRepository historyRepository = context.getBean(HarvestHistoryRepository.class);
+ final HarvestHistoryRepository historyRepository = harvesterContext.getBean(HarvestHistoryRepository.class);
// set deleted status in harvest history table to 'y'
historyRepository.markAllAsDeleted(uuid);
hmHarvesters.remove(id);
@@ -460,7 +478,8 @@ public synchronized OperResult remove(final String id) throws Exception {
/**
- * TODO Javadoc.
+ * Start harvester
+ * @param id harvester id
*/
@Override
public OperResult start(String id) throws SQLException, SchedulerException {
@@ -476,7 +495,10 @@ public OperResult start(String id) throws SQLException, SchedulerException {
}
/**
- * TODO Javadoc.
+ * Stop harvester with provided status, and unschedule any outstanding jobs.
+ * @param id harvester id
+ * @param status New status
+ * @return operation result
*/
@Override
public OperResult stop(String id, Common.Status status) throws SQLException, SchedulerException {
@@ -492,7 +514,9 @@ public OperResult stop(String id, Common.Status status) throws SQLException, Sch
}
/**
- * TODO Javadoc.
+ * Start harvester run.
+ * @param id harvester to run
+ * @return operation result
*/
@Override
public OperResult run(String id) throws SQLException, SchedulerException {
@@ -516,7 +540,7 @@ public OperResult run(String id) throws SQLException, SchedulerException {
}
/**
- * TODO Javadoc.
+ * Run the harvester and check whether the harvest completed correctly.
*/
@Override
public OperResult invoke(String id) {
@@ -540,7 +564,10 @@ public OperResult invoke(String id) {
}
/**
- * TODO Javadoc.
+ * Harvester details
+ * @param harvestUuid uuid to look up harvester info
+ * @param id
+ * @param uuid
*/
public Element getHarvestInfo(String harvestUuid, String id, String uuid) {
Element info = new Element(Edit.Info.Elem.HARVEST_INFO);
@@ -580,10 +607,13 @@ private void addInfo(Element node) {
/**
* Remove harvester information. For example, when records are removed, clean the last status
* information if any.
+ *
+ * @param id harvester id
+ * @param ownerId
*/
public void removeInfo(String id, String ownerId) throws Exception {
// get the specified harvester from the settings table
- Element node = get(id, context, null);
+ Element node = get(id, harvesterContext, null);
if (node != null) {
Element info = node.getChild("info");
if (info != null) {
@@ -611,6 +641,13 @@ public void setReadOnly(boolean readOnly) {
this.readOnly = readOnly;
}
+ /**
+ * Removes all the metadata associated with one harvester.
+ *
+ * @param id of the harvester
+ * @return {@link OperResult#OK} indicating removal of associated metadata.
+ * @throws Exception
+ */
public synchronized OperResult clearBatch(String id) throws Exception {
if (Log.isDebugEnabled(Geonet.HARVEST_MAN))
Log.debug(Geonet.HARVEST_MAN, "Clearing harvesting with id : " + id);
@@ -631,8 +668,18 @@ public synchronized OperResult clearBatch(String id) throws Exception {
elapsedTime = (System.currentTimeMillis() - elapsedTime) / 1000;
// clear last run info
- removeInfo(id, context.getUserSession().getUserId());
- ah.emptyResult();
+ ServiceContext context = ah.getContext();
+ if (context.getUserSession() != null ) {
+ // Use user session if logged in
+ String userId = context.getUserSession().getUserId();
+ removeInfo(id, userId);
+ ah.emptyResult();
+ } else {
+ // Identify owner if not logged in
+ String userId = ah.getParams().getOwnerId();
+ removeInfo(id, userId);
+ ah.emptyResult();
+ }
Element historyEl = new Element("result");
historyEl.addContent(new Element("cleared").
@@ -640,7 +687,7 @@ public synchronized OperResult clearBatch(String id) throws Exception {
final String lastRun = OffsetDateTime.now(ZoneOffset.UTC).format(DateTimeFormatter.ISO_DATE_TIME);
ISODate lastRunDate = new ISODate(lastRun);
- HarvestHistoryRepository historyRepository = context.getBean(HarvestHistoryRepository.class);
+ HarvestHistoryRepository historyRepository = harvesterContext.getBean(HarvestHistoryRepository.class);
HarvestHistory history = new HarvestHistory();
history.setDeleted(true);
history.setElapsedTime((int) elapsedTime);
diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractHarvester.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractHarvester.java
index f514708afc2d..c3d84e968565 100644
--- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractHarvester.java
+++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractHarvester.java
@@ -25,6 +25,7 @@
import jeeves.server.UserSession;
import jeeves.server.context.ServiceContext;
+import jeeves.server.dispatchers.ServiceManager;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.EnhancedPatternLayout;
import org.apache.log4j.FileAppender;
@@ -132,6 +133,12 @@ public abstract class AbstractHarvester errors = Collections.synchronizedList(new LinkedList<>());
private volatile boolean running = false;
- public static AbstractHarvester, ?> create(String type, ServiceContext context) throws BadParameterEx, OperationAbortedEx {
+ /**
+ * Factory method for creating appropriate harvester.
+ *
+ * This method configures the harvester with its own service context and appropriate beans
+ * for operation. It is your responsibility to call {@link #destroy()} to clean up
+ * after this harvester when it is no longer used.
+ *
+ * @param type harvester type
+ * @param harvesterContext shared harvester service context used for bean lookup
+ * @return harvester instance
+ * @throws BadParameterEx
+ * @throws OperationAbortedEx
+ */
+ public static AbstractHarvester, ?> create(String type, ServiceContext harvesterContext) throws BadParameterEx, OperationAbortedEx {
if (type == null) {
throw new BadParameterEx("type", null);
}
+ ServiceManager serviceManager = harvesterContext.getBean(ServiceManager.class);
try {
- AbstractHarvester, ?> ah = context.getBean(type, AbstractHarvester.class);
- ah.setContext(context);
+ AbstractHarvester, ?> ah = harvesterContext.getBean(type, AbstractHarvester.class);
+
+ ServiceContext context = serviceManager.createServiceContext( "harvester."+type, harvesterContext );
+ ah.initContext( context );
return ah;
} catch (Exception e) {
throw new OperationAbortedEx("Cannot instantiate harvester of type " + type, e);
}
}
- protected void setContext(ServiceContext context) {
+ /**
+ * Service context provided for harvester for bean discovery and to manage user session.
+ *
+ * This method uses the provided context to look up data manager, metadata utils, harvester settings manager
+ * and other services for use during operation.
+ *
+ * @param context service context provided to this harvester to manage
+ */
+ public void initContext(ServiceContext context) {
this.context = context;
this.dataMan = context.getBean(DataManager.class);
this.metadataUtils = context.getBean(IMetadataUtils.class);
@@ -236,15 +267,20 @@ public void add(Element node) throws BadInputEx, SQLException {
id = doAdd(node);
}
- public void init(Element node, ServiceContext context) throws BadInputEx, SchedulerException {
+ /**
+ * Setup harvester using the provided configuration.
+ *
+ * @param node harvester configuration
+ * @throws BadInputEx
+ * @throws SchedulerException
+ */
+ public void init(Element node) throws BadInputEx, SchedulerException {
id = node.getAttributeValue("id");
status = Status.parse(node.getChild("options").getChildText("status"));
error = null;
- this.context = context;
doInit(node);
-
- initInfo(context);
+ initInfo();
initializeLog();
if (status == Status.ACTIVE) {
@@ -252,7 +288,7 @@ public void init(Element node, ServiceContext context) throws BadInputEx, Schedu
}
}
- private void initInfo(ServiceContext context) {
+ private void initInfo() {
final HarvestHistoryRepository historyRepository = context.getBean(HarvestHistoryRepository.class);
Specification spec = HarvestHistorySpecs.hasHarvesterUuid(getParams().getUuid());
Pageable pageRequest = PageRequest.of(0, 1,
@@ -310,8 +346,12 @@ public static Scheduler getScheduler() throws SchedulerException {
return QuartzSchedulerUtils.getScheduler(SCHEDULER_ID, true);
}
+ /** Called during application shutdown to remove scheduled job and clean up service context. */
public void shutdown() throws SchedulerException {
getScheduler().deleteJob(jobKey(getParams().getUuid(), HARVESTER_GROUP_NAME));
+
+ context.clear();
+ context = null;
}
public static void shutdownScheduler() throws SchedulerException {
@@ -581,14 +621,20 @@ public void addHarvestInfo(Element info, String id, String uuid) {
info.addContent(new Element("type").setText(getType()));
}
- public ServiceContext getServiceContext() {
- return context;
- }
-
public Status getStatus() {
return status;
}
+ /**
+ * Service context maintained by this harvester, may be used to access UserSession.
+ *
+ * @return service context maintained by harvester
+ */
+ public ServiceContext getContext() {
+ return this.context;
+ }
+
+
/**
* Nested class to handle harvesting with fast indexing.
*/
@@ -828,10 +874,20 @@ public final String getType() {
return types[0];
}
+ /**
+ * Harvester configuration.
+ *
+ * @return configuration, strongly typed
+ */
public P getParams() {
return params;
}
+ /**
+ * Setup harvester with the provided configuration
+ * @param node harvester configuration
+ * @throws BadInputEx
+ */
private void doInit(Element node) throws BadInputEx {
setParams(createParams());
params.create(node);
diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractParams.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractParams.java
index 5c45287d71b7..6277cda514e7 100644
--- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractParams.java
+++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/AbstractParams.java
@@ -116,7 +116,9 @@ private static HarvestValidationEnum readValidateFromParams(Element content) {
}
/**
- * @param node
+ * Setup parameters with the provided configuration.
+ *
+ * @param node harvester configuration
* @throws BadInputEx
*/
public void create(Element node) throws BadInputEx {
diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/csw/Aligner.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/csw/Aligner.java
index 2d195d5f6451..fc5c6e262246 100644
--- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/csw/Aligner.java
+++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/csw/Aligner.java
@@ -89,6 +89,7 @@
public class Aligner extends BaseAligner {
+ /** Shared service context managed by HarvestManager */
private ServiceContext context;
private DataManager dataMan;
private CategoryMapper localCateg;
diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/csw/Harvester.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/csw/Harvester.java
index 4630f6ac5788..c2489c46a0a2 100644
--- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/csw/Harvester.java
+++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/csw/Harvester.java
@@ -1,5 +1,5 @@
//=============================================================================
-//=== Copyright (C) 2001-2007 Food and Agriculture Organization of the
+//=== Copyright (C) 2001-2021 Food and Agriculture Organization of the
//=== United Nations (FAO-UN), United Nations World Food Programme (WFP)
//=== and United Nations Environment Programme (UNEP)
//===
@@ -88,6 +88,7 @@ class Harvester implements IHarvester {
private Logger log;
private CswParams params;
+ /** Shared service context managed by HarvestManager */
private ServiceContext context;
/**
diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/fragment/FragmentHarvester.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/fragment/FragmentHarvester.java
index b25afccd403c..2788767aa36f 100644
--- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/fragment/FragmentHarvester.java
+++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/fragment/FragmentHarvester.java
@@ -77,6 +77,7 @@ public class FragmentHarvester extends BaseAligner {
private static final String REPLACEMENT_GROUP = "replacementGroup";
private Logger log;
+ /** Shared service context managed by HarvestManager */
private ServiceContext context;
private DataManager dataMan;
private IMetadataManager metadataManager;
diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geoPREST/Aligner.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geoPREST/Aligner.java
index 9271fe93921e..90dc86f68545 100644
--- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geoPREST/Aligner.java
+++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geoPREST/Aligner.java
@@ -61,6 +61,7 @@
public class Aligner extends BaseAligner {
private Logger log;
+ /** Shared service context managed by HarvestManager */
private ServiceContext context;
private XmlRequest request;
private DataManager dataMan;
diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geoPREST/Harvester.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geoPREST/Harvester.java
index d62553ca281c..68acaa1e1a3f 100644
--- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geoPREST/Harvester.java
+++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geoPREST/Harvester.java
@@ -81,6 +81,7 @@ class Harvester implements IHarvester {
private GeoPRESTParams params;
//---------------------------------------------------------------------------
+ /** Shared service context managed by HarvestManager */
private ServiceContext context;
//---------------------------------------------------------------------------
diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geonet/Aligner.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geonet/Aligner.java
index e6d937bd300d..40df9137eed0 100644
--- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geonet/Aligner.java
+++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geonet/Aligner.java
@@ -88,6 +88,7 @@ public class Aligner extends BaseAligner {
private Logger log;
+ /** Shared service context managed by HarvestManager */
private ServiceContext context;
private XmlRequest request;
private DataManager dataMan;
diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geonet/Harvester.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geonet/Harvester.java
index 96ac8c5660b7..9a8aaebfb779 100644
--- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geonet/Harvester.java
+++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geonet/Harvester.java
@@ -72,6 +72,7 @@ class Harvester implements IHarvester {
private Logger log;
private GeonetParams params;
+ /** Shared service context managed by HarvestManager */
private ServiceContext context;
/**
diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geonet20/Aligner.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geonet20/Aligner.java
index ee10ff3e8f90..6475a2438228 100644
--- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geonet20/Aligner.java
+++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geonet20/Aligner.java
@@ -64,6 +64,7 @@ public class Aligner extends AbstractAligner {
private IMetadataManager metadataManager;
+ /** Shared service context managed by HarvestManager */
private ServiceContext context;
private CategoryMapper localCateg;
diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFilesystemHarvester.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFilesystemHarvester.java
index e4338d7de992..4e887f2e167e 100644
--- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFilesystemHarvester.java
+++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFilesystemHarvester.java
@@ -131,7 +131,7 @@ private HarvestResult align(Path root) throws Exception {
log.debug(String.format(
"Starting indexing in batch thread pool of %d updated records ...",
listOfRecordsToIndex.size()));
- dataMan.batchIndexInThreadPool(context, listOfRecordsToIndex);
+ dataMan.batchIndexInThreadPool(listOfRecordsToIndex);
log.debug("End of alignment for : " + params.getName());
return result;
diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/oaipmh/Harvester.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/oaipmh/Harvester.java
index f906d8e5dcd1..ee5b1cdc9b5b 100644
--- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/oaipmh/Harvester.java
+++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/oaipmh/Harvester.java
@@ -88,6 +88,7 @@ class Harvester extends BaseAligner implements IHarvester implements IHarvester implements IHarvester {
-
- // Namespaces needed here....
-
+ // Namespaces needed here....
static private final Namespace invCatalog = Namespace.getNamespace("http://www.unidata.ucar.edu/namespaces/thredds/InvCatalog/v1.0");
static private final Namespace wms = Namespace.getNamespace("http://www.opengis.net/wms");
static private final Namespace gmd = Namespace.getNamespace("gmd", "http://www.isotc211.org/2005/gmd");
static private final Namespace srv = Namespace.getNamespace("srv", "http://www.isotc211.org/2005/srv");
static private final Namespace xlink = Namespace.getNamespace("xlink", "http://www.w3.org/1999/xlink");
- static private final Namespace xsi = Namespace.getNamespace("xsi","http://www.w3.org/2001/XMLSchema-instance");
- static private final Namespace gco = Namespace.getNamespace("gco","http://www.isotc211.org/2005/gco");
+ static private final Namespace xsi = Namespace.getNamespace("xsi","http://www.w3.org/2001/XMLSchema-instance");
+ static private final Namespace gco = Namespace.getNamespace("gco","http://www.isotc211.org/2005/gco");
static private final Namespace gmi = Namespace.getNamespace("gmi","http://www.isotc211.org/2005/gmi");
static private final Namespace gmx = Namespace.getNamespace("gmx","http://www.isotc211.org/2005/gmx");
static private final Namespace gsr = Namespace.getNamespace("gsr","http://www.isotc211.org/2005/gsr");
@@ -179,6 +177,7 @@ class Harvester extends BaseAligner implements IHarvester implements IHarvester {
private Logger log;
+ /** Shared service context managed by HarvestManager */
private ServiceContext context;
private DataManager dataMan;
private IMetadataManager metadataManager;
diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/WebDavRetriever.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/WebDavRetriever.java
index 2651740afab8..037f2bbafc5c 100644
--- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/WebDavRetriever.java
+++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/WebDavRetriever.java
@@ -45,6 +45,7 @@
class WebDavRetriever implements RemoteRetriever {
private Logger log;
+ /** Shared service context managed by HarvestManager */
private ServiceContext context;
private WebDavParams params;
diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/wfsfeatures/Harvester.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/wfsfeatures/Harvester.java
index e1343c4cdaaa..af0e77a82b3c 100644
--- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/wfsfeatures/Harvester.java
+++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/wfsfeatures/Harvester.java
@@ -132,6 +132,7 @@ class Harvester implements IHarvester {
//--- API methods
//---
//---------------------------------------------------------------------------
+ /** Shared service context managed by HarvestManager */
private ServiceContext context;
private WfsFeaturesParams params;
private IMetadataManager metadataManager;
@@ -152,12 +153,14 @@ class Harvester implements IHarvester {
* Contains a list of accumulated errors during the executing of this harvest.
*/
private List errors = new LinkedList();
+
/**
* Constructor
*
+ * @param cancelMonitor Sentinel used to cancel
+ * @param log Logger to track progress
* @param context Jeeves context
- * @param params harvesting configuration for the node
- * @return null
+ * @param params WFS harvesting configuration for the node
*/
public Harvester(AtomicBoolean cancelMonitor, Logger log, ServiceContext context, WfsFeaturesParams params) {
this.cancelMonitor = cancelMonitor;
diff --git a/harvesters/src/main/java/org/fao/geonet/services/harvesting/Util.java b/harvesters/src/main/java/org/fao/geonet/services/harvesting/Util.java
index 5e66b6d0d9c0..ea0667041be1 100644
--- a/harvesters/src/main/java/org/fao/geonet/services/harvesting/Util.java
+++ b/harvesters/src/main/java/org/fao/geonet/services/harvesting/Util.java
@@ -37,6 +37,14 @@
//=============================================================================
+/**
+ * Utility class used by HarvestManager to schedule background activities.
+ *
+ * Please note that background activities make use of a shared service context and
+ * do not have access to the user session unless you take special care
+ * to provide a service context for their use.
+ *
+ */
public class Util {
//--------------------------------------------------------------------------
//---
@@ -44,6 +52,17 @@ public class Util {
//---
//--------------------------------------------------------------------------
+ /**
+ * Utility method used to schedule job on a number of metadata records.
+ *
+ * Exec will process the provided job for each id provided as part of params.
+ *
+ * @param params Element listing harvesters to run
+ * @param context Service context used to look up GeonetContext
+ * @param job Job to run for each indicated harvester
+ * @return Response structured with each harvest job and their
+ * operation result
+ */
public static Element exec(Element params, ServiceContext context, Job job) throws Exception {
GeonetContext gc = (GeonetContext) context.getHandlerContext(Geonet.CONTEXT_NAME);
HarvestManager hm = gc.getBean(HarvestManager.class);
@@ -74,8 +93,19 @@ public static Element exec(Element params, ServiceContext context, Job job) thro
//---
//--------------------------------------------------------------------------
+ /**
+ * Execute job to run on all input ids, the status is returned for each one.
+ */
public interface Job {
- public OperResult execute(HarvestManager hm, String id) throws Exception;
+ /**
+ * Execute job on input id, returning status.
+ *
+ * @param hm HarvestManager scheduling activity
+ * @param id harvester id
+ * @return operation result indicating job status
+ * @throws Exception
+ */
+ OperResult execute(HarvestManager hm, String id) throws Exception;
}
}
diff --git a/harvesters/src/test/java/org/fao/geonet/kernel/harvest/AbstractHarvesterIntegrationTest.java b/harvesters/src/test/java/org/fao/geonet/kernel/harvest/AbstractHarvesterIntegrationTest.java
index 6ccd276106cd..8865ecc5927b 100644
--- a/harvesters/src/test/java/org/fao/geonet/kernel/harvest/AbstractHarvesterIntegrationTest.java
+++ b/harvesters/src/test/java/org/fao/geonet/kernel/harvest/AbstractHarvesterIntegrationTest.java
@@ -77,7 +77,8 @@ public void testHarvest() throws Exception {
customizeParams(params);
final String harvesterUuid = _harvestManager.addHarvesterReturnUUID(params);
AbstractHarvester _harvester = _harvestManager.getHarvester(harvesterUuid);
- _harvester.init(params, context);
+ _harvester.initContext( context );
+ _harvester.init(params);
_harvester.invoke();
final Element result = _harvester.getResult();
diff --git a/harvesters/src/test/java/org/fao/geonet/kernel/harvest/harvester/geoPREST/HarvesterTest.java b/harvesters/src/test/java/org/fao/geonet/kernel/harvest/harvester/geoPREST/HarvesterTest.java
index 1f04dc970df8..b61287cc7650 100644
--- a/harvesters/src/test/java/org/fao/geonet/kernel/harvest/harvester/geoPREST/HarvesterTest.java
+++ b/harvesters/src/test/java/org/fao/geonet/kernel/harvest/harvester/geoPREST/HarvesterTest.java
@@ -18,6 +18,9 @@
import java.text.ParseException;
import java.util.Date;
+import java.util.HashMap;
+
+import jeeves.server.context.ServiceContext;
import org.fao.geonet.utils.Log;
import org.junit.Assert;
@@ -25,6 +28,7 @@
public class HarvesterTest
{
+ ServiceContext context = new ServiceContext("TEST", null, new HashMap<>(), null);
public HarvesterTest() {
}
@@ -32,7 +36,7 @@ public HarvesterTest() {
@Test
public void testParseDate() throws Exception {
- Harvester h = new Harvester(null, Log.createLogger("TEST"), null, null);
+ Harvester h = new Harvester(null, Log.createLogger("TEST"), context, null);
// test EN date
h.parseDate("Mon, 04 Feb 2013 10:19:00 +1000");
@@ -47,7 +51,7 @@ public void testParseDate() throws Exception {
@Test
public void testJDK8136539Workaround() throws Exception {
- Harvester h = new Harvester(null, Log.createLogger("TEST"), null, null);
+ Harvester h = new Harvester(null, Log.createLogger("TEST"), context, null);
Date p0 = h.parseDate("Fr, 24 Mär 2017 10:58:59 +0100");
Date p1 = h.parseDate("Fr, 24 Mrz 2017 10:58:59 +0100");
@@ -57,7 +61,7 @@ public void testJDK8136539Workaround() throws Exception {
@Test
public void testUnparsableDate() throws Exception {
- Harvester h = new Harvester(null, Log.createLogger("TEST"), null, null);
+ Harvester h = new Harvester(null, Log.createLogger("TEST"), context, null);
try {
h.parseDate("Xyz, 04 Feb 2013 10:19:00 +1000");
diff --git a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomPredefinedFeed.java b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomPredefinedFeed.java
index 84beb2312385..87e9cb666844 100644
--- a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomPredefinedFeed.java
+++ b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomPredefinedFeed.java
@@ -90,17 +90,18 @@ public HttpEntity localServiceDescribe(
@RequestParam(value = "language", required = false) String language,
NativeWebRequest webRequest) throws Exception {
- ServiceContext context = createServiceContext(Geonet.DEFAULT_LANGUAGE, webRequest.getNativeRequest(HttpServletRequest.class));
+ try (ServiceContext context = createServiceContext(Geonet.DEFAULT_LANGUAGE, webRequest.getNativeRequest(HttpServletRequest.class))) {
+ SettingManager sm = context.getBean(SettingManager.class);
+ boolean inspireEnable = sm.getValueAsBool(Settings.SYSTEM_INSPIRE_ENABLE);
+ if (!inspireEnable) {
+ Log.info(Geonet.ATOM, "INSPIRE is disabled");
+ throw new OperationNotAllowedEx("INSPIRE option is not enabled on this catalog.");
+ }
- SettingManager sm = context.getBean(SettingManager.class);
- boolean inspireEnable = sm.getValueAsBool(Settings.SYSTEM_INSPIRE_ENABLE);
- if (!inspireEnable) {
- Log.info(Geonet.ATOM, "INSPIRE is disabled");
- throw new OperationNotAllowedEx("INSPIRE option is not enabled on this catalog.");
+ Element feed = getServiceFeed(context, uuid, language);
+ return writeOutResponse(Xml.getString(feed),"application", "atom+xml");
}
- Element feed = getServiceFeed(context, uuid, language);
- return writeOutResponse(Xml.getString(feed),"application", "atom+xml");
}
/**
@@ -123,22 +124,22 @@ public HttpEntity localDatasetDescribe(
@RequestParam(value = "q", required = false) String searchTerms,
NativeWebRequest webRequest) throws Exception
{
- ServiceContext context = createServiceContext("eng", webRequest.getNativeRequest(HttpServletRequest.class));
-
- SettingManager sm = context.getBean(SettingManager.class);
- boolean inspireEnable = sm.getValueAsBool(Settings.SYSTEM_INSPIRE_ENABLE);
- if (!inspireEnable) {
- Log.info(Geonet.ATOM, "INSPIRE is disabled");
- throw new OperationNotAllowedEx("INSPIRE option is not enabled on this catalog.");
- }
+ try (ServiceContext context = createServiceContext("eng", webRequest.getNativeRequest(HttpServletRequest.class))) {
+ SettingManager sm = context.getBean(SettingManager.class);
+ boolean inspireEnable = sm.getValueAsBool(Settings.SYSTEM_INSPIRE_ENABLE);
+ if (!inspireEnable) {
+ Log.info(Geonet.ATOM, "INSPIRE is disabled");
+ throw new OperationNotAllowedEx("INSPIRE option is not enabled on this catalog.");
+ }
- Map params = getDefaultXSLParams(sm, context, XslUtil.twoCharLangCode(context.getLanguage()));
- if (StringUtils.isNotBlank(searchTerms)) {
- params.put("searchTerms", searchTerms.toLowerCase());
+ Map params = getDefaultXSLParams(sm, context, XslUtil.twoCharLangCode(context.getLanguage()));
+ if (StringUtils.isNotBlank(searchTerms)) {
+ params.put("searchTerms", searchTerms.toLowerCase());
+ }
+ Element feed = InspireAtomUtil.getDatasetFeed(context, spIdentifier, spNamespace, params, language);
+ return writeOutResponse(Xml.getString(feed), "application", "atom+xml");
}
- Element feed = InspireAtomUtil.getDatasetFeed(context, spIdentifier, spNamespace, params, language);
- return writeOutResponse(Xml.getString(feed), "application", "atom+xml");
}
private Element getServiceFeed(ServiceContext context, final String uuid, final String language) throws Exception {
@@ -191,6 +192,15 @@ private Map getDefaultXSLParams(SettingManager settingManager, S
return params;
}
+ /**
+ * Service context for atom.service.
+ *
+ * When creating a new service context you are responsible for thread local management and any cleanup.
+ *
+ * @param lang language used for the created service context
+ * @param request HTTP servlet request used to create the context
+ * @return service context for atom.service
+ */
private ServiceContext createServiceContext(String lang, HttpServletRequest request) {
final ServiceManager serviceManager = ApplicationContextHolder.get().getBean(ServiceManager.class);
return serviceManager.createServiceContext("atom.service", lang, request);
@@ -215,7 +225,7 @@ private HttpEntity writeOutResponse(String content, String contentType,
* @param language the language to be used for translation of title, etc. in the resulting dataset ATOM feed
* @param searchTerms the searchTerms for filtering of the spatial datasets
* @param webRequest the request object
- * @return
+ * @return atom feed
* @throws Exception
*/
@RequestMapping(value = "/" + InspireAtomUtil.LOCAL_DOWNLOAD_DATASET_URL_SUFFIX)
@@ -228,57 +238,57 @@ public HttpEntity localDatasetDownload(
@RequestParam(value = "q", required = false) String searchTerms,
NativeWebRequest webRequest) throws Exception
{
- ServiceContext context = createServiceContext(Geonet.DEFAULT_LANGUAGE, webRequest.getNativeRequest(HttpServletRequest.class));
-
- SettingManager sm = context.getBean(SettingManager.class);
- boolean inspireEnable = sm.getValueAsBool(Settings.SYSTEM_INSPIRE_ENABLE);
- if (!inspireEnable) {
- Log.info(Geonet.ATOM, "INSPIRE is disabled");
- throw new OperationNotAllowedEx("INSPIRE option is not enabled on this catalog.");
- }
+ try (ServiceContext context = createServiceContext(Geonet.DEFAULT_LANGUAGE, webRequest.getNativeRequest(HttpServletRequest.class))) {
+ SettingManager sm = context.getBean(SettingManager.class);
+ boolean inspireEnable = sm.getValueAsBool(Settings.SYSTEM_INSPIRE_ENABLE);
+ if (!inspireEnable) {
+ Log.info(Geonet.ATOM, "INSPIRE is disabled");
+ throw new OperationNotAllowedEx("INSPIRE option is not enabled on this catalog.");
+ }
- Map params = getDefaultXSLParams(sm, context, context.getLanguage());
- if (StringUtils.isNotBlank(crs)) {
- crs = URLDecoder.decode(crs,Constants.ENCODING);
- params.put("requestedCrs", crs);
- }
- if (StringUtils.isNotBlank(searchTerms)) {
- params.put("searchTerms", searchTerms.toLowerCase());
- }
- Element feed = InspireAtomUtil.getDatasetFeed(context, spIdentifier, spNamespace, params, language);
- Map crsCounts = new HashMap();;
- Namespace ns = Namespace.getNamespace("http://www.w3.org/2005/Atom");
- if (crs!=null) {
- crsCounts = countDatasetsForCrs(feed, crs, ns);
- } else {
- List entries = (feed.getChildren("entry", ns));
- if (entries.size()==1) {
- crsCounts.put(1, entries.get(0));
+ Map params = getDefaultXSLParams(sm, context, context.getLanguage());
+ if (StringUtils.isNotBlank(crs)) {
+ crs = URLDecoder.decode(crs,Constants.ENCODING);
+ params.put("requestedCrs", crs);
}
- }
- int downloadCount = crsCounts.size()>0 ? crsCounts.keySet().iterator().next() : 0;
- Element selectedEntry = crsCounts.get(downloadCount);
-
- // No download for the CRS specified
- if (downloadCount == 0) {
- throw new Exception("No downloads available for dataset (spatial_dataset_identifier_code: " + spIdentifier + ", spatial_dataset_identifier_namespace: " + spNamespace + ", crs: " + crs + ", searchTerms: " + searchTerms + ")");
-
- // Only one download for the CRS specified
- } else if (downloadCount == 1) {
- String type = null;
- Element link = selectedEntry.getChild("link", ns);
- if (link!=null) {
- type = link.getAttributeValue("type");
+ if (StringUtils.isNotBlank(searchTerms)) {
+ params.put("searchTerms", searchTerms.toLowerCase());
+ }
+ Element feed = InspireAtomUtil.getDatasetFeed(context, spIdentifier, spNamespace, params, language);
+ Map crsCounts = new HashMap();;
+ Namespace ns = Namespace.getNamespace("http://www.w3.org/2005/Atom");
+ if (crs!=null) {
+ crsCounts = countDatasetsForCrs(feed, crs, ns);
+ } else {
+ List entries = (feed.getChildren("entry", ns));
+ if (entries.size()==1) {
+ crsCounts.put(1, entries.get(0));
+ }
}
- HttpServletResponse nativeRes = webRequest.getNativeResponse(HttpServletResponse.class);
- nativeRes.setStatus(HttpServletResponse.SC_MOVED_PERMANENTLY);
+ int downloadCount = crsCounts.size()>0 ? crsCounts.keySet().iterator().next() : 0;
+ Element selectedEntry = crsCounts.get(downloadCount);
+
+ // No download for the CRS specified
+ if (downloadCount == 0) {
+ throw new Exception("No downloads available for dataset (spatial_dataset_identifier_code: " + spIdentifier + ", spatial_dataset_identifier_namespace: " + spNamespace + ", crs: " + crs + ", searchTerms: " + searchTerms + ")");
+
+ // Only one download for the CRS specified
+ } else if (downloadCount == 1) {
+ String type = null;
+ Element link = selectedEntry.getChild("link", ns);
+ if (link!=null) {
+ type = link.getAttributeValue("type");
+ }
+ HttpServletResponse nativeRes = webRequest.getNativeResponse(HttpServletResponse.class);
+ nativeRes.setStatus(HttpServletResponse.SC_MOVED_PERMANENTLY);
// nativeRes.setHeader("Location", selectedEntry.getChildText("id",ns));
- return redirectResponse(selectedEntry.getChildText("id",ns));
- // Otherwise, return a feed with the downloads for the specified CRS
- } else {
- // Filter the dataset feed by CRS code.
- InspireAtomUtil.filterDatasetFeedByCrs(feed, crs);
- return writeOutResponse(Xml.getString(feed),"application", "atom+xml");
+ return redirectResponse(selectedEntry.getChildText("id",ns));
+ // Otherwise, return a feed with the downloads for the specified CRS
+ } else {
+ // Filter the dataset feed by CRS code.
+ InspireAtomUtil.filterDatasetFeedByCrs(feed, crs);
+ return writeOutResponse(Xml.getString(feed),"application", "atom+xml");
+ }
}
}
@@ -324,17 +334,18 @@ public HttpEntity localOpenSearchDescription(
@RequestParam(value = "language", required = false) String language,
NativeWebRequest webRequest) throws Exception {
- ServiceContext context = createServiceContext(Geonet.DEFAULT_LANGUAGE, webRequest.getNativeRequest(HttpServletRequest.class));
+ try (ServiceContext context = createServiceContext(Geonet.DEFAULT_LANGUAGE, webRequest.getNativeRequest(HttpServletRequest.class))) {
+ SettingManager sm = context.getBean(SettingManager.class);
+ boolean inspireEnable = sm.getValueAsBool(Settings.SYSTEM_INSPIRE_ENABLE);
+ if (!inspireEnable) {
+ Log.info(Geonet.ATOM, "INSPIRE is disabled");
+ throw new OperationNotAllowedEx("INSPIRE option is not enabled on this catalog.");
+ }
- SettingManager sm = context.getBean(SettingManager.class);
- boolean inspireEnable = sm.getValueAsBool(Settings.SYSTEM_INSPIRE_ENABLE);
- if (!inspireEnable) {
- Log.info(Geonet.ATOM, "INSPIRE is disabled");
- throw new OperationNotAllowedEx("INSPIRE option is not enabled on this catalog.");
- }
+ Element description = getOpenSearchDescription(context, uuid);
+ return writeOutResponse(Xml.getString(description), "application", "opensearchdescription+xml");
- Element description = getOpenSearchDescription(context, uuid);
- return writeOutResponse(Xml.getString(description), "application", "opensearchdescription+xml");
+ }
}
private Element getOpenSearchDescription(ServiceContext context, final String uuid) throws Exception {
diff --git a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApproveRecord.java b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApproveRecord.java
index 1a315ca05afe..44e68862cd10 100644
--- a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApproveRecord.java
+++ b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApproveRecord.java
@@ -23,6 +23,8 @@
package org.fao.geonet.listener.metadata.draft;
+import jeeves.server.context.ServiceContext;
+import jeeves.server.dispatchers.ServiceManager;
import org.fao.geonet.ApplicationContextHolder;
import org.fao.geonet.constants.Geonet;
import org.fao.geonet.domain.AbstractMetadata;
@@ -65,6 +67,9 @@ public class ApproveRecord implements ApplicationListener
@Autowired
private IMetadataStatus metadataStatus;
+ @Autowired
+ ServiceManager serviceManager;
+
@Autowired
private DraftUtilities draftUtilities;
@@ -76,7 +81,7 @@ public void onApplicationEvent(MetadataStatusChanged event) {
}
@TransactionalEventListener(phase = TransactionPhase.BEFORE_COMMIT)
- public void doAfterCommit(MetadataStatusChanged event) {
+ public void doBeforeCommit(MetadataStatusChanged event) {
try {
Log.trace(Geonet.DATA_MANAGER, "Status changed for metadata with id " + event.getMd().getId());
@@ -87,14 +92,16 @@ public void doAfterCommit(MetadataStatusChanged event) {
case StatusValue.Status.DRAFT:
case StatusValue.Status.SUBMITTED:
if (event.getMd() instanceof Metadata) {
- Log.trace(Geonet.DATA_MANAGER,
- "Replacing contents of record (ID=" + event.getMd().getId() + ") with draft, if exists.");
- draftUtilities.replaceMetadataWithDraft(event.getMd());
+ try (ServiceContext context = serviceManager.createServiceContext("approve_record", event.getUser())) {
+ Log.trace(Geonet.DATA_MANAGER,
+ "Replacing contents of record (ID=" + event.getMd().getId() + ") with draft, if exists.");
+ draftUtilities.replaceMetadataWithDraft(event.getMd());
+ }
}
break;
case StatusValue.Status.RETIRED:
// case StatusValue.Status.REJECTED:
- try {
+ try (ServiceContext context = serviceManager.createServiceContext("approve_record", event.getUser())){
Log.trace(Geonet.DATA_MANAGER,
"Removing draft from record (ID=" + event.getMd().getId() + "), if exists.");
removeDraft(event.getMd());
@@ -104,7 +111,7 @@ public void doAfterCommit(MetadataStatusChanged event) {
}
break;
case StatusValue.Status.APPROVED:
- try {
+ try (ServiceContext context = serviceManager.createServiceContext("approve_record", event.getUser())){
Log.trace(Geonet.DATA_MANAGER, "Replacing contents of approved record (ID=" + event.getMd().getId()
+ ") with draft, if exists.");
approveWithDraft(event);
diff --git a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/DraftCleanup.java b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/DraftCleanup.java
index 925e416303a5..dbce4cad159f 100644
--- a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/DraftCleanup.java
+++ b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/DraftCleanup.java
@@ -25,6 +25,8 @@
import java.util.List;
+import jeeves.server.context.ServiceContext;
+import jeeves.server.dispatchers.ServiceManager;
import org.fao.geonet.constants.Geonet;
import org.fao.geonet.domain.MetadataDraft;
import org.fao.geonet.events.md.MetadataRemove;
@@ -55,12 +57,15 @@ public class DraftCleanup {
@Autowired
private DraftUtilities draftUtilities;
+ @Autowired
+ ServiceManager serviceManager;
+
@TransactionalEventListener
@Transactional(propagation = Propagation.REQUIRES_NEW)
public void doAfterCommit(MetadataRemove event) {
Log.trace(Geonet.DATA_MANAGER,
"A metadata has been removed. Cleanup associated drafts of " + event.getSource());
- try {
+ try (ServiceContext context = serviceManager.createServiceContext("draft_cleanup", -1)) {
List toRemove = metadataDraftRepository
.findAll((Specification) MetadataSpecs.hasMetadataUuid(event.getMd().getUuid()));
diff --git a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/DraftCreated.java b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/DraftCreated.java
index 83d5458e7a62..e6fd1d6c4ca1 100644
--- a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/DraftCreated.java
+++ b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/DraftCreated.java
@@ -25,6 +25,8 @@
import java.util.Arrays;
+import jeeves.server.context.ServiceContext;
+import jeeves.server.dispatchers.ServiceManager;
import org.fao.geonet.constants.Geonet;
import org.fao.geonet.domain.AbstractMetadata;
import org.fao.geonet.domain.MetadataDraft;
@@ -55,6 +57,9 @@ public class DraftCreated implements ApplicationListener {
@Autowired
private IMetadataIndexer metadataIndexer;
+ @Autowired
+ ServiceManager serviceManager;
+
@Override
public void onApplicationEvent(MetadataDraftAdd event) {
}
@@ -62,7 +67,7 @@ public void onApplicationEvent(MetadataDraftAdd event) {
@TransactionalEventListener(phase = TransactionPhase.AFTER_COMPLETION)
public void doAfterCommit(MetadataDraftAdd event) {
Log.trace(Geonet.DATA_MANAGER, "Reindexing non drafted versions of uuid " + event.getMd().getUuid());
- try {
+ try (ServiceContext context = serviceManager.createServiceContext("draft_created", -1)) {
for (AbstractMetadata md : metadataUtils.findAllByUuid(event.getMd().getUuid())) {
if (!(md instanceof MetadataDraft)) {
Log.trace(Geonet.DATA_MANAGER, "Reindexing " + md.getId());
diff --git a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/DraftRemoved.java b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/DraftRemoved.java
index bc957e845548..5f65f27bac5b 100644
--- a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/DraftRemoved.java
+++ b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/DraftRemoved.java
@@ -29,6 +29,8 @@
import javax.transaction.Transactional;
import javax.transaction.Transactional.TxType;
+import jeeves.server.context.ServiceContext;
+import jeeves.server.dispatchers.ServiceManager;
import org.fao.geonet.constants.Geonet;
import org.fao.geonet.domain.AbstractMetadata;
import org.fao.geonet.domain.MetadataDraft;
@@ -58,11 +60,14 @@ public class DraftRemoved {
@Autowired
private IMetadataIndexer metadataIndexer;
+ @Autowired
+ ServiceManager serviceManager;
+
@TransactionalEventListener(phase = TransactionPhase.AFTER_COMPLETION, fallbackExecution = true)
public void doAfterCommit(MetadataDraftRemove event) {
Log.trace(Geonet.DATA_MANAGER, "Reindexing non drafted versions of uuid " + event.getMd().getUuid());
- try {
+ try (ServiceContext context = serviceManager.createServiceContext("draft_removed", -1)) {
for (AbstractMetadata md : getRecords(event)) {
if (!(md instanceof MetadataDraft)) {
Log.trace(Geonet.DATA_MANAGER, "Reindexing " + md.getId());
diff --git a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/DraftUtilities.java b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/DraftUtilities.java
index e9df323af46b..38fa522ec4b1 100644
--- a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/DraftUtilities.java
+++ b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/DraftUtilities.java
@@ -20,6 +20,9 @@
import java.util.List;
+/**
+ * Facade with utility methods responsible for workflow transitions.
+ */
@Service
public class DraftUtilities {
@@ -142,6 +145,9 @@ public AbstractMetadata replaceMetadataWithDraft(AbstractMetadata md, AbstractMe
try {
ServiceContext context = ServiceContext.get();
+ if( context == null ){
+ Log.trace(Geonet.DATA_MANAGER,"context unavailable");
+ }
Element xmlData = draft.getXmlData(false);
String changeDate = draft.getDataInfo().getChangeDate().getDateAndTime();
diff --git a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/UpdateOperations.java b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/UpdateOperations.java
index fb6049666c19..9ddac09edf46 100644
--- a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/UpdateOperations.java
+++ b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/UpdateOperations.java
@@ -25,6 +25,7 @@
import java.util.Arrays;
+import jeeves.server.dispatchers.ServiceManager;
import org.fao.geonet.constants.Geonet;
import org.fao.geonet.domain.AbstractMetadata;
import org.fao.geonet.domain.Group;
@@ -70,12 +71,15 @@ public class UpdateOperations implements ApplicationListener {
@Autowired
private MetadataDraftRepository metadataDraftRepository;
+ @Autowired
+ ServiceManager serviceManager;
+
@Override
public void onApplicationEvent(MetadataShare event) {
}
@TransactionalEventListener(phase = TransactionPhase.BEFORE_COMMIT)
- public void doAfterCommit(MetadataShare event) {
+ public void doBeforeCommit(MetadataShare event) {
try {
@@ -95,39 +99,39 @@ public void doAfterCommit(MetadataShare event) {
MetadataDraft draft = metadataDraftRepository.findOneByUuid(md.getUuid());
if (draft != null) {
- // Copy privileges from original metadata
- OperationAllowed op = event.getOp();
- ServiceContext context = ServiceContext.get();
-
- // Only interested in editing and reviewing privileges
- // No one else should be able to see it
- if (op.getId().getOperationId() == ReservedOperation.editing.getId()) {
- Log.trace(Geonet.DATA_MANAGER, "Updating privileges on draft " + draft.getId());
-
- // except for reserved groups
- Group g = groupRepository.findById(op.getId().getGroupId()).get();
- if (!g.isReserved()) {
- try {
- if (event.getType() == Type.REMOVE) {
- Log.trace(Geonet.DATA_MANAGER, "Removing editing on group "
- + op.getId().getGroupId() + " for draft " + draft.getId());
-
- metadataOperations.forceUnsetOperation(context, draft.getId(),
- op.getId().getGroupId(), op.getId().getOperationId());
- } else {
- Log.trace(Geonet.DATA_MANAGER, "Adding editing on group " + op.getId().getGroupId()
- + " for draft " + draft.getId());
-
- metadataOperations.forceSetOperation(context, draft.getId(),
- op.getId().getGroupId(), op.getId().getOperationId());
+ try (ServiceContext context = serviceManager.createServiceContext("update_operations", -1)) {
+ // Copy privileges from original metadata
+ OperationAllowed op = event.getOp();
+
+ // Only interested in editing and reviewing privileges
+ // No one else should be able to see it
+ if (op.getId().getOperationId() == ReservedOperation.editing.getId()) {
+ Log.trace(Geonet.DATA_MANAGER, "Updating privileges on draft " + draft.getId());
+
+ // except for reserved groups
+ Group g = groupRepository.findById(op.getId().getGroupId()).get();
+ if (!g.isReserved()) {
+ try {
+ if (event.getType() == Type.REMOVE) {
+ Log.trace(Geonet.DATA_MANAGER, "Removing editing on group "
+ + op.getId().getGroupId() + " for draft " + draft.getId());
+
+ metadataOperations.forceUnsetOperation(context, draft.getId(),
+ op.getId().getGroupId(), op.getId().getOperationId());
+ } else {
+ Log.trace(Geonet.DATA_MANAGER, "Adding editing on group " + op.getId().getGroupId()
+ + " for draft " + draft.getId());
+
+ metadataOperations.forceSetOperation(context, draft.getId(),
+ op.getId().getGroupId(), op.getId().getOperationId());
+ }
+ metadataIndexer.indexMetadata(Arrays.asList(String.valueOf(draft.getId())));
+ } catch (Exception e) {
+ Log.error(Geonet.DATA_MANAGER, "Error cascading operation to draft", e);
}
- metadataIndexer.indexMetadata(Arrays.asList(String.valueOf(draft.getId())));
- } catch (Exception e) {
- Log.error(Geonet.DATA_MANAGER, "Error cascading operation to draft", e);
}
}
}
-
}
}
} catch (Throwable e) {
diff --git a/services/src/main/java/org/fao/geonet/api/ApiUtils.java b/services/src/main/java/org/fao/geonet/api/ApiUtils.java
index c154684dd104..752e73be3cef 100644
--- a/services/src/main/java/org/fao/geonet/api/ApiUtils.java
+++ b/services/src/main/java/org/fao/geonet/api/ApiUtils.java
@@ -77,6 +77,9 @@
/**
* API utilities mainly to deal with parameters.
+ *
+ * Many of these methods assume a service context is available as a thread local,
+ * see {@link #createServiceContext(HttpServletRequest)} methods for details.
*/
public class ApiUtils {
@@ -137,14 +140,17 @@ public static String getInternalId(String uuidOrInternalId, Boolean approved)
return id;
}
- //fixes the uri fragment portion (that the part after the "#")
- // so it is properly encoded
- //http://www.thesaurus.gc.ca/concept/#Offshore area --> http://www.thesaurus.gc.ca/concept/#Offshore%20area
- //http://www.thesaurus.gc.ca/concept/#AIDS (disease) --> http://www.thesaurus.gc.ca/concept/#AIDS%20%28disease%29
- //http://www.thesaurus.gc.ca/concept/#Alzheimer's disease --> http://www.thesaurus.gc.ca/concept/#Alzheimer%27s%20disease
- //
- //Includes some special case handling for spaces and ":"
- //
+ /**
+ * Fixes the URI fragment portion (that is, the part after the "#") so it is properly encoded.
+ *
+ *
+ * http://www.thesaurus.gc.ca/concept/#Offshore area --> http://www.thesaurus.gc.ca/concept/#Offshore%20area
+ * http://www.thesaurus.gc.ca/concept/#AIDS (disease) --> http://www.thesaurus.gc.ca/concept/#AIDS%20%28disease%29
+ * http://www.thesaurus.gc.ca/concept/#Alzheimer's disease --> http://www.thesaurus.gc.ca/concept/#Alzheimer%27s%20disease
+ *
+ *
+ * Includes some special case handling for spaces and ":"
+ */
//TODO: there could be other special handling for special cases in the future
public static String fixURIFragment(String uri) throws UnsupportedEncodingException {
String[] parts = uri.split("#");
@@ -157,7 +163,13 @@ public static String fixURIFragment(String uri) throws UnsupportedEncodingExcept
return String.join("#",parts);
}
-
+ /**
+ * Look up metadata record.
+ *
+ * @param uuidOrInternalId
+ * @return record
+ * @throws ResourceNotFoundException
+ */
public static AbstractMetadata getRecord(String uuidOrInternalId) throws ResourceNotFoundException {
IMetadataUtils metadataRepository = ApplicationContextHolder.get().getBean(IMetadataUtils.class);
AbstractMetadata metadata = null;
@@ -196,6 +208,9 @@ public static AbstractMetadata getRecord(String uuidOrInternalId) throws Resourc
*
* If session is null, it's probably a bot due to {@link AllRequestsInterceptor#createSessionForAllButNotCrawlers(HttpServletRequest)}.
* In such case return an exception.
+ *
+ * @param httpSession HTTP session
+ * @return Jeeves user session
*/
static public UserSession getUserSession(HttpSession httpSession) {
if (httpSession == null) {
@@ -211,6 +226,30 @@ static public UserSession getUserSession(HttpSession httpSession) {
/**
* If you really need a ServiceContext use this. Try to avoid in order to reduce dependency on
* Jeeves.
+ *
+ * This method has a side effect of setting the created service context for the current thread.
+ * If you create a service context you are responsible for managing on the current thread and any cleanup:
+ *
+ * Using auto closable:
+ *
+ * try(ServiceContext context = ApiUtils.createServiceContext(request, iso3langCode)){
+ * ...
+ * }
+ *
+ *
+ * Or manually:
+ *
+ * ServiceContext context = ApiUtils.createServiceContext(request, iso3langCode);
+ * try {
+ * ...
+ * }
+ * finally {
+ * context.clearAsThreadLocal();
+ * context.clear();
+ * }
+ *
+ * @param request
+ * @return new service context, assigned to the current thread
*/
static public ServiceContext createServiceContext(HttpServletRequest request) {
String iso3langCode = ApplicationContextHolder.get().getBean(LanguageUtils.class)
@@ -218,13 +257,51 @@ static public ServiceContext createServiceContext(HttpServletRequest request) {
return createServiceContext(request, iso3langCode);
}
+ /**
+ * If you really need a ServiceContext use this. Try to avoid in order to reduce dependency on
+ * Jeeves.
+ *
+ * This method has a side effect of setting the created service context for the current thread.
+ * If you create a service context you are responsible for managing on the current thread and any cleanup:
+ *
+ * Using auto closable:
+ *
+ * try(ServiceContext context = ApiUtils.createServiceContext(request, iso3langCode)){
+ * ...
+ * }
+ *
+ *
+ * Or manually:
+ *
+ * ServiceContext context = ApiUtils.createServiceContext(request, iso3langCode);
+ * try {
+ * ...
+ * }
+ * finally {
+ * context.clearAsThreadLocal();
+ * context.clear();
+ * }
+ *
+ *
+ * @param request
+ * @param iso3langCode
+ * @return new service context, assigned to the current thread
+ */
static public ServiceContext createServiceContext(HttpServletRequest request, String iso3langCode) {
ServiceManager serviceManager = ApplicationContextHolder.get().getBean(ServiceManager.class);
- ServiceContext serviceContext = serviceManager.createServiceContext("Api", iso3langCode, request);
+ String contextName = "Api"+request.getPathInfo();
+ ServiceContext serviceContext = serviceManager.createServiceContext(contextName, iso3langCode, request);
serviceContext.setAsThreadLocal();
return serviceContext;
}
+ /**
+ * Generate the filesize of files in a directory in KiB.
+ *
+ * @param lDir directory location
+ * @return size of files in directory in KiB
+ * @throws IOException
+ */
public static long sizeOfDirectory(Path lDir) throws IOException {
final long[] size = new long[]{0};
Files.walkFileTree(lDir, new SimpleFileVisitor() {
@@ -235,9 +312,17 @@ public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IO
}
});
- return size[0] / 1024;
+ return size[0] / 1024; // convert to KiB
}
+ /**
+ * Download an XML file into a temporary location.
+ *
+ * @param url
+ * @return temporary file
+ * @throws IOException
+ * @throws URISyntaxException
+ */
public static Path downloadUrlInTemp(String url) throws IOException, URISyntaxException {
URI uri = new URI(url);
@@ -258,61 +343,169 @@ public static Path downloadUrlInTemp(String url) throws IOException, URISyntaxEx
/**
* Check if the current user can edit this record.
+ *
+ * This method creates a temporary service context using the provided request to check record access,
+ * if you have a service context already please use {@link #canEditRecord(String, ServiceContext)}.
+ *
+ * @param metadataUuid Look up metadata record
+ * @param request Request to identify current user
+ * @return metadata record
+ * @throws SecurityException if user is not allowed to edit
*/
static public AbstractMetadata canEditRecord(String metadataUuid, HttpServletRequest request) throws Exception {
- ApplicationContext appContext = ApplicationContextHolder.get();
+ ServiceContext previous = ServiceContext.get();
+ if (previous != null) previous.clearAsThreadLocal();
+
+ try (ServiceContext context = createServiceContext(request)) {
+ return canEditRecord(metadataUuid, context);
+ }
+ finally {
+ if (previous != null) previous.setAsThreadLocal();
+ }
+ }
+
+ /**
+ * Check if the current user can edit this record.
+ *
+ * @param metadataUuid Look up metadata record
+ * @return metadata record
+ * @throws SecurityException if user is not allowed to edit
+ */
+ static public AbstractMetadata canEditRecord(String metadataUuid, ServiceContext context) throws Exception {
AbstractMetadata metadata = getRecord(metadataUuid);
- AccessManager accessManager = appContext.getBean(AccessManager.class);
- if (!accessManager.canEdit(createServiceContext(request), String.valueOf(metadata.getId()))) {
+ AccessManager accessManager = context.getBean(AccessManager.class);
+ if (!accessManager.canEdit(context, String.valueOf(metadata.getId()))) {
throw new SecurityException(String.format(
- "You can't edit record with UUID %s", metadataUuid));
+ "User %s can't edit record with UUID %s", context.userName(), metadataUuid));
}
return metadata;
}
/**
* Check if the current user can review this record.
+ *
+ * This method creates a temporary service context using the provided request to check record access,
+ * if you have a service context already please use {@link #canReviewRecord(String, ServiceContext)}.
+ *
+ * @param metadataUuid Look up metadata record
+ * @param request Request to identify current user
+ * @return metadata record
+ * @throws SecurityException if user is not allowed to review
+ * @deprecated Not presently used
*/
static public AbstractMetadata canReviewRecord(String metadataUuid, HttpServletRequest request) throws Exception {
- ApplicationContext appContext = ApplicationContextHolder.get();
+ ServiceContext previous = ServiceContext.get();
+ if (previous != null) previous.clearAsThreadLocal();
+
+ try (ServiceContext context = createServiceContext(request)) {
+ return canReviewRecord(metadataUuid,context);
+ }
+ finally {
+ if (previous != null) previous.setAsThreadLocal();
+ }
+ }
+
+ /**
+ * Check if the current user can review this record.
+ *
+ * @param metadataUuid Look up metadata record
+ * @return metadata record
+ * @throws SecurityException if user is not allowed to review
+ */
+ static public AbstractMetadata canReviewRecord(String metadataUuid,ServiceContext context) throws Exception {
AbstractMetadata metadata = getRecord(metadataUuid);
- AccessManager accessManager = appContext.getBean(AccessManager.class);
- if (!accessManager.canReview(createServiceContext(request), String.valueOf(metadata.getId()))) {
+ AccessManager accessManager = context.getBean(AccessManager.class);
+ if (!accessManager.canReview(context, String.valueOf(metadata.getId()))) {
throw new SecurityException(String.format(
- "You can't review or edit record with UUID %s", metadataUuid));
+ "User %s can't review or edit record with UUID %s", context.userName(), metadataUuid));
}
return metadata;
}
/**
* Check if the current user can change status of this record.
+ *
+ * This method creates a temporary service context using the provided request to check record access,
+ * if you have a service context already please use {@link #canChangeStatusRecord(String, ServiceContext)}.
+ *
+ * @param metadataUuid Look up metadata record
+ * @param request Request to identify current user
+ * @return metadata record
+ * @throws SecurityException if user is not allowed to review
+ * @deprecated Not presently used
*/
static public AbstractMetadata canChangeStatusRecord(String metadataUuid, HttpServletRequest request) throws Exception {
- ApplicationContext appContext = ApplicationContextHolder.get();
+ ServiceContext previous = ServiceContext.get();
+ if (previous != null) previous.clearAsThreadLocal();
+
+ try (ServiceContext context = createServiceContext(request)) {
+ return canChangeStatusRecord(metadataUuid,context);
+ }
+ finally {
+ if (previous != null) previous.setAsThreadLocal();
+ }
+ }
+
+ /**
+ * Check if the current user can change status of this record.
+ *
+ * @param metadataUuid Look up metadata record
+ * @return metadata record
+ * @throws SecurityException if user is not allowed to change status
+ */
+ static public AbstractMetadata canChangeStatusRecord(String metadataUuid, ServiceContext context) throws Exception {
AbstractMetadata metadata = getRecord(metadataUuid);
- AccessManager accessManager = appContext.getBean(AccessManager.class);
- if (!accessManager.canChangeStatus(createServiceContext(request), String.valueOf(metadata.getId()))) {
+ AccessManager accessManager = context.getBean(AccessManager.class);
+ if (!accessManager.canChangeStatus(context, String.valueOf(metadata.getId()))) {
throw new SecurityException(String.format(
- "You can't change status of record with UUID %s", metadataUuid));
+ "User %s can't change status of record with UUID %s", context.userName(), metadataUuid));
}
return metadata;
}
/**
* Check if the current user can view this record.
+ *
+ * This method creates a temporary service context using the provided request to check record access,
+ * if you have a service context already please use {@link #canViewRecord(String, ServiceContext)}.
+ *
+ * @param metadataUuid Look up metadata record
+ * @param request Request to identify current user
+ * @return metadata record
+ * @throws SecurityException if user is not allowed to view
*/
public static AbstractMetadata canViewRecord(String metadataUuid, HttpServletRequest request) throws Exception {
+ ServiceContext previous = ServiceContext.get();
+ if (previous != null) previous.clearAsThreadLocal();
+
+ try (ServiceContext context = createServiceContext(request)) {
+ return canViewRecord(metadataUuid,context);
+ }
+ finally {
+ if (previous != null) previous.setAsThreadLocal();
+ }
+ }
+ /**
+ * Check if the current user can view this record.
+ *
+ * @param metadataUuid Look up metadata record
+ * @return metadata record
+ * @throws SecurityException if user is not allowed to view
+ */
+ public static AbstractMetadata canViewRecord(String metadataUuid, ServiceContext context) throws Exception {
AbstractMetadata metadata = getRecord(metadataUuid);
try {
- Lib.resource.checkPrivilege(createServiceContext(request), String.valueOf(metadata.getId()), ReservedOperation.view);
+ Lib.resource.checkPrivilege(context, String.valueOf(metadata.getId()), ReservedOperation.view);
} catch (Exception e) {
throw new SecurityException(String.format(
- "You can't view record with UUID %s", metadataUuid));
+ "User %s can't view record with UUID %s", context.userName(), metadataUuid));
}
return metadata;
}
/**
+ * Create a favicon from the provided image.
+ *
* @param img
* @param outFile
* @throws IOException
diff --git a/services/src/main/java/org/fao/geonet/api/es/EsHTTPProxy.java b/services/src/main/java/org/fao/geonet/api/es/EsHTTPProxy.java
index e54eeaaf9afc..7418caa06464 100644
--- a/services/src/main/java/org/fao/geonet/api/es/EsHTTPProxy.java
+++ b/services/src/main/java/org/fao/geonet/api/es/EsHTTPProxy.java
@@ -254,8 +254,9 @@ public void search(
String body,
@Parameter(hidden = true)
HttpEntity httpEntity) throws Exception {
- ServiceContext context = ApiUtils.createServiceContext(request);
- call(context, httpSession, request, response, "_search", httpEntity.getBody(), bucket);
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ call(context, httpSession, request, response, "_search", httpEntity.getBody(), bucket);
+ }
}
@@ -287,9 +288,9 @@ public void call(
String body,
@Parameter(hidden = true)
HttpEntity httpEntity) throws Exception {
-
- ServiceContext context = ApiUtils.createServiceContext(request);
- call(context, httpSession, request, response, endPoint, httpEntity.getBody(), bucket);
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ call(context, httpSession, request, response, endPoint, httpEntity.getBody(), bucket);
+ }
}
public void call(ServiceContext context, HttpSession httpSession, HttpServletRequest request,
diff --git a/services/src/main/java/org/fao/geonet/api/groups/GroupsApi.java b/services/src/main/java/org/fao/geonet/api/groups/GroupsApi.java
index d6770dabbbd2..be5cbf0f0414 100644
--- a/services/src/main/java/org/fao/geonet/api/groups/GroupsApi.java
+++ b/services/src/main/java/org/fao/geonet/api/groups/GroupsApi.java
@@ -184,51 +184,52 @@ public void getGroupLogo(
Locale locale = languageUtils.parseAcceptLanguage(request.getLocales());
ApplicationContext context = ApplicationContextHolder.get();
- ServiceContext serviceContext = ApiUtils.createServiceContext(request, locale.getISO3Country());
- if (context == null) {
- throw new RuntimeException("ServiceContext not available");
- }
+ try (ServiceContext serviceContext = ApiUtils.createServiceContext(request, locale.getISO3Country())) {
+ if (context == null) {
+ throw new RuntimeException("ServiceContext not available");
+ }
- Optional group = groupRepository.findById(groupId);
- if (!group.isPresent()) {
- throw new ResourceNotFoundException(messages.getMessage("api.groups.group_not_found", new
- Object[]{groupId}, locale));
- }
- try {
- final Resources resources = context.getBean(Resources.class);
- final String logoUUID = group.get().getLogo();
- if (StringUtils.isNotBlank(logoUUID) && !logoUUID.startsWith("http://") && !logoUUID.startsWith("https//")) {
- try (Resources.ResourceHolder image = getImage(resources, serviceContext, group.get())) {
- if (image != null) {
- FileTime lastModifiedTime = image.getLastModifiedTime();
- response.setDateHeader("Expires", System.currentTimeMillis() + SIX_HOURS * 1000L);
- if (webRequest.checkNotModified(lastModifiedTime.toMillis())) {
- // webRequest.checkNotModified sets the right HTTP headers
+ Optional group = groupRepository.findById(groupId);
+ if (!group.isPresent()) {
+ throw new ResourceNotFoundException(messages.getMessage("api.groups.group_not_found", new
+ Object[]{groupId}, locale));
+ }
+ try {
+ final Resources resources = context.getBean(Resources.class);
+ final String logoUUID = group.get().getLogo();
+ if (StringUtils.isNotBlank(logoUUID) && !logoUUID.startsWith("http://") && !logoUUID.startsWith("https//")) {
+ try (Resources.ResourceHolder image = getImage(resources, serviceContext, group.get())) {
+ if (image != null) {
+ FileTime lastModifiedTime = image.getLastModifiedTime();
+ response.setDateHeader("Expires", System.currentTimeMillis() + SIX_HOURS * 1000L);
+ if (webRequest.checkNotModified(lastModifiedTime.toMillis())) {
+ // webRequest.checkNotModified sets the right HTTP headers
+ return;
+ }
+ response.setContentType(AttachmentsApi.getFileContentType(image.getPath()));
+ response.setContentLength((int) Files.size(image.getPath()));
+ response.addHeader("Cache-Control", "max-age=" + SIX_HOURS + ", public");
+ FileUtils.copyFile(image.getPath().toFile(), response.getOutputStream());
return;
}
- response.setContentType(AttachmentsApi.getFileContentType(image.getPath()));
- response.setContentLength((int) Files.size(image.getPath()));
- response.addHeader("Cache-Control", "max-age=" + SIX_HOURS + ", public");
- FileUtils.copyFile(image.getPath().toFile(), response.getOutputStream());
- return;
}
}
- }
- // no logo image found. Return a transparent 1x1 png
- FileTime lastModifiedTime = FileTime.fromMillis(0);
- if (webRequest.checkNotModified(lastModifiedTime.toMillis())) {
- return;
+ // no logo image found. Return a transparent 1x1 png
+ FileTime lastModifiedTime = FileTime.fromMillis(0);
+ if (webRequest.checkNotModified(lastModifiedTime.toMillis())) {
+ return;
+ }
+ response.setContentType("image/png");
+ response.setContentLength(TRANSPARENT_1_X_1_PNG.length);
+ response.addHeader("Cache-Control", "max-age=" + SIX_HOURS + ", public");
+ response.getOutputStream().write(TRANSPARENT_1_X_1_PNG);
+
+ } catch (IOException e) {
+ Log.error(LOGGER, String.format("There was an error accessing the logo of the group with id '%d'",
+ groupId));
+ throw new RuntimeException(e);
}
- response.setContentType("image/png");
- response.setContentLength(TRANSPARENT_1_X_1_PNG.length);
- response.addHeader("Cache-Control", "max-age=" + SIX_HOURS + ", public");
- response.getOutputStream().write(TRANSPARENT_1_X_1_PNG);
-
- } catch (IOException e) {
- Log.error(LOGGER, String.format("There was an error accessing the logo of the group with id '%d'",
- groupId));
- throw new RuntimeException(e);
}
}
diff --git a/services/src/main/java/org/fao/geonet/api/harvesting/HarvestersApi.java b/services/src/main/java/org/fao/geonet/api/harvesting/HarvestersApi.java
index ca2b45d30f73..eb7541b7140c 100644
--- a/services/src/main/java/org/fao/geonet/api/harvesting/HarvestersApi.java
+++ b/services/src/main/java/org/fao/geonet/api/harvesting/HarvestersApi.java
@@ -201,15 +201,16 @@ public ResponseEntity checkHarvesterPropertyExist(
@RequestParam
String exist,
HttpServletRequest request) throws Exception {
- ServiceContext context = ApiUtils.createServiceContext(request);
- final Element list = harvestManager.get(null, context, "site[1]/name[1]");
- if (list.getChildren().stream()
- .filter(h -> h instanceof Element)
- .map(h -> ((Element) h).getChild("site").getChild(property).getTextTrim())
- .anyMatch(name -> ((String) name).equalsIgnoreCase(exist))) {
- return new ResponseEntity<>(HttpStatus.OK);
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ final Element list = harvestManager.get(null, context, "site[1]/name[1]");
+ if (list.getChildren().stream()
+ .filter(h -> h instanceof Element)
+ .map(h -> ((Element) h).getChild("site").getChild(property).getTextTrim())
+ .anyMatch(name -> ((String) name).equalsIgnoreCase(exist))) {
+ return new ResponseEntity<>(HttpStatus.OK);
+ }
+
+ return new ResponseEntity<>(HttpStatus.NOT_FOUND);
}
-
- return new ResponseEntity<>(HttpStatus.NOT_FOUND);
}
}
diff --git a/services/src/main/java/org/fao/geonet/api/languages/LanguagesApi.java b/services/src/main/java/org/fao/geonet/api/languages/LanguagesApi.java
index ad6b53f85e2f..a642d8a08d5b 100644
--- a/services/src/main/java/org/fao/geonet/api/languages/LanguagesApi.java
+++ b/services/src/main/java/org/fao/geonet/api/languages/LanguagesApi.java
@@ -124,9 +124,10 @@ public void addLanguages(
}
}
if (data.size() > 0) {
- ServiceContext context = ApiUtils.createServiceContext(request);
- DbLib.runSQL(context, data);
- return;
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ DbLib.runSQL(context, data);
+ return;
+ }
}
}
throw new ResourceNotFoundException(String.format(
@@ -186,9 +187,10 @@ public void deleteLanguage(
}
}
if (data.size() > 0) {
- ServiceContext context = ApiUtils.createServiceContext(request);
- DbLib.runSQL(context, data);
- return;
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ DbLib.runSQL(context, data);
+ return;
+ }
}
}
throw new ResourceNotFoundException(String.format(
diff --git a/services/src/main/java/org/fao/geonet/api/links/LinksApi.java b/services/src/main/java/org/fao/geonet/api/links/LinksApi.java
index 74a6426bdf98..1aaaff03418b 100644
--- a/services/src/main/java/org/fao/geonet/api/links/LinksApi.java
+++ b/services/src/main/java/org/fao/geonet/api/links/LinksApi.java
@@ -289,63 +289,64 @@ public SimpleMetadataProcessingReport analyzeRecordLinks(
@Parameter(hidden = true)
HttpServletRequest request
) throws IOException, JDOMException {
- MAnalyseProcess registredMAnalyseProcess = getRegistredMAnalyseProcess();
+ try (ServiceContext serviceContext = ApiUtils.createServiceContext(request)) {
+ MAnalyseProcess registredMAnalyseProcess = getRegistredMAnalyseProcess();
- ServiceContext serviceContext = ApiUtils.createServiceContext(request);
- UserSession session = ApiUtils.getUserSession(httpSession);
+ UserSession session = ApiUtils.getUserSession(httpSession);
- boolean isAdministrator = session.getProfile() == Profile.Administrator;
- if (isAdministrator && removeFirst) {
- registredMAnalyseProcess.deleteAll();
- }
+ boolean isAdministrator = session.getProfile() == Profile.Administrator;
+ if (isAdministrator && removeFirst) {
+ registredMAnalyseProcess.deleteAll();
+ }
- SimpleMetadataProcessingReport report =
- new SimpleMetadataProcessingReport();
+ SimpleMetadataProcessingReport report =
+ new SimpleMetadataProcessingReport();
- Set ids = Sets.newHashSet();
+ Set ids = Sets.newHashSet();
- if (uuids != null || StringUtils.isNotEmpty(bucket)) {
- try {
- Set records = ApiUtils.getUuidsParameterOrSelection(uuids, bucket, session);
- for (String uuid : records) {
- if (!metadataUtils.existsMetadataUuid(uuid)) {
- report.incrementNullRecords();
- }
- for (AbstractMetadata record : metadataRepository.findAllByUuid(uuid)) {
- if (!accessManager.canEdit(serviceContext, String.valueOf(record.getId()))) {
- report.addNotEditableMetadataId(record.getId());
- } else {
- ids.add(record.getId());
- report.addMetadataId(record.getId());
- report.incrementProcessedRecords();
+ if (uuids != null || StringUtils.isNotEmpty(bucket)) {
+ try {
+ Set records = ApiUtils.getUuidsParameterOrSelection(uuids, bucket, session);
+ for (String uuid : records) {
+ if (!metadataUtils.existsMetadataUuid(uuid)) {
+ report.incrementNullRecords();
+ }
+ for (AbstractMetadata record : metadataRepository.findAllByUuid(uuid)) {
+ if (!accessManager.canEdit(serviceContext, String.valueOf(record.getId()))) {
+ report.addNotEditableMetadataId(record.getId());
+ } else {
+ ids.add(record.getId());
+ report.addMetadataId(record.getId());
+ report.incrementProcessedRecords();
+ }
}
}
- }
- } catch (Exception e) {
- report.addError(e);
- } finally {
- report.close();
- }
- } else {
- if (isAdministrator) {
- // Process all
- final List metadataList = metadataRepository.findAll();
- for (Metadata m : metadataList) {
- ids.add(m.getId());
- report.addMetadataId(m.getId());
- report.incrementProcessedRecords();
+ } catch (Exception e) {
+ report.addError(e);
+ } finally {
+ report.close();
}
} else {
- throw new OperationNotAllowedEx(String.format(
- "Only administrator can trigger link analysis on the entire catalogue. This is not allowed for %s.",
- session.getProfile()
- ));
+ if (isAdministrator) {
+ // Process all
+ final List metadataList = metadataRepository.findAll();
+ for (Metadata m : metadataList) {
+ ids.add(m.getId());
+ report.addMetadataId(m.getId());
+ report.incrementProcessedRecords();
+ }
+ } else {
+ throw new OperationNotAllowedEx(String.format(
+ "Only administrator can trigger link analysis on the entire catalogue. This is not allowed for %s.",
+ session.getProfile()
+ ));
+ }
+ report.close();
}
- report.close();
- }
- registredMAnalyseProcess.processMetadataAndTestLink(analyze, ids);
- return report;
+ registredMAnalyseProcess.processMetadataAndTestLink(analyze, ids);
+ return report;
+ }
}
diff --git a/services/src/main/java/org/fao/geonet/api/mapservers/MapServersApi.java b/services/src/main/java/org/fao/geonet/api/mapservers/MapServersApi.java
index f09a537da729..60e0077c6b97 100644
--- a/services/src/main/java/org/fao/geonet/api/mapservers/MapServersApi.java
+++ b/services/src/main/java/org/fao/geonet/api/mapservers/MapServersApi.java
@@ -514,57 +514,56 @@ private String publishResource(String mapserverId, String metadataUuid,
String metadataAbstract,
HttpServletRequest request,
MapServersUtils.ACTION action) throws Exception {
- // purge \\n from metadataTitle - geoserver prefers layer titles on a single line
- metadataTitle = metadataTitle.replace("\\n", "");
- metadataAbstract = metadataAbstract.replace("\\n", "");
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ // purge \\n from metadataTitle - geoserver prefers layer titles on a single line
+ metadataTitle = metadataTitle.replace("\\n", "");
+ metadataAbstract = metadataAbstract.replace("\\n", "");
- ApplicationContext applicationContext = ApplicationContextHolder.get();
- MapServer m = mapServerRepository.findOneById(Integer.valueOf(mapserverId));
- GeoServerNode g = new GeoServerNode(m);
+ ApplicationContext applicationContext = ApplicationContextHolder.get();
+ MapServer m = mapServerRepository.findOneById(Integer.valueOf(mapserverId));
+ GeoServerNode g = new GeoServerNode(m);
-
- ServiceContext context = ApiUtils.createServiceContext(request);
-
- String baseUrl = settingManager.getSiteURL(context);
- GeoServerRest gs = new GeoServerRest(requestFactory, g.getUrl(),
- g.getUsername(), g.getUserpassword(),
- g.getNamespacePrefix(), baseUrl, settingManager.getNodeURL(),
- m.pushStyleInWorkspace());
+ String baseUrl = settingManager.getSiteURL(context);
+ GeoServerRest gs = new GeoServerRest(requestFactory, g.getUrl(),
+ g.getUsername(), g.getUserpassword(),
+ g.getNamespacePrefix(), baseUrl, settingManager.getNodeURL(),
+ m.pushStyleInWorkspace());
// String access = Util.getParam(params, "access");
- //jdbc:postgresql://host:port/user:password@database#table
- if (resource.startsWith("jdbc:postgresql")) {
- String[] values = resource.split("/");
+ //jdbc:postgresql://host:port/user:password@database#table
+ if (resource.startsWith("jdbc:postgresql")) {
+ String[] values = resource.split("/");
- String[] serverInfo = values[2].split(":");
- String host = serverInfo[0];
- String port = serverInfo[1];
+ String[] serverInfo = values[2].split(":");
+ String host = serverInfo[0];
+ String port = serverInfo[1];
- String[] dbUserInfo = values[3].split("@");
+ String[] dbUserInfo = values[3].split("@");
- String[] userInfo = dbUserInfo[0].split(":");
- String user = userInfo[0];
- String password = userInfo[1];
+ String[] userInfo = dbUserInfo[0].split(":");
+ String user = userInfo[0];
+ String password = userInfo[1];
- String[] dbInfo = dbUserInfo[1].split("#");
- String db = dbInfo[0];
- String table = dbInfo[1];
+ String[] dbInfo = dbUserInfo[1].split("#");
+ String db = dbInfo[0];
+ String table = dbInfo[1];
- return publishDbTable(action, gs,
- "postgis", host, port, user, password, db, table, "postgis",
- g.getNamespaceUrl(), metadataUuid, metadataTitle, metadataAbstract);
- } else {
- if (resource.startsWith("file://") || resource.startsWith("http://")) {
- return addExternalFile(action, gs,
- resource,
- metadataUuid, metadataTitle, metadataAbstract);
+ return publishDbTable(action, gs,
+ "postgis", host, port, user, password, db, table, "postgis",
+ g.getNamespaceUrl(), metadataUuid, metadataTitle, metadataAbstract);
} else {
- // Get ZIP file from data directory
- try (Store.ResourceHolder f = store.getResource(context, metadataUuid, resource)) {
- return addZipFile(action, gs,
- f.getPath(), resource,
+ if (resource.startsWith("file://") || resource.startsWith("http://")) {
+ return addExternalFile(action, gs,
+ resource,
metadataUuid, metadataTitle, metadataAbstract);
+ } else {
+ // Get ZIP file from data directory
+ try (Store.ResourceHolder f = store.getResource(context, metadataUuid, resource)) {
+ return addZipFile(action, gs,
+ f.getPath(), resource,
+ metadataUuid, metadataTitle, metadataAbstract);
+ }
}
}
}
diff --git a/services/src/main/java/org/fao/geonet/api/processing/MInspireEtfValidateProcess.java b/services/src/main/java/org/fao/geonet/api/processing/MInspireEtfValidateProcess.java
index b6b7708bc68a..e9a3f2e90dfa 100644
--- a/services/src/main/java/org/fao/geonet/api/processing/MInspireEtfValidateProcess.java
+++ b/services/src/main/java/org/fao/geonet/api/processing/MInspireEtfValidateProcess.java
@@ -47,7 +47,8 @@
public class MInspireEtfValidateProcess implements SelfNaming {
private final ApplicationContext appContext;
- private final ServiceContext serviceContext;
+    /** Shared validation service context, used as a fallback when no thread-local service context is available. */
+ private ServiceContext validationServiceContext;
private final String URL;
private ObjectName probeName;
@@ -63,7 +64,7 @@ public class MInspireEtfValidateProcess implements SelfNaming {
public MInspireEtfValidateProcess(String URL,
ServiceContext serviceContext, ApplicationContext appContext) {
this.URL = URL;
- this.serviceContext = serviceContext;
+ this.validationServiceContext = serviceContext;
this.appContext = appContext;
try {
@@ -124,6 +125,18 @@ public Object doInTransaction(TransactionStatus transaction) throws Throwable {
});
}
+ /**
+ * Gets the ServiceContext for the current request.
+ *
+ * If there isn't a current request, then this will return the validationServiceContext.
+ *
+ * @return ServiceContext for the current request
+ */
+ protected ServiceContext getServiceContext(){
+ ServiceContext context = ServiceContext.get();
+ return context != null ? context : validationServiceContext;
+ }
+
public void processMetadata(Set uuids, String mode) throws Exception {
IMetadataUtils metadataRepository = appContext.getBean(IMetadataUtils.class);
MetadataValidationRepository metadataValidationRepository = appContext.getBean(MetadataValidationRepository.class);
@@ -136,7 +149,7 @@ public void processMetadata(Set uuids, String mode) throws Exception {
metadataToAnalyseCount = uuids.size();
analyseMdDate = System.currentTimeMillis();
- ServiceContext context = serviceContext;
+ ServiceContext context = getServiceContext();
for (String uuid : uuids) {
if (!metadataRepository.existsMetadataUuid(uuid)) {
@@ -147,7 +160,7 @@ public void processMetadata(Set uuids, String mode) throws Exception {
for (AbstractMetadata record : metadataRepository.findAllByUuid(uuid)) {
try {
- if (!accessManager.canEdit(serviceContext, String.valueOf(record.getId()))) {
+ if (!accessManager.canEdit(context, String.valueOf(record.getId()))) {
metadataAnalysed++;
metadataNotAllowed++;
} else {
@@ -183,7 +196,7 @@ public Object doInTransaction(TransactionStatus transaction) throws Throwable {
String testId = null;
String getRecordByIdUrl = null;
if (StringUtils.isEmpty(mode)) {
- testId = inspireValidatorUtils.submitFile(serviceContext, URL,
+ testId = inspireValidatorUtils.submitFile(context, URL,
new ByteArrayInputStream(mdToValidate.getBytes()), entry.getKey(), record.getUuid());
} else {
String portal = null;
@@ -208,18 +221,18 @@ public Object doInTransaction(TransactionStatus transaction) throws Throwable {
portal,
ISO19139Namespaces.GMD.getURI(),
record.getUuid());
- testId = inspireValidatorUtils.submitUrl(serviceContext, URL, getRecordByIdUrl, entry.getKey(), record.getUuid());
+ testId = inspireValidatorUtils.submitUrl(context, URL, getRecordByIdUrl, entry.getKey(), record.getUuid());
}
}
if (testId != null) {
- inspireValidatorUtils.waitUntilReady(serviceContext, URL, testId);
+ inspireValidatorUtils.waitUntilReady(context, URL, testId);
String reportUrl = inspireValidatorUtils.getReportUrl(URL, testId);
String reportXmlUrl = InspireValidatorUtils.getReportUrlXML(URL, testId);
- String reportXml = inspireValidatorUtils.retrieveReport(serviceContext, reportXmlUrl);
+ String reportXml = inspireValidatorUtils.retrieveReport(context, reportXmlUrl);
- String validationStatus = inspireValidatorUtils.isPassed(serviceContext, URL, testId);
+ String validationStatus = inspireValidatorUtils.isPassed(context, URL, testId);
MetadataValidationStatus metadataValidationStatus =
inspireValidatorUtils.calculateValidationStatus(validationStatus);
diff --git a/services/src/main/java/org/fao/geonet/api/processing/ProcessApi.java b/services/src/main/java/org/fao/geonet/api/processing/ProcessApi.java
index b1a4ab0930fb..226574ffc0d3 100644
--- a/services/src/main/java/org/fao/geonet/api/processing/ProcessApi.java
+++ b/services/src/main/java/org/fao/geonet/api/processing/ProcessApi.java
@@ -28,6 +28,7 @@
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import jeeves.server.UserSession;
+import jeeves.server.context.ServiceContext;
import org.fao.geonet.api.API;
import org.fao.geonet.api.ApiParams;
import org.fao.geonet.api.ApiUtils;
@@ -174,7 +175,7 @@ public MetadataReplacementProcessingReport searchAndReplace(
MetadataReplacementProcessingReport report =
new MetadataReplacementProcessingReport("massive-content-update");
- try {
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
Set records = ApiUtils.getUuidsParameterOrSelection(uuids, bucket, userSession);
report.setTotalRecords(records.size());
@@ -183,7 +184,7 @@ public MetadataReplacementProcessingReport searchAndReplace(
process,
isTesting, isCaseInsensitive, vacuumMode,
allParams,
- ApiUtils.createServiceContext(request), records, report);
+ context, records, report);
m.process();
} catch (Exception e) {
throw e;
diff --git a/services/src/main/java/org/fao/geonet/api/processing/ValidateApi.java b/services/src/main/java/org/fao/geonet/api/processing/ValidateApi.java
index 2f0d5baa7ac4..2d6d7ee5401b 100644
--- a/services/src/main/java/org/fao/geonet/api/processing/ValidateApi.java
+++ b/services/src/main/java/org/fao/geonet/api/processing/ValidateApi.java
@@ -161,9 +161,8 @@ public SimpleMetadataProcessingReport validateRecords(
SimpleMetadataProcessingReport report =
new SimpleMetadataProcessingReport();
- try {
+ try (ServiceContext serviceContext = ApiUtils.createServiceContext(request)) {
ApplicationContext applicationContext = ApplicationContextHolder.get();
- ServiceContext serviceContext = ApiUtils.createServiceContext(request);
Set records = ApiUtils.getUuidsParameterOrSelection(uuids, bucket, userSession);
report.setTotalRecords(records.size());
@@ -255,9 +254,7 @@ public SimpleMetadataProcessingReport cleanValidationStatus(
SimpleMetadataProcessingReport report =
new SimpleMetadataProcessingReport();
- try {
- ServiceContext serviceContext = ApiUtils.createServiceContext(request);
-
+ try (ServiceContext serviceContext = ApiUtils.createServiceContext(request)) {
Set records = ApiUtils.getUuidsParameterOrSelection(uuids, bucket, userSession);
for (String uuid : records) {
diff --git a/services/src/main/java/org/fao/geonet/api/processing/XslProcessApi.java b/services/src/main/java/org/fao/geonet/api/processing/XslProcessApi.java
index 9b7ec0a0fba9..5972c2b3418e 100644
--- a/services/src/main/java/org/fao/geonet/api/processing/XslProcessApi.java
+++ b/services/src/main/java/org/fao/geonet/api/processing/XslProcessApi.java
@@ -185,18 +185,20 @@ public Object previewProcessRecords(
}
mergedDocuments.addContent(dataMan.getMetadata(id));
} else {
- // Save processed metadata
- if (isText) {
- output.append(XslProcessUtils.processAsText(ApiUtils.createServiceContext(request),
- id, process, false,
- xslProcessingReport, siteURL, request.getParameterMap())
- );
- } else {
- Element record = XslProcessUtils.process(ApiUtils.createServiceContext(request),
- id, process, false, false,
- false, xslProcessingReport, siteURL, request.getParameterMap());
- if (record != null) {
- preview.addContent(record.detach());
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ // Save processed metadata
+ if (isText) {
+ output.append(XslProcessUtils.processAsText(context,
+ id, process, false,
+ xslProcessingReport, siteURL, request.getParameterMap())
+ );
+ } else {
+ Element record = XslProcessUtils.process(context,
+ id, process, false, false,
+ false, xslProcessingReport, siteURL, request.getParameterMap());
+ if (record != null) {
+ preview.addContent(record.detach());
+ }
}
}
}
@@ -290,7 +292,7 @@ public XsltMetadataProcessingReport processRecords(
XsltMetadataProcessingReport xslProcessingReport =
new XsltMetadataProcessingReport(process);
- try {
+ try (ServiceContext context = ApiUtils.createServiceContext(request)){
Set records = ApiUtils.getUuidsParameterOrSelection(uuids, bucket, session);
UserSession userSession = ApiUtils.getUserSession(httpSession);
@@ -299,7 +301,7 @@ public XsltMetadataProcessingReport processRecords(
xslProcessingReport.setTotalRecords(records.size());
BatchXslMetadataReindexer m = new BatchXslMetadataReindexer(
- ApiUtils.createServiceContext(request),
+ context,
dataMan, records, process, httpSession, siteURL,
xslProcessingReport, request, index, updateDateStamp, userSession.getUserIdAsInt());
m.process();
diff --git a/services/src/main/java/org/fao/geonet/api/records/CatalogApi.java b/services/src/main/java/org/fao/geonet/api/records/CatalogApi.java
index 5aa68f8821e7..7e8743d5c832 100644
--- a/services/src/main/java/org/fao/geonet/api/records/CatalogApi.java
+++ b/services/src/main/java/org/fao/geonet/api/records/CatalogApi.java
@@ -250,78 +250,80 @@ public void exportAsMef(
SelectionManager selectionManger = SelectionManager.getManager(session);
Log.info(Geonet.MEF, "Current record(s) in selection: " + uuidList.size());
- ServiceContext context = ApiUtils.createServiceContext(request);
- MEFLib.Version version = MEFLib.Version.find(acceptHeader);
- if (version == MEFLib.Version.V1) {
- throw new IllegalArgumentException("MEF version 1 only support one record. Use the /records/{uuid}/formatters/zip to retrieve that format");
- } else {
- Set allowedUuid = new HashSet();
- for (Iterator iter = uuidList.iterator(); iter.hasNext(); ) {
- String uuid = iter.next();
- try {
- ApiUtils.canViewRecord(uuid, request);
- allowedUuid.add(uuid);
- } catch (Exception e) {
- Log.debug(API.LOG_MODULE_NAME, String.format(
- "Not allowed to export record '%s'.", uuid));
- }
- }
-
- // If provided uuid, export the metadata record only
- selectionManger.close(SelectionManager.SELECTION_METADATA);
- selectionManger.addAllSelection(SelectionManager.SELECTION_METADATA,
- allowedUuid);
-
- // MEF version 2 support multiple metadata record by file.
- if (withRelated) {
- int maxhits = Integer.parseInt(settingInfo.getSelectionMaxRecords());
-
- Set tmpUuid = new HashSet();
- for (Iterator iter = allowedUuid.iterator(); iter.hasNext(); ) {
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ MEFLib.Version version = MEFLib.Version.find(acceptHeader);
+ if (version == MEFLib.Version.V1) {
+ throw new IllegalArgumentException("MEF version 1 only support one record. Use the /records/{uuid}/formatters/zip to retrieve that format");
+ } else {
+ Set allowedUuid = new HashSet();
+ for (Iterator iter = uuidList.iterator(); iter.hasNext(); ) {
String uuid = iter.next();
-
- // Search for children records
- // and service record. At some point this might be extended to all type of relations.
- final SearchResponse searchResponse = searchManager.query(
- String.format("parentUuid:\"%s\" recordOperateOn:\"%s\"", uuid, uuid),
- esHTTPProxy.buildPermissionsFilter(ApiUtils.createServiceContext(request)),
- FIELDLIST_UUID, 0, maxhits);
-
- Arrays.asList(searchResponse.getHits().getHits()).forEach(h ->
- tmpUuid.add((String) h.getSourceAsMap().get(Geonet.IndexFieldNames.UUID)));
+ try {
+ ApiUtils.canViewRecord(uuid, context);
+ allowedUuid.add(uuid);
+ } catch (Exception e) {
+ Log.debug(API.LOG_MODULE_NAME, String.format(
+ "Not allowed to export record '%s'.", uuid));
+ }
}
- if (selectionManger.addAllSelection(SelectionManager.SELECTION_METADATA, tmpUuid)) {
- Log.info(Geonet.MEF, "Child and services added into the selection");
+ // If provided uuid, export the metadata record only
+ selectionManger.close(SelectionManager.SELECTION_METADATA);
+ selectionManger.addAllSelection(SelectionManager.SELECTION_METADATA,
+ allowedUuid);
+
+                // MEF version 2 supports multiple metadata records per file.
+ if (withRelated) {
+ int maxhits = Integer.parseInt(settingInfo.getSelectionMaxRecords());
+
+ Set tmpUuid = new HashSet();
+ for (Iterator iter = allowedUuid.iterator(); iter.hasNext(); ) {
+ String uuid = iter.next();
+
+ // Search for children records
+                        // and service record. At some point this might be extended to all types of relations.
+ final SearchResponse searchResponse = searchManager.query(
+ String.format("parentUuid:\"%s\" recordOperateOn:\"%s\"", uuid, uuid),
+                            esHTTPProxy.buildPermissionsFilter(context),
+ FIELDLIST_UUID, 0, maxhits);
+
+ Arrays.asList(searchResponse.getHits().getHits()).forEach(h ->
+ tmpUuid.add((String) h.getSourceAsMap().get(Geonet.IndexFieldNames.UUID)));
+ }
+
+ if (selectionManger.addAllSelection(SelectionManager.SELECTION_METADATA, tmpUuid)) {
+ Log.info(Geonet.MEF, "Child and services added into the selection");
+ }
+ allowedUuid = selectionManger.getSelection(SelectionManager.SELECTION_METADATA);
}
- allowedUuid = selectionManger.getSelection(SelectionManager.SELECTION_METADATA);
- }
- Log.info(Geonet.MEF, "Building MEF2 file with " + uuidList.size()
- + " records.");
- try {
- file = MEFLib.doMEF2Export(context, allowedUuid, format.toString(),
- false, stylePath,
- withXLinksResolved, withXLinkAttribute,
- false, addSchemaLocation, approved);
-
- DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HHmmss");
- String fileName = String.format("%s-%s.zip",
- settingManager.getSiteName().replace(" ", ""),
- df.format(new Date()));
-
- response.setHeader(HttpHeaders.CONTENT_DISPOSITION, String.format(
- "inline; filename=\"%s\"",
- fileName
- ));
- response.setHeader(HttpHeaders.CONTENT_LENGTH, String.valueOf(Files.size(file)));
- response.setContentType(MEFLib.Version.Constants.MEF_V2_ACCEPT_TYPE);
- FileUtils.copyFile(file.toFile(), response.getOutputStream());
- } finally {
- // -- Reset selection manager
- selectionManger.close(SelectionManager.SELECTION_METADATA);
+ Log.info(Geonet.MEF, "Building MEF2 file with " + uuidList.size()
+ + " records.");
+ try {
+ file = MEFLib.doMEF2Export(context, allowedUuid, format.toString(),
+ false, stylePath,
+ withXLinksResolved, withXLinkAttribute,
+ false, addSchemaLocation, approved);
+
+ DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HHmmss");
+ String fileName = String.format("%s-%s.zip",
+ settingManager.getSiteName().replace(" ", ""),
+ df.format(new Date()));
+
+ response.setHeader(HttpHeaders.CONTENT_DISPOSITION, String.format(
+ "inline; filename=\"%s\"",
+ fileName
+ ));
+ response.setHeader(HttpHeaders.CONTENT_LENGTH, String.valueOf(Files.size(file)));
+ response.setContentType(MEFLib.Version.Constants.MEF_V2_ACCEPT_TYPE);
+ FileUtils.copyFile(file.toFile(), response.getOutputStream());
+ } finally {
+ // -- Reset selection manager
+ selectionManger.close(SelectionManager.SELECTION_METADATA);
+ }
}
}
+
}
@io.swagger.v3.oas.annotations.Operation(
@@ -506,30 +508,30 @@ public void exportAsCsv(
uuids, bucket, session);
int maxhits = Integer.parseInt(settingInfo.getSelectionMaxRecords());
- ServiceContext context = ApiUtils.createServiceContext(httpRequest);
-
- final SearchResponse searchResponse = searchManager.query(
- String.format("uuid:(\"%s\")", String.join("\" or \"", uuidList)),
- esHTTPProxy.buildPermissionsFilter(ApiUtils.createServiceContext(httpRequest)),
- FIELDLIST_CORE, 0, maxhits);
-
- Element response = new Element("response");
- Arrays.asList(searchResponse.getHits().getHits()).forEach(h -> {
- try {
- response.addContent(
- dataManager.getMetadata(
- context,
- (String) h.getSourceAsMap().get("id"),
- false, false, false));
- } catch (Exception e) {
- }
- });
+ try (ServiceContext context = ApiUtils.createServiceContext(httpRequest)) {
+ final SearchResponse searchResponse = searchManager.query(
+ String.format("uuid:(\"%s\")", String.join("\" or \"", uuidList)),
+                esHTTPProxy.buildPermissionsFilter(context),
+ FIELDLIST_CORE, 0, maxhits);
+
+ Element response = new Element("response");
+ Arrays.asList(searchResponse.getHits().getHits()).forEach(h -> {
+ try {
+ response.addContent(
+ dataManager.getMetadata(
+ context,
+ (String) h.getSourceAsMap().get("id"),
+ false, false, false));
+ } catch (Exception e) {
+ }
+ });
- Element r = new XsltResponseWriter(null,"search")
- .withXml(response)
- .withXsl("xslt/services/csv/csv-search.xsl")
- .asElement();
- httpResponse.getWriter().write(r.getText());
+ Element r = new XsltResponseWriter(null,"search")
+ .withXml(response)
+ .withXsl("xslt/services/csv/csv-search.xsl")
+ .asElement();
+ httpResponse.getWriter().write(r.getText());
+ }
}
@io.swagger.v3.oas.annotations.Operation(
@@ -628,84 +630,85 @@ void getAsRdf(
allRequestParams.put("hitsPerPage", Integer.toString(hitsPerPage));
allRequestParams.put("from", Integer.toString(from));
- ServiceContext context = ApiUtils.createServiceContext(request);
- RdfOutputManager manager = new RdfOutputManager(
- thesaurusManager.buildResultfromThTable(context), hitsPerPage);
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ RdfOutputManager manager = new RdfOutputManager(
+ thesaurusManager.buildResultfromThTable(context), hitsPerPage);
- // Copy all request parameters
- /// Mimic old Jeeves param style
- Element params = new Element("params");
- allRequestParams.forEach((k, v) -> {
- params.addContent(new Element(k).setText(v));
- });
+ // Copy all request parameters
+ /// Mimic old Jeeves param style
+ Element params = new Element("params");
+ allRequestParams.forEach((k, v) -> {
+ params.addContent(new Element(k).setText(v));
+ });
- // Perform the search on the Lucene Index
- RdfSearcher rdfSearcher = new RdfSearcher(params, context);
- List results = rdfSearcher.search(context);
- rdfSearcher.close();
-
- // Calculates the pagination information, needed for the LDP Paging and Hydra Paging
- int numberMatched = rdfSearcher.getSize();
- int firstPageFrom = numberMatched > 0 ? 1 : 0;
- int firstPageTo = numberMatched > hitsPerPage ? hitsPerPage : numberMatched;
- int nextFrom = to < numberMatched ? to + 1 : to;
- int nextTo = to + hitsPerPage < numberMatched ? to + hitsPerPage : numberMatched;
- int prevFrom = from - hitsPerPage > 0 ? from - hitsPerPage : 1;
- int prevTo = to - hitsPerPage > 0 ? to - hitsPerPage : numberMatched;
- int lastPageFrom = 0 < (numberMatched % hitsPerPage) ? numberMatched - (numberMatched % hitsPerPage) + 1 : (numberMatched - hitsPerPage + 1 > 0 ? numberMatched - hitsPerPage + 1 : numberMatched);
- long versionTokenETag = rdfSearcher.getVersionToken();
- String canonicalURL = hostURL + request.getRequestURI();
- String currentPage = canonicalURL + "?" + paramsAsString(allRequestParams) + "&from=" + from + "&to=" + to;
- String lastPage = canonicalURL + "?" + paramsAsString(allRequestParams) + "&from=" + lastPageFrom + "&to=" + numberMatched;
- String firstPage = canonicalURL + "?" + paramsAsString(allRequestParams) + "&from=" + firstPageFrom + "&to=" + firstPageTo;
- String previousPage = canonicalURL + "?" + paramsAsString(allRequestParams) + "&from=" + prevFrom + "&to=" + prevTo;
- String nextPage = canonicalURL + "?" + paramsAsString(allRequestParams) + "&from=" + nextFrom + "&to=" + nextTo;
-
- // Hydra Paging information (see also: http://www.hydra-cg.com/spec/latest/core/)
- String hydraPagedCollection = "\n" +
- "" +
- "" + lastPage.replaceAll("&", "&") + " \n" +
- "" + numberMatched + " \n" +
- ((prevFrom <= prevTo && prevFrom < from && prevTo < to) ? "" + previousPage.replaceAll("&", "&") + " \n" : "") +
- ((nextFrom <= nextTo && from < nextFrom && to < nextTo) ? "" + nextPage.replaceAll("&", "&") + " \n" : "") +
- "" + firstPage.replaceAll("&", "&") + " \n" +
- "" + hitsPerPage + " \n" +
- " ";
- // Construct the RDF output
- File rdfFile = manager.createRdfFile(context, results, 1, hydraPagedCollection);
-
- try (
- ServletOutputStream out = response.getOutputStream();
- InputStream in = new FileInputStream(rdfFile)
- ) {
- byte[] bytes = new byte[1024];
- int bytesRead;
-
- response.setContentType("application/rdf+xml");
-
- //Set the Lucene versionToken as ETag response header parameter
- response.addHeader("ETag", Long.toString(versionTokenETag));
- //Include the response header "link" parameters as suggested by the W3C Linked Data Platform paging specification (see also: https://www.w3.org/2012/ldp/hg/ldp-paging.html).
- response.addHeader("Link", "; rel=\"type\"");
- response.addHeader("Link", canonicalURL + "; rel=\"canonical\"; etag=" + versionTokenETag);
-
- response.addHeader("Link", "<" + firstPage + "> ; rel=\"first\"");
- if (nextFrom <= nextTo && from < nextFrom && to < nextTo) {
- response.addHeader("Link", "<" + nextPage + "> ; rel=\"next\"");
- }
- if (prevFrom <= prevTo && prevFrom < from && prevTo < to) {
- response.addHeader("Link", "<" + previousPage + "> ; rel=\"prev\"");
- }
- response.addHeader("Link", "<" + lastPage + "> ; rel=\"last\"");
+ // Perform the search on the Lucene Index
+ RdfSearcher rdfSearcher = new RdfSearcher(params, context);
+ List results = rdfSearcher.search(context);
+ rdfSearcher.close();
+
+ // Calculates the pagination information, needed for the LDP Paging and Hydra Paging
+ int numberMatched = rdfSearcher.getSize();
+ int firstPageFrom = numberMatched > 0 ? 1 : 0;
+ int firstPageTo = numberMatched > hitsPerPage ? hitsPerPage : numberMatched;
+ int nextFrom = to < numberMatched ? to + 1 : to;
+ int nextTo = to + hitsPerPage < numberMatched ? to + hitsPerPage : numberMatched;
+ int prevFrom = from - hitsPerPage > 0 ? from - hitsPerPage : 1;
+ int prevTo = to - hitsPerPage > 0 ? to - hitsPerPage : numberMatched;
+ int lastPageFrom = 0 < (numberMatched % hitsPerPage) ? numberMatched - (numberMatched % hitsPerPage) + 1 : (numberMatched - hitsPerPage + 1 > 0 ? numberMatched - hitsPerPage + 1 : numberMatched);
+ long versionTokenETag = rdfSearcher.getVersionToken();
+ String canonicalURL = hostURL + request.getRequestURI();
+ String currentPage = canonicalURL + "?" + paramsAsString(allRequestParams) + "&from=" + from + "&to=" + to;
+ String lastPage = canonicalURL + "?" + paramsAsString(allRequestParams) + "&from=" + lastPageFrom + "&to=" + numberMatched;
+ String firstPage = canonicalURL + "?" + paramsAsString(allRequestParams) + "&from=" + firstPageFrom + "&to=" + firstPageTo;
+ String previousPage = canonicalURL + "?" + paramsAsString(allRequestParams) + "&from=" + prevFrom + "&to=" + prevTo;
+ String nextPage = canonicalURL + "?" + paramsAsString(allRequestParams) + "&from=" + nextFrom + "&to=" + nextTo;
+
+ // Hydra Paging information (see also: http://www.hydra-cg.com/spec/latest/core/)
+ String hydraPagedCollection = "\n" +
+ "" +
+ "" + lastPage.replaceAll("&", "&") + " \n" +
+ "" + numberMatched + " \n" +
+ ((prevFrom <= prevTo && prevFrom < from && prevTo < to) ? "" + previousPage.replaceAll("&", "&") + " \n" : "") +
+ ((nextFrom <= nextTo && from < nextFrom && to < nextTo) ? "" + nextPage.replaceAll("&", "&") + " \n" : "") +
+ "" + firstPage.replaceAll("&", "&") + " \n" +
+ "" + hitsPerPage + " \n" +
+ " ";
+ // Construct the RDF output
+ File rdfFile = manager.createRdfFile(context, results, 1, hydraPagedCollection);
+
+ try (
+ ServletOutputStream out = response.getOutputStream();
+ InputStream in = new FileInputStream(rdfFile)
+ ) {
+ byte[] bytes = new byte[1024];
+ int bytesRead;
+
+ response.setContentType("application/rdf+xml");
+
+ //Set the Lucene versionToken as ETag response header parameter
+ response.addHeader("ETag", Long.toString(versionTokenETag));
+ //Include the response header "link" parameters as suggested by the W3C Linked Data Platform paging specification (see also: https://www.w3.org/2012/ldp/hg/ldp-paging.html).
+ response.addHeader("Link", "; rel=\"type\"");
+ response.addHeader("Link", canonicalURL + "; rel=\"canonical\"; etag=" + versionTokenETag);
+
+ response.addHeader("Link", "<" + firstPage + "> ; rel=\"first\"");
+ if (nextFrom <= nextTo && from < nextFrom && to < nextTo) {
+ response.addHeader("Link", "<" + nextPage + "> ; rel=\"next\"");
+ }
+ if (prevFrom <= prevTo && prevFrom < from && prevTo < to) {
+ response.addHeader("Link", "<" + previousPage + "> ; rel=\"prev\"");
+ }
+ response.addHeader("Link", "<" + lastPage + "> ; rel=\"last\"");
- //Write the paged RDF result to the message body
- while ((bytesRead = in.read(bytes)) != -1) {
- out.write(bytes, 0, bytesRead);
+ //Write the paged RDF result to the message body
+ while ((bytesRead = in.read(bytes)) != -1) {
+ out.write(bytes, 0, bytesRead);
+ }
+ } catch (FileNotFoundException e) {
+ Log.error(API.LOG_MODULE_NAME, "Get catalog content as RDF. Error: " + e.getMessage(), e);
+ } catch (IOException e) {
+ Log.error(API.LOG_MODULE_NAME, "Get catalog content as RDF. Error: " + e.getMessage(), e);
}
- } catch (FileNotFoundException e) {
- Log.error(API.LOG_MODULE_NAME, "Get catalog content as RDF. Error: " + e.getMessage(), e);
- } catch (IOException e) {
- Log.error(API.LOG_MODULE_NAME, "Get catalog content as RDF. Error: " + e.getMessage(), e);
}
}
diff --git a/services/src/main/java/org/fao/geonet/api/records/DoiApi.java b/services/src/main/java/org/fao/geonet/api/records/DoiApi.java
index ed767e390220..6026d0d154d4 100644
--- a/services/src/main/java/org/fao/geonet/api/records/DoiApi.java
+++ b/services/src/main/java/org/fao/geonet/api/records/DoiApi.java
@@ -94,11 +94,12 @@ ResponseEntity> checkDoiStatus(
@Parameter(hidden = true)
HttpServletRequest request
) throws Exception {
- AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, request);
- ServiceContext serviceContext = ApiUtils.createServiceContext(request);
+ try (ServiceContext serviceContext = ApiUtils.createServiceContext(request)) {
+ AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, serviceContext);
- final Map reportStatus = doiManager.check(serviceContext, metadata, null);
- return new ResponseEntity<>(reportStatus, HttpStatus.OK);
+ final Map reportStatus = doiManager.check(serviceContext, metadata, null);
+ return new ResponseEntity<>(reportStatus, HttpStatus.OK);
+ }
}
@@ -130,11 +131,13 @@ ResponseEntity> createDoi(
@Parameter(hidden = true)
HttpSession session
) throws Exception {
- AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, request);
- ServiceContext serviceContext = ApiUtils.createServiceContext(request);
- Map doiInfo = doiManager.register(serviceContext, metadata);
- return new ResponseEntity<>(doiInfo, HttpStatus.CREATED);
+ try (ServiceContext serviceContext = ApiUtils.createServiceContext(request);) {
+ AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, serviceContext);
+
+ Map doiInfo = doiManager.register(serviceContext, metadata);
+ return new ResponseEntity<>(doiInfo, HttpStatus.CREATED);
+ }
}
@@ -167,11 +170,13 @@ ResponseEntity unregisterDoi(
@Parameter(hidden = true)
HttpSession session
) throws Exception {
- AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, request);
- ServiceContext serviceContext = ApiUtils.createServiceContext(request);
- doiManager.unregisterDoi(metadata, serviceContext);
- return new ResponseEntity<>(HttpStatus.NO_CONTENT);
+ try (ServiceContext serviceContext = ApiUtils.createServiceContext(request);) {
+ AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, serviceContext);
+
+ doiManager.unregisterDoi(metadata, serviceContext);
+ return new ResponseEntity<>(HttpStatus.NO_CONTENT);
+ }
}
// TODO: At some point we may add support for DOI States management
diff --git a/services/src/main/java/org/fao/geonet/api/records/InspireValidationApi.java b/services/src/main/java/org/fao/geonet/api/records/InspireValidationApi.java
index c6c160e20269..216fdc8db531 100644
--- a/services/src/main/java/org/fao/geonet/api/records/InspireValidationApi.java
+++ b/services/src/main/java/org/fao/geonet/api/records/InspireValidationApi.java
@@ -212,7 +212,6 @@ String validateRecordForInspire(
String id = String.valueOf(metadata.getId());
String URL = settingManager.getValue(Settings.SYSTEM_INSPIRE_REMOTE_VALIDATION_URL);
- ServiceContext context = ApiUtils.createServiceContext(request);
String getRecordByIdUrl = null;
String testId = null;
@@ -226,7 +225,7 @@ String validateRecordForInspire(
if (StringUtils.isEmpty(mode)) {
// Use formatter to convert the record
if (!schema.equals("iso19139")) {
- try {
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
Key key = new Key(metadata.getId(), "eng", FormatType.xml, "iso19139", true, FormatterWidth._100);
final FormatterApi.FormatMetadata formatMetadata =
@@ -246,24 +245,36 @@ String validateRecordForInspire(
md.detach();
- Attribute schemaLocAtt = schemaManager.getSchemaLocation(
+
+ // The following is unusual, we are creating a service context so it is our responsibility
+ // to ensure it is cleaned up.
+ //
+ // This is going to be accomplished by the scheduled InspireValidationRunnable
+ ServiceContext context = ApiUtils.createServiceContext(request);
+ try {
+ Attribute schemaLocAtt = schemaManager.getSchemaLocation(
"iso19139", context);
- if (schemaLocAtt != null) {
- if (md.getAttribute(
- schemaLocAtt.getName(),
- schemaLocAtt.getNamespace()) == null) {
- md.setAttribute(schemaLocAtt);
- // make sure namespace declaration for schemalocation is present -
- // remove it first (does nothing if not there) then add it
- md.removeNamespaceDeclaration(schemaLocAtt.getNamespace());
- md.addNamespaceDeclaration(schemaLocAtt.getNamespace());
+ if (schemaLocAtt != null) {
+ if (md.getAttribute(
+ schemaLocAtt.getName(),
+ schemaLocAtt.getNamespace()) == null) {
+ md.setAttribute(schemaLocAtt);
+ // make sure namespace declaration for schemalocation is present -
+ // remove it first (does nothing if not there) then add it
+ md.removeNamespaceDeclaration(schemaLocAtt.getNamespace());
+ md.addNamespaceDeclaration(schemaLocAtt.getNamespace());
+ }
}
- }
+ InputStream metadataToTest = convertElement2InputStream(md);
+ testId = inspireValidatorUtils.submitFile(context, URL, metadataToTest, testsuite, metadata.getUuid());
- InputStream metadataToTest = convertElement2InputStream(md);
- testId = inspireValidatorUtils.submitFile(context, URL, metadataToTest, testsuite, metadata.getUuid());
+ threadPool.runTask(new InspireValidationRunnable(context, URL, testId, metadata.getId()));
+ } finally {
+ context.clearAsThreadLocal();
+                // context.clear() is handled by the scheduled InspireValidationRunnable above
+ }
} else {
String portal = NodeInfo.DEFAULT_NODE;
if (!NodeInfo.DEFAULT_NODE.equals(mode)) {
@@ -283,10 +294,20 @@ String validateRecordForInspire(
portal,
ISO19139Namespaces.GMD.getURI(),
metadataUuid);
- testId = inspireValidatorUtils.submitUrl(context, URL, getRecordByIdUrl, testsuite, metadata.getUuid());
- }
- threadPool.runTask(new InspireValidationRunnable(context, URL, testId, metadata.getId()));
+ // The following is unusual, we are creating a service context so it is our responsibility
+ // to ensure it is cleaned up.
+ //
+ // This is going to be accomplished by the scheduled InspireValidationRunnable
+ ServiceContext context = ApiUtils.createServiceContext(request);
+ try {
+ testId = inspireValidatorUtils.submitUrl(context, URL, getRecordByIdUrl, testsuite, metadata.getUuid());
+ threadPool.runTask(new InspireValidationRunnable(context, URL, testId, metadata.getId()));
+ } finally {
+ context.clearAsThreadLocal();
+                    // context.clear() is handled by the scheduled InspireValidationRunnable above
+ }
+ }
return testId;
}
@@ -337,9 +358,8 @@ Map checkValidation(
) throws Exception {
String URL = settingManager.getValue(Settings.SYSTEM_INSPIRE_REMOTE_VALIDATION_URL);
- ServiceContext context = ApiUtils.createServiceContext(request);
- try {
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
if (inspireValidatorUtils.isReady(context, URL, testId)) {
Map values = new HashMap<>();
diff --git a/services/src/main/java/org/fao/geonet/api/records/InspireValidationRunnable.java b/services/src/main/java/org/fao/geonet/api/records/InspireValidationRunnable.java
index 7413f2f115c3..eed467732d4c 100644
--- a/services/src/main/java/org/fao/geonet/api/records/InspireValidationRunnable.java
+++ b/services/src/main/java/org/fao/geonet/api/records/InspireValidationRunnable.java
@@ -26,53 +26,68 @@ public class InspireValidationRunnable implements Runnable {
private final String testId;
private final String endPoint;
private final int mdId;
- private final ServiceContext context;
-
+ /** Provided service context, cleaned up when task is complete */
+ private ServiceContext validationContext;
+
+ /**
+ * Schedule INSPIRE validation for later.
+ *
+     * @param context Validation context; it is the responsibility of this runnable to clean it up
+ * @param endPoint
+ * @param testId
+ * @param mdId
+ */
public InspireValidationRunnable(ServiceContext context,
String endPoint, String testId, int mdId) {
- this.context = context;
+ this.validationContext = context;
this.testId = testId;
this.mdId = mdId;
this.endPoint = endPoint;
}
public void run() {
- TransactionManager.runInTransaction("inspire-validation", ApplicationContextHolder.get(),
- CREATE_NEW, ALWAYS_COMMIT, false, new TransactionTask() {
- @Override
- public Object doInTransaction(TransactionStatus transaction) throws Throwable {
- InspireValidatorUtils inspireValidatorUtils =
- ApplicationContextHolder.get().getBean(InspireValidatorUtils.class);
+ validationContext.setAsThreadLocal();
+ try {
+ TransactionManager.runInTransaction("inspire-validation", ApplicationContextHolder.get(),
+ CREATE_NEW, ALWAYS_COMMIT, false, new TransactionTask() {
+ @Override
+ public Object doInTransaction(TransactionStatus transaction) throws Throwable {
+ InspireValidatorUtils inspireValidatorUtils =
+ ApplicationContextHolder.get().getBean(InspireValidatorUtils.class);
- MetadataValidationRepository metadataValidationRepository =
- ApplicationContextHolder.get().getBean(MetadataValidationRepository.class);
+ MetadataValidationRepository metadataValidationRepository =
+ ApplicationContextHolder.get().getBean(MetadataValidationRepository.class);
- // Waits until the validation result is available
- inspireValidatorUtils.waitUntilReady(context, endPoint, testId);
+ // Waits until the validation result is available
+ inspireValidatorUtils.waitUntilReady(validationContext, endPoint, testId);
- String reportUrl = inspireValidatorUtils.getReportUrl(endPoint, testId);
- String reportXmlUrl = InspireValidatorUtils.getReportUrlXML(endPoint, testId);
- String reportXml = inspireValidatorUtils.retrieveReport(context, reportXmlUrl);
+ String reportUrl = inspireValidatorUtils.getReportUrl(endPoint, testId);
+ String reportXmlUrl = InspireValidatorUtils.getReportUrlXML(endPoint, testId);
+ String reportXml = inspireValidatorUtils.retrieveReport(validationContext, reportXmlUrl);
- String validationStatus = inspireValidatorUtils.isPassed(context, endPoint, testId);
+ String validationStatus = inspireValidatorUtils.isPassed(validationContext, endPoint, testId);
- MetadataValidationStatus metadataValidationStatus =
- inspireValidatorUtils.calculateValidationStatus(validationStatus);
+ MetadataValidationStatus metadataValidationStatus =
+ inspireValidatorUtils.calculateValidationStatus(validationStatus);
- MetadataValidation metadataValidation = new MetadataValidation()
- .setId(new MetadataValidationId(mdId, "inspire"))
- .setStatus(metadataValidationStatus).setRequired(false)
- .setReportUrl(reportUrl).setReportContent(reportXml);
+ MetadataValidation metadataValidation = new MetadataValidation()
+ .setId(new MetadataValidationId(mdId, "inspire"))
+ .setStatus(metadataValidationStatus).setRequired(false)
+ .setReportUrl(reportUrl).setReportContent(reportXml);
- metadataValidationRepository.save(metadataValidation);
+ metadataValidationRepository.save(metadataValidation);
- DataManager dataManager =
- ApplicationContextHolder.get().getBean(DataManager.class);
+ DataManager dataManager =
+ ApplicationContextHolder.get().getBean(DataManager.class);
- dataManager.indexMetadata(new ArrayList<>(Arrays.asList(mdId + "")));
+ dataManager.indexMetadata(new ArrayList<>(Arrays.asList(mdId + "")));
- return null;
- }
- });
+ return null;
+ }
+ });
+ } finally {
+ validationContext.clearAsThreadLocal();
+ validationContext.clear();
+ }
}
}
diff --git a/services/src/main/java/org/fao/geonet/api/records/MetadataApi.java b/services/src/main/java/org/fao/geonet/api/records/MetadataApi.java
index 2f6650dd926d..adc2495a6659 100644
--- a/services/src/main/java/org/fao/geonet/api/records/MetadataApi.java
+++ b/services/src/main/java/org/fao/geonet/api/records/MetadataApi.java
@@ -151,31 +151,33 @@ public String getRecord(
HttpServletRequest request
)
throws Exception {
- try {
- ApiUtils.canViewRecord(metadataUuid, request);
- } catch (SecurityException e) {
- Log.debug(API.LOG_MODULE_NAME, e.getMessage(), e);
- throw new NotAllowedException(ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_VIEW);
- }
- List accept = Arrays.asList(acceptHeader.split(","));
-
- String defaultFormatter = "xsl-view";
- if (accept.contains(MediaType.TEXT_HTML_VALUE)
- || accept.contains(MediaType.APPLICATION_XHTML_XML_VALUE)
- || accept.contains("application/pdf")) {
- return "forward:" + (metadataUuid + "/formatters/" + defaultFormatter);
- } else if (accept.contains(MediaType.APPLICATION_XML_VALUE)
- || accept.contains(MediaType.APPLICATION_JSON_VALUE)) {
- return "forward:" + (metadataUuid + "/formatters/xml");
- } else if (accept.contains("application/zip")
- || accept.contains(MEF_V1_ACCEPT_TYPE)
- || accept.contains(MEF_V2_ACCEPT_TYPE)) {
- return "forward:" + (metadataUuid + "/formatters/zip");
- } else {
- // FIXME this else is never reached because any of the accepted medias match one of the previous if conditions.
- response.setHeader(HttpHeaders.ACCEPT, MediaType.APPLICATION_XHTML_XML_VALUE);
- //response.sendRedirect(metadataUuid + "/formatters/" + defaultFormatter);
- return "forward:" + (metadataUuid + "/formatters/" + defaultFormatter);
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ try {
+ ApiUtils.canViewRecord(metadataUuid, context);
+ } catch (SecurityException e) {
+ Log.debug(API.LOG_MODULE_NAME, e.getMessage(), e);
+ throw new NotAllowedException(ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_VIEW);
+ }
+ List accept = Arrays.asList(acceptHeader.split(","));
+
+ String defaultFormatter = "xsl-view";
+ if (accept.contains(MediaType.TEXT_HTML_VALUE)
+ || accept.contains(MediaType.APPLICATION_XHTML_XML_VALUE)
+ || accept.contains("application/pdf")) {
+ return "forward:" + (metadataUuid + "/formatters/" + defaultFormatter);
+ } else if (accept.contains(MediaType.APPLICATION_XML_VALUE)
+ || accept.contains(MediaType.APPLICATION_JSON_VALUE)) {
+ return "forward:" + (metadataUuid + "/formatters/xml");
+ } else if (accept.contains("application/zip")
+ || accept.contains(MEF_V1_ACCEPT_TYPE)
+ || accept.contains(MEF_V2_ACCEPT_TYPE)) {
+ return "forward:" + (metadataUuid + "/formatters/zip");
+ } else {
+ // FIXME this else is never reached because any of the accepted medias match one of the previous if conditions.
+ response.setHeader(HttpHeaders.ACCEPT, MediaType.APPLICATION_XHTML_XML_VALUE);
+ //response.sendRedirect(metadataUuid + "/formatters/" + defaultFormatter);
+ return "forward:" + (metadataUuid + "/formatters/" + defaultFormatter);
+ }
}
}
@@ -234,75 +236,76 @@ Object getRecordAs(
HttpServletRequest request
)
throws Exception {
- AbstractMetadata metadata;
- try {
- metadata = ApiUtils.canViewRecord(metadataUuid, request);
- } catch (ResourceNotFoundException e) {
- Log.debug(API.LOG_MODULE_NAME, e.getMessage(), e);
- throw e;
- } catch (Exception e) {
- Log.debug(API.LOG_MODULE_NAME, e.getMessage(), e);
- throw new NotAllowedException(ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_VIEW);
- }
- ServiceContext context = ApiUtils.createServiceContext(request);
- try {
- Lib.resource.checkPrivilege(context,
- String.valueOf(metadata.getId()),
- ReservedOperation.view);
- } catch (Exception e) {
- // TODO: i18n
- // TODO: Report exception in JSON format
- Log.debug(API.LOG_MODULE_NAME, e.getMessage(), e);
- throw new NotAllowedException(ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_VIEW);
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ AbstractMetadata metadata;
+ try {
+ metadata = ApiUtils.canViewRecord(metadataUuid, context);
+ } catch (ResourceNotFoundException e) {
+ Log.debug(API.LOG_MODULE_NAME, e.getMessage(), e);
+ throw e;
+ } catch (Exception e) {
+ Log.debug(API.LOG_MODULE_NAME, e.getMessage(), e);
+ throw new NotAllowedException(ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_VIEW);
+ }
+ try {
+ Lib.resource.checkPrivilege(context,
+ String.valueOf(metadata.getId()),
+ ReservedOperation.view);
+ } catch (Exception e) {
+ // TODO: i18n
+ // TODO: Report exception in JSON format
+ Log.debug(API.LOG_MODULE_NAME, e.getMessage(), e);
+ throw new NotAllowedException(ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_VIEW);
- }
+ }
- if (increasePopularity) {
- dataManager.increasePopularity(context, metadata.getId() + "");
- }
+ if (increasePopularity) {
+ dataManager.increasePopularity(context, metadata.getId() + "");
+ }
- boolean withValidationErrors = false, keepXlinkAttributes = false, forEditing = false;
+ boolean withValidationErrors = false, keepXlinkAttributes = false, forEditing = false;
- String mdId = String.valueOf(metadata.getId());
+ String mdId = String.valueOf(metadata.getId());
- //Here we just care if we need the approved version explicitly.
- //ApiUtils.canViewRecord already filtered draft for non editors.
- if (approved) {
- mdId = String.valueOf(metadataRepository.findOneByUuid(metadata.getUuid()).getId());
- }
+ //Here we just care if we need the approved version explicitly.
+ //ApiUtils.canViewRecord already filtered draft for non editors.
+ if (approved) {
+ mdId = String.valueOf(metadataRepository.findOneByUuid(metadata.getUuid()).getId());
+ }
- Element xml = withInfo ?
- dataManager.getMetadata(context, mdId, forEditing,
- withValidationErrors, keepXlinkAttributes) :
- dataManager.getMetadataNoInfo(context, mdId + "");
-
- if (addSchemaLocation) {
- Attribute schemaLocAtt = _schemaManager.getSchemaLocation(
- metadata.getDataInfo().getSchemaId(), context);
-
- if (schemaLocAtt != null) {
- if (xml.getAttribute(
- schemaLocAtt.getName(),
- schemaLocAtt.getNamespace()) == null) {
- xml.setAttribute(schemaLocAtt);
- // make sure namespace declaration for schemalocation is present -
- // remove it first (does nothing if not there) then add it
- xml.removeNamespaceDeclaration(schemaLocAtt.getNamespace());
- xml.addNamespaceDeclaration(schemaLocAtt.getNamespace());
+ Element xml = withInfo ?
+ dataManager.getMetadata(context, mdId, forEditing,
+ withValidationErrors, keepXlinkAttributes) :
+ dataManager.getMetadataNoInfo(context, mdId + "");
+
+ if (addSchemaLocation) {
+ Attribute schemaLocAtt = _schemaManager.getSchemaLocation(
+ metadata.getDataInfo().getSchemaId(), context);
+
+ if (schemaLocAtt != null) {
+ if (xml.getAttribute(
+ schemaLocAtt.getName(),
+ schemaLocAtt.getNamespace()) == null) {
+ xml.setAttribute(schemaLocAtt);
+ // make sure namespace declaration for schemalocation is present -
+ // remove it first (does nothing if not there) then add it
+ xml.removeNamespaceDeclaration(schemaLocAtt.getNamespace());
+ xml.addNamespaceDeclaration(schemaLocAtt.getNamespace());
+ }
}
}
- }
- boolean isJson = acceptHeader.contains(MediaType.APPLICATION_JSON_VALUE);
+ boolean isJson = acceptHeader.contains(MediaType.APPLICATION_JSON_VALUE);
- String mode = (attachment) ? "attachment" : "inline";
- response.setHeader("Content-Disposition", String.format(
- mode + "; filename=\"%s.%s\"",
- metadata.getUuid(),
- isJson ? "json" : "xml"
- ));
- return isJson ? Xml.getJSON(xml) : xml;
+ String mode = (attachment) ? "attachment" : "inline";
+ response.setHeader("Content-Disposition", String.format(
+ mode + "; filename=\"%s.%s\"",
+ metadata.getUuid(),
+ isJson ? "json" : "xml"
+ ));
+ return isJson ? Xml.getJSON(xml) : xml;
+ }
}
@io.swagger.v3.oas.annotations.Operation(
@@ -377,68 +380,69 @@ void getRecordAsZip(
HttpServletRequest request
)
throws Exception {
- AbstractMetadata metadata;
- try {
- metadata = ApiUtils.canViewRecord(metadataUuid, request);
- } catch (SecurityException e) {
- Log.debug(API.LOG_MODULE_NAME, e.getMessage(), e);
- throw new NotAllowedException(ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_VIEW);
- }
- Path stylePath = dataDirectory.getWebappDir().resolve(Geonet.Path.SCHEMAS);
- Path file = null;
- ServiceContext context = ApiUtils.createServiceContext(request);
- MEFLib.Version version = MEFLib.Version.find(acceptHeader);
- if (version == MEFLib.Version.V1) {
- // This parameter is deprecated in v2.
- boolean skipUUID = false;
-
- Integer id = -1;
-
- if (approved) {
- id = metadataRepository.findOneByUuid(metadataUuid).getId();
- } else {
- id = metadataUtils.findOneByUuid(metadataUuid).getId();
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ AbstractMetadata metadata;
+ try {
+ metadata = ApiUtils.canViewRecord(metadataUuid, context);
+ } catch (SecurityException e) {
+ Log.debug(API.LOG_MODULE_NAME, e.getMessage(), e);
+ throw new NotAllowedException(ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_VIEW);
}
-
- file = MEFLib.doExport(
- context, id, format.toString(),
- skipUUID, withXLinksResolved, withXLinkAttribute, addSchemaLocation
- );
- response.setContentType(MEFLib.Version.Constants.MEF_V1_ACCEPT_TYPE);
- } else {
- Set tmpUuid = new HashSet();
- tmpUuid.add(metadataUuid);
- // MEF version 2 support multiple metadata record by file.
- if (withRelated) {
- // Adding children in MEF file
-
- // Get children to export - It could be better to use GetRelated service TODO
- Set childs = MetadataUtils.getUuidsToExport(
- String.format("+%s:%s", Geonet.IndexFieldNames.PARENTUUID, metadataUuid));
- if (childs.size() != 0) {
- tmpUuid.addAll(childs);
+ Path stylePath = dataDirectory.getWebappDir().resolve(Geonet.Path.SCHEMAS);
+ Path file = null;
+ MEFLib.Version version = MEFLib.Version.find(acceptHeader);
+ if (version == MEFLib.Version.V1) {
+ // This parameter is deprecated in v2.
+ boolean skipUUID = false;
+
+ Integer id = -1;
+
+ if (approved) {
+ id = metadataRepository.findOneByUuid(metadataUuid).getId();
+ } else {
+ id = metadataUtils.findOneByUuid(metadataUuid).getId();
}
- // Get linked services for export
- Set services = MetadataUtils.getUuidsToExport(
- String.format("+%s:%s", Geonet.IndexFieldNames.RECORDOPERATESON, metadataUuid));
- if (services.size() != 0) {
- tmpUuid.addAll(services);
+ file = MEFLib.doExport(
+ context, id, format.toString(),
+ skipUUID, withXLinksResolved, withXLinkAttribute, addSchemaLocation
+ );
+ response.setContentType(MEFLib.Version.Constants.MEF_V1_ACCEPT_TYPE);
+ } else {
+ Set tmpUuid = new HashSet();
+ tmpUuid.add(metadataUuid);
+ // MEF version 2 support multiple metadata record by file.
+ if (withRelated) {
+ // Adding children in MEF file
+
+ // Get children to export - It could be better to use GetRelated service TODO
+ Set childs = MetadataUtils.getUuidsToExport(
+ String.format("+%s:%s", Geonet.IndexFieldNames.PARENTUUID, metadataUuid));
+ if (childs.size() != 0) {
+ tmpUuid.addAll(childs);
+ }
+
+ // Get linked services for export
+ Set services = MetadataUtils.getUuidsToExport(
+ String.format("+%s:%s", Geonet.IndexFieldNames.RECORDOPERATESON, metadataUuid));
+ if (services.size() != 0) {
+ tmpUuid.addAll(services);
+ }
}
- }
- Log.info(Geonet.MEF, "Building MEF2 file with " + tmpUuid.size()
- + " records.");
+ Log.info(Geonet.MEF, "Building MEF2 file with " + tmpUuid.size()
+ + " records.");
- file = MEFLib.doMEF2Export(context, tmpUuid, format.toString(), false, stylePath, withXLinksResolved, withXLinkAttribute, false, addSchemaLocation, approved);
+ file = MEFLib.doMEF2Export(context, tmpUuid, format.toString(), false, stylePath, withXLinksResolved, withXLinkAttribute, false, addSchemaLocation, approved);
- response.setContentType(MEFLib.Version.Constants.MEF_V2_ACCEPT_TYPE);
+ response.setContentType(MEFLib.Version.Constants.MEF_V2_ACCEPT_TYPE);
+ }
+ response.setHeader(HttpHeaders.CONTENT_DISPOSITION, String.format(
+ "inline; filename=\"%s.zip\"",
+ metadata.getUuid()
+ ));
+ response.setHeader(HttpHeaders.CONTENT_LENGTH, String.valueOf(Files.size(file)));
+ FileUtils.copyFile(file.toFile(), response.getOutputStream());
}
- response.setHeader(HttpHeaders.CONTENT_DISPOSITION, String.format(
- "inline; filename=\"%s.zip\"",
- metadata.getUuid()
- ));
- response.setHeader(HttpHeaders.CONTENT_LENGTH, String.valueOf(Files.size(file)));
- FileUtils.copyFile(file.toFile(), response.getOutputStream());
}
@@ -463,19 +467,20 @@ public void getRecord(
HttpServletRequest request
)
throws Exception {
- AbstractMetadata metadata;
- try {
- metadata = ApiUtils.canViewRecord(metadataUuid, request);
- } catch (ResourceNotFoundException e) {
- Log.debug(API.LOG_MODULE_NAME, e.getMessage(), e);
- throw e;
- } catch (Exception e) {
- Log.debug(API.LOG_MODULE_NAME, e.getMessage(), e);
- throw new NotAllowedException(ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_VIEW);
- }
- ServiceContext context = ApiUtils.createServiceContext(request);
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ AbstractMetadata metadata;
+ try {
+ metadata = ApiUtils.canViewRecord(metadataUuid, context);
+ } catch (ResourceNotFoundException e) {
+ Log.debug(API.LOG_MODULE_NAME, e.getMessage(), e);
+ throw e;
+ } catch (Exception e) {
+ Log.debug(API.LOG_MODULE_NAME, e.getMessage(), e);
+ throw new NotAllowedException(ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_VIEW);
+ }
- dataManager.increasePopularity(context, metadata.getId() + "");
+ dataManager.increasePopularity(context, metadata.getId() + "");
+ }
}
@@ -517,31 +522,33 @@ public RelatedResponse getAssociatedResources(
int rows,
HttpServletRequest request) throws Exception {
- AbstractMetadata md;
- try {
- md = ApiUtils.canViewRecord(metadataUuid, request);
- } catch (SecurityException e) {
- Log.debug(API.LOG_MODULE_NAME, e.getMessage(), e);
- throw new NotAllowedException(ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_VIEW);
- }
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ AbstractMetadata md;
+ try {
+ md = ApiUtils.canViewRecord(metadataUuid, context);
+ } catch (SecurityException e) {
+ Log.debug(API.LOG_MODULE_NAME, e.getMessage(), e);
+ throw new NotAllowedException(ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_VIEW);
+ }
- String language = languageUtils.getIso3langCode(request.getLocales());
-
- // TODO PERF: ByPass XSL processing and create response directly
- // At least for related metadata and keep XSL only for links
- final ServiceContext context = ApiUtils.createServiceContext(request);
- Element raw = new Element("root").addContent(Arrays.asList(
- new Element("gui").addContent(Arrays.asList(
- new Element("language").setText(language),
- new Element("url").setText(context.getBaseUrl())
- )),
- MetadataUtils.getRelated(context, md.getId(), md.getUuid(), type, start, start + rows, true)
- ));
- Path relatedXsl = dataDirectory.getWebappDir().resolve("xslt/services/metadata/relation.xsl");
-
- final Element transform = Xml.transform(raw, relatedXsl);
- RelatedResponse response = (RelatedResponse) Xml.unmarshall(transform, RelatedResponse.class);
- return response;
+ String language = languageUtils.getIso3langCode(request.getLocales());
+
+ // TODO PERF: ByPass XSL processing and create response directly
+ // At least for related metadata and keep XSL only for links
+ Element raw = new Element("root").addContent(Arrays.asList(
+ new Element("gui").addContent(Arrays.asList(
+ new Element("language").setText(language),
+ new Element("url").setText(context.getBaseUrl())
+ )),
+ MetadataUtils.getRelated(context, md.getId(), md.getUuid(), type, start, start + rows, true)
+ ));
+ Path relatedXsl = dataDirectory.getWebappDir().resolve("xslt/services/metadata/relation.xsl");
+
+ final Element transform = Xml.transform(raw, relatedXsl);
+ RelatedResponse response = (RelatedResponse) Xml.unmarshall(transform, RelatedResponse.class);
+ return response;
+
+ }
}
@io.swagger.v3.oas.annotations.Operation(
diff --git a/services/src/main/java/org/fao/geonet/api/records/MetadataIndexApi.java b/services/src/main/java/org/fao/geonet/api/records/MetadataIndexApi.java
index c1736e17698f..a4118cb6fde1 100644
--- a/services/src/main/java/org/fao/geonet/api/records/MetadataIndexApi.java
+++ b/services/src/main/java/org/fao/geonet/api/records/MetadataIndexApi.java
@@ -98,38 +98,39 @@ JSONObject index(
)
throws Exception {
- ServiceContext serviceContext = ApiUtils.createServiceContext(request);
- UserSession session = ApiUtils.getUserSession(httpSession);
+ try (ServiceContext serviceContext = ApiUtils.createServiceContext(request)) {
+ UserSession session = ApiUtils.getUserSession(httpSession);
- SelectionManager selectionManager =
- SelectionManager.getManager(serviceContext.getUserSession());
+ SelectionManager selectionManager =
+ SelectionManager.getManager(serviceContext.getUserSession());
- Set records = ApiUtils.getUuidsParameterOrSelection(uuids, bucket, session);
- Set ids = Sets.newHashSet();
- int index = 0;
+ Set records = ApiUtils.getUuidsParameterOrSelection(uuids, bucket, session);
+ Set ids = Sets.newHashSet();
+ int index = 0;
- for (String uuid : records) {
- try {
- final String metadataId = dataManager.getMetadataId(uuid);
- if (metadataId != null) {
- ids.add(Integer.valueOf(metadataId));
- }
- } catch (Exception e) {
+ for (String uuid : records) {
try {
- ids.add(Integer.valueOf(uuid));
- } catch (NumberFormatException nfe) {
- // skip
+ final String metadataId = dataManager.getMetadataId(uuid);
+ if (metadataId != null) {
+ ids.add(Integer.valueOf(metadataId));
+ }
+ } catch (Exception e) {
+ try {
+ ids.add(Integer.valueOf(uuid));
+ } catch (NumberFormatException nfe) {
+ // skip
+ }
}
}
- }
- index = ids.size();
- new BatchOpsMetadataReindexer(dataManager, ids).process(false);
+ index = ids.size();
+ new BatchOpsMetadataReindexer(dataManager, ids).process(false);
- JSONObject res = new JSONObject();
- res.put("success", true);
- res.put("count", index);
+ JSONObject res = new JSONObject();
+ res.put("success", true);
+ res.put("count", index);
- return res;
+ return res;
+ }
}
}
diff --git a/services/src/main/java/org/fao/geonet/api/records/MetadataInsertDeleteApi.java b/services/src/main/java/org/fao/geonet/api/records/MetadataInsertDeleteApi.java
index 1a5baf80e56d..4e0c0f18b9d6 100644
--- a/services/src/main/java/org/fao/geonet/api/records/MetadataInsertDeleteApi.java
+++ b/services/src/main/java/org/fao/geonet/api/records/MetadataInsertDeleteApi.java
@@ -177,29 +177,31 @@ public void deleteRecord(
@Parameter(description = API_PARAM_RECORD_UUID, required = true) @PathVariable String metadataUuid,
@Parameter(description = API_PARAM_BACKUP_FIRST, required = false) @RequestParam(required = false, defaultValue = "true") boolean withBackup,
HttpServletRequest request) throws Exception {
- AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, request);
- ServiceContext context = ApiUtils.createServiceContext(request);
- Store store = context.getBean("resourceStore", Store.class);
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, context);
- MetadataPreRemove preRemoveEvent = new MetadataPreRemove(metadata);
- ApplicationContextHolder.get().publishEvent(preRemoveEvent);
+ Store store = context.getBean("resourceStore", Store.class);
- if (metadata.getDataInfo().getType() != MetadataType.SUB_TEMPLATE
- && metadata.getDataInfo().getType() != MetadataType.TEMPLATE_OF_SUB_TEMPLATE && withBackup) {
- MetadataUtils.backupRecord(metadata, context);
- }
+ MetadataPreRemove preRemoveEvent = new MetadataPreRemove(metadata);
+ ApplicationContextHolder.get().publishEvent(preRemoveEvent);
- boolean approved=true;
- if (metadata instanceof MetadataDraft) {
- approved=false;
- }
+ if (metadata.getDataInfo().getType() != MetadataType.SUB_TEMPLATE
+ && metadata.getDataInfo().getType() != MetadataType.TEMPLATE_OF_SUB_TEMPLATE && withBackup) {
+ MetadataUtils.backupRecord(metadata, context);
+ }
+
+ boolean approved=true;
+ if (metadata instanceof MetadataDraft) {
+ approved=false;
+ }
- store.delResources(context, metadata.getUuid(), approved);
- RecordDeletedEvent recordDeletedEvent = triggerDeletionEvent(request, metadata.getId() + "");
- metadataManager.deleteMetadata(context, metadata.getId() + "");
- recordDeletedEvent.publish(ApplicationContextHolder.get());
+ store.delResources(context, metadata.getUuid(), approved);
+ RecordDeletedEvent recordDeletedEvent = triggerDeletionEvent(request, metadata.getId() + "");
+ metadataManager.deleteMetadata(context, metadata.getId() + "");
+ recordDeletedEvent.publish(ApplicationContextHolder.get());
- dataManager.forceIndexChanges();
+ dataManager.forceIndexChanges();
+ }
}
@io.swagger.v3.oas.annotations.Operation(summary = "Delete one or more records", description ="User MUST be able to edit the record to delete it. "
@@ -220,40 +222,41 @@ public SimpleMetadataProcessingReport deleteRecords(
@Parameter(description = ApiParams.API_PARAM_BUCKET_NAME, required = false) @RequestParam(required = false) String bucket,
@Parameter(description = API_PARAM_BACKUP_FIRST, required = false) @RequestParam(required = false, defaultValue = "true") boolean withBackup,
@Parameter(hidden = true) HttpSession session, HttpServletRequest request) throws Exception {
- ServiceContext context = ApiUtils.createServiceContext(request);
- Store store = context.getBean("resourceStore", Store.class);
-
- Set records = ApiUtils.getUuidsParameterOrSelection(uuids, bucket, ApiUtils.getUserSession(session));
-
- SimpleMetadataProcessingReport report = new SimpleMetadataProcessingReport();
- for (String uuid : records) {
- AbstractMetadata metadata = metadataRepository.findOneByUuid(uuid);
- if (metadata == null) {
- report.incrementNullRecords();
- } else if (!accessManager.canEdit(context, String.valueOf(metadata.getId()))
- || metadataDraftRepository.findOneByUuid(uuid) != null) {
- report.addNotEditableMetadataId(metadata.getId());
- } else {
- MetadataPreRemove preRemoveEvent = new MetadataPreRemove(metadata);
- ApplicationContextHolder.get().publishEvent(preRemoveEvent);
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ Store store = context.getBean("resourceStore", Store.class);
+
+ Set records = ApiUtils.getUuidsParameterOrSelection(uuids, bucket, ApiUtils.getUserSession(session));
+
+ SimpleMetadataProcessingReport report = new SimpleMetadataProcessingReport();
+ for (String uuid : records) {
+ AbstractMetadata metadata = metadataRepository.findOneByUuid(uuid);
+ if (metadata == null) {
+ report.incrementNullRecords();
+ } else if (!accessManager.canEdit(context, String.valueOf(metadata.getId()))
+ || metadataDraftRepository.findOneByUuid(uuid) != null) {
+ report.addNotEditableMetadataId(metadata.getId());
+ } else {
+ MetadataPreRemove preRemoveEvent = new MetadataPreRemove(metadata);
+ ApplicationContextHolder.get().publishEvent(preRemoveEvent);
- if (metadata.getDataInfo().getType() != MetadataType.SUB_TEMPLATE
- && metadata.getDataInfo().getType() != MetadataType.TEMPLATE_OF_SUB_TEMPLATE && withBackup) {
- MetadataUtils.backupRecord(metadata, context);
- }
+ if (metadata.getDataInfo().getType() != MetadataType.SUB_TEMPLATE
+ && metadata.getDataInfo().getType() != MetadataType.TEMPLATE_OF_SUB_TEMPLATE && withBackup) {
+ MetadataUtils.backupRecord(metadata, context);
+ }
- store.delResources(context, metadata.getUuid());
+ store.delResources(context, metadata.getUuid());
- RecordDeletedEvent recordDeletedEvent = triggerDeletionEvent(request, String.valueOf(metadata.getId()));
- metadataManager.deleteMetadata(context, String.valueOf(metadata.getId()));
- recordDeletedEvent.publish(ApplicationContextHolder.get());
+ RecordDeletedEvent recordDeletedEvent = triggerDeletionEvent(request, String.valueOf(metadata.getId()));
+ metadataManager.deleteMetadata(context, String.valueOf(metadata.getId()));
+ recordDeletedEvent.publish(ApplicationContextHolder.get());
- report.incrementProcessedRecords();
- report.addMetadataId(metadata.getId());
+ report.incrementProcessedRecords();
+ report.addMetadataId(metadata.getId());
+ }
}
+ report.close();
+ return report;
}
- report.close();
- return report;
}
@io.swagger.v3.oas.annotations.Operation(summary = "Add a record", description = "Add one or more record from an XML fragment, "
@@ -357,41 +360,41 @@ public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IO
throw new Exception(
String.format("No XML or MEF or ZIP file found in server folder '%s'.", serverFolder));
}
- ServiceContext context = ApiUtils.createServiceContext(request);
- for (Path f : files) {
- if (MEFLib.isValidArchiveExtensionForMEF(f.getFileName().toString())) {
- try {
- MEFLib.Version version = MEFLib.getMEFVersion(f);
- List ids = MEFLib.doImport(version == MEFLib.Version.V1 ? "mef" : "mef2",
- uuidProcessing, transformWith, settingManager.getSiteId(), metadataType, category,
- group, rejectIfInvalid, assignToCatalog, context, f);
- for (String id : ids) {
- report.addMetadataInfos(Integer.parseInt(id), id, !publishToAll, false,
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ for (Path f : files) {
+ if (MEFLib.isValidArchiveExtensionForMEF(f.getFileName().toString())) {
+ try {
+ MEFLib.Version version = MEFLib.getMEFVersion(f);
+ List ids = MEFLib.doImport(version == MEFLib.Version.V1 ? "mef" : "mef2",
+ uuidProcessing, transformWith, settingManager.getSiteId(), metadataType, category,
+ group, rejectIfInvalid, assignToCatalog, context, f);
+ for (String id : ids) {
+ report.addMetadataInfos(Integer.parseInt(id), id, !publishToAll, false,
String.format("Metadata imported from MEF with id '%s'", id));
- triggerCreationEvent(request, id);
+ triggerCreationEvent(request, id);
- report.incrementProcessedRecords();
+ report.incrementProcessedRecords();
+ }
+ } catch (Exception e) {
+ report.addError(e);
+ report.addInfos(String.format("Failed to import MEF file '%s'. Check error for details.",
+ f.getFileName().toString()));
}
- } catch (Exception e) {
- report.addError(e);
- report.addInfos(String.format("Failed to import MEF file '%s'. Check error for details.",
- f.getFileName().toString()));
- }
- } else {
- try {
- Pair pair = loadRecord(metadataType, Xml.loadFile(f), uuidProcessing, group,
+ } else {
+ try {
+ Pair pair = loadRecord(metadataType, Xml.loadFile(f), uuidProcessing, group,
category, rejectIfInvalid, publishToAll, transformWith, schema, extra, request);
- report.addMetadataInfos(pair.one(), pair.two(), !publishToAll, false,
+ report.addMetadataInfos(pair.one(), pair.two(), !publishToAll, false,
String.format("Metadata imported from server folder with UUID '%s'", pair.two()));
- triggerCreationEvent(request, pair.two());
+ triggerCreationEvent(request, pair.two());
- } catch (Exception e) {
- report.addError(e);
+ } catch (Exception e) {
+ report.addError(e);
+ }
+ report.incrementProcessedRecords();
}
- report.incrementProcessedRecords();
}
-
}
}
report.close();
@@ -463,57 +466,58 @@ String create(
}
}
- ServiceContext context = ApiUtils.createServiceContext(request);
- String newId = dataManager.createMetadata(context, String.valueOf(sourceMetadata.getId()), group,
- settingManager.getSiteId(), context.getUserSession().getUserIdAsInt(),
- isChildOfSource ? sourceMetadata.getUuid() : null, metadataType.toString(), isVisibleByAllGroupMembers,
- metadataUuid);
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ String newId = dataManager.createMetadata(context, String.valueOf(sourceMetadata.getId()), group,
+ settingManager.getSiteId(), context.getUserSession().getUserIdAsInt(),
+ isChildOfSource ? sourceMetadata.getUuid() : null, metadataType.toString(), isVisibleByAllGroupMembers,
+ metadataUuid);
- triggerCreationEvent(request, newId);
+ triggerCreationEvent(request, newId);
- dataManager.activateWorkflowIfConfigured(context, newId, group);
+ dataManager.activateWorkflowIfConfigured(context, newId, group);
- if (hasAttachmentsOfSource) {
- try {
- StoreUtils.copyDataDir(context, sourceMetadata.getId(), Integer.parseInt(newId), true);
- } catch (Exception e) {
- Log.warning(Geonet.DATA_MANAGER,
- String.format(
- "Error while copying metadata resources. Error is %s. "
- + "Metadata is created but without resources from the source record with id '%s':",
- e.getMessage(), newId));
+ if (hasAttachmentsOfSource) {
+ try {
+ StoreUtils.copyDataDir(context, sourceMetadata.getId(), Integer.parseInt(newId), true);
+ } catch (Exception e) {
+ Log.warning(Geonet.DATA_MANAGER,
+ String.format(
+ "Error while copying metadata resources. Error is %s. "
+ + "Metadata is created but without resources from the source record with id '%s':",
+ e.getMessage(), newId));
+ }
}
- }
- if (hasCategoryOfSource) {
- final Collection categories = dataManager.getCategories(sourceMetadata.getId() + "");
- try {
- for (MetadataCategory c : categories) {
- dataManager.setCategory(context, newId, c.getId() + "");
+ if (hasCategoryOfSource) {
+ final Collection categories = dataManager.getCategories(sourceMetadata.getId() + "");
+ try {
+ for (MetadataCategory c : categories) {
+ dataManager.setCategory(context, newId, c.getId() + "");
+ }
+ } catch (Exception e) {
+ Log.warning(Geonet.DATA_MANAGER,
+ String.format("Error while copying source record category to new record. Error is %s. "
+ + "Metadata is created but without the categories from the source record with id '%d':",
+ e.getMessage(), newId));
}
- } catch (Exception e) {
- Log.warning(Geonet.DATA_MANAGER,
- String.format("Error while copying source record category to new record. Error is %s. "
- + "Metadata is created but without the categories from the source record with id '%d':",
- e.getMessage(), newId));
}
- }
- if (category != null && category.length > 0) {
- try {
- for (String c : category) {
- dataManager.setCategory(context, newId, c);
+ if (category != null && category.length > 0) {
+ try {
+ for (String c : category) {
+ dataManager.setCategory(context, newId, c);
+ }
+ } catch (Exception e) {
+ Log.warning(Geonet.DATA_MANAGER,
+ String.format(
+ "Error while setting record category to new record. Error is %s. "
+ + "Metadata is created but without the requested categories.",
+ e.getMessage(), newId));
}
- } catch (Exception e) {
- Log.warning(Geonet.DATA_MANAGER,
- String.format(
- "Error while setting record category to new record. Error is %s. "
- + "Metadata is created but without the requested categories.",
- e.getMessage(), newId));
}
- }
- return newId;
+ return newId;
+ }
}
@io.swagger.v3.oas.annotations.Operation(summary = "Add a record from XML or MEF/ZIP file", description ="Add record in the catalog by uploading files.")
@@ -541,54 +545,55 @@ public SimpleMetadataProcessingReport insertFile(
}
SimpleMetadataProcessingReport report = new SimpleMetadataProcessingReport();
if (file != null) {
- ServiceContext context = ApiUtils.createServiceContext(request);
- for (MultipartFile f : file) {
- if (MEFLib.isValidArchiveExtensionForMEF(f.getOriginalFilename())) {
- Path tempFile = Files.createTempFile("mef-import", ".zip");
- try {
- FileUtils.copyInputStreamToFile(f.getInputStream(), tempFile.toFile());
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ for (MultipartFile f : file) {
+ if (MEFLib.isValidArchiveExtensionForMEF(f.getOriginalFilename())) {
+ Path tempFile = Files.createTempFile("mef-import", ".zip");
+ try {
+ FileUtils.copyInputStreamToFile(f.getInputStream(), tempFile.toFile());
- MEFLib.Version version = MEFLib.getMEFVersion(tempFile);
+ MEFLib.Version version = MEFLib.getMEFVersion(tempFile);
- List ids = MEFLib.doImport(version == MEFLib.Version.V1 ? "mef" : "mef2",
+ List ids = MEFLib.doImport(version == MEFLib.Version.V1 ? "mef" : "mef2",
uuidProcessing, transformWith, settingManager.getSiteId(), metadataType, category,
group, rejectIfInvalid, assignToCatalog, context, tempFile);
- if (ids.isEmpty()) {
- //we could have used a finer-grained error handling inside the MEFLib import call (MEF MD file processing)
- //This is a catch-for-call for the case when there is no record is imported, to notify the user the import is not successful.
- throw new BadFormatEx("Import 0 record, check whether the importing file is a valid MEF archive.");
- }
- ids.forEach(e -> {
- report.addMetadataInfos(Integer.parseInt(e), e, !publishToAll, false,
- String.format("Metadata imported with ID '%s'", e));
-
- try {
- triggerCreationEvent(request, e);
- } catch (Exception e1) {
- report.addError(e1);
- report.addInfos(
- String.format("Impossible to store event for '%s'. Check error for details.",
- f.getOriginalFilename()));
+ if (ids.isEmpty()) {
+ //we could have used a finer-grained error handling inside the MEFLib import call (MEF MD file processing)
+ //This is a catch-for-call for the case when there is no record is imported, to notify the user the import is not successful.
+ throw new BadFormatEx("Import 0 record, check whether the importing file is a valid MEF archive.");
}
+ ids.forEach(e -> {
+ report.addMetadataInfos(Integer.parseInt(e), e, !publishToAll, false,
+ String.format("Metadata imported with ID '%s'", e));
- report.incrementProcessedRecords();
- });
- } catch (Exception e) {
- report.addError(e);
- report.addInfos(String.format("Failed to import MEF file '%s'. Check error for details.",
- f.getOriginalFilename()));
- } finally {
- IO.deleteFile(tempFile, false, Geonet.MEF);
- }
- } else {
- Pair pair = loadRecord(metadataType, Xml.loadStream(f.getInputStream()),
+ try {
+ triggerCreationEvent(request, e);
+ } catch (Exception e1) {
+ report.addError(e1);
+ report.addInfos(
+ String.format("Impossible to store event for '%s'. Check error for details.",
+ f.getOriginalFilename()));
+ }
+
+ report.incrementProcessedRecords();
+ });
+ } catch (Exception e) {
+ report.addError(e);
+ report.addInfos(String.format("Failed to import MEF file '%s'. Check error for details.",
+ f.getOriginalFilename()));
+ } finally {
+ IO.deleteFile(tempFile, false, Geonet.MEF);
+ }
+ } else {
+ Pair pair = loadRecord(metadataType, Xml.loadStream(f.getInputStream()),
uuidProcessing, group, category, rejectIfInvalid, publishToAll, transformWith, schema,
extra, request);
- report.addMetadataInfos(pair.one(), pair.two(), !publishToAll, false, String.format("Metadata imported with UUID '%s'", pair.two()));
+ report.addMetadataInfos(pair.one(), pair.two(), !publishToAll, false, String.format("Metadata imported with UUID '%s'", pair.two()));
- triggerImportEvent(request, pair.two());
+ triggerImportEvent(request, pair.two());
- report.incrementProcessedRecords();
+ report.incrementProcessedRecords();
+ }
}
}
}
@@ -626,112 +631,113 @@ public SimpleMetadataProcessingReport insertOgcMapContextFile(
+ "You MUST provide a filename in this case."));
}
- ServiceContext context = ApiUtils.createServiceContext(request);
- String styleSheetWmc = dataDirectory.getWebappDir() + File.separator + Geonet.Path.IMPORT_STYLESHEETS
- + File.separator + "OGCWMC-OR-OWSC-to-ISO19139.xsl";
-
- FilePathChecker.verify(filename);
-
- // Convert the context in an ISO19139 records
- Map xslParams = new HashMap();
- xslParams.put("viewer_url", viewerUrl);
- xslParams.put("map_url", url);
- xslParams.put("topic", topic);
- xslParams.put("title", title);
- xslParams.put("abstract", recordAbstract);
- xslParams.put("lang", context.getLanguage());
-
- // Assign current user to the record
- UserSession us = context.getUserSession();
-
- if (us != null) {
- xslParams.put("currentuser_name", us.getName() + " " + us.getSurname());
- // phone number is georchestra-specific
- // xslParams.put("currentuser_phone", us.getPrincipal().getPhone());
- xslParams.put("currentuser_mail", us.getEmailAddr());
- xslParams.put("currentuser_org", us.getOrganisation());
- }
+ try (ServiceContext context = ApiUtils.createServiceContext(request)){
+ String styleSheetWmc = dataDirectory.getWebappDir() + File.separator + Geonet.Path.IMPORT_STYLESHEETS
+ + File.separator + "OGCWMC-OR-OWSC-to-ISO19139.xsl";
- // 1. JDOMize the string
- Element wmcDoc = Xml.loadString(xml, false);
- // 2. Apply XSL (styleSheetWmc)
- Element transformedMd = Xml.transform(wmcDoc, new File(styleSheetWmc).toPath(), xslParams);
+ FilePathChecker.verify(filename);
- // 4. Inserts the metadata (does basically the same as the metadata.insert.paste
- // service (see Insert.java)
- String uuid = UUID.randomUUID().toString();
+ // Convert the context in an ISO19139 records
+ Map xslParams = new HashMap();
+ xslParams.put("viewer_url", viewerUrl);
+ xslParams.put("map_url", url);
+ xslParams.put("topic", topic);
+ xslParams.put("title", title);
+ xslParams.put("abstract", recordAbstract);
+ xslParams.put("lang", context.getLanguage());
- String date = new ISODate().toString();
- SimpleMetadataProcessingReport report = new SimpleMetadataProcessingReport();
+ // Assign current user to the record
+ UserSession us = context.getUserSession();
- final List id = new ArrayList();
- final List md = new ArrayList();
-
- md.add(transformedMd);
-
- // Import record
- Importer.importRecord(uuid, uuidProcessing, md, "iso19139", 0, settingManager.getSiteId(),
- settingManager.getSiteName(), null, context, id, date, date, group, MetadataType.METADATA);
-
- final Store store = context.getBean("resourceStore", Store.class);
- final IMetadataUtils metadataUtils = context.getBean(IMetadataUtils.class);
- final String metadataUuid = metadataUtils.getMetadataUuid(id.get(0));
-
- // Save the context if no context-url provided
- if (StringUtils.isEmpty(url)) {
- store.putResource(context, metadataUuid, filename, IOUtils.toInputStream(Xml.getString(wmcDoc)), null,
- MetadataResourceVisibility.PUBLIC, true);
-
- // Update the MD
- Map onlineSrcParams = new HashMap();
- onlineSrcParams.put("protocol", "OGC:OWS-C");
- onlineSrcParams.put("url",
- settingManager.getNodeURL() + String.format("api/records/%s/attachments/%s", uuid, filename));
- onlineSrcParams.put("name", filename);
- onlineSrcParams.put("desc", title);
- transformedMd = Xml.transform(transformedMd,
- schemaManager.getSchemaDir("iso19139").resolve("process").resolve("onlinesrc-add.xsl"),
- onlineSrcParams);
- dataManager.updateMetadata(context, id.get(0), transformedMd, false, true, false, context.getLanguage(),
- null, true);
- }
+ if (us != null) {
+ xslParams.put("currentuser_name", us.getName() + " " + us.getSurname());
+ // phone number is georchestra-specific
+ // xslParams.put("currentuser_phone", us.getPrincipal().getPhone());
+ xslParams.put("currentuser_mail", us.getEmailAddr());
+ xslParams.put("currentuser_org", us.getOrganisation());
+ }
- if (StringUtils.isNotEmpty(overview) && StringUtils.isNotEmpty(overviewFilename)) {
- store.putResource(context, metadataUuid, overviewFilename, new ByteArrayInputStream(Base64.decodeBase64(overview)), null,
- MetadataResourceVisibility.PUBLIC, true);
-
- // Update the MD
- Map onlineSrcParams = new HashMap();
- onlineSrcParams.put("thumbnail_url", settingManager.getNodeURL()
- + String.format("api/records/%s/attachments/%s", uuid, overviewFilename));
- transformedMd = Xml.transform(transformedMd,
- schemaManager.getSchemaDir("iso19139").resolve("process").resolve("thumbnail-add.xsl"),
- onlineSrcParams);
- dataManager.updateMetadata(context, id.get(0), transformedMd, false, true, false, context.getLanguage(),
- null, true);
- }
+ // 1. JDOMize the string
+ Element wmcDoc = Xml.loadString(xml, false);
+ // 2. Apply XSL (styleSheetWmc)
+ Element transformedMd = Xml.transform(wmcDoc, new File(styleSheetWmc).toPath(), xslParams);
+
+ // 4. Inserts the metadata (does basically the same as the metadata.insert.paste
+ // service (see Insert.java)
+ String uuid = UUID.randomUUID().toString();
+
+ String date = new ISODate().toString();
+ SimpleMetadataProcessingReport report = new SimpleMetadataProcessingReport();
+
+ final List id = new ArrayList();
+ final List md = new ArrayList();
+
+ md.add(transformedMd);
+
+ // Import record
+ Importer.importRecord(uuid, uuidProcessing, md, "iso19139", 0, settingManager.getSiteId(),
+ settingManager.getSiteName(), null, context, id, date, date, group, MetadataType.METADATA);
+
+ final Store store = context.getBean("resourceStore", Store.class);
+ final IMetadataUtils metadataUtils = context.getBean(IMetadataUtils.class);
+ final String metadataUuid = metadataUtils.getMetadataUuid(id.get(0));
+
+ // Save the context if no context-url provided
+ if (StringUtils.isEmpty(url)) {
+ store.putResource(context, metadataUuid, filename, IOUtils.toInputStream(Xml.getString(wmcDoc)), null,
+ MetadataResourceVisibility.PUBLIC, true);
+
+ // Update the MD
+ Map onlineSrcParams = new HashMap();
+ onlineSrcParams.put("protocol", "OGC:OWS-C");
+ onlineSrcParams.put("url",
+ settingManager.getNodeURL() + String.format("api/records/%s/attachments/%s", uuid, filename));
+ onlineSrcParams.put("name", filename);
+ onlineSrcParams.put("desc", title);
+ transformedMd = Xml.transform(transformedMd,
+ schemaManager.getSchemaDir("iso19139").resolve("process").resolve("onlinesrc-add.xsl"),
+ onlineSrcParams);
+ dataManager.updateMetadata(context, id.get(0), transformedMd, false, true, false, context.getLanguage(),
+ null, true);
+ }
- int iId = Integer.parseInt(id.get(0));
- if (publishToAll) {
- dataManager.setOperation(context, iId, ReservedGroup.all.getId(), ReservedOperation.view.getId());
- dataManager.setOperation(context, iId, ReservedGroup.all.getId(), ReservedOperation.download.getId());
- dataManager.setOperation(context, iId, ReservedGroup.all.getId(), ReservedOperation.dynamic.getId());
- }
- if (StringUtils.isNotEmpty(group)) {
- int gId = Integer.parseInt(group);
- dataManager.setOperation(context, iId, gId, ReservedOperation.view.getId());
- dataManager.setOperation(context, iId, gId, ReservedOperation.download.getId());
- dataManager.setOperation(context, iId, gId, ReservedOperation.dynamic.getId());
- }
+ if (StringUtils.isNotEmpty(overview) && StringUtils.isNotEmpty(overviewFilename)) {
+ store.putResource(context, metadataUuid, overviewFilename, new ByteArrayInputStream(Base64.decodeBase64(overview)), null,
+ MetadataResourceVisibility.PUBLIC, true);
+
+ // Update the MD
+ Map onlineSrcParams = new HashMap();
+ onlineSrcParams.put("thumbnail_url", settingManager.getNodeURL()
+ + String.format("api/records/%s/attachments/%s", uuid, overviewFilename));
+ transformedMd = Xml.transform(transformedMd,
+ schemaManager.getSchemaDir("iso19139").resolve("process").resolve("thumbnail-add.xsl"),
+ onlineSrcParams);
+ dataManager.updateMetadata(context, id.get(0), transformedMd, false, true, false, context.getLanguage(),
+ null, true);
+ }
+
+ int iId = Integer.parseInt(id.get(0));
+ if (publishToAll) {
+ dataManager.setOperation(context, iId, ReservedGroup.all.getId(), ReservedOperation.view.getId());
+ dataManager.setOperation(context, iId, ReservedGroup.all.getId(), ReservedOperation.download.getId());
+ dataManager.setOperation(context, iId, ReservedGroup.all.getId(), ReservedOperation.dynamic.getId());
+ }
+ if (StringUtils.isNotEmpty(group)) {
+ int gId = Integer.parseInt(group);
+ dataManager.setOperation(context, iId, gId, ReservedOperation.view.getId());
+ dataManager.setOperation(context, iId, gId, ReservedOperation.download.getId());
+ dataManager.setOperation(context, iId, gId, ReservedOperation.dynamic.getId());
+ }
- dataManager.indexMetadata(id);
- report.addMetadataInfos(Integer.parseInt(id.get(0)), uuid, !publishToAll, false, uuid);
+ dataManager.indexMetadata(id);
+ report.addMetadataInfos(Integer.parseInt(id.get(0)), uuid, !publishToAll, false, uuid);
- triggerCreationEvent(request, uuid);
+ triggerCreationEvent(request, uuid);
- report.incrementProcessedRecords();
- report.close();
- return report;
+ report.incrementProcessedRecords();
+ report.close();
+ return report;
+ }
}
/**
@@ -761,28 +767,29 @@ private void triggerCreationEvent(HttpServletRequest request, String uuid)
*/
private RecordDeletedEvent triggerDeletionEvent(HttpServletRequest request, String uuid)
throws Exception {
- AbstractMetadata metadata = ApiUtils.getRecord(uuid);
- ApplicationContext applicationContext = ApplicationContextHolder.get();
- UserSession userSession = ApiUtils.getUserSession(request.getSession());
-
- ServiceContext serviceContext = ApiUtils.createServiceContext(request);
- DataManager dataMan = applicationContext.getBean(DataManager.class);
- Element beforeMetadata = dataMan.getMetadata(serviceContext, String.valueOf(metadata.getId()), false, false, false);
- XMLOutputter outp = new XMLOutputter();
- String xmlBefore = outp.outputString(beforeMetadata);
- LinkedHashMap titles = new LinkedHashMap<>();
- try {
- titles = metadataUtils.extractTitles(Integer.toString(metadata.getId()));
- } catch (Exception e) {
- Log.warning(Geonet.DATA_MANAGER,
- String.format(
- "Error while extracting title for the metadata %d " +
- "while creating delete event. Error is %s. " +
- "It may happen on subtemplates.",
- metadata.getId(), e.getMessage()));
+ try (ServiceContext serviceContext = ApiUtils.createServiceContext(request)) {
+ AbstractMetadata metadata = ApiUtils.getRecord(uuid);
+ ApplicationContext applicationContext = ApplicationContextHolder.get();
+ UserSession userSession = ApiUtils.getUserSession(request.getSession());
+
+ DataManager dataMan = applicationContext.getBean(DataManager.class);
+ Element beforeMetadata = dataMan.getMetadata(serviceContext, String.valueOf(metadata.getId()), false, false, false);
+ XMLOutputter outp = new XMLOutputter();
+ String xmlBefore = outp.outputString(beforeMetadata);
+ LinkedHashMap titles = new LinkedHashMap<>();
+ try {
+ titles = metadataUtils.extractTitles(Integer.toString(metadata.getId()));
+ } catch (Exception e) {
+ Log.warning(Geonet.DATA_MANAGER,
+ String.format(
+ "Error while extracting title for the metadata %d " +
+ "while creating delete event. Error is %s. " +
+ "It may happen on subtemplates.",
+ metadata.getId(), e.getMessage()));
+ }
+ return new RecordDeletedEvent(metadata.getId(), metadata.getUuid(), titles, userSession.getUserIdAsInt(), xmlBefore);
}
- return new RecordDeletedEvent(metadata.getId(), metadata.getUuid(), titles, userSession.getUserIdAsInt(), xmlBefore);
- }
+ }
/**
@@ -807,119 +814,119 @@ private Pair loadRecord(MetadataType metadataType, Element xmlE
final boolean rejectIfInvalid, final boolean publishToAll, final String transformWith, String schema,
final String extra, HttpServletRequest request) throws Exception {
- ServiceContext context = ApiUtils.createServiceContext(request);
-
- if (!transformWith.equals("_none_")) {
- Path folder = dataDirectory.getWebappDir().resolve(Geonet.Path.IMPORT_STYLESHEETS);
- FilePathChecker.verify(transformWith);
- Path xslFile = folder.resolve(transformWith + ".xsl");
- if (Files.exists(xslFile)) {
- xmlElement = Xml.transform(xmlElement, xslFile);
- } else {
- throw new ResourceNotFoundException(String.format("XSL transformation '%s' not found.", transformWith));
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ if (!transformWith.equals("_none_")) {
+ Path folder = dataDirectory.getWebappDir().resolve(Geonet.Path.IMPORT_STYLESHEETS);
+ FilePathChecker.verify(transformWith);
+ Path xslFile = folder.resolve(transformWith + ".xsl");
+ if (Files.exists(xslFile)) {
+ xmlElement = Xml.transform(xmlElement, xslFile);
+ } else {
+ throw new ResourceNotFoundException(String.format("XSL transformation '%s' not found.", transformWith));
+ }
}
- }
- if (schema == null) {
- schema = dataManager.autodetectSchema(xmlElement);
if (schema == null) {
- throw new IllegalArgumentException("Can't detect schema for metadata automatically. "
- + "You could try to force the schema with the schema parameter.");
- // TODO: Report what are the supported schema
+ schema = dataManager.autodetectSchema(xmlElement);
+ if (schema == null) {
+ throw new IllegalArgumentException("Can't detect schema for metadata automatically. "
+ + "You could try to force the schema with the schema parameter.");
+ // TODO: Report what are the supported schema
+ }
+ } else {
+ // TODO: Check that the schema is supported
}
- } else {
- // TODO: Check that the schema is supported
- }
- if (rejectIfInvalid) {
- try {
- Integer groupId = null;
- if (StringUtils.isNotEmpty(group)) {
- groupId = Integer.parseInt(group);
+ if (rejectIfInvalid) {
+ try {
+ Integer groupId = null;
+ if (StringUtils.isNotEmpty(group)) {
+ groupId = Integer.parseInt(group);
+ }
+ DataManager.validateExternalMetadata(schema, xmlElement, context, groupId);
+ } catch (XSDValidationErrorEx e) {
+ throw new IllegalArgumentException(e);
}
- DataManager.validateExternalMetadata(schema, xmlElement, context, groupId);
- } catch (XSDValidationErrorEx e) {
- throw new IllegalArgumentException(e);
}
- }
- // --- if the uuid does not exist we generate it for metadata and templates
- String uuid;
- if (metadataType == MetadataType.SUB_TEMPLATE || metadataType == MetadataType.TEMPLATE_OF_SUB_TEMPLATE) {
- // subtemplates may need to be loaded with a specific uuid
- // that will be attached to the root element so check for that
- // and if not found, generate a new uuid
- uuid = xmlElement.getAttributeValue("uuid");
- if (StringUtils.isEmpty(uuid)) {
- uuid = UUID.randomUUID().toString();
- }
- } else {
- uuid = dataManager.extractUUID(schema, xmlElement);
- if (uuid.length() == 0) {
- uuid = UUID.randomUUID().toString();
- xmlElement = dataManager.setUUID(schema, uuid, xmlElement);
+ // --- if the uuid does not exist we generate it for metadata and templates
+ String uuid;
+ if (metadataType == MetadataType.SUB_TEMPLATE || metadataType == MetadataType.TEMPLATE_OF_SUB_TEMPLATE) {
+ // subtemplates may need to be loaded with a specific uuid
+ // that will be attached to the root element so check for that
+ // and if not found, generate a new uuid
+ uuid = xmlElement.getAttributeValue("uuid");
+ if (StringUtils.isEmpty(uuid)) {
+ uuid = UUID.randomUUID().toString();
+ }
+ } else {
+ uuid = dataManager.extractUUID(schema, xmlElement);
+ if (uuid.length() == 0) {
+ uuid = UUID.randomUUID().toString();
+ xmlElement = dataManager.setUUID(schema, uuid, xmlElement);
+ }
}
- }
- if (uuidProcessing == MEFLib.UuidAction.NOTHING) {
- AbstractMetadata md = metadataRepository.findOneByUuid(uuid);
- if (md != null) {
- throw new IllegalArgumentException(
- String.format("A record with UUID '%s' already exist and you choose no "
- + "action on UUID processing. Choose to overwrite existing record "
- + "or to generate a new UUID.", uuid));
+ if (uuidProcessing == MEFLib.UuidAction.NOTHING) {
+ AbstractMetadata md = metadataRepository.findOneByUuid(uuid);
+ if (md != null) {
+ throw new IllegalArgumentException(
+ String.format("A record with UUID '%s' already exist and you choose no "
+ + "action on UUID processing. Choose to overwrite existing record "
+ + "or to generate a new UUID.", uuid));
+ }
}
- }
- String date = new ISODate().toString();
+ String date = new ISODate().toString();
- final List id = new ArrayList();
- final List md = new ArrayList();
- md.add(xmlElement);
+ final List id = new ArrayList();
+ final List md = new ArrayList();
+ md.add(xmlElement);
- // Import record
- Map sourceTranslations = Maps.newHashMap();
- try {
- Importer.importRecord(uuid, uuidProcessing, md, schema, 0, settingManager.getSiteId(),
- settingManager.getSiteName(), sourceTranslations, context, id, date, date, group, metadataType);
+ // Import record
+ Map sourceTranslations = Maps.newHashMap();
+ try {
+ Importer.importRecord(uuid, uuidProcessing, md, schema, 0, settingManager.getSiteId(),
+ settingManager.getSiteName(), sourceTranslations, context, id, date, date, group, metadataType);
- } catch (DataIntegrityViolationException ex) {
- throw ex;
- } catch (Exception ex) {
- throw ex;
- }
- int iId = Integer.parseInt(id.get(0));
- uuid = dataManager.getMetadataUuid(iId + "");
+ } catch (DataIntegrityViolationException ex) {
+ throw ex;
+ } catch (Exception ex) {
+ throw ex;
+ }
+ int iId = Integer.parseInt(id.get(0));
+ uuid = dataManager.getMetadataUuid(iId + "");
- // Set template
- dataManager.setTemplate(iId, metadataType, null);
+ // Set template
+ dataManager.setTemplate(iId, metadataType, null);
- if (publishToAll) {
- dataManager.setOperation(context, iId, ReservedGroup.all.getId(), ReservedOperation.view.getId());
- dataManager.setOperation(context, iId, ReservedGroup.all.getId(), ReservedOperation.download.getId());
- dataManager.setOperation(context, iId, ReservedGroup.all.getId(), ReservedOperation.dynamic.getId());
- }
+ if (publishToAll) {
+ dataManager.setOperation(context, iId, ReservedGroup.all.getId(), ReservedOperation.view.getId());
+ dataManager.setOperation(context, iId, ReservedGroup.all.getId(), ReservedOperation.download.getId());
+ dataManager.setOperation(context, iId, ReservedGroup.all.getId(), ReservedOperation.dynamic.getId());
+ }
- dataManager.activateWorkflowIfConfigured(context, id.get(0), group);
+ dataManager.activateWorkflowIfConfigured(context, id.get(0), group);
- if (category != null) {
- for (String c : category) {
- dataManager.setCategory(context, id.get(0), c);
+ if (category != null) {
+ for (String c : category) {
+ dataManager.setCategory(context, id.get(0), c);
+ }
}
- }
- if (extra != null) {
- metadataRepository.update(iId, new Updater() {
- @Override
- public void apply(@Nonnull Metadata metadata) {
- if (extra != null) {
- metadata.getDataInfo().setExtra(extra);
+ if (extra != null) {
+ metadataRepository.update(iId, new Updater() {
+ @Override
+ public void apply(@Nonnull Metadata metadata) {
+ if (extra != null) {
+ metadata.getDataInfo().setExtra(extra);
+ }
}
- }
- });
- }
+ });
+ }
- dataManager.indexMetadata(id.get(0), true);
- return Pair.read(Integer.valueOf(id.get(0)), uuid);
+ dataManager.indexMetadata(id.get(0), true);
+ return Pair.read(Integer.valueOf(id.get(0)), uuid);
+ }
}
}
diff --git a/services/src/main/java/org/fao/geonet/api/records/MetadataProcessApi.java b/services/src/main/java/org/fao/geonet/api/records/MetadataProcessApi.java
index f40d2e293b08..b235950767ff 100644
--- a/services/src/main/java/org/fao/geonet/api/records/MetadataProcessApi.java
+++ b/services/src/main/java/org/fao/geonet/api/records/MetadataProcessApi.java
@@ -94,41 +94,41 @@ public class MetadataProcessApi {
List getSuggestions(
@Parameter(description = API_PARAM_RECORD_UUID, required = true) @PathVariable String metadataUuid,
HttpServletRequest request) throws Exception {
- AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, request);
-
- ServiceContext context = ApiUtils.createServiceContext(request);
-
- Map xslParameter = new HashMap();
- xslParameter.put("guiLang", request.getLocale().getISO3Language());
- xslParameter.put("siteUrl", sm.getSiteURL(context));
- xslParameter.put("nodeUrl", sm.getNodeURL());
- xslParameter.put("baseUrl", context.getBaseUrl());
- xslParameter.put("action", "analyze");
-
- // List or analyze all suggestions process registered for this schema
- MetadataSchema metadataSchema = dm.getSchema(metadata.getDataInfo().getSchemaId());
- Path xslProcessing = metadataSchema.getSchemaDir().resolve(XSL_SUGGEST_FILE);
- if (Files.exists(xslProcessing)) {
- // -- here we send parameters set by user from
- // URL if needed.
- boolean forEditing = false, withValidationErrors = false, keepXlinkAttributes = false;
- Element md = dm.getMetadata(context, String.valueOf(metadata.getId()), forEditing, withValidationErrors,
- keepXlinkAttributes);
-
- Element xmlSuggestions;
- try {
- xmlSuggestions = Xml.transform(md, xslProcessing, xslParameter);
- } catch (TransformerConfigurationException e) {
- throw new WebApplicationException(String.format("Error while retrieving suggestion for record '%s'. "
- + "Check your suggest.xsl process (and all its imports).", metadataUuid, xslProcessing), e);
- }
- SuggestionsType suggestions = (SuggestionsType) Xml.unmarshall(xmlSuggestions, SuggestionsType.class);
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, context);
+
+ Map xslParameter = new HashMap();
+ xslParameter.put("guiLang", request.getLocale().getISO3Language());
+ xslParameter.put("siteUrl", sm.getSiteURL(context));
+ xslParameter.put("nodeUrl", sm.getNodeURL());
+ xslParameter.put("baseUrl", context.getBaseUrl());
+ xslParameter.put("action", "analyze");
+
+ // List or analyze all suggestions process registered for this schema
+ MetadataSchema metadataSchema = dm.getSchema(metadata.getDataInfo().getSchemaId());
+ Path xslProcessing = metadataSchema.getSchemaDir().resolve(XSL_SUGGEST_FILE);
+ if (Files.exists(xslProcessing)) {
+ // -- here we send parameters set by user from
+ // URL if needed.
+ boolean forEditing = false, withValidationErrors = false, keepXlinkAttributes = false;
+ Element md = dm.getMetadata(context, String.valueOf(metadata.getId()), forEditing, withValidationErrors,
+ keepXlinkAttributes);
+
+ Element xmlSuggestions;
+ try {
+ xmlSuggestions = Xml.transform(md, xslProcessing, xslParameter);
+ } catch (TransformerConfigurationException e) {
+ throw new WebApplicationException(String.format("Error while retrieving suggestion for record '%s'. "
+ + "Check your suggest.xsl process (and all its imports).", metadataUuid, xslProcessing), e);
+ }
+ SuggestionsType suggestions = (SuggestionsType) Xml.unmarshall(xmlSuggestions, SuggestionsType.class);
- return suggestions.getSuggestion();
- } else {
- throw new ResourceNotFoundException(
- String.format("No %s files available in schema '%s'. No suggestion to provides.", XSL_SUGGEST_FILE,
- metadata.getDataInfo().getSchemaId()));
+ return suggestions.getSuggestion();
+ } else {
+ throw new ResourceNotFoundException(
+ String.format("No %s files available in schema '%s'. No suggestion to provides.", XSL_SUGGEST_FILE,
+ metadata.getDataInfo().getSchemaId()));
+ }
}
}
@@ -144,17 +144,19 @@ ResponseEntity processRecordPreview(
@Parameter(description = API_PARAM_RECORD_UUID, required = true) @PathVariable String metadataUuid,
@Parameter(description = ApiParams.API_PARAM_PROCESS_ID) @PathVariable String process, HttpServletRequest request)
throws Exception {
- AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, request);
- boolean save = request.getMethod().equals("POST");
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, context);
+ boolean save = request.getMethod().equals("POST");
- ApplicationContext applicationContext = ApplicationContextHolder.get();
- ServiceContext context = ApiUtils.createServiceContext(request);
+ ApplicationContext applicationContext = ApplicationContextHolder.get();
- XsltMetadataProcessingReport report = new XsltMetadataProcessingReport(process);
+ XsltMetadataProcessingReport report = new XsltMetadataProcessingReport(process);
- Element processedMetadata = process(applicationContext, process, request, metadata, save, context, sm, report);
+ Element processedMetadata = process(applicationContext, process, request, metadata, save, context, sm, report);
- return new ResponseEntity<>(processedMetadata, HttpStatus.OK);
+ return new ResponseEntity<>(processedMetadata, HttpStatus.OK);
+
+ }
}
@io.swagger.v3.oas.annotations.Operation(summary = "Apply a process", description =API_OP_NOTE_PROCESS)
@@ -169,16 +171,18 @@ ResponseEntity processRecord(
@Parameter(description = API_PARAM_RECORD_UUID, required = true) @PathVariable String metadataUuid,
@Parameter(description = ApiParams.API_PARAM_PROCESS_ID) @PathVariable String process, HttpServletRequest request)
throws Exception {
- AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, request);
- boolean save = true;
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, context);
+ boolean save = true;
- ApplicationContext applicationContext = ApplicationContextHolder.get();
- ServiceContext context = ApiUtils.createServiceContext(request);
+ ApplicationContext applicationContext = ApplicationContextHolder.get();
- XsltMetadataProcessingReport report = new XsltMetadataProcessingReport(process);
+ XsltMetadataProcessingReport report = new XsltMetadataProcessingReport(process);
- process(applicationContext, process, request, metadata, save, context, sm, report);
- return new ResponseEntity<>(HttpStatus.NO_CONTENT);
+ process(applicationContext, process, request, metadata, save, context, sm, report);
+ return new ResponseEntity<>(HttpStatus.NO_CONTENT);
+
+ }
}
private Element process(ApplicationContext applicationContext, String process, HttpServletRequest request,
diff --git a/services/src/main/java/org/fao/geonet/api/records/MetadataSampleApi.java b/services/src/main/java/org/fao/geonet/api/records/MetadataSampleApi.java
index edbfbc69e1b2..ed97d527a63f 100644
--- a/services/src/main/java/org/fao/geonet/api/records/MetadataSampleApi.java
+++ b/services/src/main/java/org/fao/geonet/api/records/MetadataSampleApi.java
@@ -116,70 +116,71 @@ SimpleMetadataProcessingReport addSamples(
HttpServletRequest request
)
throws Exception {
- ApplicationContext applicationContext = ApplicationContextHolder.get();
- ServiceContext context = ApiUtils.createServiceContext(request);
- SimpleMetadataProcessingReport report = new SimpleMetadataProcessingReport();
- UserSession userSession = ApiUtils.getUserSession(request.getSession());
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ ApplicationContext applicationContext = ApplicationContextHolder.get();
+ SimpleMetadataProcessingReport report = new SimpleMetadataProcessingReport();
+ UserSession userSession = ApiUtils.getUserSession(request.getSession());
- Element params = new Element("params");
- params.addContent(new Element("file_type").setText("mef"));
- params.addContent(new Element("uuidAction").setText("overwrite"));
- for (String schemaName : schema) {
- Log.info(Geonet.DATA_MANAGER, "Loading sample data for schema "
- + schemaName);
- Path schemaDir = schemaManager.getSchemaSampleDataDir(schemaName);
- if (schemaDir == null) {
- report.addInfos(String.format(
- "No samples available for schema '%s'.", schemaName
- ));
- continue;
- }
+ Element params = new Element("params");
+ params.addContent(new Element("file_type").setText("mef"));
+ params.addContent(new Element("uuidAction").setText("overwrite"));
+ for (String schemaName : schema) {
+ Log.info(Geonet.DATA_MANAGER, "Loading sample data for schema "
+ + schemaName);
+ Path schemaDir = schemaManager.getSchemaSampleDataDir(schemaName);
+ if (schemaDir == null) {
+ report.addInfos(String.format(
+ "No samples available for schema '%s'.", schemaName
+ ));
+ continue;
+ }
- if (Log.isDebugEnabled(Geonet.DATA_MANAGER)) {
- Log.debug(Geonet.DATA_MANAGER, "Searching for mefs in: " + schemaDir);
- }
+ if (Log.isDebugEnabled(Geonet.DATA_MANAGER)) {
+ Log.debug(Geonet.DATA_MANAGER, "Searching for mefs in: " + schemaDir);
+ }
- List sampleDataFilesList;
- try (DirectoryStream newDirectoryStream =
- Files.newDirectoryStream(schemaDir, "*.mef")) {
- sampleDataFilesList = Lists.newArrayList(newDirectoryStream);
- }
+ List sampleDataFilesList;
+ try (DirectoryStream newDirectoryStream =
+ Files.newDirectoryStream(schemaDir, "*.mef")) {
+ sampleDataFilesList = Lists.newArrayList(newDirectoryStream);
+ }
- int schemaCount = 0;
- for (final Path file : sampleDataFilesList) {
- try {
- if (Log.isDebugEnabled(Geonet.DATA_MANAGER)) {
- Log.debug(Geonet.DATA_MANAGER,
- String.format("Loading %s sample file %s ...", schemaName, file));
- }
- List importedMdIds = MEFLib.doImport(params, context, file, null);
+ int schemaCount = 0;
+ for (final Path file : sampleDataFilesList) {
+ try {
+ if (Log.isDebugEnabled(Geonet.DATA_MANAGER)) {
+ Log.debug(Geonet.DATA_MANAGER,
+ String.format("Loading %s sample file %s ...", schemaName, file));
+ }
+ List importedMdIds = MEFLib.doImport(params, context, file, null);
- if (importedMdIds != null && importedMdIds.size() > 0) {
- schemaCount += importedMdIds.size();
- for (String mdId : importedMdIds) {
- AbstractMetadata metadata = ApiUtils.getRecord(mdId);
- new RecordImportedEvent(Integer.parseInt(mdId), userSession.getUserIdAsInt(),
- ObjectJSONUtils.convertObjectInJsonObject(userSession.getPrincipal(), RecordImportedEvent.FIELD),
- metadata.getData()).publish(applicationContext);
+ if (importedMdIds != null && importedMdIds.size() > 0) {
+ schemaCount += importedMdIds.size();
+ for (String mdId : importedMdIds) {
+ AbstractMetadata metadata = ApiUtils.getRecord(mdId);
+ new RecordImportedEvent(Integer.parseInt(mdId), userSession.getUserIdAsInt(),
+ ObjectJSONUtils.convertObjectInJsonObject(userSession.getPrincipal(), RecordImportedEvent.FIELD),
+ metadata.getData()).publish(applicationContext);
+ }
}
+ } catch (Exception e) {
+ Log.error(Geonet.DATA_MANAGER,
+ String.format("Error loading %s sample file %s. Error is %s.",
+ schemaName, file, e.getMessage()),
+ e);
+ report.addError(new Exception(String.format(
+ "Error loading '%s' sample file '%s'. Error is %s.",
+ schemaName, file, e.getMessage())));
}
- } catch (Exception e) {
- Log.error(Geonet.DATA_MANAGER,
- String.format("Error loading %s sample file %s. Error is %s.",
- schemaName, file, e.getMessage()),
- e);
- report.addError(new Exception(String.format(
- "Error loading '%s' sample file '%s'. Error is %s.",
- schemaName, file, e.getMessage())));
+ dataManager.flush();
}
- dataManager.flush();
+ report.addInfos(String.format(
+ "%d record(s) added for schema '%s'.",
+ schemaCount, schemaName));
}
- report.addInfos(String.format(
- "%d record(s) added for schema '%s'.",
- schemaCount, schemaName));
+ report.close();
+ return report;
}
- report.close();
- return report;
}
@io.swagger.v3.oas.annotations.Operation(
@@ -209,110 +210,110 @@ SimpleMetadataProcessingReport addTemplates(
HttpServletRequest request
)
throws Exception {
- ServiceContext context = ApiUtils.createServiceContext(request);
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ SimpleMetadataProcessingReport report = new SimpleMetadataProcessingReport();
- SimpleMetadataProcessingReport report = new SimpleMetadataProcessingReport();
+ String siteId = settingManager.getSiteId();
+ int owner = ApiUtils.getUserSession(session).getUserIdAsInt();
- String siteId = settingManager.getSiteId();
- int owner = ApiUtils.getUserSession(session).getUserIdAsInt();
+ Log.info(Geonet.DATA_MANAGER, String.format(
+ "Loading templates for schemas '%s'.", schema));
- Log.info(Geonet.DATA_MANAGER, String.format(
- "Loading templates for schemas '%s'.", schema));
-
- for (String schemaName : schema) {
- Path templatesDir = schemaManager.getSchemaTemplatesDir(schemaName);
- if (templatesDir == null) {
- report.addInfos(String.format(
- "No templates available for schema '%s'.", schemaName
- ));
- continue;
- }
- final String subTemplatePrefix = "sub-";
- final String templateOfSubTemplatePrefix = "sub-tpl-";
- final int prefixLength = subTemplatePrefix.length();
- int schemaCount = 0;
- try (DirectoryStream newDirectoryStream =
- Files.newDirectoryStream(templatesDir, "*.xml")) {
- for (Path temp : newDirectoryStream) {
- String status = "failed";
- String templateName = temp.getFileName().toString();
+ for (String schemaName : schema) {
+ Path templatesDir = schemaManager.getSchemaTemplatesDir(schemaName);
+ if (templatesDir == null) {
+ report.addInfos(String.format(
+ "No templates available for schema '%s'.", schemaName
+ ));
+ continue;
+ }
+ final String subTemplatePrefix = "sub-";
+ final String templateOfSubTemplatePrefix = "sub-tpl-";
+ final int prefixLength = subTemplatePrefix.length();
+ int schemaCount = 0;
+ try (DirectoryStream newDirectoryStream =
+ Files.newDirectoryStream(templatesDir, "*.xml")) {
+ for (Path temp : newDirectoryStream) {
+ String status = "failed";
+ String templateName = temp.getFileName().toString();
- Element template = new Element("template");
- template.setAttribute("name", templateName);
+ Element template = new Element("template");
+ template.setAttribute("name", templateName);
- if (Log.isDebugEnabled(Geonet.DATA_MANAGER)) {
- Log.debug(Geonet.DATA_MANAGER,
- String.format(" - Adding %s template file %s ...",
- schemaName, templateName));
- }
+ if (Log.isDebugEnabled(Geonet.DATA_MANAGER)) {
+ Log.debug(Geonet.DATA_MANAGER,
+ String.format(" - Adding %s template file %s ...",
+ schemaName, templateName));
+ }
- try {
- Element xml = Xml.loadFile(temp);
- String uuid = UUID.randomUUID().toString();
- String isTemplate = "y";
- String title = null;
+ try {
+ Element xml = Xml.loadFile(temp);
+ String uuid = UUID.randomUUID().toString();
+ String isTemplate = "y";
+ String title = null;
- if (templateName.startsWith(subTemplatePrefix)) {
- isTemplate = templateName.startsWith(templateOfSubTemplatePrefix) ?
- "t" : "s";
- }
+ if (templateName.startsWith(subTemplatePrefix)) {
+ isTemplate = templateName.startsWith(templateOfSubTemplatePrefix) ?
+ "t" : "s";
+ }
- if (isTemplate.equals("s")) {
- // subtemplates loaded here can have a specific uuid
- // attribute
- String tryUuid = xml.getAttributeValue("uuid");
- if (!StringUtils.isEmpty(tryUuid)) uuid = tryUuid;
- }
- if (dataManager.existsMetadataUuid(uuid)) {
- String upid = dataManager.getMetadataId(uuid);
- AbstractMetadata metadata = dataManager.updateMetadata(context, upid, xml, false, true, false, context.getLanguage(), null, true);
- report.addMetadataInfos(metadata,
- String.format(
- "Template for schema '%s' with UUID '%s' updated.",
- schemaName, uuid));
- } else {
- //
- // insert metadata
- //
- Metadata metadata = new Metadata();
- metadata.setUuid(uuid);
- metadata.getDataInfo().
- setSchemaId(schemaName).
- setRoot(xml.getQualifiedName()).
- setType(MetadataType.lookup(isTemplate));
- metadata.getSourceInfo().
- setSourceId(siteId).
- setOwner(owner).
- setGroupOwner(1);
- // Set the UUID explicitly, insertMetadata doesn't update the xml with the generated UUID for templates
- if (MetadataType.lookup(isTemplate) == MetadataType.TEMPLATE) {
- xml = dataManager.setUUID(schemaName, uuid, xml);
+ if (isTemplate.equals("s")) {
+ // subtemplates loaded here can have a specific uuid
+ // attribute
+ String tryUuid = xml.getAttributeValue("uuid");
+ if (!StringUtils.isEmpty(tryUuid)) uuid = tryUuid;
+ }
+ if (dataManager.existsMetadataUuid(uuid)) {
+ String upid = dataManager.getMetadataId(uuid);
+ AbstractMetadata metadata = dataManager.updateMetadata(context, upid, xml, false, true, false, context.getLanguage(), null, true);
+ report.addMetadataInfos(metadata,
+ String.format(
+ "Template for schema '%s' with UUID '%s' updated.",
+ schemaName, uuid));
+ } else {
+ //
+ // insert metadata
+ //
+ Metadata metadata = new Metadata();
+ metadata.setUuid(uuid);
+ metadata.getDataInfo().
+ setSchemaId(schemaName).
+ setRoot(xml.getQualifiedName()).
+ setType(MetadataType.lookup(isTemplate));
+ metadata.getSourceInfo().
+ setSourceId(siteId).
+ setOwner(owner).
+ setGroupOwner(1);
+ // Set the UUID explicitly, insertMetadata doesn't update the xml with the generated UUID for templates
+ if (MetadataType.lookup(isTemplate) == MetadataType.TEMPLATE) {
+ xml = dataManager.setUUID(schemaName, uuid, xml);
+ }
+ dataManager.insertMetadata(context, metadata, xml, true, true, UpdateDatestamp.NO, false, true);
+ report.addMetadataInfos(metadata,
+ String.format(
+ "Template for schema '%s' with UUID '%s' added.",
+ schemaName, metadata.getUuid()));
}
- dataManager.insertMetadata(context, metadata, xml, true, true, UpdateDatestamp.NO, false, true);
- report.addMetadataInfos(metadata,
- String.format(
- "Template for schema '%s' with UUID '%s' added.",
- schemaName, metadata.getUuid()));
- }
- schemaCount++;
- } catch (Exception e) {
- Log.error(Geonet.DATA_MANAGER,
- String.format("Error loading %s template file %s. Error is %s.",
- schemaName, temp, e.getMessage()),
- e);
- report.addError(new Exception(String.format(
- "Error loading '%s' template file '%s'. Error is %s.",
- schemaName, temp, e.getMessage())));
+ schemaCount++;
+ } catch (Exception e) {
+ Log.error(Geonet.DATA_MANAGER,
+ String.format("Error loading %s template file %s. Error is %s.",
+ schemaName, temp, e.getMessage()),
+ e);
+ report.addError(new Exception(String.format(
+ "Error loading '%s' template file '%s'. Error is %s.",
+ schemaName, temp, e.getMessage())));
+ }
}
}
- }
- report.addInfos(String.format(
- "%d record(s) added for schema '%s'.",
- schemaCount, schemaName));
+ report.addInfos(String.format(
+ "%d record(s) added for schema '%s'.",
+ schemaCount, schemaName));
+ }
+ report.close();
+ return report;
}
- report.close();
- return report;
}
}
diff --git a/services/src/main/java/org/fao/geonet/api/records/MetadataSavedQueryApi.java b/services/src/main/java/org/fao/geonet/api/records/MetadataSavedQueryApi.java
index 071e4e3f6ccc..787a57a256eb 100644
--- a/services/src/main/java/org/fao/geonet/api/records/MetadataSavedQueryApi.java
+++ b/services/src/main/java/org/fao/geonet/api/records/MetadataSavedQueryApi.java
@@ -27,6 +27,7 @@
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
+import jeeves.server.context.ServiceContext;
import org.apache.commons.lang.StringUtils;
import org.fao.geonet.api.API;
import org.fao.geonet.api.ApiParams;
@@ -87,17 +88,19 @@ public List getSavedQueries(
@PathVariable final String metadataUuid,
HttpServletRequest request
) throws Exception {
- AbstractMetadata metadata = ApiUtils.canViewRecord(metadataUuid, request);
- String schemaIdentifier = metadata.getDataInfo().getSchemaId();
- SchemaPlugin schemaPlugin = schemaManager.getSchema(schemaIdentifier).getSchemaPlugin();
- if (schemaPlugin == null) {
- return new ArrayList<>();
- }
- try {
- MetadataSchema schema = schemaManager.getSchema(schemaIdentifier);
- return schema.getSchemaPlugin().getSavedQueries();
- } catch (IllegalArgumentException e) {
- return new ArrayList<>();
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ AbstractMetadata metadata = ApiUtils.canViewRecord(metadataUuid, context);
+ String schemaIdentifier = metadata.getDataInfo().getSchemaId();
+ SchemaPlugin schemaPlugin = schemaManager.getSchema(schemaIdentifier).getSchemaPlugin();
+ if (schemaPlugin == null) {
+ return new ArrayList<>();
+ }
+ try {
+ MetadataSchema schema = schemaManager.getSchema(schemaIdentifier);
+ return schema.getSchemaPlugin().getSavedQueries();
+ } catch (IllegalArgumentException e) {
+ return new ArrayList<>();
+ }
}
}
@@ -134,9 +137,11 @@ public Map applyQuery(
@Parameter(description = "The query parameters")
@RequestBody(required = false) final HashMap parameters) throws Exception {
- AbstractMetadata metadata = ApiUtils.canViewRecord(metadataUuid, request);
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ AbstractMetadata metadata = ApiUtils.canViewRecord(metadataUuid, context);
- return query(metadata, savedQuery, parameters);
+ return query(metadata, savedQuery, parameters);
+ }
}
public Map query(AbstractMetadata metadata, String savedQuery, HashMap parameters) throws ResourceNotFoundException, IOException, NoResultsFoundException {
diff --git a/services/src/main/java/org/fao/geonet/api/records/MetadataSharingApi.java b/services/src/main/java/org/fao/geonet/api/records/MetadataSharingApi.java
index e0905af3a384..380cc3b198c6 100644
--- a/services/src/main/java/org/fao/geonet/api/records/MetadataSharingApi.java
+++ b/services/src/main/java/org/fao/geonet/api/records/MetadataSharingApi.java
@@ -256,30 +256,31 @@ public void share(
HttpServletRequest request
)
throws Exception {
- AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, request);
- ApplicationContext appContext = ApplicationContextHolder.get();
- ServiceContext context = ApiUtils.createServiceContext(request);
-
- boolean skipAllReservedGroup = false;
-
- //--- in case of owner, privileges for groups 0,1 and GUEST are disabled
- //--- and are not sent to the server. So we cannot remove them
- UserSession us = ApiUtils.getUserSession(session);
- boolean isAdmin = Profile.Administrator == us.getProfile();
- if (!isAdmin && !accessManager.hasReviewPermission(context, Integer.toString(metadata.getId()))) {
- skipAllReservedGroup = true;
- }
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, context);
+ ApplicationContext appContext = ApplicationContextHolder.get();
- List operationList = operationRepository.findAll();
- Map operationMap = new HashMap<>(operationList.size());
- for (Operation o : operationList) {
- operationMap.put(o.getName(), o.getId());
- }
+ boolean skipAllReservedGroup = false;
+
+ //--- in case of owner, privileges for groups 0,1 and GUEST are disabled
+ //--- and are not sent to the server. So we cannot remove them
+ UserSession us = ApiUtils.getUserSession(session);
+ boolean isAdmin = Profile.Administrator == us.getProfile();
+ if (!isAdmin && !accessManager.hasReviewPermission(context, Integer.toString(metadata.getId()))) {
+ skipAllReservedGroup = true;
+ }
+
+ List operationList = operationRepository.findAll();
+ Map operationMap = new HashMap<>(operationList.size());
+ for (Operation o : operationList) {
+ operationMap.put(o.getName(), o.getId());
+ }
- List privileges = sharing.getPrivileges();
- setOperations(sharing, dataManager, context, appContext, metadata, operationMap, privileges,
- ApiUtils.getUserSession(session).getUserIdAsInt(), skipAllReservedGroup, null, request);
- metadataIndexer.indexMetadataPrivileges(metadata.getUuid(), metadata.getId());
+ List privileges = sharing.getPrivileges();
+ setOperations(sharing, dataManager, context, appContext, metadata, operationMap, privileges,
+ ApiUtils.getUserSession(session).getUserIdAsInt(), skipAllReservedGroup, null, request);
+ metadataIndexer.indexMetadataPrivileges(metadata.getUuid(), metadata.getId());
+ }
}
@io.swagger.v3.oas.annotations.Operation(
@@ -499,73 +500,75 @@ public SharingResponse getRecordSharingSettings(
)
throws Exception {
// TODO: Restrict to user group only in response depending on settings?
- AbstractMetadata metadata = ApiUtils.canViewRecord(metadataUuid, request);
- ApplicationContext appContext = ApplicationContextHolder.get();
- ServiceContext context = ApiUtils.createServiceContext(request);
- UserSession userSession = ApiUtils.getUserSession(session);
-
- SharingResponse sharingResponse = new SharingResponse();
- sharingResponse.setOwner(userSession.getUserId());
- Integer groupOwner = metadata.getSourceInfo().getGroupOwner();
- if (groupOwner != null) {
- sharingResponse.setGroupOwner(String.valueOf(groupOwner));
- }
-
- //--- retrieve groups operations
- Set userGroups = accessManager.getUserGroups(
- userSession,
- context.getIpAddress(), // TODO: Use the request
- false);
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ AbstractMetadata metadata = ApiUtils.canViewRecord(metadataUuid, context);
+ ApplicationContext appContext = ApplicationContextHolder.get();
- List elGroup = groupRepository.findAll();
- List allOperations = operationRepository.findAll();
+ UserSession userSession = ApiUtils.getUserSession(session);
- List groupPrivileges = new ArrayList<>(elGroup.size());
- if (elGroup != null) {
- for (Group g : elGroup) {
- GroupPrivilege groupPrivilege = new GroupPrivilege();
- groupPrivilege.setGroup(g.getId());
- groupPrivilege.setReserved(g.isReserved());
- // TODO: Restrict to user group only in response depending on settings?
- groupPrivilege.setUserGroup(userGroups.contains(g.getId()));
+ SharingResponse sharingResponse = new SharingResponse();
+ sharingResponse.setOwner(userSession.getUserId());
+ Integer groupOwner = metadata.getSourceInfo().getGroupOwner();
+ if (groupOwner != null) {
+ sharingResponse.setGroupOwner(String.valueOf(groupOwner));
+ }
- // TODO: Collecting all those info is probably a bit slow when having lots of groups
- final Specification hasGroupId = UserGroupSpecs.hasGroupId(g.getId());
- final Specification hasUserId = UserGroupSpecs.hasUserId(userSession.getUserIdAsInt());
- final Specification hasUserIdAndGroupId = where(hasGroupId).and(hasUserId);
- List userGroupEntities = userGroupRepository.findAll(hasUserIdAndGroupId);
- List userGroupProfile = new ArrayList<>();
- for (UserGroup ug : userGroupEntities) {
- userGroupProfile.add(ug.getProfile());
- }
- groupPrivilege.setUserProfile(userGroupProfile);
+ //--- retrieve groups operations
+ Set userGroups = accessManager.getUserGroups(
+ userSession,
+ context.getIpAddress(), // TODO: Use the request
+ false);
+
+ List elGroup = groupRepository.findAll();
+ List allOperations = operationRepository.findAll();
+
+ List groupPrivileges = new ArrayList<>(elGroup.size());
+ if (elGroup != null) {
+ for (Group g : elGroup) {
+ GroupPrivilege groupPrivilege = new GroupPrivilege();
+ groupPrivilege.setGroup(g.getId());
+ groupPrivilege.setReserved(g.isReserved());
+ // TODO: Restrict to user group only in response depending on settings?
+ groupPrivilege.setUserGroup(userGroups.contains(g.getId()));
+
+ // TODO: Collecting all those info is probably a bit slow when having lots of groups
+ final Specification hasGroupId = UserGroupSpecs.hasGroupId(g.getId());
+ final Specification hasUserId = UserGroupSpecs.hasUserId(userSession.getUserIdAsInt());
+ final Specification hasUserIdAndGroupId = where(hasGroupId).and(hasUserId);
+ List userGroupEntities = userGroupRepository.findAll(hasUserIdAndGroupId);
+ List userGroupProfile = new ArrayList<>();
+ for (UserGroup ug : userGroupEntities) {
+ userGroupProfile.add(ug.getProfile());
+ }
+ groupPrivilege.setUserProfile(userGroupProfile);
- //--- get all operations that this group can do on given metadata
- Specification hasGroupIdAndMetadataId =
- where(hasGroupId(g.getId()))
- .and(hasMetadataId(metadata.getId()));
- List operationAllowedForGroup =
- operationAllowedRepository.findAll(hasGroupIdAndMetadataId);
+ //--- get all operations that this group can do on given metadata
+ Specification hasGroupIdAndMetadataId =
+ where(hasGroupId(g.getId()))
+ .and(hasMetadataId(metadata.getId()));
+ List operationAllowedForGroup =
+ operationAllowedRepository.findAll(hasGroupIdAndMetadataId);
- Map operations = new HashMap<>(allOperations.size());
- for (Operation o : allOperations) {
+ Map operations = new HashMap<>(allOperations.size());
+ for (Operation o : allOperations) {
- boolean operationSetForGroup = false;
- for (OperationAllowed operationAllowed : operationAllowedForGroup) {
- if (o.getId() == operationAllowed.getId().getOperationId()) {
- operationSetForGroup = true;
- break;
+ boolean operationSetForGroup = false;
+ for (OperationAllowed operationAllowed : operationAllowedForGroup) {
+ if (o.getId() == operationAllowed.getId().getOperationId()) {
+ operationSetForGroup = true;
+ break;
+ }
}
+ operations.put(o.getName(), operationSetForGroup);
}
- operations.put(o.getName(), operationSetForGroup);
+ groupPrivilege.setOperations(operations);
+ groupPrivileges.add(groupPrivilege);
}
- groupPrivilege.setOperations(operations);
- groupPrivileges.add(groupPrivilege);
}
+ sharingResponse.setPrivileges(groupPrivileges);
+ return sharingResponse;
}
- sharingResponse.setPrivileges(groupPrivileges);
- return sharingResponse;
}
@io.swagger.v3.oas.annotations.Operation(
@@ -599,29 +602,34 @@ public void setRecordGroup(
HttpServletRequest request
)
throws Exception {
- AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, request);
- ApplicationContext appContext = ApplicationContextHolder.get();
- ServiceContext context = ApiUtils.createServiceContext(request);
-
- Group group = groupRepository.findById(groupIdentifier).get();
- if (group == null) {
- throw new ResourceNotFoundException(String.format(
- "Group with identifier '%s' not found.", groupIdentifier
- ));
- }
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, context);
+ ApplicationContext appContext = ApplicationContextHolder.get();
+
+ Group group = groupRepository.findById(groupIdentifier).get();
+ if (group == null) {
+ throw new ResourceNotFoundException(String.format(
+ "Group with identifier '%s' not found.", groupIdentifier
+ ));
+ }
- Integer previousGroup = metadata.getSourceInfo().getGroupOwner();
- Group oldGroup = null;
- if (previousGroup != null) {
- oldGroup = groupRepository.findById(previousGroup).get();
- }
+ Integer previousGroup = metadata.getSourceInfo().getGroupOwner();
+ Group oldGroup = null;
+ if (previousGroup != null) {
+ oldGroup = groupRepository.findById(previousGroup).get();
+ }
- metadata.getSourceInfo().setGroupOwner(groupIdentifier);
- metadataManager.save(metadata);
- dataManager.indexMetadata(String.valueOf(metadata.getId()), true);
+ metadata.getSourceInfo().setGroupOwner(groupIdentifier);
+ metadataManager.save(metadata);
+ dataManager.indexMetadata(String.valueOf(metadata.getId()), true);
- new RecordGroupOwnerChangeEvent(metadata.getId(), ApiUtils.getUserSession(request.getSession()).getUserIdAsInt(), ObjectJSONUtils.convertObjectInJsonObject(oldGroup, RecordGroupOwnerChangeEvent.FIELD), ObjectJSONUtils.convertObjectInJsonObject(group, RecordGroupOwnerChangeEvent.FIELD)).publish(appContext);
- }
+ new RecordGroupOwnerChangeEvent(metadata.getId(),
+ ApiUtils.getUserSession(request.getSession()).getUserIdAsInt(),
+ ObjectJSONUtils.convertObjectInJsonObject(oldGroup, RecordGroupOwnerChangeEvent.FIELD),
+ ObjectJSONUtils.convertObjectInJsonObject(group, RecordGroupOwnerChangeEvent.FIELD)
+ ).publish(appContext);
+ }
+ }
@io.swagger.v3.oas.annotations.Operation(
summary = "Get record sharing settings",
@@ -646,38 +654,38 @@ public SharingResponse getSharingSettings(
HttpServletRequest request
)
throws Exception {
- ApplicationContext appContext = ApplicationContextHolder.get();
- ServiceContext context = ApiUtils.createServiceContext(request);
- UserSession userSession = ApiUtils.getUserSession(session);
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ UserSession userSession = ApiUtils.getUserSession(session);
- SharingResponse sharingResponse = new SharingResponse();
- sharingResponse.setOwner(userSession.getUserId());
+ SharingResponse sharingResponse = new SharingResponse();
+ sharingResponse.setOwner(userSession.getUserId());
- List allOperations = operationRepository.findAll();
+ List allOperations = operationRepository.findAll();
- //--- retrieve groups operations
- Set userGroups = accessManager.getUserGroups(
- context.getUserSession(),
- context.getIpAddress(), false);
+ //--- retrieve groups operations
+ Set userGroups = accessManager.getUserGroups(
+ context.getUserSession(),
+ context.getIpAddress(), false);
- List elGroup = groupRepository.findAll();
- List groupPrivileges = new ArrayList<>(elGroup.size());
+ List elGroup = groupRepository.findAll();
+ List groupPrivileges = new ArrayList<>(elGroup.size());
- for (Group g : elGroup) {
- GroupPrivilege groupPrivilege = new GroupPrivilege();
- groupPrivilege.setGroup(g.getId());
- groupPrivilege.setReserved(g.isReserved());
- groupPrivilege.setUserGroup(userGroups.contains(g.getId()));
+ for (Group g : elGroup) {
+ GroupPrivilege groupPrivilege = new GroupPrivilege();
+ groupPrivilege.setGroup(g.getId());
+ groupPrivilege.setReserved(g.isReserved());
+ groupPrivilege.setUserGroup(userGroups.contains(g.getId()));
- Map operations = new HashMap<>(allOperations.size());
- for (Operation o : allOperations) {
- operations.put(o.getName(), false);
+ Map operations = new HashMap<>(allOperations.size());
+ for (Operation o : allOperations) {
+ operations.put(o.getName(), false);
+ }
+ groupPrivilege.setOperations(operations);
+ groupPrivileges.add(groupPrivilege);
}
- groupPrivilege.setOperations(operations);
- groupPrivileges.add(groupPrivilege);
+ sharingResponse.setPrivileges(groupPrivileges);
+ return sharingResponse;
}
- sharingResponse.setPrivileges(groupPrivileges);
- return sharingResponse;
}
@io.swagger.v3.oas.annotations.Operation(
@@ -732,14 +740,10 @@ MetadataProcessingReport setGroupAndOwner(
throws Exception {
MetadataProcessingReport report = new SimpleMetadataProcessingReport();
- try {
+ try (ServiceContext serviceContext = ApiUtils.createServiceContext(request)) {
Set records = ApiUtils.getUuidsParameterOrSelection(uuids, bucket, ApiUtils.getUserSession(session));
report.setTotalRecords(records.size());
- final ApplicationContext context = ApplicationContextHolder.get();
-
- ServiceContext serviceContext = ApiUtils.createServiceContext(request);
-
List listOfUpdatedRecords = new ArrayList<>();
for (String uuid : records) {
updateOwnership(groupIdentifier, userIdentifier,
@@ -804,13 +808,11 @@ MetadataProcessingReport setRecordOwnership(
throws Exception {
MetadataProcessingReport report = new SimpleMetadataProcessingReport();
- AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, request);
- try {
- report.setTotalRecords(1);
+ try (ServiceContext serviceContext = ApiUtils.createServiceContext(request)) {
+ AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, serviceContext);
- final ApplicationContext context = ApplicationContextHolder.get();
+ report.setTotalRecords(1);
- ServiceContext serviceContext = ApiUtils.createServiceContext(request);
List listOfUpdatedRecords = new ArrayList<>();
updateOwnership(groupIdentifier, userIdentifier,
report, dataManager, accessManager, metadataRepository,
@@ -960,40 +962,40 @@ private void checkCanPublishToAllGroup(ServiceContext context, DataManager dm, A
*/
private void shareMetadataWithAllGroup(String metadataUuid, boolean publish,
HttpSession session, HttpServletRequest request) throws Exception {
- AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, request);
- ApplicationContext appContext = ApplicationContextHolder.get();
- ServiceContext context = ApiUtils.createServiceContext(request);
+ try (ServiceContext context = ApiUtils.createServiceContext(request)) {
+ AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, context);
+ ApplicationContext appContext = ApplicationContextHolder.get();
+ //--- in case of owner, privileges for groups 0,1 and GUEST are disabled
+ //--- and are not sent to the server. So we cannot remove them
+ UserSession us = ApiUtils.getUserSession(session);
+ boolean isAdmin = Profile.Administrator == us.getProfile();
+ boolean isMdGroupReviewer = accessManager.getReviewerGroups(us).contains(metadata.getSourceInfo().getGroupOwner());
+ boolean isReviewOperationAllowedOnMdForUser = accessManager.hasReviewPermission(context, Integer.toString(metadata.getId()));
+ boolean isPublishForbiden = !isMdGroupReviewer && !isAdmin && !isReviewOperationAllowedOnMdForUser;
+ if (isPublishForbiden) {
- //--- in case of owner, privileges for groups 0,1 and GUEST are disabled
- //--- and are not sent to the server. So we cannot remove them
- UserSession us = ApiUtils.getUserSession(session);
- boolean isAdmin = Profile.Administrator == us.getProfile();
- boolean isMdGroupReviewer = accessManager.getReviewerGroups(us).contains(metadata.getSourceInfo().getGroupOwner());
- boolean isReviewOperationAllowedOnMdForUser = accessManager.hasReviewPermission(context, Integer.toString(metadata.getId()));
- boolean isPublishForbiden = !isMdGroupReviewer && !isAdmin && !isReviewOperationAllowedOnMdForUser;
- if (isPublishForbiden) {
-
- throw new Exception(String.format("User not allowed to publish the metadata %s. You need to be administrator, or reviewer of the metadata group or reviewer with edit privilege on the metadata.",
+ throw new Exception(String.format("User not allowed to publish the metadata %s. You need to be administrator, or reviewer of the metadata group or reviewer with edit privilege on the metadata.",
metadataUuid));
- }
+ }
- DataManager dataManager = appContext.getBean(DataManager.class);
+ DataManager dataManager = appContext.getBean(DataManager.class);
- OperationRepository operationRepository = appContext.getBean(OperationRepository.class);
- List operationList = operationRepository.findAll();
- Map operationMap = new HashMap<>(operationList.size());
- for (Operation o : operationList) {
- operationMap.put(o.getName(), o.getId());
- }
+ OperationRepository operationRepository = appContext.getBean(OperationRepository.class);
+ List