entry : definitions) {
String language = toiso639_1_Lang(entry.getKey());
Value definitionObj = myFactory.createLiteral(entry.getValue(), language);
myGraph.add(subject, predicateScopeNote, definitionObj);
-
}
// update bbox
@@ -677,7 +687,7 @@ public synchronized Thesaurus updateCode(String namespace, String oldcode, Strin
/**
* Update concept code using its URI. This is recommended when concept identifier may not be
* based on thesaurus namespace and does not contains #.
- *
+ *
* eg. http://vocab.nerc.ac.uk/collection/P07/current/CFV13N44/
*/
public synchronized Thesaurus updateCodeByURI(String olduri, String newuri) throws AccessDeniedException {
@@ -729,13 +739,13 @@ public void createConceptScheme(String thesaurusTitle,
Graph myGraph = new org.openrdf.model.impl.GraphImpl();
writeConceptScheme(myGraph,
- thesaurusTitle,
- multilingualTitles,
- thesaurusDescription,
- multilingualDescriptions,
- identifier,
- type,
- namespace);
+ thesaurusTitle,
+ multilingualTitles,
+ thesaurusDescription,
+ multilingualDescriptions,
+ identifier,
+ type,
+ namespace);
repository.addGraph(myGraph);
}
@@ -755,13 +765,13 @@ public void updateConceptScheme(String thesaurusTitle,
removeElement(getConceptSchemes().get(0));
writeConceptScheme(myGraph,
- thesaurusTitle,
- multilingualTitles,
- thesaurusDescription,
- multilingualDescriptions,
- identifier,
- type,
- namespace);
+ thesaurusTitle,
+ multilingualTitles,
+ thesaurusDescription,
+ multilingualDescriptions,
+ identifier,
+ type,
+ namespace);
}
public void writeConceptScheme(Graph myGraph, String thesaurusTitle,
@@ -823,9 +833,6 @@ public void writeConceptScheme(Graph myGraph, String thesaurusTitle,
}
-
-
-
private void addElement(String name, String value, Graph myGraph, ValueFactory myFactory, URI mySubject) {
if (StringUtils.isNotEmpty(value)) {
URI uri = myFactory.createURI(DC_NAMESPACE, name);
@@ -861,22 +868,22 @@ private void addElement(String name, String value, Graph myGraph, ValueFactory m
private void retrieveDublinCore(Element thesaurusEl) {
List theNSs = getThesaurusNamespaces();
- Namespace xmlNS = Namespace.getNamespace("xml","http://www.w3.org/XML/1998/namespace");
+ Namespace xmlNS = Namespace.getNamespace("xml", "http://www.w3.org/XML/1998/namespace");
try {
List multiLingualTitles = (List) Xml.selectNodes(thesaurusEl,
- "skos:ConceptScheme/dc:*[@xml:lang]|skos:ConceptScheme/dcterms:*[@xml:lang]", theNSs);
+ "skos:ConceptScheme/dc:*[@xml:lang]|skos:ConceptScheme/dcterms:*[@xml:lang]", theNSs);
dublinCoreMultilingual.clear();
- for (Element el: multiLingualTitles) {
+ for (Element el : multiLingualTitles) {
String lang = isoLanguageMapper.iso639_2_to_iso639_1(el.getAttribute("lang", xmlNS).getValue());
String value = el.getTextTrim();
String name = el.getName();
if (!dublinCoreMultilingual.containsKey(lang)) {
- dublinCoreMultilingual.put(lang,new HashMap<>());
+ dublinCoreMultilingual.put(lang, new HashMap<>());
}
- dublinCoreMultilingual.get(lang).put(name,value);
+ dublinCoreMultilingual.get(lang).put(name, value);
}
} catch (Exception e) {
- Log.warning(Geonet.THESAURUS,"error extracting multilingual dublin core items from thesaurus",e);
+ Log.warning(Geonet.THESAURUS, "error extracting multilingual dublin core items from thesaurus", e);
}
}
@@ -896,14 +903,14 @@ private void retrieveDublinCore(Element thesaurusEl) {
private void retrieveMultiLingualTitles(Element thesaurusEl) {
try {
String xpathTitles = "skos:ConceptScheme/dc:title[@xml:lang]" +
- "|skos:ConceptScheme/dcterms:title[@xml:lang]" +
- "|skos:ConceptScheme/rdfs:label[@xml:lang]" +
- "|skos:ConceptScheme/skos:prefLabel[@xml:lang]" +
- "|rdf:Description[rdf:type/@rdf:resource = 'http://www.w3.org/2004/02/skos/core#ConceptScheme']/dc:title[@xml:lang]";
+ "|skos:ConceptScheme/dcterms:title[@xml:lang]" +
+ "|skos:ConceptScheme/rdfs:label[@xml:lang]" +
+ "|skos:ConceptScheme/skos:prefLabel[@xml:lang]" +
+ "|rdf:Description[rdf:type/@rdf:resource = 'http://www.w3.org/2004/02/skos/core#ConceptScheme']/dc:title[@xml:lang]";
multilingualTitles.clear();
multilingualTitles.putAll(retrieveMultilingualField(thesaurusEl, xpathTitles));
} catch (Exception e) {
- Log.warning(Geonet.THESAURUS,"error extracting multilingual titles from thesaurus",e);
+ Log.warning(Geonet.THESAURUS, "error extracting multilingual titles from thesaurus", e);
}
}
@@ -913,19 +920,19 @@ private void retrieveMultiLingualDescriptions(Element thesaurusEl) {
multilingualDescriptions.clear();
multilingualDescriptions.putAll(retrieveMultilingualField(thesaurusEl, xpathDescriptions));
} catch (Exception e) {
- Log.warning(Geonet.THESAURUS,"error extracting multilingual descriptions from thesaurus",e);
+ Log.warning(Geonet.THESAURUS, "error extracting multilingual descriptions from thesaurus", e);
}
}
private Map retrieveMultilingualField(Element thesaurusEl, String xpath) throws JDOMException {
List theNSs = getThesaurusNamespaces();
- Namespace xmlNS = Namespace.getNamespace("xml","http://www.w3.org/XML/1998/namespace");
+ Namespace xmlNS = Namespace.getNamespace("xml", "http://www.w3.org/XML/1998/namespace");
Map multilingualValues = new HashMap<>();
List multilingualValuesEl = (List) Xml.selectNodes(thesaurusEl,
- xpath, theNSs);
- for (Element el: multilingualValuesEl) {
+ xpath, theNSs);
+ for (Element el : multilingualValuesEl) {
String lang = isoLanguageMapper.iso639_2_to_iso639_1(el.getAttribute("lang", xmlNS).getValue());
String titleValue = el.getTextTrim();
multilingualValues.put(lang, titleValue);
@@ -936,7 +943,7 @@ private Map retrieveMultilingualField(Element thesaurusEl, Strin
/**
* Retrieves the thesaurus information from rdf file.
- *
+ *
* Used to set the thesaurusName and thesaurusDate for keywords.
*/
private void retrieveThesaurusInformation(Path thesaurusFile, String defaultTitle, boolean ignoreMissingError) {
@@ -956,25 +963,25 @@ private void retrieveThesaurusInformation(Path thesaurusFile, String defaultTitl
retrieveDublinCore(thesaurusEl);
Element titleEl = Xml.selectElement(thesaurusEl,
- "skos:ConceptScheme/dc:title|skos:ConceptScheme/dcterms:title" +
- "|skos:ConceptScheme/rdfs:label|skos:ConceptScheme/skos:prefLabel" +
- "|skos:Collection/dc:title|skos:Collection/dcterms:title" +
- "|rdf:Description/dc:title|rdf:Description/dcterms:title", theNSs);
+ "skos:ConceptScheme/dc:title|skos:ConceptScheme/dcterms:title" +
+ "|skos:ConceptScheme/rdfs:label|skos:ConceptScheme/skos:prefLabel" +
+ "|skos:Collection/dc:title|skos:Collection/dcterms:title" +
+ "|rdf:Description/dc:title|rdf:Description/dcterms:title", theNSs);
if (titleEl != null) {
this.title = titleEl.getValue();
this.defaultNamespace = titleEl
- .getParentElement()
- .getAttributeValue("about", Namespace.getNamespace("rdf", RDF_NAMESPACE));
+ .getParentElement()
+ .getAttributeValue("about", Namespace.getNamespace("rdf", RDF_NAMESPACE));
} else {
this.title = defaultTitle;
this.defaultNamespace = DEFAULT_THESAURUS_NAMESPACE;
}
Element descriptionEl = Xml.selectElement(thesaurusEl,
- "skos:ConceptScheme/dc:description|skos:ConceptScheme/dcterms:description|" +
- "skos:Collection/dc:description|skos:Collection/dcterms:description|" +
- "rdf:Description/dc:description|rdf:Description/dcterms:description", theNSs);
+ "skos:ConceptScheme/dc:description|skos:ConceptScheme/dcterms:description|" +
+ "skos:Collection/dc:description|skos:Collection/dcterms:description|" +
+ "rdf:Description/dc:description|rdf:Description/dcterms:description", theNSs);
this.description = descriptionEl != null ? descriptionEl.getValue() : "";
@@ -987,13 +994,13 @@ private void retrieveThesaurusInformation(Path thesaurusFile, String defaultTitl
}
Element issuedDateEl = Xml.selectElement(thesaurusEl, "skos:ConceptScheme/dcterms:issued", theNSs);
- this.issuedDate = issuedDateEl==null? "": issuedDateEl.getText();
+ this.issuedDate = issuedDateEl == null ? "" : issuedDateEl.getText();
Element modifiedDateEl = Xml.selectElement(thesaurusEl, "skos:ConceptScheme/dcterms:modified", theNSs);
- this.modifiedDate = modifiedDateEl==null? "": modifiedDateEl.getText();
+ this.modifiedDate = modifiedDateEl == null ? "" : modifiedDateEl.getText();
Element createdDateEl = Xml.selectElement(thesaurusEl, "skos:ConceptScheme/dcterms:created", theNSs);
- this.createdDate = createdDateEl==null? "": createdDateEl.getText();
+ this.createdDate = createdDateEl == null ? "" : createdDateEl.getText();
// Default date
Element dateEl = Xml.selectElement(thesaurusEl, "skos:ConceptScheme/dcterms:issued|skos:Collection/dc:date", theNSs);
@@ -1031,12 +1038,12 @@ private void retrieveThesaurusInformation(Path thesaurusFile, String defaultTitl
if (Log.isDebugEnabled(Geonet.THESAURUS_MAN)) {
Log.debug(Geonet.THESAURUS_MAN, String.format(
- "Thesaurus information: %s (%s)", this.title, this.date));
+ "Thesaurus information: %s (%s)", this.title, this.date));
}
} catch (Exception ex) {
if (!ignoreMissingError)
Log.error(Geonet.THESAURUS_MAN, String.format(
- "Error getting thesaurus info for %s. Error is: %s", thesaurusFile, ex.getMessage()));
+ "Error getting thesaurus info for %s. Error is: %s", thesaurusFile, ex.getMessage()));
}
}
@@ -1137,9 +1144,9 @@ public KeywordBean getKeyword(String uri, String... languages) {
try {
Query query = QueryBuilder
- .keywordQueryBuilder(getIsoLanguageMapper(), languages)
- .where(Wheres.ID(uri))
- .build();
+ .keywordQueryBuilder(getIsoLanguageMapper(), languages)
+ .where(Wheres.ID(uri))
+ .build();
keywords = query.execute(this);
} catch (Exception e) {
@@ -1165,9 +1172,9 @@ public List getTopConcepts(String... languages) {
try {
Query query = QueryBuilder
- .keywordQueryBuilder(getIsoLanguageMapper(), languages)
- .select(Selectors.TOPCONCEPTS, true)
- .build();
+ .keywordQueryBuilder(getIsoLanguageMapper(), languages)
+ .select(Selectors.TOPCONCEPTS, true)
+ .build();
keywords = query.execute(this);
} catch (Exception e) {
@@ -1235,9 +1242,9 @@ public boolean hasBroader(String uri) {
*/
public List getRelated(String uri, KeywordRelation request, String... languages) {
Query query = QueryBuilder
- .keywordQueryBuilder(getIsoLanguageMapper(), languages)
- .select(Selectors.related(uri, request), true)
- .build();
+ .keywordQueryBuilder(getIsoLanguageMapper(), languages)
+ .select(Selectors.related(uri, request), true)
+ .build();
try {
return query.execute(this);
@@ -1272,9 +1279,9 @@ public boolean hasKeywordWithLabel(String label, String langCode) {
*/
public KeywordBean getKeywordWithLabel(String label, String langCode) {
Query query = QueryBuilder
- .keywordQueryBuilder(getIsoLanguageMapper(), langCode)
- .where(Wheres.prefLabel(langCode, label))
- .build();
+ .keywordQueryBuilder(getIsoLanguageMapper(), langCode)
+ .where(Wheres.prefLabel(langCode, label))
+ .build();
List matchingKeywords;
@@ -1304,7 +1311,7 @@ public Map getTitles(ApplicationContext context) throws JDOMExce
return LangUtils.translate(context, getKey());
}
- public List getKeywordHierarchy(String keywordLabel, String langCode) {
+ public List getKeywordHierarchy(String keywordLabel, String langCode) {
String cacheKey = "getKeywordHierarchy" + keywordLabel + langCode;
Object cacheValue = THESAURUS_SEARCH_CACHE.getIfPresent(cacheKey);
if (cacheValue != null) {
@@ -1312,26 +1319,26 @@ public List getKeywordHierarchy(String keywordLabel, String langCode) {
}
boolean isUri = keywordLabel.startsWith("http");
KeywordBean term =
- isUri
- ? this.getKeyword(keywordLabel, langCode)
- : this.getKeywordWithLabel(keywordLabel, langCode);
+ isUri
+ ? this.getKeyword(keywordLabel, langCode)
+ : this.getKeywordWithLabel(keywordLabel, langCode);
- List> result = this.classify(term, langCode);
+ List> result = this.classify(term, langCode);
- List hierarchies = new ArrayList<>();
- for ( List hierachy : result) {
+ List hierarchies = new ArrayList<>();
+ for (List hierachy : result) {
String path = hierachy.stream()
- .map(k -> isUri ? k.getUriCode() : k.getPreferredLabel(langCode))
- .collect(Collectors.joining("^"));
+ .map(k -> isUri ? k.getUriCode() : k.getPreferredLabel(langCode))
+ .collect(Collectors.joining("^"));
hierarchies.add(path);
}
THESAURUS_SEARCH_CACHE.put(cacheKey, hierarchies);
return hierarchies;
}
- public List> classify(KeywordBean term, String langCode) {
+ public List> classify(KeywordBean term, String langCode) {
- List> result = new ArrayList<>();
+ List> result = new ArrayList<>();
if (this.hasBroader(term.getUriCode())) {
result.addAll(classifyTermWithBroaderTerms(term, langCode));
} else {
@@ -1340,16 +1347,16 @@ public List> classify(KeywordBean term, String langCode)
return result;
}
- private List> classifyTermWithBroaderTerms(KeywordBean term, String langCode) {
- List> result = new ArrayList<>();
- for (ArrayList stringToBroaderTerm : classifyBroaderTerms(term, langCode)) {
+ private List> classifyTermWithBroaderTerms(KeywordBean term, String langCode) {
+ List> result = new ArrayList<>();
+ for (ArrayList stringToBroaderTerm : classifyBroaderTerms(term, langCode)) {
stringToBroaderTerm.add(term);
result.add(stringToBroaderTerm);
}
return result;
}
- private List> classifyBroaderTerms(KeywordBean term, String langCode) {
+ private List> classifyBroaderTerms(KeywordBean term, String langCode) {
List> result = new ArrayList<>();
List narrowerList = this.getNarrower(term.getUriCode(), langCode);
for (KeywordBean broaderTerm : this.getBroader(term.getUriCode(), langCode)) {
@@ -1361,8 +1368,8 @@ private List> classifyBroaderTerms(KeywordBean term, Str
return result;
}
- private ArrayList classifyTermWithNoBroaderTerms(KeywordBean term) {
- ArrayList list = new ArrayList <>();
+ private ArrayList classifyTermWithNoBroaderTerms(KeywordBean term) {
+ ArrayList list = new ArrayList<>();
list.add(term);
return list;
}
diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataOperations.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataOperations.java
index b2411bb2ca8..46dc7677973 100644
--- a/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataOperations.java
+++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataOperations.java
@@ -24,6 +24,7 @@
package org.fao.geonet.kernel.datamanager;
import java.util.Collection;
+import java.util.List;
import org.fao.geonet.domain.OperationAllowed;
import org.fao.geonet.domain.ReservedOperation;
@@ -52,6 +53,15 @@ public interface IMetadataOperations {
*/
void deleteMetadataOper(String metadataId, boolean skipAllReservedGroup) throws Exception;
+ /**
+ * Removes all operations stored for a metadata except for the operations of the groups in the exclude list.
+ * Used for preventing deletion of operations for reserved and restricted groups.
+ *
+ * @param metadataId Metadata identifier
+ * @param groupIdsToExclude List of group ids to exclude from deletion
+ */
+ void deleteMetadataOper(String metadataId, List groupIdsToExclude);
+
/**
* Adds a permission to a group. Metadata is not reindexed.
*/
diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataUtils.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataUtils.java
index 63c3dadb559..e875d9fc5d6 100644
--- a/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataUtils.java
+++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataUtils.java
@@ -549,4 +549,31 @@ void setCreativeCommons(ServiceContext context, String id, String licenseurl, St
* @param dest
*/
void replaceFiles(AbstractMetadata original, AbstractMetadata dest);
+
+ /**
+ * Get the metadata after performing a search and replace on it.
+ * @param uuid The UUID of the metadata to search for.
+ * @param search The string to search for.
+ * @param replace The string to replace the search string with.
+ * @return The metadata with the search and replace applied.
+ */
+ String selectOneWithSearchAndReplace(String uuid, String search, String replace);
+
+ /**
+ * Get the metadata after performing a regex search and replace on it.
+ * @param uuid The UUID of the metadata to search for.
+ * @param search The string to search for.
+ * @param replace The string to replace the search string with.
+ * @return The metadata with the search and replace applied.
+ */
+ String selectOneWithRegexSearchAndReplaceWithFlags(String uuid, String search, String replace, String flags);
+
+ /**
+ * Get the metadata after performing a regex search and replace on it.
+ * @param uuid The UUID of the metadata to search for.
+ * @param search The string to search for.
+ * @param replace The string to replace the search string with.
+ * @return The metadata with the search and replace applied.
+ */
+ String selectOneWithRegexSearchAndReplace(String uuid, String search, String replace);
}
diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataManager.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataManager.java
index f2f159c029c..7464a267735 100644
--- a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataManager.java
+++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataManager.java
@@ -582,7 +582,11 @@ public AbstractMetadata insertMetadata(ServiceContext context, AbstractMetadata
// Check if the schema is allowed by settings
String mdImportSetting = settingManager.getValue(Settings.METADATA_IMPORT_RESTRICT);
- if (mdImportSetting != null && !mdImportSetting.equals("")) {
+ if (mdImportSetting != null) {
+ // Remove spaces from the list so that "iso19115-3.2018, dublin-core" will also work
+ mdImportSetting = mdImportSetting.replace(" ", "");
+ }
+ if (!StringUtils.isBlank(mdImportSetting)) {
if (!newMetadata.getHarvestInfo().isHarvested() && !Arrays.asList(mdImportSetting.split(",")).contains(schema)) {
throw new IllegalArgumentException("The system setting '" + Settings.METADATA_IMPORT_RESTRICT
+ "' doesn't allow to import " + schema
diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataOperations.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataOperations.java
index 47cfe3b5342..526c1d65c38 100644
--- a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataOperations.java
+++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataOperations.java
@@ -116,6 +116,18 @@ public void deleteMetadataOper(String metadataId, boolean skipAllReservedGroup)
}
}
+ /**
+ * Removes all operations stored for a metadata except for the operations of the groups in the exclude list.
+ * Used for preventing deletion of operations for reserved and restricted groups.
+ *
+ * @param metadataId Metadata identifier
+ * @param groupIdsToExclude List of group ids to exclude from deletion
+ */
+ @Override
+ public void deleteMetadataOper(String metadataId, List groupIdsToExclude) {
+ opAllowedRepo.deleteAllByMetadataIdExceptGroupId(Integer.parseInt(metadataId), groupIdsToExclude);
+ }
+
/**
* Adds a permission to a group. Metadata is not reindexed.
*/
diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataUtils.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataUtils.java
index ad4dde213b4..bbcbe009e6d 100644
--- a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataUtils.java
+++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataUtils.java
@@ -1028,4 +1028,19 @@ public void cloneFiles(AbstractMetadata original, AbstractMetadata dest) {
public void replaceFiles(AbstractMetadata original, AbstractMetadata dest) {
// Empty implementation for non-draft mode as not used
}
+
+ @Override
+ public String selectOneWithSearchAndReplace(String uuid, String search, String replace) {
+ return metadataRepository.selectOneWithSearchAndReplace(uuid, search, replace);
+ }
+
+ @Override
+ public String selectOneWithRegexSearchAndReplaceWithFlags(String uuid, String search, String replace, String flags) {
+ return metadataRepository.selectOneWithRegexSearchAndReplaceWithFlags(uuid, search, replace, flags);
+ }
+
+ @Override
+ public String selectOneWithRegexSearchAndReplace(String uuid, String search, String replace) {
+ return metadataRepository.selectOneWithRegexSearchAndReplace(uuid, search, replace);
+ }
}
diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataUtils.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataUtils.java
index c5cc81ad1da..39b163ded5b 100644
--- a/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataUtils.java
+++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataUtils.java
@@ -677,6 +677,7 @@ private void cloneStoreFileUploadRequests(AbstractMetadata original, AbstractMet
metadataFileUpload.setFileSize(mfu.getFileSize());
metadataFileUpload.setUploadDate(mfu.getUploadDate());
metadataFileUpload.setUserName(mfu.getUserName());
+ metadataFileUpload.setDeletedDate(mfu.getDeletedDate());
repo.save(metadataFileUpload);
}
@@ -689,4 +690,31 @@ public void setListOfStatusCreatingDraft(Set listOfStatusCreatingDraft)
public Set getListOfStatusCreatingDraft() {
return listOfStatusToTriggerDraftCreation;
}
+
+ @Override
+ public String selectOneWithSearchAndReplace(String uuid, String search, String replace) {
+ String updatedXml = metadataDraftRepository.selectOneWithSearchAndReplace(uuid, search, replace);
+ if (updatedXml == null) {
+ updatedXml = super.selectOneWithSearchAndReplace(uuid, search, replace);
+ }
+ return updatedXml;
+ }
+
+ @Override
+ public String selectOneWithRegexSearchAndReplaceWithFlags(String uuid, String search, String replace, String flags) {
+ String updatedXml = metadataDraftRepository.selectOneWithRegexSearchAndReplaceWithFlags(uuid, search, replace, flags);
+ if (updatedXml == null) {
+ updatedXml = super.selectOneWithRegexSearchAndReplaceWithFlags(uuid, search, replace, flags);
+ }
+ return updatedXml;
+ }
+
+ @Override
+ public String selectOneWithRegexSearchAndReplace(String uuid, String search, String replace) {
+ String updatedXml = metadataDraftRepository.selectOneWithRegexSearchAndReplace(uuid, search, replace);
+ if (updatedXml == null) {
+ updatedXml = super.selectOneWithRegexSearchAndReplace(uuid, search, replace);
+ }
+ return updatedXml;
+ }
}
diff --git a/core/src/main/java/org/fao/geonet/kernel/metadata/DefaultStatusActions.java b/core/src/main/java/org/fao/geonet/kernel/metadata/DefaultStatusActions.java
index 58cc82a4459..8c0c0ca2b33 100644
--- a/core/src/main/java/org/fao/geonet/kernel/metadata/DefaultStatusActions.java
+++ b/core/src/main/java/org/fao/geonet/kernel/metadata/DefaultStatusActions.java
@@ -48,6 +48,8 @@
import org.springframework.context.ApplicationContext;
import java.util.*;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
import static org.fao.geonet.kernel.setting.Settings.SYSTEM_FEEDBACK_EMAIL;
import static org.fao.geonet.util.LocalizedEmailComponent.ComponentType.*;
@@ -330,22 +332,24 @@ protected void notify(List userToNotify, MetadataStatus status) throws Exc
);
}
- LocalizedEmail localizedEmail = new LocalizedEmail(false);
- localizedEmail.addComponents(emailSubjectComponent, emailMessageComponent, emailSalutationComponent);
-
- String subject = localizedEmail.getParsedSubject(feedbackLocales);
-
for (User user : userToNotify) {
+ LocalizedEmail localizedEmail = new LocalizedEmail(false);
+
String userName = Joiner.on(" ").skipNulls().join(user.getName(), user.getSurname());
//If we have a userName add the salutation
String message;
if (StringUtils.isEmpty(userName)) {
+ localizedEmail.addComponents(emailSubjectComponent, emailMessageComponent);
+
message = localizedEmail.getParsedMessage(feedbackLocales);
} else {
+ localizedEmail.addComponents(emailSubjectComponent, emailMessageComponent, emailSalutationComponent);
+
Map replacements = new HashMap<>();
replacements.put("{{userName}}", userName);
message = localizedEmail.getParsedMessage(feedbackLocales, replacements);
}
+ String subject = localizedEmail.getParsedSubject(feedbackLocales);
sendEmail(user.getEmail(), subject, message);
}
}
@@ -449,7 +453,9 @@ public static List getUserToNotify(StatusValueNotificationLevel notificati
}
}
}
- return users;
+
+ // Filter out users without email
+ return users.stream().filter(u -> StringUtils.isNotEmpty(u.getEmail())).collect(Collectors.toList());
}
public static List getGroupToNotify(StatusValueNotificationLevel notificationLevel, List groupNames) {
diff --git a/core/src/main/java/org/fao/geonet/kernel/search/KeywordsSearcher.java b/core/src/main/java/org/fao/geonet/kernel/search/KeywordsSearcher.java
index 054f1b2d687..0d25acf7011 100644
--- a/core/src/main/java/org/fao/geonet/kernel/search/KeywordsSearcher.java
+++ b/core/src/main/java/org/fao/geonet/kernel/search/KeywordsSearcher.java
@@ -101,9 +101,11 @@ public static Element toRawElement(Element rootEl, KeywordBean kb) {
elKeyword.addContent(elSelected);
elKeyword.addContent(new Element("id").addContent(Integer.toString(kb.getId())));
- elKeyword.addContent(new Element("value").addContent(kb.getDefaultValue()).setAttribute("language", defaultLang));
- elKeyword.addContent(new Element("definition").addContent(kb.getDefaultDefinition()).setAttribute("language", defaultLang));
- elKeyword.addContent(new Element("defaultLang").addContent(defaultLang));
+ if (defaultLang != null) {
+ elKeyword.addContent(new Element("value").addContent(kb.getDefaultValue()).setAttribute("language", defaultLang));
+ elKeyword.addContent(new Element("definition").addContent(kb.getDefaultDefinition()).setAttribute("language", defaultLang));
+ elKeyword.addContent(new Element("defaultLang").addContent(defaultLang));
+ }
Element thesaurusElement = new Element("thesaurus");
thesaurusElement.addContent(new Element("key").setText(kb.getThesaurusKey()));
thesaurusElement.addContent(new Element("title").setText(kb.getThesaurusTitle()));
diff --git a/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersAuthFilter.java b/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersAuthFilter.java
new file mode 100644
index 00000000000..9a35028cf2c
--- /dev/null
+++ b/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersAuthFilter.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright (C) 2024 Food and Agriculture Organization of the
+ * United Nations (FAO-UN), United Nations World Food Programme (WFP)
+ * and United Nations Environment Programme (UNEP)
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or (at
+ * your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
+ *
+ * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2,
+ * Rome - Italy. email: geonetwork@osgeo.org
+ */
+
+package org.fao.geonet.kernel.security.jwtheaders;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
+import org.springframework.security.core.context.SecurityContextHolder;
+import org.springframework.web.filter.GenericFilterBean;
+
+import javax.servlet.FilterChain;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import java.io.IOException;
+
+
+/**
+ * This handles the JWT-Headers authentication filter. It's based on the Shibboleth filter.
+ */
+public class JwtHeadersAuthFilter extends GenericFilterBean {
+
+ @Autowired
+ public JwtHeadersUserUtil jwtHeadersUserUtil;
+
+ JwtHeadersConfiguration jwtHeadersConfiguration;
+
+ //uniquely identify this authfilter
+ //this is needed if there are >1 Jwt-Header filters active at the same time
+ String filterId = java.util.UUID.randomUUID().toString();
+
+
+ public JwtHeadersAuthFilter(JwtHeadersConfiguration jwtHeadersConfiguration) {
+ this.jwtHeadersConfiguration = jwtHeadersConfiguration;
+ }
+
+ @Override
+ public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain)
+ throws IOException, ServletException {
+ var existingAuth = SecurityContextHolder.getContext().getAuthentication();
+ HttpServletRequest request = (HttpServletRequest) servletRequest;
+
+
+ var config = jwtHeadersConfiguration.getJwtConfiguration();
+
+ var user = JwtHeadersTrivialUser.create(config, request);
+
+ //if request is already logged in by us (same filterId), but there aren't any Jwt-Headers attached
+ //then log them out.
+ if (user == null && existingAuth != null) {
+ if (existingAuth instanceof JwtHeadersUsernamePasswordAuthenticationToken
+ && ((JwtHeadersUsernamePasswordAuthenticationToken) existingAuth).authFilterId.equals(filterId)) {
+ //at this point, there isn't a JWT header, but there's an existing auth that was made by us (JWT header)
+ // in this case, we need to log-off. They have a JSESSION auth that is no longer valid.
+ logout(request);
+ filterChain.doFilter(servletRequest, servletResponse);
+ return;
+ }
+ }
+
+
+ if (user == null) {
+ filterChain.doFilter(servletRequest, servletResponse);
+ return; // no valid user in header
+ }
+
+ //we have a valid user in the headers
+
+ //existing user is the same user as the request
+ if (existingAuth != null && existingAuth.getName().equals(user.getUsername())) {
+ filterChain.doFilter(servletRequest, servletResponse);
+ return; // abort early - no need to do an expensive login. Use the existing one.
+ }
+
+ //existing user isn't the same user as the request
+ if (existingAuth != null && !existingAuth.getName().equals(user.getUsername())) {
+ //in this case there are two auth's - the existing one (likely from JSESSION)
+ //and one coming in from the JWT headers. In this case, we kill the other login
+ //and make a new one.
+ logout(request);
+ }
+
+ var userDetails = jwtHeadersUserUtil.getUser(user, jwtHeadersConfiguration);
+ if (userDetails != null) {
+ UsernamePasswordAuthenticationToken auth = new JwtHeadersUsernamePasswordAuthenticationToken(
+ filterId, userDetails, null, userDetails.getAuthorities());
+ auth.setDetails(userDetails);
+ SecurityContextHolder.getContext().setAuthentication(auth);
+ }
+
+ filterChain.doFilter(servletRequest, servletResponse);
+ }
+
+ /**
+ * handle a logout - clear out the security context, and invalidate the session
+ *
+ * @param request
+ * @throws ServletException
+ */
+ public void logout(HttpServletRequest request) throws ServletException {
+ request.logout();//don't think this does anything in GN
+ SecurityContextHolder.getContext().setAuthentication(null);
+ request.getSession().invalidate();
+ }
+
+}
+
+
diff --git a/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersConfiguration.java b/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersConfiguration.java
new file mode 100644
index 00000000000..73d4fee4316
--- /dev/null
+++ b/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersConfiguration.java
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2024 Food and Agriculture Organization of the
+ * United Nations (FAO-UN), United Nations World Food Programme (WFP)
+ * and United Nations Environment Programme (UNEP)
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or (at
+ * your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
+ *
+ * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2,
+ * Rome - Italy. email: geonetwork@osgeo.org
+ */
+
+package org.fao.geonet.kernel.security.jwtheaders;
+
+import org.fao.geonet.kernel.security.SecurityProviderConfiguration;
+import org.geoserver.security.jwtheaders.JwtConfiguration;
+
+/**
+ * configuration for the JWT Headers security filter.
+ * See GN documentation.
+ * This is based on GeoServer's JWT-Headers Module, so you can see there as well.
+ *
+ * This class handles the GN filter configuration details, and hands the actual configuration
+ * for the filter to the JwtConfiguration class. This class is also used in Geoserver.
+ */
+public class JwtHeadersConfiguration {
+
+
+ public SecurityProviderConfiguration.LoginType loginType = SecurityProviderConfiguration.LoginType.AUTOLOGIN;
+ /**
+ * true -> update the DB with the information from OIDC (don't allow user to edit profile in the UI)
+ * false -> don't update the DB (user must edit profile in UI).
+ */
+ public boolean updateProfile = true;
+ /**
+ * true -> update the DB (user's group) with the information from OIDC (don't allow admin to edit user's groups in the UI)
+ * false -> don't update the DB (admin must edit groups in UI).
+ */
+ public boolean updateGroup = true;
+ protected JwtConfiguration jwtConfiguration;
+
+ //shared JwtHeadersSecurityConfig object
+ JwtHeadersSecurityConfig securityConfig;
+
+ // getters/setters
+
+ public JwtHeadersConfiguration(JwtHeadersSecurityConfig securityConfig) {
+ this.securityConfig = securityConfig;
+ jwtConfiguration = new JwtConfiguration();
+ }
+
+ public boolean isUpdateProfile() {
+ return securityConfig.isUpdateProfile();
+ }
+
+ public void setUpdateProfile(boolean updateProfile) {
+ securityConfig.setUpdateProfile(updateProfile);
+ }
+
+ public boolean isUpdateGroup() {
+ return securityConfig.isUpdateGroup();
+ }
+
+
+ //---- abstract class methods
+
+ public void setUpdateGroup(boolean updateGroup) {
+ securityConfig.setUpdateGroup(updateGroup);
+ }
+
+ public String getLoginType() {
+ return securityConfig.getLoginType();
+ }
+
+
+ public String getSecurityProvider() {
+ return securityConfig.getSecurityProvider();
+ }
+
+
+ public boolean isUserProfileUpdateEnabled() {
+ return securityConfig.isUserProfileUpdateEnabled();
+ }
+
+ //========================================================================
+
+ // @Override
+ public boolean isUserGroupUpdateEnabled() {
+ return securityConfig.isUserGroupUpdateEnabled();
+ }
+
+ public org.geoserver.security.jwtheaders.JwtConfiguration getJwtConfiguration() {
+ return jwtConfiguration;
+ }
+
+ public void setJwtConfiguration(
+ org.geoserver.security.jwtheaders.JwtConfiguration jwtConfiguration) {
+ this.jwtConfiguration = jwtConfiguration;
+ }
+
+}
diff --git a/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersSecurityConfig.java b/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersSecurityConfig.java
new file mode 100644
index 00000000000..3e311faaa3e
--- /dev/null
+++ b/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersSecurityConfig.java
@@ -0,0 +1,99 @@
+/*
+ * Copyright (C) 2024 Food and Agriculture Organization of the
+ * United Nations (FAO-UN), United Nations World Food Programme (WFP)
+ * and United Nations Environment Programme (UNEP)
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or (at
+ * your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
+ *
+ * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2,
+ * Rome - Italy. email: geonetwork@osgeo.org
+ */
+package org.fao.geonet.kernel.security.jwtheaders;
+
+import org.fao.geonet.kernel.security.SecurityProviderConfiguration;
+
+/**
+ * GeoNetwork only allows one SecurityProviderConfiguration bean.
+ * In the jwt-headers-multi (2 auth filters) situation, we need to have a single SecurityProviderConfiguration.
+ * We, therefore, share a single one.
+ * This class is shared between all the JwtHeadersConfiguration objects.
+ */
+public class JwtHeadersSecurityConfig implements SecurityProviderConfiguration {
+
+
+ public SecurityProviderConfiguration.LoginType loginType = SecurityProviderConfiguration.LoginType.AUTOLOGIN;
+ /**
+ * true -> update the DB with the information from OIDC (don't allow user to edit profile in the UI)
+ * false -> don't update the DB (user must edit profile in UI).
+ */
+ public boolean updateProfile = true;
+ /**
+ * true -> update the DB (user's group) with the information from OIDC (don't allow admin to edit user's groups in the UI)
+ * false -> don't update the DB (admin must edit groups in UI).
+ */
+ public boolean updateGroup = true;
+
+
+ // getters/setters
+
+
+ public JwtHeadersSecurityConfig() {
+
+ }
+
+ public boolean isUpdateProfile() {
+ return updateProfile;
+ }
+
+ public void setUpdateProfile(boolean updateProfile) {
+ this.updateProfile = updateProfile;
+ }
+
+ public boolean isUpdateGroup() {
+ return updateGroup;
+ }
+
+
+ //---- abstract class methods
+
+ public void setUpdateGroup(boolean updateGroup) {
+ this.updateGroup = updateGroup;
+ }
+
+ //@Override
+ public String getLoginType() {
+ return loginType.toString();
+ }
+
+ // @Override
+ public String getSecurityProvider() {
+ return "JWT-HEADERS";
+ }
+
+ // @Override
+ public boolean isUserProfileUpdateEnabled() {
+ // If updating profile from the security provider then disable the profile updates in the interface
+ return !updateProfile;
+ }
+
+ //========================================================================
+
+ // @Override
+ public boolean isUserGroupUpdateEnabled() {
+ // If updating group from the security provider then disable the group updates in the interface
+ return !updateGroup;
+ }
+
+}
diff --git a/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersTrivialUser.java b/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersTrivialUser.java
new file mode 100644
index 00000000000..de22d9f5ca3
--- /dev/null
+++ b/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersTrivialUser.java
@@ -0,0 +1,249 @@
+/*
+ * Copyright (C) 2024 Food and Agriculture Organization of the
+ * United Nations (FAO-UN), United Nations World Food Programme (WFP)
+ * and United Nations Environment Programme (UNEP)
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or (at
+ * your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
+ *
+ * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2,
+ * Rome - Italy. email: geonetwork@osgeo.org
+ */
+
+package org.fao.geonet.kernel.security.jwtheaders;
+
+import org.fao.geonet.constants.Geonet;
+import org.fao.geonet.domain.Profile;
+import org.fao.geonet.utils.Log;
+import org.geoserver.security.jwtheaders.JwtConfiguration;
+import org.geoserver.security.jwtheaders.roles.JwtHeadersRolesExtractor;
+import org.geoserver.security.jwtheaders.token.TokenValidator;
+import org.geoserver.security.jwtheaders.username.JwtHeaderUserNameExtractor;
+import org.springframework.util.StringUtils;
+
+import javax.servlet.http.HttpServletRequest;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * uses the GS library to process the headers.
+ * This returns a GN-compliant "user" (JwtHeadersTrivialUser) that
+ * has the header-derived username and roles (profile and profileGroups).
+ *
+ * Most of the code here is for processing profileGroups (Map<Profile, List<String>>).
+ */
+public class JwtHeadersTrivialUser {
+
+ static String ROLE_GROUP_SEPARATOR = ":";
+ static Profile MIN_PROFILE = Profile.RegisteredUser;
+ Map<Profile, List<String>> profileGroups;
+ private String username;
+
+ //----------------------
+ private Profile profile;
+
+ public JwtHeadersTrivialUser(String userName) {
+ setUsername(userName);
+ profileGroups = new HashMap<>();
+ }
+
+ public static JwtHeadersTrivialUser create(JwtConfiguration config, HttpServletRequest request) throws IOException {
+ if (request == null || config == null || config.getUserNameHeaderAttributeName() == null) {
+ Log.debug(Geonet.SECURITY, "JwtHeadersUser.create called with null args!");
+ return null; // nothing to do
+ }
+
+ var userNameHeader = request.getHeader(config.getUserNameHeaderAttributeName());
+ if (userNameHeader == null) {
+ return null; // no username in request!
+ }
+
+ //get the username from the headers (pay attention to config)
+ JwtHeaderUserNameExtractor userNameExtractor = new JwtHeaderUserNameExtractor(config);
+ var userName = userNameExtractor.extractUserName(userNameHeader);
+
+ if (userName == null) {
+ return null; // no username
+ }
+
+ var tokenValidator = new TokenValidator(config);
+ try {
+// var accessToken = userNameHeader.replaceFirst("^Bearer", "");
+// accessToken = accessToken.replaceFirst("^bearer", "");
+// accessToken = accessToken.trim();
+ tokenValidator.validate(userNameHeader);
+ } catch (Exception e) {
+ throw new IOException("JWT Token is invalid", e);
+ }
+
+ //get roles from the headers (pay attention to config)
+ var result = new JwtHeadersTrivialUser(userName);
+ handleRoles(result, config, request);
+
+ return result;
+ }
+
+ /**
+ * @param user user to be modified
+ * @param config configuration (i.e. where to get the roles from and how to convert them)
+ * @param request header to get the roles from
+ */
+ public static void handleRoles(JwtHeadersTrivialUser user, JwtConfiguration config, HttpServletRequest request) {
+ if (!"JSON".equals(config.getJwtHeaderRoleSource()) && !"JWT".equals(config.getJwtHeaderRoleSource()))
+ return; // nothing to do - we aren't configured to handle roles extraction (get from GN DB).
+
+ if (config.getRolesHeaderName() == null)
+ return; //misconfigured
+
+ //get the header value and extract the set of roles in it (processed by the RoleConverter)
+ var rolesHeader = request.getHeader(config.getRolesHeaderName());
+ JwtHeadersRolesExtractor rolesExtractor = new JwtHeadersRolesExtractor(config);
+ var roles = rolesExtractor.getRoles(rolesHeader);
+
+
+ updateUserWithRoles(user, roles);
+ }
+
+ public static void updateUserWithRoles(JwtHeadersTrivialUser user, Collection<String> roles) {
+ //need to convert the simple roles into profileGroups
+ // i.e. group1:Reviewer means user has "Reviewer" Profile for group "group1"
+ Map<Profile, List<String>> profileGroups = extractProfileRoles(roles);
+
+ //get the "max" profile (for User#Profile)
+ if (profileGroups != null && profileGroups.size() > 0) {
+ String profile = getMaxProfile(profileGroups).name();
+ if (profile != null) {
+ user.profile = Profile.valueOf(profile);
+ }
+ }
+ else {
+ user.profile = Profile.RegisteredUser;
+ }
+
+ //set the profileGroups
+ user.profileGroups = profileGroups;
+ }
+
+ /**
+ * Get the profiles, and the list of groups for that profile, from the access token.
+ *
+ * i.e. ["Administrator","g2:Editor"] -> {"Administrator":[], "Editor":["g2"]}
+ *
+ * @param rolesInToken list of roles for the user (from headers + gone through the JWT Headers RoleConverter)
+ * @return map object with the profile and related groups.
+ */
+ //from GN keycloak plugin
+ public static Map<Profile, List<String>> extractProfileRoles(Collection<String> rolesInToken) {
+ Map<Profile, List<String>> profileGroups = new HashMap<>();
+
+ Set<String> roleGroupList = new HashSet<>();
+
+ // Get role that are in the format of group:role format access
+ // Todo Reevaluate to see if this is how we want to get role groups. It may not be a good idea to place separator in group name and parse it this way.
+ for (String role : rolesInToken) {
+ if (role.contains(ROLE_GROUP_SEPARATOR)) {
+ Log.debug(Geonet.SECURITY, "Identified group:profile (" + role + ") from user token.");
+ roleGroupList.add(role);
+ } else {
+ // Only use the profiles we know of and don't add duplicates.
+ Profile p = Profile.findProfileIgnoreCase(role);
+ if (p != null && !profileGroups.containsKey(p)) {
+ profileGroups.put(p, new ArrayList<>());
+ }
+ }
+ }
+
+
+ for (String rg : roleGroupList) {
+ String[] rg_role_groups = rg.split(ROLE_GROUP_SEPARATOR);
+
+ if (rg_role_groups.length == 0 || StringUtils.isEmpty(rg_role_groups[0])) {
+ continue;
+ }
+
+ Profile p = null;
+ if (rg_role_groups.length >= 2) {
+ p = Profile.findProfileIgnoreCase(rg_role_groups[1]);
+ }
+ // If we cannot find the profile then lets ignore this entry.
+ if (p == null) {
+ continue;
+ }
+
+ List<String> groups;
+ if (profileGroups.containsKey(p)) {
+ groups = profileGroups.get(p);
+ } else {
+ groups = new ArrayList<>();
+ }
+ if (rg_role_groups.length > 1) {
+ groups.add(rg_role_groups[0]);
+ }
+ profileGroups.put(p, groups);
+ }
+
+ return profileGroups;
+ }
+
+ //----------------------
+
+ public static Profile getMaxProfile(Map<Profile, List<String>> profileGroups) {
+ Profile maxProfile = null;
+
+ for (Profile p : profileGroups.keySet()) {
+ if (maxProfile == null) {
+ maxProfile = p;
+ } else if (maxProfile.compareTo(p) >= 0) {
+ maxProfile = p;
+ }
+ }
+
+ // Fallback if no profile
+ if (maxProfile == null) {
+ maxProfile = MIN_PROFILE;
+ }
+ return maxProfile;
+ }
+
+ public String getUsername() {
+ return username;
+ }
+
+ public void setUsername(String username) {
+ this.username = username;
+ }
+
+ public Map<Profile, List<String>> getProfileGroups() {
+ return profileGroups;
+ }
+
+ public void setProfileGroups(Map<Profile, List<String>> profileGroups) {
+ this.profileGroups = profileGroups;
+ }
+
+ public Profile getProfile() {
+ return profile;
+ }
+
+ public void setProfile(Profile profile) {
+ this.profile = profile;
+ }
+
+}
diff --git a/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersUserUtil.java b/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersUserUtil.java
new file mode 100644
index 00000000000..b5629c52183
--- /dev/null
+++ b/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersUserUtil.java
@@ -0,0 +1,245 @@
+/*
+ * Copyright (C) 2024 Food and Agriculture Organization of the
+ * United Nations (FAO-UN), United Nations World Food Programme (WFP)
+ * and United Nations Environment Programme (UNEP)
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or (at
+ * your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
+ *
+ * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2,
+ * Rome - Italy. email: geonetwork@osgeo.org
+ */
+
+package org.fao.geonet.kernel.security.jwtheaders;
+
+import org.fao.geonet.constants.Geonet;
+import org.fao.geonet.domain.Group;
+import org.fao.geonet.domain.Language;
+import org.fao.geonet.domain.Profile;
+import org.fao.geonet.domain.User;
+import org.fao.geonet.domain.UserGroup;
+import org.fao.geonet.kernel.security.GeonetworkAuthenticationProvider;
+import org.fao.geonet.repository.GroupRepository;
+import org.fao.geonet.repository.LanguageRepository;
+import org.fao.geonet.repository.UserGroupRepository;
+import org.fao.geonet.repository.UserRepository;
+import org.fao.geonet.utils.Log;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.security.core.userdetails.UsernameNotFoundException;
+
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * This class handles GeoNetwork related User (and Group/UserGroup) activities.
+ */
+public class JwtHeadersUserUtil {
+
+ @Autowired
+ UserRepository userRepository;
+
+ @Autowired
+ GroupRepository groupRepository;
+
+ @Autowired
+ UserGroupRepository userGroupRepository;
+
+ @Autowired
+ GeonetworkAuthenticationProvider authProvider;
+
+ @Autowired
+ LanguageRepository languageRepository;
+
+ /**
+ * Gets a user.
+ * 1. if the user currently exists in the GN DB:
+ * - user is retrieved from the GN DB
+ * - if the profile/profileGroup update is true then the DB is updated with info from `userFromHeaders`
+ * - otherwise, the header roles are ignored and profile/profileGroups are taken from the GN DB
+ *
+ * 2. if the user doesn't exist in the DB:
+ * - user is created and saved to the DB
+ * - if the profile/profileGroup update is true then the DB is updated with info from `userFromHeaders`
+ * - otherwise, the header roles are ignored and profile/profileGroups are taken from the GN DB
+ * - NOTE: in this case, the user will not have any profile/profileGroup -
+ * an admin will have to manually set them in GN GUI
+ *
+ * @param userFromHeaders This is user info supplied in the request headers
+ * @param configuration Configuration of the JWT Headers filter
+ * @return
+ */
+ public User getUser(JwtHeadersTrivialUser userFromHeaders, JwtHeadersConfiguration configuration) {
+ try {
+ User userFromDb = (User) authProvider.loadUserByUsername(userFromHeaders.getUsername());
+ injectRoles(userFromDb, userFromHeaders, configuration);
+ return userFromDb;
+ } catch (UsernameNotFoundException e) {
+ return createUser(userFromHeaders, configuration);
+ }
+ }
+
+ /**
+ * given an existing user (both from GN DB and from the Request Headers),
+ * update roles (profile/profileGroups).
+ *
+ * isUpdateProfile/isUpdateGroup control if the DB is updated from the request Headers
+ *
+ * @param userFromDb
+ * @param userFromHeaders
+ * @param configuration
+ */
+ public void injectRoles(User userFromDb, JwtHeadersTrivialUser userFromHeaders, JwtHeadersConfiguration configuration) {
+ if (configuration.isUpdateProfile()) {
+ userFromDb.setProfile(userFromHeaders.getProfile());
+ userRepository.save(userFromDb);
+ Log.trace(Geonet.SECURITY, String.format("JwtHeaders: existing user (%s) with profile: '%s'", userFromDb.getUsername(), userFromHeaders.getProfile()));
+ }
+ if (configuration.isUpdateGroup()) {
+ var profileGroups = userFromHeaders.getProfileGroups();
+ if (profileGroups != null) {
+ updateGroups(profileGroups, userFromDb);
+ if (!profileGroups.isEmpty()) {
+ Log.trace(Geonet.SECURITY, "JwtHeaders: existing user profile groups: ");
+ for (var group : profileGroups.entrySet()) {
+ Log.debug(Geonet.SECURITY,
+ String.format(" + Profile '%s' has groups: '%s'",
+ group.getKey(),
+ String.join(",", group.getValue())
+ ));
+ }
+ }
+ }
+ }
+
+ }
+
+ /**
+ * creates a new user based on what was in the request headers.
+ *
+ * profile updating (in GN DB) is controlled by isUpdateGroup
+ * profileGroup updating (in GN DB) is controlled by isUpdateGroup
+ *
+ * cf. updateGroups for how the profile/profileGroups are updated
+ *
+ * @param userFromHeaders
+ * @param configuration
+ * @return
+ */
+ public User createUser(JwtHeadersTrivialUser userFromHeaders, JwtHeadersConfiguration configuration) {
+ //create user
+ User user = new User();
+ user.setUsername(userFromHeaders.getUsername());
+
+ // Add email
+ if (userFromHeaders.getUsername().contains("@")) {
+ user.getEmailAddresses().add(userFromHeaders.getUsername());
+ // dave@example.com --> dave
+ user.setName(user.getUsername().substring(0, user.getUsername().indexOf("@")));
+ }
+
+ Log.debug(Geonet.SECURITY, "JwtHeaders: Creating new User in GN DB: " + user);
+
+ if (configuration.isUpdateProfile()) {
+ user.setProfile(userFromHeaders.getProfile());
+ Log.debug(Geonet.SECURITY, String.format("JwtHeaders: new user profile: '%s'", userFromHeaders.getProfile()));
+ } else {
+ user.setProfile(Profile.RegisteredUser);//default to registered user
+ }
+
+ userRepository.save(user);
+
+
+ if (configuration.isUpdateGroup()) {
+ var profileGroups = userFromHeaders.getProfileGroups();
+ if (profileGroups != null) {
+ updateGroups(profileGroups, user);
+ if (!profileGroups.isEmpty()) {
+ Log.debug(Geonet.SECURITY, "JwtHeaders: new user profile groups: ");
+ for (var group : profileGroups.entrySet()) {
+ Log.debug(Geonet.SECURITY,
+ String.format(" + Profile '%s' has groups: '%s'",
+ group.getKey(),
+ String.join(",", group.getValue())
+ ));
+ }
+ }
+ }
+ }
+
+ return user;
+ }
+
+
+ /**
+ * Update users group information in the database.
+ *
+ * @param profileGroups object containing the profile and related groups.
+ * @param user to apply the changes to.
+ */
+ //from keycloak
+ public void updateGroups(Map<Profile, List<String>> profileGroups, User user) {
+ Set<UserGroup> userGroups = new HashSet<>();
+
+ // Now we add the groups
+ for (Profile p : profileGroups.keySet()) {
+ List<String> groups = profileGroups.get(p);
+ for (String rgGroup : groups) {
+
+ Group group = groupRepository.findByName(rgGroup);
+
+ if (group == null) {
+ group = new Group();
+ group.setName(rgGroup);
+
+ // Populate languages for the group
+ for (Language l : languageRepository.findAll()) {
+ group.getLabelTranslations().put(l.getId(), group.getName());
+ }
+
+ groupRepository.save(group);
+ }
+
+ UserGroup usergroup = new UserGroup();
+ usergroup.setGroup(group);
+ usergroup.setUser(user);
+
+ Profile profile = p;
+ if (profile.equals(Profile.Administrator)) {
+ // As we are assigning to a group, it is UserAdmin instead
+ profile = Profile.UserAdmin;
+ }
+ usergroup.setProfile(profile);
+
+ //Todo - It does not seem necessary to add the user to the editor profile
+ // since the reviewer is the parent of the editor
+ // Seems like the permission checks should be smart enough to know that if a user
+ // is a reviewer then they are also an editor. Need to test and fix if necessary
+ if (profile.equals(Profile.Reviewer)) {
+ UserGroup ug = new UserGroup();
+ ug.setGroup(group);
+ ug.setUser(user);
+ ug.setProfile(Profile.Editor);
+ userGroups.add(ug);
+ }
+
+ userGroups.add(usergroup);
+ }
+ }
+
+ userGroupRepository.updateUserGroups(user.getId(), userGroups);
+ }
+
+}
diff --git a/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersUsernamePasswordAuthenticationToken.java b/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersUsernamePasswordAuthenticationToken.java
new file mode 100644
index 00000000000..1e83d15e2bb
--- /dev/null
+++ b/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersUsernamePasswordAuthenticationToken.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2024 Food and Agriculture Organization of the
+ * United Nations (FAO-UN), United Nations World Food Programme (WFP)
+ * and United Nations Environment Programme (UNEP)
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or (at
+ * your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
+ *
+ * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2,
+ * Rome - Italy. email: geonetwork@osgeo.org
+ */
+
+package org.fao.geonet.kernel.security.jwtheaders;
+
+import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
+import org.springframework.security.core.GrantedAuthority;
+
+import java.util.Collection;
+
+/**
+ * this class just allows us to tag an authentication as coming from JWT Headers (for detecting logout)
+ */
+public class JwtHeadersUsernamePasswordAuthenticationToken extends UsernamePasswordAuthenticationToken {
+
+ //ID of the JwtHeaderAuthFilter that authenticated the user
+ String authFilterId;
+
+ public JwtHeadersUsernamePasswordAuthenticationToken(String authFilterId, Object principal, Object credentials, Collection<? extends GrantedAuthority> authorities) {
+ super(principal, credentials, authorities);
+ this.authFilterId = authFilterId;
+ }
+}
diff --git a/core/src/main/java/org/fao/geonet/util/FileUtil.java b/core/src/main/java/org/fao/geonet/util/FileUtil.java
index 483ebab84b0..4c54e2f4f9a 100644
--- a/core/src/main/java/org/fao/geonet/util/FileUtil.java
+++ b/core/src/main/java/org/fao/geonet/util/FileUtil.java
@@ -80,4 +80,21 @@ public static String readLastLines(File file, int lines) {
}
}
}
+
+ /**
+ * Similar to https://commons.apache.org/proper/commons-io/apidocs/org/apache/commons/io/FileUtils.html#byteCountToDisplaySize(long)
+ * however the format is returned in 2 decimal precision.
+ *
+ * @param bytes to be converted into human-readable format.
+ * @return human-readable formatted bytes.
+ */
+ public static String humanizeFileSize(long bytes) {
+ if (bytes == 0) return "0 Bytes";
+
+ String[] sizes = {"Bytes", "KB", "MB", "GB", "TB"};
+ int i = (int) Math.floor(Math.log(bytes) / Math.log(1024)); // Determine the index for sizes
+ double humanizedSize = bytes / Math.pow(1024, i);
+
+ return String.format("%.2f %s", humanizedSize, sizes[i]);
+ }
}
diff --git a/core/src/main/java/org/fao/geonet/util/XslUtil.java b/core/src/main/java/org/fao/geonet/util/XslUtil.java
index 34b9ae272ee..067d3cc4b8e 100644
--- a/core/src/main/java/org/fao/geonet/util/XslUtil.java
+++ b/core/src/main/java/org/fao/geonet/util/XslUtil.java
@@ -46,9 +46,9 @@
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.fao.geonet.ApplicationContextHolder;
+import org.fao.geonet.Constants;
import org.fao.geonet.SystemInfo;
import org.fao.geonet.analytics.WebAnalyticsConfiguration;
-import org.fao.geonet.api.records.attachments.FilesystemStore;
import org.fao.geonet.api.records.attachments.FilesystemStoreResourceContainer;
import org.fao.geonet.api.records.attachments.Store;
import org.fao.geonet.constants.Geonet;
@@ -113,6 +113,7 @@
import java.io.StringReader;
import java.net.URL;
import java.net.URLConnection;
+import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.*;
@@ -1246,11 +1247,11 @@ public static String buildDataUrl(String url, Integer size) {
Matcher m = Pattern.compile(settingManager.getNodeURL() + "api/records/(.*)/attachments/(.*)$").matcher(url);
BufferedImage image;
if (m.find()) {
- Store store = ApplicationContextHolder.get().getBean(FilesystemStore.class);
+ Store store = ApplicationContextHolder.get().getBean("filesystemStore", Store.class);
try (Store.ResourceHolder file = store.getResourceInternal(
- m.group(1),
+ URLDecoder.decode(m.group(1), Constants.ENCODING),
MetadataResourceVisibility.PUBLIC,
- m.group(2), true)) {
+ URLDecoder.decode(m.group(2), Constants.ENCODING), true)) {
image = ImageIO.read(file.getPath().toFile());
}
} else {
@@ -1440,7 +1441,7 @@ public static String getThesaurusIdByTitle(String title) {
return thesaurus == null ? "" : "geonetwork.thesaurus." + thesaurus.getKey();
}
-
+
/**
* Retrieve the thesaurus title using the thesaurus key.
*
@@ -1455,6 +1456,15 @@ public static String getThesaurusTitleByKey(String id) {
}
+ public static String getThesaurusUriByKey(String id) {
+ ApplicationContext applicationContext = ApplicationContextHolder.get();
+ ThesaurusManager thesaurusManager = applicationContext.getBean(ThesaurusManager.class);
+ Thesaurus thesaurus = thesaurusManager.getThesaurusByName(id);
+ return thesaurus == null ? "" : thesaurus.getDefaultNamespace();
+ }
+
+
+
/**
* Utility method to retrieve the name (label) for an iso language using it's code for a specific language.
*
@@ -1594,7 +1604,11 @@ private static List buildRecordLink(List hits, String type) {
public static String escapeForJson(String value) {
return StringEscapeUtils.escapeJson(value);
}
-
+
+ public static String escapeForEcmaScript(String value) {
+ return StringEscapeUtils.escapeEcmaScript(value);
+ }
+
public static String getWebAnalyticsService() {
ApplicationContext applicationContext = ApplicationContextHolder.get();
WebAnalyticsConfiguration webAnalyticsConfiguration = applicationContext.getBean(WebAnalyticsConfiguration.class);
diff --git a/core/src/test/java/jeeves/interfaces/ProfileTest.java b/core/src/test/java/jeeves/interfaces/ProfileTest.java
index 0e360641f77..b52042cc58e 100644
--- a/core/src/test/java/jeeves/interfaces/ProfileTest.java
+++ b/core/src/test/java/jeeves/interfaces/ProfileTest.java
@@ -34,17 +34,27 @@
public class ProfileTest {
+ @Test
+ public void testGetChildren() {
+ assertContainsAllExactly(Administrator.getChildren(), UserAdmin, Monitor);
+ assertContainsOnly(Reviewer, UserAdmin.getChildren());
+ assertContainsOnly(Editor, Reviewer.getChildren());
+ assertContainsOnly(RegisteredUser, Editor.getChildren());
+ assertContainsOnly(Guest, RegisteredUser.getChildren());
+ assertEquals(0, Monitor.getChildren().size());
+ assertEquals(0, Guest.getChildren().size());
+ }
+
@Test
public void testGetParents() {
- assertEquals(2, Administrator.getParents().size());
- assertTrue(Administrator.getParents().contains(UserAdmin));
- assertTrue(Administrator.getParents().contains(Monitor));
- assertContainsOnly(Reviewer, UserAdmin.getParents());
- assertContainsOnly(Editor, Reviewer.getParents());
- assertContainsOnly(RegisteredUser, Editor.getParents());
- assertContainsOnly(Guest, RegisteredUser.getParents());
- assertEquals(0, Monitor.getParents().size());
- assertEquals(0, Guest.getParents().size());
+ assertEquals(0, Administrator.getParents().size());
+ assertContainsOnly(Administrator, UserAdmin.getParents());
+ assertContainsOnly(UserAdmin, Reviewer.getParents());
+ assertContainsOnly(Reviewer, Editor.getParents());
+ assertContainsOnly(Editor, RegisteredUser.getParents());
+ assertContainsOnly(RegisteredUser, Guest.getParents());
+ assertContainsOnly(Administrator, Monitor.getParents());
+
}
private void assertContainsOnly(Profile profile, Set parents) {
@@ -53,12 +63,25 @@ private void assertContainsOnly(Profile profile, Set parents) {
}
@Test
- public void testGetAll() {
- assertContainsAllExactly(Administrator.getAll(), Administrator, UserAdmin, Reviewer, Editor, RegisteredUser, Guest, Monitor);
- assertContainsAllExactly(UserAdmin.getAll(), UserAdmin, Reviewer, Editor, RegisteredUser, Guest);
- assertContainsAllExactly(Reviewer.getAll(), Reviewer, Editor, RegisteredUser, Guest);
- assertContainsAllExactly(Editor.getAll(), Editor, RegisteredUser, Guest);
- assertContainsAllExactly(Editor.getAll(), Editor, RegisteredUser, Guest);
+ public void testGetProfileAndAllChildren() {
+ assertContainsAllExactly(Administrator.getProfileAndAllChildren(), Administrator, UserAdmin, Reviewer, Editor, RegisteredUser, Guest, Monitor);
+ assertContainsAllExactly(UserAdmin.getProfileAndAllChildren(), UserAdmin, Reviewer, Editor, RegisteredUser, Guest);
+ assertContainsAllExactly(Reviewer.getProfileAndAllChildren(), Reviewer, Editor, RegisteredUser, Guest);
+ assertContainsAllExactly(Editor.getProfileAndAllChildren(), Editor, RegisteredUser, Guest);
+ assertContainsAllExactly(RegisteredUser.getProfileAndAllChildren(), RegisteredUser, Guest);
+ assertContainsAllExactly(Guest.getProfileAndAllChildren(), Guest);
+ assertContainsAllExactly(Monitor.getProfileAndAllChildren(), Monitor);
+ }
+
+ @Test
+ public void testGetProfileAndAllParents() {
+ assertContainsAllExactly(Administrator.getProfileAndAllParents(), Administrator);
+ assertContainsAllExactly(UserAdmin.getProfileAndAllParents(), UserAdmin, Administrator);
+ assertContainsAllExactly(Reviewer.getProfileAndAllParents(), Reviewer, UserAdmin, Administrator);
+ assertContainsAllExactly(Editor.getProfileAndAllParents(), Editor, Reviewer, UserAdmin, Administrator);
+ assertContainsAllExactly(RegisteredUser.getProfileAndAllParents(), RegisteredUser, Editor, Reviewer, UserAdmin, Administrator);
+ assertContainsAllExactly(Guest.getProfileAndAllParents(), Guest, RegisteredUser, Editor, Reviewer, UserAdmin, Administrator);
+ assertContainsAllExactly(Monitor.getProfileAndAllParents(), Monitor, Administrator);
}
private void assertContainsAllExactly(Set all, Profile... profiles) {
diff --git a/core/src/test/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersConfigurationTest.java b/core/src/test/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersConfigurationTest.java
new file mode 100644
index 00000000000..56068bad3a3
--- /dev/null
+++ b/core/src/test/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersConfigurationTest.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2024 Food and Agriculture Organization of the
+ * United Nations (FAO-UN), United Nations World Food Programme (WFP)
+ * and United Nations Environment Programme (UNEP)
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or (at
+ * your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
+ *
+ * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2,
+ * Rome - Italy. email: geonetwork@osgeo.org
+ */
+
+package org.fao.geonet.kernel.security.jwtheaders;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * very simple tests for JwtHeadersConfiguration for the GN-only portions.
+ */
+public class JwtHeadersConfigurationTest {
+
+ //Very very simple test to ensure that setters/getters are working correctly
+ @Test
+ public void testGetSet() {
+ var config = JwtHeadersIntegrationTest.getBasicConfig();
+
+ //CONST
+ Assert.assertEquals("autologin", config.getLoginType());
+ Assert.assertEquals("JWT-HEADERS", config.getSecurityProvider());
+
+ config.setUpdateGroup(false);
+ Assert.assertEquals(false, config.isUpdateGroup());
+ Assert.assertEquals(false, !config.isUserGroupUpdateEnabled());
+ config.setUpdateGroup(true);
+ Assert.assertEquals(true, config.isUpdateGroup());
+ Assert.assertEquals(true, !config.isUserGroupUpdateEnabled());
+
+
+ config.setUpdateProfile(false);
+ Assert.assertEquals(false, config.isUpdateProfile());
+ Assert.assertEquals(false, !config.isUserProfileUpdateEnabled());
+ config.setUpdateProfile(true);
+ Assert.assertEquals(true, config.isUpdateProfile());
+ Assert.assertEquals(true, !config.isUserProfileUpdateEnabled());
+
+
+ Assert.assertEquals(config.jwtConfiguration, config.getJwtConfiguration());
+ config.setJwtConfiguration(null);
+ Assert.assertNull(config.getJwtConfiguration());
+ }
+}
diff --git a/core/src/test/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersIntegrationTest.java b/core/src/test/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersIntegrationTest.java
new file mode 100644
index 00000000000..f133167989c
--- /dev/null
+++ b/core/src/test/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersIntegrationTest.java
@@ -0,0 +1,281 @@
+/*
+ * Copyright (C) 2024 Food and Agriculture Organization of the
+ * United Nations (FAO-UN), United Nations World Food Programme (WFP)
+ * and United Nations Environment Programme (UNEP)
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or (at
+ * your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
+ *
+ * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2,
+ * Rome - Italy. email: geonetwork@osgeo.org
+ */
+
+package org.fao.geonet.kernel.security.jwtheaders;
+
+import org.fao.geonet.domain.User;
+import org.geoserver.security.jwtheaders.JwtConfiguration;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+import org.springframework.mock.web.MockHttpServletRequest;
+import org.springframework.security.core.context.SecurityContextHolder;
+
+import javax.servlet.FilterChain;
+import javax.servlet.ServletException;
+import javax.servlet.ServletResponse;
+import java.io.IOException;
+
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.verify;
+
+/**
+ * Basic integration tests for the filter.
+ *
+ * We are mocking all the other interactions and directly calling JwtHeadersAuthFilter#doFilter
+ * and validating the results.
+ */
+public class JwtHeadersIntegrationTest {
+
+
+ //JWT example
+ public static String JWT = "eyJhbGciOiJSUzI1NiIsInR5cCIgOiAiSldUIiwia2lkIiA6ICItWEdld190TnFwaWRrYTl2QXNJel82WEQtdnJmZDVyMlNWTWkwcWMyR1lNIn0.eyJleHAiOjE3MDcxNTMxNDYsImlhdCI6MTcwNzE1Mjg0NiwiYXV0aF90aW1lIjoxNzA3MTUyNjQ1LCJqdGkiOiJlMzhjY2ZmYy0zMWNjLTQ0NmEtYmU1Yy04MjliNDE0NTkyZmQiLCJpc3MiOiJodHRwczovL2xvZ2luLWxpdmUtZGV2Lmdlb2NhdC5saXZlL3JlYWxtcy9kYXZlLXRlc3QyIiwiYXVkIjoiYWNjb3VudCIsInN1YiI6ImVhMzNlM2NjLWYwZTEtNDIxOC04OWNiLThkNDhjMjdlZWUzZCIsInR5cCI6IkJlYXJlciIsImF6cCI6ImxpdmUta2V5MiIsIm5vbmNlIjoiQldzc2M3cTBKZ0tHZC1OdFc1QlFhVlROMkhSa25LQmVIY0ZMTHZ5OXpYSSIsInNlc3Npb25fc3RhdGUiOiIxY2FiZmU1NC1lOWU0LTRjMmMtODQwNy03NTZiMjczZmFmZmIiLCJhY3IiOiIwIiwicmVhbG1fYWNjZXNzIjp7InJvbGVzIjpbImRlZmF1bHQtcm9sZXMtZGF2ZS10ZXN0MiIsIm9mZmxpbmVfYWNjZXNzIiwidW1hX2F1dGhvcml6YXRpb24iXX0sInJlc291cmNlX2FjY2VzcyI6eyJsaXZlLWtleTIiOnsicm9sZXMiOlsiR2Vvc2VydmVyQWRtaW5pc3RyYXRvciJdfSwiYWNjb3VudCI6eyJyb2xlcyI6WyJtYW5hZ2UtYWNjb3VudCIsIm1hbmFnZS1hY2NvdW50LWxpbmtzIiwidmlldy1wcm9maWxlIl19fSwic2NvcGUiOiJvcGVuaWQgcGhvbmUgb2ZmbGluZV9hY2Nlc3MgbWljcm9wcm9maWxlLWp3dCBwcm9maWxlIGFkZHJlc3MgZW1haWwiLCJzaWQiOiIxY2FiZmU1NC1lOWU0LTRjMmMtODQwNy03NTZiMjczZmFmZmIiLCJ1cG4iOiJkYXZpZC5ibGFzYnlAZ2VvY2F0Lm5ldCIsImVtYWlsX3ZlcmlmaWVkIjpmYWxzZSwiYWRkcmVzcyI6e30sIm5hbWUiOiJkYXZpZCBibGFzYnkiLCJncm91cHMiOlsiZGVmYXVsdC1yb2xlcy1kYXZlLXRlc3QyIiwib2ZmbGluZV9hY2Nlc3MiLCJ1bWFfYXV0aG9yaXphdGlvbiJdLCJwcmVmZXJyZWRfdXNlcm5hbWUiOiJkYXZpZC5ibGFzYnlAZ2VvY2F0Lm5ldCIsImdpdmVuX25hbWUiOiJkYXZpZCIsImZhbWlseV9uYW1lIjoiYmxhc2J5IiwiZW1haWwiOiJkYXZpZC5ibGFzYnlAZ2VvY2F0Lm5ldCJ9.fHzXd7oISnqWb09ah9wikfP2UOBeiOA3vd_aDg3Bw-xcfv9aD3CWhAK5FUDPYSPyj4whAcknZbUgUzcm0qkaI8V_aS65F3Fug4jt4nC9YPL4zMSJ5an4Dp6jlQ3OQhrKFn4FwaoW61ndMmScsZZWEQyj6gzHnn5cknqySB26tVydT6q57iTO7KQFcXRdbXd6GWIoFGS-ud9XzxQMUdNfYmsDD7e6hoWhe9PJD9Zq4KT6JN13hUU4Dos-Z5SBHjRa6ieHoOe9gqkjKyA1jT1NU42Nqr-mTV-ql22nAoXuplpvOYc5-09-KDDzSDuVKFwLCNMN3ZyRF1wWuydJeU-gOQ";
+ JwtHeadersConfiguration config;
+ FilterChain filterChain;
+ ServletResponse response;
+ JwtHeadersUserUtil jwtHeadersUserUtil;
+ User user;
+ User user2;
+
+ /**
+ * standard configuration for testing JSON
+ */
+ public static JwtHeadersConfiguration getBasicConfig() {
+ JwtHeadersConfiguration config = new JwtHeadersConfiguration(new JwtHeadersSecurityConfig());
+ var jwtheadersConfiguration = config.getJwtConfiguration();
+ jwtheadersConfiguration.setUserNameHeaderAttributeName("OIDC_id_token_payload");
+
+ jwtheadersConfiguration.setUserNameFormatChoice(JwtConfiguration.UserNameHeaderFormat.JSON);
+ jwtheadersConfiguration.setUserNameJsonPath("preferred_username");
+
+
+ jwtheadersConfiguration.setRolesJsonPath("resource_access.live-key2.roles");
+ jwtheadersConfiguration.setRolesHeaderName("OIDC_id_token_payload");
+ jwtheadersConfiguration.setJwtHeaderRoleSource("JSON");
+
+ jwtheadersConfiguration.setRoleConverterString("GeonetworkAdministrator=ADMINISTRATOR");
+ jwtheadersConfiguration.setOnlyExternalListedRoles(false);
+
+ jwtheadersConfiguration.setValidateToken(false);
+
+ jwtheadersConfiguration.setValidateTokenAgainstURL(true);
+ jwtheadersConfiguration.setValidateTokenAgainstURLEndpoint("");
+ jwtheadersConfiguration.setValidateSubjectWithEndpoint(true);
+
+ jwtheadersConfiguration.setValidateTokenAudience(true);
+ jwtheadersConfiguration.setValidateTokenAudienceClaimName("");
+ jwtheadersConfiguration.setValidateTokenAudienceClaimValue("");
+
+ jwtheadersConfiguration.setValidateTokenSignature(true);
+ jwtheadersConfiguration.setValidateTokenSignatureURL("");
+
+ return config;
+ }
+
+ /**
+ * standard configuration for testing JWT
+ */
+ public static JwtHeadersConfiguration getBasicConfigJWT() {
+ JwtHeadersConfiguration config = new JwtHeadersConfiguration(new JwtHeadersSecurityConfig());
+ var jwtheadersConfiguration = config.getJwtConfiguration();
+ jwtheadersConfiguration.setUserNameHeaderAttributeName("TOKEN");
+
+ jwtheadersConfiguration.setUserNameFormatChoice(JwtConfiguration.UserNameHeaderFormat.JWT);
+ jwtheadersConfiguration.setUserNameJsonPath("preferred_username");
+
+
+ jwtheadersConfiguration.setRolesJsonPath("resource_access.live-key2.roles");
+ jwtheadersConfiguration.setRolesHeaderName("TOKEN");
+ jwtheadersConfiguration.setJwtHeaderRoleSource("JWT");
+
+ jwtheadersConfiguration.setRoleConverterString("GeoserverAdministrator=ADMINISTRATOR");
+ jwtheadersConfiguration.setOnlyExternalListedRoles(false);
+
+ jwtheadersConfiguration.setValidateToken(false);
+
+ jwtheadersConfiguration.setValidateTokenAgainstURL(true);
+ jwtheadersConfiguration.setValidateTokenAgainstURLEndpoint("");
+ jwtheadersConfiguration.setValidateSubjectWithEndpoint(true);
+
+ jwtheadersConfiguration.setValidateTokenAudience(true);
+ jwtheadersConfiguration.setValidateTokenAudienceClaimName("");
+ jwtheadersConfiguration.setValidateTokenAudienceClaimValue("");
+
+ jwtheadersConfiguration.setValidateTokenSignature(true);
+ jwtheadersConfiguration.setValidateTokenSignatureURL("");
+
+ return config;
+ }
+
+ @Before
+ public void setUp() throws Exception {
+
+ SecurityContextHolder.getContext().setAuthentication(null);
+
+
+ config = getBasicConfig();
+
+ filterChain = Mockito.mock(FilterChain.class);
+ response = Mockito.mock(ServletResponse.class);
+
+ jwtHeadersUserUtil = Mockito.mock(JwtHeadersUserUtil.class);
+
+ user = new User();
+ user.setUsername("testcase-user@geocat.net");
+
+ user2 = new User();
+ user2.setUsername("testcase-user2222@geocat.net");
+ }
+
+ /**
+ * trivial integration test - user arrives at site with header (gets access).
+ */
+ @Test
+ public void testTrivialLogin() throws ServletException, IOException {
+ doReturn(user)
+ .when(jwtHeadersUserUtil).getUser(any(), any());
+
+ var request = new MockHttpServletRequest();
+
+ request.addHeader("oidc_id_token_payload", "{\"preferred_username\":\"david.blasby2@geocat.net\",\"resource_access\":{\"live-key2\":{\"roles\":[\"GeonetworkAdministrator\",\"group1:Reviewer\"]}}}");
+
+ JwtHeadersAuthFilter filter = new JwtHeadersAuthFilter(config);
+ filter.jwtHeadersUserUtil = jwtHeadersUserUtil;
+ filter = spy(filter);
+
+ //this should login the user
+ filter.doFilter(request, response, filterChain);
+
+        //this validates the login
+ var auth = SecurityContextHolder.getContext().getAuthentication();
+ Assert.assertNotNull(auth);
+ Assert.assertTrue(auth instanceof JwtHeadersUsernamePasswordAuthenticationToken);
+ var principle = (User) auth.getPrincipal();
+ Assert.assertEquals(user.getUsername(), principle.getUsername());
+
+ //logout() should not have been called
+ verify(filter, never()).logout(any());
+ }
+
+ /**
+ * integration test -
+ * 1. user arrives at site with header (gets access).
+ * 2. user then makes request (without headers) - should get logged out (i.e. not auth + logout() called)
+ */
+ @Test
+ public void testLoginLogout() throws ServletException, IOException {
+ doReturn(user)
+ .when(jwtHeadersUserUtil).getUser(any(), any());
+
+ var request = new MockHttpServletRequest();
+
+
+ JwtHeadersAuthFilter filter = new JwtHeadersAuthFilter(config);
+ filter = spy(filter);
+ filter.jwtHeadersUserUtil = jwtHeadersUserUtil;
+
+ //logged in
+ request.addHeader("oidc_id_token_payload", "{\"preferred_username\":\"david.blasby2@geocat.net\",\"resource_access\":{\"live-key2\":{\"roles\":[\"GeonetworkAdministrator\",\"group1:Reviewer\"]}}}");
+
+ //user should be logged in
+ filter.doFilter(request, response, filterChain);
+
+ //validate login
+ var auth = SecurityContextHolder.getContext().getAuthentication();
+ Assert.assertNotNull(auth);
+ Assert.assertTrue(auth instanceof JwtHeadersUsernamePasswordAuthenticationToken);
+ var principle = (User) auth.getPrincipal();
+ Assert.assertEquals(user.getUsername(), principle.getUsername());
+ verify(filter, never()).logout(any()); //logout() should not have been called
+
+ //logout
+ request = new MockHttpServletRequest();
+ filter.doFilter(request, response, filterChain);
+
+ //no longer an auth
+ auth = SecurityContextHolder.getContext().getAuthentication();
+ Assert.assertNull(auth);
+ verify(filter).logout(any()); //logout was called
+ }
+
+ /**
+ * integration test -
+ * 1. user1 arrives at site with header (gets access).
+ * 2. switch to user2 then makes request (with headers)
+ * - user1 should get logged out (i.e. not auth + logout() called)
+ * - user2 gets logged in
+ *
+ * In general, this shouldn't happen, but could happen:
+ * 1. logon as low-rights user
+ * 2. -- do stuff ---
+ * 3. need high privileges, so change to higher-rights user
+ * 4. -- do stuff ---
+ */
+ @Test
+ public void testLoginDifferentLogin() throws ServletException, IOException {
+ doReturn(user)
+ .when(jwtHeadersUserUtil).getUser(any(), any());
+
+ var request = new MockHttpServletRequest();
+
+
+ JwtHeadersAuthFilter filter = new JwtHeadersAuthFilter(config);
+ filter = spy(filter);
+ filter.jwtHeadersUserUtil = jwtHeadersUserUtil;
+
+
+ //logged in
+ request.addHeader("oidc_id_token_payload", "{\"preferred_username\":\"david.blasby2@geocat.net\",\"resource_access\":{\"live-key2\":{\"roles\":[\"GeonetworkAdministrator\",\"group1:Reviewer\"]}}}");
+
+ filter.doFilter(request, response, filterChain);
+
+ //validate user logged in
+ var auth = SecurityContextHolder.getContext().getAuthentication();
+ Assert.assertNotNull(auth);
+ Assert.assertTrue(auth instanceof JwtHeadersUsernamePasswordAuthenticationToken);
+ var principle = (User) auth.getPrincipal();
+ Assert.assertEquals(user.getUsername(), principle.getUsername());
+ verify(filter, never()).logout(any()); //logout() should not have been called
+
+ //login new user (user2)
+ request = new MockHttpServletRequest();
+ request.addHeader("oidc_id_token_payload", "{\"preferred_username\":\"david.blasby2@geocat.net\",\"resource_access\":{\"live-key2\":{\"roles\":[\"GeonetworkAdministrator\",\"group1:Reviewer\"]}}}");
+ doReturn(user2)
+ .when(jwtHeadersUserUtil).getUser(any(), any());
+
+ filter.doFilter(request, response, filterChain);
+
+ //validate that the correct user is logged in
+ auth = SecurityContextHolder.getContext().getAuthentication();
+ Assert.assertNotNull(auth);
+ Assert.assertTrue(auth instanceof JwtHeadersUsernamePasswordAuthenticationToken);
+ principle = (User) auth.getPrincipal();
+ Assert.assertEquals(user2.getUsername(), principle.getUsername());
+ verify(filter).logout(any()); //logout must be called
+ }
+
+}
diff --git a/core/src/test/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersTrivialUserTest.java b/core/src/test/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersTrivialUserTest.java
new file mode 100644
index 00000000000..82f2171ca0d
--- /dev/null
+++ b/core/src/test/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersTrivialUserTest.java
@@ -0,0 +1,220 @@
+/*
+ * Copyright (C) 2024 Food and Agriculture Organization of the
+ * United Nations (FAO-UN), United Nations World Food Programme (WFP)
+ * and United Nations Environment Programme (UNEP)
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or (at
+ * your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
+ *
+ * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2,
+ * Rome - Italy. email: geonetwork@osgeo.org
+ */
+
+package org.fao.geonet.kernel.security.jwtheaders;
+
+import org.fao.geonet.domain.Profile;
+import org.junit.Assert;
+import org.junit.Test;
+import org.springframework.mock.web.MockHttpServletRequest;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Tests that the JwtHeadersTrivialUser is working.
+ */
+public class JwtHeadersTrivialUserTest {
+
+
+ /**
+ * test #maxProfile
+ * Should give the highest profile in the profileGroups
+ */
+ @Test
+ public void testMaxProfile() {
+        Map<Profile, List<String>> profileGroups = new HashMap<>();
+
+ //no profileGroups -> JwtHeadersTrivialUser.MIN_PROFILE
+ var maxProfile = JwtHeadersTrivialUser.getMaxProfile(profileGroups);
+ Assert.assertEquals(JwtHeadersTrivialUser.MIN_PROFILE, maxProfile);
+
+
+ //admin -> admin
+ profileGroups = new HashMap<>();
+ profileGroups.put(Profile.Administrator, new ArrayList<>());
+ maxProfile = JwtHeadersTrivialUser.getMaxProfile(profileGroups);
+ Assert.assertEquals(Profile.Administrator, maxProfile);
+
+ //Reviewer -> Reviewer
+ profileGroups = new HashMap<>();
+ profileGroups.put(Profile.Reviewer, new ArrayList<>());
+ maxProfile = JwtHeadersTrivialUser.getMaxProfile(profileGroups);
+ Assert.assertEquals(Profile.Reviewer, maxProfile);
+
+ //Editor -> Editor
+ profileGroups = new HashMap<>();
+ profileGroups.put(Profile.Editor, new ArrayList<>());
+ maxProfile = JwtHeadersTrivialUser.getMaxProfile(profileGroups);
+ Assert.assertEquals(Profile.Editor, maxProfile);
+
+
+ //Editor,Reviewer -> Reviewer
+ profileGroups = new HashMap<>();
+ profileGroups.put(Profile.Editor, new ArrayList<>());
+ profileGroups.put(Profile.Reviewer, new ArrayList<>());
+ maxProfile = JwtHeadersTrivialUser.getMaxProfile(profileGroups);
+ Assert.assertEquals(Profile.Reviewer, maxProfile);
+ }
+
+
+ /**
+ * tests that the extraction of ProfileRoles is correct
+ */
+ @Test
+ public void testExtractProfileRoles() {
+
+ //no roles -> no profileGroups
+ List processedRolesFromHeaders = Arrays.asList();
+ var profileGroups = JwtHeadersTrivialUser.extractProfileRoles(processedRolesFromHeaders);
+ Assert.assertEquals(0, profileGroups.size());
+
+ // "Administrator" -> "Administrator":[]
+ processedRolesFromHeaders = Arrays.asList("Administrator");
+ profileGroups = JwtHeadersTrivialUser.extractProfileRoles(processedRolesFromHeaders);
+ Assert.assertEquals(1, profileGroups.size());
+ Assert.assertTrue(profileGroups.containsKey(Profile.Administrator));
+ Assert.assertEquals(0, profileGroups.get(Profile.Administrator).size());
+
+ // "g1:Reviewer" -> "Reviewer":["g1"]
+ processedRolesFromHeaders = Arrays.asList("g1:Reviewer");
+ profileGroups = JwtHeadersTrivialUser.extractProfileRoles(processedRolesFromHeaders);
+ Assert.assertEquals(1, profileGroups.size());
+ Assert.assertTrue(profileGroups.containsKey(Profile.Reviewer));
+ Assert.assertEquals(1, profileGroups.get(Profile.Reviewer).size());
+ Assert.assertEquals("g1", profileGroups.get(Profile.Reviewer).get(0));
+
+ // "g1:Reviewer","g2:Reviewer" -> "Reviewer":["g1",g2]
+ processedRolesFromHeaders = Arrays.asList("g1:Reviewer", "g2:Reviewer");
+ profileGroups = JwtHeadersTrivialUser.extractProfileRoles(processedRolesFromHeaders);
+ Assert.assertEquals(1, profileGroups.size());
+ Assert.assertTrue(profileGroups.containsKey(Profile.Reviewer));
+ Assert.assertEquals(2, profileGroups.get(Profile.Reviewer).size());
+ Assert.assertTrue(profileGroups.get(Profile.Reviewer).contains("g1"));
+ Assert.assertTrue(profileGroups.get(Profile.Reviewer).contains("g2"));
+
+ // "g1:Reviewer","g2:Editor" -> "Reviewer":["g1"], "Editor":["g2"]
+ processedRolesFromHeaders = Arrays.asList("g1:Reviewer", "g2:Editor");
+ profileGroups = JwtHeadersTrivialUser.extractProfileRoles(processedRolesFromHeaders);
+ Assert.assertEquals(2, profileGroups.size());
+ Assert.assertTrue(profileGroups.containsKey(Profile.Reviewer));
+ Assert.assertTrue(profileGroups.containsKey(Profile.Editor));
+ Assert.assertEquals(1, profileGroups.get(Profile.Reviewer).size());
+ Assert.assertEquals(1, profileGroups.get(Profile.Editor).size());
+ Assert.assertTrue(profileGroups.get(Profile.Reviewer).contains("g1"));
+ Assert.assertTrue(profileGroups.get(Profile.Editor).contains("g2"));
+
+ // "Administrator","g2:Editor" -> "Administrator":[], "Editor":["g2"]
+ processedRolesFromHeaders = Arrays.asList("Administrator", "g2:Editor");
+ profileGroups = JwtHeadersTrivialUser.extractProfileRoles(processedRolesFromHeaders);
+ Assert.assertEquals(2, profileGroups.size());
+ Assert.assertTrue(profileGroups.containsKey(Profile.Administrator));
+ Assert.assertTrue(profileGroups.containsKey(Profile.Editor));
+ Assert.assertEquals(0, profileGroups.get(Profile.Administrator).size());
+ Assert.assertEquals(1, profileGroups.get(Profile.Editor).size());
+ Assert.assertTrue(profileGroups.get(Profile.Editor).contains("g2"));
+ }
+
+ /**
+     * Method #updateUserWithRoles relies on the above methods, so we don't test this too much
+ * The method just updates the user (Profile & ProfileGroups), so we test that here.
+ */
+ @Test
+ public void testUpdateUserWithRoles() {
+ // "Administrator","g2:Editor" -> "Administrator":[], "Editor":["g2"] AND Profile=Administrator
+ var processedRolesFromHeaders = Arrays.asList("Administrator", "g2:Editor");
+ var user = new JwtHeadersTrivialUser("testcaseUser");
+ JwtHeadersTrivialUser.updateUserWithRoles(user, processedRolesFromHeaders);
+
+ Assert.assertEquals(Profile.Administrator, user.getProfile());
+ var profileGroups = user.getProfileGroups();
+ Assert.assertEquals(2, profileGroups.size());
+ Assert.assertTrue(profileGroups.containsKey(Profile.Administrator));
+ Assert.assertTrue(profileGroups.containsKey(Profile.Editor));
+ Assert.assertEquals(0, profileGroups.get(Profile.Administrator).size());
+ Assert.assertEquals(1, profileGroups.get(Profile.Editor).size());
+ Assert.assertTrue(profileGroups.get(Profile.Editor).contains("g2"));
+ }
+
+
+ /***
+ * Method #handleRoles mostly relies on methods tested above and is mostly about extracting the correct headers from the request
+ */
+ @Test
+ public void testHandleRolesJson() {
+ var config = JwtHeadersIntegrationTest.getBasicConfig();
+ var user = new JwtHeadersTrivialUser("testCaseUser");
+ var request = new MockHttpServletRequest();
+ request.addHeader("oidc_id_token_payload", "{\"preferred_username\":\"david.blasby2@geocat.net\",\"resource_access\":{\"live-key2\":{\"roles\":[\"GeonetworkAdministrator\",\"group1:Reviewer\"]}}}");
+
+ JwtHeadersTrivialUser.handleRoles(user, config.getJwtConfiguration(), request);
+
+ Assert.assertEquals(Profile.Administrator, user.getProfile());
+ var profileGroups = user.getProfileGroups();
+ Assert.assertEquals(2, profileGroups.size());
+ Assert.assertTrue(profileGroups.containsKey(Profile.Administrator));
+ Assert.assertTrue(profileGroups.containsKey(Profile.Reviewer));
+ Assert.assertEquals(0, profileGroups.get(Profile.Administrator).size());
+ Assert.assertEquals(1, profileGroups.get(Profile.Reviewer).size());
+ Assert.assertTrue(profileGroups.get(Profile.Reviewer).contains("group1"));
+ }
+
+ @Test
+ public void testHandleRolesJWT() {
+ var config = JwtHeadersIntegrationTest.getBasicConfigJWT();
+ var user = new JwtHeadersTrivialUser("testCaseUser");
+ var request = new MockHttpServletRequest();
+ request.addHeader("TOKEN", JwtHeadersIntegrationTest.JWT);
+
+ JwtHeadersTrivialUser.handleRoles(user, config.getJwtConfiguration(), request);
+
+ Assert.assertEquals(Profile.Administrator, user.getProfile());
+ var profileGroups = user.getProfileGroups();
+ Assert.assertEquals(1, profileGroups.size());
+ Assert.assertTrue(profileGroups.containsKey(Profile.Administrator));
+ Assert.assertEquals(0, profileGroups.get(Profile.Administrator).size());
+ }
+
+ /**
+ * this is dependent on the above methods, so this is just a quick test
+ */
+ @Test
+ public void testCreate() throws Exception {
+ var config = JwtHeadersIntegrationTest.getBasicConfigJWT();
+ var request = new MockHttpServletRequest();
+ request.addHeader("TOKEN", JwtHeadersIntegrationTest.JWT);
+
+ var user = JwtHeadersTrivialUser.create(config.getJwtConfiguration(), request);
+
+ Assert.assertEquals("david.blasby@geocat.net", user.getUsername());
+
+ Assert.assertEquals(Profile.Administrator, user.getProfile());
+ var profileGroups = user.getProfileGroups();
+ Assert.assertEquals(1, profileGroups.size());
+ Assert.assertTrue(profileGroups.containsKey(Profile.Administrator));
+ Assert.assertEquals(0, profileGroups.get(Profile.Administrator).size());
+ }
+}
diff --git a/core/src/test/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersUserUtilTest.java b/core/src/test/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersUserUtilTest.java
new file mode 100644
index 00000000000..482ef5549f6
--- /dev/null
+++ b/core/src/test/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersUserUtilTest.java
@@ -0,0 +1,365 @@
+/*
+ * Copyright (C) 2024 Food and Agriculture Organization of the
+ * United Nations (FAO-UN), United Nations World Food Programme (WFP)
+ * and United Nations Environment Programme (UNEP)
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or (at
+ * your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
+ *
+ * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2,
+ * Rome - Italy. email: geonetwork@osgeo.org
+ */
+
+package org.fao.geonet.kernel.security.jwtheaders;
+
+import org.fao.geonet.domain.Group;
+import org.fao.geonet.domain.Profile;
+import org.fao.geonet.domain.User;
+import org.fao.geonet.domain.UserGroup;
+import org.fao.geonet.kernel.security.GeonetworkAuthenticationProvider;
+import org.fao.geonet.repository.GroupRepository;
+import org.fao.geonet.repository.LanguageRepository;
+import org.fao.geonet.repository.UserGroupRepository;
+import org.fao.geonet.repository.UserRepository;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mockito;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+import org.springframework.security.core.userdetails.UsernameNotFoundException;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+/**
+ * tests that JwtHeadersUserUtil works.
+ *
+ * Because JwtHeadersUserUtil uses the Group/User/UserGroup repositories, this uses a lot
+ * of Mockito to setup different scenarios.
+ *
+ * The main scenarios we are testing;
+ * 1. user is correctly saved (if new) or loaded (if existing)
+ * 2. user's profile and profileGroups are correctly updated (or not updated), depending on the
+ * filter's configuration.
+ */
+public class JwtHeadersUserUtilTest {
+
+ JwtHeadersUserUtil jwtHeadersUserUtil; //spy()-ed
+
+ @Before
+ public void setUp() throws Exception {
+ jwtHeadersUserUtil = new JwtHeadersUserUtil();
+ jwtHeadersUserUtil = spy(jwtHeadersUserUtil);
+
+ jwtHeadersUserUtil.userRepository = Mockito.mock(UserRepository.class);
+ jwtHeadersUserUtil.groupRepository = Mockito.mock(GroupRepository.class);
+ jwtHeadersUserUtil.userGroupRepository = Mockito.mock(UserGroupRepository.class);
+ jwtHeadersUserUtil.authProvider = Mockito.mock(GeonetworkAuthenticationProvider.class);
+ jwtHeadersUserUtil.languageRepository = Mockito.mock(LanguageRepository.class);
+ }
+
+
+ /**
+ * we have the config setup so it doesn't get any write access from the database
+ * + no user in DB
+ * + new user created
+ */
+ @Test
+ public void testSimplestCase() {
+ doThrow(new UsernameNotFoundException(""))
+ .when(jwtHeadersUserUtil.authProvider).loadUserByUsername(any());
+
+ JwtHeadersConfiguration basicConfig = JwtHeadersIntegrationTest.getBasicConfig();
+ basicConfig.setUpdateGroup(false);
+ basicConfig.setUpdateProfile(false);
+
+ var trivialUser = new JwtHeadersTrivialUser("testcaseUser@example.com");
+ trivialUser = spy(trivialUser);
+
+ User userDetails = (User) jwtHeadersUserUtil.getUser(trivialUser, basicConfig);
+
+ Assert.assertEquals("testcaseUser@example.com", userDetails.getUsername());
+ Assert.assertEquals("testcaseUser", userDetails.getName());
+
+ //verify helper methods called
+ verify(jwtHeadersUserUtil.authProvider).loadUserByUsername("testcaseUser@example.com");
+ verify(jwtHeadersUserUtil).createUser(trivialUser, basicConfig);
+
+ // these shouldn't ever be looked at
+ verify(jwtHeadersUserUtil, never()).updateGroups(any(), any());
+ verify(trivialUser, never()).getProfile();
+ verify(trivialUser, never()).getProfileGroups();
+
+ //db should not have been saved to
+ verify(jwtHeadersUserUtil.groupRepository, never()).save(any());
+ verify(jwtHeadersUserUtil.userGroupRepository, never()).save(any());
+ verify(jwtHeadersUserUtil.languageRepository, never()).save(any());
+
+ //user was saved
+ verify(jwtHeadersUserUtil.userRepository).save(userDetails);
+ }
+
+ /**
+ * we have the config setup so it doesn't get any write access from the database
+ * + user IS in DB
+ */
+ @Test
+ public void testSimplestCaseAlreadyExists() {
+ User user = new User();
+ user.setUsername("testcaseUser@example.com");
+ user.setName("testcaseUser");
+ user.setId(666);
+
+ doReturn(user)
+ .when(jwtHeadersUserUtil.authProvider).loadUserByUsername("testcaseUser@example.com");
+
+ JwtHeadersConfiguration basicConfig = JwtHeadersIntegrationTest.getBasicConfig();
+ basicConfig.setUpdateGroup(false);
+ basicConfig.setUpdateProfile(false);
+
+ var trivialUser = new JwtHeadersTrivialUser("testcaseUser@example.com");
+ trivialUser = spy(trivialUser);
+
+ User userDetails = (User) jwtHeadersUserUtil.getUser(trivialUser, basicConfig);
+
+ Assert.assertEquals("testcaseUser@example.com", userDetails.getUsername());
+ Assert.assertEquals("testcaseUser", userDetails.getName());
+
+ //verify helper methods called
+ verify(jwtHeadersUserUtil.authProvider).loadUserByUsername("testcaseUser@example.com");
+
+
+ // these shouldn't ever be looked at
+ verify(jwtHeadersUserUtil, never()).createUser(trivialUser, basicConfig);
+ verify(jwtHeadersUserUtil, never()).updateGroups(any(), any());
+ verify(trivialUser, never()).getProfile();
+ verify(trivialUser, never()).getProfileGroups();
+
+ //db should not have been saved to
+ verify(jwtHeadersUserUtil.groupRepository, never()).save(any());
+ verify(jwtHeadersUserUtil.userGroupRepository, never()).save(any());
+ verify(jwtHeadersUserUtil.languageRepository, never()).save(any());
+
+ //user wasn't saved (no modification)
+ verify(jwtHeadersUserUtil.userRepository, never()).save(userDetails);
+ }
+
+
+ /**
+ * we have the config setup so it writes user data to DB
+ * + no user in DB
+ * + new user created
+ * + validate that profile is set
+ * + validate that user groups (in db) are updated
+ */
+ @Test
+ public void testNewUserWithGroups() {
+ doThrow(new UsernameNotFoundException(""))
+ .when(jwtHeadersUserUtil.authProvider).loadUserByUsername(any());
+
+ //make sure that the group ID is set when saved. GN uses the ID in Set<> operations, so we must SET it.
+ when(jwtHeadersUserUtil.groupRepository.save(any())).thenAnswer(new Answer() {
+ @Override
+ public Group answer(InvocationOnMock invocation) throws Throwable {
+ ((Group) invocation.getArguments()[0]).setId(new Random().nextInt());
+ return ((Group) invocation.getArguments()[0]);
+ }
+ });
+
+ JwtHeadersConfiguration basicConfig = JwtHeadersIntegrationTest.getBasicConfig();
+ basicConfig.setUpdateGroup(true);
+ basicConfig.setUpdateProfile(true);
+
+ var trivialUser = new JwtHeadersTrivialUser("testcaseUser@example.com");
+ trivialUser.setProfile(Profile.Administrator);
+
+ Map<Profile, List<String>> profileGroups = new HashMap<>();
+ profileGroups.put(Profile.Reviewer, Arrays.asList("group1", "group2"));
+ trivialUser.setProfileGroups(profileGroups);
+
+ trivialUser = spy(trivialUser);
+
+ User userDetails = (User) jwtHeadersUserUtil.getUser(trivialUser, basicConfig);
+
+ Assert.assertEquals("testcaseUser@example.com", userDetails.getUsername());
+ Assert.assertEquals("testcaseUser", userDetails.getName());
+
+ //verify helper methods called
+ verify(jwtHeadersUserUtil.authProvider).loadUserByUsername("testcaseUser@example.com");
+ verify(jwtHeadersUserUtil).createUser(trivialUser, basicConfig);
+
+ //user should be saved with the Profile (admin)
+ verify(jwtHeadersUserUtil.userRepository).save(userDetails); //user was saved
+ Assert.assertEquals(Profile.Administrator, userDetails.getProfile());
+
+
+ //update groups method was called
+ verify(jwtHeadersUserUtil).updateGroups(profileGroups, userDetails);
+
+ //group1 and group2 saved to db
+ //attempted to find them in DB
+ verify(jwtHeadersUserUtil.groupRepository).findByName("group1");
+ verify(jwtHeadersUserUtil.groupRepository).findByName("group2");
+
+ //saved
+ ArgumentCaptor<Group> groupsCaptor = ArgumentCaptor.forClass(Group.class);
+ verify(jwtHeadersUserUtil.groupRepository, times(2)).save(groupsCaptor.capture());
+
+ Assert.assertEquals("group1", groupsCaptor.getAllValues().get(0).getName());
+ Assert.assertEquals("group2", groupsCaptor.getAllValues().get(1).getName());
+
+
+ //user connected to group and role
+ ArgumentCaptor<Set> setUserGroupCaptor = ArgumentCaptor.forClass(Set.class);
+
+ verify(jwtHeadersUserUtil.userGroupRepository).updateUserGroups(eq(userDetails.getId()), setUserGroupCaptor.capture());
+ Assert.assertEquals(1, setUserGroupCaptor.getAllValues().size());
+ List<UserGroup> userGroups = (List<UserGroup>) setUserGroupCaptor.getAllValues().get(0).stream().collect(Collectors.toList());
+ Collections.sort(userGroups,
+ (o1, o2) -> ((o1).getGroup().getName() + "-" + o1.getProfile()).compareTo((o2).getGroup().getName() + "-" + o2.getProfile()));
+ Assert.assertEquals(4, userGroups.size());
+
+ Assert.assertEquals(Profile.Editor, userGroups.get(0).getProfile());
+ Assert.assertEquals(userDetails, userGroups.get(0).getUser());
+ Assert.assertEquals("group1", userGroups.get(0).getGroup().getName());
+
+ Assert.assertEquals(Profile.Reviewer, userGroups.get(1).getProfile());
+ Assert.assertEquals(userDetails, userGroups.get(1).getUser());
+ Assert.assertEquals("group1", userGroups.get(1).getGroup().getName());
+
+ Assert.assertEquals(Profile.Editor, userGroups.get(2).getProfile());
+ Assert.assertEquals(userDetails, userGroups.get(2).getUser());
+ Assert.assertEquals("group2", userGroups.get(2).getGroup().getName());
+
+ Assert.assertEquals(Profile.Reviewer, userGroups.get(3).getProfile());
+ Assert.assertEquals(userDetails, userGroups.get(3).getUser());
+ Assert.assertEquals("group2", userGroups.get(3).getGroup().getName());
+ }
+
+
+ /**
+ * we have the config setup so it writes user data to DB
+ * + user IS in DB
+ * + validate that profile is set
+ * + validate that user groups (in db) are updated
+ */
+ @Test
+ public void testOldUserWithGroups() {
+ User user = new User();
+ user.setUsername("testcaseUser@example.com");
+ user.setName("testcaseUser");
+ user.setId(666);
+
+ doReturn(user)
+ .when(jwtHeadersUserUtil.authProvider).loadUserByUsername("testcaseUser@example.com");
+
+
+ //make sure that the group ID is set when saved. GN uses the ID in Set<> operations, so we must SET it.
+ when(jwtHeadersUserUtil.groupRepository.save(any())).thenAnswer(new Answer() {
+ @Override
+ public Group answer(InvocationOnMock invocation) throws Throwable {
+ ((Group) invocation.getArguments()[0]).setId(new Random().nextInt());
+ return ((Group) invocation.getArguments()[0]);
+ }
+ });
+
+ JwtHeadersConfiguration basicConfig = JwtHeadersIntegrationTest.getBasicConfig();
+ basicConfig.setUpdateGroup(true);
+ basicConfig.setUpdateProfile(true);
+
+ var trivialUser = new JwtHeadersTrivialUser("testcaseUser@example.com");
+ trivialUser.setProfile(Profile.Administrator);
+
+ Map<Profile, List<String>> profileGroups = new HashMap<>();
+ profileGroups.put(Profile.Reviewer, Arrays.asList("group1", "group2"));
+ trivialUser.setProfileGroups(profileGroups);
+
+ trivialUser = spy(trivialUser);
+
+ User userDetails = (User) jwtHeadersUserUtil.getUser(trivialUser, basicConfig);
+
+ Assert.assertEquals("testcaseUser@example.com", userDetails.getUsername());
+ Assert.assertEquals("testcaseUser", userDetails.getName());
+
+ //verify helper methods called
+ verify(jwtHeadersUserUtil.authProvider).loadUserByUsername("testcaseUser@example.com");
+ verify(jwtHeadersUserUtil, never()).createUser(trivialUser, basicConfig);
+
+ //user should be saved with the Profile (admin)
+ verify(jwtHeadersUserUtil.userRepository).save(userDetails); //user was saved
+ Assert.assertEquals(Profile.Administrator, userDetails.getProfile());
+
+
+ //update groups method was called
+ verify(jwtHeadersUserUtil).updateGroups(profileGroups, userDetails);
+
+ //group1 and group2 saved to db
+ //attempted to find them in DB
+ verify(jwtHeadersUserUtil.groupRepository).findByName("group1");
+ verify(jwtHeadersUserUtil.groupRepository).findByName("group2");
+
+ //saved
+ ArgumentCaptor<Group> groupsCaptor = ArgumentCaptor.forClass(Group.class);
+ verify(jwtHeadersUserUtil.groupRepository, times(2)).save(groupsCaptor.capture());
+
+ Assert.assertEquals("group1", groupsCaptor.getAllValues().get(0).getName());
+ Assert.assertEquals("group2", groupsCaptor.getAllValues().get(1).getName());
+
+ //user connected to group and role
+ ArgumentCaptor<Set> setUserGroupCaptor = ArgumentCaptor.forClass(Set.class);
+
+ verify(jwtHeadersUserUtil.userGroupRepository).updateUserGroups(eq(userDetails.getId()), setUserGroupCaptor.capture());
+ Assert.assertEquals(1, setUserGroupCaptor.getAllValues().size());
+ List<UserGroup> userGroups = (List<UserGroup>) setUserGroupCaptor.getAllValues().get(0).stream().collect(Collectors.toList());
+ Collections.sort(userGroups,
+ (o1, o2) -> ((o1).getGroup().getName() + "-" + o1.getProfile()).compareTo((o2).getGroup().getName() + "-" + o2.getProfile()));
+ Assert.assertEquals(4, userGroups.size());
+
+ Assert.assertEquals(Profile.Editor, userGroups.get(0).getProfile());
+ Assert.assertEquals(userDetails, userGroups.get(0).getUser());
+ Assert.assertEquals("group1", userGroups.get(0).getGroup().getName());
+
+ Assert.assertEquals(Profile.Reviewer, userGroups.get(1).getProfile());
+ Assert.assertEquals(userDetails, userGroups.get(1).getUser());
+ Assert.assertEquals("group1", userGroups.get(1).getGroup().getName());
+
+ Assert.assertEquals(Profile.Editor, userGroups.get(2).getProfile());
+ Assert.assertEquals(userDetails, userGroups.get(2).getUser());
+ Assert.assertEquals("group2", userGroups.get(2).getGroup().getName());
+
+ Assert.assertEquals(Profile.Reviewer, userGroups.get(3).getProfile());
+ Assert.assertEquals(userDetails, userGroups.get(3).getUser());
+ Assert.assertEquals("group2", userGroups.get(3).getGroup().getName());
+ }
+
+
+}
diff --git a/core/src/test/resources/org/fao/geonet/api/Messages.properties b/core/src/test/resources/org/fao/geonet/api/Messages.properties
index 4db47277a6b..2657aa3088a 100644
--- a/core/src/test/resources/org/fao/geonet/api/Messages.properties
+++ b/core/src/test/resources/org/fao/geonet/api/Messages.properties
@@ -54,8 +54,7 @@ user_password_changed='%s' password was updated.
user_password_notchanged=A problem occurred trying to change '%s' password. Contact the helpdesk.
user_password_invalid_changekey='%s' is an invalid change key for '%s'. Change keys are only valid for one day.
user_registered=User '%s' registered.
-user_with_that_email_found=A user with this email or username already exists.
-user_with_that_username_found=A user with this email or username already exists.
+user_with_that_email_username_found=A user with this email or username already exists.
register_email_admin_subject=%s / New account for %s as %s
register_email_admin_message=Dear Admin,\n\
Newly registered user %s has requested %s access for %s.\n\
@@ -177,6 +176,8 @@ api.exception.resourceAlreadyExists=Resource already exists
api.exception.resourceAlreadyExists.description=Resource already exists.
api.exception.unsatisfiedRequestParameter=Unsatisfied request parameter
api.exception.unsatisfiedRequestParameter.description=Unsatisfied request parameter.
+exception.maxUploadSizeExceeded=Maximum upload size of {0} exceeded.
+exception.maxUploadSizeExceeded.description=The request was rejected because its size ({0}) exceeds the configured maximum ({1}).
exception.resourceNotFound.metadata=Metadata not found
exception.resourceNotFound.metadata.description=Metadata with UUID ''{0}'' not found.
exception.resourceNotFound.resource=Metadata resource ''{0}'' not found
diff --git a/core/src/test/resources/org/fao/geonet/api/Messages_fre.properties b/core/src/test/resources/org/fao/geonet/api/Messages_fre.properties
index fa2455ebbf2..af2b4d80fd1 100644
--- a/core/src/test/resources/org/fao/geonet/api/Messages_fre.properties
+++ b/core/src/test/resources/org/fao/geonet/api/Messages_fre.properties
@@ -53,8 +53,7 @@ user_password_sent=Si l''utilisateur existe, vous recevrez un courriel contenant
user_password_changed=Le mot de passe de %s a \u00E9t\u00E9 mis \u00E0 jour.
user_password_notchanged=\u00C9chec lors du changement de mot de passe de %s. Contactez le support.
user_password_invalid_changekey=%s est une cl\u00E9 invalide pour %s. Les cl\u00E9s ne sont valides que pendant une journ\u00E9e.
-user_with_that_email_found=Un utilisateur avec cette adresse email ou ce nom d''utilisateur existe d\u00E9j\u00E0.
-user_with_that_username_found=Un utilisateur avec cette adresse email ou ce nom d''utilisateur existe d\u00E9j\u00E0.
+user_with_that_email_username_found=Un utilisateur avec cette adresse email ou ce nom d''utilisateur existe d\u00E9j\u00E0.
register_email_admin_subject=%s / Cr\u00E9ation de compte pour %s en tant que %s
register_email_admin_message=Cher administrateur,\n\
L'utilisateur %s vient de demander une cr\u00E9ation de compte pour %s.\n\
@@ -171,6 +170,8 @@ api.exception.resourceAlreadyExists=La ressource existe d\u00E9j\u00E0
api.exception.resourceAlreadyExists.description=La ressource existe d\u00E9j\u00E0.
api.exception.unsatisfiedRequestParameter=Param\u00E8tre de demande non satisfait
api.exception.unsatisfiedRequestParameter.description=Param\u00E8tre de demande non satisfait.
+exception.maxUploadSizeExceeded=La taille maximale du t\u00E9l\u00E9chargement de {0} a \u00E9t\u00E9 exc\u00E9d\u00E9e.
+exception.maxUploadSizeExceeded.description=La demande a \u00E9t\u00E9 refus\u00E9e car sa taille ({0}) exc\u00E8de le maximum configur\u00E9 ({1}).
exception.resourceNotFound.metadata=Fiches introuvables
exception.resourceNotFound.metadata.description=La fiche ''{0}'' est introuvable.
exception.resourceNotFound.resource=Ressource ''{0}'' introuvable
diff --git a/csw-server/pom.xml b/csw-server/pom.xml
index 4f625facbe7..f6a27aadb3c 100644
--- a/csw-server/pom.xml
+++ b/csw-server/pom.xml
@@ -27,7 +27,7 @@
<artifactId>geonetwork</artifactId>
<groupId>org.geonetwork-opensource</groupId>
- <version>4.4.6-SNAPSHOT</version>
+ <version>4.4.7-SNAPSHOT</version>
<modelVersion>4.0.0</modelVersion>
diff --git a/csw-server/src/main/java/org/fao/geonet/component/csw/GetCapabilities.java b/csw-server/src/main/java/org/fao/geonet/component/csw/GetCapabilities.java
index 50f0a420200..72f0dc19c43 100644
--- a/csw-server/src/main/java/org/fao/geonet/component/csw/GetCapabilities.java
+++ b/csw-server/src/main/java/org/fao/geonet/component/csw/GetCapabilities.java
@@ -65,6 +65,7 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.util.stream.Collectors;
import static org.fao.geonet.kernel.setting.SettingManager.isPortRequired;
@@ -529,6 +530,8 @@ private void setOperationsParameters(Element capabilities) {
*/
private void populateTypeNameAndOutputSchema(Element op) {
Map<String, Namespace> typenames = _schemaManager.getHmSchemasTypenames();
+ List<String> outputSchemas = _schemaManager.getOutputSchemas().values().stream().sorted().collect(Collectors.toList());
+
List<Element> operations = op.getChildren("Parameter", Csw.NAMESPACE_OWS);
for (Element operation : operations) {
if ("typeNames".equals(operation.getAttributeValue("name"))) {
@@ -541,12 +544,10 @@ private void populateTypeNameAndOutputSchema(Element op) {
.setText(typename));
}
} else if ("outputSchema".equals(operation.getAttributeValue("name"))) {
- for (Map.Entry<String, Namespace> entry : typenames.entrySet()) {
- Namespace ns = entry.getValue();
- operation.addNamespaceDeclaration(ns);
+ outputSchemas.forEach(uri ->
operation.addContent(new Element("Value", Csw.NAMESPACE_OWS)
- .setText(ns.getURI()));
- }
+ .setText(uri))
+ );
}
}
}
diff --git a/csw-server/src/main/java/org/fao/geonet/csw/common/OutputSchema.java b/csw-server/src/main/java/org/fao/geonet/csw/common/OutputSchema.java
index 9b156b71541..c6d65519c47 100644
--- a/csw-server/src/main/java/org/fao/geonet/csw/common/OutputSchema.java
+++ b/csw-server/src/main/java/org/fao/geonet/csw/common/OutputSchema.java
@@ -78,16 +78,16 @@ public static String parse(String schema, SchemaManager schemaManager) throws In
if (schema.equals("csw:IsoRecord")) return "gmd";
if (schema.equals("own")) return "own";
- Map<String, Namespace> typenames = schemaManager.getHmSchemasTypenames();
- for (Map.Entry<String, Namespace> entry : typenames.entrySet()) {
- Namespace ns = entry.getValue();
- if (schema.equals(ns.getURI())) {
- return ns.getPrefix();
+ Map<String, String> typenames = schemaManager.getOutputSchemas();
+ for (Map.Entry<String, String> entry : typenames.entrySet()) {
+ String ns = entry.getValue();
+ if (schema.equals(ns)) {
+ return entry.getKey();
}
}
throw new InvalidParameterValueEx("outputSchema",
- String.format("'%s' schema is not valid. Supported values are %s",
+ String.format("'%s' output schema is not valid. Supported values are %s",
schema,
schemaManager.getListOfOutputSchemaURI().toString()));
}
diff --git a/csw-server/src/main/java/org/fao/geonet/csw/common/util/Xml.java b/csw-server/src/main/java/org/fao/geonet/csw/common/util/Xml.java
index 51bdeffe793..c5ab2c8053a 100644
--- a/csw-server/src/main/java/org/fao/geonet/csw/common/util/Xml.java
+++ b/csw-server/src/main/java/org/fao/geonet/csw/common/util/Xml.java
@@ -125,22 +125,24 @@ public static Element applyElementSetName(ServiceContext context, SchemaManager
ResultType resultType, String id, String displayLanguage) throws InvalidParameterValueEx {
Path schemaDir = schemaManager.getSchemaCSWPresentDir(schema);
Path styleSheet = schemaDir.resolve(outputSchema + "-" + elementSetName + ".xsl");
+ Path styleSheetWithoutElementSet = schemaDir.resolve(outputSchema + ".xsl");
- if (!Files.exists(styleSheet)) {
+ if (!Files.exists(styleSheet) && !Files.exists(styleSheetWithoutElementSet)) {
throw new InvalidParameterValueEx("OutputSchema",
String.format(
- "OutputSchema '%s' not supported for metadata with '%s' (%s).\nCorresponding XSL transformation '%s' does not exist for this schema.\nThe record will not be returned in response.",
- outputSchema, id, schema, styleSheet.getFileName()));
+ "OutputSchema '%s' not supported for metadata with '%s' (%s).\nCorresponding XSL transformation '%s' (or '%s') does not exist for this schema.\nThe record will not be returned in response.",
+ outputSchema, id, schema, styleSheet.getFileName(), styleSheetWithoutElementSet.getFileName()));
} else {
Map<String, Object> params = new HashMap<>();
params.put("lang", displayLanguage);
+ Path xslFile = Files.exists(styleSheet) ? styleSheet : styleSheetWithoutElementSet;
try {
- result = org.fao.geonet.utils.Xml.transform(result, styleSheet, params);
+ result = org.fao.geonet.utils.Xml.transform(result, xslFile, params);
} catch (Exception e) {
String msg = String.format(
- "Error occured while transforming metadata with id '%s' using '%s'.",
- id, styleSheet.getFileName());
+ "Error occurred while transforming metadata with id '%s' using '%s'.",
+ id, xslFile.getFileName());
context.error(msg);
context.error(" (C) StackTrace:\n" + Util.getStackTrace(e));
throw new InvalidParameterValueEx("OutputSchema", msg);
diff --git a/csw-server/src/main/java/org/fao/geonet/kernel/csw/services/getrecords/es/CswFilter2Es.java b/csw-server/src/main/java/org/fao/geonet/kernel/csw/services/getrecords/es/CswFilter2Es.java
index 2122a8c4a10..d77bfd28818 100644
--- a/csw-server/src/main/java/org/fao/geonet/kernel/csw/services/getrecords/es/CswFilter2Es.java
+++ b/csw-server/src/main/java/org/fao/geonet/kernel/csw/services/getrecords/es/CswFilter2Es.java
@@ -29,6 +29,7 @@
import org.apache.commons.text.StringEscapeUtils;
import org.fao.geonet.constants.Geonet;
import org.fao.geonet.kernel.csw.services.getrecords.IFieldMapper;
+import org.fao.geonet.utils.DateUtil;
import org.fao.geonet.utils.Log;
import org.geotools.api.filter.*;
import org.geotools.api.filter.expression.Expression;
@@ -338,7 +339,11 @@ public Object visitRange(BinaryComparisonOperator filter, String operator, Objec
String dataPropertyValue = stack.pop();
String dataPropertyName = stack.pop();
- if (!NumberUtils.isNumber(dataPropertyValue)) {
+ boolean isDate = (DateUtil.parseBasicOrFullDateTime(dataPropertyValue) != null);
+
+ if (isDate) {
+ dataPropertyValue = CswFilter2Es.quoteString(dataPropertyValue);
+ } else if (!NumberUtils.isNumber(dataPropertyValue)) {
dataPropertyValue = StringEscapeUtils.escapeJson(CswFilter2Es.quoteString(dataPropertyValue));
}
diff --git a/csw-server/src/test/java/org/fao/geonet/kernel/csw/services/getrecords/es/CswFilter2EsTest.java b/csw-server/src/test/java/org/fao/geonet/kernel/csw/services/getrecords/es/CswFilter2EsTest.java
index f8a31dabbfb..98770670563 100644
--- a/csw-server/src/test/java/org/fao/geonet/kernel/csw/services/getrecords/es/CswFilter2EsTest.java
+++ b/csw-server/src/test/java/org/fao/geonet/kernel/csw/services/getrecords/es/CswFilter2EsTest.java
@@ -381,4 +381,27 @@ void assertFilterEquals(JsonNode expected, String actual, String filterSpecVersi
assertEquals(expected, MAPPER.readTree(new StringReader(result)));
}
+
+
+ @Test
+ void testPropertyIsGreaterThanDateValue() throws IOException {
+
+ // INPUT:
+ final String input =
+ "<ogc:Filter xmlns:ogc=\"http://www.opengis.net/ogc\">\n" +
+ " <ogc:PropertyIsGreaterThan>\n" +
+ " <ogc:PropertyName>Modified</ogc:PropertyName>\n" +
+ " <ogc:Literal>1910-02-05</ogc:Literal>\n" +
+ " </ogc:PropertyIsGreaterThan>\n" +
+ "</ogc:Filter>";
+
+ // EXPECTED:
+ final ObjectNode expected = EsJsonHelper.boolbdr(). //
+ must(array(range("Modified", "gt", "1910-02-05"))). //
+ filter(queryStringPart()). //
+ bld();
+
+
+ assertFilterEquals(expected, input);
+ }
}
diff --git a/csw-server/src/test/java/org/fao/geonet/kernel/csw/services/getrecords/es/EsJsonHelper.java b/csw-server/src/test/java/org/fao/geonet/kernel/csw/services/getrecords/es/EsJsonHelper.java
index 629247c8825..f727509808a 100644
--- a/csw-server/src/test/java/org/fao/geonet/kernel/csw/services/getrecords/es/EsJsonHelper.java
+++ b/csw-server/src/test/java/org/fao/geonet/kernel/csw/services/getrecords/es/EsJsonHelper.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2001-2023 Food and Agriculture Organization of the
+ * Copyright (C) 2001-2024 Food and Agriculture Organization of the
* United Nations (FAO-UN), United Nations World Food Programme (WFP)
* and United Nations Environment Programme (UNEP)
*
@@ -62,6 +62,35 @@ public static ObjectNode match(String property, String matchString) {
return outer;
}
+
+ /**
+ * Returns a structure like
+ *
+ * <pre>
+ * { "range":
+ * {
+ * "gt": "value"
+ * }
+ * </pre>
+ *
+ * @param property
+ * @param operator
+ * @param matchString
+ * @return
+ */
+ public static ObjectNode range(String property, String operator, String matchString) {
+ final ObjectNode rangeOperatorObject = MAPPER.createObjectNode();
+ rangeOperatorObject.put(operator, matchString);
+
+ final ObjectNode rangeObject = MAPPER.createObjectNode();
+ rangeObject.put(property, rangeOperatorObject);
+
+ final ObjectNode outer = MAPPER.createObjectNode();
+ outer.set("range", rangeObject);
+ return outer;
+ }
+
+
private static ArrayNode bound(double x, double y) {
final ArrayNode bound = MAPPER.createArrayNode();
bound.add(x);
diff --git a/datastorages/cmis/pom.xml b/datastorages/cmis/pom.xml
index 18533eb6b72..dffd357d170 100644
--- a/datastorages/cmis/pom.xml
+++ b/datastorages/cmis/pom.xml
@@ -28,7 +28,7 @@
<artifactId>gn-datastorages</artifactId>
<groupId>org.geonetwork-opensource.datastorage</groupId>
- <version>4.4.6-SNAPSHOT</version>
+ <version>4.4.7-SNAPSHOT</version>
<modelVersion>4.0.0</modelVersion>
diff --git a/datastorages/cmis/src/main/java/org/fao/geonet/api/records/attachments/CMISStore.java b/datastorages/cmis/src/main/java/org/fao/geonet/api/records/attachments/CMISStore.java
index de258b3a711..5957cc4c3c8 100644
--- a/datastorages/cmis/src/main/java/org/fao/geonet/api/records/attachments/CMISStore.java
+++ b/datastorages/cmis/src/main/java/org/fao/geonet/api/records/attachments/CMISStore.java
@@ -1,6 +1,6 @@
/*
* =============================================================================
- * === Copyright (C) 2001-2016 Food and Agriculture Organization of the
+ * === Copyright (C) 2001-2024 Food and Agriculture Organization of the
* === United Nations (FAO-UN), United Nations World Food Programme (WFP)
* === and United Nations Environment Programme (UNEP)
* ===
@@ -190,7 +190,20 @@ public ResourceHolder getResource(final ServiceContext context, final String met
@Override
public ResourceHolder getResourceInternal(String metadataUuid, MetadataResourceVisibility visibility, String resourceId, Boolean approved) throws Exception {
- throw new UnsupportedOperationException("CMISStore does not support getResourceInternal.");
+ int metadataId = getAndCheckMetadataId(metadataUuid, approved);
+ checkResourceId(resourceId);
+
+ try {
+ ServiceContext context = ServiceContext.get();
+ final CmisObject object = cmisConfiguration.getClient().getObjectByPath(getKey(context, metadataUuid, metadataId, visibility, resourceId));
+ return new ResourceHolderImpl(object, createResourceDescription(context, metadataUuid, visibility, resourceId,
+ (Document) object, metadataId, approved));
+ } catch (CmisObjectNotFoundException e) {
+ throw new ResourceNotFoundException(
+ String.format("Metadata resource '%s' not found for metadata '%s'", resourceId, metadataUuid))
+ .withMessageKey("exception.resourceNotFound.resource", new String[]{resourceId})
+ .withDescriptionKey("exception.resourceNotFound.resource.description", new String[]{resourceId, metadataUuid});
+ }
}
protected String getKey(final ServiceContext context, String metadataUuid, int metadataId, MetadataResourceVisibility visibility, String resourceId) {
@@ -424,13 +437,9 @@ public String delResource(final ServiceContext context, final String metadataUui
for (MetadataResourceVisibility visibility : MetadataResourceVisibility.values()) {
if (tryDelResource(context, metadataUuid, metadataId, visibility, resourceId)) {
- Log.info(Geonet.RESOURCES,
- String.format("MetadataResource '%s' removed.", resourceId));
- return String.format("MetadataResource '%s' removed.", resourceId);
+ return String.format("Metadata resource '%s' removed.", resourceId);
}
}
- Log.info(Geonet.RESOURCES,
- String.format("Unable to remove resource '%s'.", resourceId));
return String.format("Unable to remove resource '%s'.", resourceId);
}
@@ -439,12 +448,8 @@ public String delResource(final ServiceContext context, final String metadataUui
final String resourceId, Boolean approved) throws Exception {
int metadataId = canEdit(context, metadataUuid, approved);
if (tryDelResource(context, metadataUuid, metadataId, visibility, resourceId)) {
- Log.info(Geonet.RESOURCES,
- String.format("MetadataResource '%s' removed.", resourceId));
- return String.format("MetadataResource '%s' removed.", resourceId);
+ return String.format("Metadata resource '%s' removed.", resourceId);
}
- Log.info(Geonet.RESOURCES,
- String.format("Unable to remove resource '%s'.", resourceId));
return String.format("Unable to remove resource '%s'.", resourceId);
}
@@ -459,6 +464,8 @@ protected boolean tryDelResource(final ServiceContext context, final String meta
try {
final CmisObject object = cmisConfiguration.getClient().getObjectByPath(key, oc);
object.delete();
+ Log.info(Geonet.RESOURCES,
+ String.format("Resource '%s' removed for metadata %d (%s).", resourceId, metadataId, metadataUuid));
if (object instanceof Folder) {
cmisUtils.invalidateFolderCacheItem(key);
}
@@ -467,6 +474,8 @@ protected boolean tryDelResource(final ServiceContext context, final String meta
//CmisPermissionDeniedException when user does not have permissions.
//CmisConstraintException when there is a lock on the file from a checkout.
} catch (CmisObjectNotFoundException | CmisPermissionDeniedException | CmisConstraintException e) {
+ Log.info(Geonet.RESOURCES,
+ String.format("Unable to remove resource '%s' for metadata %d (%s). %s", resourceId, metadataId, metadataUuid, e.getMessage()));
return false;
}
}
@@ -627,8 +636,10 @@ private GeonetworkDataDirectory getDataDirectory(ServiceContext context) {
/**
* get external resource management for the supplied resource.
* Replace the following
+ * {objectId} type:visibility:metadataId:version:resourceId in base64 encoding
* {id} resource id
- * {type:folder:document} // If the type is folder then type "folder" will be displayed else if document then "document" will be displayed
+ * {type:folder:document} // Custom return type based on type. If the type is folder then type "folder" will be displayed else if document then "document" will be displayed
+ * {type} // If the type is folder then type "folder" will be displayed else if document then "document" will be displayed
* {uuid} metadatauuid
* {metadataid} metadataid
* {visibility} visibility
@@ -657,16 +668,27 @@ protected MetadataResourceExternalManagementProperties getMetadataResourceExtern
) {
String metadataResourceExternalManagementPropertiesUrl = cmisConfiguration.getExternalResourceManagementUrl();
if (!StringUtils.isEmpty(metadataResourceExternalManagementPropertiesUrl)) {
+ // {objectid} objectId // It will be the type:visibility:metadataId:version:resourceId in base64
+ // i.e. folder::100::100 # Folder in resource 100
+ // i.e. document:public:100:v1:sample.jpg # public document 100 version v1 name sample.jpg
+ if (metadataResourceExternalManagementPropertiesUrl.contains("{objectid}")) {
+ metadataResourceExternalManagementPropertiesUrl = metadataResourceExternalManagementPropertiesUrl.replaceAll("(\\{objectid\\})",
+ getResourceManagementExternalPropertiesObjectId((type == null ? "document" : (type instanceof Folder ? "folder" : "document")), visibility, metadataId, version, resourceId));
+ }
// {id} id
if (metadataResourceExternalManagementPropertiesUrl.contains("{id}")) {
metadataResourceExternalManagementPropertiesUrl = metadataResourceExternalManagementPropertiesUrl.replaceAll("(\\{id\\})", (resourceId==null?"":resourceId));
}
- // {type:folder:document} // If the type is folder then type "folder" will be displayed else if document then "document" will be displayed
+ // {type:folder:document} // Custom return type based on type. If the type is folder then type "folder" will be displayed else if document then "document" will be displayed
if (metadataResourceExternalManagementPropertiesUrl.contains("{type:")) {
metadataResourceExternalManagementPropertiesUrl = metadataResourceExternalManagementPropertiesUrl.replaceAll("\\{type:([a-zA-Z0-9]*?):([a-zA-Z0-9]*?)\\}",
(type==null?"":(type instanceof Folder?"$1":"$2")));
}
-
+ // {type} // If the type is folder then type "folder" will be displayed else if document then "document" will be displayed
+ if (metadataResourceExternalManagementPropertiesUrl.contains("{type}")) {
+ metadataResourceExternalManagementPropertiesUrl = metadataResourceExternalManagementPropertiesUrl.replaceAll("(\\{type\\})",
+ (type == null ? "document" : (type instanceof Folder ? "folder" : "document")));
+ }
// {uuid} metadatauuid
if (metadataResourceExternalManagementPropertiesUrl.contains("{uuid}")) {
metadataResourceExternalManagementPropertiesUrl = metadataResourceExternalManagementPropertiesUrl.replaceAll("(\\{uuid\\})", (metadataUuid==null?"":metadataUuid));
@@ -757,14 +779,14 @@ public String toString() {
}
protected static class ResourceHolderImpl implements ResourceHolder {
- private CmisObject cmisObject;
+ private final CmisObject cmisObject;
private Path tempFolderPath;
private Path path;
private final MetadataResource metadataResource;
public ResourceHolderImpl(final CmisObject cmisObject, MetadataResource metadataResource) throws IOException {
// Preserve filename by putting the files into a temporary folder and using the same filename.
- tempFolderPath = Files.createTempDirectory("gn-meta-res-" + String.valueOf(metadataResource.getMetadataId() + "-"));
+ tempFolderPath = Files.createTempDirectory("gn-meta-res-" + metadataResource.getMetadataId() + "-");
tempFolderPath.toFile().deleteOnExit();
path = tempFolderPath.resolve(getFilename(cmisObject.getName()));
this.metadataResource = metadataResource;
@@ -795,11 +817,5 @@ public void close() throws IOException {
path=null;
tempFolderPath = null;
}
-
- @Override
- protected void finalize() throws Throwable {
- close();
- super.finalize();
- }
}
}
diff --git a/datastorages/cmis/src/main/java/org/fao/geonet/resources/CMISConfiguration.java b/datastorages/cmis/src/main/java/org/fao/geonet/resources/CMISConfiguration.java
index 257ef3246d6..87b76ec0821 100644
--- a/datastorages/cmis/src/main/java/org/fao/geonet/resources/CMISConfiguration.java
+++ b/datastorages/cmis/src/main/java/org/fao/geonet/resources/CMISConfiguration.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2001-2016 Food and Agriculture Organization of the
+ * Copyright (C) 2001-2024 Food and Agriculture Organization of the
* United Nations (FAO-UN), United Nations World Food Programme (WFP)
* and United Nations Environment Programme (UNEP)
*
@@ -59,26 +59,28 @@
public class CMISConfiguration {
private Session client = null;
- public final static Integer CMIS_MAX_ITEMS_PER_PAGE = 1000;
- public final static String CMIS_FOLDER_DELIMITER = "/"; // Specs indicate that "/" is the folder delimiter/separator - not sure if other delimiter can be used?.
- public final static String CMIS_SECONDARY_PROPERTY_SEPARATOR = "->";
- private final String CMIS_DEFAULT_WEBSERVICES_ACL_SERVICE = "/services/ACLService?wsdl";
- private final String CMIS_DEFAULT_WEBSERVICES_DISCOVERY_SERVICE = "/services/DiscoveryService?wsdl";
- private final String CMIS_DEFAULT_WEBSERVICES_MULTIFILING_SERVICE = "/services/MultiFilingService?wsdl";
- private final String CMIS_DEFAULT_WEBSERVICES_NAVIGATION_SERVICE = "/services/NavigationService?wsdl";
- private final String CMIS_DEFAULT_WEBSERVICES_OBJECT_SERVICE = "/services/ObjectService?wsdl";
- private final String CMIS_DEFAULT_WEBSERVICES_POLICY_SERVICE = "/services/PolicyService?wsdl";
- private final String CMIS_DEFAULT_WEBSERVICES_RELATIONSHIP_SERVICE = "/services/RelationshipService?wsdl";
- private final String CMIS_DEFAULT_WEBSERVICES_REPOSITORY_SERVICE = "/services/RepositoryService?wsdl";
- private final String CMIS_DEFAULT_WEBSERVICES_VERSIONING_SERVICE = "/services/VersioningService?wsdl";
- private final String CMIS_DEFAULT_WEBSERVICES_BASE_URL_SERVICE = "/cmis";
- private final String CMIS_DEFAULT_BROWSER_URL_SERVICE = "/browser";
- private final String CMIS_DEFAULT_ATOMPUB_URL_SERVICE = "/atom";
-
- private final String CMIS_DEFAULT_EXTERNAL_RESOURCE_MANAGEMENT_WINDOW_PARAMETERS = "toolbar=0,width=600,height=600";
- private final Boolean CMIS_DEFAULT_EXTERNAL_RESOURCE_MANAGEMENT_MODAL_ENABLED = true;
- private final Boolean CMIS_DEFAULT_EXTERNAL_RESOURCE_MANAGEMENT_FOLDER_ENABLED = true;
- private final Boolean CMIS_DEFAULT_VERSIONING_ENABLED = false;
+ // DFO change to 100. Due to bug with open text cmis where if max is set to 1000, it will return 100 but if it is set to 100 it will return all records.
+ // https://dev.azure.com/foc-poc/EDH-CDE/_workitems/edit/95878
+ public static final Integer CMIS_MAX_ITEMS_PER_PAGE = 100;
+ public static final String CMIS_FOLDER_DELIMITER = "/"; // Specs indicate that "/" is the folder delimiter/separator - not sure if other delimiter can be used?.
+ public static final String CMIS_SECONDARY_PROPERTY_SEPARATOR = "->";
+ private static final String CMIS_DEFAULT_WEBSERVICES_ACL_SERVICE = "/services/ACLService?wsdl";
+ private static final String CMIS_DEFAULT_WEBSERVICES_DISCOVERY_SERVICE = "/services/DiscoveryService?wsdl";
+ private static final String CMIS_DEFAULT_WEBSERVICES_MULTIFILING_SERVICE = "/services/MultiFilingService?wsdl";
+ private static final String CMIS_DEFAULT_WEBSERVICES_NAVIGATION_SERVICE = "/services/NavigationService?wsdl";
+ private static final String CMIS_DEFAULT_WEBSERVICES_OBJECT_SERVICE = "/services/ObjectService?wsdl";
+ private static final String CMIS_DEFAULT_WEBSERVICES_POLICY_SERVICE = "/services/PolicyService?wsdl";
+ private static final String CMIS_DEFAULT_WEBSERVICES_RELATIONSHIP_SERVICE = "/services/RelationshipService?wsdl";
+ private static final String CMIS_DEFAULT_WEBSERVICES_REPOSITORY_SERVICE = "/services/RepositoryService?wsdl";
+ private static final String CMIS_DEFAULT_WEBSERVICES_VERSIONING_SERVICE = "/services/VersioningService?wsdl";
+ private static final String CMIS_DEFAULT_WEBSERVICES_BASE_URL_SERVICE = "/cmis";
+ private static final String CMIS_DEFAULT_BROWSER_URL_SERVICE = "/browser";
+ private static final String CMIS_DEFAULT_ATOMPUB_URL_SERVICE = "/atom";
+
+ private static final String CMIS_DEFAULT_EXTERNAL_RESOURCE_MANAGEMENT_WINDOW_PARAMETERS = "toolbar=0,width=600,height=600";
+ private static final Boolean CMIS_DEFAULT_EXTERNAL_RESOURCE_MANAGEMENT_MODAL_ENABLED = true;
+ private static final Boolean CMIS_DEFAULT_EXTERNAL_RESOURCE_MANAGEMENT_FOLDER_ENABLED = true;
+ private static final Boolean CMIS_DEFAULT_VERSIONING_ENABLED = false;
private String servicesBaseUrl;
private String bindingType;
@@ -111,7 +113,6 @@ public class CMISConfiguration {
* Property name for validation status that is expected to be an integer with values of null, 0, 1, 2
* (See MetadataResourceExternalManagementProperties.ValidationStatus for code meaning)
* Property name follows the same format as cmisMetadataUUIDPropertyName
- *
* If null then validation status will default to UNKNOWN.
*/
private String externalResourceManagementValidationStatusPropertyName;
@@ -505,7 +506,6 @@ public void setExternalResourceManagementValidationStatusPropertyName(String ext
String.format("Invalid format for property name %s property will not be used", externalResourceManagementValidationStatusPropertyName));
this.externalResourceManagementValidationStatusPropertyName = null;
this.externalResourceManagementValidationStatusSecondaryProperty = false;
- return;
} else {
this.externalResourceManagementValidationStatusSecondaryProperty = true;
}
@@ -514,7 +514,7 @@ public void setExternalResourceManagementValidationStatusPropertyName(String ext
public MetadataResourceExternalManagementProperties.ValidationStatus getValidationStatusDefaultValue() {
// We only need to set the default if there is a status property supplied, and it is not already set
- if (this.defaultStatus == null && !org.springframework.util.StringUtils.isEmpty(getExternalResourceManagementValidationStatusPropertyName())) {
+ if (this.defaultStatus == null && org.springframework.util.StringUtils.hasLength(getExternalResourceManagementValidationStatusPropertyName())) {
if (getExternalResourceManagementValidationStatusDefaultValue() != null) {
// If a default property name does exist then use it
this.defaultStatus = MetadataResourceExternalManagementProperties.ValidationStatus.valueOf(getExternalResourceManagementValidationStatusDefaultValue());
@@ -536,9 +536,8 @@ public void init() {
}
// default factory implementation
- Map parameters = new HashMap();
+ Map parameters = new HashMap<>();
- this.baseRepositoryPath = baseRepositoryPath;
if (this.baseRepositoryPath == null) {
this.baseRepositoryPath = "";
}
@@ -609,7 +608,7 @@ public void init() {
}
}
} else {
- // Try to find the repository name for the id that we have specified..
+ // Try to find the repository name for the id that we have specified.
try {
for (Repository repository : factory.getRepositories(parameters)) {
if (repository.getId().equalsIgnoreCase(this.repositoryId)) {
@@ -633,7 +632,7 @@ public void init() {
repositoryUrl + "' using product '" + client.getRepositoryInfo().getProductName() + "' version '" +
client.getRepositoryInfo().getProductVersion() + "'.");
- // Check if we can parse the secondary parameters from human readable to secondary ids.
+ // Check if we can parse the secondary parameters from human-readable to secondary ids.
parsedCmisMetadataUUIDPropertyName = parseSecondaryProperty(client, cmisMetadataUUIDPropertyName);
parsedExternalResourceManagementValidationStatusPropertyName = parseSecondaryProperty(client, externalResourceManagementValidationStatusPropertyName);
@@ -743,7 +742,7 @@ public boolean existExternalResourceManagementValidationStatusSecondaryProperty(
}
/**
- * Generte a full url based on the supplied entered serviceurl and the default.
+ * Generate a full url based on the supplied entered serviceUrl and the default.
*
* @param baseUrl Base url
* @param serviceUrl Supplied service url (This could start with / or http. If it starts with http then ignore baseUrl)
diff --git a/datastorages/cmis/src/main/resources/config-store/config-cmis-overrides.properties b/datastorages/cmis/src/main/resources/config-store/config-cmis-overrides.properties
index f0a62c1920a..4c154639ca5 100644
--- a/datastorages/cmis/src/main/resources/config-store/config-cmis-overrides.properties
+++ b/datastorages/cmis/src/main/resources/config-store/config-cmis-overrides.properties
@@ -11,8 +11,8 @@ cmis.external.resource.management.window.parameters=${CMIS_EXTERNAL_RESOURCE_MAN
cmis.external.resource.management.modal.enabled=${CMIS_EXTERNAL_RESOURCE_MANAGEMENT_MODAL_ENABLED:#{null}}
cmis.external.resource.management.folder.enabled=${CMIS_EXTERNAL_RESOURCE_MANAGEMENT_FOLDER_ENABLED:#{null}}
cmis.external.resource.management.folder.root=${CMIS_EXTERNAL_RESOURCE_MANAGEMENT_FOLDER_ROOT:#{null}}
-cmis.external.resource.status.property.name=${CMIS_EXTERNAL_RESOURCE_STATUS_PROPERTY_NAME:#{null}}
-cmis.external.resource.management.status.default.value=${CMIS_EXTERNAL_RESOURCE_MANAGEMENT_STATUS_DEFAULT_VALUE:#{null}}
+cmis.external.resource.management.validation.status.property.name=${CMIS_EXTERNAL_RESOURCE_MANAGEMENT_VALIDATION_STATUS_PROPERTY_NAME:#{null}}
+cmis.external.resource.management.validation.status.default.value=${CMIS_EXTERNAL_RESOURCE_MANAGEMENT_VALIDATION_STATUS_DEFAULT_VALUE:#{null}}
cmis.versioning.enabled=${CMIS_VERSIONING_ENABLED:#{null}}
cmis.versioning.state=#{'${CMIS_VERSIONING_STATE:MAJOR}'.toUpperCase()}
diff --git a/datastorages/cmis/src/main/resources/config-store/config-cmis.xml b/datastorages/cmis/src/main/resources/config-store/config-cmis.xml
index 76abe73572c..1c302788b5c 100644
--- a/datastorages/cmis/src/main/resources/config-store/config-cmis.xml
+++ b/datastorages/cmis/src/main/resources/config-store/config-cmis.xml
@@ -1,6 +1,6 @@
+
+
+
+
geonetwork
org.geonetwork-opensource
- 4.4.6-SNAPSHOT
+ 4.4.7-SNAPSHOT
4.0.0
diff --git a/datastorages/s3/pom.xml b/datastorages/s3/pom.xml
index 861af49bf91..7c348f271e2 100644
--- a/datastorages/s3/pom.xml
+++ b/datastorages/s3/pom.xml
@@ -28,7 +28,7 @@
gn-datastorages
org.geonetwork-opensource.datastorage
- 4.4.6-SNAPSHOT
+ 4.4.7-SNAPSHOT
4.0.0
diff --git a/datastorages/s3/src/main/java/org/fao/geonet/api/records/attachments/S3Store.java b/datastorages/s3/src/main/java/org/fao/geonet/api/records/attachments/S3Store.java
index 27df07a4532..2114f8f5d20 100644
--- a/datastorages/s3/src/main/java/org/fao/geonet/api/records/attachments/S3Store.java
+++ b/datastorages/s3/src/main/java/org/fao/geonet/api/records/attachments/S3Store.java
@@ -193,9 +193,13 @@ public String delResources(final ServiceContext context, final int metadataId) t
for (S3ObjectSummary object: objects.getObjectSummaries()) {
s3.getClient().deleteObject(s3.getBucket(), object.getKey());
}
- return String.format("Metadata '%s' directory removed.", metadataId);
+ Log.info(Geonet.RESOURCES,
+ String.format("Metadata '%d' directory removed.", metadataId));
+ return String.format("Metadata '%d' directory removed.", metadataId);
} catch (AmazonServiceException e) {
- return String.format("Unable to remove metadata '%s' directory.", metadataId);
+ Log.warning(Geonet.RESOURCES,
+ String.format("Unable to remove metadata '%d' directory. %s", metadataId, e.getMessage()));
+ return String.format("Unable to remove metadata '%d' directory.", metadataId);
}
}
@@ -206,7 +210,7 @@ public String delResource(final ServiceContext context, final String metadataUui
for (MetadataResourceVisibility visibility: MetadataResourceVisibility.values()) {
if (tryDelResource(metadataUuid, metadataId, visibility, resourceId)) {
- return String.format("MetadataResource '%s' removed.", resourceId);
+ return String.format("Metadata resource '%s' removed.", resourceId);
}
}
return String.format("Unable to remove resource '%s'.", resourceId);
@@ -217,7 +221,7 @@ public String delResource(final ServiceContext context, final String metadataUui
final String resourceId, Boolean approved) throws Exception {
int metadataId = canEdit(context, metadataUuid, approved);
if (tryDelResource(metadataUuid, metadataId, visibility, resourceId)) {
- return String.format("MetadataResource '%s' removed.", resourceId);
+ return String.format("Metadata resource '%s' removed.", resourceId);
}
return String.format("Unable to remove resource '%s'.", resourceId);
}
@@ -227,8 +231,12 @@ private boolean tryDelResource(final String metadataUuid, final int metadataId,
final String key = getKey(metadataUuid, metadataId, visibility, resourceId);
if (s3.getClient().doesObjectExist(s3.getBucket(), key)) {
s3.getClient().deleteObject(s3.getBucket(), key);
+ Log.info(Geonet.RESOURCES,
+ String.format("Resource '%s' removed for metadata %d (%s).", resourceId, metadataId, metadataUuid));
return true;
}
+ Log.info(Geonet.RESOURCES,
+ String.format("Unable to remove resource '%s' for metadata %d (%s).", resourceId, metadataId, metadataUuid));
return false;
}
@@ -287,11 +295,5 @@ public void close() throws IOException {
path = null;
}
}
-
- @Override
- protected void finalize() throws Throwable {
- close();
- super.finalize();
- }
}
}
diff --git a/docs/changes/changes4.4.6-0.txt b/docs/changes/changes4.4.6-0.txt
new file mode 100644
index 00000000000..636b5eb919c
--- /dev/null
+++ b/docs/changes/changes4.4.6-0.txt
@@ -0,0 +1,133 @@
+================================================================================
+===
+=== GeoNetwork 4.4.6-SNAPSHOT: List of changes
+===
+================================================================================
+- Release / 4.4.6 / Changelog. (#8462)
+- update PSC details in user guide
+- Delete date not being copied causing duplicate (#8454)
+- Standard / DCAT (and profiles) export (#7600)
+- Update home page "browse by" to display facet as label if there is only one (#8426)
+- Avoid duplicate validation message when trying to register a user with existing email
+- Add bootstrap datepicker language files for supported UI languages
+- Add better logging when resources are deleted to make it clear what metadata record the resource was deleted from. (#8430)
+- Record view / More like this / Add filter option.
+- Fix saving UI settings without changes
+- Harvester / Simple URL / Fix multiple URL alignement
+- Elasticsearch / Update to 8.14.3. (#8337)
+- Remove empty filename condition (#8436)
+- Elasticsearch / API / Allow ndjson for _msearch endpoint
+- Improve administrator guide UI configuration documentation
+- Harvester / Simple URL / ODS improvement
+- Editor / Geopublication / Misc fix. (#8092)
+- WebDav harvester / Add support for XSLT filter process (#8243)
+- Editor / Associated resource / Remote document / Add content type
+- Fixed description for getIdentifiers in IdentifierApi (#8422)
+- Formatter / Datacite / Default resource type (#8407)
+- Remove spaces from the list of schema list of metadata import restrictions so that "iso19115-3.2018, dublin-core" will also work. (#8408)
+- Thesaurus / OWL format / Mobility theme hierarchy (#8393)
+- Record view / Does not display thesaurus block if no keywords.
+- Map / Save your map improvements (#8155)
+- Editor / Table mode / Fix field using directive (#8261)
+- Thesaurus / Add inScheme property in concept of local thesaurus
+- Standard / ISO19115-3 / Only search for associated record with UUID
+- CSW / Fix parsing date values for filters. Fixes #8034
+- Javascript / HTML formatting fixes related to Prettier
+- Update external management url Add {objectId} property in external management url (base64 unique identifier for the record) Change external management type url property {type} so that it is fixed values so that same value can be used in {objectId} CMIS Fixed property names used for validation fields to be consistent with other names. Jcloud Updgade from jcloud 2.3.0 to jcloud 2.5.0 Add support for external management named properties similar to cmis Fix bug with deleting all resources as it was failing to identify folders correctly for azure blob.
+- Harvester / ISO19115-3 / Better support missing metadata date info
+- Metadata indexing / ISO19139 / ISO19115-3.2018 / Escape graphic overview file name for JSON (#8412)
+- Fixed spurious whitespace for gn-comma-list (#8398)
+- Metadata editor / Add required indicator support to the keyword selector directive and fix its display for the field duration directive
+- Fix the width of the projection switcher (#8399)
+- Metadata editor / validation report improvements (#8395)
+- Don't capitalize the labels for the facet filter values (#8133)
+- Support multiple DOI servers (#8098)
+- Thesaurus / Date improvements. (#8392)
+- GeoNetwork harvester - avoid double counting of updated metadata. (#8389)
+- Fix harvester execution logs added to previous logs (#8387)
+- Visual and UX changes for WFS previews (#8284)
+- Metadata detail page - hide history types selector when tasks (DOI) and workflow are disabled
+- Fix the overlapping filter settings and the customize options (#8316)
+- ISO19139 / ISO19115.3 / Index resource date fields as defined in the metadata.
+- Fix the schema artifact name in add schema script
+- Update configuring-faceted-search.md
+- Aggregations / Temporal range / Avoid browser autocomplete on calendar field
+- OpenAPI / Operation returning no content should not advertised a schema.
+- Indexing / DCAT multilingual support (#8377)
+- Xsl utility / Add a function to retrieve thesaurus title with its key (#8378)
+- GIT / .gitignore
+- Map viewer / WMS GetFeatureInfo support for application/json info format (#8372)
+- Add build profile for MacOS ARM
+- Editor / Associated resource / DOI search. (#8363)
+- Standard / ISO19115-3 / Label improvement. (#8364)
+- Harvester / Simple URL / ODS / Improve mapping
+- Don't add file content to the exception when requesting XML documents, if the content is not XML (#8360)
+- Put the image name in the `alt` attribute in the thumbnail on the metadata page. (#8290)
+- CSW Harvester / Avoid increment 2 metrics for a single metadata in certain conditions (#8069)
+- iso19139 - Update thumbnail add/update and remove to support index update/removal (#8348)
+- publish status not refreshing fix (#8344)
+- Editor / Associated resource / Avoid empty label (#8339)
+- Editor / DOI search / Improve label (#8338)
+- API / Improve parameter check for XSL conversion. (#8201)
+- Admin / Source / Improve dirty state (#8222)
+- Standard / ISO19115-3 / Formatters / ISO19139 / Ignore mcc linkage for overview (#8225)
+- Fix Clipboard copy/paste on Firefox - use ES5 (#8332)
+- Indexing / Draft field MUST not be an array (#8242)
+- Editor / Dublin core / Fix extent coordinates (#8258)
+- Workflow / update notification level based on user profile when cancelling a submission (#8264)
+- INSPIRE Atom harvester / process only public datasets by resource identifier
+- Special characters in the cookie causing 400 bad requests from Spring Security. Fixes #8275
+- Do not try to request clipboard permissions
+- Social links in metadata page doesn't have the metadata page permalink. Fixes #8322
+- Repository Citation.cff metadata for DOI registration with Zenodo (#8317)
+- Modify record not found message to only link to signin if user is not logged in (#8312)
+- Modify GnMdViewController to set recordIdentifierRequested using the getUuid function
+- harvesting CSW: changed loglevel for invalid metadata to info (#8303)
+- Standard / ISO19139 / i18n / Missing french translation (#8298)
+- Index / Add maintenance details.
+- Record view / Improve layout of table (eg. quality measures)
+- Update batch PDF export to skip working copies (#8292)
+- Standard / ISO19139 / Fix removal of online source when multiple transfer options block are used. (#8281)
+- Fix a problem with recaptcha not shown sometimes (#8285)
+- Zoom to map popup remains active on non-map pages. (#8267)
+- Use UI language for metadata selection export to CSV / PDF. Fixes #7969 (#8262)
+- Fixed issue with working copy not being returned from /api/records/{metadataUuid}/formatters/{formatterId:.+} (#8269)
+- Fixed issue with working copy not being returned from getRecordAS api (#8265)
+- Standard / ISO19115-3 / Formatters / ISO19139 / Fix scope code (#8224)
+- Standard / ISO19115-3 / Formatter / Fix namespace declaration (#8223)
+- Editor / Configuration / Improve deletion in forEach section (#8244)
+- Fix infinite "Please wait" message on error (#8249)
+- Broadcasting error when delete record (#8212)
+- ISO19115-3.2018 / Remove duplicated fields for metadata identifier and uuid in CSV export (#8238)
+- Standard / ISO19139 / Formatter / Do not display extent if none available (#8229)
+- Fix wrong HTML self closing tags (#8232)
+- Editor / Polygon not saved (#8230)
+- Add info logs to make transaction of working copy merge more traceable (#8178)
+- API / Client code generation / Avoid reserved word (#8214)
+- Double translation can lead to infinite stack (#8209)
+- Fix canViewRecord function so that it returned the workflow record. (#8152)
+- Automatic formatting
+- Association type / Consistent labels (#8077)
+- Multilingual Emails (#8044)
+- Add support for multilingual thesaurus titles in the index (#8154)
+- Bump actions/setup-java from 4.1.0 to 4.2.1 (#7870)
+- Fix presence of duplicated geonet elements on partial metadata updates
+- Fix user application feedback (#7769)
+- Update SECURITY.md (#8172)
+- Register user / allow to configured allowed email domains (#8186)
+- docs: fix image links in change-log(version-3.8.0.md and 4.0.0-alpha.1 (#7938)
+- Elasticsearch / Update to 8.14.0.
+- Bump org.apache.maven.plugins:maven-dependency-plugin
+- Release script improvement
+- Cleaning / Remove transifex converting tools
+- Indexing / Lower severity of getIndexField
+- Metadata extents API - fix service for metadata with working copy - test (#8197)
+- Register user / allow to select the group where the user wants to register (#8176)
+- Metadata extents API - fix service for metadata with working copy
+- Bump com.jayway.jsonpath:json-path from 2.4.0 to 2.9.0 in /services
+- Bump org.owasp.esapi:esapi from 2.4.0.0 to 2.5.4.0
+- Bump org.postgresql:postgresql from 42.6.0 to 42.7.3
+- Bump org.xmlunit:xmlunit-core from 2.1.1 to 2.10.0
+- Bump com.google.guava:guava from 30.0-jre to 33.2.1-jre
+- Update en-admin.json
+- Update version to 4.4.6-SNAPSHOT
\ No newline at end of file
diff --git a/docs/manual/docs/administrator-guide/configuring-the-catalog/img/feedback-email.png b/docs/manual/docs/administrator-guide/configuring-the-catalog/img/feedback-email.png
new file mode 100644
index 00000000000..5d377748e86
Binary files /dev/null and b/docs/manual/docs/administrator-guide/configuring-the-catalog/img/feedback-email.png differ
diff --git a/docs/manual/docs/administrator-guide/configuring-the-catalog/img/feedback-multilingual.png b/docs/manual/docs/administrator-guide/configuring-the-catalog/img/feedback-multilingual.png
new file mode 100644
index 00000000000..87044df119d
Binary files /dev/null and b/docs/manual/docs/administrator-guide/configuring-the-catalog/img/feedback-multilingual.png differ
diff --git a/docs/manual/docs/administrator-guide/configuring-the-catalog/img/morelikethisconfig.png b/docs/manual/docs/administrator-guide/configuring-the-catalog/img/morelikethisconfig.png
new file mode 100644
index 00000000000..bc215c549b5
Binary files /dev/null and b/docs/manual/docs/administrator-guide/configuring-the-catalog/img/morelikethisconfig.png differ
diff --git a/docs/manual/docs/administrator-guide/configuring-the-catalog/img/ui-settings-searchpage.png b/docs/manual/docs/administrator-guide/configuring-the-catalog/img/ui-settings-searchpage.png
index 06bc3bff312..c764f5d9244 100644
Binary files a/docs/manual/docs/administrator-guide/configuring-the-catalog/img/ui-settings-searchpage.png and b/docs/manual/docs/administrator-guide/configuring-the-catalog/img/ui-settings-searchpage.png differ
diff --git a/docs/manual/docs/administrator-guide/configuring-the-catalog/system-configuration.md b/docs/manual/docs/administrator-guide/configuring-the-catalog/system-configuration.md
index d56eb454ffa..2bec134017e 100644
--- a/docs/manual/docs/administrator-guide/configuring-the-catalog/system-configuration.md
+++ b/docs/manual/docs/administrator-guide/configuring-the-catalog/system-configuration.md
@@ -58,21 +58,41 @@ JVM proxy parameters may also be required to properly set the proxy for all remo
## Feedback {#system-config-feedback}
-Email may be sent by the catalog.
+Email notifications are sent by the catalog.
-- you are using the User Self-registration system
-- you are using the metadata status workflow (See [Life cycle](../../user-guide/workflow/life-cycle.md))
-- a file uploaded with a metadata record is downloaded and notify privilege is selected
+- When using the User Self-registration system.
+- When using the metadata status workflow (See [Life cycle](../../user-guide/workflow/life-cycle.md)).
+- When a file uploaded with a metadata record is downloaded and notify privilege is selected.
This section configure the mail server to use.
- **Email** This is the administrator's email address used to send feedback.
- **SMTP host** The mail server name or IP address to use for sending emails.
- **SMTP port** The SMTP port.
-- **Use SSL** Enable SSL mode
+- **Use SSL** Enable Secure Sockets Layer (SSL) mode
- **User name** Username if connection is required on the SMTP server
- **Password** Username password if connection is required on the SMTP server
+- **Use TLS** Enable use of Transport Layer Security (TLS)
+![](img/feedback-email.png)
+
+Additional settings are available to respect user language preference:
+
+- **Language for system generated emails** The UI language will be used when sending notification emails by default. To override this behaviour and generate a multi-lingual notification email, list the languages to be used.
+
+- **Translation follows text** Provide an introduction phrase indicating a multi-lingual notification follows.
+
+![](img/feedback-multilingual.png)
+
+!!! note
+
+ Email notifications for metadata publication are sent as `text/html` messages, this can be changed using ```WEB-INF/config.properties``` configuration:
+
+ ```properties
+ # Configure the metadata publication notification mails to be sent as HTML (true) or TEXT (false)
+ metadata.publicationmail.format.html=true
+ ```
+
## Metadata search results
Configuration settings in this group determine what the limits are on user interaction with the search results.
diff --git a/docs/manual/docs/administrator-guide/configuring-the-catalog/user-interface-configuration.md b/docs/manual/docs/administrator-guide/configuring-the-catalog/user-interface-configuration.md
index 1734724599f..9f3072a7657 100644
--- a/docs/manual/docs/administrator-guide/configuring-the-catalog/user-interface-configuration.md
+++ b/docs/manual/docs/administrator-guide/configuring-the-catalog/user-interface-configuration.md
@@ -18,7 +18,7 @@ To add a new configuration, such as for a sub-portal (see [Portal configuration]
Since the settings form is a long form, the `save` button is repeated at the base of the page. In either case, all settings are saved.
-- **Filter settings**: This search box can be used to filter settings in the form, for example searching for "social" will show only the settings related to the Social Bar.
+- **Filter settings**: This search box can be used to filter settings in the form, for example searching for "social" will show only the settings related to the Social bar.
![](img/ui-settings-filter.png)
@@ -31,7 +31,7 @@ To add a new configuration, such as for a sub-portal (see [Portal configuration]
## Footer {#user-interface-config-footer}
- **Footer**: Select this checkbox to determine whether the GeoNetwork footer is shown. If not set, no footer will be visible.
-- **Social bar**: Select this check box to show the social bar (links to twitter, facebook, linkedin etc) in the footer.
+- **Social bar**: Select this check box to show the social media bar in the footer.
![](img/ui-settings-footer.png)
@@ -60,22 +60,23 @@ To add a new configuration, such as for a sub-portal (see [Portal configuration]
- **Search application**: Select this check box to determine whether the search application is visible in the top toolbar. If not set, no link is shown.
- **Application URL**: Define the URL for the search application. In the majority of cases this can be left as the default.
- **Number of records per page**: Define the options to determine the number of records shown per page of results, and the default.
-- **Type of facet**: Define the set of search facets should be visible in the search page. The default is `details` but `manager` can be used to show the facets more normally used on the editor page.
-- **Default search**: Define a default filter for the search.
+- **Facet configuration**: See [Configuring faceted search](../../customizing-application/configuring-faceted-search.md). The configuration is defined using JSON following the Elasticsearch API.
![](img/ui-settings-searchpage.png)
-- **Facet field to display using tabs**: This option creates a tab for each configured facet above the search results. This can be used to further narrow down the search results. The list of facet names can be found at . For example, to include the Topic Category filter above the search results, the administrator would add `topicCat` as the facet field to display.
-- **List of facets**: This can be used to restrict the facets available for searching. For example, adding `topicCat` to this list would restrict the search options to `Topic Category` only. This can be useful for restricting the search options in a sub-portal or external web application. To add additional facets to the list, select the blue `+` button.
+- **Facet field to display using tabs**: This option creates a tab for each configured facet above the search results. This can be used to further narrow down the search results.
- **Filters**: Define additional search criteria added to all searches and again are used primarily for external applications and sub-portals.
-
-![](img/ui-settings-searchpage2.png)
-
- **Type of sort options**: Define the different ways by which a user can sort a set of search results. The **default sort by option** is shown below. Note that to search for example on `title` in alphabetical order it is necessary to set the order to `reverse`.
- **List of templates for search results**: This section allows the administrator to configure templates for the layout of the search results. The default is `grid` whereas `list` is the default for the editor board.
![](img/ui-settings-searchpage3.png)
+
+- **Similar records** or **More like this**: Define the query used to search for similar records that are displayed at the bottom of the record view.
+
+![](img/morelikethisconfig.png)
+
+
- **Default template used for search results**: Define the template page for the search. Generally this can be left as the default.
- **List of formatter for record view**: Determine the formatter used to display the search results. See [Customizing metadata views](../../customizing-application/creating-custom-view.md) for information on creating a new formatter. To add an additional view, click the blue `+` button below the list and provide a name and a URL.
@@ -135,30 +136,30 @@ You can configure each map with different layers and projections.
- **Map Projection** This is the default projection of the map. Make sure the projection is defined in **Projections to display maps into** below.
-![](img/ui-settings-mapprojection.png)
+ ![](img/ui-settings-mapprojection.png)
-- **List of map projections to display bounding box coordinates in** This is used in the map when editing a record and defining the bounding box extent. Note that the coordinates will be stored in WGS84 regardless of the projection used to draw them.
+- **List of map projections to display bounding box coordinates in** This is used in the map when editing a record and defining the bounding box extent. Make sure the listed projections are defined in **Projections to display maps into** below. Note that the coordinates will be stored in WGS84 regardless of the projection used to draw them.
-![](img/ui-settings-mapprojectionslist.png)
+ ![](img/ui-settings-mapprojectionslist.png)
- **Projections to display maps into** This is where the different projections available to the map are defined. All projections will be shown in the `Projection Switcher` tool of the map.
-![](img/ui-settings-mapprojection2.png)
+ ![](img/ui-settings-mapprojection2.png)
-In order to enable a new projection it must be defined here using the **proj4js** syntax, which can be found at . Additionally the default bounding box extent, maximum bounding box extent, and allowed resolutions (if required) can be defined.
+ In order to enable a new projection it must be defined here using the **proj4** syntax, which can be found for many EPSG-listed projections at, for example, . Additionally, the default bounding box extent, maximum bounding box extent and allowed resolutions (if required) can be defined.
-Ensure that the coordinates inserted are in the correct units for and are local to the projection. A list of resolutions is only relevant if the main map layer has a XYZ source, which does not follow the common tiling pattern.
+ Ensure that the coordinates inserted are in the correct units for the projection and are local to the projection. A list of resolutions is only relevant if the main map layer has a XYZ source that does not follow the common tiling pattern.
-Check that this configuration is valid by opening the map.
+ Check that this configuration is valid by opening the map.
-![](img/ui-settings-mapprojection3.png)
+ ![](img/ui-settings-mapprojection3.png)
-!!! info "Important"
+ !!! info "Important"
If the configuration of a projection is incomplete or invalid, the map may fail to load.
-If a projection is defined which is not supported by the source of the map layer, the map application will reproject map images at the client side. This may cause unexpected behaviour, such as rotated or distorted labels.
+ If a projection is defined which is not supported by the source of the map layer, the map application will reproject map images at the client side. This may cause unexpected behaviour, such as rotated or distorted labels.
- **Optional Map Viewer Tools** The checkboxes in this section define the tools available to the user in the right toolbar of the main map. Elements that are not checked are not visible.
- **OGC Service to use as a graticule**: This is optional and allows the use of an external service to display the graticule on the map.
@@ -215,7 +216,7 @@ This section defines the configuration for the map shown when editing a record.
## Record View
- **Record view**:
-- **Show Social bar**: If enabled the social bar (links to facebook, twitter etc) are enabled in record view.
+- **Show Social bar**: If enabled, the social media bar is enabled in record view.
## Editor Application
@@ -250,7 +251,7 @@ This section defines the configuration for the map shown when editing a record.
## JSON Configuration
-This section shows the JSON configuration for the currently applied User Interface settings. From here, the json can be saved to a file (by copying and pasting).
+This section shows the JSON configuration for the currently applied User Interface settings. From here, the JSON can be saved to a file (by copying and pasting).
- **Test client configuration**: Click this button to test the configuration in a new browser tab.
- **Reset configuration**: Click this button to reset the configuration back to the default. Note that this will revert any changes you have made in the above page.
diff --git a/docs/manual/docs/administrator-guide/managing-users-and-groups/authentication-mode.md b/docs/manual/docs/administrator-guide/managing-users-and-groups/authentication-mode.md
index efc095df787..7026e9c804b 100644
--- a/docs/manual/docs/administrator-guide/managing-users-and-groups/authentication-mode.md
+++ b/docs/manual/docs/administrator-guide/managing-users-and-groups/authentication-mode.md
@@ -6,6 +6,7 @@ By default the catalog uses the internal database for user management and authen
- [Configuring LDAP - Hierarchy](authentication-mode.md#authentication-ldap-hierarchy)
- [Configuring CAS](authentication-mode.md#authentication-cas)
- [Configuring OAUTH2 OpenID Connect](authentication-mode.md#authentication-openid)
+- [Configuring JWT/JSON Headers](authentication-mode.md#jwt-headers)
- [Configuring Keycloak](authentication-mode.md#authentication-keycloak)
- [Configuring Shibboleth](authentication-mode.md#authentication-shibboleth)
@@ -818,6 +819,253 @@ sample:RegisteredUser
A similar setup is described for geoserver in the [geoserver documentation](https://docs.geoserver.org/latest/en/user/community/keycloak/index.html).
+## Configuring JWT/JSON Headers {#jwt-headers}
+
+The JWT Headers module provides a security module for header based security. It is equivalent to GeoServer's JWT Headers Module (both GeoServer and GeoNetwork share a code library to make them equivalent).
+
+This module allows [JSON-based](https://en.wikipedia.org/wiki/JSON) headers (for username and roles) as well as [JWT-based](https://en.wikipedia.org/wiki/JSON_Web_Token) headers (for username and roles). It also allows for validating JWT-based Access Tokens (i.e. via [OAUTH2](https://en.wikipedia.org/wiki/OAuth)/[OpenID Connect](https://en.wikipedia.org/wiki/OpenID#OpenID_Connect_(OIDC))).
+
+
+If you are using something like [Apache's mod_auth_openidc](https://github.com/OpenIDC/mod_auth_openidc), then this module will allow you to:
+
+1. Get the username from an Apache-provided `OIDC_*` header (either as simple-strings or as a component of a JSON object).
+2. Get the user's roles from an Apache-provided `OIDC_*` header (as a component of a JSON object).
+3. The user's roles can also come from the GeoNetwork Database (managed by the administrator in the GeoNetwork GUI).
+
+If you are using [OAUTH2/OIDC Access Tokens](https://www.oauth.com/oauth2-servers/access-tokens/):
+
+1. Get the username from the attached JWT Access Token (via a path into the [Access Token's JSON Claims](https://auth0.com/docs/authenticate/login/oidc-conformant-authentication/oidc-adoption-access-tokens/)).
+2. Get the user's roles from the JWT Access Token (via a path into the Token's JSON Claims).
+3. Validate the Access Token
+
+ * Validate its Signature
+ * Validate that it hasn't expired
+ * Validate the token against a token verifier URL ("userinfo_endpoint") and check that subjects match
+    * Validate components of the Access Token (like [aud (audience)](https://auth0.com/docs/secure/tokens/json-web-tokens/json-web-token-claims))
+
+4. The user's roles can also come from the GeoNetwork Database (managed by the administrator in the GeoNetwork GUI).
+5. You can also extract roles from the JWT Access Token (via a JSON path).
+
+### JWT Headers configuration
+
+
+The JWT Headers module covers three main use cases:
+
+1. Simple Text, JSON, or JWT headers for the username
+2. Verification of JWT Access Tokens
+3. Getting roles from a JSON header or an attached JWT Access Token claim
+
+#### Configuration Options
+
+You must turn on JWT Header Support by setting the `GEONETWORK_SECURITY_TYPE` environment variable to `jwt-headers`.
+
+```
+GEONETWORK_SECURITY_TYPE=jwt-headers
+```
+
+Please see these files for more detailed configuration:
+* `config-security-jwt-header.xml`
+* `config-security-jwt-header-overrides.properties`
+
+##### User Name Options
+
+
+| Environment Variable | Meaning |
+| ------------- | ------- |
+|JWTHEADERS_UserNameHeaderFormat | The name of the HTTP header item that contains the user name. |
+|JWTHEADERS_UserNameFormat| Format that the user name is in:
`STRING` - user name is the header's value.
`JSON` - The header is a JSON string. Use "JSON path" for where the user name is in the JSON.
`JWT` - The header is a JWT (base64) string. Use "JSON path" for where the user name is in the JWT claims. |
+|JWTHEADERS_UserNameJsonPath | JSON path for the User Name. If the user name is in JSON or JWT format, this is the JSON path to the user's name.|
+
+
+
+If you are using [Apache's mod_auth_openidc](https://github.com/OpenIDC/mod_auth_openidc), then Apache will typically add:
+
+* an `OIDC_id_token_payload` header item (containing a JSON string of the ID token claims)
+* an `OIDC_access_token` header item (containing a base64 JWT Access Token)
+* optionally, a simple header item with individual claim values (e.g. `OIDC_preferred_username`)
+
+Here are some example values:
+
+STRING
+```
+OIDC_preferred_username: david.blasby@geocat.net
+```
+
+JSON
+```
+OIDC_id_token_payload: {"exp":1708555947,"iat":1708555647,"auth_time":1708555288,"jti":"42ee833e-89d3-4779-bd9d-06b979329c9f","iss":"http://localhost:7777/realms/dave-test2","aud":"live-key2","sub":"98cfe060-f980-4a05-8612-6c609219ffe9","typ":"ID","azp":"live-key2","nonce":"4PhqmZSJ355KBtJPbAP_PdwqiLnc7B1lA2SGpB0zXr4","session_state":"7712b364-339a-4053-ae0c-7d3adfca9005","at_hash":"2Tyw8q4ZMewuYrD38alCug","acr":"0","sid":"7712b364-339a-4053-ae0c-7d3adfca9005","upn":"david.blasby@geocat.net","resource_access":{"live-key2":{"roles":["GeonetworkAdministrator","GeoserverAdministrator"]}},"email_verified":false,"address":{},"name":"david blasby","groups":["default-roles-dave-test2","offline_access","uma_authorization"],"preferred_username":"david.blasby@geocat.net","given_name":"david","family_name":"blasby","email":"david.blasby@geocat.net"}
+```
+
+JWT
+```
+OIDC_access_token: eyJhbGciOiJSUzI1NiIsInR5cCIgOiAiSldUIiwia2lkIiA6ICItb0QyZXphcjF3ZHBUUmZCS0NqMFY4cm5ZVkJGQmxJLW5ldzFEREJCNTJrIn0.eyJleHAiOjE3MDg1NTU5NDcsImlhdCI6MTcwODU1NTY0NywiYXV0aF90aW1lIjoxNzA4NTU1Mjg4LCJqdGkiOiI0M2UyYjUwZS1hYjJkLTQ2OWQtYWJjOC01Nzc1YTY0MTMwNTkiLCJpc3MiOiJodHRwOi8vbG9jYWxob3N0Ojc3NzcvcmVhbG1zL2RhdmUtdGVzdDIiLCJhdWQiOiJhY2NvdW50Iiwic3ViIjoiOThjZmUwNjAtZjk4MC00YTA1LTg2MTItNmM2MDkyMTlmZmU5IiwidHlwIjoiQmVhcmVyIiwiYXpwIjoibGl2ZS1rZXkyIiwibm9uY2UiOiI0UGhxbVpTSjM1NUtCdEpQYkFQX1Bkd3FpTG5jN0IxbEEyU0dwQjB6WHI0Iiwic2Vzc2lvbl9zdGF0ZSI6Ijc3MTJiMzY0LTMzOWEtNDA1My1hZTBjLTdkM2FkZmNhOTAwNSIsImFjciI6IjAiLCJyZWFsbV9hY2Nlc3MiOnsicm9sZXMiOlsiZGVmYXVsdC1yb2xlcy1kYXZlLXRlc3QyIiwib2ZmbGluZV9hY2Nlc3MiLCJ1bWFfYXV0aG9yaXphdGlvbiJdfSwicmVzb3VyY2VfYWNjZXNzIjp7ImxpdmUta2V5MiI6eyJyb2xlcyI6WyJHZW9uZXR3b3JrQWRtaW5pc3RyYXRvciIsIkdlb3NlcnZlckFkbWluaXN0cmF0b3IiXX0sImFjY291bnQiOnsicm9sZXMiOlsibWFuYWdlLWFjY291bnQiLCJtYW5hZ2UtYWNjb3VudC1saW5rcyIsInZpZXctcHJvZmlsZSJdfX0sInNjb3BlIjoib3BlbmlkIHBob25lIG9mZmxpbmVfYWNjZXNzIG1pY3JvcHJvZmlsZS1qd3QgcHJvZmlsZSBhZGRyZXNzIGVtYWlsIiwic2lkIjoiNzcxMmIzNjQtMzM5YS00MDUzLWFlMGMtN2QzYWRmY2E5MDA1IiwidXBuIjoiZGF2aWQuYmxhc2J5QGdlb2NhdC5uZXQiLCJlbWFpbF92ZXJpZmllZCI6ZmFsc2UsImFkZHJlc3MiOnt9LCJuYW1lIjoiZGF2aWQgYmxhc2J5IiwiZ3JvdXBzIjpbImRlZmF1bHQtcm9sZXMtZGF2ZS10ZXN0MiIsIm9mZmxpbmVfYWNjZXNzIiwidW1hX2F1dGhvcml6YXRpb24iXSwicHJlZmVycmVkX3VzZXJuYW1lIjoiZGF2aWQuYmxhc2J5QGdlb2NhdC5uZXQiLCJnaXZlbl9uYW1lIjoiZGF2aWQiLCJmYW1pbHlfbmFtZSI6ImJsYXNieSIsImVtYWlsIjoiZGF2aWQuYmxhc2J5QGdlb2NhdC5uZXQifQ.Iq8YJ99s_HBd-gU2zaDqGbJadCE--7PlS2kRHaegYTil7WoNKfjfcH-K-59mHGzJm-V_SefE-iWG63z2c6ChddzhvG8I_O5vDNFoGlGOQFunZC379SqhqhCEdwscEUDkNA3iTTXvK9vn0muStDiv9OzpJ1zcpqYqsgxGbolGgLJgeuK8yNDH7kzDtoRzHiHw2rx4seeVpxUYAjyg_cCkEjRt3wzud7H3xlfQWRx75YfpJ0pnVphuXYR7Z8x9p6hCPtrBfDeriudm-wkwXtcV2LNlXrZ2zpKS_6Zdxzza2lN30q_6DQXHGo8EAIr8SiiQrxPQulNiX9r8XmQ917Ep0g
+```
+
+
+
+It is recommended to either use the `OIDC_id_token_payload` (JSON) or `OIDC_access_token` (JWT) header.
+
+For `OIDC_id_token_payload`:
+
+* Request header attribute for User Name: `OIDC_id_token_payload`
+* Format the Header value is in: `JSON`
+* JSON path for the User Name: `preferred_username`
+
+For `OIDC_access_token`:
+
+* Request header attribute for User Name: `OIDC_access_token`
+* Format the Header value is in: `JWT`
+* JSON path for the User Name: `preferred_username`
+
+
+
+#### Role Source Options
+
+
+You can use the standard role source options in GeoNetwork (`Request Header`, `User Group Service`, or `Role Service`). The JWT Headers module adds two more role sources - `Header Containing JSON String` and `Header containing JWT`.
+
+
+| Environment Variable | Meaning |
+| ------------- | ------- |
+|JWTHEADERS_RolesHeaderName| Name of the header item the JSON or JWT is contained in|
+| JWTHEADERS_JwtHeaderRoleSource |Which Role Source to use:
`JSON` - The header is a JSON string. Use "JSON path" for where the roles are in the JSON.
`JWT` - The header is a JWT (base64) string. Use "JSON path" for where the roles are in the JWT claims. |
+| JWTHEADERS_RolesJsonPath| Path in the JSON object or JWT claims that contains the roles. This should either be a simple string (single role) or a list of strings.|
+
+
+
+Using the example `OIDC_id_token_payload` (JSON) or `OIDC_access_token` (JWT) shown above, the claims are:
+
+
+```
+ {
+ "exp": 1708555947,
+ "iat": 1708555647,
+ "auth_time": 1708555288,
+ "jti": "42ee833e-89d3-4779-bd9d-06b979329c9f",
+ "iss": "http://localhost:7777/realms/dave-test2",
+ "aud": "live-key2",
+ "sub": "98cfe060-f980-4a05-8612-6c609219ffe9",
+ "typ": "ID",
+ "azp": "live-key2",
+ "nonce": "4PhqmZSJ355KBtJPbAP_PdwqiLnc7B1lA2SGpB0zXr4",
+ "session_state": "7712b364-339a-4053-ae0c-7d3adfca9005",
+ "at_hash": "2Tyw8q4ZMewuYrD38alCug",
+ "acr": "0",
+ "sid": "7712b364-339a-4053-ae0c-7d3adfca9005",
+ "upn": "david.blasby@geocat.net",
+ "resource_access":
+ {
+ "live-key2":
+ {
+ "roles":
+ [
+ "GeonetworkAdministrator",
+ "GeoserverAdministrator"
+ ]
+ }
+ },
+ "email_verified": false,
+ "address": { },
+ "name": "david blasby",
+ "groups": ["default-roles-dave-test2", "offline_access", "uma_authorization"],
+ "preferred_username": "david.blasby@geocat.net",
+ "given_name": "david",
+ "family_name": "blasby",
+ "email": "david.blasby@geocat.net"
+ }
+```
+
+In this JSON set of claims (mirrored in the JWT claims of the Access Token), the two roles from the IDP are "GeonetworkAdministrator" and "GeoserverAdministrator". The JSON path to the roles is `resource_access.live-key2.roles`.
+
+#### Role Conversion
+
+
+The JWT Headers module also allows for converting roles (from the external IDP) to the GeoNetwork internal role names.
+
+
+| Environment Variable | Meaning |
+| ------------- |----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+|JWTHEADERS_RoleConverterString| Role Converter Map from External Roles to GeoNetwork Roles.
This is a ";" delimited map in the form of:
`ExternalRole1=GeoNetworkRole1;ExternalRole2=GeoNetworkRole2` |
+|JWTHEADERS_OnlyExternalListedRoles | Only allow External Roles that are explicitly named above.
If true, external roles that are not mentioned in the conversion map will be ignored. If false, those external roles will be turned into GeoNetwork roles of the same name.
These roles should either be a Profile ("Administrator", "Reviewer", etc..) or group-based permissions ("GroupName:ProfileName") |
+
+
+For example, a conversion map like `GeonetworkAdministrator=ADMINISTRATOR` will convert our IDP "GeonetworkAdministrator" role to the "ADMINISTRATOR" Profile.
+
+In our example, the user has two roles: "GeoserverAdministrator" and "GeonetworkAdministrator". If "Only allow External Roles that are explicitly named above" is true, then GeoNetwork will only see the "ADMINISTRATOR" role. If false, it will see "ADMINISTRATOR" and "GeoserverAdministrator". In neither case will it see the original "GeonetworkAdministrator" role name, since it has been converted.
+
+##### Groups
+
+As with the OIDC and Keycloak providers, specify group permissions in the `GroupName:ProfileName` format.
+
+
+### JWT Validation
+
+
+If you are using Apache's `mod_auth_openidc` module, then you do *not* have to do JWT validation - Apache will ensure they are valid when it attaches the headers to the request.
+
+However, if you are using robot access to GeoNetwork, you can attach an Access Token to the request header for access.
+
+```
+Authentication: Bearer `base64 JWT Access Token`
+```
+
+OR
+
+```
+Authentication: `base64 JWT Access Token`
+```
+
+You would then setup the user name to come from a JWT token in the `Authentication` header with a JSON path like `preferred_username`.
+
+
+
+
+You can also extract roles from the Access Token in a similar manner - make sure your IDP embeds roles inside the Access Token.
+
+| Environment Variable | Meaning |
+| ------------- | ------- |
+|JWTHEADERS_ValidateToken |Validate JWT (Access Token).
If false, do not do any validation. |
+| JWTHEADERS_ValidateTokenExpiry|Validate Token Expiry.
If true, validate the `exp` claim in the JWT and ensure it is in the future. This should always be true so you do not allow expired tokens. |
+| JWTHEADERS_ValidateTokenSignature| Validate JWT (Access Token) Signature.
If true, validate the Token's Signature|
+|JWTHEADERS_ValidateTokenSignatureURL | JSON Web Key Set URL (jwks_uri).
URL for a JWK Set. This is typically called `jwks_uri` in the OIDC metadata configuration. This will be downloaded and used to check the JWT's signature. This should always be set to ensure that the JWT has not been modified.|
+|JWTHEADERS_ValidateTokenAgainstURL | Validate JWT (Access Token) Against Endpoint.
If true, validate the access token against an IDP's token verification URL.|
+| JWTHEADERS_ValidateTokenAgainstURLEndpoint| URL (userinfo_endpoint).
IDP's token validation URL. This URL will be retrieved by adding the Access Token to the `Authentication: Bearer ` header. It should return a HTTP 200 status code if the token is valid. This is recommended by the OIDC specification.|
+| JWTHEADERS_ValidateSubjectWithEndpoint|Also validate Subject.
If true, the `sub` claim of the Access Token and the "userinfo_endpoint" `sub` claim will be checked to ensure they are equal. This is recommended by the OIDC specification. |
+|JWTHEADERS_ValidateTokenAudience | Validate JWT (Access Token) Audience.
If true, the audience of the Access Token is checked. This is recommended by the OIDC specification since this verifies that the Access Token is meant for us.|
+|JWTHEADERS_ValidateTokenAudienceClaimName | Claim Name.
The name of the claim the audience is in (`aud`, `azp`, or `appid` claim) the Access Token.|
+|JWTHEADERS_ValidateTokenAudienceClaimValue|Required Claim Value.
The value this claim must be (if the claim is a list of string, then it must contain this value). |
+
+
+#### Using Headers or GeoNetwork Database for Profiles & Profile Groups
+
+Inside `JwtHeaderSecurityConfig`, use these values to determine where Profile and ProfileGroups come from.
+
+| Property | Meaning |
+| ------------- | ------- |
+|updateProfile| true -> update the DB with the information from OIDC (don't allow user to edit profile in the UI)
false -> don't update the DB (user must edit profile in UI). |
+|updateGroup| true -> update the DB (user's group) with the information from OIDC (don't allow admin to edit user's groups in the UI)
false -> don't update the DB (admin must edit groups in UI).|
+
+### Using JWT Headers for both OIDC and OAUTH2 (Simultaneously)
+
+Using the above configuration, you can configure JWT Headers for either OIDC-based browser access (i.e. with Apache `mod_auth_openidc`) ***or*** for OAUTH2 based Bearer Token access. However, you cannot do both at the same time.
+
+To configure JWT Headers to simultaneously provide OIDC and OAUTH2 access, you can use the `jwt-headers-multi` configuration.
+
+To use this, set the `GEONETWORK_SECURITY_TYPE` to `jwt-headers-multi`
+
+```
+GEONETWORK_SECURITY_TYPE=jwt-headers-multi
+```
+
+Please see these files for more detailed configuration:
+* `config-security-jwt-header-multi.xml`
+* `config-security-jwt-header-multi-overrides.properties`
+
+This creates two JWT Header authentication filters for GeoNetwork - one for OIDC based Browser access, and one for OAUTH2 based Robot access.
+
+You configure each of these independently using the same environment variables described above.
+For the first filter, use the environment variables defined above (i.e. `JWTHEADERS_UserNameFormat`). For the second filter, add a `2` at the end of the environment variable (i.e. `JWTHEADERS_UserNameFormat2`).
+
## Configuring EU Login {#authentication-ecas}
EU Login is the central login mechanism of the European Commission. You can enable login against that central service in case your intended users have or can acquire an EU Login.
diff --git a/docs/manual/docs/administrator-guide/managing-users-and-groups/creating-group.md b/docs/manual/docs/administrator-guide/managing-users-and-groups/creating-group.md
index 857df1f970d..650d1e9d62e 100644
--- a/docs/manual/docs/administrator-guide/managing-users-and-groups/creating-group.md
+++ b/docs/manual/docs/administrator-guide/managing-users-and-groups/creating-group.md
@@ -17,12 +17,33 @@ To create new groups you should be logged on with an account that has administra
4. Click *Save*
-Access privileges can be set per metadata record. You can define privileges on a per Group basis.
+## Access privileges
-Privileges that can be set relate to visibility of the Metadata (*Publish*), data *Download*, *Interactive Map* access and display of the record in the *Featured* section of the home page.
+Access privileges can be set on a per-metadata-record basis. Privileges define which actions are available to users in the group:
-*Editing* defines the groups for which editors can edit the metadata record.
+- **Publish**: Controls visibility of the metadata.
+- **Download**: Grants access to data downloads.
+- **Interactive Map**: Provides access to map tools.
+- **Featured**: Displays the record in the *Featured* section on the home page.
-*Notify* defines what Groups are notified when a file managed by GeoNetwork is downloaded.
+Additional settings:
+- **Editing**: Specifies which groups can edit the metadata record.
+- **Notify**: Determines which groups are notified when a file managed by GeoNetwork is downloaded.
-Below is an example of the privileges management table related to a dataset.
+## Minimum user profile allowed to set privileges
+
+This setting allows administrators to control the minimum user profile required to assign privileges for a group. It provides enhanced control over who can manage sensitive privileges for users within the group.
+
+### Default setting
+
+By default, the **"Minimum User Profile Allowed to Set Privileges"** is set to **No Restrictions**. This means that any user with permission to manage privileges for a metadata record can assign privileges for users in this group.
+
+### Restricted setting
+
+When a specific profile is selected, only users with that profile or higher within the group can assign privileges. Users with lower profiles will have **read-only** access to privilege settings for this group.
+
+### Example usage
+
+If a group has **"Minimum User Profile Allowed to Set Privileges"** set to **Reviewer**:
+- Only users with the **Reviewer** profile or higher (e.g., **Administrator**) can assign privileges for users in this group.
+- Users with profiles below **Reviewer** (e.g., **Editor**) will see the group as **read-only** in the privileges interface.
diff --git a/docs/manual/docs/administrator-guide/managing-users-and-groups/creating-user.md b/docs/manual/docs/administrator-guide/managing-users-and-groups/creating-user.md
index 240fac6944b..e1d35ba75eb 100644
--- a/docs/manual/docs/administrator-guide/managing-users-and-groups/creating-user.md
+++ b/docs/manual/docs/administrator-guide/managing-users-and-groups/creating-user.md
@@ -3,8 +3,11 @@
To add a new user to the GeoNetwork system, please do the following:
1. Select the *Administration* button in the menu. On the Administration page, select *User management*.
-2. Click the button *Add a new user*;
-3. Provide the *information* required for the new user;
-4. Assign the correct *profile* (see [Users, Groups and Roles](index.md#user_profiles));
-5. Assign the user to a *group* (see [Creating group](creating-group.md));
+2. Click the button *Add a new user*.
+3. Provide the *information* required for the new user.
+4. Assign the correct *profile* (see [Users, Groups and Roles](index.md#user_profiles)).
+5. Assign the user to a *group* (see [Creating group](creating-group.md)).
6. Click *Save*.
+
+!!! note
+    Usernames are not case sensitive. The application does not allow the creation of different users whose usernames differ only in case.
diff --git a/docs/manual/docs/administrator-guide/managing-users-and-groups/img/password-forgot.png b/docs/manual/docs/administrator-guide/managing-users-and-groups/img/password-forgot.png
index d1bc512667d..bdccc9830b2 100644
Binary files a/docs/manual/docs/administrator-guide/managing-users-and-groups/img/password-forgot.png and b/docs/manual/docs/administrator-guide/managing-users-and-groups/img/password-forgot.png differ
diff --git a/docs/manual/docs/administrator-guide/managing-users-and-groups/img/selfregistration-start.png b/docs/manual/docs/administrator-guide/managing-users-and-groups/img/selfregistration-start.png
index 7e9a6f8084f..1c617a5d007 100644
Binary files a/docs/manual/docs/administrator-guide/managing-users-and-groups/img/selfregistration-start.png and b/docs/manual/docs/administrator-guide/managing-users-and-groups/img/selfregistration-start.png differ
diff --git a/docs/manual/docs/administrator-guide/managing-users-and-groups/index.md b/docs/manual/docs/administrator-guide/managing-users-and-groups/index.md
index aa0408ce3f4..c35bb17f71b 100644
--- a/docs/manual/docs/administrator-guide/managing-users-and-groups/index.md
+++ b/docs/manual/docs/administrator-guide/managing-users-and-groups/index.md
@@ -3,6 +3,7 @@
- [Creating group](creating-group.md)
- [Creating user](creating-user.md)
- [User Self-Registration](user-self-registration.md)
+- [User reset password](user-reset-password.md)
- [Authentication mode](authentication-mode.md)
## Default user {#user-defaults}
diff --git a/docs/manual/docs/administrator-guide/managing-users-and-groups/user-reset-password.md b/docs/manual/docs/administrator-guide/managing-users-and-groups/user-reset-password.md
new file mode 100644
index 00000000000..2eb887c85d5
--- /dev/null
+++ b/docs/manual/docs/administrator-guide/managing-users-and-groups/user-reset-password.md
@@ -0,0 +1,36 @@
+# User 'Forgot your password?' function {#user_forgot_password}
+
+!!! note
+ This function requires an email server configured. See [System configuration](../configuring-the-catalog/system-configuration.md#system-config-feedback).
+
+This function allows users who have forgotten their password to request a new one. Go to the sign in page to access the form:
+
+![](img/password-forgot.png)
+
+If a user takes this option they will receive an email inviting them to change their password as follows:
+
+ You have requested to change your Greenhouse GeoNetwork Site password.
+
+ You can change your password using the following link:
+
+ http://localhost:8080/geonetwork/srv/en/password.change.form?username=dubya.shrub@greenhouse.gov&changeKey=635d6c84ddda782a9b6ca9dda0f568b011bb7733
+
+ This link is valid for today only.
+
+ Greenhouse GeoNetwork Site
+
+The catalog has generated a changeKey from the forgotten password and the current date and emailed that to the user as part of a link to a change password form.
+
+If you want to change the content of this email, you should modify `xslt/service/account/password-forgotten-email.xsl`.
+
+When the user clicks on the link, a change password form is displayed in their browser and a new password can be entered. When that form is submitted, the changeKey is regenerated and checked with the changeKey supplied in the link, if they match then the password is changed to the new password supplied by the user.
+
+The final step in this process is a verification email sent to the email address of the user confirming that a change of password has taken place:
+
+ Your Greenhouse GeoNetwork Site password has been changed.
+
+ If you did not change this password contact the Greenhouse GeoNetwork Site helpdesk
+
+ The Greenhouse GeoNetwork Site team
+
+If you want to change the content of this email, you should modify `xslt/service/account/password-changed-email.xsl`.
diff --git a/docs/manual/docs/administrator-guide/managing-users-and-groups/user-self-registration.md b/docs/manual/docs/administrator-guide/managing-users-and-groups/user-self-registration.md
index fe3cb2d0142..aa7fdbb254b 100644
--- a/docs/manual/docs/administrator-guide/managing-users-and-groups/user-self-registration.md
+++ b/docs/manual/docs/administrator-guide/managing-users-and-groups/user-self-registration.md
@@ -1,5 +1,9 @@
# User Self-Registration {#user_self_registration}
+!!! note
+ This function requires an email server configured. See [System configuration](../configuring-the-catalog/system-configuration.md#system-config-feedback).
+
+
To enable the self-registration functions, see [System configuration](../configuring-the-catalog/system-configuration.md). When self-registration is enabled, for users that are not logged in, an additional link is shown on the login page:
![](img/selfregistration-start.png)
@@ -15,8 +19,8 @@ The fields in this form are self-explanatory except for the following:
- the user will still be given the `Registered User` profile
- an email will be sent to the Email address nominated in the Feedback section of the 'System Administration' menu, informing them of the request for a more privileged profile
- **Requested group**: By default, self-registered users are not assigned to any group. If a group is selected:
- - the user will still not be assigned to any group
- - an email will be sent to the Email address nominated in the Feedback section of the 'System Administration' menu, informing them of the requested group.
+ - the user will still not be assigned to any group
+ - an email will be sent to the Email address nominated in the Feedback section of the 'System Administration' menu, informing them of the requested group.
## What happens when a user self-registers?
@@ -72,39 +76,3 @@ If you want to change the content of this email, you should modify `xslt/service
The Greenhouse GeoNetwork Site
If you want to change the content of this email, you should modify `xslt/service/account/registration-prof-email.xsl`.
-
-## The 'Forgot your password?' function
-
-This function allows users who have forgotten their password to request a new one. Go to the sign in page to access the form:
-
-![](img/password-forgot.png)
-
-For security reasons, only users that have the `Registered User` profile can request a new password.
-
-If a user takes this option they will receive an email inviting them to change their password as follows:
-
- You have requested to change your Greenhouse GeoNetwork Site password.
-
- You can change your password using the following link:
-
- http://localhost:8080/geonetwork/srv/en/password.change.form?username=dubya.shrub@greenhouse.gov&changeKey=635d6c84ddda782a9b6ca9dda0f568b011bb7733
-
- This link is valid for today only.
-
- Greenhouse GeoNetwork Site
-
-The catalog has generated a changeKey from the forgotten password and the current date and emailed that to the user as part of a link to a change password form.
-
-If you want to change the content of this email, you should modify `xslt/service/account/password-forgotten-email.xsl`.
-
-When the user clicks on the link, a change password form is displayed in their browser and a new password can be entered. When that form is submitted, the changeKey is regenerated and checked with the changeKey supplied in the link, if they match then the password is changed to the new password supplied by the user.
-
-The final step in this process is a verification email sent to the email address of the user confirming that a change of password has taken place:
-
- Your Greenhouse GeoNetwork Site password has been changed.
-
- If you did not change this password contact the Greenhouse GeoNetwork Site helpdesk
-
- The Greenhouse GeoNetwork Site team
-
-If you want to change the content of this email, you should modify `xslt/service/account/password-changed-email.xsl`.
diff --git a/docs/manual/docs/annexes/standards/iso19115-3.2018.md b/docs/manual/docs/annexes/standards/iso19115-3.2018.md
index baaabc077f1..396aa275988 100644
--- a/docs/manual/docs/annexes/standards/iso19115-3.2018.md
+++ b/docs/manual/docs/annexes/standards/iso19115-3.2018.md
@@ -6887,7 +6887,7 @@ Those values are defined in the standard but hidden when editing.
| code | label | description |
|------------------------------|--------------------------------|-------------|
-| map staticMap interactiveMap | Map Static map Interactive map | |
+| map map-static map-interactive | Map Static map Interactive map | |
### Scope description {#iso19115-3.2018-elem-mcc-MD_ScopeDescription-7995800501eaf72f941d8e81542f8e98}
@@ -19811,7 +19811,7 @@ Those values are defined in the standard but hidden when editing.
| code | label | description |
|------------------------------|--------------------------------|-------------|
-| map staticMap interactiveMap | Map Static map Interactive map | |
+| map map-static map-interactive | Map Static map Interactive map | |
### Standard codelists Spatial Representation Type (mcc:MD_SpatialRepresentationTypeCode) {#iso19115-3.2018-cl-mcc-MD_SpatialRepresentationTypeCode}
diff --git a/docs/manual/docs/annexes/standards/iso19139.md b/docs/manual/docs/annexes/standards/iso19139.md
index ceb1ef656c9..53a1707aaf4 100644
--- a/docs/manual/docs/annexes/standards/iso19139.md
+++ b/docs/manual/docs/annexes/standards/iso19139.md
@@ -11963,7 +11963,7 @@ Those values are defined in the standard but hidden when editing.
| code | label | description |
|---------------------------------------------|------------------------------------------------|-------------|
-| map staticMap interactiveMap featureCatalog | Map Static map Interactive map Feature catalog | |
+| map map-static map-interactive featureCatalog | Map Static map Interactive map Feature catalog | |
Displayed only if
@@ -17436,7 +17436,7 @@ Those values are defined in the standard but hidden when editing.
| code | label | description |
|---------------------------------------------|------------------------------------------------|-------------|
-| map staticMap interactiveMap featureCatalog | Map Static map Interactive map Feature catalog | |
+| map map-static map-interactive featureCatalog | Map Static map Interactive map Feature catalog | |
Displayed only if
diff --git a/docs/manual/docs/api/img/dcat-in-download-menu.png b/docs/manual/docs/api/img/dcat-in-download-menu.png
new file mode 100644
index 00000000000..5de86b80f02
Binary files /dev/null and b/docs/manual/docs/api/img/dcat-in-download-menu.png differ
diff --git a/docs/manual/docs/api/rdf-dcat.md b/docs/manual/docs/api/rdf-dcat.md
index bb2fa14fefc..0d5eb499453 100644
--- a/docs/manual/docs/api/rdf-dcat.md
+++ b/docs/manual/docs/api/rdf-dcat.md
@@ -1,24 +1,251 @@
-# RDF DCAT end point {#rdf-dcat}
+# DCAT {#rdf-dcat}
-!!! warning
+The catalogue has the capability to convert ISO to DCAT format in various API endpoints.
- Unavailable since version 4.0.0.
-
- There is no known sponsor or interested party for implementing RDF DCAT.
- Interested parties may contact the project team for guidance and to express their intent.
+## Supported DCAT profiles
-The RDF DCAT end point provides a way of getting information about the catalog, the datasets and services, and links to distributed resources in a machine-readable format. The formats of the output are based on DCAT, an RDF vocabulary that is designed to facilitate interoperability between web-based data catalogs.
+A base conversion is provided with complementary extensions for various profiles of DCAT:
-Reference:
+| Profile | Version | Description | URL | Conversion from |
+|-----------------------------|---------|---------------------------------------------------------------------------------------------------------------------|------|------------------|
+| W3C DCAT | 3 | Default W3C standard | https://www.w3.org/TR/vocab-dcat-3/ | ISO19115-3 |
+| European DCAT-AP | 3.0.0 | DCAT profile for sharing information about Catalogues containing Datasets and Data Services descriptions in Europe | https://semiceu.github.io/DCAT-AP/releases/3.0.0/ | ISO19115-3 |
+| European DCAT-AP-Mobility | 1.0.1 | mobilityDCAT-AP is a mobility-related extension of the DCAT-AP | https://mobilitydcat-ap.github.io/mobilityDCAT-AP/releases/ | ISO19115-3 |
+| European DCAT-AP-HVD | 2.2.0 | DCAT-AP for a dataset that is subject to the requirements imposed by the High-Value Dataset implementing regulation | https://semiceu.github.io/DCAT-AP/releases/2.2.0-hvd/ | ISO19115-3 |
+| European GeoDCAT-AP | 3.0.0 | | https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0/ | ISO19115-3 |
+| European GeoDCAT-AP (SEMIC) | 3.0.0 | [XSLT conversion maintained by SEMIC](https://github.com/SEMICeu/iso-19139-to-dcat-ap/blob/main/iso-19139-to-dcat-ap.xsl) | https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0/ | ISO19139 |
-* [Data Catalog Vocabulary (DCAT)](https://www.w3.org/TR/vocab-dcat-3/)
-## Upgrading from GeoNetwork 3.0 Guidance
+* The mapping is done from ISO19115-3 to DCAT* except for the SEMIC conversion which converts ISO19139 to GeoDCAT-AP.
-RDF DCAT API is no longer available.
+* When needed, an ISO19139 to or from ISO19115-3 conversion is applied (eg. a CSW request querying a catalog in ISO19115-3 using the SEMIC conversion).
+
+* DCAT outputs are not available for ISO19110 or Dublin Core standards.
+
+## Past implementation
+
+[The first implementation of DCAT output was done in 2012](https://trac.osgeo.org/geonetwork/wiki/proposals/DCATandRDFServices) and was targeting interaction with semantic service and semantic sitemap support. DCAT output was available using a service named `rdf.search`. This service was deprecated in version 4.0.0 in favor of producing DCAT output in the [Catalog Service for the Web (CSW)](csw.md) or using the formatters API.
+
+
+## Usage in the formatters API
+
+Each DCAT format is available using a formatter eg. http://localhost:8080/geonetwork/srv/api/records/be44fe5a-65ca-4b70-9d29-ac5bf1f0ebc5/formatters/eu-dcat-ap
+
+To add the formatter in the record view download list, the user interface configuration can be updated:
+
+![image](img/dcat-in-download-menu.png)
+
+
+User interface configuration:
+
+```json
+{
+ "mods": {
+ "search": {
+ "downloadFormatter": [
+ {
+ "label": "exportMEF",
+ "url": "/formatters/zip?withRelated=false",
+ "class": "fa-file-zip-o"
+ },
+ {
+ "label": "exportPDF",
+ "url": "/formatters/xsl-view?output=pdf&language=${lang}",
+ "class": "fa-file-pdf-o"
+ },
+ {
+ "label": "exportXML",
+ "url": "/formatters/xml",
+ "class": "fa-file-code-o"
+ },
+ {
+ "label": "W3C-DCAT",
+ "url": "/formatters/dcat"
+ },
+ {
+ "label": "EU-DCAT-AP",
+ "url": "/formatters/eu-dcat-ap"
+ },
+ {
+ "label": "EU-GEO-DCAT-AP",
+ "url": "/formatters/eu-geodcat-ap"
+ },
+ {
+ "label": "EU-DCAT-AP-MOBILITY",
+ "url": "/formatters/eu-dcat-ap-mobility"
+ },
+ {
+ "label": "EU-DCAT-AP-HVD",
+ "url": "/formatters/eu-dcat-ap-hvd"
+ }
+ ]
+```
+
+
+## Usage in the CSW service
+
+All DCAT profiles are also accessible using CSW protocol.
+
+A `GetRecordById` operation can be used: http://localhost:8080/geonetwork/srv/eng/csw?SERVICE=CSW&VERSION=2.0.2&REQUEST=GetRecordById&ID=da165110-88fd-11da-a88f-000d939bc5d8&outputSchema=https://semiceu.github.io/DCAT-AP/releases/2.2.0-hvd/ and is equivalent to the API http://localhost:8080/geonetwork/srv/api/records/da165110-88fd-11da-a88f-000d939bc5d8/formatters/eu-dcat-ap-hvd?output=xml.
+
+A `GetRecords` operation can be used to retrieve a set of records: http://localhost:8080/geonetwork/srv/fre/csw?SERVICE=CSW&VERSION=2.0.2&REQUEST=GetRecords&outputSchema=http://data.europa.eu/930/&elementSetName=full&resultType=results&maxRecords=300
+
+Use the `outputSchema` parameter to select the DCAT profile to use. The following values are supported:
+
+
+| Profile | Output schema parameter |
+|-----------------------------------------|-------------------------------------------------------|
+| CSW | http://www.opengis.net/cat/csw/2.0.2 |
+| ISO19115-3 | http://standards.iso.org/iso/19115/-3/mdb/2.0 |
+| ISO19110 | http://www.isotc211.org/2005/gfc |
+| ISO19139 | http://www.isotc211.org/2005/gmd |
+| W3C DCAT | http://www.w3.org/ns/dcat#core |
+| EU-DCAT-AP | http://data.europa.eu/r5r/ |
+| EU-GeoDCAT-AP | http://data.europa.eu/930/ |
+| EU-GeoDCAT-AP (SEMIC) | http://data.europa.eu/930/#semiceu |
+| DCAT (past implementation - deprecated) | http://www.w3.org/ns/dcat# |
+| EU-DCAT-AP-HVD | https://semiceu.github.io/DCAT-AP/releases/2.2.0-hvd/ |
+| EU-DCAT-AP-Mobility | https://w3id.org/mobilitydcat-ap |
+
+When using GET request, it is recommended to encode URL characters in parameters (eg. `#` as `%23`) to avoid issues with the URL.
+
+Those values are listed in the `GetCapabilities` operation http://localhost:8080/geonetwork/srv/eng/csw?SERVICE=CSW&VERSION=2.0.2&REQUEST=GetCapabilities.
+
+```xml
+
+ http://www.opengis.net/cat/csw/2.0.2
+ http://standards.iso.org/iso/19115/-3/mdb/2.0
+ http://www.isotc211.org/2005/gfc
+ http://www.isotc211.org/2005/gmd
+ http://data.europa.eu/930/
+ http://data.europa.eu/930/#semiceu
+ http://data.europa.eu/r5r/
+ http://www.w3.org/ns/dcat#
+ http://www.w3.org/ns/dcat#core
+ https://semiceu.github.io/DCAT-AP/releases/2.2.0-hvd/
+ https://w3id.org/mobilitydcat-ap
+```
+
+## Usage in OGC API Records
+
+For the time being, OGC API Records provides a simplified DCAT output (based on the index document).
+
+## DCAT validation
+
+The DCAT validation can be done using online validation tool:
+
+* https://www.itb.ec.europa.eu/shacl/dcat-ap/upload
+
+Depending on the target DCAT profile to use, it may be required to build proper ISO template and metadata record containing all required fields. Usually profiles are adding constraints for usage of specific vocabularies and fields (eg. [for High Value datasets, specific vocabularies are defined for categories, license, applicable legislations, ...](https://semiceu.github.io/DCAT-AP/releases/2.2.0-hvd/#controlled-vocabularies-to-be-used)).
+
+
+## Mapping considerations
+
+### Items under discussion
+
+
+The mapping is done from ISO19115-3 to DCAT. The mapping may not cover all usages and may be adapted. This can be done in the `iso19115-3.2018` schema plugin in the `formatter/dcat*` XSLT files.
+
+Some points under discussion are:
+
+#### Object vs Reference:
+
+* Should we use object or reference for some fields (eg. contact, organisation, ...)?
+* What should be the reference URI?
+* Where is the reference URI defined in ISO?
+
+eg.
+
+* for the CatalogRecord reference URI is the `metadataLinkage` or the `metadataIdentifier`.
+* for the Resource reference URI is the first resource identifier or the CatalogRecord reference URI with `#resource` suffix.
+* for an organisation, the URI will be the first value in the following sequence:
+
+```xml
+(cit:partyIdentifier/*/mcc:code/*/text(),
+cit:contactInfo/*/cit:onlineResource/*/cit:linkage/gco:CharacterString/text(),
+cit:name/gcx:Anchor/@xlink:href,
+@uuid)[1]
+```
+
+#### Distribution model in DCAT and ISO
+
+In DCAT, a number of properties from the dataset are also defined in the distribution elements.
+In ISO, an option could be to use multiple transfer options elements to create multiple distribution elements with more detailed information in DCAT (eg. transfer size).
+
+In the mapping, should we repeat all the information about the dataset? Should we recommend using multiple transfer options elements in ISO?
+
+#### No equivalent field in ISO
+
+eg. Where to store `spdx:checksum` in ISO? Could be considered as an online resource id attribute as the checksum uniquely identify the resource.
+
+
+#### Associated resources
+
+Links between resources are not always bidirectional so using the associated API would allow to populate more relations.
+This is also mitigated when the complete RDF graph of the catalogue is retrieved as it will provide relations from all records.
+
+
+### EU DCAT AP High Value Datasets
+
+When encoding in ISO datasets in the context of DCAT HVD, consider encoding the following properties:
+
+* Add a keyword pointing to the legislation [http://data.europa.eu/eli/reg_impl/2023/138/oj](http://data.europa.eu/eli/reg_impl/2023/138/oj)
+* Add at least one keyword for the `dcatap:hvdCategory` from the [High-value dataset categories vocabulary](https://op.europa.eu/en/web/eu-vocabularies/dataset/-/resource?uri=http://publications.europa.eu/resource/dataset/high-value-dataset-category)
+
+See [DCAT AP HVD specification](https://semiceu.github.io/DCAT-AP/releases/2.2.0-hvd/) for other requirements.
+
+### EU DCAT mobility
+
+When encoding in ISO datasets in the context of DCAT Mobility, consider encoding the following properties:
+
+* `mobilitydcatap:mobilityTheme` (mandatory) is encoded as a keyword from the [mobility theme vocabulary](https://w3id.org/mobilitydcat-ap/mobility-theme)
+* `mobilitydcatap:georeferencingMethod` (recommended) is encoded as a keyword from the [mobility georeferencing method vocabulary](https://w3id.org/mobilitydcat-ap/georeferencing-method/)
+* `mobilitydcatap:networkCoverage` (recommended) is encoded as a keyword from the [mobility network coverage vocabulary](https://w3id.org/mobilitydcat-ap/network-coverage)
+* `mobilitydcatap:transportMode` (recommended) is encoded as a keyword from the [mobility transport mode vocabulary](https://w3id.org/mobilitydcat-ap/transport-mode)
+
+See [DCAT AP Mobility specification](https://mobilitydcat-ap.github.io/mobilityDCAT-AP/releases/) for other requirements.
+
+
+### SEMIC conversion compared to GeoNetwork conversion
+
+The main difference between the 2 conversions is that the GeoNetwork conversion **starts from ISO19115-3 instead of ISO19139** (to better support additional information provided in ISO19115-3 eg. date lifecycle, party identifiers and citation in data quality, feature catalogue, additional documentation, portrayal sections). **The conversion to GeoDCAT-AP is done as an extension of DCAT-AP which extends the core W3C DCAT** for easier customization and extension. This allows non EU countries to also use the base DCAT conversion. The conversion is less linear and easier to extend or customize.
+
+SEMIC conversion parameters `core`, `extended`, `include-deprecated` are not available in the GeoNetwork conversion which focuses on version 3 of GeoDCAT-AP.
+
+Some of the differences in the GeoNetwork conversion are:
+
+* CatalogRecord / `dct:identifier` is prefixed with the code space if defined.
+* CatalogRecord / `dct:title` and `dct:description` are set at CatalogRecord level and at the Resource level
+* Resource / First resource identifier is used for `dct:identifier` (MobilityDCAT restricts it to 0..1), then additional ones are encoded in `adms:identifier`
+* Resource / `dct:spatial` is only encoded using a `dcat:bbox` in GeoJSON (instead of WKT and GML and GeoJson and the `locn:geometry` which was kept for backward compatibility with GeoDCAT-AP v1.*)
+* Resource / `dct:temporal` is only encoded using a `dcat:startDate` and `dcat:endDate` (and do not add same information in `schemas:startDate` and `schemas:endDate` which was kept for backward compatibility with GeoDCAT-AP v1.*)
+* Portrayal, specification, report online link are encoded using `foaf:page` instead of `foaf:landingPage`
+* `prov:qualifiedAttribution` elements are not created because `dcat:creator|publisher|contactPoint|..` already provide the same information.
+* Keyword / When encoded with `Anchor`, `dcat:theme` is encoded with only a reference in the SEMIC conversion and using `skos:Concept` in the GeoNetwork conversion (see discussion point above)
+
+```xml
+
+vs
+
+
+ Données de base (autre)
+
+
+```
+
+Additional properties supported:
+
+* CatalogRecord / `dct:issued` is added if exists in the metadata (added in ISO19115-3)
+* CatalogRecord / `dct:language` is added if exists
+* CatalogRecord / `cnt:characterEncoding` is added if exists
+* Resource / `graphicOverview` is encoded as `foaf:page`
+* Resource / Associated resources
+ * Source dataset are encoded using `dct:source`
+ * Associated resource are encoded using `dct:relation` and subtypes (eg. `isPartOf`)
+* Party identifier (added in ISO19115-3) are used for `rdf:about` attribute for individual or organization
+
+Technical differences:
+
+* `normalize-space` is not applied to `abstract` or `lineage` (which lose the line breaks and basic formatting)
-1. We recommend migrating to use of [Catalog Service for the Web (CSW)](csw.md) API to query and explore data.
-2. When downloading using `GetRecord` make use of the `application/rdf+xml; charset=UTF-8` output format.
-
- This will allow retrieving records in the same document format as previously provided by RDF DCAT api.
diff --git a/docs/manual/docs/customizing-application/configuring-faceted-search.md b/docs/manual/docs/customizing-application/configuring-faceted-search.md
index 292739175bc..15a35e166ec 100644
--- a/docs/manual/docs/customizing-application/configuring-faceted-search.md
+++ b/docs/manual/docs/customizing-application/configuring-faceted-search.md
@@ -338,6 +338,23 @@ When using a generic field like `tag.default` and including only a subset of key
},
```
+To translate the label `IDP_TOPICS`, 2 options:
+
+* Use the translation API to add your custom translation in the database for the facet key `facet-IDP_TOPICS` (see the Admin console --> Settings --> Languages).
+* Or declare a meta property `labels` in the facet configuration:
+
+``` js
+"IDP_TOPICS": {
+ "terms": {
+ ...
+ "meta": {
+ "labels": {
+ "eng": "IDP topics",
+ "fre": "Thèmes IDP"
+ },
+```
+
+
## Decorate aggregations {#configuring-facet-decorator}
All aggregations can be decorated by an icon or an image in the home page or in other pages. The decorator is configured in the `meta` properties of the facet:
diff --git a/docs/manual/docs/install-guide/installing-index.md b/docs/manual/docs/install-guide/installing-index.md
index e3ea8950d68..870e764f3ed 100644
--- a/docs/manual/docs/install-guide/installing-index.md
+++ b/docs/manual/docs/install-guide/installing-index.md
@@ -1,38 +1,43 @@
# Installing search platform
-The GeoNetwork search engine is built on top of Elasticsearch. The platform is used to index records and also to analyze WFS data (See [Analyze and visualize data](../user-guide/analyzing/data.md) ).
+The GeoNetwork search engine is built on top of Elasticsearch. The platform is used to index records and also to index WFS data (See [Analyze and visualize data](../user-guide/analyzing/data.md) ).
GeoNetwork requires an [Elasticsearch](https://www.elastic.co/products/elasticsearch) instance to be installed next to the catalog.
+
## Elasticsearch compatibility
+Elasticsearch Java client version: 8.14.3
+
| Elasticsearch Version | Compatibility |
|-----------------------| ------------- |
-| Elasticsearch 7.15.x | minimum |
-| Elasticsearch 8.11.3 | tested |
+| Elasticsearch 8.14.3 | recommended |
+| Elasticsearch 8.14.x | minimum |
+
+Older versions may be supported but are untested.
## Installation
=== "Manual installation"
- 1. **Download:** Elasticsearch 8.x (`8.11.3` tested, minimum `7.15.x`) from and unzip the file.
+    1. **Download:** Elasticsearch `8.14.3` from <https://www.elastic.co/downloads/elasticsearch> and unzip the file.
``` shell
- wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-8.11.3.tar.gz
- tar xvfz elasticsearch-8.11.3.tar.gz
+ wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-8.14.3.tar.gz
+ tar xvfz elasticsearch-8.14.3.tar.gz
```
2. **Start**: Manually start Elasticsearch using:
``` shell
- elasticsearch-8.11.3/bin/elasticsearch
+ elasticsearch-8.14.3/bin/elasticsearch
```
3. **Stop**: Manually stop Elasticsearch using:
``` shell
- elasticsearch-8.11.3/bin/elasticsearch stop
+ elasticsearch-8.14.3/bin/elasticsearch stop
```
=== "Install using Maven"
diff --git a/docs/manual/docs/overview/authors.md b/docs/manual/docs/overview/authors.md
index 60099cdf93a..106fba51a02 100644
--- a/docs/manual/docs/overview/authors.md
+++ b/docs/manual/docs/overview/authors.md
@@ -9,17 +9,17 @@ In brief the committee votes on proposals on the geonetwork-dev mailinglist. Pro
### Members of the Project Steering Committee
- Jeroen Ticheler (jeroen ticheler * geocat net) [GeoCat](https://www.geocat.net) - Chair
-- Francois Prunayre [Titellus](https://titellus.net)
- Simon Pigot [CSIRO](https://www.csiro.au)
- Florent Gravin [CamptoCamp](https://camptocamp.com)
- Jose Garcia [GeoCat](https://www.geocat.net)
-- Jo Cook [Astun Technology](https://www.astuntechnology.com)
- Paul van Genuchten [ISRIC](https://www.isric.org)
### Former members of the PSC
+- Jo Cook [Astun Technology](https://www.astuntechnology.com)
- Patrizia Monteduro (Patrizia Monteduro * fao org) [FAO-UN](https://www.fao.org)
- Emanuele Tajariol (e tajariol * mclink it - GeoSolutions)
+- Francois Prunayre
- Jesse Eichar
- Andrea Carboni (acarboni * crisalis-tech com - Independent consultant)
- Archie Warnock (warnock * awcubed com) [A/WWW Enterprises](https://www.awcubed.com)
diff --git a/docs/manual/docs/overview/change-log/history/index.md b/docs/manual/docs/overview/change-log/history/index.md
index 298f544c401..2401afe752f 100644
--- a/docs/manual/docs/overview/change-log/history/index.md
+++ b/docs/manual/docs/overview/change-log/history/index.md
@@ -12,6 +12,8 @@ This series is under **active development** by our community, with new features,
### 4.4
+- [Version 4.4.6](../version-4.4.6.md)
+- [Version 4.4.5](../version-4.4.5.md)
- [Version 4.4.4](../version-4.4.4.md)
- [Version 4.4.3](../version-4.4.3.md)
- [Version 4.4.2](../version-4.4.2.md)
@@ -26,6 +28,8 @@ This series is under **active use** by our community, with regular improvements,
### 4.2
+- [Version 4.2.11](../version-4.2.11.md)
+- [Version 4.2.10](../version-4.2.10.md)
- [Version 4.2.9](../version-4.2.9.md)
- [Version 4.2.8](../version-4.2.8.md)
- [Version 4.2.7](../version-4.2.7.md)
diff --git a/docs/manual/docs/overview/change-log/index.md b/docs/manual/docs/overview/change-log/index.md
index cfb550e923d..5d0623fa2e6 100644
--- a/docs/manual/docs/overview/change-log/index.md
+++ b/docs/manual/docs/overview/change-log/index.md
@@ -3,5 +3,5 @@
Notable changes made to GeoNetwork opensource including new features, migration instructions, and bug fixes.
- [Version 4.4.5](version-4.4.4.md)
-- [Version 4.2.9](version-4.2.9.md)
+- [Version 4.2.11](version-4.2.11.md)
- [Release History](history/index.md)
diff --git a/docs/manual/docs/overview/change-log/version-4.2.11.md b/docs/manual/docs/overview/change-log/version-4.2.11.md
new file mode 100644
index 00000000000..88d7e7f702d
--- /dev/null
+++ b/docs/manual/docs/overview/change-log/version-4.2.11.md
@@ -0,0 +1,18 @@
+# Version 4.2.11 {#version-4211}
+
+GeoNetwork 4.2.11 is a minor release.
+
+## List of changes
+
+Release highlights:
+
+- [Upgrade jQuery to version 3.7.1](https://github.com/geonetwork/core-geonetwork/pull/8105)
+- [CSW / Fix parsing date values for filters](https://github.com/geonetwork/core-geonetwork/pull/8417)
+- [Fix harvester execution logs added to previous logs](https://github.com/geonetwork/core-geonetwork/pull/8388)
+- [Register user / allow configuring allowed email domains](https://github.com/geonetwork/core-geonetwork/pull/8207)
+- [Register user / allow to select the group where the user wants to register](https://github.com/geonetwork/core-geonetwork/pull/8195)
+- [WebDav harvester / Add support for XSLT filter process](https://github.com/geonetwork/core-geonetwork/pull/8423)
+- [Update home page "browse by" to display facet as label if there is only one](https://github.com/geonetwork/core-geonetwork/pull/8449)
+- [Use UI language for metadata selection export to CSV / PDF](https://github.com/geonetwork/core-geonetwork/pull/8274)
+
+and more \... see [4.2.11 issues](https://github.com/geonetwork/core-geonetwork/issues?q=is%3Aissue+milestone%3A4.2.11+is%3Aclosed) and [pull requests](https://github.com/geonetwork/core-geonetwork/pulls?q=is%3Apr+milestone%3A4.2.11+is%3Aclosed) for full details.
diff --git a/docs/manual/docs/overview/change-log/version-4.4.6.md b/docs/manual/docs/overview/change-log/version-4.4.6.md
new file mode 100644
index 00000000000..1e9a6a2a214
--- /dev/null
+++ b/docs/manual/docs/overview/change-log/version-4.4.6.md
@@ -0,0 +1,35 @@
+# Version 4.4.6 {#version-446}
+
+GeoNetwork 4.4.6 is a minor release.
+
+## Update notes
+
+When updating please review the following actions:
+
+### Index changes
+
+Due to [Elasticsearch update to 8.14.3](https://github.com/geonetwork/core-geonetwork/pull/8337) it is recommended to use 8.14.x version of Elasticsearch server.
+
+After updating use **Admin Console > Tools** and use **Delete index and reindex**:
+
+
+## List of changes
+
+Major changes:
+
+* [Add support for external management named properties in JCloud](https://github.com/geonetwork/core-geonetwork/pull/8357)
+
+* [Use UI language for metadata selection export to CSV / PDF](https://github.com/geonetwork/core-geonetwork/pull/8262)
+
+
+* [WebDav harvester / Add support for XSLT filter process](https://github.com/geonetwork/core-geonetwork/pull/8243)
+
+* [Register user / allow configuring allowed email domains](https://github.com/geonetwork/core-geonetwork/pull/8186)
+
+* [Register user / allow to select the group where the user wants to register](https://github.com/geonetwork/core-geonetwork/pull/8176)
+
+* [Support multiple DOI servers](https://github.com/geonetwork/core-geonetwork/pull/8098)
+
+* [Standard / DCAT (and profiles) export ](https://github.com/geonetwork/core-geonetwork/pull/7600)
+
+and more \... see [4.4.6 issues](https://github.com/geonetwork/core-geonetwork/issues?q=is%3Aissue+milestone%3A4.4.6+is%3Aclosed) and [pull requests](https://github.com/geonetwork/core-geonetwork/pulls?page=3&q=is%3Apr+milestone%3A4.4.6+is%3Aclosed) for full details.
diff --git a/docs/manual/docs/user-guide/associating-resources/doi.md b/docs/manual/docs/user-guide/associating-resources/doi.md
index cb57e8f9589..3f61601ebca 100644
--- a/docs/manual/docs/user-guide/associating-resources/doi.md
+++ b/docs/manual/docs/user-guide/associating-resources/doi.md
@@ -21,7 +21,7 @@ Providing the following information:
- `Final DOI URL prefix`: (Optional) Keep it empty to use the default https://doi.org prefix. Use https://mds.test.datacite.org/doi when using the test API.
- `DOI pattern`: Default is `{{uuid}}` but the DOI structure can be customized with database id and/or record group eg. `example-{{groupOwner}}-{{id}}`.
- `DataCite prefix`: Usually looks like `10.xxxx`. You will be allowed to register DOI names only under the prefixes that have been assigned to you.
-- `Publication groups`: (Optional) Select the groups which metadata should be published to the DOI server. If no groups are selected, the server will be provided to publish the metadata that has no other DOI servers related to the metadata owner group.
+- `Record groups`: (Optional) When creating a DOI, only DOI server(s) associated with the record group are proposed. If the record group is not associated with any DOI servers, then DOI servers with no group are proposed.
A record can be downloaded using the DataCite format from the API using:
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-csw.md b/docs/manual/docs/user-guide/harvesting/harvesting-csw.md
index 614687eb471..dc94a777d4a 100644
--- a/docs/manual/docs/user-guide/harvesting/harvesting-csw.md
+++ b/docs/manual/docs/user-guide/harvesting/harvesting-csw.md
@@ -4,16 +4,38 @@ This harvester will connect to a remote CSW server and retrieve metadata records
## Adding a CSW harvester
-The figure above shows the options available:
-
-- **Site** - Options about the remote site.
- - *Name* - This is a short description of the remote site. It will be shown in the harvesting main page as the name for this instance of the CSW harvester.
- - *Service URL* - The URL of the capabilities document of the CSW server to be harvested. eg. . This document is used to discover the location of the services to call to query and retrieve metadata.
- - *Icon* - An icon to assign to harvested metadata. The icon will be used when showing harvested metadata records in the search results.
- - *Use account* - Account credentials for basic HTTP authentication on the CSW server.
-- **Search criteria** - Using the Add button, you can add several search criteria. You can query only the fields recognised by the CSW protocol.
-- **Options** - Scheduling options.
-- **Options** - Specific harvesting options for this harvester.
- - *Validate* - If checked, the metadata will be validated after retrieval. If the validation does not pass, the metadata will be skipped.
+To create a CSW harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `CSW`:
+
+![](img/add-csw-harvester.png)
+
+Providing the following information:
+
+- **Identification**
+ - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester.
+ - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester.
+ - *User*: User who owns the harvested records.
+
+- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)).
+
+- **Configure connection to OGC CSW 2.0.2**
+ - *Service URL*: The URL of the capabilities document of the CSW server to be harvested. eg. . This document is used to discover the location of the services to call to query and retrieve metadata.
+    - *Remote authentication*: If checked, credentials for basic HTTP authentication on the CSW server should be provided.
+ - *Search filter*: (Optional) Define the search criteria below to restrict the records to harvest.
+ - *Search options*:
+ - *Sort by*: Define sort option to retrieve the results. Sorting by 'identifier:A' means by UUID with alphabetical order. Any CSW queryables can be used in combination with A or D for setting the ordering.
+ - *Output Schema*: The metadata standard to request the metadata records from the CSW server.
+        - *Distributed search*: Enables the distributed search in remote server (if the remote server supports it). When this option is enabled, the remote catalog cascades the search to the Federated CSW servers that it has configured.
+
+- **Configure response processing for CSW**
+    - *Action on UUID collision*: When a harvester finds the same uuid on a record collected by another method (another harvester, importer, dashboard editor,...), should this record be skipped (default), overridden or generate a new UUID?
+ - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron).
+ - Accept all metadata without validation.
+ - Accept metadata that are XSD valid.
+ - Accept metadata that are XSD and schematron valid.
+ - *Check for duplicate resources based on the resource identifier*: If checked, ignores metadata with a resource identifier (`gmd:identificationInfo/*/gmd:citation/gmd:CI_Citation/gmd:identifier/*/gmd:code/gco:CharacterString`) that is assigned to other metadata record in the catalog. It only applies to records in ISO19139 or ISO profiles.
+    - *XPath filter*: (Optional) When a record is retrieved from the remote server, check an XPath expression to accept or discard the record.
+ - *XSL transformation to apply*: (Optional) The referenced XSL transform will be applied to each metadata record before it is added to GeoNetwork.
+    - *Batch edits*: (Optional) Allows updating harvested records, using XPATH syntax. It can be used to add, replace or delete elements.
+ - *Category*: (Optional) A GeoNetwork category to assign to each metadata record.
+
- **Privileges** - Assign privileges to harvested metadata.
-- **Categories**
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-filesystem.md b/docs/manual/docs/user-guide/harvesting/harvesting-filesystem.md
index 5e0b6b3ab54..900deeafc4c 100644
--- a/docs/manual/docs/user-guide/harvesting/harvesting-filesystem.md
+++ b/docs/manual/docs/user-guide/harvesting/harvesting-filesystem.md
@@ -4,21 +4,35 @@ This harvester will harvest metadata as XML files from a filesystem available on
## Adding a Local File System harvester
-The figure above shows the options available:
-
-- **Site** - Options about the remote site.
- - *Name* - This is a short description of the filesystem harvester. It will be shown in the harvesting main page as the name for this instance of the Local Filesystem harvester.
- - *Directory* - The path name of the directory containing the metadata (as XML files) to be harvested.
- - *Recurse* - If checked and the *Directory* path contains other directories, then the harvester will traverse the entire file system tree in that directory and add all metadata files found.
- - *Keep local if deleted at source* - If checked then metadata records that have already been harvested will be kept even if they have been deleted from the *Directory* specified.
- - *Icon* - An icon to assign to harvested metadata. The icon will be used when showing harvested metadata records in the search results.
-- **Options** - Scheduling options.
-- **Harvested Content** - Options that are applied to harvested content.
- - *Apply this XSLT to harvested records* - Choose an XSLT here that will convert harvested records to a different format.
- - *Validate* - If checked, the metadata will be validated after retrieval. If the validation does not pass, the metadata will be skipped.
-- **Privileges** - Assign privileges to harvested metadata.
-- **Categories**
+To create a Local File System harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `Directory`:
+
+![](img/add-filesystem-harvester.png)
+
+Providing the following information:
-!!! Notes
+- **Identification**
+ - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester.
+ - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester.
+ - *User*: User who owns the harvested records.
- - in order to be successfully harvested, metadata records retrieved from the file system must match a metadata schema in the local GeoNetwork instance
+- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)).
+
+- **Configure connection to Directory**
+ - *Directory*: The path name of the directory containing the metadata (as XML files) to be harvested. The directory must be accessible by GeoNetwork.
+ - *Also search in subfolders*: If checked and the *Directory* path contains other directories, then the harvester will traverse the entire file system tree in that directory and add all metadata files found.
+ - *Script to run before harvesting*
+ - *Type of record*
+
+- **Configure response processing for filesystem**
+    - *Action on UUID collision*: When a harvester finds the same uuid on a record collected by another method (another harvester, importer, dashboard editor,...), should this record be skipped (default), overridden or generate a new UUID?
+ - *Update catalog record only if file was updated*
+ - *Keep local even if deleted at source*: If checked then metadata records that have already been harvested will be kept even if they have been deleted from the *Directory* specified.
+ - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron).
+ - Accept all metadata without validation.
+ - Accept metadata that are XSD valid.
+ - Accept metadata that are XSD and schematron valid.
+ - *XSL transformation to apply*: (Optional) The referenced XSL transform will be applied to each metadata record before it is added to GeoNetwork.
+    - *Batch edits*: (Optional) Allows updating harvested records, using XPATH syntax. It can be used to add, replace or delete elements.
+ - *Category*: (Optional) A GeoNetwork category to assign to each metadata record.
+
+- **Privileges** - Assign privileges to harvested metadata.
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-geonetwork-2.md b/docs/manual/docs/user-guide/harvesting/harvesting-geonetwork-2.md
new file mode 100644
index 00000000000..de085a9bb9b
--- /dev/null
+++ b/docs/manual/docs/user-guide/harvesting/harvesting-geonetwork-2.md
@@ -0,0 +1,9 @@
+# GeoNetwork 2.0 Harvester {#gn2_harvester}
+
+## Upgrading from GeoNetwork 2.0 Guidance
+
+GeoNetwork 2.1 introduced a new powerful harvesting engine which is not compatible with GeoNetwork version 2.0 based catalogues.
+
+* Harvesting metadata from a v2.0 server requires this harvesting type.
+* Old 2.0 servers can still harvest from 2.1 servers
+* Due to the fact that GeoNetwork 2.0 is no longer suitable for production use, this harvesting type is deprecated.
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-geonetwork.md b/docs/manual/docs/user-guide/harvesting/harvesting-geonetwork.md
index de085a9bb9b..3c692b5e3ec 100644
--- a/docs/manual/docs/user-guide/harvesting/harvesting-geonetwork.md
+++ b/docs/manual/docs/user-guide/harvesting/harvesting-geonetwork.md
@@ -1,9 +1,43 @@
-# GeoNetwork 2.0 Harvester {#gn2_harvester}
+# GeoNetwork 2.1-3.X Harvester
-## Upgrading from GeoNetwork 2.0 Guidance
+This harvester will connect to a remote GeoNetwork server that uses versions from 2.1-3.X and retrieve metadata records that match the query parameters.
-GeoNetwork 2.1 introduced a new powerful harvesting engine which is not compatible with GeoNetwork version 2.0 based catalogues.
+## Adding a GeoNetwork 2.1-3.X harvester
-* Harvesting metadata from a v2.0 server requires this harvesting type.
-* Old 2.0 servers can still harvest from 2.1 servers
-* Due to the fact that GeoNetwork 2.0 is no longer suitable for production use, this harvesting type is deprecated.
+To create a GeoNetwork 2.1-3.X harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `GeoNetwork (from 2.1 to 3.x)`:
+
+![](img/add-geonetwork-3-harvester.png)
+
+Providing the following information:
+
+- **Identification**
+ - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester.
+ - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester.
+ - *User*: User who owns the harvested records.
+
+- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)).
+
+- **Configure connection to GeoNetwork (from 2.1 to 3.x)**
+ - *Catalog URL*:
+ - The remote URL of the GeoNetwork server from which metadata will be harvested. The URL should contain the catalog name, for example: http://www.fao.org/geonetwork.
+        - Additionally, the node name should be configured, usually the value `srv`.
+ - *Search filter*: (Optional) Define the filter to retrieve the remote metadata.
+ - *Catalog*: (Optional) Select the portal in the remote server to harvest.
+
+- **Configure response processing for GeoNetwork**
+    - *Action on UUID collision*: When a harvester finds the same uuid on a record collected by another method (another harvester, importer, dashboard editor,...), should this record be skipped (default), overridden or generate a new UUID?
+    - *Remote authentication*: If checked, credentials for basic HTTP authentication on the GeoNetwork server should be provided.
+    - *Use full MEF format*: If checked, uses MEF format instead of XML to retrieve the remote metadata. Recommended for metadata with files.
+ - *Use change date for comparison*: If checked, uses change date to detect changes on remote server.
+ - *Set category if it exists locally*: If checked, uses the category set on the metadata in the remote server also locally (assuming it exists locally). Applies only when using MEF format for the harvesting.
+ - *Category*: (Optional) A GeoNetwork category to assign to each metadata record.
+ - *XSL filter name to apply*: (Optional) The XSL filter is applied to each metadata record. The filter is a process which depends on the schema (see the `process` folder of the schemas).
+
+ It could be composed of parameter which will be sent to XSL transformation using the following syntax: `anonymizer?protocol=MYLOCALNETWORK:FILEPATH&email=gis@organisation.org&thesaurus=MYORGONLYTHEASURUS`
+
+ - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron).
+ - Accept all metadata without validation.
+ - Accept metadata that are XSD valid.
+ - Accept metadata that are XSD and schematron valid.
+
+- **Privileges** - Assign privileges to harvested metadata.
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-geoportal.md b/docs/manual/docs/user-guide/harvesting/harvesting-geoportal.md
index e8887286ea3..ec16a07b9ae 100644
--- a/docs/manual/docs/user-guide/harvesting/harvesting-geoportal.md
+++ b/docs/manual/docs/user-guide/harvesting/harvesting-geoportal.md
@@ -4,24 +4,38 @@ This harvester will connect to a remote GeoPortal version 9.3.x or 10.x server a
## Adding a GeoPortal REST harvester
-The figure above shows the options available:
-
-- **Site** - Options about the remote site.
- - *Name* - This is a short description of the remote site. It will be shown in the harvesting main page as the name for this instance of the GeoPortal REST harvester.
- - *Base URL* - The base URL of the GeoPortal server to be harvested. eg. . The harvester will add the additional path required to access the REST services on the GeoPortal server.
- - *Icon* - An icon to assign to harvested metadata. The icon will be used when showing harvested metadata records in the search results.
-- **Search criteria** - Using the Add button, you can add several search criteria. You can query any field on the GeoPortal server using the Lucene query syntax described at .
-- **Options** - Scheduling options.
-- **Harvested Content** - Options that are applied to harvested content.
- - *Apply this XSLT to harvested records* - Choose an XSLT here that will convert harvested records to a different format. See notes section below for typical usage.
- - *Validate* - If checked, the metadata will be validated after retrieval. If the validation does not pass, the metadata will be skipped.
+To create a GeoPortal REST harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `GeoPortal REST`:
+
+![](img/add-geoportalrest-harvester.png)
+
+Providing the following information:
+
+- **Identification**
+ - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester.
+ - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester.
+ - *User*: User who owns the harvested records.
+
+- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)).
+
+- **Configure connection to GeoPortal REST**
+ - *URL*: The base URL of the GeoPortal server to be harvested. eg. . The harvester will add the additional path required to access the REST services on the GeoPortal server.
+ - *Remote authentication*: If checked, should be provided the credentials for basic HTTP authentication on the server.
+ - *Search filter*: (Optional) You can query any field on the GeoPortal server using the Lucene query syntax described at .
+
+- **Configure response processing for geoPREST**
+ - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron).
+ - Accept all metadata without validation.
+ - Accept metadata that are XSD valid.
+ - Accept metadata that are XSD and schematron valid.
+ - *XSL transformation to apply*: (Optional) The referenced XSL transform will be applied to each metadata record before it is added to GeoNetwork.
+
- **Privileges** - Assign privileges to harvested metadata.
-- **Categories**
+
!!! Notes
- - this harvester uses two REST services from the GeoPortal API:
+ - This harvester uses two REST services from the GeoPortal API:
- `rest/find/document` with searchText parameter to return an RSS listing of metadata records that meet the search criteria (maximum 100000)
- `rest/document` with id parameter from each result returned in the RSS listing
- - this harvester has been tested with GeoPortal 9.3.x and 10.x. It can be used in preference to the CSW harvester if there are issues with the handling of the OGC standards etc.
- - typically ISO19115 metadata produced by the Geoportal software will not have a 'gmd' prefix for the namespace `http://www.isotc211.org/2005/gmd`. GeoNetwork XSLTs will not have any trouble understanding this metadata but will not be able to map titles and codelists in the viewer/editor. To fix this problem, please select the ``Add-gmd-prefix`` XSLT for the *Apply this XSLT to harvested records* in the **Harvested Content** set of options described earlier
+ - This harvester has been tested with GeoPortal 9.3.x and 10.x. It can be used in preference to the CSW harvester if there are issues with the handling of the OGC standards etc.
+ - Typically ISO19115 metadata produced by the Geoportal software will not have a 'gmd' prefix for the namespace `http://www.isotc211.org/2005/gmd`. GeoNetwork XSLTs will not have any trouble understanding this metadata but will not be able to map titles and codelists in the viewer/editor. To fix this problem, please select the ``Add-gmd-prefix`` XSLT for the *Apply this XSLT to harvested records* in the **Harvested Content** set of options described earlier
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-oaipmh.md b/docs/manual/docs/user-guide/harvesting/harvesting-oaipmh.md
index cf046363634..6c528feb7e2 100644
--- a/docs/manual/docs/user-guide/harvesting/harvesting-oaipmh.md
+++ b/docs/manual/docs/user-guide/harvesting/harvesting-oaipmh.md
@@ -1,36 +1,49 @@
# OAIPMH Harvesting {#oaipmh_harvester}
-This is a harvesting protocol that is widely used among libraries. GeoNetwork implements version 2.0 of the protocol.
+This is a harvesting protocol that is widely used among libraries. GeoNetwork implements version 2.0 of the protocol. An OAI-PMH server implements a harvesting protocol that GeoNetwork, acting as a client, can use to harvest metadata.
## Adding an OAI-PMH harvester
-An OAI-PMH server implements a harvesting protocol that GeoNetwork, acting as a client, can use to harvest metadata.
+To create an OAI-PMH harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `OAI/PMH`:
-Configuration options:
+![](img/add-oaipmh-harvester.png)
-- **Site** - Options describing the remote site.
- - *Name* - This is a short description of the remote site. It will be shown in the harvesting main page as the name for this instance of the OAIPMH harvester.
- - *URL* - The URL of the OAI-PMH server from which metadata will be harvested.
- - *Icon* - An icon to assign to harvested metadata. The icon will be used when showing search results.
- - *Use account* - Account credentials for basic HTTP authentication on the OAIPMH server.
-- **Search criteria** - This allows you to select metadata records for harvest based on certain criteria:
- - *From* - You can provide a start date here. Any metadata whose last change date is equal to or greater than this date will be harvested. To add or edit a value for this field you need to use the icon alongside the text box. This field is optional so if you don't provide a start date the constraint is dropped. Use the icon to clear the field.
- - *Until* - Functions in the same way as the *From* parameter but adds an end constraint to the last change date search. Any metadata whose last change data is less than or equal to this data will be harvested.
- - *Set* - An OAI-PMH server classifies metadata into sets (like categories in GeoNetwork). You can request all metadata records that belong to a set (and any of its subsets) by specifying the name of that set here.
- - *Prefix* - 'Prefix' means metadata format. The oai_dc prefix must be supported by all OAI-PMH compliant servers.
- - You can use the Add button to add more than one Search Criteria set. Search Criteria sets can be removed by clicking on the small cross at the top left of the set.
+Providing the following information:
-!!! note
+- **Identification**
+ - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester.
+ - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester.
+ - *User*: User who owns the harvested records.
- the 'OAI provider sets' drop down next to the *Set* text box and the 'OAI provider prefixes' drop down next to the *Prefix* textbox are initially blank. After specifying the connection URL, you can press the **Retrieve Info** button, which will connect to the remote OAI-PMH server, retrieve all supported sets and prefixes and fill the drop downs with these values. Selecting a value from either of these drop downs will fill the appropriate text box with the selected value.
+- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)).
+- **Configure connection to OAI/PMH**
+ - *URL*: The URL of the OAI-PMH server from which metadata will be harvested.
+    - *Remote authentication*: If checked, credentials for basic HTTP authentication on the OAIPMH server should be provided.
+ - *Search filter*: (Optional) Define the search criteria below to restrict the records to harvest.
+ - *From*: You can provide a start date here. Any metadata whose last change date is equal to or greater than this date will be harvested. To add or edit a value for this field you need to use the icon alongside the text box. This field is optional so if you don't provide a start date the constraint is dropped. Use the icon to clear the field.
+ - *Until*: Functions in the same way as the *From* parameter but adds an end constraint to the last change date search. Any metadata whose last change data is less than or equal to this data will be harvested.
+ - *Set*: An OAI-PMH server classifies metadata into sets (like categories in GeoNetwork). You can request all metadata records that belong to a set (and any of its subsets) by specifying the name of that set here.
+ - *Prefix*: 'Prefix' means metadata format. The oai_dc prefix must be supported by all OAI-PMH compliant servers.
+
+ !!! note
+
+ The 'OAI provider sets' drop down next to the *Set* text box and the 'OAI provider prefixes' drop down next to the *Prefix* textbox are initially blank. After specifying the connection URL, you can press the **Retrieve Info** button, which will connect to the remote OAI-PMH server, retrieve all supported sets and prefixes and fill the drop downs with these values. Selecting a value from either of these drop downs will fill the appropriate text box with the selected value.
+- **Configure response processing for oaipmh**
+    - *Action on UUID collision*: When a harvester finds the same uuid on a record collected by another method (another harvester, importer, dashboard editor,...), should this record be skipped (default), overridden or generate a new UUID?
+ - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron).
+ - Accept all metadata without validation.
+ - Accept metadata that are XSD valid.
+ - Accept metadata that are XSD and schematron valid.
+ - *XSL transformation to apply*: (Optional) The referenced XSL transform will be applied to each metadata record before it is added to GeoNetwork.
+
+ - *Category*: (Optional) A GeoNetwork category to assign to each metadata record.
+
+- **Privileges** - Assign privileges to harvested metadata.
-- **Options** - Scheduling Options.
-- **Privileges**
-- **Categories**
!!! Notes
- - if you request the oai_dc output format, GeoNetwork will convert it to Dublin Core format.
- - when you edit a previously created OAIPMH harvester instance, both the *set* and *prefix* drop down lists will be empty. You have to press the retrieve info button again to connect to the remote server and retrieve set and prefix information.
- - the id of the remote server must be a UUID. If not, metadata can be harvested but during hierarchical propagation id clashes could corrupt harvested metadata.
+ - If you request the oai_dc output format, GeoNetwork will convert it to Dublin Core format.
+ - When you edit a previously created OAIPMH harvester instance, both the *set* and *prefix* drop down lists will be empty. You have to press the retrieve info button again to connect to the remote server and retrieve set and prefix information.
+ - The id of the remote server must be a UUID. If not, metadata can be harvested but during hierarchical propagation id clashes could corrupt harvested metadata.
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-ogcwxs.md b/docs/manual/docs/user-guide/harvesting/harvesting-ogcwxs.md
index 52c88c134d4..70f45cf75d6 100644
--- a/docs/manual/docs/user-guide/harvesting/harvesting-ogcwxs.md
+++ b/docs/manual/docs/user-guide/harvesting/harvesting-ogcwxs.md
@@ -11,27 +11,46 @@ An OGC service implements a GetCapabilities operation that GeoNetwork, acting as
## Adding an OGC Service Harvester
-Configuration options:
-
-- **Site**
- - *Name* - The name of the catalogue and will be one of the search criteria.
- - *Type* - The type of OGC service indicates if the harvester has to query for a specific kind of service. Supported type are WMS (1.0.0, 1.1.1, 1.3.0), WFS (1.0.0 and 1.1.0), WCS (1.0.0), WPS (0.4.0 and 1.0.0), CSW (2.0.2) and SOS (1.0.0).
- - *Service URL* - The service URL is the URL of the service to contact (without parameters like "REQUEST=GetCapabilities", "VERSION=", \...). It has to be a valid URL like .
- - *Metadata language* - Required field that will define the language of the metadata. It should be the language used by the OGC web service administrator.
- - *ISO topic category* - Used to populate the topic category element in the metadata. It is recommended to choose one as the topic category is mandatory for the ISO19115/19139 standard if the hierarchical level is "datasets".
- - *Type of import* - By default, the harvester produces one service metadata record. Check boxes in this group determine the other metadata that will be produced.
- - *Create metadata for layer elements using GetCapabilities information*: Checking this option means that the harvester will loop over datasets served by the service as described in the GetCapabilities document.
- - *Create metadata for layer elements using MetadataURL attributes*: Checkthis option means that the harvester will generate metadata from an XML document referenced in the MetadataUrl attribute of the dataset in the GetCapabilities document. If the document referred to by this attribute is not valid (eg. unknown schema, bad XML format), the GetCapabilities document is used as per the previous option.
- - *Create thumbnails for WMS layers*: If harvesting from an OGC WMS, then checking this options means that thumbnails will be created during harvesting.
- - *Target schema* - The metadata schema of the dataset metadata records that will be created by this harvester.
- - *Icon* - The default icon displayed as attribution logo for metadata created by this harvester.
-- **Options** - Scheduling Options.
-- **Privileges**
-- **Category for service** - Metadata for the harvested service is assigned to the category selected in this option (eg. "interactive resources").
-- **Category for datasets** - Metadata for the harvested datasets is assigned to the category selected in this option (eg. "datasets").
+To create an OGC Service harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `OGC Web Services`:
+
+![](img/add-ogcwebservices-harvester.png)
+
+Providing the following information:
+
+- **Identification**
+ - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester.
+ - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester.
+ - *User*: User who owns the harvested records.
+
+- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)).
+
+- **Configure connection to OGC Web Services**
+ - *Service URL*: The service URL is the URL of the service to contact (without parameters like "REQUEST=GetCapabilities", "VERSION=", \...). It has to be a valid URL like .
+    - *Service type* - The type of OGC service indicates if the harvester has to query for a specific kind of service. Supported types are WMS (1.0.0, 1.1.1, 1.3.0), WFS (1.0.0 and 1.1.0), WCS (1.0.0), WPS (0.4.0 and 1.0.0), CSW (2.0.2) and SOS (1.0.0).
+    - *Remote authentication*: If checked, the credentials for basic HTTP authentication on the server should be provided.
+
+- **Configure response processing for ogcwxs**
+ - *Build service metadata record from a template*:
+ - *Category for service metadata*: (Optional) Metadata for the harvested service is assigned to the category selected in this option (eg. "interactive resources").
+ - *Create record for each layer only using GetCapabilities information*: Checking this option means that the harvester will loop over datasets served by the service as described in the GetCapabilities document.
+    - *Import record for each layer using MetadataURL attributes*: Checking this option means that the harvester will generate metadata from an XML document referenced in the MetadataUrl attribute of the dataset in the GetCapabilities document. If the document referred to by this attribute is not valid (eg. unknown schema, bad XML format), the GetCapabilities document is used as per the previous option.
+ - *Build dataset metadata records from a template*
+ - *Create thumbnail*: If checked, when harvesting from an OGC Web Map Service (WMS) that supports WGS84 projection, thumbnails for the layers metadata will be created during harvesting.
+ - *Category for datasets*: Metadata for the harvested datasets is assigned to the category selected in this option (eg. "datasets").
+
+ - *ISO category*: (Optional) Used to populate the topic category element in the metadata. It is recommended to choose one as the topic category is mandatory for the ISO19115/19139 standard if the hierarchical level is "datasets".
+ - *Metadata language*: Required field that will define the language of the metadata. It should be the language used by the OGC web service administrator.
+ - *Output schema*: The metadata schema of the dataset metadata records that will be created by this harvester. The value should be an XSLT process which is used by the harvester to convert the GetCapabilities document to metadata records from that schema. If in doubt, use the default value `iso19139`.
+ - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron).
+ - Accept all metadata without validation.
+ - Accept metadata that are XSD valid.
+ - Accept metadata that are XSD and schematron valid.
+ - *XSL transformation to apply*: (Optional) The referenced XSL transform will be applied to each metadata record before it is added to GeoNetwork.
+
+
+- **Privileges** - Assign privileges to harvested metadata.
+
!!! Notes
- - every time the harvester runs, it will remove previously harvested records and create new records. GeoNetwork will generate the uuid for all metadata (both service and datasets). The exception to this rule is dataset metadata created using the MetadataUrl tag is in the GetCapabilities document, in that case, the uuid of the remote XML document is used instead
- - thumbnails can only be generated when harvesting an OGC Web Map Service (WMS). The WMS should support the WGS84 projection
- - the chosen *Target schema* must have the support XSLTs which are used by the harvester to convert the GetCapabilities statement to metadata records from that schema. If in doubt, use iso19139.
+    - Every time the harvester runs, it will remove previously harvested records and create new records. GeoNetwork will generate the uuid for all metadata (both service and datasets). The exception to this rule is dataset metadata created using the MetadataUrl tag in the GetCapabilities document; in that case, the uuid of the remote XML document is used instead.
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-sde.md b/docs/manual/docs/user-guide/harvesting/harvesting-sde.md
index 7f4f99cb913..32cdd4df780 100644
--- a/docs/manual/docs/user-guide/harvesting/harvesting-sde.md
+++ b/docs/manual/docs/user-guide/harvesting/harvesting-sde.md
@@ -1,55 +1,60 @@
# Harvesting an ARCSDE Node {#sde_harvester}
-This is a harvesting protocol for metadata stored in an ArcSDE installation.
+This is a harvesting protocol for metadata stored in an ArcSDE installation. The harvester identifies the ESRI metadata format: ESRI ISO, ESRI FGDC to apply the required xslts to transform metadata to ISO19139.
## Adding an ArcSDE harvester
-The harvester identifies the ESRI metadata format: ESRI ISO, ESRI FGDC to apply the required xslts to transform metadata to ISO19139. Configuration options:
+To create an ArcSDE harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `ArcSDE`:
+
+![](img/add-arcsde-harvester.png)
+
+Providing the following information:
- **Identification**
- - *Name* - This is a short description of the node. It will be shown in the harvesting main page.
- - *Group* - User admin of this group and catalog administrator can manage this node.
- - *Harvester user* - User that owns the harvested metadata.
-- **Schedule** - Schedule configuration to execute the harvester.
-- **Configuration for protocol ArcSDE**
- - *Server* - ArcSde server IP address or name.
- - *Port* - ArcSde service port (typically 5151) or ArcSde database port, depending on the connection type selected, see below the *Connection type* section.
- - *Database name* - ArcSDE instance name (typically esri_sde).
- - *ArcSde version* - ArcSde version to harvest. The data model used by ArcSde is different depending on the ArcSde version.
+ - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester.
+ - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester.
+ - *User*: User who owns the harvested records.
+
+- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)).
+
+- **Configure connection to Database**
+ - *Server*: ArcSDE server IP address or name.
+ - *Port*: ArcSDE service port (typically 5151) or ArcSDE database port, depending on the connection type selected, see below the *Connection type* section.
+ - *Database name*: ArcSDE instance name (typically esri_sde).
+    - *ArcSDE version*: ArcSDE version to harvest. The data model used by ArcSDE is different depending on the ArcSDE version.
- *Connection type*
- - *ArcSde service* - Uses the ArcSde service to retrieve the metadata.
+ - *ArcSDE service*: Uses the ArcSDE service to retrieve the metadata.
!!! note
- Additional installation steps are required to use the ArcSDE harvester because it needs proprietary ESRI Java api jars to be installed.
-
- ArcSDE Java API libraries need to be installed by the user in GeoNetwork (folder INSTALL_DIR_GEONETWORK/WEB-INF/lib), as these are proprietary libraries not distributed with GeoNetwork.
-
- The following jars are required:
-
- - jpe_sdk.jar
- - jsde_sdk.jar
-
- dummy-api-XXX.jar must be removed from INSTALL_DIR/web/geonetwork/WEB-INF/lib
+ Additional installation steps are required to use the ArcSDE harvester because it needs proprietary ESRI Java api jars to be installed.
+ ArcSDE Java API libraries need to be installed by the user in GeoNetwork (folder `INSTALL_DIR_GEONETWORK/WEB-INF/lib`), as these are proprietary libraries not distributed with GeoNetwork.
- - *Database direct connection* - Uses a database connection (JDBC) to retrieve the metadata. With
+ The following jars are required:
- !!! note
+ - jpe_sdk.jar
+ - jsde_sdk.jar
- Database direct connection requires to copy JDBC drivers in INSTALL_DIR_GEONETWORK/WEB-INF/lib.
+ `dummy-api-XXX.jar` must be removed from `INSTALL_DIR/web/geonetwork/WEB-INF/lib`.
+ - *Database direct connection*: Uses a database connection (JDBC) to retrieve the metadata.
+
+ !!! note
+
+ Database direct connection requires to copy JDBC drivers in `INSTALL_DIR_GEONETWORK/WEB-INF/lib`.
!!! note
Postgres JDBC drivers are distributed with GeoNetwork, but not for Oracle or SqlServer.
- - *Database type* - ArcSde database type: Oracle, Postgres, SqlServer. Only available if connection type is configured to *Database direct connection*.
- - *Username* - Username to connect to ArcSDE server.
- - *Password* - Password of the ArcSDE user.
-- **Advanced options for protocol arcsde**
- - *Validate records before import* - Defines the criteria to reject metadata that is invalid according to XSD and schematron rules.
+    - *Database type*: ArcSDE database type: Oracle, Postgres, SqlServer. Only available if connection type is configured to *Database direct connection*.
+ - *Remote authentication*: Credentials to connect to the ArcSDE server.
+
+- **Configure response processing for arcsde**
+ - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron).
- Accept all metadata without validation.
- Accept metadata that are XSD valid.
- Accept metadata that are XSD and schematron valid.
+
- **Privileges** - Assign privileges to harvested metadata.
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-simpleurl.md b/docs/manual/docs/user-guide/harvesting/harvesting-simpleurl.md
index 775b4a9d1a9..e7243dc8421 100644
--- a/docs/manual/docs/user-guide/harvesting/harvesting-simpleurl.md
+++ b/docs/manual/docs/user-guide/harvesting/harvesting-simpleurl.md
@@ -4,47 +4,72 @@ This harvester connects to a remote server via a simple URL to retrieve metadata
## Adding a simple URL harvester
-- **Site** - Options about the remote site.
+To create a Simple URL harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `Simple URL`:
- - *Name* - This is a short description of the remote site. It will be shown in the harvesting main page as the name for this instance of the harvester.
- - *Service URL* - The URL of the server to be harvested. This can include pagination params like `?start=0&rows=20`
- - *loopElement* - Propery/element containing a list of the record entries. (Indicated as an absolute path from the document root.) eg. `/datasets`
- - *numberOfRecordPath* : Property indicating the total count of record entries. (Indicated as an absolute path from the document root.) eg. `/nhits`
- - *recordIdPath* : Property containing the record id. eg. `datasetid`
- - *pageFromParam* : Property indicating the first record item on the current "page" eg. `start`
- - *pageSizeParam* : Property indicating the number of records containned in the current "page" eg. `rows`
- - *toISOConversion* : Name of the conversion schema to use, which must be available as XSL on the GN instance. eg. `OPENDATASOFT-to-ISO19115-3-2018`
+![](img/add-simpleurl-harvester.png)
- !!! note
+Providing the following information:
- GN looks for schemas by name in . These schemas might internally include schemas from other locations like . To indicate the `fromJsonOpenDataSoft` schema for example, from the latter location directly in the admin UI the following syntax can be used: `schema:iso19115-3.2018:convert/fromJsonOpenDataSoft`.
+- **Identification**
+ - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester.
+ - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester.
+ - *User*: User who owns the harvested records.
+- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)).
- **Sample configuration for opendatasoft**
+- **Configure connection to Simple URL**
+ - *URL* - The URL of the server to be harvested. This can include pagination params like `?start=0&rows=20`
+    - *Remote authentication*: If checked, the credentials for basic HTTP authentication on the server should be provided.
+    - *Element to loop on*: Property/element containing a list of the record entries. (Indicated as an absolute path from the document root.) eg. `/datasets`
+ - *Element for the UUID of each record* : Property containing the record id. eg. `datasetid`
+ - *Pagination parameters*: (optional).
+ - *Element for the number of records to collect*: Property indicating the total count of record entries. (Indicated as an absolute path from the document root.) eg. `/nhits`
+ - *From URL parameter*: Property indicating the first record item on the current "page" eg. `start`
+      - *Size URL parameter*: Property indicating the number of records contained in the current "page" eg. `rows`
+
+- **Configure response processing for Simple URL**
- - *loopElement* - `/datasets`
- - *numberOfRecordPath* : `/nhits`
- - *recordIdPath* : `datasetid`
- - *pageFromParam* : `start`
- - *pageSizeParam* : `rows`
- - *toISOConversion* : `OPENDATASOFT-to-ISO19115-3-2018`
+ - *XSL transformation to apply*: Name of the conversion schema to use, which must be available as XSL on the GeoNetwork instance. eg. `OPENDATASOFT-to-ISO19115-3-2018`
- **Sample configuration for ESRI**
+ !!! note
- - *loopElement* - `/dataset`
- - *numberOfRecordPath* : `/result/count`
- - *recordIdPath* : `landingPage`
- - *pageFromParam* : `start`
- - *pageSizeParam* : `rows`
- - *toISOConversion* : `ESRIDCAT-to-ISO19115-3-2018`
+        GeoNetwork looks for conversion schemas by name in its schema plugin folders. These schemas might internally include schemas from other locations (for example, a schema plugin's `convert` folder). To indicate the `fromJsonOpenDataSoft` schema from the latter location directly in the admin UI, the following syntax can be used: `schema:iso19115-3.2018:convert/fromJsonOpenDataSoft`.
- **Sample configuration for DKAN**
-
- - *loopElement* - `/result/0`
- - *numberOfRecordPath* : `/result/count`
- - *recordIdPath* : `id`
- - *pageFromParam* : `start`
- - *pageSizeParam* : `rows`
- - *toISOConversion* : `DKAN-to-ISO19115-3-2018`
+  - *Batch edits*: (Optional) Allows to update harvested records, using XPATH syntax. It can be used to add, replace or delete elements.
+ - *Category*: (Optional) A GeoNetwork category to assign to each metadata record.
+ - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron).
+ - Accept all metadata without validation.
+ - Accept metadata that are XSD valid.
+ - Accept metadata that are XSD and schematron valid.
- **Privileges** - Assign privileges to harvested metadata.
+
+
+## Sample configurations
+
+### Sample configuration for opendatasoft
+
+- *Element to loop on* - `/datasets`
+- *Element for the number of records to collect* : `/nhits`
+- *Element for the UUID of each record* : `datasetid`
+- *From URL parameter* : `start`
+- *Size URL parameter* : `rows`
+- *XSL transformation to apply* : `OPENDATASOFT-to-ISO19115-3-2018`
+
+### Sample configuration for ESRI
+
+- *Element to loop on* - `/dataset`
+- *Element for the number of records to collect* : `/result/count`
+- *Element for the UUID of each record* : `landingPage`
+- *From URL parameter* : `start`
+- *Size URL parameter* : `rows`
+- *XSL transformation to apply* : `ESRIDCAT-to-ISO19115-3-2018`
+
+### Sample configuration for DKAN
+
+- *Element to loop on* - `/result/0`
+- *Element for the number of records to collect* : `/result/count`
+- *Element for the UUID of each record* : `id`
+- *From URL parameter* : `start`
+- *Size URL parameter* : `rows`
+- *XSL transformation to apply* : `DKAN-to-ISO19115-3-2018`
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-thredds.md b/docs/manual/docs/user-guide/harvesting/harvesting-thredds.md
index 2c988d58e34..bb4716c7508 100644
--- a/docs/manual/docs/user-guide/harvesting/harvesting-thredds.md
+++ b/docs/manual/docs/user-guide/harvesting/harvesting-thredds.md
@@ -4,35 +4,33 @@ THREDDS catalogs describe inventories of datasets. They are organised in a hiera
## Adding a THREDDS Catalog Harvester
-The available options are:
-
-- **Site**
- - *Name* - This is a short description of the THREDDS catalog. It will be shown in the harvesting main page as the name of this THREDDS harvester instance.
- - *Catalog URL* - The remote URL of the THREDDS Catalog from which metadata will be harvested. This must be the xml version of the catalog (i.e. ending with .xml). The harvester will crawl through all datasets and services defined in this catalog creating metadata for them as specified by the options described further below.
- - *Metadata language* - Use this option to specify the language of the metadata to be harvested.
- - *ISO topic category* - Use this option to specify the ISO topic category of service metadata.
- - *Create ISO19119 metadata for all services in catalog* - Select this option to generate iso19119 metadata for services defined in the THREDDS catalog (eg. OpenDAP, OGC WCS, ftp) and for the THREDDS catalog itself.
- - *Create metadata for Collection datasets* - Select this option to generate metadata for each collection dataset (THREDDS dataset containing other datasets). Creation of metadata can be customised using options that are displayed when this option is selected as described further below.
- - *Create metadata for Atomic datasets* - Select this option to generate metadata for each atomic dataset (THREDDS dataset not containing other datasets -- for example cataloguing a netCDF dataset). Creation of metadata can be customised using options that are displayed when this option is selected as described further below.
- - *Ignore harvesting attribute* - Select this option to harvest metadata for selected datasets regardless of the harvest attribute for the dataset in the THREDDS catalog. If this option is not selected, metadata will only be created for datasets that have a harvest attribute set to true.
- - *Extract DIF metadata elements and create ISO metadata* - Select this option to generate ISO metadata for datasets in the THREDDS catalog that have DIF metadata elements. When this option is selected a list of schemas is shown that have a DIFToISO.xsl stylesheet available (see for example `GEONETWORK_DATA_DIR/config/schema_plugins/iso19139/convert/DIFToISO.xsl`). Metadata is generated by reading the DIF metadata items in the THREDDS into a DIF format metadata record and then converting that DIF record to ISO using the DIFToISO stylesheet.
- - *Extract Unidata dataset discovery metadata using fragments* - Select this option when the metadata in your THREDDS or netCDF/ncml datasets follows Unidata dataset discovery conventions (see ). You will need to write your own stylesheets to extract this metadata as fragments and define a template to combine with the fragments. When this option is selected the following additional options will be shown:
- - *Select schema for output metadata records* - choose the ISO metadata schema or profile for the harvested metadata records. Note: only the schemas that have THREDDS fragment stylesheets will be displayed in the list (see the next option for the location of these stylesheets).
- - *Stylesheet to create metadata fragments* - Select a stylesheet to use to convert metadata for the dataset (THREDDS metadata and netCDF ncml where applicable) into metadata fragments. These stylesheets can be found in the directory convert/ThreddsToFragments in the schema directory eg. for iso19139 this would be `GEONETWORK_DATA_DIR/config/schema_plugins/iso19139/convert/ThreddsToFragments`.
- - *Create subtemplates for fragments and XLink them into template* - Select this option to create a subtemplate (=metadata fragment stored in GeoNetwork catalog) for each metadata fragment generated.
- - *Template to combine with fragments* - Select a template that will be filled in with the metadata fragments generated for each dataset. The generated metadata fragments are used to replace referenced elements in the templates with an xlink to a subtemplate if the *Create subtemplates* option is checked. If *Create subtemplates* is not checked, then the fragments are simply copied into the template metadata record.
- - For Atomic Datasets , one additional option is provided *Harvest new or modified datasets only*. If this option is checked only datasets that have been modified or didn't exist when the harvester was last run will be harvested.
- - *Create Thumbnails* - Select this option to create thumbnails for WMS layers in referenced WMS services
- - *Icon* - An icon to assign to harvested metadata. The icon will be used when showing search results.
-- **Options** - Scheduling Options.
-- **Privileges**
-- **Category for Service** - Select the category to assign to the ISO19119 service records for the THREDDS services.
-- **Category for Datasets** - Select the category to assign the generated metadata records (and any subtemplates) to.
-
-At the bottom of the page there are the following buttons:
-
-- **Back** - Go back to the main harvesting page. The harvesting definition is not added.
-- **Save** - Saves this harvester definition creating a new harvesting instance. After the save operation has completed, the main harvesting page will be displayed.
+To create a THREDDS Catalog harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `Thredds Catalog`:
+
+![](img/add-threddscatalog-harvester.png)
+
+Providing the following information:
+
+- **Identification**
+ - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester.
+ - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester.
+ - *User*: User who owns the harvested records.
+
+- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)).
+
+- **Configure connection to Thredds catalog**
+ - *Service URL*: The remote URL of the THREDDS Catalog from which metadata will be harvested. This must be the xml version of the catalog (i.e. ending with .xml). The harvester will crawl through all datasets and services defined in this catalog creating metadata for them as specified by the options described further below.
+
+- **Configure response processing for thredds**
+ - *Language*: Use this option to specify the language of the metadata to be harvested.
+ - *ISO19115 Topic category for output metadata records*: Use this option to specify the ISO topic category of service metadata.
+ - *Create ISO19119 metadata for all services in the thredds catalog*: Select this option to generate iso19119 metadata for services defined in the THREDDS catalog (eg. OpenDAP, OGC WCS, ftp) and for the THREDDS catalog itself.
+ - *Select schema for output metadata records*: The metadata standard to create the metadata. It should be a valid metadata schema installed in GeoNetwork, by default `iso19139`.
+    - *Dataset title*: (Optional) Title for the dataset. Default is the catalog URL.
+ - *Dataset abstract*: (Optional) Abstract for the dataset. Default is 'Thredds Dataset'.
+    - *Geonetwork category to assign to service metadata records* - Select the category to assign to the ISO19119 service records for the THREDDS services.
+ - *Geonetwork category to assign to dataset metadata records* - Select the category to assign the generated metadata records (and any subtemplates) to.
+
+- **Privileges** - Assign privileges to harvested metadata.
## More about harvesting THREDDS DIF metadata elements with the THREDDS Harvester
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-webdav.md b/docs/manual/docs/user-guide/harvesting/harvesting-webdav.md
index 4313483f627..cdd6b12434a 100644
--- a/docs/manual/docs/user-guide/harvesting/harvesting-webdav.md
+++ b/docs/manual/docs/user-guide/harvesting/harvesting-webdav.md
@@ -4,19 +4,35 @@ This harvesting type uses the WebDAV (Distributed Authoring and Versioning) prot
## Adding a WebDAV harvester
-- **Site** - Options about the remote site.
- - *Subtype* - Select WebDAV or WAF according to the type of server being harvested.
- - *Name* - This is a short description of the remote site. It will be shown in the harvesting main page as the name for this instance of the WebDAV harvester.
- - *URL* - The remote URL from which metadata will be harvested. Each file found that ends with .xml is assumed to be a metadata record.
- - *Icon* - An icon to assign to harvested metadata. The icon will be used when showing search results.
- - *Use account* - Account credentials for basic HTTP authentication on the WebDAV/WAF server.
-- **Options** - Scheduling options.
-- **Options** - Specific harvesting options for this harvester.
- - *Validate* - If checked, the metadata will be validated after retrieval. If the validation does not pass, the metadata will be skipped.
- - *Recurse* - When the harvesting engine will find folders, it will recursively descend into them.
-- **Privileges** - Assign privileges to harvested metadata.
-- **Categories**
+To create a WebDAV harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `WebDAV / WAF`:
+
+![](img/add-webdav-harvester.png)
+
+Providing the following information:
-!!! Notes
+- **Identification**
+ - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester.
+ - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester.
+ - *User*: User who owns the harvested records.
- - The same metadata could be harvested several times by different instances of the WebDAV harvester. This is not good practise because copies of the same metadata record will have a different UUID.
+- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)).
+
+- **Configure connection to WebDAV / WAF**
+ - *URL*: The remote URL from which metadata will be harvested. Each file found that has the extension `.xml` is assumed to be a metadata record.
+ - *Type of protocol*: Select WebDAV or WAF according to the type of server being harvested.
+    - *Remote authentication*: If checked, the credentials for basic HTTP authentication on the WebDAV/WAF server should be provided.
+ - *Also search in subfolders*: When the harvesting engine will find folders, it will recursively descend into them.
+
+- **Configure response processing for webdav**
+    - *Action on UUID collision*: When a harvester finds the same UUID on a record collected by another method (another harvester, importer, dashboard editor,...), should this record be skipped (default), overridden, or should a new UUID be generated?
+ - *XSL filter name to apply*: (Optional) The XSL filter is applied to each metadata record. The filter is a process which depends on the schema (see the `process` folder of the schemas).
+
+ It could be composed of parameter which will be sent to XSL transformation using the following syntax: `anonymizer?protocol=MYLOCALNETWORK:FILEPATH&email=gis@organisation.org&thesaurus=MYORGONLYTHEASURUS`
+
+ - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron).
+ - Accept all metadata without validation.
+ - Accept metadata that are XSD valid.
+ - Accept metadata that are XSD and schematron valid.
+ - *Category*: (Optional) A GeoNetwork category to assign to each metadata record.
+
+- **Privileges** - Assign privileges to harvested metadata.
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-wfs-features.md b/docs/manual/docs/user-guide/harvesting/harvesting-wfs-features.md
index 16abfa13bb7..c198e5f5966 100644
--- a/docs/manual/docs/user-guide/harvesting/harvesting-wfs-features.md
+++ b/docs/manual/docs/user-guide/harvesting/harvesting-wfs-features.md
@@ -2,26 +2,43 @@
Metadata can be present in the tables of a relational databases, which are commonly used by many organisations. Putting an OGC Web Feature Service (WFS) over a relational database will allow metadata to be extracted via standard query mechanisms. This harvesting type allows the user to specify a GetFeature query and map information from the features to fragments of metadata that can be linked or copied into a template to create metadata records.
+An OGC web feature service (WFS) implements a GetFeature query operation that returns data in the form of features (usually rows from related tables in a relational database). GeoNetwork, acting as a client, can read the GetFeature response and apply a user-supplied XSLT stylesheet to produce metadata fragments that can be linked or copied into a user-supplied template to build metadata records.
+
## Adding an OGC WFS GetFeature Harvester
-An OGC web feature service (WFS) implements a GetFeature query operation that returns data in the form of features (usually rows from related tables in a relational database). GeoNetwork, acting as a client, can read the GetFeature response and apply a user-supplied XSLT stylesheet to produce metadata fragments that can be linked or copied into a user-supplied template to build metadata records.
+To create an OGC WFS GetFeature harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `OGC WFS GetFeature`:
+
+![](img/add-wfsgetfeature-harvester.png)
-The available options are:
+Providing the following information:
-- **Site**
- - *Name* - This is a short description of the harvester. It will be shown in the harvesting main page as the name for this WFS GetFeature harvester.
- - *Service URL* - The bare URL of the WFS service (no OGC params required)
- - *Metadata language* - The language that will be used in the metadata records created by the harvester
+- **Identification**
+ - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester.
+ - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester.
+ - *User*: User who owns the harvested records.
+
+- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)).
+
+- **Configure connection to OGC WFS GetFeature**
+ - *Service URL*: The bare URL of the WFS service (no OGC params required).
+    - *Remote authentication*: If checked, the credentials for basic HTTP authentication on the WFS server should be provided.
- *OGC WFS GetFeature Query* - The OGC WFS GetFeature query used to extract features from the WFS.
- - *Schema for output metadata records* - choose the metadata schema or profile for the harvested metadata records. Note: only the schemas that have WFS fragment stylesheets will be displayed in the list (see the next option for the location of these stylesheets).
- - *Stylesheet to create fragments* - User-supplied stylesheet that transforms the GetFeature response to a metadata fragments document (see below for the format of that document). Stylesheets exist in the WFSToFragments directory which is in the convert directory of the selected output schema. eg. for the iso19139 schema, this directory is `GEONETWORK_DATA_DIR/config/schema_plugins/iso19139/convert/WFSToFragments`.
- - *Save large response to disk* - Check this box if you expect the WFS GetFeature response to be large (eg. greater than 10MB). If checked, the GetFeature response will be saved to disk in a temporary file. Each feature will then be extracted from the temporary file and used to create the fragments and metadata records. If not checked, the response will be held in RAM.
- - *Create subtemplates* - Check this box if you want the harvested metadata fragments to be saved as subtemplates in the metadata catalog and xlink'd into the metadata template (see next option). If not checked, the fragments will be copied into the metadata template.
- - *Template to use to build metadata using fragments* - Choose the metadata template that will be combined with the harvested metadata fragments to create metadata records. This is a standard GeoNetwork metadata template record.
- - *Category for records built with linked fragments* - Choose the metadata template that will be combined with the harvested metadata fragments to create metadata records. This is a standard GeoNetwork metadata template record.
-- **Options**
-- **Privileges**
-- **Category for subtemplates** - When fragments are saved to GeoNetwork as subtemplates they will be assigned to the category selected here.
+
+- **Configure response processing for wfsfeatures**
+ - *Language*: The language that will be used in the metadata records created by the harvester.
+    - *Metadata standard*: The metadata standard to use when creating the metadata records. It should be a valid metadata schema installed in GeoNetwork, by default `iso19139`.
+ - *Save large response to disk*: Check this box if you expect the WFS GetFeature response to be large (eg. greater than 10MB). If checked, the GetFeature response will be saved to disk in a temporary file. Each feature will then be extracted from the temporary file and used to create the fragments and metadata records. If not checked, the response will be held in RAM.
+ - *Stylesheet to create fragments*: User-supplied stylesheet that transforms the GetFeature response to a metadata fragments document (see below for the format of that document). Stylesheets exist in the WFSToFragments directory which is in the convert directory of the selected output schema. eg. for the iso19139 schema, this directory is `GEONETWORK_DATA_DIR/config/schema_plugins/iso19139/convert/WFSToFragments`.
+ - *Create subtemplates*: Check this box if you want the harvested metadata fragments to be saved as subtemplates in the metadata catalog and xlink'd into the metadata template (see next option). If not checked, the fragments will be copied into the metadata template.
+ - *Select template to combine with fragments*: Choose the metadata template that will be combined with the harvested metadata fragments to create metadata records. This is a standard GeoNetwork metadata template record.
+ - *Category for directory entries*: (Optional) When fragments are saved to GeoNetwork as subtemplates they will be assigned to the category selected here.
+ - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron).
+ - Accept all metadata without validation.
+ - Accept metadata that are XSD valid.
+ - Accept metadata that are XSD and schematron valid.
+
+- **Privileges** - Assign privileges to harvested metadata.
+
## More about turning the GetFeature Response into metadata fragments
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-z3950.md b/docs/manual/docs/user-guide/harvesting/harvesting-z3950.md
deleted file mode 100644
index 47722c37464..00000000000
--- a/docs/manual/docs/user-guide/harvesting/harvesting-z3950.md
+++ /dev/null
@@ -1,90 +0,0 @@
-# Z3950 Harvesting {#z3950_harvester}
-
-Z3950 is a remote search and harvesting protocol that is commonly used to permit search and harvest of metadata. Although the protocol is often used for library catalogs, significant geospatial metadata catalogs can also be searched using Z3950 (eg. the metadata collections of the Australian Government agencies that participate in the Australian Spatial Data Directory - ASDD). This harvester allows the user to specify a Z3950 query and retrieve metadata records from one or more Z3950 servers.
-
-## Adding a Z3950 Harvester
-
-The available options are:
-
-- **Site**
- - *Name* - A short description of this Z3950 harvester. It will be shown in the harvesting main page using this name.
- - *Z3950 Server(s)* - These are the Z3950 servers that will be searched. You can select one or more of these servers.
- - *Z3950 Query* - Specify the Z3950 query to use when searching the selected Z3950 servers. At present this field is known to support the Prefix Query Format (also known as Prefix Query Notation) which is described at this URL: . See below for more information and some simple examples.
- - *Icon* - An icon to assign to harvested metadata. The icon will be used when showing search results.
-- **Options** - Scheduling options.
-- **Harvested Content**
- - *Apply this XSLT to harvested records* - Choose an XSLT here that will convert harvested records to a different format.
- - *Validate* - If checked, records that do not/cannot be validated will be rejected.
-- **Privileges**
-- **Categories**
-
-!!! note
-
- this harvester automatically creates a new Category named after each of the Z3950 servers that return records. Records that are returned by a server are assigned to the category named after that server.
-
-
-## More about PQF Z3950 Queries
-
-PQF is a rather arcane query language. It is based around the idea of attributes and attribute sets. The most common attribute set used for geospatial metadata in Z3950 servers is the GEO attribute set (which is an extension of the BIB-1 and GILS attribute sets - see ). So all PQF queries to geospatial metadata Z3950 servers should start off with @attrset geo.
-
-The most useful attribute types in the GEO attribute set are as follows:
-
-| @attr number | Meaning | Description |
-|---------------|------------|--------------------------------------------------|
-| 1 | Use | What field to search |
-| 2 | Relation | How to compare the term specified |
-| 4 | Structure | What type is the term? eg. date, numeric, phrase |
-| 5 | Truncation | How to truncate eg. right |
-
-In GeoNetwork the numeric values that can be specified for `@attr 1` map to the lucene index field names as follows:
-
-| @attr 1= | Lucene index field | ISO19139 element |
-|----------------------|-------------------------------|-------------------------------------------------------------------------------------------------------------|
-| 1016 | any | All text from all metadata elements |
-| 4 | title, altTitle | gmd:identificationInfo//gmd:citation//gmd:title/gco:CharacterString |
-| 62 | abstract | gmd:identificationInfo//gmd:abstract/gco:CharacterString |
-| 1012 | _changeDate | Not a metadata element (maintained by GeoNetwork) |
-| 30 | createDate | gmd:MD_Metadata/gmd:dateStamp/gco:Date |
-| 31 | publicationDate | gmd:identificationInfo//gmd:citation//gmd:date/gmd:='publication' |
-| 2072 | tempExtentBegin | gmd:identificationInfo//gmd:extent//gmd:temporalElement//gml:begin(Position) |
-| 2073 | tempExtentEnd | gmd:identificationInfo//gmd:extent//gmd:temporalElement//gml:end(Position) |
-| 2012 | fileId | gmd:MD_Metadata/gmd:fileIdentifier/* |
-| 12 | identifier | gmd:identificationInfo//gmd:citation//gmd:identifier//gmd:code/* |
-| 21,29,2002,3121,3122 | keyword | gmd:identificationInfo//gmd:keyword/* |
-| 2060 | northBL,eastBL,southBL,westBL | gmd:identificationInfo//gmd:extent//gmd:EX_GeographicBoundingBox/gmd:westBoundLongitude*/gco:Decimal (etc) |
-
-Note that this is not a complete set of the mappings between Z3950 GEO attribute set and the GeoNetwork lucene index field names for ISO19139. Check out INSTALL_DIR/web/geonetwork/xml/search/z3950Server.xsl and INSTALL_DIR/web/geonetwork/xml/schemas/iso19139/index-fields.xsl for more details and annexe A of the GEO attribute set for Z3950 at for more details.
-
-Common values for the relation attribute (`@attr=2`):
-
-| @attr 2= | Description |
-|-----------|--------------------------|
-| 1 | Less than |
-| 2 | Less than or equal to |
-| 3 | Equals |
-| 4 | Greater than or equal to |
-| 5 | Greater than |
-| 6 | Not equal to |
-| 7 | Overlaps |
-| 8 | Fully enclosed within |
-| 9 | Encloses |
-| 10 | Fully outside of |
-
-So a simple query to get all metadata records that have the word 'the' in any field would be:
-
-`@attrset geo @attr 1=1016 the`
-
-- `@attr 1=1016` means that we are doing a search on any field in the metadata record
-
-A more sophisticated search on a bounding box might be formulated as:
-
-`@attrset geo @attr 1=2060 @attr 4=201 @attr 2=7 "-36.8262 142.6465 -44.3848 151.2598`
-
-- `@attr 1=2060` means that we are doing a bounding box search
-- `@attr 4=201` means that the query contains coordinate strings
-- `@attr 2=7` means that we are searching for records whose bounding box overlaps the query box specified at the end of the query
-
-!!! Notes
-
- - Z3950 servers must be configured for GeoNetwork in `INSTALL_DIR/web/geonetwork/WEB-INF/classes/JZKitConfig.xml.tem`
- - every time the harvester runs, it will remove previously harvested records and create new ones.
diff --git a/docs/manual/docs/user-guide/harvesting/img/add-arcsde-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-arcsde-harvester.png
new file mode 100644
index 00000000000..258c163bfda
Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-arcsde-harvester.png differ
diff --git a/docs/manual/docs/user-guide/harvesting/img/add-csw-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-csw-harvester.png
new file mode 100644
index 00000000000..e6e484359b9
Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-csw-harvester.png differ
diff --git a/docs/manual/docs/user-guide/harvesting/img/add-filesystem-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-filesystem-harvester.png
new file mode 100644
index 00000000000..0e0f0d66bfd
Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-filesystem-harvester.png differ
diff --git a/docs/manual/docs/user-guide/harvesting/img/add-geonetwork-3-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-geonetwork-3-harvester.png
new file mode 100644
index 00000000000..002459bae7d
Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-geonetwork-3-harvester.png differ
diff --git a/docs/manual/docs/user-guide/harvesting/img/add-geoportalrest-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-geoportalrest-harvester.png
new file mode 100644
index 00000000000..31d60f997e7
Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-geoportalrest-harvester.png differ
diff --git a/docs/manual/docs/user-guide/harvesting/img/add-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-harvester.png
new file mode 100644
index 00000000000..5d50e1dce3e
Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-harvester.png differ
diff --git a/docs/manual/docs/user-guide/harvesting/img/add-oaipmh-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-oaipmh-harvester.png
new file mode 100644
index 00000000000..a6ad14e6a54
Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-oaipmh-harvester.png differ
diff --git a/docs/manual/docs/user-guide/harvesting/img/add-ogcwebservices-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-ogcwebservices-harvester.png
new file mode 100644
index 00000000000..2734781c718
Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-ogcwebservices-harvester.png differ
diff --git a/docs/manual/docs/user-guide/harvesting/img/add-simpleurl-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-simpleurl-harvester.png
new file mode 100644
index 00000000000..6f7af0255a9
Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-simpleurl-harvester.png differ
diff --git a/docs/manual/docs/user-guide/harvesting/img/add-threddscatalog-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-threddscatalog-harvester.png
new file mode 100644
index 00000000000..a326a4b7c79
Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-threddscatalog-harvester.png differ
diff --git a/docs/manual/docs/user-guide/harvesting/img/add-webdav-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-webdav-harvester.png
new file mode 100644
index 00000000000..4b36e089b8d
Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-webdav-harvester.png differ
diff --git a/docs/manual/docs/user-guide/harvesting/img/add-wfsgetfeature-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-wfsgetfeature-harvester.png
new file mode 100644
index 00000000000..bd3646bc0cf
Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-wfsgetfeature-harvester.png differ
diff --git a/docs/manual/docs/user-guide/harvesting/img/harvester-history.png b/docs/manual/docs/user-guide/harvesting/img/harvester-history.png
new file mode 100644
index 00000000000..f9064c1a8f3
Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/harvester-history.png differ
diff --git a/docs/manual/docs/user-guide/harvesting/img/harvester-statistics.png b/docs/manual/docs/user-guide/harvesting/img/harvester-statistics.png
new file mode 100644
index 00000000000..b311bb2ec8e
Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/harvester-statistics.png differ
diff --git a/docs/manual/docs/user-guide/harvesting/img/harvesters.png b/docs/manual/docs/user-guide/harvesting/img/harvesters.png
new file mode 100644
index 00000000000..bd008fdef7c
Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/harvesters.png differ
diff --git a/docs/manual/docs/user-guide/harvesting/index.md b/docs/manual/docs/user-guide/harvesting/index.md
index 46f52f782c5..abea85ff38c 100644
--- a/docs/manual/docs/user-guide/harvesting/index.md
+++ b/docs/manual/docs/user-guide/harvesting/index.md
@@ -6,7 +6,8 @@ Harvesting is the process of ingesting metadata from remote sources and storing
The following sources can be harvested:
-- [GeoNetwork 2.0 Harvester](harvesting-geonetwork.md)
+- [GeoNetwork 2.1-3.X Harvester](harvesting-geonetwork.md)
+- [GeoNetwork 2.0 Harvester](harvesting-geonetwork-2.md)
- [Harvesting CSW services](harvesting-csw.md)
- [Harvesting OGC Services](harvesting-ogcwxs.md)
- [Simple URL harvesting (opendata)](harvesting-simpleurl.md)
@@ -17,7 +18,6 @@ The following sources can be harvested:
- [GeoPortal REST Harvesting](harvesting-geoportal.md)
- [THREDDS Harvesting](harvesting-thredds.md)
- [WFS GetFeature Harvesting](harvesting-wfs-features.md)
-- [Z3950 Harvesting](harvesting-z3950.md)
## Mechanism overview
@@ -134,79 +134,45 @@ The script will add the certificate to the JVM keystore, if you run it as follow
$ ./ssl_key_import.sh https_server_name 443
-## The main page
+## Harvesting page
-To access the harvesting main page you have to be logged in as an administrator. From the administration page, select the harvest shortcut. The harvesting main page will then be displayed.
+To access the harvesting main page you have to be logged in with a profile `Administrator` or `UserAdmin`. From the `Admin console` menu, select the option `Harvesting`.
-The page shows a list of the currently defined harvesters and a set of buttons for management functions. The meaning of each column in the list of harvesters is as follows:
+The page shows a list of the currently defined harvesters with information about the status of the harvesters:
-1. *Select* Check box to select one or more harvesters. The selected harvesters will be affected by the first row of buttons (activate, deactivate, run, remove). For example, if you select three harvesters and press the Remove button, they will all be removed.
-2. *Name* This is the harvester name provided by the administrator.
-3. *Type* The harvester type (eg. GeoNetwork, WebDAV etc\...).
-4. *Status* An icon showing current status. See [Harvesting Status and Error Icons](index.md#admin_harvesting_status) for the different icons and status descriptions.
-5. *Errors* An icon showing the result of the last harvesting run, which could have succeeded or not. See [Harvesting Status and Error Icons](index.md#admin_harvesting_status) for the different icons and error descriptions. Hovering the cursor over the icon will show detailed information about the last harvesting run.
-6. *Run at* and *Every*: Scheduling of harvester runs. Essentially the time of the day + how many hours between repeats and on which days the harvester will run.
-7. *Last run* The date, in ISO 8601 format, of the most recent harvesting run.
-8. *Operation* A list of buttons/links to operations on a harvester.
- - Selecting *Edit* will allow you to change the parameters for a harvester.
- - Selecting *Clone* will allow you to create a clone of this harvester and start editing the details of the clone.
- - Selecting *History* will allow you to view/change the harvesting history for a harvester - see [Harvest History](index.md#harvest_history).
+![](img/harvesters.png)
-At the bottom of the list of harvesters are two rows of buttons. The first row contains buttons that can operate on a selected set of harvesters. You can select the harvesters you want to operate on using the check box in the Select column and then press one of these buttons. When the button finishes its action, the check boxes are cleared. Here is the meaning of each button:
+The following information is shown for each harvester:
-1. *Activate* When a new harvester is created, the status is *inactive*. Use this button to make it *active* and start the harvester(s) according to the schedule it has/they have been configured to use.
-2. *Deactivate* Stops the harvester(s). Note: this does not mean that currently running harvest(s) will be stopped. Instead, it means that the harvester(s) will not be scheduled to run again.
-3. *Run* Start the selected harvesters immediately. This is useful for testing harvester setups.
-4. *Remove* Remove all currently selected harvesters. A dialogue will ask the user to confirm the action.
+- **Last run**: Date on which the harvester was last run.
+- **Total**: It is the total number of metadata found remotely. Metadata with the same id are considered as one.
+- **Updated**: Number of metadata that are present locally but needed to be updated because their last modification date was different from the remote one.
+- **Unchanged**: Number of local metadata that have not been modified. Its remote last modification date has not changed.
-The second row contains general purpose buttons. Here is the meaning of each button:
+At the bottom of the harvester list there are the following buttons:
-1. *Back* Simply returns to the main administration page.
-2. *Add* This button creates a new harvester.
-3. *Refresh* Refreshes the current list of harvesters from the server. This can be useful to see if the harvesting list has been altered by someone else or to get the status of any running harvesters.
-4. *History* Show the harvesting history of all harvesters. See [Harvest History](index.md#harvest_history) for more details.
+1. *Harvest from*: Allows you to select the type of harvester to create.
+2. *Clone*: Creates a new harvester, using the information of an existing harvester.
+3. *Refresh*: Refreshes the list of harvesters.
-## Harvesting Status and Error Icons {#admin_harvesting_status}
+### Adding new harvesters
-## Harvesting result tips
+To add a new harvester, click on the `Harvest from` button. A drop-down list with all available harvesting protocols will appear.
-When a harvester runs and completes, a tool tip showing detailed information about the harvesting process is shown in the **Errors** column for the harvester. If the harvester succeeded then hovering the cursor over the tool tip will show a table, with some rows labelled as follows:
+![](img/add-harvester.png)
-- **Total** - This is the total number of metadata found remotely. Metadata with the same id are considered as one.
-- **Added** - Number of metadata added to the system because they were not present locally.
-- **Removed** - Number of metadata that have been removed locally because they are not present in the remote server anymore.
-- **Updated** - Number of metadata that are present locally but that needed to be updated because their last change date was different from the remote one.
-- **Unchanged** - Local metadata left unchanged. Their remote last change date did not change.
-- **Unknown schema** - Number of skipped metadata because their format was not recognised by GeoNetwork.
-- **Unretrievable** - Number of metadata that were ready to be retrieved from the remote server but for some reason there was an exception during the data transfer process.
-- **Bad Format** - Number of skipped metadata because they did not have a valid XML representation.
-- **Does not validate** - Number of metadata which did not validate against their schema. These metadata were harvested with success but skipped due to the validation process. Usually, there is an option to force validation: if you want to harvest these metadata anyway, simply turn/leave it off.
-- **Thumbnails/Thumbnails failed** - Number of metadata thumbnail images added/that could not be added due to some failure.
-- **Metadata URL attribute used** - Number of layers/featuretypes/coverages that had a metadata URL that could be used to link to a metadata record (OGC Service Harvester only).
-- **Services added** - Number of ISO19119 service records created and added to the catalogue (for THREDDS catalog harvesting only).
-- **Collections added** - Number of collection dataset records added to the catalogue (for THREDDS catalog harvesting only).
-- **Atomics added** - Number of atomic dataset records added to the catalogue (for THREDDS catalog harvesting only).
-- **Subtemplates added** - Number of subtemplates (= fragment visible in the catalog) added to the metadata catalog.
-- **Subtemplates removed** - Number of subtemplates (= fragment visible in the catalog) removed from the metadata catalog.
-- **Fragments w/Unknown schema** - Number of fragments which have an unknown metadata schema.
-- **Fragments returned** - Number of fragments returned by the harvester.
-- **Fragments matched** - Number of fragments that had identifiers that in the template used by the harvester.
-- **Existing datasets** - Number of metadata records for datasets that existed when the THREDDS harvester was run.
-- **Records built** - Number of records built by the harvester from the template and fragments.
-- **Could not insert** - Number of records that the harvester could not insert into the catalog (usually because the record was already present eg. in the Z3950 harvester this can occur if the same record is harvested from different servers).
+You can choose the type of harvesting you want to do. Supported harvesters and details on what to do next can be found in the following sections.
-## Adding new harvesters
+### Harvester History {#harvest_history}
-The Add button in the main page allows you to add new harvesters. A drop down list is then shown with all the available harvester protocols.
+Each time a harvester is run, a log file is generated of what was harvested and/or what went wrong (e.g., an exception report). To view the harvester history, select a harvester in the harvester list and select the `Harvester history` tab on the harvester page:
-You can choose the type of harvest you intend to perform and press *Add* to begin the process of adding the harvester. The supported harvesters and details of what to do next are in the following sections:
+![](img/harvester-history.png)
-## Harvest History {#harvest_history}
+Once the harvester history is displayed, it is possible to download the log file of the harvester run and delete the harvester history.
-Each time a harvester is run, it generates a status report of what was harvested and/or what went wrong (eg. exception report). These reports are stored in a table in the database used by GeoNetwork. The entire harvesting history for all harvesters can be recalled using the History button on the Harvesting Management page. The harvest history for an individual harvester can also be recalled using the History link in the Operations for that harvester.
+### Harvester records
-Once the harvest history has been displayed it is possible to:
+When a harvester is executed, you can see the list of harvested metadata and some statistics about the metadata. Select a harvester in the list of harvesters and select the `Metadata records` tab on the harvester page:
-- expand the detail of any exceptions
-- sort the history by harvest date (or in the case of the history of all harvesters, by harvester name)
-- delete any history entry or the entire history
+![](img/harvester-statistics.png)
diff --git a/docs/manual/docs/user-guide/publishing/managing-privileges.md b/docs/manual/docs/user-guide/publishing/managing-privileges.md
index 7670dbff69e..2bb33525d01 100644
--- a/docs/manual/docs/user-guide/publishing/managing-privileges.md
+++ b/docs/manual/docs/user-guide/publishing/managing-privileges.md
@@ -16,11 +16,11 @@ Below is a brief description for each privilege to help you identify which ones
**Publish**: Users in the specified group/s are able to view the metadata eg. if it matches search criteria entered by such a user.
-**Download**: Users in the specified group/s are able to download the data.
-
**Interactive Map**: Users in the specified group/s are able to get an interactive map. The interactive map has to be created separately using a Web Map Server such as GeoServer, which is distributed with GeoNetwork.
-**Featured**: When randomly selected by GeoNetwork, the metadata record can appear in the `Featured` section of the GeoNetwork home page.
+**Download**: Users in the specified group/s are able to download the data.
+
+**Editing**: Users in the specified group/s are able to edit the metadata, if they have the *editor* profile.
**Notify**: Users in the specified group receive notification if data attached to the metadata record is downloaded.
@@ -57,12 +57,13 @@ Any user (logged in or not) can view the public metadata.
An *administrator* can edit any metadata.
-A *reviewer* can edit a metadata if:
+A *reviewer* / *editor* can edit a metadata if:
+
+* They are the metadata owner.
+
+* The metadata has editing privilege in the group(s) where the user is a *reviewer* / *editor*.
-- The metadata owner is member of one of the groups assigned to the reviewer.
-- They are the metadata owner.
-A *User Administrator* or an *Editor* can only edit metadata they created.
# Setting Privileges
diff --git a/docs/manual/docs/user-guide/workflow/life-cycle.md b/docs/manual/docs/user-guide/workflow/life-cycle.md
index c1bf876157f..70416aa4511 100644
--- a/docs/manual/docs/user-guide/workflow/life-cycle.md
+++ b/docs/manual/docs/user-guide/workflow/life-cycle.md
@@ -1,26 +1,29 @@
# Life cycle
-## Record life cycle
+Metadata records can have a lifecycle that typically goes through one or more states. This is an optional feature that
+can be activated on demand (see [Activate the metadata workflow](#activate-workflow)).
-Metadata records have a lifecycle that typically goes through one or more states. For example, when a record is:
+For example, when a record is:
- created and edited by an `Editor` it is in the `Draft` state.
-- being reviewed by a `content reviewer`, or a review is requested brings the record to `Submitted` state.
+- being reviewed by a `Content reviewer`, or a review is requested brings the record to `Submitted` state.
- completed and corrected by the `Content Reviewer` it is in the `Approved` state.
- superseded or replaced the state is `Retired`.
-The catalog has (an extensible) set of states that a metadata record can have:
+The catalog has a set of states that a metadata record can have:
-- `Unknown` - this is the default state - nothing is known about the status of the metadata record.
- `Draft` - the record is under construction or being edited.
- `Submitted` - the record has been submitted for approval to a content review.
- `Approved` - the content reviewer has reviewed and approved the metadata record.
-- `Rejected` - the content reviewer has reviewed and rejected the metadata record.
- `Retired` - the record has been retired.
-Workflow can be enabled for the full catalogue, certain groups or on an individual record level.
+When the metadata workflow is activated, the existing records are set in a special status `Unknown`.
-In the last case, to enable workflow and change the status from `Unknown` to `Draft`, click the `enable workflow` button in the metadata view:
+## Activate the metadata workflow {#activate-workflow}
+
+To enable the record life cycle, activate the metadata workflow. It can be activated for the full catalogue, certain groups, or on an individual record.
+
+To activate the workflow for an individual record, changing its status from `Unknown` to `Draft`, click the `Enable workflow` button in the metadata view:
![](img/workflow-enable.png)
@@ -28,10 +31,11 @@ In the last case, to enable workflow and change the status from `Unknown` to `Dr
To use the workflow for metadata records created before enabling it, you must use the above option.
+To enable workflow for the full catalogue or certain groups, check `Administration` --> `Settings` --> `Metadata Workflow`. In workflow mode, in case approved records are modified, you're working on a copy of the approved record. Changes on the record will not be visible to users outside your group until the modified record is approved again.
-To enable workflow for the full catalogue or certain groups, check Administration --> Settings --> Metadata Workflow. In workflow mode, in case approved records are modified, you're working on a copy of the approved record. Changes on the record will not be visible to users outside your group until the modified record is approved again.
+## Usage
-When done editing you can submit a record for review by a content reviewer. The submit button is available on the `manage record` menu in the metadata view. A popup will open in which you can leave a message for the content reviewer.
+When done editing you can submit a record for review by a content reviewer. The submit button is available on the `Manage record` menu in the metadata view. A popup will open in which you can leave a message for the content reviewer.
![](img/submit-for-review.png)
diff --git a/docs/manual/mkdocs.yml b/docs/manual/mkdocs.yml
index 73af7ac42b5..cc3c1920116 100644
--- a/docs/manual/mkdocs.yml
+++ b/docs/manual/mkdocs.yml
@@ -149,10 +149,11 @@ nav:
- overview/authors.md
- 'Changelog':
- overview/change-log/index.md
- - overview/change-log/version-4.4.5.md
- - overview/change-log/version-4.2.10.md
+ - overview/change-log/version-4.4.6.md
+ - overview/change-log/version-4.2.11.md
- 'Release History':
- overview/change-log/history/index.md
+ - overview/change-log/version-4.4.6.md
- overview/change-log/version-4.4.5.md
- overview/change-log/version-4.4.4.md
- overview/change-log/version-4.4.3.md
@@ -161,6 +162,7 @@ nav:
- overview/change-log/version-4.4.0.md
- 'Release History':
- overview/change-log/history/index.md
+ - overview/change-log/version-4.2.11.md
- overview/change-log/version-4.2.10.md
- overview/change-log/version-4.2.9.md
- overview/change-log/version-4.2.8.md
@@ -294,6 +296,7 @@ nav:
- user-guide/harvesting/harvesting-csw.md
- user-guide/harvesting/harvesting-filesystem.md
- user-guide/harvesting/harvesting-geonetwork.md
+ - user-guide/harvesting/harvesting-geonetwork-2.md
- user-guide/harvesting/harvesting-geoportal.md
- user-guide/harvesting/harvesting-oaipmh.md
- user-guide/harvesting/harvesting-ogcwxs.md
@@ -302,7 +305,6 @@ nav:
- user-guide/harvesting/harvesting-thredds.md
- user-guide/harvesting/harvesting-webdav.md
- user-guide/harvesting/harvesting-wfs-features.md
- - user-guide/harvesting/harvesting-z3950.md
- user-guide/export/index.md
- 'Administration':
- administrator-guide/index.md
@@ -321,6 +323,7 @@ nav:
- administrator-guide/managing-users-and-groups/creating-group.md
- administrator-guide/managing-users-and-groups/creating-user.md
- administrator-guide/managing-users-and-groups/user-self-registration.md
+ - administrator-guide/managing-users-and-groups/user-reset-password.md
- 'Classification Systems':
- administrator-guide/managing-classification-systems/index.md
- administrator-guide/managing-classification-systems/managing-categories.md
diff --git a/docs/manual/pom.xml b/docs/manual/pom.xml
index 4ac33875de1..a0d9a973a96 100644
--- a/docs/manual/pom.xml
+++ b/docs/manual/pom.xml
@@ -27,7 +27,7 @@
gn-docs
org.geonetwork-opensource
- 4.4.6-SNAPSHOT
+ 4.4.7-SNAPSHOT
4.0.0
gn-guide
diff --git a/docs/pom.xml b/docs/pom.xml
index 6330049c7b0..0621811afc1 100644
--- a/docs/pom.xml
+++ b/docs/pom.xml
@@ -27,7 +27,7 @@
geonetwork
org.geonetwork-opensource
- 4.4.6-SNAPSHOT
+ 4.4.7-SNAPSHOT
4.0.0
gn-docs
diff --git a/doi/pom.xml b/doi/pom.xml
index 8f087e09ce3..21348549305 100644
--- a/doi/pom.xml
+++ b/doi/pom.xml
@@ -28,7 +28,7 @@
geonetwork
org.geonetwork-opensource
- 4.4.6-SNAPSHOT
+ 4.4.7-SNAPSHOT
4.0.0
diff --git a/domain/pom.xml b/domain/pom.xml
index 812778871e8..685c96638c2 100644
--- a/domain/pom.xml
+++ b/domain/pom.xml
@@ -27,7 +27,7 @@
geonetwork
org.geonetwork-opensource
- 4.4.6-SNAPSHOT
+ 4.4.7-SNAPSHOT
4.0.0
diff --git a/domain/src/main/java/org/fao/geonet/domain/Group.java b/domain/src/main/java/org/fao/geonet/domain/Group.java
index 19fe92d6eb7..ccab64a875f 100644
--- a/domain/src/main/java/org/fao/geonet/domain/Group.java
+++ b/domain/src/main/java/org/fao/geonet/domain/Group.java
@@ -41,6 +41,8 @@
import javax.persistence.ElementCollection;
import javax.persistence.Entity;
import javax.persistence.EntityListeners;
+import javax.persistence.Enumerated;
+import javax.persistence.EnumType;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
@@ -84,6 +86,7 @@ public class Group extends Localized implements Serializable {
private MetadataCategory defaultCategory;
private List allowedCategories;
private Boolean enableAllowedCategories;
+ private Profile minimumProfileForPrivileges;
/**
* Get the id of the group.
@@ -348,4 +351,24 @@ public Group setEnableAllowedCategories(Boolean enableAllowedCategories) {
this.enableAllowedCategories = enableAllowedCategories;
return this;
}
+
+ /**
+ * Get the minimum profile required to update privileges for this group.
+ *
+ * @return {@link Profile} the minimum profile required to update privileges for this group.
+ */
+ @Enumerated(EnumType.STRING)
+ public Profile getMinimumProfileForPrivileges() {
+ return minimumProfileForPrivileges;
+ }
+
+ /**
+ * Set the minimum profile required to update privileges for this group.
+ * @param minimumProfileForPrivileges the minimum {@link Profile} required to update privileges for this group.
+ * @return this group entity object.
+ */
+ public Group setMinimumProfileForPrivileges(Profile minimumProfileForPrivileges) {
+ this.minimumProfileForPrivileges = minimumProfileForPrivileges;
+ return this;
+ }
}
diff --git a/domain/src/main/java/org/fao/geonet/domain/Profile.java b/domain/src/main/java/org/fao/geonet/domain/Profile.java
index e3a24e71074..b6e680d0ee0 100644
--- a/domain/src/main/java/org/fao/geonet/domain/Profile.java
+++ b/domain/src/main/java/org/fao/geonet/domain/Profile.java
@@ -73,31 +73,70 @@ public static Profile findProfileIgnoreCase(String profileName) {
return null;
}
- public Set getParents() {
- HashSet parents = new HashSet();
+ /**
+ * Retrieves all direct child profiles of the current profile.
+ * Child profiles have fewer permissions than parents.
+ *
+ * @return A set containing profiles that have this profile as a parent.
+ */
+ public Set getChildren() {
+ HashSet children = new HashSet();
for (Profile profile : values()) {
if (profile.parents.contains(this)) {
- parents.add(profile);
+ children.add(profile);
}
}
+ return children;
+ }
+
+ /**
+ * Retrieves the direct parent profiles of the current profile.
+ * Parent profiles have more permissions than children.
+ *
+ * @return A set of profiles that are direct parents of this profile.
+ */
+ public Set getParents() {
return parents;
}
- public Set getAll() {
- HashSet all = new HashSet();
- all.add(this);
- for (Profile parent : getParents()) {
- all.addAll(parent.getAll());
+ /**
+ * Retrieves the profile and all of its children recursively.
+ * The returned set will include the profile itself.
+ * Child profiles have fewer permissions than parents.
+ *
+ * @return A {@link Set} containing the profile and all of its children.
+ */
+ public Set getProfileAndAllChildren() {
+ HashSet profiles = new HashSet();
+ profiles.add(this);
+ for (Profile child : getChildren()) {
+ profiles.addAll(child.getProfileAndAllChildren());
}
- return all;
+ return profiles;
+ }
+
+ /**
+ * Retrieves the profile and all of its parents recursively.
+ * The returned set will include the profile itself.
+ * Parent profiles have more permissions than children.
+ *
+ * @return A {@link Set} containing the profile and all of its parents.
+ */
+ public Set getProfileAndAllParents() {
+ Set profiles = new HashSet<>();
+ profiles.add(this);
+ for (Profile parent : getParents()) {
+ profiles.addAll(parent.getProfileAndAllParents());
+ }
+ return profiles;
}
public Element asElement() {
Element elResult = new Element(PROFILES_ELEM_NAME);
- for (Profile profile : getAll()) {
+ for (Profile profile : getProfileAndAllChildren()) {
if (profile == Guest)
continue;
@@ -109,7 +148,7 @@ public Element asElement() {
public Set getAllNames() {
HashSet names = new HashSet();
- for (Profile p : getAll()) {
+ for (Profile p : getProfileAndAllChildren()) {
names.add(p.name());
}
return names;
diff --git a/domain/src/main/java/org/fao/geonet/domain/StatusValue.java b/domain/src/main/java/org/fao/geonet/domain/StatusValue.java
index 26d5bf4ccc9..47f36e60406 100644
--- a/domain/src/main/java/org/fao/geonet/domain/StatusValue.java
+++ b/domain/src/main/java/org/fao/geonet/domain/StatusValue.java
@@ -24,9 +24,11 @@
package org.fao.geonet.domain;
import org.fao.geonet.entitylistener.StatusValueEntityListenerManager;
+import org.springframework.http.MediaType;
import javax.persistence.*;
+import java.util.Arrays;
import java.util.Map;
/**
@@ -223,20 +225,141 @@ public static final class Status {
* List of predefined status part of the events. Those values are the default
* one for GeoNetwork and may be modified in the database.
*/
- public static final class Events {
- public static final String RECORDCREATED = "50";
- public static final String RECORDUPDATED = "51";
- public static final String ATTACHMENTADDED = "52";
- public static final String ATTACHMENTDELETED = "53";
- public static final String RECORDOWNERCHANGE = "54";
- public static final String RECORDGROUPOWNERCHANGE = "55";
- public static final String RECORDPRIVILEGESCHANGE = "56";
- public static final String RECORDCATEGORYCHANGE = "57";
- public static final String RECORDVALIDATIONTRIGGERED = "58";
- public static final String RECORDSTATUSCHANGE = "59";
- public static final String RECORDPROCESSINGCHANGE = "60";
- public static final String RECORDDELETED = "61";
- public static final String RECORDIMPORTED = "62";
- public static final String RECORDRESTORED = "63";
+ public enum Events {
+ RECORDCREATED(50, false, MediaType.APPLICATION_JSON, null),
+ RECORDUPDATED(51, true, MediaType.APPLICATION_XML, MediaType.APPLICATION_XML),
+ ATTACHMENTADDED(52, false, MediaType.TEXT_PLAIN, null),
+ ATTACHMENTDELETED(53, false, null, MediaType.TEXT_PLAIN),
+ RECORDOWNERCHANGE(54, false, MediaType.APPLICATION_JSON, MediaType.APPLICATION_JSON),
+ RECORDGROUPOWNERCHANGE(55, false, MediaType.APPLICATION_JSON, MediaType.APPLICATION_JSON),
+ RECORDPRIVILEGESCHANGE(56, false, MediaType.APPLICATION_JSON, MediaType.APPLICATION_JSON),
+ RECORDCATEGORYCHANGE(57, false, MediaType.APPLICATION_JSON, MediaType.APPLICATION_JSON),
+ RECORDVALIDATIONTRIGGERED(58, false, MediaType.TEXT_PLAIN, null),
+ RECORDSTATUSCHANGE(59, false, MediaType.APPLICATION_JSON, MediaType.APPLICATION_JSON),
+ RECORDPROCESSINGCHANGE(60, true, MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML),
+ RECORDDELETED(61, true, null, MediaType.APPLICATION_XML),
+ RECORDIMPORTED(62, false, MediaType.APPLICATION_JSON, null),
+ RECORDRESTORED(63, true, MediaType.APPLICATION_XML, MediaType.APPLICATION_XML);
+
+ /**
+ * The id of the event.
+ */
+ private final Integer id;
+ /**
+ * The restore function currently supports these states
+ */
+ private final boolean isSupportedRestoreStatus;
+
+ /**
+ * Indicates the mime type that the current state will be stored in.
+ * This is generally json or xml and if it is null then the state is not supported.
+ */
+ private final MediaType currentStateFormat;
+
+ /**
+ * Indicates the mime type that the previous state will be stored in.
+ * This is generally json or xml and if it is null then the state is not supported.
+ */
+ private final MediaType previousStateFormat;
+
+ /**
+ * Constructor.
+ *
+ * @param id the id of the event.
+ * @param isSupportedRestoreStatus the restore function currently supports these states.
+ * @param currentStateFormat indicates mime type of the current state will be stored in.
+ * @param previousStateFormat indicates mime type of the previous state will be stored in.
+ */
+ Events(Integer id, boolean isSupportedRestoreStatus, MediaType currentStateFormat, MediaType previousStateFormat) {
+ this.id = id;
+ this.isSupportedRestoreStatus = isSupportedRestoreStatus;
+ this.currentStateFormat = currentStateFormat;
+ this.previousStateFormat = previousStateFormat;
+ }
+
+ /**
+ * Get the id of the event.
+ *
+ * @return the id of the event.
+ */
+ public Integer getId() {
+ return id;
+ }
+
+ /**
+ * Get the event from the id.
+ *
+ * @param id the id of the event.
+ * @return the event.
+ */
+ public static Events fromId(Integer id) {
+ return Arrays.stream(values())
+ .filter(event -> event.getId().equals(id))
+ .findFirst()
+ .orElseThrow(() -> new IllegalArgumentException("No event found with id: " + id));
+ }
+
+ /**
+ * Get the code of the event.
+ * The code currently is the string representation of the id.
+ *
+ * @return the code of the event.
+ */
+ public String getCode() {
+ return String.valueOf(id);
+ }
+
+ /**
+ * Get the event from the code.
+ *
+ * @param code the code of the event.
+ * @return the event.
+ */
+ public static Events fromCode(String code) {
+ return Arrays.stream(values())
+ .filter(event -> event.getCode().equals(code))
+ .findFirst()
+ .orElseThrow(() -> new IllegalArgumentException("No event found with code: " + code));
+ }
+
+ /**
+ * Get the mime type of the current state will be stored in.
+ *
+ * @return the mime type of the current state will be stored in.
+ */
+ public MediaType getCurrentStateFormat() {
+ return currentStateFormat;
+ }
+
+ /**
+ * Get the mime type of the previous state will be stored in.
+ *
+ * @return the mime type of the previous state will be stored in.
+ */
+ public MediaType getPreviousStateFormat() {
+ return previousStateFormat;
+ }
+
+ /**
+ * Identify if the current status supports restoring the values.
+ * This is mostly for restoring xml metadata records.
+ *
+ * @return true if supported.
+ */
+ public boolean isSupportedRestoreStatus() {
+ return isSupportedRestoreStatus;
+ }
+
+ /**
+ * Get an array of the event that support restoring statuses.
+ * This is mostly for restoring xml metadata records.
+ *
+ * @return list of events with isSupportedRestoreStatus set to true.
+ */
+ public static Events[] getSupportedRestoreStatuses() {
+ return Arrays.stream(values())
+ .filter(Events::isSupportedRestoreStatus)
+ .toArray(Events[]::new);
+ }
}
}
diff --git a/domain/src/main/java/org/fao/geonet/domain/page/Page.java b/domain/src/main/java/org/fao/geonet/domain/page/Page.java
index fb3247a4308..563e58663fa 100644
--- a/domain/src/main/java/org/fao/geonet/domain/page/Page.java
+++ b/domain/src/main/java/org/fao/geonet/domain/page/Page.java
@@ -23,10 +23,13 @@
package org.fao.geonet.domain.page;
import java.io.Serializable;
+import java.util.LinkedHashSet;
import java.util.List;
+import java.util.Set;
import javax.annotation.Nullable;
import javax.persistence.Basic;
+import javax.persistence.CascadeType;
import javax.persistence.CollectionTable;
import javax.persistence.Column;
import javax.persistence.ElementCollection;
@@ -35,10 +38,14 @@
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.FetchType;
+import javax.persistence.JoinColumn;
+import javax.persistence.JoinTable;
import javax.persistence.Lob;
+import javax.persistence.ManyToMany;
import javax.persistence.Table;
import org.fao.geonet.domain.GeonetEntity;
+import org.fao.geonet.domain.Group;
import org.hibernate.annotations.Type;
/**
@@ -56,6 +63,7 @@ public class Page extends GeonetEntity implements Serializable {
private PageFormat format;
private List sections;
private PageStatus status;
+ private Set groups = new LinkedHashSet<>();
private String label;
private String icon;
@@ -64,7 +72,7 @@ public Page() {
}
- public Page(PageIdentity pageIdentity, byte[] data, String link, PageFormat format, List sections, PageStatus status, String label, String icon) {
+ public Page(PageIdentity pageIdentity, byte[] data, String link, PageFormat format, List sections, PageStatus status, String label, String icon, Set groups) {
super();
this.pageIdentity = pageIdentity;
this.data = data;
@@ -74,10 +82,11 @@ public Page(PageIdentity pageIdentity, byte[] data, String link, PageFormat form
this.status = status;
this.label = label;
this.icon = icon;
+ this.groups = groups;
}
public enum PageStatus {
- PUBLIC, PUBLIC_ONLY, PRIVATE, HIDDEN;
+ PUBLIC, PUBLIC_ONLY, GROUPS, PRIVATE, HIDDEN;
}
public enum PageFormat {
@@ -146,6 +155,28 @@ public String getIcon() {
return icon;
}
+ /**
+ * Get all the page's groups.
+ *
+ * @return all the page's groups.
+ */
+ @ManyToMany(fetch = FetchType.EAGER, cascade = {CascadeType.DETACH, CascadeType.PERSIST, CascadeType.REFRESH})
+ @JoinTable(name = "spg_page_group", joinColumns = {@JoinColumn(name = "language"), @JoinColumn(name = "linktext")},
+ inverseJoinColumns = {@JoinColumn(name = "groupid", referencedColumnName = "id", unique = false)})
+ public Set getGroups() {
+ return groups;
+ }
+
+ /**
+ * Set all the page's groups.
+ *
+ * @param groups all the page's groups.
+ * @return this group object
+ */
+ public void setGroups(Set groups) {
+ this.groups = groups;
+ }
+
public void setPageIdentity(PageIdentity pageIdentity) {
this.pageIdentity = pageIdentity;
}
diff --git a/domain/src/main/java/org/fao/geonet/repository/GroupRepository.java b/domain/src/main/java/org/fao/geonet/repository/GroupRepository.java
index 617de58a3dc..5faa3f84f12 100644
--- a/domain/src/main/java/org/fao/geonet/repository/GroupRepository.java
+++ b/domain/src/main/java/org/fao/geonet/repository/GroupRepository.java
@@ -52,6 +52,14 @@ public interface GroupRepository extends GeonetRepository, Group
@Nullable
Group findByEmail(@Nonnull String email);
+ /**
+ * Find all groups with a minimumProfileForPrivileges not equal to null.
+ * These groups are "restricted".
+ *
+ * @return a list of groups with a minimumProfileForPrivileges not equal to null
+ */
+ @Nullable
+ List findByMinimumProfileForPrivilegesNotNull();
public
@Nullable
diff --git a/domain/src/main/java/org/fao/geonet/repository/MetadataDraftRepository.java b/domain/src/main/java/org/fao/geonet/repository/MetadataDraftRepository.java
index b382a4dd876..9e226a86a32 100644
--- a/domain/src/main/java/org/fao/geonet/repository/MetadataDraftRepository.java
+++ b/domain/src/main/java/org/fao/geonet/repository/MetadataDraftRepository.java
@@ -30,6 +30,8 @@
import org.fao.geonet.domain.MetadataDraft;
import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
+import org.springframework.data.jpa.repository.Query;
+import org.springframework.data.repository.query.Param;
/**
* Data Access object for the {@link MetadataDraft} entities.
@@ -68,4 +70,51 @@ public interface MetadataDraftRepository
*/
@Nonnull
List findAllByHarvestInfo_Uuid(@Nonnull String uuid);
+
+ /**
+ * Get the metadata after performing a search and replace on it.
+ * @param uuid The UUID of the metadata to search for.
+ * @param search The string to search for.
+ * @param replace The string to replace the search string with.
+ * @return The metadata with the search and replace applied.
+ */
+ @Query(value = "SELECT replace(data, :search, :replace) FROM MetadataDraft m " +
+ "WHERE uuid = :uuid",
+ nativeQuery = true)
+ String selectOneWithSearchAndReplace(
+ @Param("uuid") String uuid,
+ @Param("search") String search,
+ @Param("replace") String replace);
+
+ /**
+ * Get the metadata after performing a regex search and replace on it.
+ * @param uuid The UUID of the metadata to search for.
+ * @param search The string to search for.
+ * @param replace The string to replace the search string with.
+ * @return The metadata with the search and replace applied.
+ */
+ @Query(value = "SELECT regexp_replace(data, :pattern, :replace) FROM MetadataDraft m " +
+ "WHERE uuid = :uuid",
+ nativeQuery = true)
+ String selectOneWithRegexSearchAndReplace(
+ @Param("uuid") String uuid,
+ @Param("pattern") String search,
+ @Param("replace") String replace);
+
+ /**
+ * Get the metadata after performing a regex search and replace on it with regex flags.
+ * @param uuid The UUID of the metadata to search for.
+ * @param search The string to search for.
+ * @param replace The string to replace the search string with.
+ * @param flags The regex flags to use.
+ * @return The metadata with the search and replace applied.
+ */
+ @Query(value = "SELECT regexp_replace(data, :pattern, :replace, :flags) FROM MetadataDraft m " +
+ "WHERE uuid = :uuid",
+ nativeQuery = true)
+ String selectOneWithRegexSearchAndReplaceWithFlags(
+ @Param("uuid") String uuid,
+ @Param("pattern") String search,
+ @Param("replace") String replace,
+ @Param("flags") String flags);
}
diff --git a/domain/src/main/java/org/fao/geonet/repository/MetadataRepository.java b/domain/src/main/java/org/fao/geonet/repository/MetadataRepository.java
index 3a8fefb4955..eba9c43f0ee 100644
--- a/domain/src/main/java/org/fao/geonet/repository/MetadataRepository.java
+++ b/domain/src/main/java/org/fao/geonet/repository/MetadataRepository.java
@@ -76,8 +76,13 @@ public interface MetadataRepository extends GeonetRepository