From 6405768cc164005155a790293d59a38f230105ab Mon Sep 17 00:00:00 2001
From: Adrian Fish
Date: Fri, 3 Jan 2025 10:35:48 +0000
Subject: [PATCH] SAK-50754 sitearchive Cleanup some code and add unit tests
https://sakaiproject.atlassian.net/browse/SAK-50754
---
common/archive-impl/impl2/pom.xml | 43 ++
.../archive/impl/SiteArchiver.java | 25 +-
.../sakaiproject/archive/impl/SiteMerger.java | 405 +++++++-----------
.../test/ArchiveServiceTestConfiguration.java | 45 ++
.../impl/test/ArchiveServiceTests.java | 285 ++++++++++++
.../test/resources/archive/conversations.xml | 5 +
.../src/test/resources/archive/rubrics.xml | 6 +
.../impl2/src/test/resources/archive/site.xml | 2 +
.../impl2/src/test/resources/archive/user.xml | 2 +
.../src/test/resources/hibernate.properties | 19 +
.../org/sakaiproject/site/impl/BaseSite.java | 4 +-
.../test/SakaiTestConfiguration.java | 5 +-
12 files changed, 589 insertions(+), 257 deletions(-)
create mode 100644 common/archive-impl/impl2/src/test/org/sakaiproject/archive/impl/test/ArchiveServiceTestConfiguration.java
create mode 100644 common/archive-impl/impl2/src/test/org/sakaiproject/archive/impl/test/ArchiveServiceTests.java
create mode 100644 common/archive-impl/impl2/src/test/resources/archive/conversations.xml
create mode 100644 common/archive-impl/impl2/src/test/resources/archive/rubrics.xml
create mode 100644 common/archive-impl/impl2/src/test/resources/archive/site.xml
create mode 100644 common/archive-impl/impl2/src/test/resources/archive/user.xml
create mode 100644 common/archive-impl/impl2/src/test/resources/hibernate.properties
diff --git a/common/archive-impl/impl2/pom.xml b/common/archive-impl/impl2/pom.xml
index d2a016d5a6db..3d739f215c94 100644
--- a/common/archive-impl/impl2/pom.xml
+++ b/common/archive-impl/impl2/pom.xml
@@ -51,8 +51,51 @@
            <groupId>org.springframework</groupId>
            <artifactId>spring-tx</artifactId>
        </dependency>
+        <dependency>
+            <groupId>org.sakaiproject.kernel</groupId>
+            <artifactId>sakai-kernel-test</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>javax.servlet</groupId>
+            <artifactId>javax.servlet-api</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-context</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-test</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.hibernate</groupId>
+            <artifactId>hibernate-core</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.hsqldb</groupId>
+            <artifactId>hsqldb</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-orm</artifactId>
+            <scope>test</scope>
+        </dependency>
    </dependencies>
+    <build>
+        <resources>
+            <resource><directory>${basedir}/src/webapp</directory></resource>
+        </resources>
+        <testResources>
+            <testResource><directory>${basedir}/src/test/resources</directory></testResource>
+        </testResources>
+    </build>
diff --git a/common/archive-impl/impl2/src/java/org/sakaiproject/archive/impl/SiteArchiver.java b/common/archive-impl/impl2/src/java/org/sakaiproject/archive/impl/SiteArchiver.java
index 95c9228a8a67..be7e37255720 100644
--- a/common/archive-impl/impl2/src/java/org/sakaiproject/archive/impl/SiteArchiver.java
+++ b/common/archive-impl/impl2/src/java/org/sakaiproject/archive/impl/SiteArchiver.java
@@ -153,7 +153,7 @@ public String archive(String siteId, String m_storagePath, String fromSystem)
dir.mkdirs();
- // for each registered ResourceService, give it a chance to archve
+ // for each registered EntityProducer, give it a chance to archive
Collection producers = m_entityManager.getEntityProducers();
for (EntityProducer producer : producers) {
if (producer == null) continue;
@@ -248,8 +248,8 @@ public String archive(String siteId, String m_storagePath, String fromSystem)
results.append("<===== End =====>\n\n");
stack.pop();
- Xml.writeDocument(doc, m_storagePath + siteId + "-archive/site.xml");
+ Xml.writeDocument(doc, m_storagePath + siteId + "-archive/site.xml");
// *** Users
doc = Xml.createDocument();
@@ -293,14 +293,13 @@ public String archive(String siteId, String m_storagePath, String fromSystem)
/**
- * Archive the site definition.
- * @param site the site.
- * @param doc The document to contain the xml.
- * @param stack The stack of elements, the top of which will be the containing
- * element of the "site" element.
- */
-
- protected String archiveSite(Site site, Document doc, Stack stack, String fromSystem)
+ * Archive the site definition.
+ * @param site the site.
+ * @param doc The document to contain the xml.
+ * @param stack The stack of elements, the top of which will be the containing
+ * element of the "site" element.
+ */
+ private String archiveSite(Site site, Document doc, Stack stack, String fromSystem)
{
Element element = doc.createElement(SiteService.APPLICATION_ID);
((Element)stack.peek()).appendChild(element);
@@ -322,7 +321,7 @@ protected String archiveSite(Site site, Document doc, Stack stack, String fromSy
if ( pattern != null ) {
NodeList nl = siteNode.getElementsByTagName("property");
- List toRemove = new ArrayList();
+ List toRemove = new ArrayList<>();
for(int i = 0; i < nl.getLength(); i++) {
Element proptag = (Element)nl.item(i);
@@ -397,11 +396,11 @@ protected String archiveSite(Site site, Document doc, Stack stack, String fromSy
}
catch(Exception any)
{
- log.warn("archve: exception archiving site: {}: {}", site.getId(), any);
+ log.warn("Exception archiving site: {}: {}", site.getId(), any.toString());
}
stack.pop();
-
+
return "archiving Site: " + site.getId() + "\n";
} // archiveSite
diff --git a/common/archive-impl/impl2/src/java/org/sakaiproject/archive/impl/SiteMerger.java b/common/archive-impl/impl2/src/java/org/sakaiproject/archive/impl/SiteMerger.java
index 358934186d5b..f356ceeec649 100644
--- a/common/archive-impl/impl2/src/java/org/sakaiproject/archive/impl/SiteMerger.java
+++ b/common/archive-impl/impl2/src/java/org/sakaiproject/archive/impl/SiteMerger.java
@@ -25,9 +25,11 @@
import java.util.HashSet;
import java.util.List;
import java.util.Map;
-import java.util.Vector;
+import java.util.Optional;
+import java.util.Set;
import lombok.extern.slf4j.Slf4j;
+import lombok.Setter;
import org.apache.commons.codec.binary.Base64;
@@ -37,7 +39,6 @@
import org.w3c.dom.NodeList;
import org.sakaiproject.archive.api.ArchiveService;
-import org.sakaiproject.authz.api.AuthzGroup;
import org.sakaiproject.authz.api.AuthzGroupService;
import org.sakaiproject.authz.api.SecurityService;
import org.sakaiproject.component.cover.ComponentManager;
@@ -56,42 +57,15 @@
import org.sakaiproject.util.Xml;
@Slf4j
+@Setter
public class SiteMerger {
- protected static HashMap userIdTrans = new HashMap();
- /**********************************************/
- /* Injected Dependencies */
- /**********************************************/
- protected AuthzGroupService m_authzGroupService = null;
- public void setAuthzGroupService(AuthzGroupService service) {
- m_authzGroupService = service;
- }
-
- protected UserDirectoryService m_userDirectoryService = null;
- public void setUserDirectoryService(UserDirectoryService service) {
- m_userDirectoryService = service;
- }
-
- protected SiteService m_siteService = null;
- public void setSiteService(SiteService service) {
- m_siteService = service;
- }
-
- protected SecurityService m_securityService = null;
- public void setSecurityService(SecurityService service) {
- m_securityService = service;
- }
-
- protected EntityManager m_entityManager = null;
- public void setEntityManager(EntityManager m_entityManager) {
- this.m_entityManager = m_entityManager;
- }
-
- protected ServerConfigurationService m_serverConfigurationService;
- public void setServerConfigurationService(ServerConfigurationService m_serverConfigurationService) {
- this.m_serverConfigurationService = m_serverConfigurationService;
- }
-
+ private AuthzGroupService authzGroupService;
+ private UserDirectoryService userDirectoryService;
+ private SiteService siteService;
+ private SecurityService securityService;
+ private EntityManager entityManager;
+ private ServerConfigurationService serverConfigurationService;
// only the resources created by the following roles will be imported
// role sets differ from system to system
@@ -104,15 +78,16 @@ public void setServerConfigurationService(ServerConfigurationService m_serverCon
private String[] new_toolIds = {"sakai.preferences", "sakai.online", "sakai.siteinfo", "sakai.sitesetup", "sakai.discussion"};
//SWG TODO I have a feeling this is a bug
- protected HashSet usersListAllowImport = new HashSet();
+ private Set<String> usersListAllowImport = new HashSet<>();
+
/**
- * Process a merge for the file, or if it's a directory, for all contained files (one level deep).
- * @param fileName The site name (for the archive file) to read from.
- * @param mergeId The id string to use to make ids in the merge consistent and unique.
- * @param creatorId The creator id
- * If null or blank, the date/time string of the merge is used.
- */
- //TODO Javadoc this
+ * Process a merge for the file, or if it's a directory, for all contained files (one level deep).
+ *
+ * @param fileName The site name (for the archive file) to read from.
+ * @param mergeId The id string to use to make ids in the merge consistent and unique.
+ * @param creatorId The creator id
+ * If null or blank, the date/time string of the merge is used.
+ */
public String merge(String fileName, String siteId, String creatorId, String m_storagePath,
boolean filterSakaiServices, String[] filteredSakaiServices, boolean filterSakaiRoles, String[] filteredSakaiRoles)
{
@@ -128,18 +103,23 @@ public String merge(String fileName, String siteId, String creatorId, String m_s
if ((file == null) || (!file.exists()))
{
results.append("file: " + fileName + " not found.\n");
- log.warn("merge(): file not found: " + file.getPath());
+ log.warn("merge(): file not found: {}", file.getPath());
return results.toString();
} else {
try {
// Path must be within tomcat.home (one up from SakaiHome)
- File baseLocation = new File(m_serverConfigurationService.getSakaiHomePath());
+ // TODO: This seems arbitrary. Why should the archive files be within Tomcat's
+ // directory structure?
+ File baseLocation = new File(serverConfigurationService.getSakaiHomePath());
if (!file.getCanonicalPath().startsWith(baseLocation.getParent())) {
throw new Exception();
}
} catch (Exception ex) {
+ // TODO: this exception is thrown either by the home path file creation or
+ // explicitly because the archive is not hosted inside Tomcat's directory structure.
+ // Why bother having this block here?
results.append("file: " + fileName + " not permitted.\n");
- log.warn("merge(): file not permitted: " + file.getPath());
+ log.warn("merge(): file not permitted: {}", file.getPath());
return results.toString();
}
}
@@ -154,104 +134,85 @@ public String merge(String fileName, String siteId, String creatorId, String m_s
files[0] = file;
}
- // track old to new attachment names
- Map attachmentNames = new HashMap();
-
- // The archive.xml is really a debug log, not actual archive data - it does not participate in any merge
- for (int i = 0; i < files.length; i++)
+ Site theSite = null;
+ try
{
- if ((files[i] != null) && (files[i].getPath().indexOf("archive.xml") != -1))
- {
- files[i] = null;
- break;
- }
+ theSite = siteService.getSite(siteId);
}
+ catch (IdUnusedException ignore) {
+ log.info("Site not found for id: {}. New site will be created.", siteId);
+ }
+
+ // track old to new attachment names
+ Map attachmentNames = new HashMap();
+
+ // The archive.xml is really a debug log, not actual archive data - it does not participate in any merge
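+ // user.xml, site.xml and attachment.xml are merged in a fixed order below, so all four names are excluded from the generic pass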
+ List<String> specialCases = List.of("archive.xml", "user.xml", "site.xml", "attachment.xml");
+
+ List<File> fileList = List.of(files);
// firstly, merge the users
- for (int i = 0; i < files.length; i++)
- {
- if ((files[i] != null) && (files[i].getPath().indexOf("user.xml") != -1))
- {
- processMerge(files[i].getPath(), siteId, results, attachmentNames, null, filterSakaiServices, filteredSakaiServices, filterSakaiRoles, filteredSakaiRoles);
- files[i] = null;
- break;
- }
- }
+ fileList.stream().filter(f -> f.getName().equals("user.xml")).findAny().ifPresent(f -> {
+ processMerge(f, siteId, results, attachmentNames, null, filterSakaiServices, filteredSakaiServices, filterSakaiRoles, filteredSakaiRoles);
+ });
// see if there's a site definition which we will process at the end.
- String siteFile = null;
- for (int i = 0; i < files.length; i++)
- {
- if ((files[i] != null) && (files[i].getPath().indexOf("site.xml") != -1))
- {
- siteFile = files[i].getPath();
- processMerge(files[i].getPath(), siteId, results, attachmentNames, creatorId, filterSakaiServices, filteredSakaiServices, filterSakaiRoles, filteredSakaiRoles);
- files[i] = null;
- break;
- }
- }
+ Optional<File> siteFile = fileList.stream().filter(f -> f.getName().equals("site.xml")).findAny().map(f -> {
+
+ processMerge(f, siteId, results, attachmentNames, creatorId, filterSakaiServices, filteredSakaiServices, filterSakaiRoles, filteredSakaiRoles);
+ return f;
+ });
// see if there's an attachments definition
- for (int i = 0; i < files.length; i++)
- {
- if ((files[i] != null) && (files[i].getPath().indexOf("attachment.xml") != -1))
- {
- processMerge(files[i].getPath(), siteId, results, attachmentNames, null, filterSakaiServices, filteredSakaiServices, filterSakaiRoles, filteredSakaiRoles);
- files[i] = null;
- break;
- }
- }
+ fileList.stream().filter(f -> f.getName().equals("attachment.xml")).findAny().ifPresent(f -> {
+ processMerge(f, siteId, results, attachmentNames, null, filterSakaiServices, filteredSakaiServices, filterSakaiRoles, filteredSakaiRoles);
+ });
// process each remaining file that is an .xml file
- for (int i = 0; i < files.length; i++)
- {
- if (files[i] != null)
- if (files[i].getPath().endsWith(".xml"))
- {
- processMerge(files[i].getPath(), siteId, results, attachmentNames, creatorId, filterSakaiServices, filteredSakaiServices, filterSakaiRoles, filteredSakaiRoles);
- }
- }
+ fileList.stream().filter(f -> !specialCases.contains(f.getName()) && f.getName().endsWith(".xml")).forEach(f -> {
+ processMerge(f, siteId, results, attachmentNames, creatorId, filterSakaiServices, filteredSakaiServices, filterSakaiRoles, filteredSakaiRoles);
+ });
- if (siteFile != null )
- {
- processMerge(siteFile, siteId, results, attachmentNames, creatorId, filterSakaiServices, filteredSakaiServices, filterSakaiRoles, filteredSakaiRoles);
- }
+ siteFile.ifPresent(f -> {
+ processMerge(f, siteId, results, attachmentNames, creatorId, filterSakaiServices, filteredSakaiServices, filterSakaiRoles, filteredSakaiRoles);
+ });
return results.toString();
-
} // merge
/**
- * Read in an archive file and merge the entries into the specified site.
- * @param fileName The site name (for the archive file) to read from.
- * @param siteId The id of the site to merge the content into.
- * @param results A buffer to accumulate result messages.
- * @param attachmentNames A map of old to new attachment names.
- * @param useIdTrans A map of old WorkTools id to new Ctools id
- * @param creatorId The creator id
- */
- protected void processMerge(String fileName, String siteId, StringBuilder results, Map attachmentNames, String creatorId, boolean filterSakaiService, String[] filteredSakaiService, boolean filterSakaiRoles, String[] filteredSakaiRoles)
+ *
+ * Read in an archive file and merge the entries into the specified site.
+ *
+ * @param file The archive file to read from.
+ * @param siteId The id of the site to merge the content into.
+ * @param results A buffer to accumulate result messages.
+ * @param attachmentNames A map of old to new attachment names.
+ * @param creatorId The creator id
+ */
+ private void processMerge(File file, String siteId, StringBuilder results, Map attachmentNames, String creatorId, boolean filterSakaiService, String[] filteredSakaiService, boolean filterSakaiRoles, String[] filteredSakaiRoles)
{
+ String filePath = file.getPath();
+
// correct for windows backslashes
- fileName = fileName.replace('\\', '/');
+ filePath = filePath.replace('\\', '/');
- if (log.isDebugEnabled())
- log.debug("merge(): processing file: " + fileName);
+ log.debug("merge(): processing file: {}", filePath);
Site theSite = null;
try
{
- theSite = m_siteService.getSite(siteId);
+ theSite = siteService.getSite(siteId);
}
catch (IdUnusedException ignore) {
log.info("Site not found for id:"+siteId+". New site will be created.");
}
// read the whole file into a DOM
- Document doc = Xml.readDocument(fileName);
+ Document doc = Xml.readDocument(filePath);
if (doc == null)
{
- results.append("Error reading xml from: " + fileName + "\n");
+ results.append("Error reading xml from: " + filePath + "\n");
return;
}
@@ -259,7 +220,7 @@ protected void processMerge(String fileName, String siteId, StringBuilder result
Element root = doc.getDocumentElement();
if (!root.getTagName().equals("archive"))
{
- results.append("File: " + fileName + " does not contain archive xml. Found this root tag: " + root.getTagName() + "\n");
+ results.append("File: " + filePath + " does not contain archive xml. Found this root tag: " + root.getTagName() + "\n");
return;
}
@@ -269,8 +230,7 @@ protected void processMerge(String fileName, String siteId, StringBuilder result
// the children
NodeList children = root.getChildNodes();
- final int length = children.getLength();
- for(int i = 0; i < length; i++)
+ for(int i = 0; i < children.getLength(); i++)
{
Node child = children.item(i);
if (child.getNodeType() != Node.ELEMENT_NODE) continue;
@@ -279,20 +239,8 @@ protected void processMerge(String fileName, String siteId, StringBuilder result
// look for site stuff
if (element.getTagName().equals(SiteService.APPLICATION_ID))
{
- //if the xml file is from WT site, merge it with the translated user ids
- //if (system.equalsIgnoreCase(ArchiveService.FROM_WT))
- // mergeSite(siteId, fromSite, element, userIdTrans, creatorId);
- //else
- mergeSite(siteId, fromSite, element, new HashMap()/*empty userIdMap */, creatorId, filterSakaiRoles, filteredSakaiRoles);
+ mergeSite(siteId, fromSite, element, creatorId, filterSakaiRoles, filteredSakaiRoles);
}
- else if (element.getTagName().equals(UserDirectoryService.APPLICATION_ID))
- { ;
- // Apparently, users have only been merged in they are from WorkTools...
- // Is this every going to be wanted in Sakai?
- // String msg = mergeUsers(element, userIdTrans);
- // results.append(msg);
- }
-
else
{
// we need a site now
@@ -316,10 +264,14 @@ else if (element.getTagName().equals(UserDirectoryService.APPLICATION_ID))
// get the service
try
{
+ // TODO: Do we need this call into the spring context when we have the entity
+ // producer architecture? The only time this would add value is if a service
+ // declared an EntityProducer bean but didn't bother to register it.
EntityProducer service = (EntityProducer) ComponentManager.get(serviceName);
+
if (service == null) {
// find the service using the EntityManager
- Collection entityProducers = m_entityManager.getEntityProducers();
+ Collection entityProducers = entityManager.getEntityProducers();
for (EntityProducer entityProducer : entityProducers) {
if (serviceName.equals(entityProducer.getClass().getName())
|| serviceName.equals(entityProducer.getLabel())
@@ -337,29 +289,29 @@ else if (element.getTagName().equals(UserDirectoryService.APPLICATION_ID))
if ((system.equalsIgnoreCase(ArchiveService.FROM_SAKAI) || system.equalsIgnoreCase(ArchiveService.FROM_SAKAI_2_8))) {
if (checkSakaiService(filterSakaiService, filteredSakaiService, serviceName)) {
// checks passed so now we attempt to do the merge
- log.debug("Merging archive data for {} ({}) to site {}", serviceName, fileName, siteId);
- msg = service.merge(siteId, element, fileName, fromSite, creatorId, attachmentNames, new HashMap() /* empty userIdTran map */, usersListAllowImport);
+ log.debug("Merging archive data for {} ({}) to site {}", serviceName, filePath, siteId);
+ msg = service.merge(siteId, element, filePath, fromSite, creatorId, attachmentNames, new HashMap() /* empty userIdTran map */, usersListAllowImport);
} else {
- log.warn("Skipping merge archive data for "+serviceName+" ("+fileName+") to site "+siteId+", checked filter failed (filtersOn="+filterSakaiService+", filters="+Arrays.toString(filteredSakaiService)+")");
+ log.warn("Skipping merge archive data for {} ({}) to site {}, checked filter failed (filtersOn={}, filters={})", serviceName, filePath, siteId, filterSakaiService, Arrays.toString(filteredSakaiService));
}
} else {
- log.warn("Skipping archive data for for "+serviceName+" ("+fileName+") to site "+siteId+", this does not appear to be a sakai archive");
+ log.warn("Skipping archive data for for {} ({}) to site {}, this does not appear to be a sakai archive", serviceName, filePath, siteId);
}
} else {
- log.warn("Skipping archive data for for "+serviceName+" ("+fileName+") to site "+siteId+", no service (EntityProducer) could be found to deal with this data");
+ log.warn("Skipping archive data for for {} ({}) to site {}, no service (EntityProducer) could be found to deal with this data", serviceName, filePath, siteId);
}
results.append(msg);
}
catch (Throwable t)
{
- results.append("Error merging: " + serviceName + " in file: " + fileName + " : " + t.toString() + "\n");
- log.warn("Error merging: " + serviceName + " in file: " + fileName + " : " + t.toString(),t);
+ results.append("Error merging: " + serviceName + " in file: " + filePath + " : " + t.toString() + "\n");
+ log.warn("Error merging: {} in file: {} : {}", serviceName, filePath, t.toString());
}
}
catch (Throwable t)
{
- results.append("Did not recognize the resource service: " + serviceName + " in file: " + fileName + "\n");
- log.warn("Did not recognize the resource service: " + serviceName + " in file: " + fileName, t);
+ results.append("Did not recognize the resource service: " + serviceName + " in file: " + filePath + "\n");
+ log.warn("Did not recognize the resource service: {} in file: {} : {}", serviceName, filePath, t.toString());
}
}
}
@@ -374,7 +326,7 @@ else if (element.getTagName().equals(UserDirectoryService.APPLICATION_ID))
* @param element The XML DOM tree of messages to merge.
* @param creatorId The creator id
*/
- protected void mergeSite(String siteId, String fromSiteId, Element element, HashMap useIdTrans, String creatorId, boolean filterSakaiRoles, String[] filteredSakaiRoles)
+ private void mergeSite(String siteId, String fromSiteId, Element element, String creatorId, boolean filterSakaiRoles, String[] filteredSakaiRoles)
{
String source = "";
@@ -386,8 +338,7 @@ protected void mergeSite(String siteId, String fromSiteId, Element element, Hash
}
NodeList children = element.getChildNodes();
- final int length = children.getLength();
- for(int i = 0; i < length; i++)
+ for(int i = 0; i < children.getLength(); i++)
{
Node child = children.item(i);
if (child.getNodeType() != Node.ELEMENT_NODE) continue;
@@ -395,8 +346,7 @@ protected void mergeSite(String siteId, String fromSiteId, Element element, Hash
if (!element2.getTagName().equals("site")) continue;
NodeList toolChildren = element2.getElementsByTagName("tool");
- final int tLength = toolChildren.getLength();
- for(int i2 = 0; i2 < tLength; i2++)
+ for(int i2 = 0; i2 < toolChildren.getLength(); i2++)
{
Element element3 = (Element) toolChildren.item(i2);
String toolId = element3.getAttribute("toolId");
@@ -414,30 +364,29 @@ protected void mergeSite(String siteId, String fromSiteId, Element element, Hash
// merge the site info first
try
{
- m_siteService.merge(siteId, element2, creatorId);
+ siteService.merge(siteId, element2, creatorId);
mergeSiteInfo(element2, siteId);
}
catch(Exception any)
{
- log.warn(any.getMessage(), any);
+ log.warn("Exception merging site: {}", any.toString());
}
Site site = null;
try
{
- site = m_siteService.getSite(siteId);
+ site = siteService.getSite(siteId);
}
catch (IdUnusedException e)
{
- log.warn(this + "The site with id " + siteId + " doesn't exit", e);
+ log.warn("The site with id {} doesn't exist: {}", siteId, e.toString());
return;
}
if (site != null)
{
NodeList children2 = element2.getChildNodes();
- final int length2 = children2.getLength();
- for(int i2 = 0; i2 < length2; i2++)
+ for(int i2 = 0; i2 < children2.getLength(); i2++)
{
Node child2 = children2.item(i2);
if (child2.getNodeType() != Node.ELEMENT_NODE) continue;
@@ -445,10 +394,10 @@ protected void mergeSite(String siteId, String fromSiteId, Element element, Hash
if (!element3.getTagName().equals("roles")) continue;
try {
- mergeSiteRoles(element3, siteId, useIdTrans, filterSakaiRoles, filteredSakaiRoles);
+ mergeSiteRoles(element3, siteId, filterSakaiRoles, filteredSakaiRoles);
}
catch (PermissionException e1) {
- log.warn(e1.getMessage(), e1);
+ log.warn("Permission exception merging site roles: {}", e1.toString());
}
}
}
@@ -456,66 +405,65 @@ protected void mergeSite(String siteId, String fromSiteId, Element element, Hash
} // mergeSite
/**
- * Merge the site info like description from the site part of the archive file into the site service.
- * @param element The XML DOM tree of messages to merge.
- * @param siteId The id of the site getting imported into.
- */
- protected void mergeSiteInfo(Element el, String siteId)
+ * Merge site info, such as the description, from the site element of the archive file into the site service.
+ * @param el The site element of the archive DOM.
+ * @param siteId The id of the site being imported into.
+ */
+ private void mergeSiteInfo(Element el, String siteId)
throws IdInvalidException, IdUsedException, PermissionException, IdUnusedException, InUseException
{
// check security (throws if not permitted)
- unlock(SiteService.SECURE_UPDATE_SITE, m_siteService.siteReference(siteId));
+ unlock(SiteService.SECURE_UPDATE_SITE, siteService.siteReference(siteId));
- Site edit = m_siteService.getSite(siteId);
+ Site edit = siteService.getSite(siteId);
String desc = el.getAttribute("description-enc");
try
{
- byte[] decoded = Base64.decodeBase64(desc.getBytes("UTF-8"));
- byte[] filteredDecoded = decoded;
- for(int i=0; i<decoded.length; i++)
diff --git a/common/archive-impl/impl2/src/test/org/sakaiproject/archive/impl/test/ArchiveServiceTests.java b/common/archive-impl/impl2/src/test/org/sakaiproject/archive/impl/test/ArchiveServiceTests.java
new file mode 100644
--- /dev/null
+++ b/common/archive-impl/impl2/src/test/org/sakaiproject/archive/impl/test/ArchiveServiceTests.java
@@ -0,0 +1,285 @@
+ EntityProducer rubricsProducer = mock(EntityProducer.class);
+ when(rubricsProducer.getLabel()).thenReturn("rubrics");
+ when(rubricsProducer.willArchiveMerge()).thenReturn(true);
+ when(rubricsProducer.archive(any(), any(), any(), any(), any())).thenAnswer(invocation -> {
+
+ Document doc = (Document) invocation.getArgument(1);
+ Stack stack = (Stack) invocation.getArgument(2);
+ ((Element) stack.peek()).appendChild(doc.createElement("rubrics"));
+ return "";
+ });
+
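+ // A second mocked producer, so the archive should emit one XML file per tool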
+ EntityProducer conversationsProducer = mock(EntityProducer.class);
+ when(conversationsProducer.getLabel()).thenReturn("conversations");
+ when(conversationsProducer.willArchiveMerge()).thenReturn(true);
+ when(conversationsProducer.archive(any(), any(), any(), any(), any())).thenAnswer(invocation -> {
+
+ Document doc = (Document) invocation.getArgument(1);
+ Stack stack = (Stack) invocation.getArgument(2);
+ ((Element) stack.peek()).appendChild(doc.createElement("conversations"));
+ return "";
+ });
+
+ when(entityManager.getEntityProducers()).thenReturn(List.of(rubricsProducer, conversationsProducer));
+
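+ // Point the service's storage path at a throwaway temporary folder for this run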
+ File archiveDir = folder.newFolder("archive");
+
+ ((ArchiveService2Impl) AopTestUtils.getTargetObject(archiveService)).setStoragePath(archiveDir.getCanonicalPath() + File.separator);
+
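+ // The archiver stamps its output with a Time from the (deprecated) TimeService, so stub one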
+ Time time = mock(Time.class);
+ when(time.toString()).thenReturn("deprecated time :(");
+ when(timeService.newTime()).thenReturn(time);
+
+ try {
+ when(authzGroupService.getAuthzGroup(any())).thenReturn(mock(AuthzGroup.class));
+ } catch (Exception e) {
+ }
+
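+ // A single mocked site member whose XML should end up in user.xml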
+ String user1Id = UUID.randomUUID().toString();
+ String user1Eid = "user1";
+
+ User user1 = mock(User.class);
+ when(user1.getId()).thenReturn(user1Id);
+ when(user1.getEid()).thenReturn(user1Eid);
+ when(user1.toXml(any(), any())).thenAnswer(invocation -> {
+
+ Document doc = (Document) invocation.getArgument(0);
+ Stack stack = (Stack) invocation.getArgument(1);
+
+ Element userEl = doc.createElement("user");
+ ((Element) stack.peek()).appendChild(userEl);
+ return userEl;
+ });
+
+ when(userDirectoryService.getUsers(any())).thenReturn(List.of(user1));
+
+ Site site = mock(Site.class);
+ when(site.getId()).thenReturn(siteId);
+ when(site.toXml(any(), any())).thenAnswer(invocation -> {
+
+ Document doc = (Document) invocation.getArgument(0);
+ Stack stack = (Stack) invocation.getArgument(1);
+
+ Element siteEl = doc.createElement("site");
+ ((Element) stack.peek()).appendChild(siteEl);
+ return siteEl;
+ });
+
+ try {
+ when(siteService.getSite(siteId)).thenReturn(site);
+ } catch (Exception e) {
+ }
+
+ when(serverConfigurationService.getString("archive.toolproperties.excludefilter","password|secret")).thenReturn("password|secret");
+
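+ // Run the archive, then verify the directory layout and each emitted document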
+ archiveService.archive(siteId);
+
+ File[] files = archiveDir.listFiles();
+ assertEquals(1, files.length);
+
+ File siteArchiveDir = files[0];
+
+ assertEquals(siteId + "-archive", siteArchiveDir.getName());
+
+ assertEquals(5, siteArchiveDir.list().length);
+
+ List<String> fileNames = Arrays.asList(siteArchiveDir.list());
+
+ assertTrue(fileNames.contains("site.xml"));
+ Document doc = Xml.readDocument(siteArchiveDir + File.separator + "site.xml");
+ assertNotNull(doc);
+ Element rootElement = doc.getDocumentElement();
+ assertEquals("archive", rootElement.getTagName());
+ NodeList siteNodes = rootElement.getElementsByTagName("site");
+ assertEquals(1, siteNodes.getLength());
+
+ assertTrue(fileNames.contains("user.xml"));
+ doc = Xml.readDocument(siteArchiveDir + File.separator + "user.xml");
+ assertNotNull(doc);
+ rootElement = doc.getDocumentElement();
+ assertEquals("archive", rootElement.getTagName());
+ NodeList usersNodes = rootElement.getElementsByTagName(UserDirectoryService.APPLICATION_ID);
+ assertEquals(1, usersNodes.getLength());
+ NodeList userNodes = ((Element) usersNodes.item(0)).getElementsByTagName("user");
+ assertEquals(1, userNodes.getLength());
+
+ assertTrue(fileNames.contains("archive.xml"));
+ doc = Xml.readDocument(siteArchiveDir + File.separator + "archive.xml");
+ assertNotNull(doc);
+ rootElement = doc.getDocumentElement();
+ assertEquals("archive", rootElement.getTagName());
+ NodeList logNodes = rootElement.getElementsByTagName("log");
+ assertEquals(1, logNodes.getLength());
+
+ assertTrue(fileNames.contains(rubricsProducer.getLabel() + ".xml"));
+ doc = Xml.readDocument(siteArchiveDir + File.separator + "rubrics.xml");
+ assertNotNull(doc);
+ rootElement = doc.getDocumentElement();
+ assertEquals("archive", rootElement.getTagName());
+ NodeList rubricsNodes = rootElement.getElementsByTagName("rubrics");
+ assertEquals(1, rubricsNodes.getLength());
+
+ assertTrue(fileNames.contains(conversationsProducer.getLabel() + ".xml"));
+ doc = Xml.readDocument(siteArchiveDir + File.separator + "conversations.xml");
+ assertNotNull(doc);
+ rootElement = doc.getDocumentElement();
+ assertEquals("archive", rootElement.getTagName());
+ NodeList conversationsNodes = rootElement.getElementsByTagName("conversations");
+ assertEquals(1, conversationsNodes.getLength());
+ }
+
+ @Test
+ public void merge() throws IOException, URISyntaxException {
+
+ String siteId = "xyz";
+
+ File archiveDir = folder.newFolder("archive");
+
+ // Set the sakai home path to be the topmost component of the archive dir path. So,
+ // definitely above the archive directory
+ String sakaiHomePath = File.separator + archiveDir.toPath().subpath(0,1).toString();
+ when(serverConfigurationService.getSakaiHomePath()).thenReturn(sakaiHomePath);
+
+ File siteArchiveDir = new File(archiveDir, siteId + "-archive");
+ siteArchiveDir.mkdir();
+
+ ((ArchiveService2Impl) AopTestUtils.getTargetObject(archiveService)).setStoragePath(archiveDir.getCanonicalPath() + File.separator);
+
+ String archiveBasePath = siteArchiveDir.getCanonicalPath();
+
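+ // Copy the canned XML fixtures into the site archive directory, as a real archive run would have written them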
+ Path siteXmlPath = Paths.get(Objects.requireNonNull(ArchiveServiceTests.class.getResource("/archive/site.xml")).toURI());
+ assertNotNull(siteXmlPath);
+ Path archiveSiteXmlPath = Paths.get(archiveBasePath, File.separator, "site.xml");
+ Files.copy(siteXmlPath, archiveSiteXmlPath, StandardCopyOption.REPLACE_EXISTING);
+
+ Path userXmlPath = Paths.get(Objects.requireNonNull(ArchiveServiceTests.class.getResource("/archive/user.xml")).toURI());
+ assertNotNull(userXmlPath);
+ Path archiveUserXmlPath = Paths.get(archiveBasePath, File.separator, "user.xml");
+ Files.copy(userXmlPath, archiveUserXmlPath, StandardCopyOption.REPLACE_EXISTING);
+
+ Path rubricsXmlPath = Paths.get(Objects.requireNonNull(ArchiveServiceTests.class.getResource("/archive/rubrics.xml")).toURI());
+ assertNotNull(rubricsXmlPath);
+ Path archiveRubricsXmlPath = Paths.get(archiveBasePath, File.separator, "rubrics.xml");
+ Files.copy(rubricsXmlPath, archiveRubricsXmlPath, StandardCopyOption.REPLACE_EXISTING);
+
+ Path conversationsXmlPath = Paths.get(Objects.requireNonNull(ArchiveServiceTests.class.getResource("/archive/conversations.xml")).toURI());
+ assertNotNull(conversationsXmlPath);
+ Path archiveConversationsXmlPath = Paths.get(archiveBasePath, File.separator, "conversations.xml");
+ Files.copy(conversationsXmlPath, archiveConversationsXmlPath, StandardCopyOption.REPLACE_EXISTING);
+
+ String toSiteId = "abc";
+
+ Site site = mock(Site.class);
+ try {
+ when(siteService.getSite(toSiteId)).thenReturn(site);
+ } catch (Exception e) {
+ }
+
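+ // Register one producer per payload type present in the fixtures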
+ EntityProducer userProducer = mock(EntityProducer.class);
+ when(userProducer.getLabel()).thenReturn(UserDirectoryService.APPLICATION_ID);
+ when(userProducer.willArchiveMerge()).thenReturn(true);
+
+ EntityProducer rubricsProducer = mock(EntityProducer.class);
+ when(rubricsProducer.getLabel()).thenReturn("rubrics");
+ when(rubricsProducer.willArchiveMerge()).thenReturn(true);
+
+ EntityProducer conversationsProducer = mock(EntityProducer.class);
+ when(conversationsProducer.getLabel()).thenReturn("conversations");
+ when(conversationsProducer.willArchiveMerge()).thenReturn(true);
+
+ when(entityManager.getEntityProducers()).thenReturn(List.of(userProducer, rubricsProducer, conversationsProducer));
+
+ archiveService.merge(siteId + "-archive", toSiteId, "admin");
+
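+ // Each producer should be offered exactly one merge call for its own payload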
+ verify(userProducer).merge(any(), any(), any(), any(), any(), any(), any(), any());
+ verify(rubricsProducer).merge(any(), any(), any(), any(), any(), any(), any(), any());
+ verify(conversationsProducer).merge(any(), any(), any(), any(), any(), any(), any(), any());
+ }
+}
diff --git a/common/archive-impl/impl2/src/test/resources/archive/conversations.xml b/common/archive-impl/impl2/src/test/resources/archive/conversations.xml
new file mode 100644
index 000000000000..88ef43f84aac
--- /dev/null
+++ b/common/archive-impl/impl2/src/test/resources/archive/conversations.xml
@@ -0,0 +1,5 @@
+Let's discuss aliens, right here.
+]]>It's philosophy, innit?
+]]>Does anybody know where the toilets actually are?
+]]>sporting stuff
+]]>
\ No newline at end of file
diff --git a/common/archive-impl/impl2/src/test/resources/archive/rubrics.xml b/common/archive-impl/impl2/src/test/resources/archive/rubrics.xml
new file mode 100644
index 000000000000..2ed007936cb0
--- /dev/null
+++ b/common/archive-impl/impl2/src/test/resources/archive/rubrics.xml
@@ -0,0 +1,6 @@
+
+spin it around
+]]>some foods
+]]>rackets
+]]>pitch
+]]>
\ No newline at end of file
diff --git a/common/archive-impl/impl2/src/test/resources/archive/site.xml b/common/archive-impl/impl2/src/test/resources/archive/site.xml
new file mode 100644
index 000000000000..1862d20eebec
--- /dev/null
+++ b/common/archive-impl/impl2/src/test/resources/archive/site.xml
@@ -0,0 +1,2 @@
+
+
\ No newline at end of file
diff --git a/common/archive-impl/impl2/src/test/resources/archive/user.xml b/common/archive-impl/impl2/src/test/resources/archive/user.xml
new file mode 100644
index 000000000000..42b23d9eaf43
--- /dev/null
+++ b/common/archive-impl/impl2/src/test/resources/archive/user.xml
@@ -0,0 +1,2 @@
+
+
\ No newline at end of file
diff --git a/common/archive-impl/impl2/src/test/resources/hibernate.properties b/common/archive-impl/impl2/src/test/resources/hibernate.properties
new file mode 100644
index 000000000000..75c0aac3e5a1
--- /dev/null
+++ b/common/archive-impl/impl2/src/test/resources/hibernate.properties
@@ -0,0 +1,19 @@
+# Base Hibernate settings
+hibernate.show_sql=false
+hibernate.hbm2ddl.auto=create
+hibernate.enable_lazy_load_no_trans=true
+hibernate.cache.use_second_level_cache=false
+hibernate.current_session_context_class=org.springframework.orm.hibernate5.SpringSessionContext
+
+# Connection definition to the HSQLDB database
+hibernate.connection.driver_class=org.hsqldb.jdbcDriver
+hibernate.connection.url=jdbc:hsqldb:mem:test
+hibernate.dialect=org.hibernate.dialect.HSQLDialect
+hibernate.connection.username=sa
+hibernate.connection.password=
+
+#hibernate.connection.driver_class=com.mysql.jdbc.Driver
+#hibernate.connection.url=jdbc:mysql://localhost:3306/sakai?useUnicode=true&characterEncoding=UTF-8
+#hibernate.dialect=org.hibernate.dialect.MySQL5InnoDBDialect
+#hibernate.connection.username=sakai
+#hibernate.connection.password=sakai
diff --git a/kernel/kernel-impl/src/main/java/org/sakaiproject/site/impl/BaseSite.java b/kernel/kernel-impl/src/main/java/org/sakaiproject/site/impl/BaseSite.java
index af036580c612..bb4953e74de4 100644
--- a/kernel/kernel-impl/src/main/java/org/sakaiproject/site/impl/BaseSite.java
+++ b/kernel/kernel-impl/src/main/java/org/sakaiproject/site/impl/BaseSite.java
@@ -1436,9 +1436,7 @@ public boolean isPubView()
return m_pubView;
}
- /**
- * {@inheritDoc}
- */
+ @Override
public Element toXml(Document doc, Stack stack)
{
Element site = doc.createElement("site");
diff --git a/kernel/kernel-test/src/main/java/org/sakaiproject/test/SakaiTestConfiguration.java b/kernel/kernel-test/src/main/java/org/sakaiproject/test/SakaiTestConfiguration.java
index f42b2a1ea351..8fa0dfab8ca4 100644
--- a/kernel/kernel-test/src/main/java/org/sakaiproject/test/SakaiTestConfiguration.java
+++ b/kernel/kernel-test/src/main/java/org/sakaiproject/test/SakaiTestConfiguration.java
@@ -37,6 +37,7 @@
import org.sakaiproject.hibernate.AssignableUUIDGenerator;
import org.sakaiproject.memory.api.MemoryService;
import org.sakaiproject.springframework.orm.hibernate.AdditionalHibernateMappings;
+import org.sakaiproject.springframework.orm.hibernate.impl.AdditionalHibernateMappingsImpl;
import org.sakaiproject.site.api.SiteService;
import org.sakaiproject.tool.api.SessionManager;
import org.sakaiproject.tool.api.ToolManager;
@@ -53,7 +54,9 @@ public abstract class SakaiTestConfiguration {
@Autowired protected Environment environment;
- protected abstract AdditionalHibernateMappings getAdditionalHibernateMappings();
+ protected AdditionalHibernateMappings getAdditionalHibernateMappings() {
+ return new AdditionalHibernateMappingsImpl();
+ }
@Bean(name = "org.sakaiproject.springframework.orm.hibernate.GlobalSessionFactory")
public SessionFactory sessionFactory() throws IOException {