Commit 237ec3ba authored by Unico Hommes's avatar Unico Hommes

REPO-1281 merge changes from trunk

parents e7b88136 b2885e62
......@@ -45,6 +45,7 @@ api/src/main/java/org/onehippo/repository/security/domain/DomainRuleExtension.ja
api/src/main/java/org/onehippo/repository/security/domain/FacetRule.java -text
api/src/main/java/org/onehippo/repository/update/BaseNodeUpdateVisitor.java -text
api/src/main/java/org/onehippo/repository/update/NodeUpdateVisitor.java -text
api/src/main/java/org/onehippo/repository/update/NodeUpdateVisitorContext.java -text svneol=unset#text/plain
api/src/main/java/org/onehippo/repository/update/NodeUpdaterService.java -text
api/src/main/java/org/onehippo/repository/util/FileContentResourceLoader.java -text
api/src/main/java/org/onehippo/repository/util/JcrConstants.java -text
......
......@@ -29,6 +29,7 @@ public abstract class BaseNodeUpdateVisitor implements NodeUpdateVisitor {
protected Logger log;
protected Map<String, Object> parametersMap;
protected NodeUpdateVisitorContext visitorContext;
public void setLogger(Logger log) {
this.log = log;
......@@ -38,6 +39,10 @@ public abstract class BaseNodeUpdateVisitor implements NodeUpdateVisitor {
this.parametersMap = parametersMap;
}
public void setVisitorContext(NodeUpdateVisitorContext visitorContext) {
this.visitorContext = visitorContext;
}
@Override
public void initialize(Session session) throws RepositoryException {
}
......
/*
* Copyright 2015-2015 Hippo B.V. (http://www.onehippo.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onehippo.repository.update;
/**
 * Provided as an execution context to a {@link NodeUpdateVisitor} instance in order to allow a
 * {@link NodeUpdateVisitor} to manually update the skipped/updated/failed node counts while being
 * executed on a single node iteration (e.g., in the <code>#doUpdate(node)</code> method of a
 * groovy updater script).
 */
public interface NodeUpdateVisitorContext {
/**
 * Manually report/increment the skipped count in the current execution.
 *
 * @param path the JCR path of the node being reported as skipped
 */
void reportSkipped(String path);
/**
 * Manually report/increment the updated count in the current execution.
 * <P>
 * <EM>WARNING: this invocation may trigger committing or reverting the batch.</EM>
 * </P>
 *
 * @param path the JCR path of the node being reported as updated
 */
void reportUpdated(String path);
/**
 * Manually report/increment the failed count in the current execution.
 *
 * @param path the JCR path of the node being reported as failed
 */
void reportFailed(String path);
}
......@@ -20,7 +20,7 @@ import javax.jcr.Node;
import org.onehippo.cms7.services.SingletonService;
/**
* The node updater service runs registered &amp; not-net-completed updater visitors.
* The node updater service runs registered &amp; not-yet-completed updater visitors.
* It is intended to be used by code that assumes the content to have been updated.
* <p>
* Since updater visitors run asynchronously, potentially taking a long time when
......
......@@ -70,6 +70,11 @@
<groupId>org.apache.jackrabbit</groupId>
<artifactId>jackrabbit-spi-commons</artifactId>
</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
</dependency>
</dependencies>
......
......@@ -110,6 +110,7 @@ public class UpdaterExecutor implements EventListener {
if (updater instanceof BaseNodeUpdateVisitor) {
((BaseNodeUpdateVisitor) updater).setLogger(getLogger());
((BaseNodeUpdateVisitor) updater).setParametersMap(jsonToParamsMap(updaterInfo.getParameters()));
((BaseNodeUpdateVisitor) updater).setVisitorContext(new BaseNodeUpdateVisitorContext());
}
updater.initialize(session);
report.start();
......@@ -428,11 +429,21 @@ public class UpdaterExecutor implements EventListener {
private void commitBatchIfNeeded() throws RepositoryException {
final boolean batchCompleted = report.getUpdateCount() != lastUpdateCount && report.getUpdateCount() % updaterInfo.getBatchSize() == 0;
if (batchCompleted) {
if (getLogger().isDebugEnabled()) {
getLogger().debug("batch unit completion indicated. updateCount: {}, batchSize: {}",
report.getUpdateCount(), updaterInfo.getBatchSize());
}
}
if (batchCompleted || report.isFinished()) {
if (updaterInfo.isDryRun()) {
debug("discarding all pending changes currently recorded in this batch unit.");
session.refresh(false);
} else {
try {
debug("saving all pending changes currently recorded in this batch unit.");
session.save();
} catch (RepositoryException e) {
error("Failed to save session", e);
......@@ -443,6 +454,7 @@ public class UpdaterExecutor implements EventListener {
report.startBatch();
saveReport();
}
if (batchCompleted) {
lastUpdateCount = report.getUpdateCount();
throttle(updaterInfo.getThrottle());
......@@ -593,4 +605,32 @@ public class UpdaterExecutor implements EventListener {
}
return JSONObject.fromObject(paramsInJson);
}
/**
 * Default {@code NodeUpdateVisitorContext} implementation handed to {@code BaseNodeUpdateVisitor}
 * instances; delegates manual skip/update/fail reporting to the enclosing executor's report.
 */
class BaseNodeUpdateVisitorContext implements NodeUpdateVisitorContext {
BaseNodeUpdateVisitorContext() {
}
@Override
public void reportSkipped(String path) {
// record the path as skipped in the shared execution report
report.skipped(path);
}
@Override
public void reportUpdated(String path) {
report.updated(path);
try {
// an updated count increment may complete a batch unit, triggering a save (or refresh on dry run)
commitBatchIfNeeded();
} catch (RepositoryException e) {
// log.error() instead of error() on purpose: report already saved
log.error(e.getClass().getName() + ": " + e.getMessage(), e);
}
}
@Override
public void reportFailed(String path) {
// record the path as failed in the shared execution report
report.failed(path);
}
}
}
......@@ -69,9 +69,14 @@ public class DefaultContentHandler extends DefaultHandler {
try {
SAXParserFactory factory = SAXParserFactory.newInstance();
factory.setNamespaceAware(true);
factory.setFeature(
"http://xml.org/sax/features/namespace-prefixes", false);
factory.setFeature("http://xml.org/sax/features/namespace-prefixes", false);
factory.setValidating(false);
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
factory.setFeature("http://xml.org/sax/features/validation", false);
factory.setFeature("http://apache.org/xml/features/nonvalidating/load-dtd-grammar", false);
factory.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
factory.setFeature("http://xml.org/sax/features/external-general-entities", false);
SAXParser parser = factory.newSAXParser();
// JCR-984 & JCR-985: Log the name of the SAXParser class
logger.debug("Using SAX parser " + parser.getClass().getName());
......
......@@ -84,6 +84,7 @@
<!-- Project Dependencies -->
<commons-io.version>2.4</commons-io.version>
<commons-lang.version>2.6</commons-lang.version>
<commons-collections.version>3.2.2</commons-collections.version>
<xmlbeans-java6.version>2.3.0</xmlbeans-java6.version>
<pdfbox.version>1.8.8</pdfbox.version>
<quartz.version>2.2.1</quartz.version>
......@@ -213,6 +214,11 @@
<artifactId>commons-io</artifactId>
<version>${commons-io.version}</version>
</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
<version>${commons-collections.version}</version>
</dependency>
<dependency>
<groupId>org.apache.pdfbox</groupId>
<artifactId>pdfbox</artifactId>
......
......@@ -182,11 +182,8 @@ public abstract class RepositoryTestCase {
protected void removeNode(final String path) throws RepositoryException {
while (session != null && session.nodeExists(path)) {
try {
session.getNode(path).remove();
session.save();
} catch (ItemNotFoundException ignore) {
}
session.getNode(path).remove();
session.save();
}
}
......@@ -360,5 +357,5 @@ public abstract class RepositoryTestCase {
}
return ((HippoWorkspace) session.getWorkspace()).getHierarchyResolver().getNode(session.getRootNode(), path);
}
}
......@@ -15,6 +15,7 @@
*/
package org.onehippo.repository.documentworkflow;
import java.util.Calendar;
import java.util.Date;
import javax.jcr.Node;
......@@ -37,6 +38,7 @@ public class WorkflowRequest extends Request {
/**
 * Adds a new publication workflow request node under the given handle node.
 * <p>
 * The parent is checked out first if necessary, the request is stamped with its creation
 * date, and it is made referenceable so it can be targeted by identifier.
 *
 * @param parent the handle node under which the request node is created
 * @return the newly created request node
 * @throws RepositoryException on repository errors
 */
private static Node newRequestNode(Node parent) throws RepositoryException {
    JcrUtils.ensureIsCheckedOut(parent);
    final Node request = parent.addNode(
            HippoStdPubWfNodeType.HIPPO_REQUEST,
            HippoStdPubWfNodeType.NT_HIPPOSTDPUBWF_REQUEST);
    request.setProperty(HippoStdPubWfNodeType.HIPPOSTDPUBWF_CREATION_DATE, Calendar.getInstance());
    request.addMixin(JcrConstants.MIX_REFERENCEABLE);
    return request;
}
......
......@@ -20,6 +20,7 @@ import java.rmi.RemoteException;
import javax.jcr.Node;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.SimpleCredentials;
import org.hippoecm.repository.HippoStdNodeType;
import org.hippoecm.repository.api.HippoNodeType;
......@@ -101,9 +102,7 @@ public class CopyVariantTask extends AbstractDocumentTask {
if (saveNeeded) {
workflowSession.save();
if (dm.hasMultipleDocumentVariants(getTargetState())) {
targetDoc.getNode(workflowSession).remove();
workflowSession.save();
throw new WorkflowException("Concurrent workflow action detected");
deleteDuplicateVariant(workflowSession, dm, targetDoc, getTargetState());
}
}
......@@ -112,4 +111,29 @@ public class CopyVariantTask extends AbstractDocumentTask {
return null;
}
/**
 * Remove accidentally duplicated (or even more!) same state variant.
 * <p>
 * This method must be static synchronized and must use a separate impersonated session to prevent
 * repository internal state corruption when two (or more!) threads do this concurrently for the
 * same variant handle (corruption likely occurring because the document variants are
 * same-name-siblings). For further reference see: REPO-1386
 *
 * @throws WorkflowException when this thread still finds a duplicate same state variant,
 *         after having deleted the variant
 */
private static synchronized void deleteDuplicateVariant(Session session, DocumentHandle dm, DocumentVariant variant, String state)
        throws RepositoryException, WorkflowException {
    // impersonate a dedicated session so the removal is isolated from the caller's session state
    final Session deleteSession = session.impersonate(new SimpleCredentials(session.getUserID(), new char[0]));
    boolean duplicateDetected = false;
    try {
        duplicateDetected = dm.hasMultipleDocumentVariants(state);
        if (duplicateDetected) {
            variant.getNode(deleteSession).remove();
            deleteSession.save();
        }
    } finally {
        deleteSession.logout();
    }
    if (duplicateDetected) {
        // signal the concurrent action only after the impersonated session has been released
        throw new WorkflowException("Concurrent workflow action detected");
    }
}
}
/*
* Copyright 2013 Hippo B.V. (http://www.onehippo.com)
* Copyright 2013-2015 Hippo B.V. (http://www.onehippo.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
......@@ -128,7 +128,7 @@ public class ScheduleWorkflowTask extends AbstractDocumentTask {
workflow.depublish();
}
} catch (RemoteException | WorkflowException | RepositoryException e) {
log.error("Execution of scheduled workflow operation {} on {} failed", methodName, subjectPath, e);
log.error("Execution of scheduled workflow operation " + methodName + " on " + subjectPath + " failed", e);
} finally {
if (session != null) {
session.logout();
......
......@@ -132,10 +132,6 @@ public class VersionRestoreToTask extends AbstractDocumentTask {
if (version != null) {
clear(targetNode);
restore(targetNode, version.getFrozenNode());
if (targetNode.isNodeType(HippoStdPubWfNodeType.HIPPOSTDPUBWF_DOCUMENT)) {
final Calendar cal = Calendar.getInstance();
targetNode.setProperty(HippoStdPubWfNodeType.HIPPOSTDPUBWF_LAST_MODIFIED_DATE, cal);
}
targetNode.save();
return new DocumentVariant(targetNode);
}
......
......@@ -36,3 +36,4 @@
- hippostdpubwf:username (String)
- hippostdpubwf:document (Reference)
- hippostdpubwf:refId (String)
- hippostdpubwf:creationDate (Date)
......@@ -25,6 +25,7 @@ import javax.jcr.SimpleCredentials;
import org.hippoecm.repository.api.HippoWorkspace;
import org.hippoecm.repository.api.WorkflowManager;
import org.hippoecm.repository.util.NodeIterable;
import org.junit.Ignore;
import org.junit.Test;
import org.onehippo.repository.documentworkflow.DocumentWorkflow;
import org.slf4j.Logger;
......@@ -48,7 +49,7 @@ public class DocumentWorkflowConcurrencyTest extends AbstractDocumentWorkflowInt
}
private void doConcurrentPublishFailsOnMultiplePublishedVariants() throws Exception {
final Session session1 = session.impersonate(new SimpleCredentials("admin", new char[] {}));
final Session session1 = session.impersonate(new SimpleCredentials("admin", new char[]{}));
final Session session2 = session.impersonate(new SimpleCredentials("admin", new char[]{}));
try {
final Node handle1 = session1.getNode(handle.getPath());
......
......@@ -16,6 +16,7 @@
package org.onehippo.repository.documentworkflow.integration;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
......@@ -30,6 +31,7 @@ import org.onehippo.repository.documentworkflow.DocumentWorkflow;
import org.onehippo.repository.events.HippoWorkflowEvent;
import static org.hippoecm.repository.HippoStdNodeType.PUBLISHED;
import static org.hippoecm.repository.HippoStdPubWfNodeType.HIPPOSTDPUBWF_CREATION_DATE;
import static org.hippoecm.repository.HippoStdPubWfNodeType.HIPPOSTDPUBWF_REASON;
import static org.hippoecm.repository.HippoStdPubWfNodeType.HIPPOSTDPUBWF_TYPE;
import static org.hippoecm.repository.HippoStdPubWfNodeType.REJECTED;
......@@ -195,6 +197,31 @@ public class DocumentWorkflowRequestTest extends AbstractDocumentWorkflowIntegra
assertFalse("Request still on handle", handle.hasNode(HIPPO_REQUEST));
}
@Test
public void testRequestCreationDate() throws Exception {
    DocumentWorkflow workflow = getDocumentWorkflow(handle);
    // Boundary timestamp taken just before the request is created.
    final Calendar beforeCreation = Calendar.getInstance();
    // Perform a request publication
    workflow.requestPublication();
    // Get the request
    Node request = handle.getNode(HIPPO_REQUEST);
    assertTrue(request.hasProperty(HIPPOSTDPUBWF_CREATION_DATE));
    // Boundary timestamp taken just after the request was created.
    final Calendar afterCreation = Calendar.getInstance();
    final Calendar creationDate = request.getProperty(HIPPOSTDPUBWF_CREATION_DATE).getDate();
    // Use non-strict comparisons: with millisecond resolution the creation date may coincide
    // exactly with either boundary timestamp, which would make strict after()/before() flaky.
    assertFalse(creationDate.before(beforeCreation));
    assertFalse(creationDate.after(afterCreation));
    // Cancel the request
    workflow.rejectRequest(request.getIdentifier(), "Testing");
    // The creation date must survive the state change of the request.
    final Calendar checkDate = handle.getNode(HIPPO_REQUEST).getProperty(HIPPOSTDPUBWF_CREATION_DATE).getDate();
    assertEquals(creationDate, checkDate);
}
public static class HippoEventListener {
private Collection<String> actions = new ArrayList<>();
@Subscribe
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment