Commit 088fba59 authored by Ate Douma

CMS7-7024: refactoring to better encapsulate and modularize the types loading process

parent ac0ec09d
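
The heart of the refactoring is an ownership inversion: `AggregatedDocumentTypesCache` no longer wraps a `DocumentTypesCache`; instead `DocumentTypesCache` owns the aggregated cache and exposes it through `getAdtCache()`, so callers like `DocumentTypeImpl.resolveFields` only need the one entry point. A minimal sketch of the before/after shape (only the accessors visible in this diff are real; the surrounding class bodies are assumed for illustration):

```java
// Before this commit: the aggregated cache held a back-reference.
class AggregatedDocumentTypesCache {
    private final DocumentTypesCache dtCache;

    AggregatedDocumentTypesCache(DocumentTypesCache dtCache) { this.dtCache = dtCache; }
    DocumentTypesCache getDocumentTypesCache() { return dtCache; }
}

// After this commit: DocumentTypesCache owns the aggregated cache instead.
// getAdtCache() is referenced by the diff below; its exact declaration and
// the field initialization here are assumptions.
class DocumentTypesCache {
    private final AggregatedDocumentTypesCache adtCache = new AggregatedDocumentTypesCache();

    AggregatedDocumentTypesCache getAdtCache() { return adtCache; }
}
```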
@@ -52,9 +52,6 @@ public class AggregatedDocumentTypesCache {
private final Lock read = readWriteLock.readLock();
private final Lock write = readWriteLock.writeLock();
private final DocumentTypesCache dtCache;
/**
* An ordered set of the keys. This is used for {@link #findBest(Key)}.
*/
@@ -82,16 +79,11 @@ public class AggregatedDocumentTypesCache {
/**
* Creates a new bitset-based aggregated document types cache (following the bitset effective node type cache pattern)
*/
public AggregatedDocumentTypesCache(DocumentTypesCache dtCache) {
this.dtCache = dtCache;
public AggregatedDocumentTypesCache() {
sortedKeys = new HashMap<Integer, TreeSet<Key>>();
aggregates = new HashMap<Key, DocumentTypeImpl>();
}
public DocumentTypesCache getDocumentTypesCache() {
return dtCache;
}
public Key getKey(String name) {
return new Key(name);
}
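
For context on the `sortedKeys` structure the constructor still initializes: in the bitset cache pattern referenced by the constructor javadoc, keys are bucketed by aggregate size so that `findBest(Key)` can search from the largest cached aggregates downward. A hypothetical sketch of such a lookup; `findBest` itself is not shown in this diff, and `Key.size()` and `Key.contains()` are assumed helpers:

```java
// Hypothetical sketch (not part of this commit): find the largest cached
// aggregate that is fully contained in the requested key.
public DocumentTypeImpl findBest(Key key) {
    for (int size = key.size(); size > 0; size--) {      // assumed Key.size()
        TreeSet<Key> bucket = sortedKeys.get(size);
        if (bucket == null) {
            continue;
        }
        for (Key candidate : bucket) {
            if (key.contains(candidate)) {               // assumed subset test
                return aggregates.get(candidate);
            }
        }
    }
    return null;
}
```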
@@ -287,18 +287,18 @@ public class DocumentTypeImpl extends Sealable implements DocumentType {
return true;
}
public void resolveFields(AggregatedDocumentTypesCache adtCache) {
public void resolveFields(DocumentTypesCache dtCache) {
checkSealed();
Set<String> ignoredFields = new HashSet<String>();
mergeInheritedFields(adtCache);
resolvePropertiesToFields(adtCache, ignoredFields);
resolveChildrenToFields(adtCache, ignoredFields);
resolveFieldsToResidualItems(adtCache);
mergeInheritedFields(dtCache);
resolvePropertiesToFields(dtCache, ignoredFields);
resolveChildrenToFields(dtCache, ignoredFields);
resolveFieldsToResidualItems(dtCache);
}
private void mergeInheritedFields(AggregatedDocumentTypesCache adtCache) {
private void mergeInheritedFields(DocumentTypesCache dtCache) {
for (String s : superTypes) {
DocumentTypeImpl sdt = adtCache.get(s);
DocumentTypeImpl sdt = dtCache.getAdtCache().get(s);
for (Map.Entry<String, DocumentTypeField> entry : sdt.fields.entrySet()) {
if (!fields.containsKey(entry.getKey())) {
fields.put(entry.getKey(), entry.getValue());
@@ -307,7 +307,7 @@ public class DocumentTypeImpl extends Sealable implements DocumentType {
}
}
private void resolvePropertiesToFields(AggregatedDocumentTypesCache adtCache, Set<String> ignoredFields) {
private void resolvePropertiesToFields(DocumentTypesCache dtCache, Set<String> ignoredFields) {
for (Map.Entry<String,List<EffectiveNodeTypeProperty>> entry : ent.getProperties().entrySet()) {
if (!"*".equals(entry.getKey())) {
DocumentTypeFieldImpl dft = (DocumentTypeFieldImpl)fields.get(entry.getKey());
@@ -394,7 +394,7 @@ public class DocumentTypeImpl extends Sealable implements DocumentType {
}
}
private void resolveChildrenToFields(AggregatedDocumentTypesCache adtCache, Set<String> ignoredFields) {
private void resolveChildrenToFields(DocumentTypesCache dtCache, Set<String> ignoredFields) {
for (Map.Entry<String,List<EffectiveNodeTypeChild>> entry : ent.getChildren().entrySet()) {
if (!"*".equals(entry.getKey())) {
DocumentTypeFieldImpl dft = (DocumentTypeFieldImpl)fields.get(entry.getKey());
@@ -429,10 +429,10 @@ public class DocumentTypeImpl extends Sealable implements DocumentType {
}
else {
// first check predefined document types cache: it might contain an aggregated (with optional mixins) document type
DocumentTypeImpl ct = adtCache.getDocumentTypesCache().getType(dft.getItemType());
DocumentTypeImpl ct = dtCache.getType(dft.getItemType());
if (ct == null) {
// not an aggregated document type: get it from the aggregated document type cache (which also contains every non-aggregated type)
ct = adtCache.get(dft.getItemType());
ct = dtCache.getAdtCache().get(dft.getItemType());
}
if (ct == null) {
log.error("Effective NodeType {} defines a child node named {} with corresponding field in Document Type {} which has unresolved type {}. "
@@ -508,7 +508,7 @@ public class DocumentTypeImpl extends Sealable implements DocumentType {
}
}
private void resolveFieldsToResidualItems(AggregatedDocumentTypesCache adtCache) {
private void resolveFieldsToResidualItems(DocumentTypesCache dtCache) {
for (Iterator<String> fieldNameIterator = fields.keySet().iterator(); fieldNameIterator.hasNext(); ) {
DocumentTypeFieldImpl dft = (DocumentTypeFieldImpl)fields.get(fieldNameIterator.next());
if (dft.isSealed()) {
@@ -553,10 +553,10 @@ public class DocumentTypeImpl extends Sealable implements DocumentType {
}
else {
// first check predefined document types cache: it might contain an aggregated (with optional mixins) document type
DocumentTypeImpl ct = adtCache.getDocumentTypesCache().getType(dft.getItemType());
DocumentTypeImpl ct = dtCache.getType(dft.getItemType());
if (ct == null) {
// not an aggregated document type: get it from the aggregated document type cache (which also contains every non-aggregated type)
ct = adtCache.get(dft.getItemType());
ct = dtCache.getAdtCache().get(dft.getItemType());
}
if (ct == null) {
log.error("Document Type {} defines node child field named {} with unresolved type {}. "
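
Both `resolveChildrenToFields` and `resolveFieldsToResidualItems` above now share the same two-step lookup order. Condensed into one hedged helper for clarity (`resolveType` is a hypothetical name, not a method introduced by this commit):

```java
// Sketch of the lookup order used in the two hunks above.
private DocumentTypeImpl resolveType(DocumentTypesCache dtCache, String itemType) {
    // 1. the predefined document types cache may hold an aggregated
    //    (with optional mixins) variant of the type
    DocumentTypeImpl ct = dtCache.getType(itemType);
    if (ct == null) {
        // 2. otherwise fall back to the aggregated cache, which also
        //    contains every non-aggregated type
        ct = dtCache.getAdtCache().get(itemType);
    }
    return ct; // null means the type is unresolved and gets logged as an error
}
```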
@@ -16,13 +16,24 @@
package org.onehippo.cms7.services.contenttype;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import javax.jcr.PropertyType;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.Value;
import javax.jcr.nodetype.NodeDefinition;
import javax.jcr.nodetype.NodeType;
import javax.jcr.nodetype.NodeTypeIterator;
import javax.jcr.nodetype.PropertyDefinition;
class EffectiveNodeTypesCache extends Sealable implements EffectiveNodeTypes {
private static volatile long versionSequence = 0;
@@ -32,8 +43,122 @@ class EffectiveNodeTypesCache extends Sealable implements EffectiveNodeTypes {
private Map<String, EffectiveNodeTypeImpl> types = new TreeMap<String, EffectiveNodeTypeImpl>();
private SortedMap<String, Set<EffectiveNodeType>> prefixesMap;
protected Map<String, EffectiveNodeTypeImpl> getTypes() {
return types;
public EffectiveNodeTypesCache(Session serviceSession) throws RepositoryException {
loadEffectiveNodeTypes(serviceSession, true);
}
private void loadEffectiveNodeTypes(Session session, boolean allowRetry) throws RepositoryException {
try {
NodeTypeIterator nodeTypes = session.getWorkspace().getNodeTypeManager().getAllNodeTypes();
// load all jcr node types (recursively if needed)
while (nodeTypes.hasNext()) {
loadEffectiveNodeType(nodeTypes.nextNodeType());
}
}
catch (RepositoryException re) {
if (allowRetry) {
// for now, only support retrying once
loadEffectiveNodeTypes(session, false);
// a successful retry has already loaded and sealed the cache: don't rethrow
return;
}
throw re;
}
// lock down
seal();
}
private EffectiveNodeTypeImpl loadEffectiveNodeType(NodeType nodeType) throws RepositoryException {
EffectiveNodeTypeImpl ent = types.get(nodeType.getName());
if (ent == null) {
ent = new EffectiveNodeTypeImpl(nodeType.getName(), version);
ent.setMixin(nodeType.isMixin());
ent.setAbstract(nodeType.isAbstract());
ent.setOrdered(nodeType.hasOrderableChildNodes());
ent.setPrimaryItemName(nodeType.getPrimaryItemName());
types.put(ent.getName(), ent);
// ensure all super types are also loaded
for (NodeType superType : nodeType.getSupertypes()) {
ent.getSuperTypes().add(loadEffectiveNodeType(superType).getName());
}
loadChildNodeDefinitions(nodeType, ent);
loadPropertyDefinitions(nodeType, ent);
}
return ent;
}
private void loadChildNodeDefinitions(NodeType nodeType, EffectiveNodeTypeImpl ent) throws RepositoryException {
for (NodeDefinition nd : nodeType.getChildNodeDefinitions()) {
EffectiveNodeTypeChildImpl child =
// ensure child definition declaring type is also loaded
new EffectiveNodeTypeChildImpl(nd.getName(), loadEffectiveNodeType(nd.getDeclaringNodeType()).getName());
for (NodeType childType : nd.getRequiredPrimaryTypes()) {
// ensure all possible child types are also loaded
child.getRequiredPrimaryTypes().add(loadEffectiveNodeType(childType).getName());
}
if (nd.getDefaultPrimaryType() != null) {
// ensure possible primary type is also loaded
child.setDefaultPrimaryType(loadEffectiveNodeType(nd.getDefaultPrimaryType()).getName());
}
child.setMandatory(nd.isMandatory());
child.setAutoCreated(nd.isAutoCreated());
// same-name-sibling support is surfaced as a multi-valued child
child.setMultiple(nd.allowsSameNameSiblings());
child.setProtected(nd.isProtected());
// each child definition is maintained in a list by name
List<EffectiveNodeTypeChild> childList = ent.getChildren().get(child.getName());
if (childList == null) {
childList = new ArrayList<EffectiveNodeTypeChild>();
ent.getChildren().put(child.getName(), childList);
}
childList.add(child);
}
}
private void loadPropertyDefinitions(NodeType nodeType, EffectiveNodeTypeImpl ent) throws RepositoryException {
for (PropertyDefinition pd : nodeType.getPropertyDefinitions()) {
EffectiveNodeTypePropertyImpl property =
// ensure property definition declaring type is also loaded
new EffectiveNodeTypePropertyImpl(pd.getName(), loadEffectiveNodeType(pd.getDeclaringNodeType()).getName(), pd.getRequiredType());
property.setMandatory(pd.isMandatory());
property.setAutoCreated(pd.isAutoCreated());
property.setMultiple(pd.isMultiple());
property.setProtected(pd.isProtected());
String[] valueConstraints = pd.getValueConstraints();
if (valueConstraints != null) {
for (String s : valueConstraints) {
if (s != null) {
property.getValueConstraints().add(s);
}
}
}
Value[] defaultValues = pd.getDefaultValues();
if (defaultValues != null) {
for (Value value : defaultValues) {
// skip/ignore BINARY type values (unsupported)
if (value.getType() != PropertyType.BINARY) {
property.getDefaultValues().add(value.getString());
}
}
}
// each property definition is maintained in a list by name
List<EffectiveNodeTypeProperty> propertyList = ent.getProperties().get(property.getName());
if (propertyList == null) {
propertyList = new ArrayList<EffectiveNodeTypeProperty>();
ent.getProperties().put(property.getName(), propertyList);
}
propertyList.add(property);
}
}
protected void doSeal() {
@@ -73,4 +198,8 @@ class EffectiveNodeTypesCache extends Sealable implements EffectiveNodeTypes {
public SortedMap<String, Set<EffectiveNodeType>> getTypesByPrefix() {
return prefixesMap;
}
public Map<String, EffectiveNodeTypeImpl> getTypes() {
return types;
}
}
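
The new constructor makes the cache self-loading: it walks every registered node type, recursively pulls in supertypes, declaring types, and required/default child types, and then seals itself. A usage sketch, assuming a package-local caller with a JCR service session and using `hippostd:folder` purely as an example type name:

```java
// Sketch: build the cache from a JCR session and inspect one entry.
EffectiveNodeTypesCache entCache = new EffectiveNodeTypesCache(serviceSession);
EffectiveNodeTypeImpl folder = entCache.getTypes().get("hippostd:folder");
if (folder != null) {
    System.out.println(folder.getName() + " supertypes: " + folder.getSuperTypes());
}
```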
@@ -269,7 +269,7 @@ public class HippoContentTypeServiceTest extends PluginTest {
session.getRootNode().addNode("testNode", "test:test");
session.save();
t = service.getDocumentTypeForNodeByPath(session, "/testNode");
t = service.getDocumentTypes().getDocumentTypeForNodeByPath(session, "/testNode");
assertEquals(4, t.getFields().size());
assertEquals(1, t.getAggregatedTypes().size());
@@ -277,7 +277,7 @@ public class HippoContentTypeServiceTest extends PluginTest {
session.getNode("/testNode").addMixin("hippostd:relaxed");
session.save();
t = service.getDocumentTypeForNodeByPath(session, "/testNode");
t = service.getDocumentTypes().getDocumentTypeForNodeByPath(session, "/testNode");
assertEquals(5, t.getFields().size());
assertTrue(t.getFields().containsKey("test:extraField"));
assertTrue(t.getAggregatedTypes().contains("hippostd:relaxed"));
@@ -321,6 +321,8 @@ public class HippoContentTypeServiceTest extends PluginTest {
}
session.getRootNode().addNode("hippo:namespaces/test", "hipposysedit:namespace");
session.save();
// wait a bit for Jackrabbit to refresh and deliver the change events
Thread.sleep(1000);
dtCache1 = service.getDocumentTypes();
if (dtCache1.version()==dtCache2.version()) {
fail("DocumentTypes cache should have been reloaded.");