Commit 8302915f authored by Ate Douma

CMS7-7024: more appropriate and generic renaming of DocumentType to ContentType

- also add/fix ContentType.isDocumentType() and .isCompoundType()
- also expose the derived ContentTypes instead of only the explicitly defined ones
parent 088fba59
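
As a call-site illustration of the rename (not part of the commit itself), the following minimal sketch relies only on what the diff below shows: HippoContentTypeService.getContentTypes() now returns a ContentTypesCache where getDocumentTypes() previously returned a DocumentTypesCache. The surrounding class is invented for illustration.

import javax.jcr.RepositoryException;

// Minimal sketch of the renamed service call; the wrapper class is illustrative only.
public class ContentTypeServiceCaller {

    public static ContentTypesCache loadContentTypes(HippoContentTypeService service)
            throws RepositoryException {
        // before this commit: DocumentTypesCache types = service.getDocumentTypes();
        return service.getContentTypes();
    }
}
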
@@ -28,10 +28,10 @@ import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 /**
- * Implements an effective DocumentTypeImpl cache of all DocumentTypes and aggregated versions thereof, using a bit set for storing the
- * information about participating DocumentTypes in a set.
+ * Implements an effective ContentTypeImpl cache of all ContentTypes and aggregated versions thereof, using a bit set for storing the
+ * information about participating ContentTypes in a set.
  */
-public class AggregatedDocumentTypesCache {
+public class AggregatedContentTypesCache {
     /**
      * constant for bits-per-word
@@ -60,7 +60,7 @@ public class AggregatedDocumentTypesCache {
     /**
      * cache of pre-built aggregations of node types
      */
-    private final HashMap<Key, DocumentTypeImpl> aggregates;
+    private final HashMap<Key, ContentTypeImpl> aggregates;
     /**
      * A lookup table for bit numbers for a given name.
@@ -79,9 +79,9 @@ public class AggregatedDocumentTypesCache {
     /**
      * Creates a new bitset effective node type cache
      */
-    public AggregatedDocumentTypesCache() {
+    public AggregatedContentTypesCache() {
         sortedKeys = new HashMap<Integer, TreeSet<Key>>();
-        aggregates = new HashMap<Key, DocumentTypeImpl>();
+        aggregates = new HashMap<Key, ContentTypeImpl>();
     }
     public Key getKey(String name) {
@@ -92,7 +92,7 @@ public class AggregatedDocumentTypesCache {
         return new Key(names);
     }
-    public DocumentTypeImpl put(DocumentTypeImpl dt) {
+    public ContentTypeImpl put(ContentTypeImpl dt) {
         return put(getKey(dt.getAggregatedTypes()), dt);
     }
@@ -100,8 +100,8 @@ public class AggregatedDocumentTypesCache {
         return aggregates.keySet();
     }
-    public DocumentTypeImpl put(Key key, DocumentTypeImpl dt) {
-        DocumentTypeImpl existing = get(key);
+    public ContentTypeImpl put(Key key, ContentTypeImpl dt) {
+        ContentTypeImpl existing = get(key);
         if (existing != null) {
             // don't overwrite an existing element, return what already was stored
             return existing;
@@ -157,15 +157,15 @@ public class AggregatedDocumentTypesCache {
         return aggregates.containsKey(key);
     }
-    public DocumentTypeImpl get(Key key) {
+    public ContentTypeImpl get(Key key) {
         return aggregates.get(key);
     }
-    public DocumentTypeImpl get(String name) {
+    public ContentTypeImpl get(String name) {
         return aggregates.get(getKey(name));
     }
-    public DocumentTypeImpl get(Set<String> names) {
+    public ContentTypeImpl get(Set<String> names) {
         return aggregates.get(getKey(names));
    }
......
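
The class javadoc above describes the caching idea: every participating type name maps to a bit position, and an aggregate of types is keyed by the union of those bits. The standalone sketch below illustrates only that keying scheme; the real AggregatedContentTypesCache additionally keeps keys sorted and stores ContentTypeImpl aggregates, and the class, method, and example names here are illustrative.

import java.util.BitSet;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

// Standalone illustration of a bitset-keyed aggregate cache; names are illustrative.
class BitsetKeyedCache<V> {

    private final Map<String, Integer> bitPositions = new HashMap<>();
    private final Map<BitSet, V> aggregates = new HashMap<>();

    // assign (or look up) the bit position for a single type name
    private int bitFor(String name) {
        return bitPositions.computeIfAbsent(name, n -> bitPositions.size());
    }

    // the key of an aggregate is the union of the bit positions of its type names
    BitSet keyFor(Set<String> names) {
        BitSet key = new BitSet();
        for (String name : names) {
            key.set(bitFor(name));
        }
        return key;
    }

    // mirrors the put(...) above: never overwrite, keep what is already stored
    V put(Set<String> names, V aggregate) {
        return aggregates.merge(keyFor(names), aggregate, (existing, added) -> existing);
    }

    V get(Set<String> names) {
        return aggregates.get(keyFor(names));
    }

    public static void main(String[] args) {
        BitsetKeyedCache<String> cache = new BitsetKeyedCache<>();
        cache.put(Set.of("hippostd:relaxed", "test:test"), "aggregate of relaxed+test");
        // same set of names, regardless of order, resolves to the same key
        System.out.println(cache.get(Set.of("test:test", "hippostd:relaxed")));
    }
}
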
@@ -22,7 +22,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-public class DocumentTypeFieldImpl extends Sealable implements DocumentTypeField {
+public class ContentTypeFieldImpl extends Sealable implements ContentTypeField {
     private final String definingType;
     private final String name;
@@ -49,7 +49,7 @@ public class DocumentTypeFieldImpl extends Sealable implements DocumentTypeField
         fieldProperties = Collections.unmodifiableMap(fieldProperties);
     }
-    public DocumentTypeFieldImpl(String definingType, String name, String fieldType, String itemType) {
+    public ContentTypeFieldImpl(String definingType, String name, String fieldType, String itemType) {
         this.definingType = definingType;
         this.name = name;
         this.fieldType = fieldType;
@@ -57,7 +57,7 @@ public class DocumentTypeFieldImpl extends Sealable implements DocumentTypeField
         this.propertyField = true;
     }
-    public DocumentTypeFieldImpl(String definingType, String name, String fieldType) {
+    public ContentTypeFieldImpl(String definingType, String name, String fieldType) {
         this.definingType = definingType;
         this.name = name;
         this.fieldType = fieldType;
@@ -65,7 +65,7 @@ public class DocumentTypeFieldImpl extends Sealable implements DocumentTypeField
         this.propertyField = false;
     }
-    public DocumentTypeFieldImpl(EffectiveNodeTypeProperty property) {
+    public ContentTypeFieldImpl(EffectiveNodeTypeProperty property) {
         this.definingType = property.getDefiningType();
         this.nti = property;
         this.primaryField = false;
@@ -81,7 +81,7 @@ public class DocumentTypeFieldImpl extends Sealable implements DocumentTypeField
         this.ordered = false;
     }
-    public DocumentTypeFieldImpl(EffectiveNodeTypeChild child) {
+    public ContentTypeFieldImpl(EffectiveNodeTypeChild child) {
         this.definingType = child.getDefiningType();
         this.nti = child;
         this.primaryField = false;
@@ -97,7 +97,7 @@ public class DocumentTypeFieldImpl extends Sealable implements DocumentTypeField
         this.ordered = false;
     }
-    public DocumentTypeFieldImpl(DocumentTypeFieldImpl other) {
+    public ContentTypeFieldImpl(ContentTypeFieldImpl other) {
         this.definingType = other.definingType;
         this.nti = other.nti;
         this.primaryField = other.primaryField;
......
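
For orientation, the two explicit constructors shown above differ in what they describe: the four-argument form produces a property-backed field, the three-argument form a child-node-backed one (see the propertyField assignments in the surrounding hunks). The snippet below is an invented illustration of those two signatures; the defining type, field names, and field types are assumptions, not values from this commit.

// Invented example values; only the constructor signatures come from the diff above.
ContentTypeFieldImpl propertyField =
        new ContentTypeFieldImpl("test:example", "test:title", "String", "String");   // propertyField = true

ContentTypeFieldImpl childField =
        new ContentTypeFieldImpl("test:example", "test:body", "hippostd:html");       // propertyField = false
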
@@ -30,7 +30,7 @@ public class HippoContentTypeService implements ContentTypeService {
     private Session serviceSession;
     private EffectiveNodeTypesCache entCache;
-    private DocumentTypesCache dtCache;
+    private ContentTypesCache ctCache;
     private EventListener nodeTypesChangeListener = new EventListener() {
         @Override
@@ -38,18 +38,18 @@ public class HippoContentTypeService implements ContentTypeService {
             synchronized (HippoContentTypeService.this) {
                 // delete caches to be rebuild again on next invocation
                 entCache = null;
-                dtCache = null;
+                ctCache = null;
             }
         }
     };
-    private EventListener documentTypesChangeListener = new EventListener() {
+    private EventListener contentTypesChangeListener = new EventListener() {
         @Override
         public void onEvent(final EventIterator events) {
-            // TODO: make it more finegrained by only reacting to changes of 'committed' document types
+            // TODO: make it more finegrained by only reacting to changes of 'committed' document types?
             synchronized (HippoContentTypeService.this) {
                 // delete caches to be rebuild again on next invocation
-                dtCache = null;
+                ctCache = null;
             }
         }
     };
@@ -62,20 +62,20 @@ public class HippoContentTypeService implements ContentTypeService {
                 Event.NODE_ADDED|Event.NODE_REMOVED|Event.NODE_MOVED|Event.PROPERTY_ADDED|Event.PROPERTY_CHANGED|Event.PROPERTY_REMOVED,
                 "/jcr:system/jcr:nodeTypes", true, null, null, false);
-        // register our documentTypesChangeListener
-        serviceSession.getWorkspace().getObservationManager().addEventListener(documentTypesChangeListener,
+        // register our contentTypesChangeListener
+        serviceSession.getWorkspace().getObservationManager().addEventListener(contentTypesChangeListener,
                 Event.NODE_ADDED|Event.NODE_REMOVED|Event.NODE_MOVED|Event.PROPERTY_ADDED|Event.PROPERTY_CHANGED|Event.PROPERTY_REMOVED,
                 "/hippo:namespaces", true, null, null, false);
     }
     public synchronized void shutdown() {
         try {
-            serviceSession.getWorkspace().getObservationManager().removeEventListener(documentTypesChangeListener);
+            serviceSession.getWorkspace().getObservationManager().removeEventListener(contentTypesChangeListener);
             serviceSession.getWorkspace().getObservationManager().removeEventListener(nodeTypesChangeListener);
         } catch (RepositoryException e) {
             // ignore
         }
-        dtCache = null;
+        ctCache = null;
         entCache = null;
         serviceSession = null;
     }
@@ -90,11 +90,11 @@ public class HippoContentTypeService implements ContentTypeService {
     }
     @Override
-    public synchronized DocumentTypesCache getDocumentTypes() throws RepositoryException {
-        if (dtCache == null) {
-            dtCache = new DocumentTypesCache(serviceSession, getEffectiveNodeTypes());
+    public synchronized ContentTypesCache getContentTypes() throws RepositoryException {
+        if (ctCache == null) {
+            ctCache = new ContentTypesCache(serviceSession, getEffectiveNodeTypes());
             // TODO: check if not already changed again
         }
-        return dtCache;
+        return ctCache;
     }
 }
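
The listener registrations above follow the standard JCR observation pattern: watch a subtree and drop the cached model whenever any event arrives, so the next getContentTypes() or getEffectiveNodeTypes() call rebuilds it. Below is a self-contained sketch of that pattern with illustrative names; it is not the service's actual code.

import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.observation.Event;
import javax.jcr.observation.EventIterator;
import javax.jcr.observation.EventListener;
import javax.jcr.observation.ObservationManager;

// Sketch of cache invalidation via JCR observation; class and field names are illustrative.
public class InvalidatingCacheHolder {

    private volatile Object cachedModel;

    public void watch(Session session, String path) throws RepositoryException {
        ObservationManager observationManager = session.getWorkspace().getObservationManager();
        EventListener invalidator = new EventListener() {
            @Override
            public void onEvent(EventIterator events) {
                // coarse-grained: any change under the watched path clears the whole cache
                cachedModel = null;
            }
        };
        observationManager.addEventListener(invalidator,
                Event.NODE_ADDED | Event.NODE_REMOVED | Event.NODE_MOVED
                        | Event.PROPERTY_ADDED | Event.PROPERTY_CHANGED | Event.PROPERTY_REMOVED,
                path, true, null, null, false);
    }
}
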
@@ -5,9 +5,10 @@
   "prefix" : null,
   "superTypes" : [ "hippo:container", "hippostd:container", "nt:base" ],
   "aggregatedTypes" : [ "hippostd:relaxed", "test:test" ],
-  "compound" : false,
+  "documentType" : false,
+  "compoundType" : false,
   "mixin" : false,
-  "template" : false,
+  "templateType" : false,
   "cascadeValidate" : false,
   "fields" : {
     "test:title" : {
......
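
The renamed keys in this test fixture mirror the accessors named in the commit message: "documentType" corresponds to ContentType.isDocumentType() and "compoundType" to ContentType.isCompoundType(). As an illustration only, a consumer holding the ContentType serialized above would read the flags as follows; how that instance is obtained is not shown in this diff, so the variable below is assumed.

// 'type' stands for the ContentType behind the fixture entry above (assumed, not from the diff).
boolean isDocument = type.isDocumentType();   // "documentType" : false
boolean isCompound = type.isCompoundType();   // "compoundType" : false
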